Portrait

Dmitry Abramov

Frontend Developer

Contacts

  • Phone Icon

    +375(25)693-30-85

  • Email Icon

    abrannik25@gmail.com

  • Telegram Icon

    @de4lt_dd

Skills

  • HTML5, CSS3
  • Git, GitHub
  • VS Code, VS
  • Adobe Photoshop
  • JavaScript

Languages

  • Russian
  • Belarusian
  • English (B1)

Education

Bachelor, Belarusian State University, Minsk

Code Example

                        
import requests
from bs4 import BeautifulSoup
import csv

# Output file for the scraped cards.
CSV = 'cards.csv'
# Site root (kept for building absolute links if relative hrefs appear).
HOST = 'https://mcdonalds.by/ru/'
# News listing page that gets scraped.
URL = 'https://mcdonalds.by/ru/news.html'
# Browser-like headers so the site serves the normal HTML page.
# Header names are case-insensitive per RFC 9110, but the conventional
# 'User-Agent' casing is used here (the original had 'user-Agent').
HEADERS = {'accept': 'text/html,application/xhtml+xml,application/xml',
           'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)'}


def get_html(url, params='', timeout=30):
    """Fetch *url* and return the full requests.Response object.

    Args:
        url: Page to download.
        params: Optional query-string parameters passed to requests.
        timeout: Seconds before the request is aborted (new, default 30)
            so a stalled server cannot hang the scraper forever.

    Returns:
        requests.Response — callers check .status_code and read .text.
    """
    return requests.get(url, headers=HEADERS, params=params, timeout=timeout)


def get_content(html):
    """Parse a news-listing HTML page into a list of card dicts.

    Args:
        html: Raw HTML text of the listing page.

    Returns:
        list[dict] with keys 'title' and 'link_product' for every card
        block found. Cards whose expected sub-elements are missing are
        skipped instead of crashing the whole parse (the original would
        raise AttributeError on the first malformed card).
    """
    soup = BeautifulSoup(html, 'html.parser')
    items = soup.find_all('div', class_='row mc-news-all-blocks-holder')
    cards = []
    for item in items:
        title_div = item.find(
            'div', class_='col-lg-12 col-md-12 col-sm-12 col-xs-12')
        img_holder = item.find('div', class_='mc-news-all-img-holder')
        link = img_holder.find('a') if img_holder is not None else None
        # Skip structurally broken cards rather than aborting the run.
        if title_div is None or link is None:
            continue
        cards.append(
            {
                'title': title_div.get_text(strip=True),
                'link_product': link.get('href'),
            }
        )
    return cards


def save_doc(items, path):
    """Write scraped cards to a semicolon-delimited CSV file.

    Args:
        items: Iterable of dicts with 'title' and 'link_product' keys.
        path: Destination CSV file path (overwritten on each call).

    The file is opened with encoding='utf-8': the Cyrillic header row
    would otherwise raise UnicodeEncodeError (or be garbled) on systems
    whose default locale encoding is not UTF-8, e.g. Windows cp1251.
    """
    with open(path, 'w', newline='', encoding='utf-8') as file:
        writer = csv.writer(file, delimiter=';')
        writer.writerow(['Название продукта: ', 'Ссылка: '])
        for item in items:
            writer.writerow([item['title'], item['link_product']])


def parser():
    """Interactive entry point: scrape N pages of news and save a CSV.

    Prompts for the number of pages, fetches each page, accumulates all
    cards, and writes them to CSV once at the end.

    Fixes vs. the original:
    - ``range(1, PAGE)`` parsed only PAGE-1 pages; ``PAGE + 1`` makes the
      count match what the user asked for.
    - ``save_doc`` was called inside the loop, rewriting the same file on
      every iteration; it is now called once after all pages are fetched.
    - A non-numeric answer now prints a message instead of crashing with
      a raw ValueError traceback.
    """
    try:
        pages = int(input('Количество страниц: ').strip())
    except ValueError:
        print('Error')
        return
    html = get_html(URL)
    if html.status_code == 200:
        cards = []
        # Inclusive upper bound: the user asked for `pages` pages.
        for page in range(1, pages + 1):
            print(f'Парсим страницу: {page}')
            html = get_html(URL, params={'page': page})
            cards.extend(get_content(html.text))
        save_doc(cards, CSV)
    else:
        print('Error')


# Guarded entry point: the scrape still runs when executed as a script,
# but importing this module no longer triggers network I/O.
if __name__ == '__main__':
    parser()