Help please, I'm having trouble writing to CSV. If I print all of the results below, everything works fine, but writing them to a CSV file fails:
File "C:/Users/VIK/PycharmProjects/untitled2/tests.py", line 71, in <module> write_csv(get_all_items(html)) File "C:/Users/VIK/PycharmProjects/untitled2/tests.py", line 21, in write_csv writer.writerow((data['title'], TypeError: list indices must be integers or slices, not str import requests from bs4 import BeautifulSoup import csv def request(url): """Get HTML code of any page""" r = requests.get(url) return r.text def get_pagination(html): soup = BeautifulSoup(html, 'lxml') all_page = soup.find_all('a', class_='pagination-page') last_page = all_page[-1].get('href').split('=')[1].split('&')[0] return int(last_page) def write_csv(data): with open('avito_estate.csv', 'a') as f: writer = csv.writer(f) writer.writerow((data['title'], data['href'], data['price'], data['category'])) def get_all_items(html): """Get all need items if html, argument is def request""" soup = BeautifulSoup(html, 'lxml') all_items_soup = soup.find('div', class_='catalog-list').find_all('div', class_='item_table') all_items = [] for item in all_items_soup: try: title = item.find('a', class_='item-description-title-link').get('title') except: title = '' try: href = 'https://www.avito.ru' + item.find('a', class_='item-description-title-link').get('href') except: href = '' try: price = item.find('div', class_='about').text.strip() except: price = '' try: category = item.find('div', class_='data').find('p').text.strip() except: category = '' data = {'title': title, 'price': price, 'href': href, 'category': category} all_items.append(data) return all_items #url с пагинацией #https://www.avito.ru/kaliningrad/nedvizhimost?p=1&user=1 url = 'https://www.avito.ru/kaliningrad/nedvizhimost?p=1&user=1' base_url = 'https://www.avito.ru/kaliningrad/nedvizhimost' page_url = '?p=' last_url = '&user=1' total_pages = get_pagination(request(url)) for i in range(1, total_pages): url_gen = base_url + page_url + str(i) + last_url html = request(url_gen) write_csv(get_all_items(html))
You're passing a list into the write_csv function. Try writing writer.writerow(data). - insolor
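insolor's comment points in the right direction: write one row per element of the list instead of treating the whole list as a dict. A minimal sketch of `write_csv` that loops over the items (the `newline=''` and `encoding='utf-8'` arguments are my additions, not part of the original code; `newline=''` avoids the blank rows the csv module otherwise produces on Windows):

```python
import csv


def write_csv(items):
    """Append one CSV row per scraped item; items is the list returned by get_all_items."""
    with open('avito_estate.csv', 'a', newline='', encoding='utf-8') as f:
        writer = csv.writer(f)
        for data in items:  # each element of the list is one item's dict
            writer.writerow((data['title'],
                             data['href'],
                             data['price'],
                             data['category']))
```

With this change the rest of the script can stay as-is, since the main loop already calls `write_csv(get_all_items(html))` with the full list.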