Writing information to csv in python

import requests
from bs4 import BeautifulSoup
import csv
from urlparse import urljoin
import urllib2

base_url = 'http://www.baseball-reference.com/' # base url for concatenation
data = requests.get("http://www.baseball-reference.com/teams/BAL/2014-schedule-scores.shtml") #website for scraping
soup = BeautifulSoup(data.content)
b=5

for link in soup.find_all('a'):

    if not link.has_attr('href'):
        continue

    if link.get_text() != 'boxscore':
        continue

    url = base_url + link['href']

    response = requests.get(url)
    html = response.content
    soup = BeautifulSoup(html)

    # Scores
    table = soup.find('table', attrs={'id': 'BaltimoreOriolespitching'})
    for row in table.findAll('tr'):
        list_of_cells = []
        for cell in row.findAll('td'):
            text = cell.text.replace(' ', '')
            list_of_cells.append(text)
        for list in list_of_cells:
            with open('test1.csv', 'w', newline='') as fp:
                a = csv.writer(fp, delimiter=',')
                a.writerows(list)

      

I am trying to write the information cleaned to csv so that each piece of information has its own cell. The more I play with the code, I either get an indented error or the first line prints to csv and that's it.

IndentationError: deferred block expected

+3


source to share


1 answer


I think the first thing to consider is opening the file and creating a CSV record outside of the loop. I think you are rewriting the CSV file ( 'w'

) on each pass through the loop for

. So try this:



with open('test1.csv', 'w', newline='') as fp:
    csvw = csv.writer(fp, delimiter=',')

    for link in soup.find_all('a'):

        if not link.has_attr('href'):
            continue

        if link.get_text() != 'boxscore':
            continue

        url = base_url + link['href']

        response = requests.get(url)
        html = response.content
        soup = BeautifulSoup(html)

        # Scores
        table = soup.find('table', attrs={'id': 'BaltimoreOriolespitching'})
        for row in table.findAll('tr'):
            list_of_cells = []
            for cell in row.findAll('td'):
                text = cell.text.replace(' ', '')
                list_of_cells.append(text)
            for list in list_of_cells:
                    csvw.writerows(list)

      

+3


source







All Articles