Python crawler series (4.1 - Writing data to files)

I. A quick review of Python's json module

1. json.dumps(): converts a Python dict into a JSON string

2. json.loads(): converts a JSON string back into a Python dict (a short round-trip sketch follows this list)
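A minimal round-trip sketch of the two calls; the sample dictionary below is made up purely for illustration:

import json

# A made-up sample record, just to show the round trip.
food = {'name': 'cabbage', 'price': '2.5'}

# dict -> JSON string; ensure_ascii=False keeps non-ASCII text readable.
s = json.dumps(food, ensure_ascii=False)
print(s)           # {"name": "cabbage", "price": "2.5"}

# JSON string -> dict
d = json.loads(s)
print(d['price'])  # 2.5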

II. Writing files with Python's built-in open()

1. Reuse the Guizhou agricultural-product data scraped with bs4 in the earlier post of this series

2. Save the data to a local file

import json
from bs4 import BeautifulSoup

...

def down_data(self):
    """
    Download the table data and save each row to a local file.
    :return: list of dicts, one per table row
    """
    soup = BeautifulSoup(self.get_html, 'lxml')
    table = soup.find('table', attrs={'class': 'table table-hover'})
    trs = table.find('tbody').find_all('tr')
    food_list = []
    # Open the output file in append mode with UTF-8 encoding.
    fb = open('food.json', 'a', encoding='utf-8')
    for tr in trs:
        food_dict = {}
        tds = tr.find_all('td')
        name = tds[0].get_text()
        price = tds[1].get_text()
        address = tds[3].get_text()
        time = tds[4].get_text()
        food_dict['name'] = name
        food_dict['price'] = price
        food_dict['address'] = address
        food_dict['time'] = time
        food_list.append(food_dict)
        # Write each row's dict to the local file as a JSON fragment.
        fb.write(json.dumps(food_dict, indent=2, ensure_ascii=False) + ',\n')
    fb.close()
    return food_list
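Note that writing one pretty-printed fragment per row followed by ',\n' means food.json as a whole is not a single valid JSON document. If one valid document is preferred, a minimal alternative (assuming the same food_list built above) is to dump the whole list once after the loop:

with open('food.json', 'w', encoding='utf-8') as fb:
    # One valid JSON array containing every row's dict.
    json.dump(food_list, fb, indent=2, ensure_ascii=False)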

III. Writing to a local file with the codecs module

1. Import the module

import codecs

2. Code that saves the data

def down_data(self):
    """
    Download the table data and save each row to a local file.
    :return: list of dicts, one per table row
    """
    soup = BeautifulSoup(self.get_html, 'lxml')
    table = soup.find('table', attrs={'class': 'table table-hover'})
    trs = table.find('tbody').find_all('tr')
    food_list = []
    # Open the file with codecs.open(); codecs.open() always opens the
    # underlying file in binary mode, so 'wb' here behaves like 'w'.
    fb = codecs.open('food.json', 'wb', encoding='utf-8')
    for tr in trs:
        food_dict = {}
        tds = tr.find_all('td')
        name = tds[0].get_text()
        price = tds[1].get_text()
        address = tds[3].get_text()
        time = tds[4].get_text()
        food_dict['name'] = name
        food_dict['price'] = price
        food_dict['address'] = address
        food_dict['time'] = time
        food_list.append(food_dict)
        # Write each row's dict to the local file as a JSON fragment.
        fb.write(json.dumps(food_dict, indent=2, ensure_ascii=False) + ',\n')
    fb.close()
    return food_list
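For reference, on Python 3 the built-in open() already accepts an encoding argument, so the codecs.open() call above can be replaced with a plain open() call; codecs.open() mainly matters for Python 2 code or for less common codecs:

# Equivalent on Python 3 to the codecs.open() call used above.
fb = open('food.json', 'w', encoding='utf-8')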


Reposted from blog.csdn.net/qq_40925239/article/details/83902608