Scrapy -- saving scraped data to a MySQL database

1. Define your own pipeline in pipelines.py

import pymysql


class PymysqlPipeline(object):
    def __init__(self):
        # Connect to the database (adjust host, credentials, and database
        # name to match your own MySQL setup)
        self.connect = pymysql.connect(
            host='localhost',
            db='bole',
            user='root',
            passwd='123456',
            charset='utf8',
            port=3306,  # default MySQL port
            use_unicode=True)
        self.cursor = self.connect.cursor()

    def process_item(self, item, spider):
        # Insert one scraped item into the bole table and commit right away
        cursor = self.cursor
        sql = ('insert into bole(title, datetime, category, content, '
               'dianzanshu, shoucanshu, pinglunshu) '
               'values (%s, %s, %s, %s, %s, %s, %s)')
        cursor.execute(sql, (
            item['title'], item['datetime'], item['category'], item['content'],
            item['dianzanshu'], item['shoucanshu'], item['pinglunshu']))
        self.connect.commit()

        return item
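
The insert statement in process_item assumes that a database named bole already exists and contains a bole table with the seven columns referenced in the query. The original post does not show the schema, so the one-off script below is only a sketch of one table layout that would accept those values; the column types are assumptions.

import pymysql

# One-off helper to create the bole table that the pipeline writes to.
# The schema is not given in the post, so the column types here are
# assumptions; adjust them to whatever your spider actually scrapes.
connect = pymysql.connect(host='localhost', user='root', passwd='123456',
                          db='bole', charset='utf8', port=3306)
with connect.cursor() as cursor:
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS bole (
            id INT AUTO_INCREMENT PRIMARY KEY,
            title VARCHAR(255),
            datetime VARCHAR(64),
            category VARCHAR(64),
            content TEXT,
            dianzanshu INT,
            shoucanshu INT,
            pinglunshu INT
        ) DEFAULT CHARSET=utf8
    """)
connect.close()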

2. Enable the pipeline in settings.py

ITEM_PIPELINES = {
   'Bole.pipelines.PymysqlPipeline': 1,
}
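
The pipeline reads seven keys from each item, so the item class in items.py must declare matching fields. Below is a minimal sketch; the class name BoleItem is an assumption, while the field names are taken from the pipeline above.

import scrapy


class BoleItem(scrapy.Item):
    # Field names must match the keys the pipeline reads from the item
    title = scrapy.Field()
    datetime = scrapy.Field()
    category = scrapy.Field()
    content = scrapy.Field()
    dianzanshu = scrapy.Field()   # number of likes
    shoucanshu = scrapy.Field()   # number of favorites
    pinglunshu = scrapy.Field()   # number of comments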


Reposted from blog.csdn.net/qq_38661599/article/details/80946045