scrapy爬虫写入数据库万能语句

#pipelines.py 中
import pymysql
class BaikePipeline(object):
    """Scrapy item pipeline that writes items into a MySQL table via pymysql.

    Builds a "universal" INSERT statement from the item's own keys, so any
    item whose field names match the table's column names can be stored
    without hand-writing the SQL for each spider.
    """

    def __init__(self):
        # Connection and cursor are created lazily in open_spider so the
        # pipeline object can be constructed without touching the database.
        self.conn = None
        self.cur = None

    def open_spider(self, spider):
        """Open the MySQL connection once when the spider starts."""
        self.conn = pymysql.connect(host="localhost", user="root", password="123456",
                                    database="script", port=3306, charset='utf8')
        self.cur = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert one item into the `baike` table and return it unchanged.

        zip(*item.items()) splits the item into a tuple of column names and
        a parallel tuple of values, e.g.::

            {'a': 1, 'b': 2}  ->  cols = ('a', 'b'), values = (1, 2)

        (Like zip(a, b) generally: zip truncates to the shortest input.)
        """
        cols, values = zip(*item.items())
        # Table and column identifiers cannot be bound parameters, so they
        # are interpolated into the SQL text; the values use %s placeholders
        # and are passed separately to execute() so pymysql escapes them
        # (protects against SQL injection in the scraped data).
        sql = "INSERT INTO %s(%s) VALUES (%s)" % (
            'baike', ','.join(cols), ','.join(["%s"] * len(values)))
        self.cur.execute(sql, values)
        self.conn.commit()
        # NOTE: the original printed cur._last_executed for debugging; that
        # private attribute was removed in modern PyMySQL, so it is dropped.
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the spider finishes."""
        self.cur.close()
        self.conn.close()

发布了18 篇原创文章 · 获赞 7 · 访问量 1万+

猜你喜欢

转载自blog.csdn.net/qq_39965716/article/details/80607080