Crawl job-category titles from 51job.com and write the data to a MySQL database.

# Crawl job-category titles from 51job.com and store them in a MySQL table.

import urllib.request  # unused below; kept in case later parts of the file need it
import requests
import pymysql  # package for connecting to the database
from lxml import etree

# Connect to MySQL: host, user, password, database name.
db = pymysql.connect('localhost', 'dsuser', 'badpassword', 'dsdb')
print('Success')

cur = db.cursor()  # establish a cursor
cur.execute('DROP TABLE IF EXISTS pk')  # remove a stale table from a previous run
sql = 'CREATE TABLE pk (title VARCHAR(50))'  # create the data table
cur.execute(sql)
print('Creating Success')

url = 'https://www.51job.com/'
response = requests.get(url)
response.encoding = 'gbk'  # 51job serves GBK-encoded pages; decode accordingly
html = etree.HTML(response.text)
# Extract the job-category link texts from the "cn hlist" div.
title = html.xpath('//div[@class="cn hlist"]//a/span/text()')

# Parameterized insert (%s placeholder) so scraped text cannot inject SQL.
sqle = 'INSERT INTO pk (title) VALUES (%s)'
for item in title:
    cur.execute(sqle, (item,))
db.commit()  # commit once after all inserts instead of per row

cur.close()
db.close()  # the original leaked the connection; close it explicitly
    
    

 

Guess you like

Origin www.cnblogs.com/persistence-ok/p/11645659.html