import requests,re,json
from requests.exceptions import RequestException
from multiprocessing import Pool
def get_page_source(url):
    """Fetch *url* and return the HTML text, or None on failure.

    Returns None when the request raises, times out, or the server
    answers with a non-200 status code.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko)Chrome/65.0.3325.181 Safari/537.36"
    }
    try:
        # timeout so a stalled server cannot hang the scraper forever
        response = requests.get(url, headers=headers, timeout=10)
        # was `status_code is 200`: identity comparison on an int is a bug
        # (works only via CPython's small-int cache); use equality.
        if response.status_code == 200:
            return response.text
        return None
    except RequestException:
        return None
def pares_page_source(html):
    """Parse a Maoyan board page and yield one dict per movie entry.

    Each dict has keys: index, title, image, actor, time, score.
    Yields nothing when *html* is None (failed fetch) or empty.
    """
    # Guard: get_page_source returns None on failure; the original code
    # crashed with TypeError here in that case.
    if not html:
        return
    # Raw strings so \d and \n are regex escapes, not (deprecated)
    # string escapes.
    pattern = re.compile(
        r'<dd>.*?board-index.*?>(\d+)</i>.*?<a.*?title="(.*?)".*?<img.*?-src="(.*?)".*?="star">\n(.*?)\n.*?'
        r'releasetime">(.*?)</p>.*?integer">(.*?)</i>.*?fraction">(\d+).*?</dd>',
        re.S,
    )
    # Use the compiled pattern directly instead of routing it back
    # through re.findall; finditer streams matches lazily.
    for m in pattern.finditer(html):
        yield {
            "index": m.group(1),
            "title": m.group(2),
            "image": m.group(3),
            "actor": m.group(4).strip(),
            # drop the 5-character "release time:" label prefix
            "time": m.group(5)[5:],
            # integer part ("9.") + fraction digit ("5") -> "9.5"
            "score": m.group(6) + m.group(7),
        }
def write_to_txt(content):
    """Append *content* as one JSON line to maoyan.txt (UTF-8).

    ensure_ascii=False keeps Chinese titles human-readable in the file.
    """
    line = json.dumps(content, ensure_ascii=False)
    with open("maoyan.txt", mode="a", encoding="utf-8") as out:
        out.write(line)
        out.write("\n")
def main(i):
    """Scrape one board page at pagination offset *i* and persist it.

    Fetches the page, parses every movie entry, appends each to
    maoyan.txt, and echoes it to stdout. Skips silently when the
    fetch fails (get_page_source returned None).
    """
    url = "http://www.maoyan.com/board/4?offset=" + str(i)
    html = get_page_source(url)
    # Guard: without this the parser loop crashed on a failed fetch.
    if html is None:
        return
    # renamed from `i`, which shadowed the offset parameter
    for item in pares_page_source(html):
        write_to_txt(item)
        print(item)
if __name__ == '__main__':
    # Board has 10 pages of 10 entries; offsets 0, 10, ..., 90.
    for offset in range(0, 100, 10):
        main(offset)