# coding=utf-8
import urllib2
import re
import random
import time
# Pool of browser User-Agent strings; one is picked at random per request
# so the traffic does not look like it all comes from one client.
user_agents = [
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/17.17134',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60',
'Opera/8.0 (Windows NT 5.1; U; en)',
'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36']
# Pool of proxies; only the local machine is listed here.
# BUG FIX: the key must be the bare scheme name 'http' (no trailing colon) —
# urllib2.ProxyHandler matches keys against the request's scheme, so the
# original key 'http:' never matched and the proxy was silently ignored.
proxys = [{'http': 'localhost:80'}]
def getArticleUrl():
    '''
    Collect the URLs of all articles on the author's blog.

    Scrapes each listing page, extracts every anchor href opened in a new
    tab, and keeps the ones that contain the keyword 'Sweeney' (which all
    of the author's article URLs do).

    :return: a list of article URL strings
    '''
    agent = random.choice(user_agents)  # random user-agent per run
    heads = {'User-Agent': agent, "Accept-Language": "zh-CN,zh;q=0.8,en;q=0.6"}
    url = 'https://blog.csdn.net/Sweeneyzuo/article/list/'  # the author's blog listing base URL
    # Compile the pattern once, outside the page loop, instead of per page.
    pattern = re.compile(r'<a href="(.*?)" target="_blank">')
    acticleUrls = []
    for x in range(1, 6):  # the blog has 5 listing pages
        u = url + str(x)
        request = urllib2.Request(u, headers=heads)
        proxy = random.choice(proxys)  # random proxy per page
        pro_handler = urllib2.ProxyHandler(proxy)
        openner = urllib2.build_opener(pro_handler)
        response = openner.open(request)
        try:
            html = response.read()
        finally:
            response.close()  # always release the connection
        # Use the `in` operator instead of __contains__, and do not shadow
        # the builtin name `list` for the match results.
        for href in pattern.findall(html):
            if 'Sweeney' in href:  # the author's article URLs contain 'Sweeney'
                acticleUrls.append(href)
    return acticleUrls
def incrCount(url):
    '''
    Visit *url* once through a random proxy with a random user-agent,
    which registers one page view on the target site.

    :param url: the article URL to visit
    '''
    agent = random.choice(user_agents)  # random browser identity
    heads = {'User-Agent': agent}
    request = urllib2.Request(url, headers=heads)
    proxy = random.choice(proxys)  # random proxy ip
    pro_handler = urllib2.ProxyHandler(proxy)
    openner = urllib2.build_opener(pro_handler)
    response = openner.open(request)
    response.close()  # FIX: release the connection; the body is never read
urls = getArticleUrl()
i = 1
print urls
while True:
url = random.choice(urls)
incrCount(url)
print '正在访问第' + str(i) + '次'
if i == 10000:#理论上增加10000次访问量,实际达不到
break
i += 1
if i % 200 == 0:
time.sleep(5)
print '结束'
# python implementation to increase CSDN page view counts
# reposted from blog.csdn.net/Sweeneyzuo/article/details/84695984