day03 Web Scraping

Copyright notice: This article was written by lianyhai and may not be used for commercial purposes; any other use is fine. If you really must use it commercially, send me some money on WeChat, thanks! Hahaha https://blog.csdn.net/qq_36303521/article/details/89205189
First up: automating a 163 Mail login with Selenium.

from selenium import webdriver
import time
browser = webdriver.Chrome()
browser.get("http://mail.163.com")
time.sleep(3)

browser.maximize_window()
time.sleep(5)

# The login form lives inside an iframe, so switch into it first
browser.switch_to.frame(0)
email = browser.find_element_by_name('email')
email.send_keys('[email protected]')
password = browser.find_element_by_name('password')
password.send_keys('####333')
login = browser.find_element_by_id('dologin')
login.click()
time.sleep(12)
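The fixed time.sleep() calls above are fragile: they wait too long when the page is fast and too little when it is slow. Here is a minimal alternative sketch using Selenium's explicit waits (WebDriverWait and expected_conditions are standard Selenium helpers; the element names are the same ones assumed on the 163 login page above):

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

browser = webdriver.Chrome()
browser.get("http://mail.163.com")
browser.maximize_window()

wait = WebDriverWait(browser, 10)
# Wait for the login iframe to load, then switch into it
wait.until(EC.frame_to_be_available_and_switch_to_it((By.TAG_NAME, "iframe")))
# Wait until the email field is actually present before typing
wait.until(EC.presence_of_element_located((By.NAME, "email"))).send_keys("[email protected]")
browser.find_element_by_name("password").send_keys("####333")
browser.find_element_by_id("dologin").click()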

Second up: scraping free proxy IPs from xicidaili.com and keeping only the ones that work.

import requests, time
from bs4 import BeautifulSoup

class getUrl(object):
    """Scrape free proxies from xicidaili.com and record the working ones."""
    def __init__(self):
        self.headers = {
            "Connection": "keep-alive",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 "
                          "(KHTML, like Gecko) Chrome/51.0.2704.63 Safari/537.36",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
            "Accept-Encoding": "gzip, deflate, sdch",
            "Accept-Language": "zh-CN,zh;q=0.8"
        }

    def run(self):
        # Use the first results page as an example
        url = 'http://www.xicidaili.com/nn'
        req = requests.get(url, headers=self.headers)
        html = req.text
        soup = BeautifulSoup(html, 'lxml')
        ip_list = soup.find(id='ip_list').find_all('tr')
        for i in range(1, len(ip_list)):
            ip_info = ip_list[i]
            tds = ip_info.find_all('td')
            ip = tds[1].text + ':' + tds[2].text
            # Check whether the proxy actually works
            if self.verify_IP(ip):
                # Append working proxies to a file
                with open("ip_records.txt", 'a', encoding="utf-8") as dir_file:
                    dir_file.write(ip + "\n")
                time.sleep(5)
				
    def verify_IP(self, ip):
        # requests expects a scheme on the proxy address
        proxies = {"http": "http://" + ip}
        url = "http://www.baidu.com/"
        try:
            req = requests.get(url, headers=self.headers, proxies=proxies, timeout=3)
            return req.status_code == 200
        except requests.RequestException as e:
            print("Error while verifying proxy IP " + ip + ":")
            print(e)
            return False


if __name__ == '__main__':
    geturl = getUrl()
    geturl.run()
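Once ip_records.txt has some entries, the saved proxies can be consumed like this (a minimal sketch; picking a random proxy per request is just one possible strategy):

import random
import requests

# Load the proxies that verify_IP() accepted
with open("ip_records.txt", encoding="utf-8") as f:
    proxy_pool = [line.strip() for line in f if line.strip()]

# Route a request through a randomly chosen proxy
proxy = random.choice(proxy_pool)
resp = requests.get("http://www.baidu.com/",
                    proxies={"http": "http://" + proxy},
                    timeout=3)
print(proxy, resp.status_code)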

That's the full code for today. I've got other things to deal with, so I'm slacking off for now.
