# 关于代理Ip设置 — notes on configuring a proxy IP (converted to a comment so the file parses)

from selenium import webdriver
import urllib.request
from bs4 import BeautifulSoup as bs
import pickle
import time


def web(proxy='151.253.165.70:8080', url='https://www.ip.cn/', wait=3):
    """Open Chrome through an HTTP proxy and load *url* to check the proxy works.

    Visiting ip.cn shows the IP the site sees, so if the proxy is active the
    page reports the proxy's address rather than the local machine's.

    Args:
        proxy: proxy endpoint as "host:port" (default kept from the original).
        url: page to load for the IP check.
        wait: seconds to keep the page open before closing the browser.
    """
    chrome_options = webdriver.ChromeOptions()

    # Configure the proxy. There must be NO spaces around '=':
    # "--proxy-server = http://..." is silently ignored by Chrome.
    chrome_options.add_argument("--proxy-server=http://{0}".format(proxy))

    browser = webdriver.Chrome(options=chrome_options)
    try:
        browser.get(url)
        time.sleep(wait)
    finally:
        # The original leaked the browser/driver processes; always shut down.
        browser.quit()


def add(ip='116.196.85.150:3128', url='https://www.ip.cn/'):
    """Fetch *url* through an HTTP proxy with urllib and print the detected IP.

    Builds an opener with a ProxyHandler (the same proxy serves both http and
    https), sends a browser-like header set, then parses the response and
    prints the text of ``<div id="result">`` (where ip.cn reports the caller's
    IP address).

    Args:
        ip: proxy endpoint as "host:port" (could also be drawn from a list).
        url: page to fetch for the IP check.

    Raises:
        ValueError: if the expected ``<div id="result">`` is not in the page.
    """
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,'
                  'image/webp,image/apng,*/*;q=0.8',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Cache-Control': 'no-cache',
        # Original value was ' keep-alive' with a stray leading space — a
        # malformed header value; fixed here.
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 '
                      '(KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36',
    }

    proxies = {"http": ip, "https": ip}  # same proxy for both schemes

    handler = urllib.request.ProxyHandler(proxies)   # route traffic via proxy
    opener = urllib.request.build_opener(handler)

    req = urllib.request.Request(url, headers=headers)

    # Must use the opener we built (not plain urlopen) so the ProxyHandler is
    # applied. Context manager closes the response — the original leaked it.
    with opener.open(req) as response:
        page = response.read().decode('utf-8')

    soup = bs(page, 'lxml')
    result = soup.find('div', id='result')
    # Original did findAll(...)[0], which raised a bare IndexError when the
    # div was absent; fail with an explanatory error instead.
    if result is None:
        raise ValueError('no <div id="result"> in response — page layout may have changed')
    print(result.text)




# 转载自 (reposted from): blog.csdn.net/AnYeZhiYin/article/details/103920105