Web crawler: small examples

Bing Translator: sending POST requests with the `requests` library

import requests

# Bing Translator dictionary-lookup endpoint.
# BUG FIX: the original URL contained a doubled '&&' between query parameters.
# NOTE(review): the IG/IID tokens are session-specific and presumably expire —
# capture fresh ones from the browser's network panel if the request fails.
url = "https://cn.bing.com/tlookupv3?isVertical=1&IG=AC43A2DD353A42D292C13DA2ED005444&IID=translator.5028.2"

# Form payload: translate the word 'dog' from English to Simplified Chinese.
formdata = {
    'from': 'en',
    'to': 'zh-Hans',
    'text': 'dog',
}

# Spoof a desktop-browser User-Agent so the endpoint does not reject the request.
headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36'}

res = requests.post(url=url, headers=headers, data=formdata)

# Print the parsed JSON result and the raw response body.
print(res.json())
print(res.text)

 

 

Using proxies with a randomly chosen User-Agent

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Send a request through a randomly chosen proxy with a random User-Agent."""
import requests
import random

if __name__ == "__main__":
    # Pool of User-Agent headers from different browsers.
    header_list = [
        # Maxthon
        {"user-agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)"},
        # Firefox
        {"user-agent": "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1"},
        # Chrome
        {"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"},
    ]
    # Pool of proxy IPs.
    # BUG FIX: requests expects lowercase scheme keys ('http'/'https');
    # the original used 'HTTP', which requests silently ignores.
    proxy_list = [
        {"http": "112.115.57.20:3128"},
        {"http": "121.41.171.223:3128"},
    ]
    # Pick a random UA header and proxy for this request.
    header = random.choice(header_list)
    proxy = random.choice(proxy_list)

    url = 'http://www.baidu.com/s?ie=UTF-8&wd=ip'
    # Send the request through the chosen proxy with the chosen UA.
    # BUG FIX: the original passed an undefined name `headers` here
    # instead of the `header` variable chosen above.
    response = requests.get(url=url, headers=header, proxies=proxy)
    response.encoding = 'utf-8'

    # Write the raw response bytes; 'wb' mode avoids re-encoding issues.
    with open('daili.html', 'wb') as fp:
        fp.write(response.content)
    # Switch back to the original IP: an empty proxy mapping means no proxy.
    requests.get(url, proxies={"http": ""})

 

Crawling a page behind a login — cookies and sessions

"""Log in to renren.com and fetch the logged-in user's profile page.

Flow:
1. Log in: the server creates a cookie for the current user
   (it stores the user's state and identity).
2. Request the personal home page carrying the cookie created in step 1
   to obtain the logged-in user's page data.
"""
import requests
from lxml import etree  # kept from the original snippet; unused here

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36"
}

# Login request URL (obtained with a packet-capture tool).
login_url = "http://www.renren.com/ajaxLogin/login?1=1&uniqueTimestamp=2018922053679"
# A Session object automatically stores cookies and sends them on
# every subsequent request made through the same session.
session = requests.session()
# Login form data. NOTE(review): field names were reconstructed from a
# machine-garbled source — verify against a fresh packet capture.
data = {
    "captcha_type": "web_login",
    "domain": "renren.com",
    "email": "18829037944",
    "f": "",
    "icode": "",
    "key_id": "1",
    "origURL": "http://www.renren.com/home",
    "password": "30f28dff42c847e99969e7e91f8356bcb80aa2e9993893add81b6ff76c899be3",
    "rkey": "f1ace095ea75f09850cbb28b87a04b9e",
}
# Posting through the session makes it store the login cookie
# for all later requests.
session.post(url=login_url, data=data, headers=headers)

get_url = "http://www.renren.com/968520666/profile"
# This request automatically carries the cookie stored at login time.
response = session.get(url=get_url, headers=headers)
# Set the encoding used to decode the response body.
response.encoding = 'utf-8'
page_text = response.text

# Write the response content to a file.
with open('./renren01.html', 'w', encoding="utf-8") as fp:
    fp.write(page_text)
    print("over")

 

Related posts

Origin www.cnblogs.com/Mr-Feng/p/11274263.html