python-8.网络爬虫

修改headers

通过 Request 的 headers 参数修改
通过 Request.add_header() 方法修改

import urllib.request
import urllib.parse
import json

# Demo: send one POST request to Youdao's legacy web translate API and
# print the translation of user-supplied text.
content = input('请输入需要翻译的内容:')
url = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'

# Alternative: pass a headers dict as the third argument of Request, e.g.
# head = {'User-Agent': 'Mozilla/5.0 ...'} and Request(url, data, head).

# Form fields expected by the Youdao web client. salt/sign are fixed
# sample values captured from a browser session; the API may reject
# stale signatures — TODO confirm against a live capture if it fails.
data = {}
data['i'] = content
data['from'] = 'AUTO'
data['to'] = 'AUTO'
data['smartresult'] = 'dict'
data['client'] = 'fanyideskweb'
data['salt'] = '1523334803377'
data['sign'] = 'b2c40c2c92b7029b7f53c4a272257144'
data['doctype'] = 'json'
data['version'] = '2.1'
data['keyfrom'] = 'fanyi.web'
data['action'] = 'FY_BY_CLICKBUTTION'  # sic: the API itself uses this misspelling
data['typoResult'] = 'false'

# POST bodies must be bytes, so url-encode then encode to UTF-8.
data = urllib.parse.urlencode(data).encode('utf-8')
req = urllib.request.Request(url, data)
# BUG FIX: the header name is 'User-Agent' (hyphen). The original passed
# 'user_agent', which add_header normalizes to 'User_agent' — a header
# the server ignores, so the browser disguise never took effect.
req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36')
response = urllib.request.urlopen(req)
html = response.read().decode('utf-8')

# Response is JSON; the translation lives at translateResult[0][0]['tgt'].
target = json.loads(html)
print('翻译结果:%s' % target['translateResult'][0][0]['tgt'])

频繁使用网络爬虫

方法1:使用 time.sleep() 在两次请求之间延时,降低访问频率
方法2:使用代理 IP,让请求经由代理服务器发出,隐藏真实来源地址

import urllib.request
import urllib.parse
import json
import time

# Interactive loop: translate repeatedly until the user enters 'q!',
# sleeping between requests to avoid tripping anti-crawler rate limits.
while True:

    content = input('请输入需要翻译的内容(输入q!退出):')
    if content == 'q!':
        break
    url = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'

    # Form fields expected by the Youdao web client; salt/sign are fixed
    # sample values captured from a browser session.
    data = {}
    data['i'] = content
    data['from'] = 'AUTO'
    data['to'] = 'AUTO'
    data['smartresult'] = 'dict'
    data['client'] = 'fanyideskweb'
    data['salt'] = '1523334803377'
    data['sign'] = 'b2c40c2c92b7029b7f53c4a272257144'
    data['doctype'] = 'json'
    data['version'] = '2.1'
    data['keyfrom'] = 'fanyi.web'
    data['action'] = 'FY_BY_CLICKBUTTION'  # sic: the API itself uses this misspelling
    data['typoResult'] = 'false'
    # POST bodies must be bytes, so url-encode then encode to UTF-8.
    data = urllib.parse.urlencode(data).encode('utf-8')
    req = urllib.request.Request(url, data)
    # BUG FIX: header name is 'User-Agent' (hyphen); the original
    # 'user_agent' produced an ignored 'User_agent' header.
    req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36')
    response = urllib.request.urlopen(req)
    html = response.read().decode('utf-8')

    target = json.loads(html)
    print('翻译结果:%s' % target['translateResult'][0][0]['tgt'])

    time.sleep(5)  # throttle: wait 5 s between requests

使用代理的步骤

  1. 参数是一个字典 {'类型': '代理ip:端口号'}
    proxy_support = urllib.request.ProxyHandler({})
  2. 定制、创建一个 opener
    opener = urllib.request.build_opener(proxy_support)
  3. a. 安装 opener
    urllib.request.install_opener(opener)
    b. 调用 opener
    opener.open(url)
import urllib.request

# Demo: route a request through an HTTP proxy, then fetch a page that
# echoes the visitor's IP to confirm the proxy is actually in use.
url = 'http://www.whatismyip.com.tw'  # page that displays the caller's IP

# 1. ProxyHandler takes a dict of {'scheme': 'proxy_ip:port'}.
#    NOTE(review): this sample proxy is almost certainly dead by now.
proxy_support = urllib.request.ProxyHandler({'http': '61.160.212.181:808'})
# 2. Build an opener that routes requests through the proxy.
opener = urllib.request.build_opener(proxy_support)
# BUG FIX: the header name is 'User-Agent' (hyphen). The original
# 'user_agent' sent a header the server ignores.
opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36')]

# 3a. Install the opener globally so plain urlopen() uses the proxy.
#     (Alternative 3b: call opener.open(url) directly without installing.)
urllib.request.install_opener(opener)
response = urllib.request.urlopen(url)

html = response.read().decode('utf-8')

print(html)

异常处理Exception

import urllib.error
URLError 属性:reason
HTTPError 属性:code ;reason

猜你喜欢

转载自blog.csdn.net/syqnyue/article/details/79781989
今日推荐