Scrapy: adding a random User-Agent in middlewares.py

Purpose: a downloader middleware lets you rewrite the headers of every outgoing request, so each request can carry a different, randomly chosen User-Agent.
pip3 install fake_useragent
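Before wiring it into Scrapy, you can sanity-check the package from a Python shell (a quick sketch; ua.random and ua.chrome are attributes provided by fake_useragent):

from fake_useragent import UserAgent

ua = UserAgent()
print(ua.random)   # a random browser User-Agent string
print(ua.chrome)   # a random Chrome User-Agent string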
Import the UserAgent class in middlewares.py and define the random User-Agent middleware:
from fake_useragent import UserAgent

class RandomUserAgentMiddlerware(object):
    """Downloader middleware that attaches a random User-Agent to every request."""

    def __init__(self, crawler):
        super(RandomUserAgentMiddlerware, self).__init__()
        self.ua = UserAgent()
        # Which fake_useragent attribute to use ('random', 'chrome', ...),
        # read from the RANDOM_UA_TYPE setting; defaults to 'random'.
        self.ua_type = crawler.settings.get('RANDOM_UA_TYPE', 'random')

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy calls this hook to build the middleware with access to the settings.
        return cls(crawler)

    def process_request(self, request, spider):
        def get_ua():
            return getattr(self.ua, self.ua_type)
        request.headers.setdefault('User-Agent', get_ua())
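For the middleware to actually run, it must also be enabled in settings.py. A minimal sketch, assuming the project module is called myproject (the module path and the priority value 543 are placeholders for your own project); disabling Scrapy's built-in UserAgentMiddleware keeps it from overriding the random header:

# settings.py
DOWNLOADER_MIDDLEWARES = {
    # enable the custom middleware; path and priority depend on your project
    'myproject.middlewares.RandomUserAgentMiddlerware': 543,
    # disable the built-in User-Agent middleware so it cannot overwrite ours
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
}

# which fake_useragent attribute to use: 'random', 'chrome', 'firefox', ...
RANDOM_UA_TYPE = 'random'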


Reposted from blog.csdn.net/qq123aa2006/article/details/88949770