Python Scrapy 中使用HTTP代理

增加一个代理中间件

# 修改settings文件
# Scrapy settings: register the custom proxy middleware.
# A value of None disables a middleware; 125 is the priority of ours.
DOWNLOADER_MIDDLEWARES = {
    # Fixed: the original used the pre-1.0 'scrapy.contrib.downloadermiddleware'
    # path, which was removed from Scrapy; use the modern module path
    # (consistent with the DefaultHeadersMiddleware entry below).
    'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': None,
    'myproxies.middlewares.ProxyMiddleWare': 125,
    'scrapy.downloadermiddlewares.defaultheaders.DefaultHeadersMiddleware': None,
}

修改中间件middlewares.py

import random  
import scrapy  
from scrapy import log  
  
  
# logger = logging.getLogger()  
  
class ProxyMiddleWare(object):
    """Scrapy downloader middleware that routes each request through a
    randomly fetched HTTP proxy, and retries with a new proxy on non-200
    responses."""

    def process_request(self, request, spider):
        """Attach a freshly fetched proxy to the outgoing request."""
        proxy = self.get_random_proxy()
        print("this is request ip:" + proxy)
        request.meta['proxy'] = proxy

    def process_response(self, request, response, spider):
        """Handle the returned response.

        If the response status is not 200, pick a new proxy and return the
        request object so Scrapy re-schedules it; otherwise pass the
        response through unchanged.
        """
        if response.status != 200:
            proxy = self.get_random_proxy()
            print("this is response ip:" + proxy)
            # Re-issue the current request through the new proxy.
            request.meta['proxy'] = proxy
            return request
        return response

    def get_random_proxy(self):
        """Fetch one proxy address from the provider API.

        Bug fixes vs. the original: it called `requests.get` without ever
        importing `requests` (NameError at runtime), and it returned the
        response *object* instead of the proxy URL string that
        `request.meta['proxy']` requires. Uses the stdlib instead.
        """
        import urllib.request  # local stdlib import; original used an un-imported `requests`
        with urllib.request.urlopen('https://kuyukuyu.com/agents/get?uuid=你自己的账号密匙') as resp:
            # Decode the body and strip whitespace so the result is a clean
            # proxy URL string (e.g. "http://1.2.3.4:8080").
            return resp.read().decode('utf-8').strip()
发布时间:2020-05-02

在线客服