使用代理

时间:2012-02-18 11:43:56

标签: python proxy urllib2

我已经构建了一个脚本(借助于互联网资源),它从特定网站获取可用代理列表,然后逐个检查以查找工作代理。一旦它发现它从该代理构建和开启。这是我的代码。

import urllib2
import urllib
import cookielib
import socket
import time

def getOpener(pip=None):
    if pip:
        proxy_handler = urllib2.ProxyHandler({'http': pip})
        opener = urllib2.build_opener(proxy_handler)
    else:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookielib.CookieJar()))
    opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:7.0.1) Gecko/20100101 Firefox/7.0.1')]
    urllib2.install_opener(opener)
    return opener

def getContent(opnr, url):
    req = urllib2.Request(url)
    sock = opnr.open(req)
    return sock.read()

def is_bad_proxy(pip):
    try:
        opnr = getOpener(pip)
        data = getContent(opnr, 'http://www.google.com')
    except urllib2.HTTPError, e:
        return e.code
    except Exception, detail:
        return True
    return False

def getProxiesList():
    proxies = []
    opnr = getOpener()
    content = getContent(opnr, 'http://somesite.com/')
    urls = re.findall("<a href='([^']+)'[^>]*>.*?HTTP Proxies.*?</a>", content)
    for eachURL in urls:
        content = getContent(opnr, eachURL)
        proxies.extend(re.findall('\d{,3}\.\d{,3}\.\d{,3}\.\d{,3}:\d+', content))
    return proxies

def getWorkingProxy(proxyList, i=-1):
    for j in range(i+1, len(proxyList)):
        currentProxy = proxyList[j]
        if not is_bad_proxy(currentProxy):
            log("%s is working" % (currentProxy))
            return currentProxy, j
        else:
            log("Bad Proxy %s" % (currentProxy))
    return None, -1

if __name__ == "__main__":
    socket.setdefaulttimeout(60)
    proxyList = getProxiesList()
    proxy, index = getWorkingProxy(proxyList)
    if proxy:
        _web = getOpener(proxy)

当我在某种程度上使用一个代理时,我必须一次又一次地重复这个过程。问题是does building an opener again and again will cause issues??因为我遇到了以下错误HTTPError: HTTP Error 503: Too many open connections。请帮我解决错误的原因是什么?提前谢谢。

1 个答案:

答案 0 :(得分:0)

我检查过,proxyList包含重复项。很多openers试图使用导致错误HTTPError: HTTP Error 503: Too many open connections

的相同代理