def get(self, url, proxy=None):
    if proxy:
        proxy_handler = urllib2.ProxyHandler({'http': proxy})
        opener = urllib2.build_opener(proxy_handler)
        urllib2.install_opener(opener)
    try:
        response = urllib2.urlopen(url)
    except HTTPError, e:
        resp = e.read()
        self.status_code = e.code
    except URLError, e:
        # A plain URLError (e.g. connection refused) carries no body or status code.
        resp = ''
        self.status_code = None
    else:
        self.status_code = response.code
        resp = response.read()
    return resp
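For reference, a minimal standalone sketch of the same pattern the method above relies on: build an opener around a ProxyHandler and install it process-wide before calling urlopen. The proxy address and target URL below are placeholders, not values from the original code.

# Minimal sketch (Python 2); the proxy address and URL are placeholders.
import urllib2

proxy_handler = urllib2.ProxyHandler({'http': 'http://10.0.0.1:8080'})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)  # every later urllib2.urlopen() call now uses the proxy

response = urllib2.urlopen('http://example.com/')
print response.code, len(response.read())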
Python ProxyHandler() example source code
def download_from_url(url):
    proxy = env_server.get_proxy()
    if proxy['enabled']:
        server = proxy['server'].replace('http://', '')
        proxy_dict = {
            'http': 'http://{login}:{pass}@{0}'.format(server, **proxy)
        }
        proxy_handler = urllib2.ProxyHandler(proxy_dict)
        auth = urllib2.HTTPBasicAuthHandler()
        opener = urllib2.build_opener(proxy_handler, auth, urllib2.HTTPHandler)
        urllib2.install_opener(opener)

    run_thread = tc.ServerThread(env_inst.ui_main)
    run_thread.kwargs = dict(url=url, timeout=1)
    run_thread.routine = urllib2.urlopen
    run_thread.run()

    result_thread = tc.treat_result(run_thread, silent=True)

    if result_thread.isFailed():
        return False
    else:
        return result_thread.result
def ipcheck(proxy):
    try:
        pxhandle = urllib2.ProxyHandler({"http": proxy})
        opener = urllib2.build_opener(pxhandle)
        urllib2.install_opener(opener)
        myip = urllib2.urlopen('http://www.whatismyip.com/automation/n09230945.asp').read()
        xs = re.findall(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', StripTags(myip))
        if not xs:
            # no IP address found in the response
            pass
        elif xs[0] == myipadress or myipadress == myip:
            trans_list.append(proxy)
            print proxy[:-1], "\t- ALIVE -", timer(), "- TRANSPARENT"
        else:
            anon_list.append(proxy)
            print proxy[:-1], "\t- ALIVE -", timer(), "- EXT-iP :", xs[0]
    except KeyboardInterrupt:
        print "\n\nCTRL+C - check temporary proxylist file\n\n"
        sys.exit(0)
    except:
        pass
def download_vcpython27(self):
    """
    Download vcpython27 since some Windows 7 boxes have it and some don't.
    :return: None
    """
    self._prepare_for_download()
    logger.info('Beginning download of vcpython27... this may take a few minutes...')
    with open(os.path.join(DOWNLOADS_DIR, 'vcpython27.msi'), 'wb') as f:
        if self.PROXY is not None:
            opener = urllib2.build_opener(
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.ProxyHandler({'http': self.PROXY, 'https': self.PROXY})
            )
            urllib2.install_opener(opener)
        f.write(urllib2.urlopen(self.VCPYTHON27_DOWNLOAD_URL, timeout=self.DOWNLOAD_TIMEOUT).read())
    logger.debug('Download of vcpython27 complete')
def download_python(self):
    """
    Download Python
    :return: None
    """
    self._prepare_for_download()
    logger.info('Beginning download of python')
    with open(os.path.join(DOWNLOADS_DIR, 'python-installer.msi'), 'wb') as f:
        if self.PROXY is not None:
            opener = urllib2.build_opener(
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.ProxyHandler({'http': self.PROXY, 'https': self.PROXY})
            )
            urllib2.install_opener(opener)
        f.write(urllib2.urlopen(self.PYTHON_DOWNLOAD_URL, timeout=self.DOWNLOAD_TIMEOUT).read())
    logger.debug('Download of python complete')
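Both download helpers above buffer the whole installer in memory with .read() before writing it out. A hedged alternative sketch that streams the response in chunks through the same urllib2 machinery; the URL and filename are placeholders.

# Sketch: stream the download instead of buffering it; placeholders throughout.
import shutil
import urllib2

resp = urllib2.urlopen('https://example.com/big-installer.msi', timeout=120)
with open('installer.msi', 'wb') as f:
    shutil.copyfileobj(resp, f, 64 * 1024)  # copy in 64 KiB chunks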
def wait_xxnet_exit():
    def http_request(url, method="GET"):
        proxy_handler = urllib2.ProxyHandler({})
        opener = urllib2.build_opener(proxy_handler)
        try:
            req = opener.open(url)
            return req
        except Exception as e:
            # logging.exception("web_control http_request:%s fail:%s", url, e)
            return False

    for i in range(20):
        host_port = config.get(["modules", "launcher", "control_port"], 8085)
        req_url = "http://127.0.0.1:{port}/quit".format(port=host_port)
        if http_request(req_url) == False:
            return True
        time.sleep(1)
    return False
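Note that ProxyHandler({}) with an explicit empty mapping disables proxying altogether, overriding any http_proxy/https_proxy environment variables, which is why the helper above uses it when polling a localhost control URL. A minimal sketch; the port and path are placeholders.

# Sketch: force a direct (proxy-free) connection for a local endpoint.
import urllib2

direct_opener = urllib2.build_opener(urllib2.ProxyHandler({}))
resp = direct_opener.open('http://127.0.0.1:8085/status', timeout=1)  # placeholder URL
print resp.code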
def __init__(self, url, proxy, cafile):
    self.url = url
    self.proxy = proxy
    if proxy:
        logging.info("Using HTTPS proxy: " + proxy)
        proxy_handler = urllib2.ProxyHandler({'https': proxy})
        opener = urllib2.build_opener(proxy_handler)
        urllib2.install_opener(opener)
    self.kwargs = {}
    if cafile and hasattr(ssl, "create_default_context"):
        logging.info("Using CA file: " + cafile)
        ctx = ssl.create_default_context()
        ctx.load_verify_locations(cafile=cafile)
        self.kwargs['context'] = ctx

# given an infoMap returned by the local node, call up the home server
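The stored kwargs presumably get passed on to urllib2.urlopen later; on Python 2.7.9+ urlopen accepts a context keyword, so a hedged sketch of how such a verified HTTPS request could look. The CA file path and URL are placeholders.

# Sketch, assuming Python 2.7.9+ where urlopen() accepts `context`.
import ssl
import urllib2

ctx = ssl.create_default_context()
ctx.load_verify_locations(cafile='/path/to/ca.pem')  # placeholder CA bundle
response = urllib2.urlopen('https://example.com/', context=ctx)
print response.code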
def download(url, headers, proxy, num_retries, data=None):
    print 'Downloading:', url
    request = urllib2.Request(url, data, headers)
    opener = urllib2.build_opener()
    if proxy:
        proxy_params = {urlparse.urlparse(url).scheme: proxy}
        opener.add_handler(urllib2.ProxyHandler(proxy_params))
    try:
        response = opener.open(request)
        html = response.read()
        code = response.code
    except urllib2.URLError as e:
        print 'Download error:', e.reason
        html = ''
        if hasattr(e, 'code'):
            code = e.code
            if num_retries > 0 and 500 <= code < 600:
                # retry 5XX HTTP errors
                html = download(url, headers, proxy, num_retries-1, data)
        else:
            code = None
    return html
def download(url, headers, proxy, num_retries, data=None):
    print 'Downloading:', url
    request = urllib2.Request(url, data, headers)
    opener = urllib2.build_opener()
    if proxy:
        proxy_params = {urlparse.urlparse(url).scheme: proxy}
        opener.add_handler(urllib2.ProxyHandler(proxy_params))
    try:
        response = opener.open(request)
        html = response.read()
        code = response.code
    except urllib2.URLError as e:
        print 'Download error:', e.reason
        html = ''
        if hasattr(e, 'code'):
            code = e.code
            if num_retries > 0 and 500 <= code < 600:
                # retry 5XX HTTP errors
                return download(url, headers, proxy, num_retries-1, data)
        else:
            code = None
    return html
def download5(url, user_agent='wswp', proxy=None, num_retries=2):
    """Download function with support for proxies"""
    print 'Downloading:', url
    headers = {'User-agent': user_agent}
    request = urllib2.Request(url, headers=headers)
    opener = urllib2.build_opener()
    if proxy:
        proxy_params = {urlparse.urlparse(url).scheme: proxy}
        opener.add_handler(urllib2.ProxyHandler(proxy_params))
    try:
        html = opener.open(request).read()
    except urllib2.URLError as e:
        print 'Download error:', e.reason
        html = None
        if num_retries > 0:
            if hasattr(e, 'code') and 500 <= e.code < 600:
                # retry 5XX HTTP errors
                html = download5(url, user_agent, proxy, num_retries-1)
    return html
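A possible usage sketch for download5; the target URL and proxy address are placeholders, and the function is assumed to be importable from the surrounding module.

# Usage sketch; URL and proxy are placeholders.
html = download5('http://example.com/', user_agent='wswp', proxy='10.0.0.1:8080')
if html is None:
    print 'download failed after retries'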
def download(self, url, headers, proxy, num_retries, data=None):
    print 'Downloading:', url
    request = urllib2.Request(url, data, headers or {})
    opener = self.opener or urllib2.build_opener()
    if proxy:
        proxy_params = {urlparse.urlparse(url).scheme: proxy}
        opener.add_handler(urllib2.ProxyHandler(proxy_params))
    try:
        response = opener.open(request)
        html = response.read()
        code = response.code
    except Exception as e:
        print 'Download error:', str(e)
        html = ''
        if hasattr(e, 'code'):
            code = e.code
            if num_retries > 0 and 500 <= code < 600:
                # retry 5XX HTTP errors
                return self._get(url, headers, proxy, num_retries-1, data)
        else:
            code = None
    return {'html': html, 'code': code}
def add_proxy(self, addr, proxy_type='all',
              user=None, password=None):
    """Add proxy"""
    if proxy_type == 'all':
        self.proxies = {
            'http': addr,
            'https': addr,
            'ftp': addr
        }
    else:
        self.proxies[proxy_type] = addr
    proxy_handler = urllib2.ProxyHandler(self.proxies)
    self.__build_opener()
    self.opener.add_handler(proxy_handler)

    if user and password:
        pwd_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        pwd_manager.add_password(None, addr, user, password)
        proxy_auth_handler = urllib2.ProxyBasicAuthHandler(pwd_manager)
        self.opener.add_handler(proxy_auth_handler)

    urllib2.install_opener(self.opener)
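A possible usage sketch for add_proxy; the owning class name, proxy address, and credentials are all placeholders.

# Usage sketch; HttpClient, the address, and the credentials are placeholders.
client = HttpClient()
client.add_proxy('http://10.0.0.1:3128', proxy_type='http',
                 user='alice', password='secret')
html = client.opener.open('http://example.com/').read()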
def check_single_proxy_status(self, proxy_address, domain_check):
    try:
        parse = urlparse(proxy_address)
        proxy_scheme = parse.scheme
        proxy = str(parse.hostname) + ':' + str(parse.port)
        proxy_handler = urllib2.ProxyHandler({proxy_scheme: proxy})
        opener = urllib2.build_opener(proxy_handler)
        opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36')]
        urllib2.install_opener(opener)
        req = urllib2.Request(domain_check)
        start_time = time.time()
        sock = urllib2.urlopen(req)
        end_time = time.time()
        diff_time = round(end_time - start_time, 3)
        log.console_log(Y + "[+] {} OK! Response Time : {}s".format(proxy_address, str(diff_time)) + W)
        return 'ok'
    except urllib2.HTTPError as e:
        print('Error code: ' + str(e.code))
        return e.code
    except Exception as detail:
        print('ERROR ' + str(detail))
        return 1
def _update_opener(self):
    '''
    Builds and installs a new opener to be used by all future calls to
    :func:`urllib2.urlopen`.
    '''
    if self._http_debug:
        http = urllib2.HTTPHandler(debuglevel=1)
    else:
        http = urllib2.HTTPHandler()

    if self._proxy:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
                                      urllib2.ProxyHandler({'http': self._proxy}),
                                      urllib2.HTTPBasicAuthHandler(),
                                      http)
    else:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
                                      urllib2.HTTPBasicAuthHandler(),
                                      http)
    urllib2.install_opener(opener)
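For comparison, the same handler composition outside a class: cookie processor, proxy handler, basic-auth handler, and an HTTPHandler with debug output. A sketch only; the cookie jar and proxy address are placeholders.

# Sketch of an equivalent opener built at module level; placeholder proxy.
import cookielib
import urllib2

cj = cookielib.CookieJar()
opener = urllib2.build_opener(
    urllib2.HTTPCookieProcessor(cj),
    urllib2.ProxyHandler({'http': 'http://10.0.0.1:8080'}),
    urllib2.HTTPBasicAuthHandler(),
    urllib2.HTTPHandler(debuglevel=1),  # echoes request/response lines to stdout
)
urllib2.install_opener(opener)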
def check_gn_proxy(proxy, protocal_type='HTTP'):
    url = 'http://icanhazip.com'
    proxy_handler = urllib2.ProxyHandler({
        'http': 'http://' + proxy,
        'https': 'https://' + proxy,
    })
    if protocal_type == 'HTTPS':
        url = 'https://icanhazip.com'
    opener = urllib2.build_opener(proxy_handler, urllib2.HTTPHandler)

    try:
        response = opener.open(url, timeout=3)
        res_ip = response.read().strip()
        return response.code == 200 and res_ip == proxy.split(':')[0]
    except Exception:
        return False
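A possible usage sketch that filters a list of candidate proxies with check_gn_proxy; the addresses are placeholders.

# Usage sketch; candidate addresses are placeholders.
candidates = ['1.2.3.4:8080', '5.6.7.8:3128']
alive = [p for p in candidates if check_gn_proxy(p)]
print 'usable proxies:', alive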
def checkAlive(self, ip, port, protocol):
    testUrl = "https://www.baidu.com/"
    req_timeout = 3
    cookies = urllib2.HTTPCookieProcessor()
    proxyHost = ""
    if protocol == 'HTTP' or protocol == 'HTTPS':
        proxyHost = {"http": r'http://%s:%s' % (ip, port)}
        # print proxyHost
    proxyHandler = urllib2.ProxyHandler(proxyHost)
    opener = urllib2.build_opener(cookies, proxyHandler)
    opener.addheaders = [('User-Agent',
                          'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36')]
    try:
        req = opener.open(testUrl, timeout=req_timeout)
        result = req.read()
        # print result
        gevent.sleep(2)
        return True
    except urllib2.HTTPError as e:
        print e.message
        return False
def open(aurl, post='', Referer=''):
    # proxy = 'http://127.0.0.1:8088'
    # opener = urllib2.build_opener(urllib2.ProxyHandler({'http': proxy}))
    # urllib2.install_opener(opener)
    if post != '':
        test_data_urlencode = urllib.urlencode(post)
        req = urllib2.Request(url=aurl, data=test_data_urlencode)
    else:
        req = urllib2.Request(url=aurl)
    if Referer != '':
        req.add_header('Referer', Referer)
    if aspxsession != "":
        req.add_header('Cookie', aspxsession)
    res_data = urllib2.urlopen(req)
    return res_data

#????????session
def setUp(self):
    super(ProxyAuthTests, self).setUp()
    # Ignore proxy bypass settings in the environment.
    def restore_environ(old_environ):
        os.environ.clear()
        os.environ.update(old_environ)
    self.addCleanup(restore_environ, os.environ.copy())
    os.environ['NO_PROXY'] = ''
    os.environ['no_proxy'] = ''

    self.digest_auth_handler = DigestAuthHandler()
    self.digest_auth_handler.set_users({self.USER: self.PASSWD})
    self.digest_auth_handler.set_realm(self.REALM)
    # With Digest Authentication
    def create_fake_proxy_handler(*args, **kwargs):
        return FakeProxyHandler(self.digest_auth_handler, *args, **kwargs)

    self.server = LoopbackHttpServerThread(create_fake_proxy_handler)
    self.server.start()
    self.server.ready.wait()
    proxy_url = "http://127.0.0.1:%d" % self.server.port
    handler = urllib2.ProxyHandler({"http": proxy_url})
    self.proxy_digest_handler = urllib2.ProxyDigestAuthHandler()
    self.opener = urllib2.build_opener(handler, self.proxy_digest_handler)
def test_proxy(self):
    o = OpenerDirector()
    ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
    o.add_handler(ph)
    meth_spec = [
        [("http_open", "return response")]
    ]
    handlers = add_ordered_mock_handlers(o, meth_spec)

    req = Request("http://acme.example.com/")
    self.assertEqual(req.get_host(), "acme.example.com")
    r = o.open(req)
    self.assertEqual(req.get_host(), "proxy.example.com:3128")
    self.assertEqual([(handlers[0], "http_open")],
                     [tup[0:2] for tup in o.calls])
def test_proxy_https_proxy_authorization(self):
    o = OpenerDirector()
    ph = urllib2.ProxyHandler(dict(https='proxy.example.com:3128'))
    o.add_handler(ph)
    https_handler = MockHTTPSHandler()
    o.add_handler(https_handler)

    req = Request("https://www.example.com/")
    req.add_header("Proxy-Authorization", "FooBar")
    req.add_header("User-Agent", "Grail")
    self.assertEqual(req.get_host(), "www.example.com")
    self.assertIsNone(req._tunnel_host)
    r = o.open(req)
    # Verify Proxy-Authorization gets tunneled to request.
    # httpsconn req_headers do not have the Proxy-Authorization header but
    # the req will have.
    self.assertNotIn(("Proxy-Authorization", "FooBar"),
                     https_handler.httpconn.req_headers)
    self.assertIn(("User-Agent", "Grail"),
                  https_handler.httpconn.req_headers)
    self.assertIsNotNone(req._tunnel_host)
    self.assertEqual(req.get_host(), "proxy.example.com:3128")
    self.assertEqual(req.get_header("Proxy-authorization"), "FooBar")
def test_proxy_basic_auth(self):
    opener = OpenerDirector()
    ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
    opener.add_handler(ph)
    password_manager = MockPasswordManager()
    auth_handler = urllib2.ProxyBasicAuthHandler(password_manager)
    realm = "ACME Networks"
    http_handler = MockHTTPHandler(
        407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
    opener.add_handler(auth_handler)
    opener.add_handler(http_handler)
    self._test_basic_auth(opener, auth_handler, "Proxy-authorization",
                          realm, http_handler, password_manager,
                          "http://acme.example.com:3128/protected",
                          "proxy.example.com:3128",
                          )