def getJSONfromURL(url):
    # BEAR (bearer token string) and MyHTTPRedirectHandler are defined elsewhere in the module.
    user_agent = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)'
    headers = {'Authorization': BEAR, 'User-Agent': user_agent}
    h = MyHTTPRedirectHandler()
    opener = urllib2.build_opener(h)
    urllib2.install_opener(opener)  # every later urlopen() call now goes through the redirect handler
    json_data = ""
    try:
        req = urllib2.Request(url, headers=headers)
        response = urllib2.urlopen(req)
        the_page = response.read()
        json_data = json.loads(the_page)
    except Exception:
        print("Error reading data members")
    return json_data
Example source code for Python's install_opener()
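All of the examples below share one pattern: build an opener from one or more handlers, then install it globally so that every subsequent urllib2.urlopen() call uses it. The sketch below is a minimal, self-contained illustration of that pattern; the handler choice and the URL are illustrative only.

import urllib2

# Any combination of handlers can be passed to build_opener();
# here a ProxyHandler({}) simply disables proxy lookup.
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)  # becomes the process-wide default opener

# From now on, a plain urlopen() goes through the installed opener.
response = urllib2.urlopen('http://example.com/')
print response.read()[:80]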
def getJson(limit=13, since='2016-05-25', until='2016-05-26'):
    # FACEBOOK_GROUP, TOKEN, BEAR and MyHTTPRedirectHandler are module-level definitions.
    url = 'https://graph.facebook.com/v2.5/' + FACEBOOK_GROUP + '/feed?fields=reactions.limit(500){link,name,pic_square,type},message,name,id,created_time,permalink_url,shares,comments.limit(500){created_time,likes.limit(500),message,from,comments.limit(507){likes,message,from,created_time}},from&limit=%s&since=%s&until=%s&access_token=%s' % (limit, since, until, TOKEN)
    user_agent = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)'
    headers = {'Authorization': BEAR, 'User-Agent': user_agent}
    h = MyHTTPRedirectHandler()
    opener = urllib2.build_opener(h)
    urllib2.install_opener(opener)
    json_data = ""
    try:
        req = urllib2.Request(url, headers=headers)
        response = urllib2.urlopen(req)
        the_page = response.read()
        json_data = json.loads(the_page)
    except Exception:
        print("Error reading data")
    return json_data
def __init__(self, configuration):
    self.setup(configuration)
    self.echo = None
    if "ECHO" in configuration:
        self.echo = configuration['ECHO']
    if self.proxy_scheme is not None and self.proxy_host is not None and \
            self.proxy_port is not None:
        credentials = ""
        if self.proxy_username is not None and self.proxy_password is not None:
            credentials = self.proxy_username + ":" + self.proxy_password + "@"
        proxyDict = {
            self.proxy_scheme: self.proxy_scheme + "://" + credentials +
            self.proxy_host + ":" + self.proxy_port
        }
        proxy = urllib2.ProxyHandler(proxyDict)
        if credentials != '':
            auth = urllib2.HTTPBasicAuthHandler()
            opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler)
        else:
            opener = urllib2.build_opener(proxy)
        urllib2.install_opener(opener)
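As a self-contained illustration of the same proxy-with-credentials pattern, here is a minimal sketch; the scheme, host, port and account values are made up, whereas in the constructor above they come from the configuration object.

import urllib2

# Hypothetical example values.
scheme, host, port = 'http', '127.0.0.1', '8080'
username, password = 'user', 'secret'

proxy_url = '%s://%s:%s@%s:%s' % (scheme, username, password, host, port)
proxy = urllib2.ProxyHandler({scheme: proxy_url})
auth = urllib2.HTTPBasicAuthHandler()
opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler)
urllib2.install_opener(opener)  # subsequent urlopen() calls are routed through the proxy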
def totalPage(self):
    self.loadedCookies = self.loadCookies()
    if not self.loadedCookies:
        return False
    # page index starts from 0 and ends at max-1
    req = urllib2.Request('http://dict.youdao.com/wordbook/wordlist?p=0&tags=')
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.loadedCookies))
    urllib2.install_opener(opener)
    response = urllib2.urlopen(req)
    source = response.read()
    # The literal '????' strings below are mojibake for the original Chinese text:
    # the first apparently marks the page returned when the cookies are no longer
    # valid, the second is the label of the "next page" link used to read the
    # last page index.
    if '????' in source:
        return False
    else:
        try:
            return int(re.search('<a href="wordlist.p=(.*).tags=" class="next-page">????</a>', source, re.M | re.I).group(1)) - 1
        except Exception:
            return 1
def _init_urllib(self, secure, debuglevel=0):
    cj = cookielib.CookieJar()
    no_proxy_support = urllib2.ProxyHandler({})
    cookie_handler = urllib2.HTTPCookieProcessor(cj)
    ctx = None
    if not secure:
        self._logger.info('[WARNING] Skip certificate verification.')
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
    https_handler = urllib2.HTTPSHandler(debuglevel=debuglevel, context=ctx)
    opener = urllib2.build_opener(no_proxy_support,
                                  cookie_handler,
                                  https_handler,
                                  MultipartPostHandler.MultipartPostHandler)
    opener.addheaders = [('User-agent', API_USER_AGENT)]
    urllib2.install_opener(opener)
def verify(cls, args):
    cookie = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
    urllib2.install_opener(opener)
    postdata = "_SESSION[login_in]=1&_SESSION[admin]=1&_SESSION[login_time]=300000000000000000000000\r\n"
    # get session
    request = urllib2.Request(args['options']['target'] + "/index.php", data=postdata)
    r = urllib2.urlopen(request)
    # login test
    request2 = urllib2.Request(args['options']['target'] + "/admin/admin.php", data=postdata)
    r = urllib2.urlopen(request2)
    content = r.read()
    if "admin_form.php?action=form_list&nav=list_order" in content:
        if "admin_main.php?nav=main" in content:
            args['success'] = True
            args['test_method'] = 'http://www.wooyun.org/bugs/wooyun-2014-059180'
            return args
    args['success'] = False
    return args
def index(request):
    if request.method == "GET":
        try:
            ssl._create_default_https_context = ssl._create_unverified_context
            opener = wdf_urllib.build_opener(
                wdf_urllib.HTTPCookieProcessor(CookieJar()))
            wdf_urllib.install_opener(opener)
        except Exception:
            pass
        uuid = getUUID()
        url = 'https://login.weixin.qq.com/qrcode/' + uuid
        params = {
            't': 'webwx',
            '_': int(time.time()),
        }
        request = getRequest(url=url, data=urlencode(params))
        response = wdf_urllib.urlopen(request)
        context = {
            'uuid': uuid,
            'response': response.read(),
            'delyou': '',
        }
        return render_to_response('index.html', context)
def setup_wsse_handler(base_url, username, password, preempt=True):
    """
    Configure urllib2 to use WSSE authentication with the given
    `username` and `password` when visiting any page under `base_url`.
    Once this function has been called, all subsequent requests made
    through urllib2 can authenticate via WSSE.
    """
    # Create a password manager
    passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
    # Add username/password for the domain defined by base_url
    passman.add_password(None, base_url, username, password)
    # Create the auth handler and install it in urllib2
    authhandler = WSSEAuthHandler(passman, preempt=preempt)
    opener = urllib2.build_opener(authhandler)
    urllib2.install_opener(opener)
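A short usage sketch of the function above, assuming WSSEAuthHandler is available in the same module; the endpoint and credentials are placeholders.

import urllib2

# Hypothetical base URL and account.
setup_wsse_handler('https://api.example.com/', 'alice', 's3cret')

# After installation, a plain urlopen() against that domain is authenticated transparently.
response = urllib2.urlopen('https://api.example.com/entries')
print response.read()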
# Example of how to use without handlers
def error_handler(url):
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            foo = fo.read()
            fo.close()
            try: status, reason = fo.status, fo.reason
            except AttributeError: status, reason = None, None
        except IOError, e:
            print " EXCEPTION: %s" % e
            raise
        else:
            print " status = %s, reason = %s" % (status, reason)
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", ' '.join(hosts)
    keepalive_handler.close_all()
def comp(N, url):
    print ' making %i connections to:\n %s' % (N, url)
    sys.stdout.write(' first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print ' TIME: %.3f s' % t1
    sys.stdout.write(' now using the keepalive handler ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print ' TIME: %.3f s' % t2
    print ' improvement factor: %.2f' % (t1/t2, )
def send_common_request(url, is_post, cookie, para=''):
    """
    Send a plain web request to the target and return the response body
    (empty string on error).
    :url: target URL
    :is_post: request type; 2 means POST, anything else means GET
    :cookie: cookie header value
    """
    headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:42.0) Gecko/20100101 Firefox/42.0',
               'Cookie': cookie
               }
    # dns cache
    # socket.getaddrinfo = new_getaddrinfo
    try:
        encoding_support = ContentEncodingProcessor()
        opener = urllib2.build_opener(encoding_support, urllib2.HTTPHandler)
        urllib2.install_opener(opener)
        if is_post == 2:  # post
            # url, query = url.split('?', 1)
            return urllib2.urlopen(urllib2.Request(url, para, headers=headers)).read()
        else:
            return urllib2.urlopen(urllib2.Request('?'.join([url, para]), headers=headers)).read()
    except Exception:
        return ''
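A short usage sketch with made-up parameters; the target URL, cookie and query string are placeholders, and ContentEncodingProcessor is expected to come from the surrounding module.

# GET request: the parameters are appended to the URL with '?'
body = send_common_request('http://example.com/item.php', 0, 'PHPSESSID=abc123', 'id=1')

# POST request (is_post == 2): the parameters are sent as the request body
body = send_common_request('http://example.com/item.php', 2, 'PHPSESSID=abc123', 'id=1')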
def error_handler(url):
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            foo = fo.read()
            fo.close()
            try: status, reason = fo.status, fo.reason
            except AttributeError: status, reason = None, None
        except IOError, e:
            print " EXCEPTION: %s" % e
            raise
        else:
            print " status = %s, reason = %s" % (status, reason)
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", hosts
    keepalive_handler.close_all()
def checker():
    while True:
        if not proxyq.empty():
            proxy = "http://{}".format(proxyq.get())
            url = "http://icanhazip.com"
            proxy_handler = urllib2.ProxyHandler({"http": proxy})
            opener = urllib2.build_opener(proxy_handler)
            urllib2.install_opener(opener)
            printq.put("[>] Trying {}".format(proxy))
            try:
                # icanhazip.com echoes the caller's IP; if it matches the proxy
                # address, the proxy actually forwarded the request.
                response = urllib2.urlopen(url, timeout=3).readlines()
                for line in response:
                    if line.rstrip("\n") in proxy:
                        printq.put("[+] Working proxy: {}".format(proxy))
                        with open("working.txt", "a") as log:
                            log.write("{}\n".format(proxy))
            except Exception:
                printq.put("[!] Bad proxy: {}".format(proxy))
            proxyq.task_done()
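A minimal sketch of how such a worker is typically driven, assuming proxyq and printq are module-level queues shared with checker(); the proxy list file name and thread count are made up.

import threading
import Queue

proxyq = Queue.Queue()
printq = Queue.Queue()

# Fill the work queue with "host:port" strings, then start a few worker threads.
with open("proxies.txt") as f:
    for line in f:
        proxyq.put(line.strip())

for _ in range(10):
    t = threading.Thread(target=checker)
    t.daemon = True
    t.start()

# A separate thread would normally drain printq; here we just wait for the work.
proxyq.join()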
def init_options(proxy=None, cookie=None, ua=None, referer=None):
    globals()["_headers"] = dict(filter(lambda _: _[1], ((COOKIE, cookie), (UA, ua or NAME), (REFERER, referer))))
    urllib2.install_opener(urllib2.build_opener(urllib2.ProxyHandler({'http': proxy})) if proxy else None)
# if __name__ == "__main__":
# print "%s #v%s\n by: %s\n" % (NAME, VERSION, AUTHOR)
# parser = optparse.OptionParser(version=VERSION)
# parser.add_option("-u", "--url", dest="url", help="Target URL (e.g. \"http://www.target.com/page.php?id=1\")")
# parser.add_option("--data", dest="data", help="POST data (e.g. \"query=test\")")
# parser.add_option("--cookie", dest="cookie", help="HTTP Cookie header value")
# parser.add_option("--user-agent", dest="ua", help="HTTP User-Agent header value")
# parser.add_option("--referer", dest="referer", help="HTTP Referer header value")
# parser.add_option("--proxy", dest="proxy", help="HTTP proxy address (e.g. \"http://127.0.0.1:8080\")")
# options, _ = parser.parse_args()
# if options.url:
# init_options(options.proxy, options.cookie, options.ua, options.referer)
# result = scan_page(options.url if options.url.startswith("http") else "http://%s" % options.url, options.data)
# print "\nscan results: %s vulnerabilities found" % ("possible" if result else "no")
# else:
# parser.print_help()
def setUp(self):
    mechanize._testcase.TestCase.setUp(self)
    self.test_uri = urljoin(self.uri, "test_fixtures")
    self.server = self.get_cached_fixture("server")
    if self.no_proxies:
        old_opener_m = mechanize._opener._opener
        old_opener_u = urllib2._opener
        mechanize.install_opener(mechanize.build_opener(
            mechanize.ProxyHandler(proxies={})))
        urllib2.install_opener(urllib2.build_opener(
            urllib2.ProxyHandler(proxies={})))

        def revert_install():
            mechanize.install_opener(old_opener_m)
            urllib2.install_opener(old_opener_u)
        self.add_teardown(revert_install)