def wsopen(self, url, post, **params):
    noparam = params.pop('noparam', False)
    if noparam:
        params = {}
    else:
        if self.user is not None:
            params['user'] = self.user
        # Sign the request: drop any stale 'hmac' entry, then HMAC the
        # sorted key=value pairs with the account password.
        if self.password is not None:
            params.pop('hmac', None)
            HMAC = hmac.new(self.password)
            for k, v in sorted(params.items()):
                HMAC.update("%s=%s" % (k, v))
            params.update({'hmac': HMAC.hexdigest()})

    query = urllib.urlencode(params)
    if post:
        body = query
    elif query:
        url = "{}?{}".format(url, query)

    if self.debug:
        if post:
            print("POST:\n{}\n{!r}\n".format(url, body), file=sys.stderr)
        else:
            print("GET:\n{}\n".format(url), file=sys.stderr)

    # FancyURLopener subclass that returns the response for any HTTP error
    # code instead of raising, so the status can be inspected below.
    class URLopener(urllib.FancyURLopener):
        def http_error_default(self, url, fp, errcode, errmsg, headers):
            return urllib.addinfourl(fp, headers, "http:" + url, errcode)

    try:
        urllib._urlopener = URLopener()
        if post:
            resp = urllib.urlopen(url, body)
        else:
            resp = urllib.urlopen(url)
    except IOError as e:
        raise WSError(url, msg=e)

    if self.debug:
        print("RESPONSE:\n{}\n{}".format(resp.getcode(), resp.info()), file=sys.stderr)

    if resp.getcode() != 200:
        raise WSError(url, resp.getcode(), resp.read())
    return resp
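The pattern above relies on Python 2's module-level urllib._urlopener: urllib.urlopen() lazily creates a FancyURLopener and caches it there, so assigning a subclass changes the behaviour of every later urlopen() call in the process. A minimal standalone sketch of that idea follows; the class name, example URL, and printed status are illustrative, not part of the original method.

# Minimal sketch of the opener-swap pattern (Python 2 only; urllib._urlopener
# does not exist in Python 3). Names and URL below are hypothetical.
import urllib

class RawStatusOpener(urllib.FancyURLopener):
    # Return the response for any HTTP error code instead of raising,
    # mirroring the URLopener subclass defined inside wsopen() above.
    def http_error_default(self, url, fp, errcode, errmsg, headers):
        return urllib.addinfourl(fp, headers, "http:" + url, errcode)

urllib._urlopener = RawStatusOpener()                 # swap the cached global opener
resp = urllib.urlopen("http://example.com/missing")   # hypothetical URL
print(resp.getcode())                                 # e.g. 404 is returned, not raised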
def NotifySearch(self):
    """ Send notification of the new Sitemap(s) to the search engines. """
    if self._suppress:
        output.Log('Search engine notification is suppressed.', 1)
        return

    output.Log('Notifying search engines.', 1)

    # Override the urllib's opener class with one that doesn't ignore 404s
    class ExceptionURLopener(urllib.FancyURLopener):
        def http_error_default(self, url, fp, errcode, errmsg, headers):
            output.Log('HTTP error %d: %s' % (errcode, errmsg), 2)
            raise IOError
        #end def http_error_default
    #end class ExceptionURLopener

    old_opener = urllib._urlopener
    urllib._urlopener = ExceptionURLopener()

    # Build the URL we want to send in
    if self._sitemaps > 1:
        url = self._filegen.GenerateURL(SITEINDEX_SUFFIX, self._base_url)
    else:
        url = self._filegen.GenerateURL(0, self._base_url)

    # Test if we can hit it ourselves
    try:
        u = urllib.urlopen(url)
        u.close()
    except IOError:
        output.Error('When attempting to access our generated Sitemap at the '
                     'following URL:\n %s\n we failed to read it. Please '
                     'verify that the store_into path you specified in\n'
                     ' your configuration file is web-accessible. Consult '
                     'the FAQ for more\n information.' % url)
        output.Warn('Proceeding to notify with an unverifiable URL.')

    # Cycle through notifications
    # To understand this, see the comment near the NOTIFICATION_SITES comment
    for ping in NOTIFICATION_SITES:
        query_map = ping[3]
        query_attr = ping[5]
        query_map[query_attr] = url
        query = urllib.urlencode(query_map)
        notify = urlparse.urlunsplit((ping[0], ping[1], ping[2], query, ping[4]))

        # Send the notification
        output.Log('Notifying: %s' % ping[1], 1)
        output.Log('Notification URL: %s' % notify, 2)
        try:
            u = urllib.urlopen(notify)
            u.read()
            u.close()
        except IOError:
            output.Warn('Cannot contact: %s' % ping[1])

    # Restore the original opener so later urllib calls are unaffected
    if old_opener:
        urllib._urlopener = old_opener
#end def NotifySearch
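The ping loop indexes each NOTIFICATION_SITES entry positionally: scheme, host, path, a base query dict, a fragment, and the name of the query parameter that carries the sitemap URL. A hedged sketch of the shape the loop assumes is below; the Google ping endpoint shown is illustrative, and the real list is defined elsewhere in sitemap_gen.py.

# Assumed entry layout: (scheme, netloc, path, base_query_map, fragment, query_attr)
NOTIFICATION_SITES = [
    ('http', 'www.google.com', 'webmasters/tools/ping', {}, '', 'sitemap'),  # illustrative
]

# For such an entry, one loop iteration effectively builds:
#   query_map['sitemap'] = url
#   notify = urlparse.urlunsplit(('http', 'www.google.com', 'webmasters/tools/ping',
#                                 urllib.urlencode(query_map), ''))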
def install_patches():
    if six.PY3:
        # The old urllib does not exist in Py3, so delegate to urllib2 patcher
        from . import urllib2
        urllib2.install_patches()
        return

    import urllib
    import urlparse

    log.info('Instrumenting urllib methods for tracing')

    class TracedURLOpener(urllib.FancyURLopener):

        def open(self, fullurl, data=None):
            parsed_url = urlparse.urlparse(fullurl)
            host = parsed_url.hostname or None
            port = parsed_url.port or None

            span = utils.start_child_span(
                operation_name='urllib', parent=get_current_span())
            span.set_tag(ext_tags.SPAN_KIND, ext_tags.SPAN_KIND_RPC_CLIENT)

            # use span as context manager so that its finish() method is called
            with span:
                span.set_tag(ext_tags.HTTP_URL, fullurl)
                if host:
                    span.set_tag(ext_tags.PEER_HOST_IPV4, host)
                if port:
                    span.set_tag(ext_tags.PEER_PORT, port)
                # TODO add callee service name
                # TODO add headers to propagate trace

                # cannot use super here, this is an old-style class
                fileobj = urllib.FancyURLopener.open(self, fullurl, data)
                if fileobj.getcode() is not None:
                    span.set_tag('http.status_code', fileobj.getcode())

            return fileobj

        def retrieve(self, url, filename=None, reporthook=None, data=None):
            raise NotImplementedError

    urllib._urlopener = TracedURLOpener()
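Once install_patches() has run on Python 2, plain urllib.urlopen() calls are routed through TracedURLOpener via the swapped-in urllib._urlopener, so a client span wraps each request. A rough usage sketch, assuming an OpenTracing tracer has already been initialised and that the hook module lives under opentracing_instrumentation.client_hooks (the import path and URL are assumptions, not confirmed by this snippet):

# Hypothetical usage; tracer setup omitted, import path assumed.
from opentracing_instrumentation.client_hooks import urllib as urllib_hooks
import urllib

urllib_hooks.install_patches()          # replaces urllib._urlopener (Python 2 path)

resp = urllib.urlopen('http://example.com/api')  # traced: span tags include the URL,
print(resp.getcode())                            # peer host/port, and http.status_code
resp.close()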