def download_and_import(self, repo):
    """Download a GitHub repository zipball and import it as an Anki deck.

    *repo* is a "user/name" GitHub slug.  Shows a warning dialog and
    re-raises on any network or filesystem failure.
    """
    try:
        # Fetch the archive for the configured branch and unpack it
        # into the temporary directory.
        archive = urllib2.urlopen(GITHUB_LINK.format(repo))
        archive_buffer = StringIO.StringIO(archive.read())
        with zipfile.ZipFile(archive_buffer) as zipped_repo:
            zipped_repo.extractall(tempfile.tempdir)

        # GitHub names the extracted folder "<repo>-<branch>"; rename it
        # to the plain deck name, clearing any stale copy first.
        deck_name = repo.split("/")[-1]
        extracted_dir = Path(tempfile.tempdir).joinpath(deck_name + "-" + BRANCH_NAME)
        target_dir = Path(tempfile.tempdir).joinpath(deck_name)
        utils.fs_remove(target_dir)
        extracted_dir.rename(target_dir)

        # Todo progressbar on download
        AnkiJsonImporter.import_deck(self.collection, target_dir)
    except (urllib2.URLError, urllib2.HTTPError, OSError) as error:
        aqt.utils.showWarning("Error while trying to get deck from Github: {}".format(error))
        raise
# Example source snippets for Python's HTTPError() class
def fetch_decode(url, encoding=None):
    """Fetch url and decode.

    Retries (recursively) on HTTP 503 after a short pause.  Decodes with
    *encoding* when given, otherwise with the charset advertised in the
    Content-Type header; returns raw bytes when no charset is known.
    """
    try:
        req = g.opener.open(url)
    except HTTPError as e:
        # 503 = transiently unavailable; back off briefly and retry.
        if e.getcode() == 503:
            time.sleep(.5)
            return fetch_decode(url, encoding)
        else:
            raise
    ct = req.headers['content-type']
    if encoding:
        return req.read().decode(encoding)
    elif "charset=" in ct:
        dbg("charset: %s", ct)
        # Fixed regex typo: "(:?;|$)" was meant to be the non-capturing
        # group "(?:;|$)" (terminate at ";" or end of header).
        encoding = re.search(r"charset=([\w-]+)\s*(?:;|$)", ct).group(1)
        return req.read().decode(encoding)
    else:
        dbg("encoding unknown")
        return req.read()
def call_gdata(api, qs):
    """Make a request to the youtube gdata api.

    Returns the decoded JSON payload.  On HTTP failure, raises GdataError
    carrying the API's error message (or the raw exception text when the
    error body cannot be parsed).
    """
    qs = dict(qs)
    qs['key'] = g.api_key
    url = g.urls['gdata'] + api + '?' + urlencode(qs)
    try:
        data = g.opener.open(url).read().decode('utf-8')
    except HTTPError as e:
        try:
            errdata = e.file.read().decode()
            error = json.loads(errdata)['error']['message']
            errmsg = 'Youtube Error %d: %s' % (e.getcode(), error)
        except Exception:
            # Error body missing or not the expected JSON shape; fall back
            # to the exception's own text.  (Was a bare "except:", which
            # also swallowed KeyboardInterrupt/SystemExit.)
            errmsg = str(e)
        raise GdataError(errmsg)
    return json.loads(data)
def getUrlContent(self, url):
    """Fetch *url* and return the response body.

    An HTTP 500 is retried up to three times with a one-second pause; any
    other HTTP status aborts the retry loop.  Non-HTTP failures raise a
    Network error immediately; exhausting the retries raises Unknown.
    """
    for attempt in range(3):
        try:
            return urllib2.urlopen(url).read()
        except urllib2.HTTPError as e:
            if e.getcode() == 500:
                self.writeLog( "Try #{0}: ".format(attempt+1) )
                time.sleep(1)
            self.writeLog( str(e) + "\n" )
            if e.getcode() != 500:
                # Any non-500 status is not worth retrying.
                break
        except Exception as e:
            self.writeLog( str(e) + "\n" )
            raise ComicVineTalkerException(ComicVineTalkerException.Network, "Network Error!")
    raise ComicVineTalkerException(ComicVineTalkerException.Unknown, "Error on Comic Vine server")
def check_for_update():
    """Report our version to CORE_VERSION_URL at most once per UTC day.

    The mtime of FILE_UPDATE records the last attempt; the response body
    is cached into the same file.  Network failures are ignored.
    """
    if os.path.exists(FILE_UPDATE):
        mtime = os.path.getmtime(FILE_UPDATE)
        last_check = datetime.utcfromtimestamp(mtime).strftime('%Y-%m-%d')
        today = datetime.utcnow().strftime('%Y-%m-%d')
        if last_check == today:
            # Already checked today -- nothing to do.
            return
    try:
        # Touch the marker first so even a failed request counts as
        # today's attempt.
        with open(FILE_UPDATE, 'a'):
            os.utime(FILE_UPDATE, None)
        request = urllib2.Request(
            CORE_VERSION_URL,
            urllib.urlencode({'version': main.__version__}),
        )
        response = urllib2.urlopen(request)
        with open(FILE_UPDATE, 'w') as update_json:
            update_json.write(response.read())
    except (urllib2.HTTPError, urllib2.URLError):
        # Best-effort check: network problems are silently ignored.
        pass
def _GetAuthCookie(self, auth_token):
    """Fetches authentication cookies for an authentication token.

    Args:
      auth_token: The authentication token returned by ClientLogin.

    Raises:
      HTTPError: If there was an error fetching the authentication cookies.
    """
    continue_location = "http://localhost/"
    args = {"continue": continue_location, "auth": auth_token}
    login_path = os.environ.get("APPCFG_LOGIN_PATH", "/_ah")
    req = self._CreateRequest("%s://%s%s/login?%s" %
                              (self.scheme, self.host, login_path,
                               urllib.urlencode(args)))
    try:
        response = self.opener.open(req)
    # Fixed Python-2-only "except X, e" syntax; "as" works on 2.6+ and 3.x.
    except urllib2.HTTPError as e:
        # The expected outcome is a 302 redirect, which surfaces as an
        # HTTPError here; treat the error object as the response.
        response = e
    if (response.code != 302 or
            response.info()["location"] != continue_location):
        raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                                response.headers, response.fp)
    self.authenticated = True
def _GetRemoteResourceLimits(logging_context):
    """Get the resource limit as reported by the admin console.

    Get the resource limits by querying the admin_console/appserver. The
    actual limits returned depends on the server we are talking to and
    could be missing values we expect or include extra values.

    Args:
      logging_context: The _ClientDeployLoggingContext for this upload.

    Returns:
      A dictionary ({} when the endpoint does not exist).
    """
    try:
        yaml_data = logging_context.Send('/api/appversion/getresourcelimits')
    # Fixed Python-2-only "except X, err" syntax; "as" works on 2.6+ and 3.x.
    except urllib2.HTTPError as err:
        # 404 just means the server has no such endpoint; anything else
        # is a real failure.
        if err.code != 404:
            raise
        return {}
    return yaml.safe_load(yaml_data)
def Send(self, url, payload='', **kwargs):
    """Sends a request to the server, with common params.

    Delegates to self.rpcserver.Send and registers the request (with its
    HTTP status and size) for client deploy logging, on both success and
    HTTP failure.
    """
    start_time_usec = self.GetCurrentTimeUsec()
    request_size_bytes = len(payload)
    try:
        logging.info('Send: %s, params=%s', url, self.request_params)
        kwargs.update(self.request_params)
        result = self.rpcserver.Send(url, payload=payload, **kwargs)
        self._RegisterReqestForLogging(url, 200, start_time_usec,
                                       request_size_bytes)
        return result
    # Fixed Python-2-only "except X, e" syntax; "as" works on 2.6+ and 3.x.
    except urllib2.HTTPError as e:
        self._RegisterReqestForLogging(url, e.code, start_time_usec,
                                       request_size_bytes)
        # Bare re-raise preserves the original traceback ("raise e" resets it).
        raise
def _IsExceptionClientDeployLoggable(self, exception):
"""Determines if an exception qualifes for client deploy log reistration.
Args:
exception: The exception to check.
Returns:
True iff exception qualifies for client deploy logging - basically a
system error rather than a user or error or cancellation.
"""
if isinstance(exception, KeyboardInterrupt):
return False
if (isinstance(exception, urllib2.HTTPError)
and 400 <= exception.code <= 499):
return False
return True
def _LogDoUploadException(exception):
"""Helper that logs exceptions that occurred during DoUpload.
Args:
exception: An exception that was thrown during DoUpload.
"""
def InstanceOf(tipe):
return isinstance(exception, tipe)
if InstanceOf(KeyboardInterrupt):
logging.info('User interrupted. Aborting.')
elif InstanceOf(urllib2.HTTPError):
logging.info('HTTP Error (%s)', exception)
elif InstanceOf(CannotStartServingError):
logging.error(exception.message)
else:
logging.exception('An unexpected error occurred. Aborting.')
def get_page(self, url, data=None):
    """Fetch *url* (POST when *data* is given) and return the body.

    All transport failures are normalised into BrowserError; user
    interruption (KeyboardInterrupt) still propagates.
    """
    handlers = [PoolHTTPHandler]
    opener = urllib2.build_opener(*handlers)
    if data:
        data = urllib.urlencode(data)
    request = urllib2.Request(url, data, self.headers)
    try:
        response = opener.open(request)
        return response.read()
    # Fixed Python-2-only "except X, e" syntax; "as" works on 2.6+ and 3.x.
    except (urllib2.HTTPError, urllib2.URLError) as e:
        raise BrowserError(url, str(e))
    except (socket.error, socket.sslerror) as msg:
        raise BrowserError(url, msg)
    except socket.timeout:
        raise BrowserError(url, "timeout")
    except KeyboardInterrupt:
        raise
    except Exception:
        # Was a bare "except:"; catching Exception lets SystemExit and
        # GeneratorExit propagate instead of being masked as BrowserError.
        raise BrowserError(url, "unknown error")
def broadcast_tx(self, tx):
    """Broadcast *tx* via blockchain.info's pushtx endpoint.

    Returns the raw server response.  On HTTP failure, the error body is
    attached to the exception as ``message`` (best effort) before
    re-raising.
    """
    s = io.BytesIO()
    tx.stream(s)
    tx_as_hex = b2h(s.getvalue())
    data = urlencode(dict(tx=tx_as_hex)).encode("utf8")
    URL = "http://blockchain.info/pushtx"
    try:
        d = urlopen(URL, data=data).read()
        return d
    except HTTPError as ex:
        try:
            d = ex.read()
            ex.message = d
        except Exception:
            # Best effort only -- was a bare "except:", which also
            # swallowed KeyboardInterrupt/SystemExit.
            pass
        raise ex
def discordEmbeddedPush(self, embed):
    """Push a single embed object to the configured Discord webhook.

    Failures are only logged (via self.debug), never raised.
    """
    data = json.dumps({"embeds": [embed]})
    headers = {
        'Content-Type': 'application/json',
        "User-Agent": "B3DiscordbanPlugin/1.1" #Is that a real User-Agent? Nope but who cares.
    }
    req = urllib2.Request(self._discordWebhookUrl, data, headers)
    try:
        urllib2.urlopen(req)
    except urllib2.HTTPError as ex:
        # Most likely a misconfigured webhook URL; dump the payload and
        # Discord's response for diagnosis.
        self.debug("Cannot push data to Discord. is your webhook url right?")
        self.debug("Data: %s\nCode: %s\nRead: %s" % (data, ex.code, ex.read()))
def _do_put_request(self, resource, param_dict):
    """PUT *param_dict* as JSON to api/v<api_version>/<resource> on self.host.

    Returns the evaluated response body on success, or parse_errors(err)
    on HTTP failure.
    """
    req_url = urlparse.urlunparse(["http", self.host, "api/v%s/%s" % (self.api_version, resource), "", "", ""])
    # print-as-function: identical output on Python 2 for a single argument,
    # and valid Python 3 (the bare print statement was Python-2-only).
    print("req_url=%s" % (req_url))
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    req = urllib2.Request(req_url, data=json.dumps(param_dict))
    req.add_header('Content-Type', 'application/json')
    req.get_method = lambda: 'PUT'
    try:
        # SECURITY: eval() on a server response executes arbitrary code if
        # the endpoint is ever compromised; json.loads() would be the safe
        # replacement if the API returns JSON.
        return eval(opener.open(req).read())
    # Fixed Python-2-only "except X, err" syntax; "as" works on 2.6+ and 3.x.
    except urllib2.HTTPError as err:
        return parse_errors(err)
#---------------------------------------------
# error parsing
# --------------------------------------------
def Post(self, url, data, refer=None):
    """POST form-encoded *data* to *url* and return the response body.

    Cookies are saved to disk after a successful request.  On HTTPError
    the error body is returned instead of raising.
    """
    try:
        req = urllib2.Request(url, urllib.urlencode(data))
        # The QQ web proxy rejects requests without a Referer header, so
        # always send one (defaulting to the proxy page itself).
        if refer is not None:
            req.add_header('Referer', refer)
        else:
            req.add_header('Referer', 'http://d1.web2.qq.com/proxy.html?v=20151105001&callback=1&id=2')
        tmp_req = urllib2.urlopen(req, timeout=180)
        self.__cookie.save('cookie/cookie.data', ignore_discard=True, ignore_expires=True)
        return tmp_req.read()
    # Fixed Python-2-only "except X, e" syntax; "as" works on 2.6+ and 3.x.
    except urllib2.HTTPError as e:
        return e.read()
def raise_for_status(self):
    """Re-raise a stored error, if any.

    The stored error is expected to be an instance of
    :class:`urllib2.HTTPError` kept on ``self.error``; when it is
    None this is a no-op.
    """
    if self.error is None:
        return
    raise self.error
def run(self):
    """Query DNSDB and report the date-normalised records.

    An HTTP 404 (no matching records) reports an empty list; any other
    HTTPError is handed to self.unexpectedError.
    """
    try:
        client = DnsdbClient(self.dnsdb_server, self.dnsdb_key)
        self.report({
            "records": map(lambda r: self.update_date('time_first', self.update_date('time_last', r)), self.execute_dnsdb_service(client))
        })
    # Fixed Python-2-only "except X, e" syntax; "as" works on 2.6+ and 3.x.
    except HTTPError as e:
        if e.code != 404:
            self.unexpectedError(e)
        else:
            self.report({"records": []})
def linksExtractor(url, fileFormat='png'):
    """Scrape *url* and return all link targets ending in *fileFormat*.

    Image formats are collected from <img src=...>; any other format from
    <a href=...>.  Returns an error string on network failure and 'EMPTY'
    when no matching links were found.
    """
    tag = 'a'
    attr = 'href'
    if fileFormat in ['png', 'jpg', 'jpeg', 'tiff', 'bmp', 'svg', 'gif']:
        tag = 'img'
        attr = 'src'
    try:
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)'}
        req = urllib2.Request(url, None, headers)
        htmlDoc = urllib2.urlopen(req).read()
    except urllib2.HTTPError as err:
        # Fixed: HTTPError.code is an int attribute -- calling it as
        # err.code() raised TypeError and masked the real status code.
        print("Server Response : " + str(err.code))
        return "Server refused to connect!"
    except urllib2.URLError:
        return 'Invalid URL!'
    page = BeautifulSoup(htmlDoc, 'html.parser')
    page.prettify()
    res = []
    for link in page.find_all(tag):
        pre = str(link.get(attr))
        if pre[-len(fileFormat):] == fileFormat:
            res.append(pre)
    if len(res) < 1:
        return 'EMPTY'
    return res
def update_plex():
    """Ask the Plex server to refresh all library sections.

    Network failures are logged as warnings and swallowed.
    """
    Logger.info("plex - sending request to update Plex")
    url = 'http://%s/library/sections/all/refresh?X-Plex-Token=%s' % (PLEX_IP, PLEX_TOKEN)
    try:
        urllib2.urlopen(url).read()
    # Fixed Python-2-only "except X, e" syntax; "as" works on 2.6+ and 3.x.
    except urllib2.HTTPError as e:
        Logger.warning("plex - unable to make request to Plex - HTTP Error %s", str(e.code))
    except urllib2.URLError as e:
        Logger.warning("plex - unable to make request to Plex - URL Error %s", e.reason)
    else:
        Logger.info("plex - update successful")
def update_plex():
    """Ask the Plex server to refresh all library sections.

    Network failures are logged as warnings and swallowed.
    """
    Logger.info("plex - sending request to update Plex")
    url = 'http://%s/library/sections/all/refresh?X-Plex-Token=%s' % (PLEX_IP, PLEX_TOKEN)
    try:
        urllib2.urlopen(url).read()
    # Fixed Python-2-only "except X, e" syntax; "as" works on 2.6+ and 3.x.
    except urllib2.HTTPError as e:
        Logger.warning("plex - unable to make request to Plex - HTTP Error %s", str(e.code))
    except urllib2.URLError as e:
        Logger.warning("plex - unable to make request to Plex - URL Error %s", e.reason)
    else:
        Logger.info("plex - update successful")