async def test_stream_cancel(event_loop):
    """Check that cancelling the stream-iteration task stops the stream cleanly.

    BUG FIX: the function body awaits, so it must be declared ``async def``;
    the original plain ``def`` was a SyntaxError.
    """
    async def cancel(task):
        # Let the iteration task actually start before cancelling it.
        await asyncio.sleep(0.001)
        task.cancel()

    async def test_stream_iterations(stream):
        # Iterate forever; only the cancellation above can stop this loop.
        while True:
            await test_stream_iteration(stream)

    with aiohttp.ClientSession(loop=event_loop) as session:
        client = peony.client.BasePeonyClient("", "", session=session)
        context = peony.stream.StreamResponse(method='GET',
                                              url="http://whatever.com",
                                              client=client)

        with context as stream:
            with patch.object(stream, '_connect',
                              side_effect=stream_content):
                coro = test_stream_iterations(stream)
                task = event_loop.create_task(coro)
                cancel_task = event_loop.create_task(cancel(task))

                # Safety net: the whole dance must finish within a second.
                with aiohttp.Timeout(1):
                    await asyncio.wait([task, cancel_task])
# Example usages of the aiohttp Timeout() class
def test_auth_with_valid_data(self):
    """End-to-end test of the VK OAuth authorization-code flow with valid credentials."""
    s = TestAuthSession(login=USER_LOGIN, password=USER_PASSWORD, app_id=APP_ID)
    # verify_ssl=False disables certificate checks for the test session;
    # CustomClientResponse presumably tweaks response parsing — see its definition.
    s.driver.session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False),
                                             response_class=CustomClientResponse)
    yield from s.authorize()
    params = {'client_id': APP_ID, 'display': 'page', 'redirect_uri': REDIRECT_URI, 'response_type': 'code'}
    # Hitting /authorize after a successful login should redirect to
    # REDIRECT_URI with a ?code=... query parameter.
    with aiohttp.Timeout(10):
        response = yield from s.driver.session.get("https://oauth.vk.com/authorize",
                                                   params=params, allow_redirects=True)
    s.close()
    code = response.url.query.get('code')
    self.assertIsNotNone(code)
    # Exchange the authorization code for an access token.
    s = AuthorizationCodeSession(APP_ID, APP_SECRET, REDIRECT_URI, code)
    yield from s.authorize()
    s.close()
    self.assertIsNotNone(s.access_token)
async def post_resource(self, api_path, data, **kwargs):
    """
    Helper method for HTTP POST API requests.

    BUG FIX: the body uses ``async with``/``await``, so the method must be
    ``async def``; the original plain ``def`` was a SyntaxError.

    Args:
        api_path(str): REST API path
        data: JSON data for POST request
    Keyword Args:
        kwargs: keyword args used for replacing items in the API path
    """
    post_headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        **self.headers
    }
    url = self.build_api_url(
        api_path.format(
            tenant=self.tenant,
            controllerId=self.controller_id,
            **kwargs))
    self.logger.debug('POST {}'.format(url))
    # aiohttp.Timeout bounds the total time for the request/response cycle.
    with aiohttp.Timeout(self.timeout):
        async with self.session.post(url, headers=post_headers,
                                     data=json.dumps(data)) as resp:
            await self.check_http_status(resp)
async def put_resource(self, api_path, data, **kwargs):
    """
    Helper method for HTTP PUT API requests.

    BUG FIX: the body uses ``async with``/``await``, so the method must be
    ``async def``; the original plain ``def`` was a SyntaxError.

    Args:
        api_path(str): REST API path
        data: JSON data for PUT request
    Keyword Args:
        kwargs: keyword args used for replacing items in the API path
    """
    put_headers = {
        'Content-Type': 'application/json',
        **self.headers
    }
    url = self.build_api_url(
        api_path.format(
            tenant=self.tenant,
            controllerId=self.controller_id,
            **kwargs))
    self.logger.debug('PUT {}'.format(url))
    self.logger.debug(json.dumps(data))
    with aiohttp.Timeout(self.timeout):
        async with self.session.put(url, headers=put_headers,
                                    data=json.dumps(data)) as resp:
            await self.check_http_status(resp)
async def _request(self, method, store_path=None, **kwargs):
    """Perform an HTTP request against ``self.url()`` and decode the reply.

    BUG FIX: the body uses ``async with``/``await``, so the method must be
    ``async def``; the original plain ``def`` was a SyntaxError.

    Args:
        method (str): HTTP verb (e.g. 'get', 'post').
        store_path: when given, the raw response text is persisted via
            ``self._store`` before decoding.
        kwargs: forwarded to ``session.request``; headers are overwritten
            with ``self.headers``.

    Returns:
        Parsed JSON when the configured Content-Type is application/json,
        otherwise the raw response text.

    Raises:
        RestNotFoundException: on HTTP 404.
        RestDecoderException: when the body is not valid JSON.
    """
    with aiohttp.Timeout(self.timeout, loop=self.session.loop):
        url = self.url()
        self.logger.debug('HTTP %s %s' % (method.upper(), url))
        kwargs['headers'] = self.headers
        async with self.session.request(method, url, **kwargs) as response:
            if self.headers['Content-Type'] == 'application/json':
                result = await response.text()
                if store_path is not None:
                    await self._store(store_path, method, url, result)
                if response.status == 404:  # pragma: no cover
                    raise RestNotFoundException("Not found.")
                try:
                    return json.loads(result)
                except json.decoder.JSONDecodeError:
                    raise RestDecoderException(result)
            else:  # pragma: no cover
                return await response.text()
async def fetch(retry=0):
    """Fetch our externally visible IP via a random proxy; retry up to 5 times.

    BUG FIXES: declared ``async def`` (body awaits; plain ``def`` was a
    SyntaxError) and the except clause now also catches
    ``asyncio.TimeoutError`` — ``aiohttp.Timeout`` raises that, which the
    builtin ``TimeoutError`` did not cover before Python 3.11.

    Returns:
        dict: the decoded JSON body of http://httpbin.org/ip.
    """
    proxy = 'http://{}'.format(Proxy.get_random()['address'])
    headers = {'user-agent': get_user_agent()}
    conn = aiohttp.ProxyConnector(proxy=proxy)
    url = 'http://httpbin.org/ip'
    try:
        with aiohttp.ClientSession(connector=conn) as session:
            with aiohttp.Timeout(TIMEOUT):
                async with session.get(url, headers=headers) as resp:
                    return await resp.json()
    except (ProxyConnectionError, asyncio.TimeoutError, TimeoutError):
        # Drop the dead proxy from the pool before retrying.
        try:
            p = Proxy.objects.get(address=proxy)
            if p:
                p.delete()
        except DoesNotExist:
            pass
        retry += 1
        if retry > 5:
            raise TimeoutError()
        await asyncio.sleep(1)
        return await fetch(retry=retry)
async def fetch(url, retry=0):
    """Fetch *url* and its companion JS endpoint through a random proxy.

    BUG FIXES: declared ``async def`` (body awaits; plain ``def`` was a
    SyntaxError) and the bare ``except:`` is narrowed to ``Exception`` so
    task cancellation and keyboard interrupts are not swallowed.

    Returns:
        tuple: (html_text, js_data) on success; retries up to 5 times.

    Raises:
        CrawlerError: after more than 5 failed attempts.
    """
    proxy = 'http://{}'.format(Proxy.get_random()['address'])
    headers = {'user-agent': get_user_agent()}
    conn = aiohttp.ProxyConnector(proxy=proxy)
    js_url = gen_js_url(url)
    try:
        with aiohttp.ClientSession(connector=conn) as session:
            with aiohttp.Timeout(TIMEOUT):
                async with session.get(url, headers=headers) as resp:
                    html_text = await resp.text()
                async with session.get(js_url, headers=headers) as resp:
                    js_data = await resp.json()
    except Exception:
        retry += 1
        if retry > 5:
            raise CrawlerError()
        await asyncio.sleep(1)
        return await fetch(url, retry=retry)
    return html_text, js_data
async def _download(self, request):
    """Issue *request* over aiohttp and wrap the reply in an ``HttpResponse``.

    BUG FIX: the body uses ``async with``/``await``, so the method must be
    ``async def``; the original plain ``def`` was a SyntaxError.
    """
    log.debug("Http Request: %s %s" % (request.method, request.url))
    # Route through a proxy connector only when the request specifies one.
    connector = (None if request.proxy is None
                 else aiohttp.ProxyConnector(proxy=request.proxy))
    with aiohttp.ClientSession(connector=connector,
                               cookies=request.cookies) as session:
        with aiohttp.Timeout(self._timeout):
            async with session.request(request.method,
                                       request.url,
                                       headers=request.headers,
                                       data=request.body) as resp:
                body = await resp.read()
                response = HttpResponse(resp.url,
                                        resp.status,
                                        headers=resp.headers,
                                        body=body,
                                        cookies=resp.cookies)
    return response
async def _download(self, request):
    """Issue *request* over aiohttp and wrap the reply in an ``HttpResponse``.

    BUG FIX: the body uses ``async with``/``await``, so the method must be
    ``async def``; the original plain ``def`` was a SyntaxError.
    """
    log.debug("Http Request: %s %s" % (request.method, request.url))
    # Route through a proxy connector only when the request specifies one.
    connector = (None if request.proxy is None
                 else aiohttp.ProxyConnector(proxy=request.proxy))
    with aiohttp.ClientSession(connector=connector,
                               cookies=request.cookies) as session:
        with aiohttp.Timeout(self._timeout):
            async with session.request(request.method,
                                       request.url,
                                       headers=request.headers,
                                       data=request.body) as resp:
                body = await resp.read()
                response = HttpResponse(resp.url,
                                        resp.status,
                                        headers=resp.headers,
                                        body=body,
                                        cookies=resp.cookies)
    return response
def request_url_json(self, url, params, limit=True):
    """Perform a rate-limited GET against the Riot API and return decoded JSON.

    Raises:
        RiotApiRateExceededException: when the token bucket is empty.
        RiotApiHttpException: on any non-200 response status.
    """
    # Token-bucket limiter: refill proportionally to wall-clock time elapsed
    # since the previous call, capped at limit_messages.
    now = time.time()
    elapsed = now - self.last_time
    self.last_time = now
    refill_rate = self.limit_messages / self.limit_time
    self.bucket = min(self.bucket + elapsed * refill_rate, self.limit_messages)
    if self.bucket < 1:
        raise RiotApiRateExceededException("Riot Api rate request exceeded. Please wait until making the next request")
    self.bucket -= 1

    with aiohttp.Timeout(self.timeout):
        response = yield from self.session.get(url, params=params)
        if response.status != 200:
            raise RiotApiHttpException(response.status)
        return (yield from response.json())
def request_url_json(self, url, limit=True):
    """Perform a rate-limited GET against the Overwatch API and return decoded JSON.

    Raises:
        OverwatchApiRateExceededException: when the token bucket is empty.
        OverwatchApiHttpException: on any non-200 response status.
    """
    # Token-bucket limiter: refill proportionally to wall-clock time elapsed
    # since the previous call, capped at limit_messages.
    now = time.time()
    elapsed = now - self.last_time
    self.last_time = now
    refill_rate = self.limit_messages / self.limit_time
    self.bucket = min(self.bucket + elapsed * refill_rate, self.limit_messages)
    if self.bucket < 1:
        raise OverwatchApiRateExceededException()
    self.bucket -= 1

    with aiohttp.Timeout(self.timeout):
        response = yield from self.session.get(url)
        if response.status != 200:
            raise OverwatchApiHttpException(response.status)
        return (yield from response.json())
async def cmd_setavatar(self, message, url=None):
    """
    Usage:
        {command_prefix}setavatar [url]
    Changes the bot's avatar.
    Attaching a file and leaving the url parameter blank also works.

    BUG FIXES: declared ``async def`` (body awaits; plain ``def`` was a
    SyntaxError) and a missing url/attachment now raises a CommandError
    instead of an AttributeError on ``None.strip``.
    """
    if message.attachments:
        thing = message.attachments[0]['url']
    elif url:
        thing = url.strip('<>')
    else:
        raise exceptions.CommandError(
            "You must provide a URL or attach a file.", expire_in=20)
    try:
        with aiohttp.Timeout(10):
            async with self.aiosession.get(thing) as res:
                await self.edit_profile(avatar=await res.read())
    except Exception as e:
        raise exceptions.CommandError("Unable to change avatar: %s" % e, expire_in=20)
    return Response(":ok_hand:", delete_after=20)
async def fetch(session, url, dest='.', overwrite=False, verbose=False):
    """Fetch a single PDF file if not already existing.

    BUG FIX: the body uses ``async with``/``await``, so the function must be
    ``async def``; the original plain ``def`` was a SyntaxError.

    Args:
        session: aiohttp client session used for the download.
        url (str): URL of the PDF; its basename becomes the local file name.
        dest (str): destination directory.
        overwrite (bool): re-download even if the file already exists.
        verbose (bool): print a line per saved file.
    """
    pdf_name = os.path.basename(url)
    path = os.path.join(dest, pdf_name)
    if not os.path.exists(path) or overwrite:
        with aiohttp.Timeout(60, loop=session.loop):
            async with session.get(url) as response:
                pdf = await response.read()
        async with aiofiles.open(path, mode='wb') as f:
            await f.write(pdf)
        if verbose:
            print('saved %s (%d bytes)' % (path, len(pdf)))
async def fetch_image(self, session, relative, image_url):
    """Download *image_url* into the *relative* directory.

    BUG FIX: the body uses ``async with``/``await``, so the method must be
    ``async def``; the original plain ``def`` was a SyntaxError. Also uses
    the public ``concurrent.futures.TimeoutError`` alias instead of the
    private ``concurrent.futures._base`` module.

    Returns:
        bool: True when the image was written to disk, False otherwise.
    """
    fname = self.file_api.get_file_name(image_url)
    p = os.path.join(relative, fname)
    fetched = False
    try:
        with aiohttp.Timeout(self.timeout):
            async with session.get(image_url) as r:
                # Only keep the file when the final (post-redirect) URL still
                # resolves to the expected file name.
                if r.status == 200 and self.file_api.get_file_name(r.url) == fname:
                    c = await r.read()
                    if c:
                        with open(self.file_api.to_abs(p), "wb") as f:
                            f.write(c)
                        fetched = True
    except FileNotFoundError:
        self.logger.error("{0} is not found.".format(p))
    except concurrent.futures.TimeoutError:
        self.logger.warning("{0} is timeouted.".format(image_url))
    except Exception as ex:
        self.logger.warning("fetch image is failed. url: {0}, cause: {1}".format(image_url, str(ex)))
    return fetched
def _load_remote_data(self, url):
    """Synchronously fetch *url*, blocking on the session's event loop.

    Returns:
        bytes: the raw response body.

    Raises:
        TransportError: when the server answers with an error status; the
            body that was read is attached as ``content``.
    """
    async def _fetch():
        with aiohttp.Timeout(self.load_timeout):
            response = await self.session.get(url)
            content = await response.read()
            try:
                response.raise_for_status()
            except aiohttp.ClientError as exc:
                raise TransportError(
                    message=str(exc),
                    status_code=response.status,
                    content=content
                ).with_traceback(exc.__traceback__) from exc
            return content

    # Block until the coroutine completes and hand back the raw bytes.
    return self.loop.run_until_complete(_fetch())
async def get_json(self, url, timeout=None):
    """GET *url* and return ``{'res': <json or {}>, 'status': <code>}``.

    On timeout an empty dict is returned (no 'res'/'status' keys).

    BUG FIXES: declared ``async def`` (body awaits; plain ``def`` was a
    SyntaxError) and the handler now catches ``asyncio.TimeoutError`` —
    that is what ``aiohttp.Timeout`` raises; before Python 3.11 the builtin
    ``TimeoutError`` would never have matched.
    """
    if timeout is None:
        timeout = pub_config["http"]["timeout_seconds"]
    result = {}
    try:
        with aiohttp.Timeout(timeout):
            async with self._session.get(url) as resp:
                try:
                    res = await resp.json()
                except Exception as e:
                    logging.info("failed decoding response, got exception: {}".format(e))
                    res = {}
                result["res"] = res
                result["status"] = resp.status
    except asyncio.TimeoutError as e:
        logging.info("HTTP timeout ({}) with error: {}".format(url, e))
    return result
async def _fetch_page(self, request):
    """Fetch the page described by *request* and push its content to redis.

    BUG FIXES: declared ``async def`` (body awaits; plain ``def`` was a
    SyntaxError); the warning referenced an undefined name ``url`` — it now
    uses ``request['url']``; the bare ``except:`` is narrowed to
    ``Exception``.
    """
    try:
        with aiohttp.Timeout(10):
            async with aiohttp.get(request['url'], params=request['params'], headers=request['headers']) as response:
                try:
                    assert response.status == 200
                    if request['type'] == 'json':
                        content = await response.json()
                    else:
                        content = await response.text(request['type'])
                    obj = {'order': request['order'], 'content': content}
                    redis_push(self.redis, self.content_key, obj)
                except AssertionError:
                    logging.warning('{} {}'.format(response.status, request['url']))
    except Exception:  # kinds of error, not only asyncio.TimeoutError
        # Failed requests are currently dropped rather than re-queued.
        pass
async def _verify_proxy(self, proxy):
    """Probe *proxy* against a random test URL; store it in redis if it works.

    BUG FIXES: declared ``async def`` (body uses ``async with``; plain
    ``def`` was a SyntaxError); the session is now created before the
    ``try`` so the ``finally`` clause can never hit an unbound name; bare
    ``except:`` clauses narrowed to ``Exception``.
    """
    addr = proxy['protocol'] + '://' + proxy['ip'] + ':' + proxy['port']
    conn = aiohttp.ProxyConnector(proxy=addr)
    session = aiohttp.ClientSession(connector=conn)
    try:
        with aiohttp.Timeout(10):
            # Close connection and response, otherwise aiohttp warns about
            # unclosed connections/responses.
            async with session.get(self.test_url[random.randrange(len(self.test_url))]) as response:
                if response.status == 200:
                    redis_sadd(self.redis, self.proxy_key, proxy)
    except Exception:
        # ProxyConnectionError, HttpProxyError, timeouts, etc. — the proxy
        # simply fails verification and is skipped.
        pass
    finally:
        session.close()  # close session even on timeout
def exchange(amount, from_curr, to_curr, timeout=10):
    """Converts an amount of money from one currency to another

    BUG FIX: the bare ``except:`` is narrowed to ``Exception`` so things
    like KeyboardInterrupt are no longer swallowed.

    Args:
        amount (float): The amount of money you want to convert
        from_curr (str): The currency you want to convert from,
            either country symbol (e.g USD) or currency symbol (e.g. £)
        to_curr (str): The currency you want to convert to, same format as from_curr
        timeout (int, optional): The time in seconds aiohttp will take to timeout the request
    Returns:
        float: the converted amount of money to 2 d.p., or the original amount if the conversion failed.
    """
    try:
        with aiohttp.ClientSession() as session:
            with aiohttp.Timeout(timeout):
                resp = yield from session.get("http://api.fixer.io/latest?symbols=" + from_curr + "," + to_curr)
                data = yield from resp.json()
                if "rates" in data:
                    # Convert via the base currency, then truncate to 2 d.p.
                    return int((amount / data["rates"][from_curr]) * data["rates"][to_curr] * 100)/100
    except Exception:
        # Best-effort: any failure falls back to the unconverted amount.
        return amount
def get_recommendations(appid, timeout=10):
    """Scrape Steam's 'more like this' page for *appid*.

    Args:
        appid: the app id whose similar games should be listed.
        timeout (int, optional): seconds before aiohttp raises a timeout error.

    Returns:
        list: app-id strings of similar games found on the page.
    """
    appid = str(appid)
    similar = []
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://store.steampowered.com/recommended/morelike/app/" + appid)
            text = yield from resp.text()
            print(text)
            soup = BeautifulSoup(text, "html.parser")
            items = soup.find_all("div", {"class": "similar_grid_item"})
            print("found %s items" % len(items))
            # Guard-clause loop: skip malformed grid items early.
            for item in items:
                subsoup = item.find("div", {"class": "similar_grid_capsule"})
                if subsoup is None:
                    print("failed to get item")
                    continue
                similar_id = subsoup.get("data-ds-appid")
                if similar_id is None:
                    print("failed to find appid")
                    continue
                similar.append(similar_id)
    return similar
def get_user(steamid, timeout=10, be_specific=False):
    """Gets some information about a specific steamid

    Args:
        steamid (str): The user's steamid (a name is resolved first)
        timeout (int, optional): The amount of time before aiohttp raises a timeout error

    Returns:
        a UserResult object, or None when the user cannot be resolved/found
    """
    # Resolve a display name to a numeric steamid when necessary.
    if not is_integer(steamid):
        steamid = yield from search_for_userid(steamid, be_specific=be_specific)
    if steamid is None:
        return None

    _check_key_set()
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/?key=" + STEAM_KEY + "&steamids=" + steamid)
            data = yield from resp.json()
            if "response" in data and "players" in data["response"] and len(data["response"]["players"]) > 0:
                return UserResult(data["response"]["players"][0])
    return None
def get_user_id(name, timeout=10):
    """Resolves a username to a steamid, however is limited to ONLY vanity URL's. search_user_id is recommended

    Args:
        name (str): The name of the user to find the steamid of
        timeout (int, optional): The amount of time before aiohttp raises a timeout error

    Returns:
        either None or a steamid (str) if a vanity url matching that name is found
    """
    # Fast path: previously resolved names are cached.
    if name in userid_cache:
        return userid_cache[name]

    _check_key_set()
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/?key=" + STEAM_KEY + "&vanityurl=" + parse.quote(name))
            data = yield from resp.json()
            if "response" in data and "success" in data["response"] and data["response"]["success"] == 1:
                steam_id = data["response"]["steamid"]
                if STEAM_CACHE:
                    userid_cache[name] = steam_id
                return steam_id
    return None
def steam_user_data(timeout=10):
    """Gets information about the amount of users on steam over the past 48 hours

    Args:
        timeout (int, optional): The amount of time before aiohttp raises a timeout error

    Returns:
        A tuple containing (min_users (int), max_users (int), current_users (int))"""
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://store.steampowered.com/stats/userdata.json")
            data = yield from resp.json()
    samples = data[0]["data"]
    # Each sample is a (timestamp, user_count) pair; default of -1 mirrors
    # the sentinel the original loop produced for an empty series.
    counts = [sample[1] for sample in samples]
    min_users = min(counts, default=-1)
    max_users = max(counts, default=-1)
    return min_users, max_users, samples[-1][1]
def get_user_achievements(username, gameid, timeout=10, be_specific=False):
    """Gets information about a specific user's achievements for a specific game

    Args:
        username (str): the id or name of the user you want the achievements for
        gameid (str): the id or name of the game you want the achievements for
        timeout (int): the amount of time before aiohttp raises a timeout error

    Returns:
        UserAchievements: the user achievements found, or None when either
        the user or the game cannot be resolved"""
    # Resolve names to ids where needed.
    if not is_integer(username):
        username = yield from search_for_userid(username, timeout=timeout, be_specific=be_specific)
    gamename = "???"
    if not is_integer(gameid):
        gameid, gamename = yield from get_app(gameid, timeout=timeout)
    _check_key_set()
    if username is None or gameid is None:
        return

    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://api.steampowered.com/ISteamUserStats/GetPlayerAchievements/v0001/?appid=" + gameid + "&key=" + STEAM_KEY + "&steamid=" + username)
            data = yield from resp.json()
            if "playerstats" in data and "achievements" in data["playerstats"]:
                return UserAchievements(gameid, gamename, data["playerstats"]["achievements"])
async def isitdown(self, url):
    """Checks if a website is down or up.

    BUG FIXES: declared ``async def`` (body awaits; plain ``def`` was a
    SyntaxError) and the scheme check is fixed — the original
    ``"http://" not in url or "https://" not in url`` was true for every
    URL (no URL contains both), so even https:// URLs got an http://
    prefix glued on.
    """
    if url == "":
        await self.bot.say("You haven't entered a website to check.")
        return
    if not url.startswith(("http://", "https://")):
        url = "http://" + url
    try:
        with aiohttp.Timeout(15):
            await self.bot.say("Testing " + url + "…")
            try:
                response = await aiohttp.get(url, headers={'user_agent': headers})
                if response.status == 200:
                    await self.bot.say(url + " is up and running.")
                else:
                    await self.bot.say(url + " is down.")
            except Exception:
                await self.bot.say(url + " is down.")
    except asyncio.TimeoutError:
        await self.bot.say(url + " is down.")
async def fetch(session, url, proxy=None):
    """GET *url* through *session* and return the body as text.

    BUG FIX: the body uses ``async with``/``await``, so the function must be
    ``async def``; the original plain ``def`` was a SyntaxError.

    Args:
        session: aiohttp client session.
        url (str): URL to fetch.
        proxy (str, optional): proxy URL, e.g. 'http://127.0.0.1:8123'.
    """
    with aiohttp.Timeout(10):
        async with session.get(url, proxy=proxy) as response:
            return await response.text()
async def fetch(self, url):
    """Load a webpage and read return the body as plaintext.

    BUG FIXES: declared ``async def`` (body awaits; plain ``def`` was a
    SyntaxError) and the SSL check now tests ``str(exception_msg)`` —
    ``"SSL" in exception_msg`` raised TypeError because membership testing
    is not defined on exception objects.

    Raises:
        SorterResponseCodeError: on a non-200 status.
        SorterTimeoutError: when the page load exceeds the timeout.
        SorterConnectionError: on SOCKS/connection failures.
        SorterCertError: on (probable) certificate errors.
    """
    self.logger.info("{url}: loading...".format(**locals()))
    try:
        with aiohttp.Timeout(self.page_load_timeout, loop=self.loop):
            async with self.session.get(url,
                                        allow_redirects=True,
                                        headers=self.headers) as resp:
                if resp.status != 200:
                    self.logger.warning("{url} was not reachable. HTTP "
                                        "error code {resp.status} was "
                                        "returned".format(**locals()))
                    raise SorterResponseCodeError
                self.logger.info("{url}: loaded "
                                 "successfully.".format(**locals()))
                return await resp.text()
    except asyncio.TimeoutError:
        self.logger.warning("{url}: timed out after "
                            "{self.page_load_timeout}.".format(**locals()))
        raise SorterTimeoutError
    except (aiosocks.errors.SocksError,
            aiohttp.errors.ServerDisconnectedError,
            aiohttp.errors.ClientResponseError) as exc:
        self.logger.warning("{url} was not reachable: "
                            "{exc}".format(**locals()))
        raise SorterConnectionError
    except aiohttp.errors.ClientOSError as exception_msg:
        if "SSL" in str(exception_msg):
            self.logger.warning("{url}: certificate error (probably due to "
                                "use of a self-signed "
                                "cert.".format(**locals()))
            raise SorterCertError
        else:
            raise
    except (ssl.CertificateError, aiohttp.errors.ClientOSError):
        self.logger.warning("{url}: certificate error (probably due to "
                            "use of a self-signed "
                            "cert.".format(**locals()))
        raise SorterCertError
async def call(self, endpoint, method='POST', raw=False, *args, **kwargs):
    """Issue an HTTP request against *endpoint* and return its JSON body.

    BUG FIX: the body uses ``async with``/``await``, so the method must be
    ``async def``; the original plain ``def`` was a SyntaxError.

    Args:
        endpoint: API endpoint, expanded through ``self.uri``.
        method (str): HTTP verb.
        raw (bool): when True, return the raw response bytes instead of JSON.

    Raises:
        BadRequest: on HTTP 400.
        HTTPError: on any other non-200 status.
        ResponseError: when the decoded JSON carries an 'error' field.
    """
    if 'headers' not in kwargs:
        kwargs['headers'] = await self.get_headers()
    uri = self.uri(endpoint)
    logger.debug('Fetching: %s', uri)
    logger.debug('Headers: %s', kwargs['headers'])
    logger.debug('Cookies: %s', self.session.cookies)
    with aiohttp.Timeout(self.request_timeout):
        async with self.session.request(
                method, uri, *args, **kwargs) as response:
            body = await response.read()
            if not response.status == 200:
                # Try to decode an error payload; fall back to None.
                try:
                    json = await response.json()
                except Exception:  # TODO: narrow exception
                    json = None
                ex = BadRequest if response.status == 400 else HTTPError
                raise ex(response.status, body, kwargs.get('data'), json)
            if raw:
                return body
            json = await response.json()
            if json.get('error'):
                raise ResponseError(response.status, body, kwargs.get('data'), json)
            return json
async def fetch(session, url):
    """GET *url* and return ``(body_text, status_code)``.

    BUG FIX: the body uses ``async with``/``await``, so the function must be
    ``async def``; the original plain ``def`` was a SyntaxError.
    """
    with aiohttp.Timeout(10, loop=session.loop):
        async with session.get(url) as response:
            tmp = await response.text()
            return (tmp, response.status)
async def connect(self):
    """
    Create the connection

    BUG FIX: the body awaits, so the method must be ``async def``; the
    original plain ``def`` was a SyntaxError.

    Returns
    -------
    self

    Raises
    ------
    exception.PeonyException
        On a response status in 4xx that are not status 420 or 429
        Also on statuses in 1xx or 3xx since this should not be the status
        received here
    """
    # NOTE(review): the docstring says "Returns self" but nothing is
    # returned here — confirm against the caller.
    with aiohttp.Timeout(self.timeout):
        self.response = await self._connect()

    # Map the HTTP status onto the stream state machine.
    if self.response.status in range(200, 300):
        self._error_timeout = 0
        self.state = NORMAL
    elif self.response.status == 500:
        self.state = DISCONNECTION
    elif self.response.status in range(501, 600):
        self.state = RECONNECTION
    elif self.response.status in (420, 429):
        self.state = ENHANCE_YOUR_CALM
    else:
        logger.debug("raising error during stream connection")
        raise await exceptions.throw(self.response,
                                     loads=self.client._loads,
                                     url=self.kwargs['url'])

    logger.debug("stream state: %d" % self.state)