def close(self, timeout=5):
"""Stop a ffmpeg instance."""
if not self.is_running:
_LOGGER.warning("FFmpeg isn't running!")
return
try:
# send stop to ffmpeg
with async_timeout.timeout(timeout, loop=self._loop):
yield from self._proc.communicate(input=b'q')
_LOGGER.debug("Close FFmpeg process")
except (asyncio.TimeoutError, ValueError):
_LOGGER.warning("Timeout while waiting of FFmpeg")
self._proc.kill()
finally:
self._clear()
def test_timeout(loop):
canceled_raised = False
@asyncio.coroutine
def long_running_task():
try:
yield from asyncio.sleep(10, loop=loop)
except asyncio.CancelledError:
nonlocal canceled_raised
canceled_raised = True
raise
with pytest.raises(asyncio.TimeoutError):
with timeout(0.01, loop=loop) as t:
yield from long_running_task()
assert t._loop is loop
assert canceled_raised, 'CancelledError was not raised'
def test_timeout_time(loop):
foo_running = None
start = loop.time()
with pytest.raises(asyncio.TimeoutError):
with timeout(0.1, loop=loop):
foo_running = True
try:
yield from asyncio.sleep(0.2, loop=loop)
finally:
foo_running = False
dt = loop.time() - start
if not (0.09 < dt < 0.11) and os.environ.get('APPVEYOR'):
pytest.xfail('appveyor sometimes is toooo sloooow')
assert 0.09 < dt < 0.11
assert not foo_running
def test_timeout_remaining(loop):
with timeout(None, loop=loop) as cm:
assert cm.remaining is None
t = timeout(1.0, loop=loop)
assert t.remaining is None
with timeout(1.0, loop=loop) as cm:
yield from asyncio.sleep(0.1, loop=loop)
assert cm.remaining < 1.0
with pytest.raises(asyncio.TimeoutError):
with timeout(0.1, loop=loop) as cm:
yield from asyncio.sleep(0.5, loop=loop)
assert cm.remaining == 0.0
async def ocr(self, ctx):
"""OCR an image.
Usage: ocr [attach an image]"""
or_check_perms(ctx, ('bot_owner',))
warnings.simplefilter('error', Image.DecompressionBombWarning)
if ctx.message.attachments:
with async_timeout.timeout(5):
async with self.bot.cog_http.get(ctx.message.attachments[0].proxy_url) as r:
raw_image = await r.read()
else:
await ctx.send(':warning: No attachment found.')
return
img_bytes = BytesIO(raw_image)
image = Image.open(img_bytes)
text = tesserocr.image_to_text(image)
if text:
await ctx.send(text)
else:
await ctx.send('No results.')
async def update_dbots(self):
if not discord_bots_token:
self.logger.warning('Tried to contact Discord Bots, but no token set!')
return False
data = dict(guild_count=len(self.bot.guilds))
dest = 'https://bots.discord.pw/api/bots/' + str(self.bot.user.id) + '/stats'
headers = {
'Authorization': discord_bots_token,
'Content-Type': 'application/json'
}
with async_timeout.timeout(6):
async with self.bot.cog_http.post(dest, data=json.dumps(data), headers=headers) as r:
resp_key = f'(got {r.status} {r.reason})'
if r.status == 200:
self.logger.info('Successfully sent Discord Bots our guild count (got 200 OK)')
else:
self.logger.warning('Failed sending our guild count to Discord Bots! ' + resp_key)
async def update_discordlist(self):
if not discordlist_token:
self.logger.warning('Tried to contact DiscordList, but no token set!')
return False
data = {
'token': discordlist_token,
'guilds': len(self.bot.guilds)
}
dest = 'https://bots.discordlist.net/api'
headers = {'Content-Type': 'application/json'}
with async_timeout.timeout(6):
async with self.bot.cog_http.post(dest, data=json.dumps(data), headers=headers) as r:
resp_key = f'(got {r.status} {r.reason})'
if r.status == 200:
self.logger.info('Successfully sent DiscordList our guild count! (got 200 OK)')
else:
self.logger.warning('Failed sending our guild count to DiscordList! ' + resp_key)
async def add_emote(self, ctx, emote: str):
"""Add a Twitch, FrankerFaceZ, BetterTTV, or Discord emote to the current guild.
Usage: add_emote [name of emote]"""
echeck_perms(ctx, ('bot_owner',))
emote = emote.replace(':', '')
with async_timeout.timeout(12):
try:
async with self.bot.cog_http.get('https://static-cdn.jtvnw.net/emoticons/v1/' + str(self.bot.emotes['twitch'][emote]['image_id']) + '/1.0') as resp:
emote_img = await resp.read()
except KeyError: # let's try frankerfacez
try:
async with self.bot.cog_http.get('https://cdn.frankerfacez.com/emoticon/' + str(self.bot.emotes['ffz'][emote]) + '/1') as resp:
emote_img = await resp.read()
except KeyError: # let's try BetterTTV
try:
async with self.bot.cog_http.get(self.bot.emotes['bttv'][emote]) as resp:
emote_img = await resp.read()
except KeyError: # let's try Discord
await ctx.send('**No such emote!** I can fetch from Twitch, FrankerFaceZ, BetterTTV, or Discord (soon).')
return False
result = await ctx.guild.create_custom_emoji(name=emote, image=emote_img)
await ctx.send('Added. ' + str(result))
async def fetch(client, url, novels_name):
with async_timeout.timeout(20):
try:
headers = {
'user-agent': get_random_user_agent(),
'referer': "https://www.bing.com/"
}
params = {'q': novels_name, 'ensearch': 0}
async with client.get(url, params=params, headers=headers) as response:
assert response.status == 200
LOGGER.info('Task url: {}'.format(response.url))
try:
text = await response.text()
except:
text = await response.read()
return text
except Exception as e:
LOGGER.exception(e)
return None
async def fetch(client, url, novels_name):
with async_timeout.timeout(20):
try:
headers = {
'user-agent': get_random_user_agent(),
'referer': "https://duckduckgo.com/"
}
params = {'q': novels_name}
async with client.get(url, params=params, headers=headers) as response:
assert response.status == 200
LOGGER.info('Task url: {}'.format(response.url))
try:
text = await response.text()
except:
text = await response.read()
return text
except Exception as e:
LOGGER.exception(e)
return None
async def fetch(client, url, name, is_web):
with async_timeout.timeout(15):
try:
headers = {'user-agent': get_random_user_agent()}
if is_web:
params = {'wd': name, 'ie': 'utf-8', 'rn': CONFIG.BAIDU_RN, 'vf_bl': 1}
else:
params = {'word': name}
async with client.get(url, params=params, headers=headers) as response:
assert response.status == 200
LOGGER.info('Task url: {}'.format(response.url))
try:
text = await response.text()
except:
text = await response.read()
return text
except Exception as e:
LOGGER.exception(e)
return None
async def get_real_url(client, url):
with async_timeout.timeout(10):
try:
headers = {'user-agent': get_random_user_agent()}
async with client.head(url, headers=headers, allow_redirects=True) as response:
assert response.status == 200
LOGGER.info('Parse url: {}'.format(response.url))
# text = ""
# try:
# text = await response.text()
# except:
# text = await response.read()
# if text:
# print(text)
# text = re.findall(r'replace\(\"(.*?)\"\)', str(text))
# text = text[0] if text[0] else ""
url = response.url if response.url else None
return url
except Exception as e:
LOGGER.exception(e)
return None
def data_extraction_for_phone(html):
with async_timeout.timeout(10):
try:
# Get url from the data-log attribute
data_log = eval(html['data-log'])
url = data_log.get('mu', None)
if not url:
return None
# Get title
title = html.find('h3').get_text()
# Get author and update_time (option)
novel_mess = html.findAll(class_='c-gap-right-large')
basic_mess = [i.get_text() for i in novel_mess] if novel_mess else None
return {'title': title, 'url': url, 'basic_mess': basic_mess}
except Exception as e:
LOGGER.exception(e)
return None
async def target_fetch(client, url):
"""
:param client: aiohttp client
:param url: target url
:return: text
"""
with async_timeout.timeout(30):
try:
headers = {'user-agent': get_random_user_agent()}
async with client.get(url, headers=headers) as response:
assert response.status == 200
LOGGER.info('Task url: {}'.format(response.url))
try:
text = await response.text()
except:
try:
text = await response.read()
except aiohttp.ServerDisconnectedError as e:
LOGGER.exception(e)
text = None
return text
except Exception as e:
LOGGER.exception(e)
return None
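
target_fetch takes an already-open aiohttp client, so a caller normally wraps it in a ClientSession. A minimal usage sketch (the URL below is just an example, not from the original snippet):

import aiohttp

async def demo():
    # Reuse a single session; target_fetch only performs the GET itself.
    async with aiohttp.ClientSession() as client:
        html = await target_fetch(client, 'https://www.example.com/')
        print(html[:200] if html else 'fetch failed')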
async def _update_proxy_list(self):
try:
self.params['detail'] = ''
async with aiohttp.ClientSession(loop=self.loop) as session:
with async_timeout.timeout(self.timeout, loop=self.loop):
async with session.request('GET', self.agent_addr,
auth=self.auth,
params=self.params) as resp:
body = await resp.read()
proxies = json.loads(body.decode('utf-8'))
if len(proxies) > 0:
res = []
for p in proxies:
if self.min_success_rate > 0:
if p['success'] >= self.min_success_rate * (p['success'] + p['fail']):
res.append(p['addr'])
elif self.min_count > 0 and len(res) < self.min_count:
res.append(p['addr'])
else:
break
else:
res.append(p['addr'])
self.proxies = res
except Exception:
log.warning("Error occurred when get proxy list", exc_info=True)
async def _update_proxy_list(self):
try:
self.params['detail'] = ''
async with aiohttp.ClientSession(loop=self.loop) as session:
with async_timeout.timeout(self.timeout, loop=self.loop):
async with session.request('GET',
self.agent_addr,
auth=self.auth,
params=self.params) as resp:
body = await resp.read()
proxies = json.loads(body.decode('utf-8'))
self._remove_block()
t = time.time()
for p in proxies:
r = 0.8 * (p['success'] / (p['success'] + p['fail'] + 1.0))
proxy = PoolProxyInfo(p['addr'], t, base_rate=r)
self._add_new_proxy(proxy)
except Exception:
log.warning("Error occurred when get proxy list", exc_info=True)
async def check_proxy(self, addr):
if not addr.startswith("http://"):
proxy = "http://{0}".format(addr)
else:
proxy = addr
try:
async with aiohttp.ClientSession(loop=self._loop) as session:
with async_timeout.timeout(self._timeout, loop=self._loop):
seed = str(random.randint(0, 99999999))
url = "http://httpbin.org/get?seed={}".format(seed)
async with session.request("GET", url, proxy=proxy) as resp:
body = await resp.read()
data = json.loads(body.decode('utf-8'))
if "args" not in data:
return False
args = data["args"]
if "seed" not in args or args["seed"] != seed:
return False
except Exception:
return False
log.debug("Proxy {} is OK".format(addr))
return True
async def check_proxy(self, addr):
if not addr.startswith("http://"):
proxy = "http://{0}".format(addr)
else:
proxy = addr
try:
async with aiohttp.ClientSession(loop=self._loop) as session:
with async_timeout.timeout(self._timeout, loop=self._loop):
async with session.request("GET", self._url, proxy=proxy) as resp:
url = str(resp.url)
if not self.match_status(self._http_status, resp.status):
return False
if self._url_match and not self._url_match.search(url):
return False
body = await resp.read()
if self._body_match and not self._body_match.search(body):
return False
except Exception:
return False
return True
async def _update_proxy(self, urls):
for u in urls:
retry_cnt = 3
while retry_cnt > 0:
retry_cnt -= 1
try:
async with aiohttp.ClientSession(loop=self._loop) as session:
with async_timeout.timeout(self._timeout, loop=self._loop):
async with session.request("GET", u, headers=self._headers) as resp:
url = str(resp.url)
body = await resp.read()
except Exception as e:
log.info("{} error occurred when update proxy on url={}: {}".format(type(e), u, e))
else:
retry_cnt = 0
addr_list = self._proxy_finder.find_proxy(url, body)
log.debug("Find {} proxies on the page '{}'".format(len(addr_list), u))
if addr_list:
await self._callback(*addr_list)
await asyncio.sleep(self._sleep_time, loop=self._loop)
async def verbose_ping(dest_addr, timeout=2, count=3):
"""
Send >count< pings to >dest_addr< with the given >timeout< and display
the result.
:param dest_addr:
:param timeout:
:param count:
"""
for i in range(count):
try:
delay = await ping(dest_addr, timeout)
except Exception as e:
print("%s failed: %s" % (dest_addr, str(e)))
break
if delay is None:
print('%s timed out after %ss' % (dest_addr, timeout))
else:
delay *= 1000
print("%s get ping in %0.4fms" % (dest_addr, delay))
print()
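
verbose_ping is a coroutine, so it has to be driven by an event loop. A minimal sketch, assuming the module's ping() coroutine is importable alongside it; the target host and counts are examples:

import asyncio

# Raw ICMP sockets usually need elevated privileges.
loop = asyncio.get_event_loop()
loop.run_until_complete(verbose_ping('8.8.8.8', timeout=2, count=3))
loop.close()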
def create_connection(service, address=('127.0.0.1', 6000), *,
protocol_cls=TBinaryProtocol, timeout=None, loop=None, **kw):
"""Create a thrift connection.
This function is a :ref:`coroutine <coroutine>`.
Open a connection to the thrift server by address argument.
:param service: a thrift service object
:param address: a (host, port) tuple
:param protocol_cls: protocol type, default is :class:`TBinaryProtocol`
:param timeout: if specified, raise `asyncio.TimeoutError` when a single rpc call takes longer than `timeout` seconds
:param loop: :class:`Eventloop <asyncio.AbstractEventLoop>` instance, if not specified, default loop is used.
:param kw: extra params passed through to asyncio.open_connection()
:return: newly created :class:`ThriftConnection` instance.
"""
host, port = address
reader, writer = yield from asyncio.open_connection(
host, port, loop=loop, **kw)
iprotocol = protocol_cls(reader)
oprotocol = protocol_cls(writer)
return ThriftConnection(service, iprot=iprotocol, oprot=oprotocol,
address=address, loop=loop, timeout=timeout)
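
A hedged usage sketch of create_connection in the same old-style coroutine syntax as the snippet. PingService is a placeholder for a loaded thrift service object, and calling RPC methods directly on the returned connection is an assumption about ThriftConnection, not something shown above:

@asyncio.coroutine
def main(loop):
    # PingService is hypothetical; substitute the real thrift service object.
    conn = yield from create_connection(PingService, ('127.0.0.1', 6000),
                                        timeout=10, loop=loop)
    # Assumed: ThriftConnection proxies RPC methods such as ping() and has close().
    result = yield from conn.ping()
    print(result)
    conn.close()

loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop))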
def __call__(self, reader, writer):
iproto = self.protocol_cls(reader)
oproto = self.protocol_cls(writer)
while not reader.at_eof():
try:
with async_timeout.timeout(self.timeout):
yield from self.processor.process(iproto, oproto)
except ConnectionError:
logger.debug('client has closed the connection')
writer.close()
except asyncio.TimeoutError:
logger.debug('timeout when processing the client request')
writer.close()
except asyncio.IncompleteReadError:
logger.debug('client has closed the connection')
writer.close()
except Exception:
# app exception
logger.exception('unhandled app exception')
writer.close()
writer.close()
def __init__(self, pp, headers={}, proxy_headers={},
max_retries=0, timeout=60, ca_certs=None,
discard_timeout=False):
'''pp - proxypool to use
headers - the headers to use in the underlying requests.Session
max_retries and timeout - the same as in ClientSession.
discard_timeout - if set to True, proxies that time out are discarded. The pool should provide some sort of refresh, or we'll run out of proxies
'''
self.pp = pp # Proxypool
self.p = None
self.headers=headers # To use when downloading
self.proxy_headers=proxy_headers # To use when downloading
self.max_retries=max_retries
self.timeout=timeout
self.discard_timeout=discard_timeout
self.ca_certs=ca_certs
self.s=None
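
The constructor above only stores its arguments. A rough instantiation sketch; ProxiedClientSession is a placeholder name for the class this __init__ belongs to, and the proxypool object is assumed to come from elsewhere in the project:

pp = make_proxypool()  # hypothetical factory returning a proxypool instance
session = ProxiedClientSession(pp,
                               headers={'User-Agent': 'Mozilla/5.0'},
                               max_retries=3,
                               timeout=30,
                               discard_timeout=True)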
def get_url(hass, url):
websession = async_get_clientsession(hass)
request = None
try:
with async_timeout.timeout(10, loop=hass.loop):
request = yield from websession.get(url)
if request.status != 200:
_LOGGER.error("Error %d on load url %s",
request.status, request.url)
return None
return (yield from request.read())
except (asyncio.TimeoutError, aiohttp.errors.ClientError):
_LOGGER.error('Timeout downloading url.')
finally:
if request is not None:
yield from request.release()
return None
def __init__(self, loop=None,
host='127.0.0.1', port=80, request_timeout=10,
polling_timeout=30, polling_interval=5):
"""
:param loop: event loop
:param host: API Server host
:param port: API Server port
:param request_timeout: HTTP request timeout
:param polling_timeout: Async API polling timeout
:param polling_interval: Async API polling interval
"""
super().__init__()
self.loop = loop
self._host = host
self._port = port
self._request_timeout = request_timeout
self._polling_timeout = polling_timeout
self._polling_interval = polling_interval
self.session = None
self._conn = aiohttp.TCPConnector(
verify_ssl=False, limit=50, use_dns_cache=True)
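
A construction sketch for the client above; ApiClient is a placeholder for the class that owns this __init__, and the host/port values are examples:

import asyncio

loop = asyncio.get_event_loop()
client = ApiClient(loop=loop, host='192.168.1.10', port=8080,
                   request_timeout=10, polling_timeout=60,
                   polling_interval=5)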
async def poll(self, action, body):
    if not action.NEED_POLL:
        return raise_error(202, body)
    location = body['location']
    count = 0
    try:
        with async_timeout.timeout(self._polling_timeout):
            while True:
                status, body = await self._do_request('GET', location)
                if status in [200, 503]:
                    return {"value": json.loads(body)}
                count += 1
                await asyncio.sleep(self._polling_interval)
    except asyncio.TimeoutError:
        # polling timeout
        return raise_error(500, "Location {} polling timeout, count: {}".format(location, count))
async def _download_file(self, url, name):
"""Async file download
Args:
url (str):
The URL from which to download the file
name (str):
The name to give to the downloaded file
"""
with async_timeout.timeout(10):
async with self.api.session.get(url) as response:
filename = os.path.basename(name)
with open(filename, 'wb') as f_handle:
while True:
chunk = await response.content.read(1024)
if not chunk:
break
f_handle.write(chunk)
return await response.release()
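
_download_file strips the directory part of name with os.path.basename, so the file is written to the current working directory. A hedged call sketch (downloader stands in for the instance owning the method, and the URL is illustrative):

async def save_image(downloader):
    await downloader._download_file('https://example.com/img/cat.png',
                                    'img/cat.png')  # saved locally as cat.png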
async def fetch_http(sanic_http_request: HTTPRequest,
jsonrpc_request: SingleJsonRpcRequest,
url: str,
batch_index: int) -> SingleJsonRpcResponse:
session = sanic_http_request.app.config.aiohttp['session']
args = sanic_http_request.app.config.args
headers = {}
headers['x-amzn-trace_id'] = sanic_http_request.headers.get('x-amzn-trace-id')
headers['x-jussi-request-id'] = sanic_http_request.headers.get('x-jussi-request-id')
upstream_request = {k: jsonrpc_request[k] for k in
{'jsonrpc', 'method', 'params'} if k in jsonrpc_request}
upstream_request['id'] = sanic_http_request['request_id_int'] + batch_index
with async_timeout.timeout(args.upstream_http_timeout):
async with session.post(url, json=upstream_request, headers=headers) as resp:
upstream_response = await resp.json()
del upstream_response['id']
if 'id' in jsonrpc_request:
upstream_response['id'] = jsonrpc_request['id']
return upstream_response
# pylint: enable=no-value-for-parameter
async def finish(self, timeout=None):
"""
Cancel all pending tasks and optionally re-enqueue jobs which haven't finished after the timeout.
:param timeout: how long to wait for tasks to finish, defaults to ``shutdown_delay``
"""
timeout = timeout or self.shutdown_delay
self.running = False
cancelled_tasks = 0
if self.pending_tasks:
with await self._finish_lock:
work_logger.info('drain waiting %0.1fs for %d tasks to finish', timeout, len(self.pending_tasks))
_, pending = await asyncio.wait(self.pending_tasks, timeout=timeout, loop=self.loop)
if pending:
pipe = self.redis.pipeline()
for task in pending:
if task.re_enqueue:
pipe.rpush(task.job.raw_queue, task.job.raw_data)
task.cancel()
cancelled_tasks += 1
if pipe._results:
await pipe.execute()
self.pending_tasks = set()
return cancelled_tasks
def _send(self, url):
"""Send the url to the Hook API."""
response = None
try:
_LOGGER.debug("Sending: %s", url)
websession = async_get_clientsession(self.hass)
with async_timeout.timeout(TIMEOUT, loop=self.hass.loop):
response = yield from websession.get(
url, params={"token": self._token})
data = yield from response.json()
except (asyncio.TimeoutError,
aiohttp.errors.ClientError,
aiohttp.errors.ClientDisconnectedError) as error:
_LOGGER.error("Failed setting state: %s", error)
return False
finally:
if response is not None:
yield from response.release()
_LOGGER.debug("Got: %s", data)
return data['return_value'] == '1'