def __init__(self, auth_url, username, tenant, loop=None, log=None,
cafile=None, token_renew_delay=3300):
self.auth_url = auth_url
self.username = username
self.tenant = tenant
self.log = log
self.token_renew_delay = token_renew_delay
self.loop = loop or asyncio.get_event_loop()
self.headers = {"content-type": "application/json",
"accept": "application/json"}
if cafile:
sslcontext = ssl.create_default_context(cafile=cafile)
conn = aiohttp.TCPConnector(ssl_context=sslcontext)
self.session = aiohttp.ClientSession(connector=conn, loop=self.loop)
else:
        self.session = aiohttp.ClientSession(loop=self.loop)
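# A minimal sketch of the same CA-file setup on aiohttp 3.x, where the
# connector's ssl_context/verify_ssl keyword arguments were merged into a
# single `ssl` parameter; the function name here is illustrative.
import ssl
import aiohttp

def make_verified_connector(cafile: str) -> aiohttp.TCPConnector:
    # Build an SSL context that trusts the given CA bundle.
    ssl_context = ssl.create_default_context(cafile=cafile)
    return aiohttp.TCPConnector(ssl=ssl_context)  # aiohttp >= 3.0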
def test_auth_with_valid_data(self):
s = TestAuthSession(login=USER_LOGIN, password=USER_PASSWORD, app_id=APP_ID)
s.driver.session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False),
response_class=CustomClientResponse)
yield from s.authorize()
params = {'client_id': APP_ID, 'display': 'page', 'redirect_uri': REDIRECT_URI, 'response_type': 'code'}
with aiohttp.Timeout(10):
response = yield from s.driver.session.get("https://oauth.vk.com/authorize",
params=params, allow_redirects=True)
s.close()
code = response.url.query.get('code')
self.assertIsNotNone(code)
s = AuthorizationCodeSession(APP_ID, APP_SECRET, REDIRECT_URI, code)
yield from s.authorize()
s.close()
self.assertIsNotNone(s.access_token)
def main():
"""Scriptworker entry point: get everything set up, then enter the main loop."""
context, credentials = get_context_from_cmdln(sys.argv[1:])
log.info("Scriptworker starting up at {} UTC".format(arrow.utcnow().format()))
cleanup(context)
conn = aiohttp.TCPConnector(limit=context.config['aiohttp_max_connections'])
loop = asyncio.get_event_loop()
with aiohttp.ClientSession(connector=conn) as session:
context.session = session
context.credentials = credentials
while True:
try:
loop.run_until_complete(async_main(context))
except Exception:
log.critical("Fatal exception", exc_info=1)
raise
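# Note: using ClientSession in a plain `with` block (as above) only worked on
# older aiohttp; 3.x requires `async with` or an explicit await session.close().
# A minimal sketch of the modern shape, assuming an async entry point:
import asyncio
import aiohttp

async def modern_main():
    conn = aiohttp.TCPConnector(limit=100)
    async with aiohttp.ClientSession(connector=conn) as session:
        async with session.get("https://example.com") as resp:
            print(resp.status)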
async def __awaitable__(self):
if self._data is None:
with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=self.api.verify_ssl)) as session:
wait_time = self._wait_time()
if wait_time is None and self.api:
try:
await self._make_async_request(session)
except ServiceUnavailableException:
await asyncio.sleep(60)
self._wait_time()
await self._make_async_request(session)
else:
await asyncio.sleep(wait_time)
await self._make_async_request(session)
return self
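# The retry-once-on-503 pattern above, in generic form. A sketch only:
# make_request stands in for any coroutine factory, and exc_type would be
# the ServiceUnavailableException used above.
import asyncio

async def request_with_retry(make_request, exc_type=Exception, delay=60):
    try:
        return await make_request()
    except exc_type:
        # Back off once, then retry a single time.
        await asyncio.sleep(delay)
        return await make_request()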
def _send_to_external_chat(self, bot, event, config):
if event.from_bot:
# don't send my own messages
return
conversation_id = event.conv_id
conversation_text = event.text
user_id = event.user_id
url = config["HUBOT_URL"] + conversation_id
payload = {"from" : str(user_id.chat_id), "message" : conversation_text}
headers = {'content-type': 'application/json'}
connector = aiohttp.TCPConnector(verify_ssl=False)
asyncio.ensure_future(
aiohttp.request('post', url, data=json.dumps(payload),
headers=headers, connector=connector)
)
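# The aiohttp docs recommend a ClientSession over one-off module-level
# requests. A minimal session-based sketch of the same POST; session.post's
# json= argument serializes the payload and sets the content-type header.
import aiohttp

async def post_json(url, payload):
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload) as resp:
            return resp.status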
async def test_single_proxy(self, proxy):
"""
    Test whether a single proxy is usable.
    :param proxy: proxy address to test
    :return:
"""
conn = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(connector=conn) as session:
try:
if isinstance(proxy, bytes):
proxy = proxy.decode('utf-8')
real_proxy = 'http://' + proxy
            print('testing proxy', proxy)
async with session.get(TEST_URL, proxy=real_proxy, timeout=15, allow_redirects=False) as response:
if response.status in VALID_STATUS_CODES:
self.redis.max(proxy)
                    print('proxy is valid', proxy)
else:
self.redis.decrease(proxy)
                    print('invalid response status code', response.status, 'IP', proxy)
except (ClientError, aiohttp.client_exceptions.ClientConnectorError, asyncio.TimeoutError, AttributeError):
self.redis.decrease(proxy)
            print('proxy request failed', proxy)
def __init__(self, vault_url: str = VAUTL_URL, token: Optional[str] = None, verify: bool = True, timeout: int = 10,
             session: Optional[aiohttp.ClientSession] = None, loop: Optional[asyncio.AbstractEventLoop] = None):
self.loop = loop
if loop is None:
self.loop = asyncio.get_event_loop()
self.vault_url = vault_url.rstrip('/')
self.session = session
if session is None:
if not verify:
connector = aiohttp.TCPConnector(verify_ssl=False, loop=self.loop)
else:
connector = None
self.session = aiohttp.ClientSession(connector=connector, read_timeout=timeout, conn_timeout=timeout, loop=self.loop) # pylint: disable=unexpected-keyword-arg
self._auth_token = token
self.timeout = timeout
def get_tcp_connector(self) -> aiohttp.TCPConnector:
if self._connector_owner:
# return valid connector
if self._tcp_connector and not self._tcp_connector.closed:
return self._tcp_connector
# create ssl context if no valid connector is present
ssl_context = ssl.create_default_context(cafile=self.cafile)
# memoize tcp_connector for reuse
# noinspection PyAttributeOutsideInit
self._tcp_connector = aiohttp.TCPConnector(
loop=self.loop,
ssl_context=ssl_context,
keepalive_timeout=self.keepalive_timeout,
)
return self._tcp_connector
return self._tcp_connector
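# The memoized connector above can be shared between sessions. A sketch,
# assuming aiohttp 2.x or later: connector_owner=False tells a session not to
# close the shared connector when the session itself is closed.
import aiohttp

def make_shared_session(conn: aiohttp.TCPConnector) -> aiohttp.ClientSession:
    return aiohttp.ClientSession(connector=conn, connector_owner=False)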
async def _async_request_(self, method, url, verify, params, headers, data):
connector = aiohttp.TCPConnector(verify_ssl=verify)
async with aiohttp.ClientSession(connector=connector) as session:
if data is not None:
async with getattr(session, method)(url, params=params,
headers=headers, data=data) as response:
data = await response.read()
result = resources.loads(data.decode('UTF-8'))
else:
async with getattr(session, method)(url, params=params,
headers=headers) as response:
data = await response.read()
result = resources.loads(data.decode('UTF-8'))
if result.__kind__ != getattr(self, '_%s_type_' % method):
raise Exception(str(result))
return result
async def __aenter__(self):
connector = aiohttp.TCPConnector(verify_ssl=self._verify)
self._session_cm = aiohttp.ClientSession(connector=connector)
self._session = await self._session_cm.__aenter__()
if self._data is not None:
self._response_cm = getattr(self._session, self._method)(
self._url, params=self._params, headers=self._headers,
            data=self._data)
else:
self._response_cm = getattr(self._session, self._method)(
self._url, params=self._params, headers=self._headers)
self._response = await self._response_cm.__aenter__()
if self._response.status != 200:
raise Exception(await self._response.text())
return WatcherSession(self)
async def oc_classify(records, one_codex_api_key, progress=False, stdout=False):
oc_auth = aiohttp.BasicAuth(one_codex_api_key)
conn = aiohttp.TCPConnector(limit=10)
with aiohttp.ClientSession(auth=oc_auth, connector=conn) as oc_session:
with aiohttp.ClientSession(connector=conn) as ebi_session:
tasks = [classify_taxify(oc_session, ebi_session, r.id, str(r.seq)) for r in records]
# No async generators in 3.5... :'(
# return [await f for f in tqdm.tqdm(asyncio.as_completed(tasks), total=len(tasks))]
records = []
for f in tqdm.tqdm(asyncio.as_completed(tasks),
disable=not progress,
total=len(tasks)):
response = await f
record = build_record(response[0], response[1])
if stdout:
print(record.format('fasta'), end='')
records.append(record)
return records
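# On Python 3.6+ the commented-out one-liner above becomes legal, because
# `await` is allowed inside a comprehension within a coroutine. A minimal
# sketch of consuming a list of tasks in completion order with a progress bar:
import asyncio
import tqdm

async def gather_in_completion_order(tasks, progress=False):
    futures = asyncio.as_completed(tasks)
    if progress:
        futures = tqdm.tqdm(futures, total=len(tasks))
    return [await f for f in futures]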
# --------------------------------------------------------------------------------------------------
async def Server(self):
self.Running = asyncio.Future()
self.startindexIndex = self.ipfactory.getIndex()
context = ssl.create_default_context()
context.check_hostname = False
    if not self.scan:
self.loop.create_task(self.SaveIp())
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=context, force_close=True),
conn_timeout=0.7, read_timeout=0.8) as self.session:
self.start_time = time.time()
# print("create session Success")
# print("startindex Scan Ip")
while self._running:
if self.now < self.max:
self.now += 1
# print("create task at", self.now)
# print("startindex Task Sum: ", self.now)
self.loop.create_task(self.worker())
if self.now == self.max:
self.future = asyncio.Future()
else:
await self.future
def create_session(self, loop):
conn = None
if self.proxy and self.proxy_user:
conn = aiohttp.ProxyConnector(
loop=loop,
limit=self.parallel,
proxy=self.proxy,
proxy_auth=aiohttp.BasicAuth(self.proxy_user, self.proxy_password)
)
elif self.proxy:
conn = aiohttp.ProxyConnector(loop=loop, limit=self.parallel, proxy=self.proxy)
else:
conn = aiohttp.TCPConnector(loop=loop, limit=self.parallel)
session = aiohttp.ClientSession(connector=conn)
return session
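# aiohttp.ProxyConnector was removed in later aiohttp releases; the proxy is
# now passed per request instead. A minimal sketch of the equivalent:
import aiohttp

async def fetch_via_proxy(url, proxy, proxy_user=None, proxy_password=None):
    auth = aiohttp.BasicAuth(proxy_user, proxy_password) if proxy_user else None
    async with aiohttp.ClientSession() as session:
        async with session.get(url, proxy=proxy, proxy_auth=auth) as resp:
            return await resp.text()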
def _get_connector(ssl_context):
if _scheme in SECURE_SCHEMES:
if ssl_context is None:
raise ValueError("Please pass ssl_context for secure protocol")
if _client_module == AIOHTTP_CLIENT_MODULE:
import aiohttp
connector = aiohttp.TCPConnector(ssl_context=ssl_context,
loop=loop)
elif _client_module == TORNADO_CLIENT_MODULE:
from functools import partial
from tornado import httpclient
connector = partial(
                httpclient.HTTPRequest, ssl_options=ssl_context)
else:
raise ValueError("Unknown client module")
elif _scheme in INSECURE_SCHEMES:
connector = None
else:
raise ValueError("Unknown protocol")
return connector
def ws_connect(url, *, protocols=(), timeout=10.0, connector=None, auth=None,
ws_response_class=ClientWebSocketResponse, autoclose=True,
autoping=True, loop=None, origin=None, headers=None):
warnings.warn("Use ClientSession().ws_connect() instead",
DeprecationWarning)
if loop is None:
loop = asyncio.get_event_loop()
if connector is None:
connector = aiohttp.TCPConnector(loop=loop, force_close=True)
session = aiohttp.ClientSession(loop=loop, connector=connector, auth=auth,
ws_response_class=ws_response_class,
headers=headers)
return _DetachedWSRequestContextManager(
session._ws_connect(url,
protocols=protocols,
timeout=timeout,
autoclose=autoclose,
autoping=autoping,
origin=origin),
session=session)
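# A sketch of the replacement that the DeprecationWarning above points to,
# using the session-bound ws_connect (aiohttp 3.x coroutine API):
import aiohttp

async def ws_echo(url):
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(url) as ws:
            await ws.send_str('ping')
            msg = await ws.receive()
            return msg.data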
def wait_with_progress(urlList, concurrency=30, timeout=120, rawResults=False, cloudflare=False, headers=None):
    sem = asyncio.Semaphore(concurrency)
# Client session worker
headers = headers or {}
headers.update({
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36 vulners.com/bot'})
if cloudflare:
sessionClient = CloudflareScraper
else:
sessionClient = aiohttp.ClientSession
urlToResultDict = {}
with sessionClient(connector=aiohttp.TCPConnector(verify_ssl=False), headers=headers) as session:
coros = [parseUrl(url = d, semaphore = sem, session = session, timeout = timeout, rawResults=rawResults) for d in urlList]
for f in tqdm.tqdm(asyncio.as_completed(coros), total=len(coros)):
result = yield from f
urlToResultDict.update(result)
return urlToResultDict
async def _youtube(self, context, *, query: str):
"""Search on Youtube"""
try:
url = 'https://www.youtube.com/results?'
payload = {'search_query': ''.join(query)}
headers = {'user-agent': 'Red-cog/1.0'}
conn = aiohttp.TCPConnector()
session = aiohttp.ClientSession(connector=conn)
async with session.get(url, params=payload, headers=headers) as r:
result = await r.text()
session.close()
yt_find = re.findall(r'href=\"\/watch\?v=(.{11})', result)
url = 'https://www.youtube.com/watch?v={}'.format(yt_find[0])
await self.bot.say(url)
except Exception as e:
message = 'Something went terribly wrong! [{}]'.format(e)
await self.bot.say(message)
async def on_startup(app):
connector = aiohttp.TCPConnector(limit=5, use_dns_cache=True, loop=app.loop)
session = aiohttp.ClientSession(connector=connector, raise_for_status=True)
bot = TelegramBot(app['config'].token, session)
image_model = fit_model(app['config'].sample_df)
def config_injections(binder):
# injection bindings
binder.bind(Config, app['config'])
binder.bind(TelegramBot, bot)
binder.bind(GradientBoostingClassifier, image_model)
binder.bind_to_constructor(AsyncIOMotorDatabase, init_database)
try:
inject.configure(config_injections)
except inject.InjectorException:
log.error("Injector already configured", exc_info=True)
setup_logging(log)
app.loop.create_task(bot.set_hook())
def __init__(self, loop=None,
host='127.0.0.1', port=80, request_timeout=10,
polling_timeout=30, polling_interval=5):
"""
:param loop: event loop
:param host: API Server host
:param port: API Server port
:param request_timeout: HTTP request timeout
:param polling_timeout: Async API polling timeout
:param polling_interval: Async API polling interval
"""
super().__init__()
self.loop = loop
self._host = host
self._port = port
self._request_timeout = request_timeout
self._polling_timeout = polling_timeout
self._polling_interval = polling_interval
self.session = None
self._conn = aiohttp.TCPConnector(
verify_ssl=False, limit=50, use_dns_cache=True)
def fetchData(url, callback = pd.Xm, params=None):
    # Set the request URL and parameters here, or pass them in from outside.
con = aiohttp.TCPConnector(limit=config.REQ_AMOUNTS)
s = aiohttp.ClientSession(headers = config.HEADERS, connector=con)
    # Requests made through s.* keep the connection alive automatically,
    # so you can make multiple requests here without closing the connection,
    # as long as you stay on the same domain.
#i.e.
#await s.get('***/page1')
#await s.get('***/page2')
########################################################################
    r = yield from s.get(url, params=params)
    # here the connection is closed automatically.
data = yield from r.text(encoding='utf-8')
yield from callback(data)
r.close()
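# The keep-alive behaviour described in the comments above, shown with the
# modern async API: two requests to the same host reuse one pooled connection.
# URL paths are illustrative.
import aiohttp

async def fetch_two_pages(base_url):
    connector = aiohttp.TCPConnector(limit=10)
    async with aiohttp.ClientSession(connector=connector) as s:
        async with s.get(base_url + '/page1') as r1:
            first = await r1.text()
        async with s.get(base_url + '/page2') as r2:  # reuses the connection
            second = await r2.text()
    return first, second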
async def render(url, width=1024, max_height=4096, trim_image=False):
with DefaultClientSession(connector=TCPConnector()) as session:
async with session.request(method="post", url=render_url(), data=json.dumps({
"url": url,
"key": api_key(),
"width": str(width),
"max_height": str(max_height),
"trim": "true" if trim_image else "false",
})) as r:
if r.status == 200:
buffer = io.BytesIO()
buffer.write(await r.read())
return Response("", attachments=[MemoryAttachment(buffer, "screenshot.jpg", "image/jpeg")])
else:
try:
data = await r.json()
raise CommandError("error occurred: {}".format(data['error']))
except JSONDecodeError:
raise CommandError("error occurred with status code {}".format(r.status))
async def process(self, request):
"""
Processes passed :class:`Request <okami.api.Request>` object.
Exceptions should be caught otherwise entire pipeline terminates.
:param request: :class:`Request <okami.api.Request>` object
:returns: altered passed :class:`Request <okami.api.Request>` object
"""
if not self.controller.session or self.controller.session.closed:
try:
self.controller.session = self.controller.spider.session()
except NotImplementedError:
connector = aiohttp.TCPConnector(
limit=settings.CONN_MAX_CONCURRENT_CONNECTIONS,
verify_ssl=settings.CONN_VERIFY_SSL,
)
self.controller.session = aiohttp.ClientSession(connector=connector)
return request
async def _local_request(self, method, uri, cookies=None, *args, **kwargs):
import aiohttp
    if uri.startswith(('http:', 'https:', 'ftp:', 'ftps://', '//')):
url = uri
else:
url = 'http://{host}:{port}{uri}'.format(
host=HOST, port=PORT, uri=uri)
log.info(url)
conn = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(
cookies=cookies, connector=conn) as session:
async with getattr(
session, method.lower())(url, *args, **kwargs) as response:
try:
response.text = await response.text()
            except UnicodeDecodeError:
response.text = None
response.body = await response.read()
return response
def __init__(self,
*,
config: Config,
loop: asyncio.BaseEventLoop=_init_loop(),
show_progress: bool=True):
self.config = config
self.show_progress = show_progress
db_engine = create_engine(f'sqlite:///{config.config_dir/config.db_name}')
ModelBase.metadata.create_all(db_engine)
self.db = sessionmaker(bind=db_engine)()
self.client = ClientSession(connector=TCPConnector(limit_per_host=10, loop=loop),
headers={'User-Agent': _UA_STRING}, loop=loop)
self.resolvers = {n: r(manager=self)
for n, r in BaseResolver.__members__.items()}
self.runner = _Runner(self)
self._loop = loop
self._resolver_lock = asyncio.Lock(loop=loop)
self._tpes = [ThreadPoolExecutor(max_workers=1), ThreadPoolExecutor(max_workers=1)]
async def fetch_all_tiles_aiohttp(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyProperties,
                                  progress_bar: bool, n_parallel: int, timeout: float) -> List[HipsTile]:
    """Fetch HiPS tiles from a remote URL using aiohttp."""
import aiohttp
connector = aiohttp.TCPConnector(limit=n_parallel)
async with aiohttp.ClientSession(connector=connector) as session:
futures = []
for meta in tile_metas:
url = hips_survey.tile_url(meta)
future = asyncio.ensure_future(fetch_tile_aiohttp(url, meta, session, timeout))
futures.append(future)
futures = asyncio.as_completed(futures)
if progress_bar:
from tqdm import tqdm
futures = tqdm(futures, total=len(tile_metas), desc='Fetching tiles')
tiles = []
for future in futures:
tiles.append(await future)
return tiles
def test_create_clientsession_with_ssl_and_cookies(self):
"""Test create clientsession with ssl."""
def _async_helper():
return client.async_create_clientsession(
self.hass,
cookies={'bla': True}
)
session = run_callback_threadsafe(
self.hass.loop,
_async_helper,
).result()
assert isinstance(
session, aiohttp.ClientSession)
assert isinstance(
self.hass.data[client.DATA_CONNECTOR], aiohttp.TCPConnector)
def test_create_clientsession_without_ssl_and_cookies(self):
"""Test create clientsession without ssl."""
def _async_helper():
return client.async_create_clientsession(
self.hass,
False,
cookies={'bla': True}
)
session = run_callback_threadsafe(
self.hass.loop,
_async_helper,
).result()
assert isinstance(
session, aiohttp.ClientSession)
assert isinstance(
self.hass.data[client.DATA_CONNECTOR_NOTVERIFY],
aiohttp.TCPConnector)
def _async_get_connector(hass, verify_ssl=True):
"""Return the connector pool for aiohttp.
This method must be run in the event loop.
"""
if verify_ssl:
if DATA_CONNECTOR not in hass.data:
connector = aiohttp.TCPConnector(loop=hass.loop)
hass.data[DATA_CONNECTOR] = connector
_async_register_connector_shutdown(hass, connector)
else:
connector = hass.data[DATA_CONNECTOR]
else:
if DATA_CONNECTOR_NOTVERIFY not in hass.data:
connector = aiohttp.TCPConnector(loop=hass.loop, verify_ssl=False)
hass.data[DATA_CONNECTOR_NOTVERIFY] = connector
_async_register_connector_shutdown(hass, connector)
else:
connector = hass.data[DATA_CONNECTOR_NOTVERIFY]
return connector
async def http_post(shark, url, data):
log = shark.log.bind(url=url)
opts = shark.config['HTTP']
if opts.get('ssl_cafile'):
ssl_context = ssl.create_default_context(cafile=opts['ssl_cafile'])
else:
ssl_context = None
conn = aiohttp.TCPConnector(ssl_context=ssl_context)
async with aiohttp.ClientSession(connector=conn) as session:
wait = opts['wait']
for n in range(opts['tries']):
if n > 0:
await asyncio.sleep(wait)
try:
log.debug('http request', data=data)
async with session.post(url, json=data,
timeout=opts['timeout']) as resp:
if resp.status == 429: # Too many requests.
wait = _get_rate_limit_wait(log, resp, opts)
continue
else:
wait = opts['wait']
resp.raise_for_status()
data = await resp.json()
log.debug('http response', data=data)
return data
except aiohttp.ClientError:
log.exception('unhandled exception in http_post')
except asyncio.TimeoutError:
log.exception('timeout in http_post')
return {'status': 'error', 'error': c.ERR_SERVICE_UNAVAILABLE}
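# _get_rate_limit_wait() is referenced above but not shown. A hypothetical
# sketch of such a helper, assuming the server sends a numeric Retry-After
# header on 429 responses (the fallback to opts['wait'] is an assumption):
def _get_rate_limit_wait(log, resp, opts):
    try:
        return float(resp.headers.get('Retry-After', opts['wait']))
    except (TypeError, ValueError):
        log.warning('unparseable Retry-After header')
        return opts['wait']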
async def check_urls(urls, loop):
tasks = list()
conn = aiohttp.TCPConnector(limit=100, limit_per_host=2, loop=loop)
async with aiohttp.ClientSession(connector=conn, read_timeout=300, conn_timeout=10, loop=loop) as session:
for metadata in urls:
task = fetch(metadata, session)
tasks.append(task)
responses = dict()
for f in tqdm.tqdm(asyncio.as_completed(tasks), total=len(tasks)):
resource_id, url, err, http_last_modified, hash, force_hash = await f
responses[resource_id] = (url, err, http_last_modified, hash, force_hash)
return responses
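# A minimal usage sketch for the coroutine above, assuming `urls` is an
# iterable of metadata objects that fetch() understands:
import asyncio

def run_checks(urls):
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(check_urls(urls, loop))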