Python aiohttp 库 Timeout() 类的实例源码合集

test_stream.py 文件源码 项目:peony-twitter 作者: odrling 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
async def test_stream_cancel(event_loop):
    """Ensure cancelling the consumer task stops a stream iteration cleanly.

    NOTE(review): the body uses ``await``, so the function must be declared
    ``async def`` — the plain ``def`` in the original is a SyntaxError.
    """
    async def cancel(task):
        # Give the stream task a moment to start before cancelling it.
        await asyncio.sleep(0.001)
        task.cancel()

    async def test_stream_iterations(stream):
        # Iterate forever; the surrounding task is expected to be cancelled.
        while True:
            await test_stream_iteration(stream)

    with aiohttp.ClientSession(loop=event_loop) as session:
        client = peony.client.BasePeonyClient("", "", session=session)
        context = peony.stream.StreamResponse(method='GET',
                                              url="http://whatever.com",
                                              client=client)

        with context as stream:
            with patch.object(stream, '_connect',
                              side_effect=stream_content):
                coro = test_stream_iterations(stream)
                task = event_loop.create_task(coro)
                cancel_task = event_loop.create_task(cancel(task))

                # Fail the test if neither task completes within one second.
                with aiohttp.Timeout(1):
                    await asyncio.wait([task, cancel_task])
test_sessions.py 文件源码 项目:aiovk 作者: Fahreeve 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def test_auth_with_valid_data(self):
        """End-to-end check of the VK OAuth authorization-code flow.

        Logs in with a test session, extracts the ``code`` query parameter
        from the authorize redirect, then exchanges it for an access token.
        NOTE(review): this is a generator-based coroutine (``yield from``);
        presumably driven by a coroutine-aware test runner — confirm the
        enclosing class/decorator, which is not visible here.
        """
        s = TestAuthSession(login=USER_LOGIN, password=USER_PASSWORD, app_id=APP_ID)
        s.driver.session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False),
                                                 response_class=CustomClientResponse)
        yield from s.authorize()
        params = {'client_id': APP_ID, 'display': 'page', 'redirect_uri': REDIRECT_URI, 'response_type': 'code'}
        # Bound the authorize request to 10 seconds.
        with aiohttp.Timeout(10):
            response = yield from s.driver.session.get("https://oauth.vk.com/authorize",
                                                       params=params, allow_redirects=True)
        s.close()
        # The OAuth code comes back as a query parameter on the final URL.
        code = response.url.query.get('code')
        self.assertIsNotNone(code)

        s = AuthorizationCodeSession(APP_ID, APP_SECRET, REDIRECT_URI, code)
        yield from s.authorize()
        s.close()
        self.assertIsNotNone(s.access_token)
client.py 文件源码 项目:rauc-hawkbit 作者: rauc 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
async def post_resource(self, api_path, data, **kwargs):
        """
        Helper method for HTTP POST API requests.

        Declared ``async`` because the body awaits the POST request; the
        original plain ``def`` with ``await`` was a SyntaxError.

        Args:
            api_path(str): REST API path
            data: JSON data for POST request
        Keyword Args:
            kwargs: keyword args used for replacing items in the API path
        """
        post_headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            **self.headers
        }
        url = self.build_api_url(
                api_path.format(
                    tenant=self.tenant,
                    controllerId=self.controller_id,
                    **kwargs))
        self.logger.debug('POST {}'.format(url))
        # Bound the whole request/response cycle by the configured timeout.
        with aiohttp.Timeout(self.timeout):
            async with self.session.post(url, headers=post_headers,
                                         data=json.dumps(data)) as resp:
                await self.check_http_status(resp)
client.py 文件源码 项目:rauc-hawkbit 作者: rauc 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
async def put_resource(self, api_path, data, **kwargs):
        """
        Helper method for HTTP PUT API requests.

        Declared ``async`` because the body awaits the PUT request; the
        original plain ``def`` with ``await`` was a SyntaxError.

        Args:
            api_path(str): REST API path
            data: JSON data for PUT request
        Keyword Args:
            kwargs: keyword args used for replacing items in the API path
        """
        put_headers = {
            'Content-Type': 'application/json',
            **self.headers
        }
        url = self.build_api_url(
                api_path.format(
                    tenant=self.tenant,
                    controllerId=self.controller_id,
                    **kwargs))
        self.logger.debug('PUT {}'.format(url))
        self.logger.debug(json.dumps(data))
        # Bound the whole request/response cycle by the configured timeout.
        with aiohttp.Timeout(self.timeout):
            async with self.session.put(url, headers=put_headers,
                                        data=json.dumps(data)) as resp:
                await self.check_http_status(resp)
rest.py 文件源码 项目:py-restfmclient 作者: pcdummy 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
async def _request(self, method, store_path=None, **kwargs):
        """Issue *method* against this resource's URL and decode the body.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.

        Args:
            method (str): HTTP method name.
            store_path: when not None, the raw JSON text is also persisted
                via ``self._store`` before decoding.
        Returns:
            Parsed JSON for JSON responses, otherwise the raw text.
        Raises:
            RestNotFoundException: on HTTP 404 (JSON responses only).
            RestDecoderException: when the body is not valid JSON.
        """
        with aiohttp.Timeout(self.timeout, loop=self.session.loop):
            url = self.url()
            self.logger.debug('HTTP %s %s' % (method.upper(), url))
            kwargs['headers'] = self.headers
            async with self.session.request(method, url, **kwargs) as response:
                if self.headers['Content-Type'] == 'application/json':
                    result = await response.text()
                    if store_path is not None:
                        await self._store(store_path, method, url, result)

                    if response.status == 404:  # pragma: no cover
                        raise RestNotFoundException("Not found.")

                    try:
                        return json.loads(result)
                    except json.decoder.JSONDecodeError:
                        raise RestDecoderException(result)

                else:  # pragma: no cover
                    return await response.text()
aiohttp_fetcher.py 文件源码 项目:web_develop 作者: dongweiming 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
async def fetch(retry=0):
    """Fetch http://httpbin.org/ip through a random proxy, with retries.

    Declared ``async`` because the body awaits; the original plain ``def``
    with ``await`` was a SyntaxError.

    Args:
        retry (int): current retry count; gives up after 5 attempts.
    Returns:
        dict: the decoded JSON body of the response.
    Raises:
        TimeoutError: after more than 5 failed attempts.
    """
    proxy = 'http://{}'.format(Proxy.get_random()['address'])
    headers = {'user-agent': get_user_agent()}
    conn = aiohttp.ProxyConnector(proxy=proxy)

    url = 'http://httpbin.org/ip'

    try:
        with aiohttp.ClientSession(connector=conn) as session:
            with aiohttp.Timeout(TIMEOUT):
                async with session.get(url, headers=headers) as resp:
                    return await resp.json()
    except (ProxyConnectionError, TimeoutError):
        # The proxy is unusable -- drop it from the pool before retrying.
        try:
            p = Proxy.objects.get(address=proxy)
            if p:
                p.delete()
        except DoesNotExist:
            pass
        retry += 1
        if retry > 5:
            raise TimeoutError()
        await asyncio.sleep(1)
        return await fetch(retry=retry)
save_article_content.py 文件源码 项目:web_develop 作者: dongweiming 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
async def fetch(url, retry=0):
    """Fetch *url* and its companion JS endpoint through a random proxy.

    Declared ``async`` because the body awaits; the original plain ``def``
    with ``await`` was a SyntaxError.

    Args:
        url (str): article page URL.
        retry (int): current retry count; gives up after 5 attempts.
    Returns:
        tuple: (html_text, js_data) on success.
    Raises:
        CrawlerError: after more than 5 failed attempts.
    """
    proxy = 'http://{}'.format(Proxy.get_random()['address'])
    headers = {'user-agent': get_user_agent()}
    conn = aiohttp.ProxyConnector(proxy=proxy)

    js_url = gen_js_url(url)

    try:
        with aiohttp.ClientSession(connector=conn) as session:
            with aiohttp.Timeout(TIMEOUT):
                async with session.get(url, headers=headers) as resp:
                    html_text = await resp.text()

                async with session.get(js_url, headers=headers) as resp:
                    js_data = await resp.json()
    except Exception:
        # Deliberately broad (any proxy/network/parse failure retries), but
        # ``except Exception`` instead of a bare ``except:`` so cancellation
        # and SystemExit still propagate.
        retry += 1
        if retry > 5:
            raise CrawlerError()
        await asyncio.sleep(1)
        return await fetch(url, retry=retry)
    return html_text, js_data
downloader.py 文件源码 项目:wsp 作者: wangybnet 项目源码 文件源码 阅读 33 收藏 0 点赞 0 评论 0
async def _download(self, request):
        """Download *request* and wrap the reply in an HttpResponse.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.

        Args:
            request: object carrying method, url, headers, body, cookies
                and an optional proxy.
        Returns:
            HttpResponse: status, headers, raw body and cookies.
        """
        log.debug("Http Request: %s %s" % (request.method, request.url))
        # Use a proxy connector only when the request specifies a proxy.
        with aiohttp.ClientSession(connector=None if (request.proxy is None) else aiohttp.ProxyConnector(proxy=request.proxy),
                                   cookies=request.cookies) as session:
            with aiohttp.Timeout(self._timeout):
                async with session.request(request.method,
                                           request.url,
                                           headers=request.headers,
                                           data=request.body) as resp:
                    # Read the body inside the context so the connection is
                    # still open; metadata on ``resp`` stays valid afterwards.
                    body = await resp.read()
        response = HttpResponse(resp.url,
                                resp.status,
                                headers=resp.headers,
                                body=body,
                                cookies=resp.cookies)
        return response
downloader.py 文件源码 项目:wsp 作者: wangybnet 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
async def _download(self, request):
        """Download *request* and wrap the reply in an HttpResponse.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.
        NOTE(review): this duplicates the other ``_download`` in this
        collection verbatim; the two copies come from the same project.

        Args:
            request: object carrying method, url, headers, body, cookies
                and an optional proxy.
        Returns:
            HttpResponse: status, headers, raw body and cookies.
        """
        log.debug("Http Request: %s %s" % (request.method, request.url))
        with aiohttp.ClientSession(connector=None if (request.proxy is None) else aiohttp.ProxyConnector(proxy=request.proxy),
                                   cookies=request.cookies) as session:
            with aiohttp.Timeout(self._timeout):
                async with session.request(request.method,
                                           request.url,
                                           headers=request.headers,
                                           data=request.body) as resp:
                    body = await resp.read()
        response = HttpResponse(resp.url,
                                resp.status,
                                headers=resp.headers,
                                body=body,
                                cookies=resp.cookies)
        return response
riot_api.py 文件源码 项目:lolme 作者: Isaac-Lozano 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def request_url_json(self, url, params, limit=True):
        """GET *url* with *params* and return the decoded JSON body.

        Applies a token-bucket rate limit (``self.bucket`` tokens, refilled
        at ``limit_messages / limit_time`` per second) before the request.
        Raises RiotApiRateExceededException when out of tokens and
        RiotApiHttpException on any non-200 response.
        NOTE(review): the ``limit`` flag is accepted but never consulted.
        """
        # simple token bucket limiting
        current_time = time.time()
        delta_time = current_time - self.last_time
        self.last_time = current_time
        # Refill proportionally to elapsed time, capped at the bucket size.
        self.bucket += delta_time * (self.limit_messages / self.limit_time)
        if self.bucket > self.limit_messages:
            self.bucket = self.limit_messages
        if self.bucket < 1:
            raise RiotApiRateExceededException("Riot Api rate request exceeded. Please wait until making the next request")
        self.bucket -= 1
        with aiohttp.Timeout(self.timeout):
            response = yield from self.session.get(url, params=params)
            if response.status != 200:
                raise RiotApiHttpException(response.status)
            return (yield from response.json())
overwatch_api.py 文件源码 项目:lolme 作者: Isaac-Lozano 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def request_url_json(self, url, limit=True):
        """GET *url* and return the decoded JSON body.

        A token-bucket limiter gates the request: the bucket refills at
        ``limit_messages / limit_time`` tokens per second, capped at
        ``limit_messages``. Raises OverwatchApiRateExceededException when
        no token is available and OverwatchApiHttpException on any
        non-200 response.
        """
        # Refill the bucket proportionally to the time since the last call.
        now = time.time()
        elapsed = now - self.last_time
        self.last_time = now
        refill_rate = self.limit_messages / self.limit_time
        self.bucket = min(self.bucket + elapsed * refill_rate,
                          self.limit_messages)
        if self.bucket < 1:
            raise OverwatchApiRateExceededException()
        self.bucket -= 1
        with aiohttp.Timeout(self.timeout):
            response = yield from self.session.get(url)
            if response.status != 200:
                raise OverwatchApiHttpException(response.status)
            return (yield from response.json())
bot.py 文件源码 项目:discord_chat_bot 作者: chromaticity 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
async def cmd_setavatar(self, message, url=None):
        """
        Usage:
            {command_prefix}setavatar [url]

        Changes the bot's avatar.
        Attaching a file and leaving the url parameter blank also works.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.
        """
        if message.attachments:
            thing = message.attachments[0]['url']
        elif url:
            thing = url.strip('<>')
        else:
            # Fixed: with no attachment and no url the original crashed with
            # AttributeError on ``None.strip``.
            raise exceptions.CommandError("Please provide a URL or attach an image.", expire_in=20)

        try:
            with aiohttp.Timeout(10):
                async with self.aiosession.get(thing) as res:
                    await self.edit_profile(avatar=await res.read())

        except Exception as e:
            raise exceptions.CommandError("Unable to change avatar: %s" % e, expire_in=20)

        return Response(":ok_hand:", delete_after=20)
freebora.py 文件源码 项目:freebora 作者: deeplook 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
async def fetch(session, url, dest='.', overwrite=False, verbose=False):
    """Fetch a single PDF file if not already existing.

    Declared ``async`` because the body awaits; the original plain ``def``
    with ``await`` was a SyntaxError.

    Args:
        session: aiohttp client session used for the download.
        url (str): URL of the PDF; its basename becomes the local file name.
        dest (str): destination directory.
        overwrite (bool): re-download even when the file already exists.
        verbose (bool): print a line per saved file.
    """
    pdf_name = os.path.basename(url)
    path = os.path.join(dest, pdf_name)
    if not os.path.exists(path) or overwrite:
        # 60-second budget for the whole download, tied to the session loop.
        with aiohttp.Timeout(60, loop=session.loop):
            async with session.get(url) as response:
                pdf = await response.read()
                async with aiofiles.open(path, mode='wb') as f:
                    await f.write(pdf)
                    if verbose:
                        print('saved %s (%d bytes)' % (path, len(pdf)))
__init__.py 文件源码 项目:mlimages 作者: icoxfog417 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
async def fetch_image(self, session, relative, image_url):
        """Download *image_url* into the *relative* directory.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.

        Returns:
            bool: True when the image was written to disk, False otherwise
            (non-200 status, redirect to a different file name, empty body,
            or any logged error).
        """
        fname = self.file_api.get_file_name(image_url)
        p = os.path.join(relative, fname)
        fetched = False
        try:
            with aiohttp.Timeout(self.timeout):
                async with session.get(image_url) as r:
                    # Only keep the body when the final URL still points at
                    # the same file name (guards against redirected pages).
                    if r.status == 200 and self.file_api.get_file_name(r.url) == fname:
                        c = await r.read()
                        if c:
                            with open(self.file_api.to_abs(p), "wb") as f:
                                f.write(c)
                                fetched = True
        except FileNotFoundError:
            self.logger.error("{0} is not found.".format(p))
        except concurrent.futures._base.TimeoutError:
            # NOTE(review): private-module exception kept for behavior parity;
            # on this aiohttp era it is what the timeout raises.
            self.logger.warning("{0} is timeouted.".format(image_url))
        except Exception as ex:
            self.logger.warning("fetch image is failed. url: {0}, cause: {1}".format(image_url, str(ex)))
        return fetched
transport.py 文件源码 项目:python-zeep 作者: mvantellingen 项目源码 文件源码 阅读 34 收藏 0 点赞 0 评论 0
def _load_remote_data(self, url):
        """Synchronously fetch *url* and return the raw response bytes.

        Bridges sync callers to the async session: an inner coroutine does
        the GET under ``aiohttp.Timeout`` and writes the body into the
        enclosing ``result`` via ``nonlocal``; the event loop is driven to
        completion before returning.

        Raises:
            TransportError: on any HTTP error status, carrying the status
                code and the (already-read) body as content.
        """
        result = None

        async def _load_remote_data_async():
            nonlocal result
            with aiohttp.Timeout(self.load_timeout):
                response = await self.session.get(url)
                # Read the body before raise_for_status so the error can
                # still carry the response content.
                result = await response.read()
                try:
                    response.raise_for_status()
                except aiohttp.ClientError as exc:
                    raise TransportError(
                        message=str(exc),
                        status_code=response.status,
                        content=result
                    ).with_traceback(exc.__traceback__) from exc

        # Block until we have the data
        self.loop.run_until_complete(_load_remote_data_async())
        return result
http_client.py 文件源码 项目:pnu 作者: erasaur 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
async def get_json(self, url, timeout=None):
        """GET *url* and return ``{'res': <json or {}>, 'status': <int>}``.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.

        Args:
            url (str): URL to fetch.
            timeout: seconds before giving up; defaults to the configured
                ``http.timeout_seconds``.
        Returns:
            dict: empty when the request timed out; otherwise carries the
            decoded JSON (or {} on decode failure) and the HTTP status.
        """
        if timeout is None:
            timeout = pub_config["http"]["timeout_seconds"]

        result = {}
        try:
            with aiohttp.Timeout(timeout):
                async with self._session.get(url) as resp:
                    try:
                        res = await resp.json()
                    except Exception as e:
                        logging.info("failed decoding response, got exception: {}".format(e))
                        res = {}
                    result["res"] = res
                    result["status"] = resp.status
        except asyncio.TimeoutError as e:
            # Fixed: aiohttp.Timeout raises asyncio.TimeoutError, which
            # (before Python 3.11) is NOT the builtin TimeoutError the
            # original caught -- timeouts were escaping this handler.
            logging.info("HTTP timeout ({}) with error: {}".format(url, e))

        return result
claw.py 文件源码 项目:my-spider 作者: time-river 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
async def _fetch_page(self, request):
        """Fetch the page described by *request* and push the result to redis.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.

        *request* is a dict with 'url', 'params', 'headers', 'type'
        (json or an encoding for text) and 'order'. Any failure is
        swallowed so the crawl can continue.
        """
        try:
            with aiohttp.Timeout(10):
                async with aiohttp.get(request['url'], params=request['params'], headers=request['headers']) as response:
                    try:
                        assert response.status == 200
                        if request['type'] == 'json':
                            content = await response.json()
                        else:
                            content = await response.text(request['type'])
                        obj = {'order':request['order'], 'content':content}
                        redis_push(self.redis, self.content_key, obj)
                    except AssertionError:
                        # Fixed: the original logged an undefined name ``url``
                        # here, raising NameError instead of a warning.
                        logging.warning('{} {}'.format(response.status, request['url']))
        except Exception: # kinds of error, not only asyncio.TimeoutError
            #redis_push(self.redis, self.request_key, request)
            pass
proxy.py 文件源码 项目:my-spider 作者: time-river 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
async def _verify_proxy(self, proxy):
        """Check that *proxy* can reach one of the test URLs; keep it if so.

        Declared ``async`` because the body uses ``async with``; the
        original plain ``def`` was a SyntaxError.

        *proxy* is a dict with 'protocol', 'ip' and 'port'. Working proxies
        are added to the redis set; all failures are silently ignored.
        """
        addr = proxy['protocol'] + '://' + proxy['ip'] +':'+proxy['port']
        conn = aiohttp.ProxyConnector(proxy=addr)
        # Fixed: create the session BEFORE the try block -- if construction
        # failed inside it, the original's ``finally`` raised NameError.
        session = aiohttp.ClientSession(connector=conn)
        try:
            with aiohttp.Timeout(10):
                async with session.get(self.test_url[random.randrange(len(self.test_url))]) as response: # close connection and response, otherwise will tip: Unclosed connection and Unclosed response
                    try:
                        assert response.status == 200
                        redis_sadd(self.redis, self.proxy_key, proxy)
                    except Exception:
                        pass
        except Exception: #ProxyConnectionError, HttpProxyError and etc?
            pass
        finally:
            session.close() # close session when timeout
aiosteamsearch.py 文件源码 项目:steamsearch 作者: billy-yoyo 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def exchange(amount, from_curr, to_curr, timeout=10):
    """Converts an amount of money from one currency to another

    Args:
        amount (float): The amount of money you want to convert
        from_curr (str): The currency you want to convert from,
            either country symbol (e.g USD) or currency symbol (e.g. £)
        to_curr (str): The currency you want to convert to, same format as from_curr
        timeout (int, optional): The time in seconds aiohttp will take to timeout the request
    Returns:
        float: the converted amount of money to 2 d.p., or the original amount if the conversion failed.
    """
    try:
        with aiohttp.ClientSession() as session:
            with aiohttp.Timeout(timeout):
                resp = yield from session.get("http://api.fixer.io/latest?symbols=" + from_curr + "," + to_curr)
                data = yield from resp.json()
                if "rates" in data:
                    # Truncate to 2 decimal places via integer cents.
                    return int((amount / data["rates"][from_curr]) * data["rates"][to_curr] * 100)/100
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return amount
    # Fixed: the original fell through and returned None when the payload
    # had no "rates" key, contradicting the documented fallback.
    return amount
aiosteamsearch.py 文件源码 项目:steamsearch 作者: billy-yoyo 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def get_recommendations(appid, timeout=10):
    """Scrape Steam's "more like this" page for *appid*.

    Args:
        appid: the app to find similar games for; coerced to str.
        timeout (int, optional): seconds before aiohttp times the request out.
    Returns:
        list: appids (str) of similar games found on the page.

    NOTE(review): the bare ``print`` calls look like leftover debugging.
    """
    appid = str(appid)
    similar = []
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://store.steampowered.com/recommended/morelike/app/" + appid)
            text = yield from resp.text()
            print(text)

            soup = BeautifulSoup(text, "html.parser")


            items = soup.find_all("div", {"class": "similar_grid_item"})
            print("found %s items" % len(items))
            for item in items:
                subsoup = item.find("div", {"class": "similar_grid_capsule"})
                if subsoup is not None:
                    # The capsule carries the similar game's id in a data attr.
                    similar_id = subsoup.get("data-ds-appid")
                    if similar_id is not None:
                        similar.append(similar_id)
                    else:
                        print("failed to find appid")
                else:
                    print("failed to get item")
    return similar
aiosteamsearch.py 文件源码 项目:steamsearch 作者: billy-yoyo 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def get_user(steamid, timeout=10, be_specific=False):
    """Gets some information about a specific steamid

    Args:
        steamid (str): The user's steamid; a non-numeric value is first
            resolved to a steamid via search_for_userid.
        timeout (int, optional): The amount of time before aiohttp raises a timeout error
        be_specific (bool, optional): forwarded to search_for_userid when
            resolving a name.
    Returns:
        a UserResult object, or None when the id could not be resolved or
        the API returned no players
        """
    if not is_integer(steamid):
        steamid = yield from search_for_userid(steamid, be_specific=be_specific)
    if steamid is not None:
        _check_key_set()
        with aiohttp.ClientSession() as session:
            with aiohttp.Timeout(timeout):
                resp = yield from session.get("http://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/?key=" + STEAM_KEY + "&steamids=" + steamid)
                data = yield from resp.json()

                # Only build a result when the API actually returned a player.
                if "response" in data and "players" in data["response"] and len(data["response"]["players"]) > 0:
                    player = data["response"]["players"][0]
                    return UserResult(player)
    return None
aiosteamsearch.py 文件源码 项目:steamsearch 作者: billy-yoyo 项目源码 文件源码 阅读 31 收藏 0 点赞 0 评论 0
def get_user_id(name, timeout=10):
    """Resolve a vanity URL name to a steamid.

    Only vanity URLs can be resolved this way; search_user_id is the
    recommended, more general lookup.

    Args:
        name (str): The name of the user to find the steamid of
        timeout (int, optional): The amount of time before aiohttp raises a timeout error
    Returns:
        either None or a steamid (str) if a vanity url matching that name is found
        """
    # Serve repeated lookups straight from the in-memory cache.
    if name in userid_cache:
        return userid_cache[name]

    _check_key_set()
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):

            resp = yield from session.get("http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/?key=" + STEAM_KEY + "&vanityurl=" + parse.quote(name))
            data = yield from resp.json()

            payload = data.get("response", {})
            if payload.get("success") == 1:
                steamid = payload["steamid"]
                if STEAM_CACHE:
                    userid_cache[name] = steamid
                return steamid
            return None
aiosteamsearch.py 文件源码 项目:steamsearch 作者: billy-yoyo 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def steam_user_data(timeout=10):
    """Gets information about the amount of users on steam over the past 48 hours

    Args:
        timeout (int, optional): The amount of time before aiohttp raises a timeout error
    Returns:
        A tuple containing (min_users (int), max_users (int), current_users (int))"""
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://store.steampowered.com/stats/userdata.json")
            payload = yield from resp.json()
            samples = payload[0]["data"]

            # -1 acts as "not yet seen"; each sample's user count is at
            # index 1 of the [timestamp, count] pair.
            lowest = -1
            highest = -1
            for sample in samples:
                count = sample[1]
                if lowest == -1 or count < lowest:
                    lowest = count
                if highest == -1 or count > highest:
                    highest = count
            return lowest, highest, samples[-1][1]
aiosteamsearch.py 文件源码 项目:steamsearch 作者: billy-yoyo 项目源码 文件源码 阅读 39 收藏 0 点赞 0 评论 0
def get_user_achievements(username, gameid, timeout=10, be_specific=False):
    """Gets information about a specific user's achievements for a specific game

    Args:
        username (str): the id or name of the user you want the achievements for
        gameid (str): the id or name of the game you want the achievements for
        timeout (int): the amount of time before aiohttp raises a timeout error
        be_specific (bool, optional): forwarded to search_for_userid when
            resolving a user name.
    Returns:
        UserAchievement: the user achievements found, or None implicitly
        when either id cannot be resolved or the API payload lacks
        achievements."""
    # Resolve names to numeric ids where necessary.
    if not is_integer(username):
        username = yield from search_for_userid(username, timeout=timeout, be_specific=be_specific)
    if not is_integer(gameid):
        gameid, gamename = yield from get_app(gameid, timeout=timeout)
    else:
        # Game name is unknown when the caller passed a numeric id directly.
        gamename = "???"
    _check_key_set()
    if username is not None and gameid is not None:
        with aiohttp.ClientSession() as session:
            with aiohttp.Timeout(timeout):
                resp = yield from session.get("http://api.steampowered.com/ISteamUserStats/GetPlayerAchievements/v0001/?appid=" + gameid + "&key=" + STEAM_KEY + "&steamid=" + username)
                data = yield from resp.json()
                if "playerstats" in data and "achievements" in data["playerstats"]:
                    return UserAchievements(gameid, gamename, data["playerstats"]["achievements"])
isitdown.py 文件源码 项目:beatrice-witchcogs 作者: PookaMustard 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
async def isitdown(self, url):
        """Checks if a website is down or up.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.
        """
        if url == "":
            await self.bot.say("You haven't entered a website to check.")
            return
        # Fixed: the original tested ``"http://" not in url or "https://"
        # not in url``, which is true even for URLs that already carry a
        # scheme, so e.g. "https://x" became "http://https://x".
        if not url.startswith(("http://", "https://")):
            url = "http://" + url
        try:
            with aiohttp.Timeout(15):
                await self.bot.say("Testing " + url + "…")
                try:
                    response = await aiohttp.get(url, headers = { 'user_agent': headers })
                    if response.status == 200:
                        await self.bot.say(url + " is up and running.")
                    else:
                        await self.bot.say(url + " is down.")
                except Exception:
                    # Any request failure is reported as "down".
                    await self.bot.say(url + " is down.")
        except asyncio.TimeoutError:
            await self.bot.say(url + " is down.")
proxy_client.py 文件源码 项目:PRCDNS 作者: lbp0200 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
async def fetch(session, url, proxy=None):
        """GET *url* (optionally through *proxy*) and return the body text.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError. The request is bounded
        to 10 seconds.
        """
        with aiohttp.Timeout(10):
            # proxy example: http://127.0.0.1:8123
            async with session.get(url, proxy=proxy) as response:
                return await response.text()
sorter.py 文件源码 项目:fingerprint-securedrop 作者: freedomofpress 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
async def fetch(self, url):
        """Load a webpage and read return the body as plaintext.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.

        Raises:
            SorterResponseCodeError: on any non-200 status.
            SorterTimeoutError: when the page load exceeds the timeout.
            SorterConnectionError: on SOCKS/disconnect/response errors.
            SorterCertError: on SSL certificate failures.
        """
        self.logger.info("{url}: loading...".format(**locals()))
        try:
            with aiohttp.Timeout(self.page_load_timeout, loop=self.loop):
                async with self.session.get(url,
                                            allow_redirects=True,
                                            headers=self.headers) as resp:

                    if resp.status != 200:
                        self.logger.warning("{url} was not reachable. HTTP "
                                            "error code {resp.status} was "
                                            "returned".format(**locals()))
                        raise SorterResponseCodeError

                    self.logger.info("{url}: loaded "
                                     "successfully.".format(**locals()))
                    return await resp.text()
        except asyncio.TimeoutError:
            self.logger.warning("{url}: timed out after "
                                "{self.page_load_timeout}.".format(**locals()))
            raise SorterTimeoutError
        except (aiosocks.errors.SocksError,
                aiohttp.errors.ServerDisconnectedError,
                aiohttp.errors.ClientResponseError) as exc:
            self.logger.warning("{url} was not reachable: "
                                "{exc}".format(**locals()))
            raise SorterConnectionError
        except aiohttp.errors.ClientOSError as exception_msg:
            # Fixed: ``"SSL" in exception_msg`` tested membership on the
            # exception object itself, which raises TypeError; the message
            # string is what must be inspected.
            if "SSL" in str(exception_msg):
                self.logger.warning("{url}: certificate error (probably due to "
                                    "use of a self-signed "
                                    "cert.".format(**locals()))
                raise SorterCertError
            else:
                raise
        except (ssl.CertificateError, aiohttp.errors.ClientOSError):
            self.logger.warning("{url}: certificate error (probably due to "
                                "use of a self-signed "
                                "cert.".format(**locals()))
            raise SorterCertError
client.py 文件源码 项目:telegram-uz-bot 作者: vit- 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
async def call(self, endpoint, method='POST', raw=False, *args, **kwargs):
        """Perform an HTTP request against *endpoint* and decode the reply.

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.

        Args:
            endpoint: path passed to ``self.uri``.
            method (str): HTTP method, POST by default.
            raw (bool): when True, return the raw body bytes undecoded.
        Returns:
            The decoded JSON payload, or raw bytes when ``raw`` is True.
        Raises:
            BadRequest: on HTTP 400.
            HTTPError: on any other non-200 status.
            ResponseError: when the payload carries an 'error' field.
        """
        if 'headers' not in kwargs:
            kwargs['headers'] = await self.get_headers()

        uri = self.uri(endpoint)
        logger.debug('Fetching: %s', uri)
        logger.debug('Headers: %s', kwargs['headers'])
        logger.debug('Cookies: %s', self.session.cookies)

        with aiohttp.Timeout(self.request_timeout):
            async with self.session.request(
                    method, uri, *args, **kwargs) as response:
                body = await response.read()
                if not response.status == 200:
                    try:
                        payload = await response.json()
                    except Exception:  # TODO: narrow exception
                        payload = None
                    ex = BadRequest if response.status == 400 else HTTPError
                    raise ex(response.status, body, kwargs.get('data'), payload)
                if raw:
                    return body
                # Renamed local from ``json`` to avoid shadowing the module.
                payload = await response.json()
                if payload.get('error'):
                    raise ResponseError(response.status, body, kwargs.get('data'), payload)
                return payload
pokemon.py 文件源码 项目:WeenieBot 作者: Beefywhale 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
async def fetch(session, url):
    """GET *url* with a 10-second timeout.

    Declared ``async`` because the body awaits; the original plain ``def``
    with ``await`` was a SyntaxError.

    Returns:
        tuple: (body_text, status_code).
    """
    with aiohttp.Timeout(10, loop=session.loop):
        async with session.get(url) as response:
            tmp = await response.text()
            return (tmp, response.status)
stream.py 文件源码 项目:peony-twitter 作者: odrling 项目源码 文件源码 阅读 42 收藏 0 点赞 0 评论 0
async def connect(self):
        """
            Create the connection

        Declared ``async`` because the body awaits; the original plain
        ``def`` with ``await`` was a SyntaxError.

        Returns
        -------
        self

        Raises
        ------
        exception.PeonyException
            On a response status in 4xx that are not status 420 or 429
            Also on statuses in 1xx or 3xx since this should not be the status
            received here
        """
        with aiohttp.Timeout(self.timeout):
            self.response = await self._connect()

        # Map the HTTP status onto the stream's reconnection state machine.
        if self.response.status in range(200, 300):
            self._error_timeout = 0
            self.state = NORMAL
        elif self.response.status == 500:
            self.state = DISCONNECTION
        elif self.response.status in range(501, 600):
            self.state = RECONNECTION
        elif self.response.status in (420, 429):
            self.state = ENHANCE_YOUR_CALM
        else:
            logger.debug("raising error during stream connection")
            raise await exceptions.throw(self.response,
                                         loads=self.client._loads,
                                         url=self.kwargs['url'])

        logger.debug("stream state: %d" % self.state)


问题


面经


文章

微信
公众号

扫码关注公众号