Python requests.utils: example source code

bodegaREQ.py (project: OpenCouture-Dev, author: 9-9-0)
def loadCartAndCheckout(self):
        # Copy cookies from the authenticated requests session into a fresh Chrome driver
        driver = webdriver.Chrome(executable_path="./chromedriver")
        driver.delete_all_cookies()
        driver.get(self.URL_cart)

        cookies = requests.utils.dict_from_cookiejar(self.user_session.cookies)

        for cookie in cookies.items():
            cookie_dict = {'name': '',
                           'value': '',
                           'path': '/'}
            cookie_dict['name'] = cookie[0]
            cookie_dict['value'] = cookie[1]
            driver.add_cookie(cookie_dict)

        driver.get(self.URL_cart)
        #time.sleep(5)
        #driver.quit()
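The snippet above moves cookies from an authenticated requests session into Selenium one at a time. A minimal standalone sketch of the same idea (the URLs and login step are assumptions, not taken from the project):

import requests
from selenium import webdriver

session = requests.Session()
session.get("https://example.com/login")  # hypothetical login step

driver = webdriver.Chrome()
driver.get("https://example.com")  # Selenium only accepts cookies for the current domain
for name, value in requests.utils.dict_from_cookiejar(session.cookies).items():
    driver.add_cookie({"name": name, "value": value, "path": "/"})
driver.get("https://example.com/cart")  # reloaded with the copied cookies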
saml_token_provider.py (project: Office365-REST-Python-Client, author: vgrem)
def acquire_authentication_cookie(self, options):
        """Retrieve SPO auth cookie"""
        logger = self.logger(self.acquire_authentication_cookie.__name__)

        url = options['endpoint']

        session = requests.session()
        logger.debug_secrets("session: %s\nsession.post(%s, data=%s)", session, url, self.token)
        session.post(url, data=self.token, headers={'Content-Type': 'application/x-www-form-urlencoded'})
        logger.debug_secrets("session.cookies: %s", session.cookies)
        cookies = requests.utils.dict_from_cookiejar(session.cookies)
        logger.debug_secrets("cookies: %s", cookies)
        if 'FedAuth' in cookies and 'rtFa' in cookies:
            self.FedAuth = cookies['FedAuth']
            self.rtFa = cookies['rtFa']
            return True
        self.error = "An error occurred while retrieving auth cookies"
        logger.error(self.error)
        return False
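Once FedAuth and rtFa are captured, they are normally sent back as cookies on later SharePoint Online requests. A hedged sketch using requests.utils.add_dict_to_cookiejar (the host and cookie values are placeholders, not real credentials):

import requests

session = requests.Session()
requests.utils.add_dict_to_cookiejar(
    session.cookies,
    {"FedAuth": "<FedAuth value>", "rtFa": "<rtFa value>"},  # placeholders
)
# session.get("https://contoso.sharepoint.com/_api/web")  # hypothetical call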
instaloader.py (project: instaloader, author: Thammus)
def load_session_from_file(self, username: str, filename: Optional[str] = None) -> None:
        """Internally stores :class:`requests.Session` object loaded from file.

        If filename is None, the file with the default session path is loaded.

        :raises FileNotFoundError: If the file does not exist.
        """
        if filename is None:
            filename = get_default_session_filename(username)
        with open(filename, 'rb') as sessionfile:
            session = requests.Session()
            session.cookies = requests.utils.cookiejar_from_dict(pickle.load(sessionfile))
            session.headers.update(self._default_http_header())
            session.headers.update({'X-CSRFToken': session.cookies.get_dict()['csrftoken']})
            self._log("Loaded session from %s." % filename)
            self.session = session
            self.username = username
auth.py (project: zhihu-api, author: lzjun567)
def authenticated(func):
    def wrapper(self, *args, **kwargs):
        success = False
        # If a login cookie is already present, check whether it is still valid
        if 'z_c0' in requests.utils.dict_from_cookiejar(self.cookies):
            from ..url import URL
            r = self._execute(method="get", url=URL.profile(user_slug="zhijun-liu"))
            success = r.ok
        while not success:
            account = input("Please enter your email or phone number: ")
            password = input("Please enter your password: ")
            obj = Account()
            data = obj.login(account, password)
            if data.get("r") == 0:
                success = True
                self.cookies = obj.cookies
            else:
                print(data.get("msg"))
        else:  # while/else: runs once the loop exits normally, i.e. after a successful login
            return func(self, *args, **kwargs)

    return wrapper
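The decorator checks login state by looking for the z_c0 cookie in the session's cookie jar. The check itself in isolation (the cookie value is hypothetical):

import requests

jar = requests.cookies.RequestsCookieJar()
jar.set("z_c0", "login-token")  # hypothetical value

if "z_c0" in requests.utils.dict_from_cookiejar(jar):
    print("login cookie present, skip interactive login")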
py_bing_search.py (project: Genum, author: la0rg)
def _search(self, limit, format):
        '''
        Returns a list of result objects, with the url for the next page bing search url.
        '''
        url = self.QUERY_URL.format(requests.utils.quote("'{}'".format(self.query)), min(50, limit),
                                    self.current_offset, format)
        r = requests.get(url, auth=("", self.api_key))
        try:
            json_results = r.json()
        except ValueError as vE:
            if not self.safe:
                raise PyBingWebException("Request returned with code %s, error msg: %s" % (r.status_code, r.text))
            else:
                print("[ERROR] Request returned with code %s, error msg: %s. \nContinuing in 5 seconds." % (
                    r.status_code, r.text))
                time.sleep(5)
                # Retry after the pause; otherwise json_results below would be undefined.
                return self._search(limit, format)
        packaged_results = [WebResult(single_result_json) for single_result_json in json_results['d']['results']]
        self.current_offset += min(50, limit, len(packaged_results))
        return packaged_results
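The query URL above is built with requests.utils.quote, which percent-encodes the quoted search term. A standalone sketch (the QUERY_URL template is an assumption modelled on the snippet, not the exact Bing API URL):

import requests

QUERY_URL = ("https://api.datamarket.azure.com/Bing/Search/Web"
             "?Query={}&$top={}&$skip={}&$format={}")

query = requests.utils.quote("'{}'".format("python requests"))
url = QUERY_URL.format(query, min(50, 10), 0, "json")
print(url)  # ...Query=%27python%20requests%27&$top=10&$skip=0&$format=json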
py_bing_search.py (project: Genum, author: la0rg)
def _search(self, limit, format):
        '''
        Returns a list of result objects, with the url for the next page bing search url.
        '''
        url = self.QUERY_URL.format(requests.utils.quote("'{}'".format(self.query)), min(50, limit),
                                    self.current_offset, format)
        r = requests.get(url, auth=("", self.api_key))
        try:
            json_results = r.json()
        except ValueError as vE:
            if not self.safe:
                raise PyBingVideoException("Request returned with code %s, error msg: %s" % (r.status_code, r.text))
            else:
                print("[ERROR] Request returned with code %s, error msg: %s. \nContinuing in 5 seconds." % (
                    r.status_code, r.text))
                time.sleep(5)
                # Retry after the pause; otherwise json_results below would be undefined.
                return self._search(limit, format)
        packaged_results = [VideoResult(single_result_json) for single_result_json in json_results['d']['results']]
        self.current_offset += min(50, limit, len(packaged_results))
        return packaged_results
py_bing_search.py (project: Genum, author: la0rg)
def _search(self, limit, format):
        '''
        Returns a list of result objects, with the url for the next page bing search url.
        '''
        url = self.QUERY_URL.format(requests.utils.quote("'{}'".format(self.query)), min(50, limit),
                                    self.current_offset, format)
        r = requests.get(url, auth=("", self.api_key))
        try:
            json_results = r.json()
        except ValueError as vE:
            if not self.safe:
                raise PyBingNewsException("Request returned with code %s, error msg: %s" % (r.status_code, r.text))
            else:
                print("[ERROR] Request returned with code %s, error msg: %s. \nContinuing in 5 seconds." % (
                    r.status_code, r.text))
                time.sleep(5)
                # Retry after the pause; otherwise json_results below would be undefined.
                return self._search(limit, format)
        packaged_results = [NewsResult(single_result_json) for single_result_json in json_results['d']['results']]
        self.current_offset += min(50, limit, len(packaged_results))
        return packaged_results
sync.py (project: boartty, author: openstack)
def __init__(self, app, disable_background_sync):
        self.user_agent = 'Boartty/%s %s' % (boartty.version.version_info.release_string(),
                                            requests.utils.default_user_agent())
        self.version = (0, 0, 0)
        self.offline = False
        self.app = app
        self.log = logging.getLogger('boartty.sync')
        self.queue = MultiQueue([HIGH_PRIORITY, NORMAL_PRIORITY, LOW_PRIORITY])
        self.result_queue = queue.Queue()
        self.session = requests.Session()
        self.token = 'Bearer %s' % (self.app.config.token)
        self.submitTask(GetVersionTask(HIGH_PRIORITY))
        self.submitTask(SyncOwnUserTask(HIGH_PRIORITY))
        if not disable_background_sync:
            self.submitTask(UpdateStoriesTask(HIGH_PRIORITY))
            self.submitTask(SyncProjectListTask(HIGH_PRIORITY))
            self.submitTask(SyncUserListTask(HIGH_PRIORITY))
            self.submitTask(SyncProjectSubscriptionsTask(NORMAL_PRIORITY))
            self.submitTask(SyncSubscribedProjectsTask(NORMAL_PRIORITY))
            self.submitTask(SyncBoardsTask(NORMAL_PRIORITY))
            self.submitTask(SyncWorklistsTask(NORMAL_PRIORITY))
            #self.submitTask(SyncSubscribedProjectBranchesTask(LOW_PRIORITY))
            #self.submitTask(SyncOutdatedChangesTask(LOW_PRIORITY))
            #self.submitTask(PruneDatabaseTask(self.app.config.expire_age, LOW_PRIORITY))
            self.periodic_thread = threading.Thread(target=self.periodicSync)
            self.periodic_thread.daemon = True
            self.periodic_thread.start()
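requests.utils.default_user_agent() returns the library's own User-Agent string, which the constructor above prefixes with the application name. For example (the "MyTool" prefix is a placeholder):

import requests

user_agent = "MyTool/1.0 %s" % requests.utils.default_user_agent()
print(user_agent)  # e.g. "MyTool/1.0 python-requests/2.31.0" (version varies)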
instaloader.py (project: instaloader, author: Thammus)
def copy_session(session: requests.Session) -> requests.Session:
    """Duplicates a requests.Session."""
    new = requests.Session()
    new.cookies = \
        requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
    new.headers = session.headers.copy()
    return new
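A usage sketch for copy_session() defined above: the clone receives equal name/value cookies and a copy of the headers, but the two sessions then diverge independently (cookie names and values are made up):

import requests

original = requests.Session()
original.cookies.set("sessionid", "abc")
original.headers.update({"X-Demo": "1"})

clone = copy_session(original)
clone.cookies.set("sessionid", "xyz")

print(original.cookies.get("sessionid"))  # abc
print(clone.cookies.get("sessionid"))     # xyz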
instaloader.py (project: instaloader, author: Thammus)
def save_session_to_file(self, filename: Optional[str] = None) -> None:
        """Saves internally stored :class:`requests.Session` object."""
        if filename is None:
            filename = get_default_session_filename(self.username)
        dirname = os.path.dirname(filename)
        if dirname != '' and not os.path.exists(dirname):
            os.makedirs(dirname)
            os.chmod(dirname, 0o700)
        with open(filename, 'wb') as sessionfile:
            os.chmod(filename, 0o600)
            pickle.dump(requests.utils.dict_from_cookiejar(self.session.cookies), sessionfile)
            self._log("Saved session to %s." % filename)
browser.py (project: bjguahao, author: iBreaker)
def load_cookies(self, path):
        with open(path, 'rb') as f:
            self.session.cookies = requests.utils.cookiejar_from_dict(pickle.load(f))
browser.py (project: bjguahao, author: iBreaker)
def save_cookies(self, path):
        with open(path, 'wb') as f:
            cookies_dic = requests.utils.dict_from_cookiejar(self.session.cookies)
            pickle.dump(cookies_dic, f)
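A round-trip sketch of the same pattern as the two helpers above (file path and cookie are hypothetical). Note that dict_from_cookiejar keeps only name/value pairs, so domain, path and expiry metadata are not restored by cookiejar_from_dict:

import pickle
import requests

session = requests.Session()
session.cookies.set("token", "abc123")

with open("cookies.pkl", "wb") as f:
    pickle.dump(requests.utils.dict_from_cookiejar(session.cookies), f)

fresh = requests.Session()
with open("cookies.pkl", "rb") as f:
    fresh.cookies = requests.utils.cookiejar_from_dict(pickle.load(f))
assert fresh.cookies.get("token") == "abc123"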
__main__.py (project: simphony-remote, author: simphony)
def login(ctx, url, username, password):
    """Performs login on the remote server at the specified URL."""
    login_url = urljoin(url, "/hub/login")

    payload = {"username": username, "password": password}

    # Unfortunately, jupyterhub handles the afterlogin with an immediate
    # redirection, meaning that we have to check for a 302 and prevent
    # redirection in order to capture the cookies.
    try:
        response = requests.post(login_url, payload, verify=False,
                                 allow_redirects=False)
    except Exception as e:
        print("Could not perform request. {}".format(e), file=sys.stderr)
        sys.exit(1)

    if response.status_code == 302:
        cookies_dict = requests.utils.dict_from_cookiejar(response.cookies)
        cred = Credentials(url, username, cookies_dict)
        cred.write(ctx.obj.credentials_file)
    else:
        print("Failed to perform login. Server replied with error: {}".format(
            response.status_code), file=sys.stderr)
        sys.exit(1)

# -------------------------------------------------------------------------
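A hedged follow-up: the stored cookie dict can later be turned back into a jar to call authenticated hub pages (the hub URL and cookie name below are assumptions):

import requests

saved = {"jupyterhub-session-id": "<value>"}  # e.g. read back from Credentials
session = requests.Session()
session.cookies = requests.utils.cookiejar_from_dict(saved)
# response = session.get("https://hub.example.com/hub/home", verify=False)  # hypothetical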
work.py (project: biji, author: jianmoumou)
def test4():
    from requests.utils import get_netrc_auth
    url = "http://www.126.com"
    print(get_netrc_auth(url))
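get_netrc_auth() resolves credentials from a ~/.netrc file; given a matching machine entry it returns a (login, password) tuple, otherwise None. A sketch with a hypothetical entry:

# Assuming ~/.netrc contains (hypothetical credentials):
#   machine www.126.com login alice password s3cret
from requests.utils import get_netrc_auth

print(get_netrc_auth("http://www.126.com"))  # ('alice', 's3cret'), or None without the entry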
py_bing_search.py (project: Genum, author: la0rg)
def _search(self, limit, format):
        '''
        Returns a list of result objects, with the url for the next page bing search url.

        Image filters:
        Array of strings that filter the response the API sends based on size, aspect, color, style, face or
        any combination thereof. Valid values are: Size:Small, Size:Medium, Size:Large, Size:Width:[Width],
        Size:Height:[Height], Aspect:Square, Aspect:Wide, Aspect:Tall, Color:Color, Color:Monochrome, Style:Photo,
        Style:Graphics, Face:Face, Face:Portrait, Face:Other.
        '''
        url = self.QUERY_URL.format(requests.utils.quote("'{}'".format(self.query)), min(50, limit),
                                    self.current_offset, format,
                                    requests.utils.quote("'{}'".format(self.image_filters)))
        r = requests.get(url, auth=("", self.api_key))
        try:
            json_results = r.json()
        except ValueError as vE:
            if not self.safe:
                raise PyBingImageException("Request returned with code %s, error msg: %s" % (r.status_code, r.text))
            else:
                print("[ERROR] Request returned with code %s, error msg: %s. \nContinuing in 5 seconds." % (
                    r.status_code, r.text))
                time.sleep(5)
                # Retry after the pause; otherwise json_results below would be undefined.
                return self._search(limit, format)
        packaged_results = [ImageResult(single_result_json) for single_result_json in json_results['d']['results']]
        self.current_offset += min(50, limit, len(packaged_results))
        return packaged_results
api.py (project: PY-Login, author: PY-Trade)
def load_cookies(self, path):
        with open(path, 'rb') as f:
            self.session.cookies = requests.utils.cookiejar_from_dict(pickle.load(f))
api.py (project: PY-Login, author: PY-Trade)
def save_cookies(self, path):
        with open(path, 'wb') as f:
            cookies_dic = requests.utils.dict_from_cookiejar(self.session.cookies)
            pickle.dump(cookies_dic, f)

leecher.py (project: Iron, author: Alendi42)
def login():

    #cf = open('.cookie','r')
    if os.path.exists(cookie_file_name):

        cf = open(cookie_file_name,'r')
        cookies = json.load(cf)
        s.cookies.update(cookies)

        logging.info("Load cookies from cookie file: " + str(cookies))

        r = s.get(website+"/user/login",headers = headers)
        print("Old cookies:" + str(r.headers))
    else:
        user = config.get('user','id')
        password = config.get('user','password')
        logging.info("Login as " + user)

        url = website + '/User/Login/ajaxLogin'
        payload = 'account=%s&password=%s&from=loginpage&remember=0&url_back='%(user, password)
        r = s.post(url, headers=headers, data=payload)

        cookies = requests.utils.dict_from_cookiejar(r.cookies)
        logging.info("Login cookie " + str(cookies))
        print("New Cookies:" + str(cookies))

        with open(cookie_file_name,'w') as cf:
            json.dump(cookies, cf)
test_requests.py (project: Codeforces-Sublime-Plugin, author: karunk)
def test_html_charset(self):
        """HTML5 meta charset attribute"""
        content = '<meta charset="UTF-8">'
        encodings = requests.utils.get_encodings_from_content(content)
        assert len(encodings) == 1
        assert encodings[0] == 'UTF-8'
test_requests.py (project: Codeforces-Sublime-Plugin, author: karunk)
def test_html4_pragma(self):
        """HTML4 pragma directive"""
        content = '<meta http-equiv="Content-type" content="text/html;charset=UTF-8">'
        encodings = requests.utils.get_encodings_from_content(content)
        assert len(encodings) == 1
        assert encodings[0] == 'UTF-8'
test_requests.py (project: Codeforces-Sublime-Plugin, author: karunk)
def test_xhtml_pragma(self):
        """XHTML 1.x served with text/html MIME type"""
        content = '<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />'
        encodings = requests.utils.get_encodings_from_content(content)
        assert len(encodings) == 1
        assert encodings[0] == 'UTF-8'
test_requests.py (project: Codeforces-Sublime-Plugin, author: karunk)
def test_xml(self):
        """XHTML 1.x served as XML"""
        content = '<?xml version="1.0" encoding="UTF-8"?>'
        encodings = requests.utils.get_encodings_from_content(content)
        assert len(encodings) == 1
        assert encodings[0] == 'UTF-8'
test_requests.py (project: Codeforces-Sublime-Plugin, author: karunk)
def test_precedence(self):
        content = '''
        <?xml version="1.0" encoding="XML"?>
        <meta charset="HTML5">
        <meta http-equiv="Content-type" content="text/html;charset=HTML4" />
        '''.strip()
        encodings = requests.utils.get_encodings_from_content(content)
        assert encodings == ['HTML5', 'HTML4', 'XML']
test_requests.py (project: Codeforces-Sublime-Plugin, author: karunk)
def test_super_len_correctly_calculates_len_of_partially_read_file(self):
        """Ensure that we handle partially consumed file like objects."""
        from requests.utils import super_len
        from io import StringIO  # the original Python 2 test used StringIO.StringIO
        s = StringIO()
        s.write('foobarbogus')
        assert super_len(s) == 0
test_requests.py (project: Codeforces-Sublime-Plugin, author: karunk)
def test_get_environ_proxies_ip_ranges(self):
        """Ensures that IP addresses are correctly matches with ranges
        in no_proxy variable."""
        from requests.utils import get_environ_proxies
        os.environ['no_proxy'] = "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1"
        assert get_environ_proxies('http://192.168.0.1:5000/') == {}
        assert get_environ_proxies('http://192.168.0.1/') == {}
        assert get_environ_proxies('http://172.16.1.1/') == {}
        assert get_environ_proxies('http://172.16.1.1:5000/') == {}
        assert get_environ_proxies('http://192.168.1.1:5000/') != {}
        assert get_environ_proxies('http://192.168.1.1/') != {}
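The same behaviour outside the test harness: get_environ_proxies() returns the proxies from the environment unless the host is matched by a no_proxy entry, including CIDR ranges for IP hosts (the proxy address is a placeholder):

import os
from requests.utils import get_environ_proxies

os.environ["http_proxy"] = "http://proxy.local:3128"  # placeholder proxy
os.environ["no_proxy"] = "192.168.0.0/24,localhost"

print(get_environ_proxies("http://192.168.0.7/"))   # {} -- bypassed by the CIDR rule
print(get_environ_proxies("http://example.com/"))   # typically {'http': 'http://proxy.local:3128'}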

