Example source code for Python's LWPCookieJar() class
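
Most of the snippets on this page follow the same basic pattern: create a cookielib.LWPCookieJar (optionally bound to a file), wrap it in an HTTPCookieProcessor, and build or install a urllib2 opener around it so cookies survive across requests and across runs. A minimal, self-contained sketch of that pattern in Python 2 (the file name cookies.lwp and the example URL are placeholders, not taken from any project below):

import os
import cookielib
import urllib2

COOKIE_FILE = 'cookies.lwp'  # placeholder path for this sketch

# A jar that persists cookies in the libwww-perl ("Set-Cookie3") text format.
jar = cookielib.LWPCookieJar(COOKIE_FILE)
if os.path.exists(COOKIE_FILE):
    jar.load(ignore_discard=True, ignore_expires=True)

# Route every urllib2 request through this jar.
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
urllib2.install_opener(opener)

urllib2.urlopen('http://example.com/').read()

# Write back whatever cookies the server set, including session cookies.
jar.save(ignore_discard=True, ignore_expires=True)

(On Python 3 the same classes live in http.cookiejar and urllib.request.)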

google.py (project: darkc0de-old-stuff, author: tuwid)
def __init__(self, proxyHandler):
        self.__googleCookie = None
        self.__matches = []
        self.__cj = cookielib.LWPCookieJar()
        self.opener = urllib2.build_opener(proxyHandler, urllib2.HTTPCookieProcessor(self.__cj))
        self.opener.addheaders = conf.httpHeaders
login.py (project: weibo, author: windskyer)
def login(self, username, pwd, cookie_file):
        """"
            Login with use name, password and cookies.
            (1) If cookie file exists then try to load cookies;
            (2) If no cookies found then do login
        """
        # If cookie file exists then try to load cookies
        if os.path.exists(cookie_file):
            try:
                cookie_jar = cookielib.LWPCookieJar(cookie_file)
                cookie_jar.load(ignore_discard=True, ignore_expires=True)
                loaded = 1
            except cookielib.LoadError:
                loaded = 0
                LOG.info('Loading cookies error')

            # install loaded cookies for urllib2
            if loaded:
                cookie_support = urllib2.HTTPCookieProcessor(cookie_jar)
                opener = urllib2.build_opener(cookie_support,
                                              urllib2.HTTPHandler)
                urllib2.install_opener(opener)
                LOG.info('Loading cookies success')
                return 1
            else:
                return self.do_login(username, pwd, cookie_file)

        else:  # If no cookies found
            return self.do_login(username, pwd, cookie_file)
login.py (project: weibo, author: windskyer)
def save_cookie(self, text, cookie_file=CONF.cookie_file):
        cookie_jar2 = cookielib.LWPCookieJar()
        cookie_support2 = urllib2.HTTPCookieProcessor(cookie_jar2)
        opener2 = urllib2.build_opener(cookie_support2, urllib2.HTTPHandler)
        urllib2.install_opener(opener2)
        if six.PY3:
            text = text.decode('gbk')
        p = re.compile('location\.replace\(\'(.*?)\'\)')
        # The login response body (as can be observed with an HTTP sniffer such
        # as HttpFox) contains a JavaScript redirect of the form
        # location.replace('http://weibo.com/...'); the pattern above captures
        # that redirection URL, with the single quotes escaped in the regex.
        try:
            # Search login redirection URL
            login_url = p.search(text).group(1)
            data = urllib2.urlopen(login_url).read()
            # Verify login feedback, check whether result is TRUE
            patt_feedback = 'feedBackUrlCallBack\((.*)\)'
            p = re.compile(patt_feedback, re.MULTILINE)

            feedback = p.search(data).group(1)
            feedback_json = json.loads(feedback)
            if feedback_json['result']:
                cookie_jar2.save(cookie_file,
                                 ignore_discard=True,
                                 ignore_expires=True)
                return 1
            else:
                return 0
        except:
            return 0
weibo.py (project: weibo, author: windskyer)
def login(self, username, pwd, cookie_file):
        """"
            Login with use name, password and cookies.
            (1) If cookie file exists then try to load cookies;
            (2) If no cookies found then do login
        """
        #If cookie file exists then try to load cookies
        if os.path.exists(cookie_file):
            try:
                cookie_jar = cookielib.LWPCookieJar(cookie_file)
                cookie_jar.load(ignore_discard=True, ignore_expires=True)
                loaded = 1
            except cookielib.LoadError:
                loaded = 0
                print 'Loading cookies error'

            #install loaded cookies for urllib2
            if loaded:
                cookie_support = urllib2.HTTPCookieProcessor(cookie_jar)
                opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
                urllib2.install_opener(opener)
                print 'Loading cookies success'
                return 1
            else:
                return self.do_login(username, pwd, cookie_file)

        else:  #If no cookies found
            return self.do_login(username, pwd, cookie_file)
weibo.py (project: weibo, author: windskyer)
def login(self, username, pwd, cookie_file):
        """"
            Login with use name, password and cookies.
            (1) If cookie file exists then try to load cookies;
            (2) If no cookies found then do login
        """
        # If cookie file exists then try to load cookies
        if os.path.exists(cookie_file):
            try:
                cookie_jar = cookielib.LWPCookieJar(cookie_file)
                cookie_jar.load(ignore_discard=True, ignore_expires=True)
                loaded = 1
            except cookielib.LoadError:
                loaded = 0
                print('Loading cookies error')

            #install loaded cookies for urllib2
            if loaded:
                cookie_support = urllib2.HTTPCookieProcessor(cookie_jar)
                opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
                urllib2.install_opener(opener)
                print('Loading cookies success')
                return 1
            else:
                return self.do_login(username, pwd, cookie_file)

        else:  #If no cookies found
            return self.do_login(username, pwd, cookie_file)
rest_Utils.py (project: warriorframework, author: warriorframework)
def resolve_value_of_cookies(element):
    """ This function evaluates user input for cookies. If a file path is given,
     then a cookiejar object is created and the contents of the file are loaded
     into the object. This object is then returned.

    Otherwise, a dictionary is created from the string that was given, e.g.
    input = "foo=foo1; bar=bar1; ; =foobar; barfoo="
    return value = {'foo': 'foo1', 'bar': 'bar1'}

    If the dictionary is empty at the end of the function, None is returned.
    """
    if element is not None and element is not False and element != "":
        abs_path = file_Utils.getAbsPath(element, sys.path[0])
        if os.path.exists(abs_path):
            element = cookielib.LWPCookieJar(element)
            try:
                element.load()
            except cookielib.LoadError:
                pNote("Cookies could not be loaded from {}.".format(element),
                      "error")
                element = None
            except Exception as e:
                pNote("An Error Occurred: {0}".format(e), "error")
        else:
            element = convert_string_to_dict(element)
    else:
        element = None

    return element
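
When the argument is not a file path, the function above falls back to convert_string_to_dict, a warriorframework helper that is not shown here. Based purely on the docstring's example, a stand-in with the same observable behaviour might look like this (hypothetical reconstruction, not the project's actual code):

def convert_string_to_dict(text):
    # Hypothetical reconstruction of the helper used above: split a
    # "name=value; name=value" cookie string into a dict, dropping any
    # entry whose name or value is empty.
    result = {}
    for pair in text.split(';'):
        name, _, value = pair.strip().partition('=')
        if name and value:
            result[name] = value
    # Per the docstring above, an empty result means None is returned.
    return result if result else None
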
weibologin.py (project: SinaMicroblog_Creeper-Spider_VerificationCode, author: somethingx64)
def EnableCookie(self, enableProxy):
        #"Enable cookie & proxy (if needed)."        
        cookiejar = cookielib.LWPCookieJar()#construct cookie
        cookie_support = urllib2.HTTPCookieProcessor(cookiejar)

        if enableProxy:
            proxy_support = urllib2.ProxyHandler({'http':'http://xxxxx.pac'})#use proxy
            opener = urllib2.build_opener(proxy_support, cookie_support, urllib2.HTTPHandler)
            print ("Proxy enabled")
        else:
            opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
        urllib2.install_opener(opener)#construct cookie's opener
api.py (project: nem-downloader, author: nyanim)
def __init__(self):
        self.header = {
            'Accept': '*/*',
            'Accept-Encoding': 'gzip,deflate,sdch',
            'Accept-Language': 'zh-CN,zh;q=0.8,gl;q=0.6,zh-TW;q=0.4',
            'Connection': 'keep-alive',
            'Content-Type': 'application/x-www-form-urlencoded',
            'Host': 'music.163.com',
            'Referer': 'http://music.163.com/search/',
            'User-Agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36'  # NOQA
        }
        self.cookies = {'appver': '1.5.2'}
        self.playlist_class_dict = {}
        self.session = requests.Session()
        self.storage = Storage()
        self.session.cookies = LWPCookieJar(self.storage.cookie_path)
        try:
            self.session.cookies.load()
            self.file = file(self.storage.cookie_path, 'r')
            cookie = self.file.read()
            self.file.close()
            pattern = re.compile(r'\d{4}-\d{2}-\d{2}')
            str = pattern.findall(cookie)
            if str:
                if str[0] < time.strftime('%Y-%m-%d',
                                          time.localtime(time.time())):
                    self.storage.database['user'] = {
                        'username': '',
                        'password': '',
                        'user_id': '',
                        'nickname': '',
                    }
                    self.storage.save()
                    os.remove(self.storage.cookie_path)
        except IOError as e:
            log.error(e)
            self.session.cookies.save()
unifi_client.py (project: django-unifi-portal, author: bsab)
def __init__(self):
        """ Create a UnifiClient object. """

        self.version = settings.UNIFI_VERSION
        self.site_id = settings.UNIFI_SITE_ID
        self.__unifiUser = settings.UNIFI_USER
        self.__unifiPass = settings.UNIFI_PASSWORD
        self.__unifiServer = settings.UNIFI_SERVER
        self.__unifiPort = settings.UNIFI_PORT

        self.__cookie_file = "/tmp/unifi_cookie"

        # Use a Session object to handle cookies.
        self.__session = requests.Session()
        cj = cookielib.LWPCookieJar(self.__cookie_file)

        # Load existing cookies (file might not yet exist)
        try:
            cj.load()
        except:
            pass
        self.__session.cookies = cj

        # Use an SSLAdapter to work around SSL handshake issues.
        self.__session.mount(self._get_resource_url(), SSLAdapter(ssl.PROTOCOL_SSLv23))

        pass
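
The two requests-based snippets above (api.py and unifi_client.py) share a second pattern: an LWPCookieJar is assigned directly to session.cookies, loaded from disk when the file already exists, and saved again after a successful request so that the next run can skip logging in. A minimal sketch of that pattern (the URL and cookie path are placeholders, not taken from either project):

import os
import requests
from cookielib import LWPCookieJar  # http.cookiejar.LWPCookieJar on Python 3

COOKIE_FILE = '/tmp/example_cookies'  # placeholder path for this sketch

session = requests.Session()
session.cookies = LWPCookieJar(COOKIE_FILE)
if os.path.exists(COOKIE_FILE):
    # Reuse cookies from a previous run instead of authenticating again.
    session.cookies.load(ignore_discard=True, ignore_expires=True)

session.get('http://example.com/')

# Persist whatever the server set so the next run can pick it up.
session.cookies.save(ignore_discard=True, ignore_expires=True)
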
Opener.py (project: DistributeCrawler, author: SmallHedgehog)
def __init__(self, cookie_filename=None, timeout=None, **kwargs):
        self.cj = cookielib.LWPCookieJar()
        if cookie_filename is not None:
            self.cj.load(cookie_filename)
        self.cookie_processor = urllib2.HTTPCookieProcessor(self.cj)
        self.__build_opener()
        urllib2.install_opener(self.opener)

        if timeout is None:
            # self._default_timeout = socket._GLOBAL_DEFAULT_TIMEOUT
            # Set default timeout
            self._default_timeout = 5
        else:
            self._default_timeout = timeout
Opener.py (project: DistributeCrawler, author: SmallHedgehog)
def __init__(self, cookie_filename=None, user_agent=None, timeout=None, **kwargs):
        try:
            import mechanize
        except ImportError:
            raise DependencyNotInstalledError('mechanize')

        if user_agent is None:
            user_agent = 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)'

        self.browser = mechanize.Browser()
        self.cj = cookielib.LWPCookieJar()
        if cookie_filename is not None:
            self.cj.load(cookie_filename)
        self.browser.set_cookiejar(self.cj)
        self.browser.set_handle_equiv(True)
        self.browser.set_handle_gzip(True)
        self.browser.set_handle_redirect(True)
        self.browser.set_handle_referer(True)
        self.browser.set_handle_robots(False)
        self.browser.addheaders = [
            ('User-Agent', user_agent)
        ]

        if timeout is None:
            # self._default_timeout = mechanize._sockettimeout._GLOBAL_DEFAULT_TIMEOUT
            self._default_timeout = 5
        else:
            self._default_timeout = timeout
accountlib.py (project: weibo_scrawler_app, author: coolspiderghy)
def init(self, proxy=None):
        cj = cookielib.LWPCookieJar()
        cookie_support = urllib2.HTTPCookieProcessor(cj)
        if proxy:
            proxy_support = urllib2.ProxyHandler({'http': proxy})
            opener = urllib2.build_opener(proxy_support, cookie_support, urllib2.HTTPHandler)
        else:
            opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
        urllib2.install_opener(opener)
    #print 'seton'
downloader.py (project: weibo_scrawler_app, author: coolspiderghy)
def use_proxy(self, proxy):
        """
        ????????,??proxy?????????????:http://XX.XX.XX.XX:XXXX
        """
        cj = cookielib.LWPCookieJar()
        cookie_support = urllib2.HTTPCookieProcessor(cj)
        if proxy:
            proxy_support = urllib2.ProxyHandler({'http': proxy})
            opener = urllib2.build_opener(proxy_support, cookie_support, urllib2.HTTPHandler)
        else:
            opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
        urllib2.install_opener(opener)
getobj.py (project: spider, author: shancang)
def __init__(self, url):
        cookie_jar = cookielib.LWPCookieJar()
        cookie = urllib2.HTTPCookieProcessor(cookie_jar)
        self.opener = urllib2.build_opener(cookie)
        user_agent = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.84 Safari/537.36"
        self.url = url
        self.send_headers = {'User-Agent': user_agent}
test_cookielib.py (project: oil, author: oilshell)
def test_lwp_valueless_cookie(self):
        # cookies with no value should be saved and loaded consistently
        from cookielib import LWPCookieJar
        filename = test_support.TESTFN
        c = LWPCookieJar()
        interact_netscape(c, "http://www.acme.com/", 'boo')
        self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None)
        try:
            c.save(filename, ignore_discard=True)
            c = LWPCookieJar()
            c.load(filename, ignore_discard=True)
        finally:
            try: os.unlink(filename)
            except OSError: pass
        self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None)
test_cookielib.py (project: python2-tracer, author: extremecoders-re)
def test_lwp_valueless_cookie(self):
        # cookies with no value should be saved and loaded consistently
        from cookielib import LWPCookieJar
        filename = test_support.TESTFN
        c = LWPCookieJar()
        interact_netscape(c, "http://www.acme.com/", 'boo')
        self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None)
        try:
            c.save(filename, ignore_discard=True)
            c = LWPCookieJar()
            c.load(filename, ignore_discard=True)
        finally:
            try: os.unlink(filename)
            except OSError: pass
        self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None)
main.py (project: seu-jwc-catcher, author: LeonidasCl)
def get_verifycode():
    global cookie
    cookie = cookielib.LWPCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie), urllib2.HTTPHandler)
    urllib2.install_opener(opener)
    img = urllib2.urlopen('http://xk.urp.seu.edu.cn/jw_css/getCheckCode', timeout=8)
    f = open('verifycode.jpg', 'wb')
    f.write(img.read())
    f.close()
    return 0
UrllibCrawler.py (project: Crawler, author: xinhaojing)
def __init__(self, headers={}, debug=True, p=''):
        # timeout
        self.timeout = 10
        #cookie handler
        self.cookie_processor = urllib2.HTTPCookieProcessor(cookielib.LWPCookieJar())

        #debug handler
        self.debug = debug
        if self.debug:
            self.httpHandler = urllib2.HTTPHandler(debuglevel=1)
            self.httpsHandler = urllib2.HTTPSHandler(debuglevel=1)
        else:
            self.httpHandler = urllib2.HTTPHandler(debuglevel=0)
            self.httpsHandler = urllib2.HTTPSHandler(debuglevel=0)

        #proxy handler (http)
        if p != '' and p != 'None' and p is not None and p != 'NULL':
            self.proxy_handler = urllib2.ProxyHandler({'http': p})
        else:
            self.proxy_handler = urllib2.ProxyHandler({})

        #opener
        self.opener = urllib2.build_opener( self.cookie_processor,self.proxy_handler, self.httpHandler, self.httpsHandler)
        self.opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36'),]

        #header
        for key in headers.keys():
            cur = self._replace(key)
            if cur != -1:
                self.opener.addheaders.pop(cur)
            self.opener.addheaders += [(key, headers[key]), ]
common.py (project: script.quasar.t411-rik91, author: rik91)
def __init__(self):
        import cookielib

        self._cookies = None
        self.cookies = cookielib.LWPCookieJar()
        self.content = None
        self.status = None
crawler.py (project: crawler, author: dragonflylxp)
def install_cookiehandler():
    url = 'http://www.ticaihui.com/'
    c = cookielib.LWPCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(c))
    urllib2.install_opener(opener)
    req = urllib2.Request(url)
    resp = urllib2.urlopen(req)
