python类Error()的实例源码

pipelines.py 文件源码 项目:sbdspider 作者: onecer 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def insert_user(self, uid, name, pic):
    """Return the yzy_users primary-key id for user `uid`.

    Looks the user up first; if absent, inserts a new row with
    uname=`name` and avatar=`pic` and returns the new row id.
    Returns 0 when neither lookup nor insert succeeds.
    """
    userid = 0
    try:
        # Parameterized queries: the original %-interpolated SQL was
        # injectable through caller-supplied uid/name/pic.
        found = self.curosr.execute("SELECT * FROM yzy_users WHERE uid=%s", (uid,))
        if found == 1:
            row = self.curosr.fetchone()
            userid = row[0]
        else:
            sql = ("INSERT INTO yzy_users(uid,uname,avatar) "
                   "VALUES(%s,%s,%s)")
            if self.curosr.execute(sql, (uid, name, pic)) == 1:
                userid = self.curosr.lastrowid
    except MySQLdb.Error as e:
        print("Error:%d:%s" % (e.args[0], e.args[1]))
    return userid
recipe-578774.py 文件源码 项目:code 作者: ActiveState 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def populate_store(self, store):
    """Append one row per schema from INFORMATION_SCHEMA.SCHEMATA to `store`.

    On a database error a single descriptive row is appended instead.
    The connection is always closed in the `finally` block.
    """
    connection = None
    try:
        connection = MySQLdb.connect('localhost', 'annon', 'pass')
        cursor = connection.cursor()
        cursor.execute("Select * From `INFORMATION_SCHEMA`.`SCHEMATA`")
        for row in cursor.fetchall():
            store.append([row[0], row[1], row[2], row[3]])
    except MySQLdb.Error as e:
        # Surface the error through the UI store rather than crashing.
        store.append([str(e.args[0]), e.args[1], '', ''])
    finally:
        if connection is not None:
            connection.close()
md5db11.py 文件源码 项目:darkc0de-old-stuff 作者: tuwid 项目源码 文件源码 阅读 20 收藏 0 点赞 0 评论 0
def insertwl():
    """Insert every word from the global `wordlist` file into
    <dbname>.data as (plain, md5) rows, counting duplicate-key failures.

    Relies on module globals: wordlist, dbname, dbconnect, timer.
    Exits the process if the wordlist file cannot be opened.
    """
    try:
        words = open(wordlist, "r")
    except IOError:
        print("Error: check %s" % wordlist)
        sys.exit(1)
    dupes = 0
    print("Inserting Wordlist, Skipping Dupes....may take ages")
    print("\nStart : %s" % timer())
    for word in words.read().split('\n'):
        # hashlib replaces the removed Python 2 `md5` module.
        digest = hashlib.md5(word.encode('utf-8')).hexdigest()
        try:
            csr = dbconnect()
            # Parameterized values: the original string concatenation was
            # SQL-injectable via wordlist contents.  The table name cannot
            # be parameterized and must come from trusted config.
            csr.execute("INSERT INTO " + dbname + ".data (plain, md5) VALUES (%s, %s);",
                        (word, digest))
        except MySQLdb.Error:
            # Duplicate key (or other insert failure) — skip and count.
            dupes += 1
    words.close()
    print("\nDupes : %s" % dupes)
    print("\nDone  : %s" % timer())
linkpostioninserter.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 19 收藏 0 点赞 0 评论 0
def insert_pagelength(self, source_article_id, screen_positions_1920_1080, zip_file_path):
    """Insert one (id, page_length_1920_1080) row into page_length.

    `screen_positions_1920_1080` may be None, which is stored as SQL NULL
    by the parameterized query.  `zip_file_path` is only printed when the
    insert fails, to help locate the offending input file.
    """
    data = {
        'source_article_id': source_article_id,
        'page_length_1920_1080': screen_positions_1920_1080,
    }
    sql = "INSERT INTO page_length (id, page_length_1920_1080) VALUES" \
          "(%(source_article_id)s, %(page_length_1920_1080)s);"
    try:
        self.cursor.execute(sql, data)
    except (MySQLdb.Error, MySQLdb.Warning) as e:
        # The original logging-style '%s' placeholders were never
        # interpolated; format the message explicitly.
        print('FAIL: Data caused warning or error "%s" for source_article_id: "%s"'
              % (data, source_article_id))
        print('FAIL: EXCEPTION: %s' % e)
        print(zip_file_path)
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def resolve_redirect(self, name):
    """Resolve a redirect and return the real article name.

    @param name: the name of the redirect
    @return: the real name of the article, or None if it cannot be resolved
    """
    try:
        self._cursor.execute(
            'SELECT target_article_name FROM redirects WHERE source_article_name=%s;',
            (name,))
        row = self._cursor.fetchone()
        if row is not None:
            return row[0]
    except MySQLdb.Error as e:
        logging.error('error resolving redirect for name "%s": %s (%d)'
                      % (name.encode('ascii', 'ignore'), e.args[1], e.args[0]))
    return None
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def retrieve_all_articles(self):
    """Retrieve all articles; useful for crawling or MediaWiki API requests.

    @return: a list of dicts with keys:
       'id': the id of the retrieved article
       'rev_id': the revision id of the retrieved article
       'title': the title of the retrieved article
    """
    articles = []
    try:
        self._cursor.execute('SELECT * FROM articles;')
        for row in self._cursor.fetchall():
            articles.append({'id': row[0], 'rev_id': row[1], 'title': row[2]})
    except MySQLdb.Error as e:
        logging.error('error retrieving 1000 random articles  %s (%d)' % (e.args[1], e.args[0]))
    return articles
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 18 收藏 0 点赞 0 评论 0
def retrieve_all_unique_links(self):
    """Retrieve all unique links; these are the network edges.

    @return: a list of dicts with keys:
       'from': the source article id
       'to': the target article id
    """
    links = []
    try:
        self._cursor.execute('SELECT * FROM unique_links;')
        for row in self._cursor.fetchall():
            links.append({'from': row[0], 'to': row[1]})
    except MySQLdb.Error as e:
        logging.error('error retrieving unique links %s (%d)' % (e.args[1], e.args[0]))
    return links
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def retrieve_all_transitions(self):
    """Retrieve all internal-link transitions from clickstream_derived;
    these are the network edges.

    @return: a list of dicts with keys:
       'from': the source article id
       'to': the target article id
    """
    links = []
    try:
        self._cursor.execute(
            'SELECT * FROM clickstream_derived WHERE link_type_derived LIKE %s AND NOT link_type_derived=%s;',
            ("internal%", "internal-nonexistent",))
        for row in self._cursor.fetchall():
            links.append({'from': row[0], 'to': row[1]})
    except MySQLdb.Error as e:
        # Fixed copy-pasted message that claimed "unique links".
        logging.error('error retrieving transitions %s (%d)' % (e.args[1], e.args[0]))
    return links
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def retrieve_all_internal_transitions(self):
    """Retrieve all "internal-link" transitions from clickstream_derived;
    these are the network edges.

    @return: a list of dicts with keys:
       'from': the source article id
       'to': the target article id
    """
    links = []
    try:
        self._cursor.execute(
            'SELECT * FROM clickstream_derived WHERE link_type_derived=%s;',
            ("internal-link",))
        for row in self._cursor.fetchall():
            links.append({'from': row[0], 'to': row[1]})
    except MySQLdb.Error as e:
        # Fixed copy-pasted message that claimed "unique links".
        logging.error('error retrieving internal transitions %s (%d)' % (e.args[1], e.args[0]))
    return links
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def retrieve_all_internal_transitions_counts(self):
    """Retrieve all "internal-link" transitions from clickstream_derived,
    including their click counts; these are weighted network edges.

    @return: a list of dicts with keys:
       'from': the source article id
       'to': the target article id
       'counts': the number of observed transitions
    """
    links = []
    try:
        self._cursor.execute(
            'SELECT * FROM clickstream_derived WHERE link_type_derived=%s;',
            ("internal-link",))
        for row in self._cursor.fetchall():
            links.append({'from': row[0], 'to': row[1], 'counts': row[2]})
    except MySQLdb.Error as e:
        # Fixed copy-pasted message that claimed "unique links".
        logging.error('error retrieving internal transition counts %s (%d)' % (e.args[1], e.args[0]))
    return links
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def retrieve_all_links_coords(self):
    """Retrieve the 1920x1080 screen coordinates of every link.

    @return: a list of dicts with keys:
       'source_article_id': the wikipedia article id
       'x': x position on screen
       'y': y position on screen
    """
    coords = []
    try:
        self._cursor.execute('SELECT source_article_id, target_x_coord_1920_1080, target_y_coord_1920_1080 FROM links where target_x_coord_1920_1080 is not Null and target_y_coord_1920_1080 is not Null and target_x_coord_1920_1080!=0 and target_y_coord_1920_1080!=0 and source_article_id!=target_article_id;')
        for row in self._cursor.fetchall():
            coords.append({'source_article_id': row[0], 'x': row[1], 'y': row[2]})
    except MySQLdb.Error as e:
        logging.error('error retrieving xy coord for all links links %s (%d)' % (e.args[1], e.args[0]))
    return coords
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 21 收藏 0 点赞 0 评论 0
def retrieve_all_links_coords_clicks(self):
    """Retrieve screen coordinates, click counts and page length for every
    link that also appears in the clickstream.

    @return: a list of dicts with keys:
       'key': (source_article_id, target_article_id)
       'x', 'y': link position on a 1920x1080 screen
       'counts': observed click count
       'page_length': source page length at 1920x1080
    """
    coords = []
    try:
        self._cursor.execute('select l.source_article_id, l.target_article_id, l.target_x_coord_1920_1080, l.target_y_coord_1920_1080, c.counts, p.page_length_1920_1080 from links l, clickstream_derived c, page_length p where l.source_article_id=c.prev_id and l.target_article_id=c.curr_id and c.link_type_derived like %s and l.source_article_id = p.id and l.target_x_coord_1920_1080 is not Null and l.target_y_coord_1920_1080 is not Null  and l.target_x_coord_1920_1080!=0 and l.target_y_coord_1920_1080!=0  and l.source_article_id!=l.target_article_id;', ("internal%",))
        for row in self._cursor.fetchall():
            coords.append({
                'key': (row[0], row[1]),
                'x': row[2],
                'y': row[3],
                'counts': row[4],
                'page_length': row[5],
            })
    except MySQLdb.Error as e:
        logging.error('error retrieving xy coord for all links links %s (%d)' % (e.args[1], e.args[0]))
    return coords
mysqlworkview.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def retrieve_all_links_multpile_occ(self):
    """Retrieve screen coordinates for every link, keyed by the
    (source, target) article pair so multiple occurrences are visible.

    @return: a list of dicts with keys:
       'key': (source_article_id, target_article_id)
       'x': x position on screen
       'y': y position on screen
    """
    coords = []
    try:
        self._cursor.execute('SELECT source_article_id, target_article_id, target_x_coord_1920_1080, target_y_coord_1920_1080 FROM links where target_x_coord_1920_1080 is not Null and target_y_coord_1920_1080 is not Null and target_x_coord_1920_1080!=0 and target_y_coord_1920_1080!=0 and source_article_id!=target_article_id;')
        for row in self._cursor.fetchall():
            coords.append({'key': (row[0], row[1]), 'x': row[2], 'y': row[3]})
    except MySQLdb.Error as e:
        logging.error('error retrieving xy coord for all links links %s (%d)' % (e.args[1], e.args[0]))
    return coords
click_distributions.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def pickle_category_counts_distribution():
    """Collect click-count distributions per visual region, plus the overall
    clickstream counts, and pickle them to
    HOME + 'output/category_counts_distribution.obj'.
    """
    results = {}
    db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
    cursor = db.get_work_view()._cursor
    for category in ['lead', 'infobox', 'body', 'left-body', 'navbox']:
        try:
            cursor.execute('select counts from link_features where counts is not null and visual_region=%s;', (category,))
            results[category] = cursor.fetchall()
        except MySQLdb.Error as e:
            print(e)

    try:
        cursor.execute('select counts from clickstream_derived_internal_links;')
        results['counts'] = cursor.fetchall()
    except MySQLdb.Error as e:
        print(e)

    write_pickle(HOME + 'output/category_counts_distribution.obj', results)
click_distributions.py 文件源码 项目:wikilinks 作者: trovdimi 项目源码 文件源码 阅读 19 收藏 0 点赞 0 评论 0
def pickle_aggregated_counts_distribution():
    """Aggregate clickstream counts per source article and per target
    article, and pickle both distributions to
    HOME + 'output/aggregated_counts_distribution.obj'.
    """
    db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
    cursor = db.get_work_view()._cursor
    results = {}
    try:
        cursor.execute('select sum(counts) from clickstream_derived_internal_links group by prev_id;')
        results['source_article'] = cursor.fetchall()
    except MySQLdb.Error as e:
        print(e)

    try:
        cursor.execute('select sum(counts) from clickstream_derived_internal_links group by curr_id;')
        results['target_article'] = cursor.fetchall()
    except MySQLdb.Error as e:
        print(e)

    write_pickle(HOME + 'output/aggregated_counts_distribution.obj', results)
select.py 文件源码 项目:stackstorm-mysql 作者: StackStorm-Exchange 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def select(self, query, data, key):
    """Run `query` (optionally formatted with `data`) and return the
    formatted results.

    When `key` is given, the results are instead stored in the StackStorm
    datastore under that key and the key name is returned.
    Raises Exception on any database error.
    """
    q = query
    if data:
        # The helper renders the list into a string that fills the
        # query's format placeholder.
        q = q.format(self._list_to_string(data))

    cursor = self.db.cursor()
    try:
        cursor.execute(q)
        output = self._format_results(cursor)
        if key is not None:
            client = Client(base_url='http://localhost')
            client.keys.update(KeyValuePair(name=key, value=str(output)))
            return key
        return output
    except MySQLdb.Error as e:  # pylint: disable=no-member
        raise Exception(e)
router.py 文件源码 项目:dcmha 作者: wwwbjqcom 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
def CheckConn(self, port):
    """Check that a local MySQL server answers on `port`.

    Tries up to 3 times, sleeping 1s after each failure.
    Returns True on the first successful connect, None if all attempts fail.
    """
    state = None
    for _attempt in range(3):
        try:
            local_conn = MySQLdb.connect(host='127.0.0.1', user=mysql_user,
                                         passwd=mysql_password, port=int(port),
                                         db='', charset="utf8")
            local_conn.cursor()
            local_conn.close()
            state = True
            break
        except MySQLdb.Error as e:
            logging.error(e)
            state = None
        time.sleep(1)
    return state
dbHandle.py 文件源码 项目:dcmha 作者: wwwbjqcom 项目源码 文件源码 阅读 31 收藏 0 点赞 0 评论 0
def ChangeMaster(self, host, port):
    """Repoint replication of this node at `host`:`port`.

    Resets any existing slave config (stopping the slave first if the
    reset fails), then issues CHANGE MASTER with GTID auto-positioning.
    Returns True on success (including recoverable warnings), False on error.
    """
    repluser, replpassword, ssl_ca, ssl_cert, ssl_key = GetConf().GetReplAcount()
    try:
        sql = 'reset slave all;'
        print(self.host)
        try:
            self.mysql_cur.execute(sql)
        except Exception:
            # `reset slave all` fails while replication threads run;
            # stop them and retry once.
            self.mysql_cur.execute('stop slave')
            self.mysql_cur.execute(sql)
        change_sql = 'change master to master_host="%s",master_port=%s,master_user="%s",master_password="%s",master_auto_position=1 for channel "default"' % (host, int(port), repluser, replpassword)
        self.mysql_cur.execute(change_sql)
        return True
    except MySQLdb.Warning:
        # Warnings are treated as success: restart the slave read-only.
        self.mysql_cur.execute('start slave')
        self.mysql_cur.execute('set global read_only=1;')
        logging.warning('Change master to %s   state : Warning' % host)
        logging.warning(traceback.format_exc())
        return True
    except MySQLdb.Error:
        logging.error('Change master to %s   state : Error' % host)
        logging.error(traceback.format_exc())
        return False
db_opreation.py 文件源码 项目:com_top_whois_service 作者: h-j-13 项目源码 文件源码 阅读 18 收藏 0 点赞 0 评论 0
def db_connect(self):
    """Open the MySQL connection and cursor under the instance lock.

    Bug fixes vs. original: on a failed connect we now re-raise instead of
    silently continuing with a stale/absent `self.conn`, and the lock is
    always released via `finally` (the original leaked it on any error).
    """
    if self.db_lock.acquire():
        try:
            try:
                self.conn = MySQLdb.Connection(
                    host=self.host,
                    port=self.port,
                    user=self.user,
                    passwd=self.passwd,
                    charset=self.charset,
                    use_unicode=False)
            except MySQLdb.Error as e:
                log_db.error('connect error:' + str(e))
                raise
            self.cursor = self.conn.cursor()
            if not self.cursor:
                # Original `raise (NameError, msg)` tuple-raise is invalid
                # in Python 3; raise the exception directly.
                raise NameError("Connect failure")
            log_db.warning("???????")
        finally:
            self.db_lock.release()
db_opreation.py 文件源码 项目:com_top_whois_service 作者: h-j-13 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def execute_sql_value(self, sql, value):
    """Execute `sql` with parameter tuple `value`.

    On lost-connection errors (2013 / 2006) reconnects and retries once;
    other errors are logged and swallowed.
    """
    try:
        self.cursor.execute(sql, value)
    except MySQLdb.Error as e:
        if e.args[0] in (2013, 2006):  # connection lost; reconnect and retry
            self.db_connect()
            log_db.error('??,???????')
            # Bug fix: the original retry dropped `value` and re-ran the
            # bare SQL, corrupting parameterized statements.
            self.cursor.execute(sql, value)
        else:
            log_db.error('execute_no_return error:' + str(e))
            log_db.error('SQL : ' + sql)
db_opreation.py 文件源码 项目:com_top_whois_service 作者: h-j-13 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def execute_no_return(self, sql):
    """Execute `sql` (discarding any result set) under the instance lock.

    On lost-connection errors (2013 / 2006) the lock is released while
    db_connect() reconnects (it acquires the lock itself), then the
    statement is retried.  Returns 'execute success' or 'execute fail'.
    """
    log_db.info('??:' + str(sql[:127]))
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
        except MySQLdb.Error as e:
            if e.args[0] in (2013, 2006):  # connection lost
                self.db_lock.release()
                self.db_connect()
                log_db.error('??,???????')
                self.cursor.execute(sql)
                self.db_lock.acquire()
            else:
                log_db.error('execute_no_return error:' + str(e))
                log_db.error('SQL : ' + sql)
                self.db_lock.release()
                return 'execute fail'
        self.db_lock.release()
    return 'execute success'
db_opreation.py 文件源码 项目:com_top_whois_service 作者: h-j-13 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def execute_Iterator(self, sql, pretchNum=1000):
    """Execute `sql` and yield the result rows in batches.

    :param sql: SQL statement to run
    :param pretchNum: number of rows per yielded batch
    :return: generator of lists of rows; the final batch may be shorter
             (and may be empty when the row count divides evenly).
    """
    log_db.info('??:' + sql)
    batch = []
    try:
        total_rows = self.cursor.execute(sql)
        for _ in range(total_rows):
            batch.append(self.cursor.fetchone())
            if len(batch) == pretchNum:
                yield batch
                batch = []
        yield batch  # remaining partial batch
    except MySQLdb.Error as e:
        log_db.error('execute_Iterator error:' + str(e))
        log_db.error('SQL : ' + sql)
update_search_jianshu.py 文件源码 项目:jianshu-api 作者: strugglingyouth 项目源码 文件源码 阅读 68 收藏 0 点赞 0 评论 0
def insert_data(self, table, my_dict):
    """Insert `my_dict` as one row of `table`.

    Returns 1 on success, 0 when the driver reports no affected rows,
    None on a handled database error (after rollback).
    """
    try:
        cols = ','.join(my_dict.keys())
        # Values are passed as driver parameters: the original quoted
        # string interpolation was SQL-injectable.  Table and column
        # names cannot be parameterized and must be trusted by callers.
        placeholders = ','.join(['%s'] * len(my_dict))
        try:
            sql = "insert into %s (%s) values(%s)" % (table, cols, placeholders)
            result = self.cur.execute(sql, list(my_dict.values()))
            self.db.commit()
            if result:
                return 1
            else:
                return 0
        except MySQLdb.Error as e:
            self.db.rollback()
            if "key 'PRIMARY'" in e.args[1]:
                print(Fore.RED + self.get_current_time(), "???????????")
            else:
                print(Fore.RED + self.get_current_time(), "????????? %d: %s" % (e.args[0], e.args[1]))
    except MySQLdb.Error as e:
        print(Fore.RED + self.get_current_time(), "????????%d: %s" % (e.args[0], e.args[1]))
mysql.py 文件源码 项目:lquant 作者: squall1988 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def insert_feature_data(self, database_name, data, id):
    """Insert per-date feature rows into table `database_name`.

    :param data: ndarray or DataFrame whose rows look like (date, f1, f2, ...)
    :param id: identifier stored in the first column of every row
    Returns None (early) when `data` has no rows; commits at the end.
    NOTE(review): the SQL is still built by string formatting — callers
    must pass trusted `database_name`/`id`/feature values.
    """
    if isinstance(data, pd.DataFrame):
        data = data.values
    if data.shape[0] == 0:
        return None

    sql_line = '''insert into %s values('{id}', '{date}', {data} )''' % (database_name, )
    for row in data:
        try:
            feature_str = ','.join([str(x) for x in row[1:]])
            line = sql_line.format(id=id, date=row[0], data=feature_str)
            self.cur.execute(line)
        except MySQLdb.Error as e:
            # Report the failing row instead of silently printing noise.
            print('insert_feature_data failed: %s' % e)
    self.db.commit()
warden_server.py 文件源码 项目:STaaS 作者: CESNET 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def __exit__(self, exc_type, exc_val, exc_tb):
    """ Context manager protocol. If db exception is fired and
        self.retry_attempt is not zero, it is only logged and
        does not propagate, otherwise it propagates up. Also
        open transaction is rolled back.
        In case of no exception, transaction gets commited.
    """
    if not exc_type:
        # Clean exit: commit the open transaction and reset the retry budget.
        self.con.commit()
        self.retry_attempt = 0
    else:
        # Error path: roll back and close best-effort; DB errors raised by
        # the cleanup itself are deliberately ignored so they do not mask
        # the original exception.  (`my` is presumably the MySQLdb module
        # alias imported elsewhere in this file — confirm.)
        try:
            if self.con:
                self.con.rollback()
        except my.Error:
            pass
        try:
            self.close()
        except my.Error:
            pass
        if self.retry_attempt:
            # Attempts remain: log and return True so the exception is
            # swallowed and the caller can retry.  With no attempts left,
            # falling through returns None and the exception propagates.
            self.log.info("Database error (%d attempts left): %s %s" % (self.retry_attempt, exc_type.__name__, exc_val))
            return True
seriesdbutils.py 文件源码 项目:imdb_pyscraper 作者: avraampiperidis 项目源码 文件源码 阅读 18 收藏 0 点赞 0 评论 0
def insert_season(globalmovieid, imdb, seasonlink, season):
    """Insert one Season row, then scrape the season page at `seasonlink`
    and insert every episode found via insert_episode().
    """
    db = getCursor()
    cur = db.cursor()
    sql = "insert into Season(movieid,season,link) values(%s,%s,%s)"
    try:
        cur.execute(sql, [globalmovieid, season, seasonlink])
        db.commit()
    except MySQLdb.Error as e:
        print(e)

    page = requests.get(seasonlink, headers=headers)
    tree = html.fromstring(page.content)
    tree.make_links_absolute(seasonlink)

    images = tree.xpath('//div[@class="list detail eplist"]//div[@class="image"]//img/@src')
    titles = tree.xpath('//div[@class="list detail eplist"]//div[@class="info"]//strong/a/@title')
    plot = tree.xpath('//div[@class="list detail eplist"]//div[@class="info"]//div[@class="item_description"]/text()')

    print(len(images), len(titles), len(plot))
    # range() replaces the removed Python 2 xrange().
    for i in range(len(titles)):
        insert_episode(globalmovieid, imdb, season, titles[i].strip(), images[i].strip(), plot[i].strip(), i + 1)
seriesdbutils.py 文件源码 项目:imdb_pyscraper 作者: avraampiperidis 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def insert_series_into_movie(imdb, globalmovieid, title, genre, content_rating, ratings, rating_value, plot, poster):
    """Insert one Movie row for a TV series.

    Falls back to rating_value=5 and content_rating="R" when missing;
    commits on success, rolls back and prints the error on failure.
    """
    db = getCursor()
    cur = db.cursor()
    if not rating_value:
        rating_value = 5
    if not content_rating:
        content_rating = "R"
    title = remove_all_special_chars(title)
    plot = remove_all_special_chars(plot)
    # Parameterized insert: the original quoted %-interpolation was
    # SQL-injectable through scraped titles/plots.
    sql = ("insert into Movie(movieid,imdbid,title,plot,altplot,genre,ratings,"
           "ratingvalue,contentrating,poster) "
           "values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
    params = (globalmovieid, imdb, title.strip(), plot.strip(), " ", genre,
              int(ratings), float(rating_value), content_rating, poster)
    try:
        cur.execute(sql, params)
        db.commit()
    except MySQLdb.Error as e:
        db.rollback()
        print(e)
    db.close()
DomainVerify.py 文件源码 项目:Malicious_Domain_Whois 作者: h-j-13 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def execute(self, sql):
    """Execute `sql` under the instance lock and return fetchall() rows.

    On lost-connection errors (2013 / 2006) the lock is released while
    get_connect() reconnects, then the statement is retried recursively.
    Returns None on empty result or unrecoverable error.
    """
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
        except MySQLdb.Error as e:
            if e.args[0] in (2013, 2006):  # connection lost
                self.db_lock.release()
                self.get_connect()
                print(str(datetime.datetime.now()).split(".")[0], '???????')
                result = self.execute(sql)  # retry after reconnecting
                self.db_lock.acquire()
            else:
                print(str(datetime.datetime.now()).split(".")[0], "ERROR %d: %s" % (e.args[0], e.args[1]))
        self.db_lock.release()
    return result if result else None

    #??SQL?? ???????????????
Time_test.py 文件源码 项目:Malicious_Domain_Whois 作者: h-j-13 项目源码 文件源码 阅读 19 收藏 0 点赞 0 评论 0
def execute(self, sql):
    """Execute `sql` under the instance lock and return fetchall() rows.

    On lost-connection errors (2013 / 2006) the lock is released while
    get_connect() reconnects, then the statement is retried recursively.
    Returns None on empty result or unrecoverable error.
    """
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
        except MySQLdb.Error as e:
            if e.args[0] in (2013, 2006):  # connection lost
                self.db_lock.release()
                self.get_connect()
                print(str(datetime.datetime.now()).split(".")[0], '???????')
                result = self.execute(sql)  # retry after reconnecting
                self.db_lock.acquire()
            else:
                print(str(datetime.datetime.now()).split(".")[0], "ERROR %d: %s" % (e.args[0], e.args[1]))
        self.db_lock.release()
    return result if result else None
db_manage.py 文件源码 项目:Malicious_Domain_Whois 作者: h-j-13 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def __init__(self, dbconfig=DBCONFIG):
    """Open a MySQL connection described by `dbconfig`.

    On connection failure, waits 5 seconds and retries until the
    accumulated wait (`self._timecount`) exceeds `self._TIMEOUT`,
    then raises.  On success a cursor is created on the connection.
    """
    try:
        self._conn = MySQLdb.connect(host=dbconfig['host'],
                                     port=dbconfig['port'],
                                     user=dbconfig['user'],
                                     passwd=dbconfig['passwd'],
                                     db=dbconfig['db'],
                                     charset=dbconfig['charset'])
    except MySQLdb.Error as e:
        self.error_code = e.args[0]
        error_msg = 'MySQL error! ', e.args[0], e.args[1]
        print(error_msg)

        # Retry every `interval` seconds until the total wait exceeds
        # the timeout, then give up and raise.
        if self._timecount < self._TIMEOUT:
            interval = 5
            self._timecount += interval
            time.sleep(interval)
            return self.__init__(dbconfig)
        else:
            raise Exception(error_msg)

    self._cur = self._conn.cursor()
    self._instance = MySQLdb


问题


面经


文章

微信
公众号

扫码关注公众号