def get_all_achievements(event=None, show_disabled=False):
    """
    Gets all of the achievements in the database.

    Args:
        event: Optional parameter to restrict which achievements are returned
        show_disabled: Boolean indicating whether or not to show disabled achievements.
    Returns:
        List of achievements from the database
    """
    db = api.common.get_conn()

    match = {}
    if event is not None:
        match.update({'event': event})
    if not show_disabled:
        match.update({'disabled': False})

    return list(db.achievements.find(match, {"_id": 0}).sort('score', pymongo.ASCENDING))
def get_all_problems(category=None, show_disabled=False):
    """
    Gets all of the problems in the database.

    Args:
        category: Optional parameter to restrict which problems are returned
        show_disabled: Boolean indicating whether or not to show disabled problems.
    Returns:
        List of problems from the database
    """
    db = api.common.get_conn()

    match = {}
    if category is not None:
        match.update({'category': category})
    if not show_disabled:
        match.update({'disabled': False})

    return list(db.problems.find(match, {"_id": 0}).sort('score', pymongo.ASCENDING))
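Both helpers build the filter dict incrementally, drop the MongoDB _id field through the projection, and sort by score ascending. A minimal standalone sketch of the same pattern, assuming a local MongoDB instance and a hypothetical "ctf" database (neither is part of the original code, which gets its connection from api.common.get_conn()):

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
db = client["ctf"]  # hypothetical database name

def find_enabled_sorted(collection, extra_filter=None):
    """Return enabled documents sorted by ascending score, excluding _id."""
    match = {"disabled": False}
    if extra_filter:
        match.update(extra_filter)
    return list(collection.find(match, {"_id": 0}).sort("score", pymongo.ASCENDING))

# Usage: enabled problems in the "crypto" category, lowest score first.
crypto_problems = find_enabled_sorted(db.problems, {"category": "crypto"})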
def update_ttl(ttl, ttl_index_name, index_field, coll):
    """Update or create time_to_live indexes.

    :param ttl: time to live in seconds.
    :param ttl_index_name: name of the index we want to update or create.
    :param index_field: field with the index that we need to update.
    :param coll: collection which indexes need to be updated.
    """
    indexes = coll.index_information()
    if ttl <= 0:
        if ttl_index_name in indexes:
            coll.drop_index(ttl_index_name)
        return

    if ttl_index_name in indexes:
        return coll.database.command(
            'collMod', coll.name,
            index={'keyPattern': {index_field: pymongo.ASCENDING},
                   'expireAfterSeconds': ttl})

    coll.create_index([(index_field, pymongo.ASCENDING)],
                      expireAfterSeconds=ttl,
                      name=ttl_index_name)
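update_ttl handles all three cases: it drops the TTL index when the TTL is disabled, adjusts expireAfterSeconds in place through the collMod command when the index already exists, and creates the index otherwise. A hedged usage sketch, assuming a local MongoDB instance and an illustrative "metering.events" collection:

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
events = client["metering"]["events"]  # hypothetical collection

# Expire event documents one hour after their 'timestamp' value.
update_ttl(ttl=3600, ttl_index_name="events_ttl", index_field="timestamp", coll=events)

# Setting ttl <= 0 later drops the TTL index again.
update_ttl(ttl=0, ttl_index_name="events_ttl", index_field="timestamp", coll=events)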
def get_transactions(self, from_date=None, to_date=None, only_new=True):
    """
    Retrieve transactions for producing text file
    """
    query = {}
    if only_new:
        query['plaid2text.pulled_to_file'] = False

    if from_date and to_date and (from_date < to_date):
        query['date'] = {'$gte': from_date, '$lte': to_date}
    elif from_date and not to_date:
        query['date'] = {'$gte': from_date}
    elif not from_date and to_date:
        query['date'] = {'$lte': to_date}

    transactions = self.account.find(query).sort('date', ASCENDING)
    return transactions
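The branches simply translate the optional bounds into a $gte/$lte range query on date. A standalone sketch of the resulting query, assuming a local MongoDB instance and an illustrative "plaid2text.transactions" collection:

import datetime
import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
txns = client["plaid2text"]["transactions"]  # hypothetical collection

query = {
    "plaid2text.pulled_to_file": False,
    "date": {"$gte": datetime.datetime(2017, 1, 1),
             "$lte": datetime.datetime(2017, 3, 31)},
}
# Oldest transactions first, as in get_transactions above.
for txn in txns.find(query).sort("date", pymongo.ASCENDING):
    print(txn.get("date"), txn.get("amount"))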
def get_risk_free_rate(self, start_date, end_date):
    mongo_dates = self._yield_curve['dates'].find({}, {"_id": 0}).sort('date', pymongo.ASCENDING)
    _dates = np.array([np.uint32(d['date']) for d in mongo_dates])

    tenor = risk_free_helper.get_tenor_for(start_date, end_date)
    tenor = tenor[-1] + tenor[:-1]
    mongo_data = self._yield_curve[tenor].find({}, {"_id": 0})
    _table = np.array([d['data'] for d in mongo_data])

    d = start_date.year * 10000 + start_date.month * 100 + start_date.day
    pos = _dates.searchsorted(d)
    if pos > 0 and (pos == len(_dates) or _dates[pos] != d):
        pos -= 1

    while pos >= 0 and np.isnan(_table[pos]):
        # data is missing ...
        pos -= 1

    return _table[pos]
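The lookup relies on the dates being stored in ascending order: searchsorted finds the insertion point of the encoded YYYYMMDD integer, the code steps back one slot when the exact date is missing, and then skips backwards over NaN entries. A small sketch of that lookup with made-up data:

import numpy as np

# Sorted trading dates encoded as YYYYMMDD integers (illustrative values).
dates = np.array([20170103, 20170104, 20170106], dtype=np.uint32)
rates = np.array([0.0281, 0.0283, np.nan])

d = 20170107                   # requested date, later than the last entry
pos = dates.searchsorted(d)    # insertion point: 3
if pos > 0 and (pos == len(dates) or dates[pos] != d):
    pos -= 1                   # fall back to the latest date on or before d
while pos >= 0 and np.isnan(rates[pos]):
    pos -= 1                   # skip missing data
print(dates[pos], rates[pos])  # 20170104 0.0283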
def available_data_range(self, frequency):
    """
    Return the date range for which data is available.

    :param str frequency: bar frequency, `1d` for daily bars, `1m` for minute bars
    :return: (earliest, latest)
    """
    if frequency == '1d':
        mongo_data = self._day_bars[self.INSTRUMENT_TYPE_MAP['INDX']]['000001.XSHG'].find({}, {"_id": 0}).sort('date', pymongo.ASCENDING)
        mongo_data = list(mongo_data)
        s, e = np.uint32(mongo_data[0]['date']), np.uint32(mongo_data[-1]['date'])
        return convert_int_to_date(s).date(), convert_int_to_date(e).date()

    if frequency == '1m':
        raise NotImplementedError
def get_symbol(self):
    # Fetch the basic stock list and store each record in the symbol database.
    df = fc.get_stock_basics_data()
    for row in range(0, df.shape[0]):
        item = {
            'code': str(df.index[row]),
            'name': str(df.iat[row, 0]),
            'industry': str(df.iat[row, 1]),
            'area': str(df.iat[row, 2]),
            'timeToMarket': str(df.iat[row, 14])
        }
        try:
            self.Symbol_Db['equity'].insert(item)
        except:
            # Ignore failed inserts (e.g. duplicate records).
            pass
    self.Symbol_Db['equity'].ensure_index([('code', pymongo.ASCENDING)])
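ensure_index (and the plain insert call above) are deprecated in current PyMongo; create_index and insert_one are the supported equivalents, and create_index is idempotent, so it can be called on every run just like ensure_index. A hedged sketch with an illustrative collection; the unique=True constraint is an added assumption, not part of the original code:

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
equity = client["symbols"]["equity"]  # hypothetical collection

# Idempotent: re-running with the same spec is a no-op.
equity.create_index([("code", pymongo.ASCENDING)], unique=True)  # unique is an added assumption

try:
    equity.insert_one({"code": "600000", "name": "SPD Bank"})
except pymongo.errors.DuplicateKeyError:
    pass  # same role as the bare except/pass above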
def get_all_problems_for_admin(**options):
    """Returns all problems.

    Args:
        **options: Options passed to query.
    Returns:
        A list of problem dictionaries.
    """
    cursor = _db.problems.find(
        {},
        sort=[('_id', pymongo.ASCENDING)],
        **options)
    problems = list(cursor)
    enhance_problems_for_admin(problems)
    return problems
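Unlike the earlier snippets, this one passes the sort specification to find() as a keyword argument; limit, skip and other cursor options can be forwarded the same way via **options. A hedged standalone sketch with an illustrative collection:

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
problems = client["ctf"]["problems"]  # hypothetical collection

# find() accepts sort/limit/skip as keyword arguments as well as cursor methods.
cursor = problems.find({}, sort=[("_id", pymongo.ASCENDING)], limit=20, skip=0)
print(list(cursor))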
def extract(self):  # TODO : Should be an exporter plugin
    graph = {
        'meta': {},        # self.__meta,
        'properties': {}   # self.__properties
    }

    graph['nodes'] = list()
    for v in self.__vertices.find().sort('id', pymongo.ASCENDING):
        v.pop("_id")  # Remove MongoDB document ID
        graph['nodes'].append(v)

    graph['edges'] = list()
    for e in self.__edges.find().sort("src", pymongo.ASCENDING):
        e.pop("_id")  # Remove MongoDB document ID
        graph['edges'].append(e)

    graph['tokens'] = list()
    for t in self.__tokens.find().sort('id', pymongo.ASCENDING):
        t.pop("_id")  # Remove MongoDB document ID
        t['id'] = str(t['id'])
        t['ts'] = time.mktime(t['ts'].timetuple())
        graph['tokens'].append(t)

    return graph
def open_spider(self, spider):
    logging.warning('opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('failed to connect to MongoDB')

    # If the collection does not exist yet, create its indexes.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise, remember the creation time of the most recent stored record.
        recent_row = list(self.db[self.mongo_col].find({'title': {'$eq': None}}, projection=['created_at'],
                                                       limit=1, sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']  # most recent record time
            logging.warning("most recent record was created at %s" % (
                self.recent + datetime.timedelta(hours=8)).__str__())
def open_spider(self, spider):
    logging.warning('opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('failed to connect to MongoDB')

    # If the collection does not exist yet, create its indexes.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise, remember the creation time of the most recent stored record.
        recent_row = list(self.db[self.mongo_col].find({'title': {'$ne': None}}, projection=['created_at'],
                                                       limit=1, sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']  # most recent record time
            logging.warning("most recent record was created at %s" % (
                self.recent + datetime.timedelta(hours=8)).__str__())
def open_spider(self, spider):
    logging.warning('opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('failed to connect to MongoDB')

    # If the collection does not exist yet, create its indexes.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise, remember the creation time of the most recent stored record.
        recent_row = list(self.db[self.mongo_col].find(projection=['created_at', '_id'],
                                                       limit=1, sort=[('created_at', pymongo.DESCENDING)]))
        self.recent = recent_row[0]['created_at']  # most recent record time
        logging.warning("most recent record was created at %s" % (self.recent + datetime.timedelta(hours=8)).__str__())
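All three variants fetch the newest stored record by sorting on created_at descending with limit=1. With PyMongo the same thing can be written with find_one, which accepts the same sort argument and returns None for an empty collection. A hedged sketch with an illustrative collection:

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
posts = client["weibo"]["posts"]  # hypothetical collection

latest = posts.find_one({}, projection=["created_at"],
                        sort=[("created_at", pymongo.DESCENDING)])
if latest:
    print("most recent record:", latest["created_at"])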
def get_comment_list(share_id, skip=0, limit=None):
    cursor = ShareCommentDocument.find({
        'share': DBRef(
            ShareDocument.meta['collection'],
            ObjectId(share_id)
        )
    }).sort([('comment_time', pymongo.ASCENDING)]).skip(skip)

    if limit is not None:
        cursor = cursor.limit(limit)

    comment_list = yield ShareCommentDocument.to_list(cursor)
    for i, comment in enumerate(comment_list):
        comment['floor'] = skip + 1 + i
        comment['author'] = yield UserDocument.translate_dbref(
            comment['author']
        )
        if 'replyeder' in comment:
            comment['replyeder'] = yield UserDocument.translate_dbref(
                comment['replyeder']
            )

    raise gen.Return(comment_list)
def get_comment_list(topic_id, skip=0, limit=None):
    cursor = TopicCommentDocument.find({
        'topic': DBRef(TopicDocument.meta['collection'], ObjectId(topic_id))
    }).sort([('comment_time', pymongo.ASCENDING)]).skip(skip)

    if limit is not None:
        cursor = cursor.limit(limit)

    comment_list = yield TopicCommentDocument.to_list(cursor)
    for i, comment in enumerate(comment_list):
        comment['floor'] = skip + 1 + i
        comment['author'] = yield UserDocument.translate_dbref(
            comment['author']
        )
        if 'replyeder' in comment:
            comment['replyeder'] = yield UserDocument.translate_dbref(
                comment['replyeder']
            )

    raise gen.Return(comment_list)
def select(self, count=None, conditions=None):
    if count:
        count = int(count)
    else:
        count = 0

    if conditions:
        conditions = dict(conditions)
        conditions_name = ['types', 'protocol']
        for condition_name in conditions_name:
            value = conditions.get(condition_name, None)
            if value:
                conditions[condition_name] = int(value)
    else:
        conditions = {}

    items = self.proxys.find(conditions, limit=count).sort(
        [("speed", pymongo.ASCENDING), ("score", pymongo.DESCENDING)])
    results = []
    for item in items:
        result = (item['ip'], item['port'], item['score'])
        results.append(result)
    return results
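The sort here combines two keys: fastest proxies first (speed ascending), with ties broken by the highest score. Note that limit=0 in PyMongo means "no limit", which is why the count=0 default returns everything. A standalone sketch of a compound sort with an illustrative collection:

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
proxies = client["proxypool"]["proxys"]  # hypothetical collection

cursor = proxies.find({"protocol": 0}, limit=0).sort(
    [("speed", pymongo.ASCENDING), ("score", pymongo.DESCENDING)])
for doc in cursor:
    print(doc["ip"], doc["port"], doc["score"])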
async def list(self, request):
    await require(request, Permissions.view)
    possible_fields = [k.name for k in self._schema.keys]
    q = validate_query(request.GET, possible_fields)
    paging = calc_pagination(q, self._primary_key)

    filters = q.get('_filters')
    query = {}
    if filters:
        query = create_filter(filters, self._schema)

    sort_direction = ASCENDING if paging.sort_dir == ASC else DESCENDING
    cursor = (self._collection.find(query)
              .skip(paging.offset)
              .limit(paging.limit)
              .sort(paging.sort_field, sort_direction))

    entities = await cursor.to_list(paging.limit)
    count = await self._collection.find(query).count()
    headers = {'X-Total-Count': str(count)}
    return json_response(entities, headers=headers)
def getJudgementDetail():
    jd_collection = db.JudgmentDoc_isExactlySame
    query = jd_collection.find({}, {'_id': 0, 'Id': 1}).sort("Id", pymongo.ASCENDING)
    idList = list(query)
    length = len(idList)

    collection = db.JudgementDetail
    query = collection.find({}, {'_id': 0, 'Id': 1}).sort("_id", pymongo.DESCENDING).limit(3)
    record_id = list(query)
    print(record_id)

    ii = idList.index(record_id[0])
    for i in range(ii + 1, ii + 30000):
        print("%d/%d\t%s" % (i, length, idList[i]['Id']))
        ret = JudgementDetail(idList[i]['Id'])
        if ret in err_code:
            print('err_code: %s' % ret)
            break
    return ret
def getJudgementDetail():
    jd_collection = db.JudgmentDoc_isExactlySame
    query = jd_collection.find({}, {'_id': 0, 'Id': 1}).sort("Id", pymongo.ASCENDING)
    idList = list(query)
    length = len(idList)

    collection = db.JudgementDetail
    query = collection.find({}, {'_id': 0, 'Id': 1}).sort("_id", pymongo.DESCENDING).limit(10)
    record_id = list(query)

    ii = 0
    for id in idList:  # [:10]:
        ii = ii + 1
        print("%d/%d\t%s" % (ii, length, id['Id']))
        ret = JudgementDetail(id['Id'])
        if ret in ['101', '102', '103', '104', '105', '107', '108', '109', '110', '199']:
            break
def ensure_index(cls):
    super().ensure_index()
    if not cls.COLLECTION_NAME:
        return

    collection = cls.collection()
    collection.create_index(
        [
            ("is_latest", pymongo.DESCENDING)
        ],
        name="index_latest",
        partialFilterExpression={"is_latest": True}
    )
    collection.create_index(
        [
            ("model_id", pymongo.ASCENDING),
            ("version", pymongo.ASCENDING)
        ],
        name="index_unique_version",
        unique=True
    )
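The first index is a partial index: partialFilterExpression restricts it to documents where is_latest is True, which keeps the index small when most versions are historical. The second enforces uniqueness of each (model_id, version) pair. A hedged standalone sketch with an illustrative collection:

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
models = client["registry"]["models"]  # hypothetical collection

# Partial index: only documents with is_latest == True are indexed.
models.create_index([("is_latest", pymongo.DESCENDING)],
                    name="index_latest",
                    partialFilterExpression={"is_latest": True})

# Compound unique index: each (model_id, version) pair may appear only once.
models.create_index([("model_id", pymongo.ASCENDING),
                     ("version", pymongo.ASCENDING)],
                    name="index_unique_version",
                    unique=True)

models.insert_one({"model_id": "m1", "version": 1, "is_latest": True})
try:
    models.insert_one({"model_id": "m1", "version": 1, "is_latest": False})
except pymongo.errors.DuplicateKeyError:
    print("duplicate (model_id, version) rejected")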
def verify_batch(start=0, limitN=7000, verbose=False):
    from lmfdb import getDBconnection
    import pymongo
    C = getDBconnection()
    i = 0
    bound = 0
    label = None
    for curve in C.genus2_curves.curves.find().sort([("cond", pymongo.ASCENDING), ("label", pymongo.ASCENDING)]).limit(limitN).skip(start):
        label = curve['label']
        Lhash = curve['Lhash']
        q, rendo, reuler = verify_curve_lmfdb(label, Lhash)
        if not q:
            print "FAILED at label = %s" % label
        if verbose:
            print label, q
        i += 1
        if int(100.0 * i / limitN) >= bound:
            print "%s%%\t %s / 66158\t at label = %s" % (int(100.0 * i / limitN), start + i, label)
            bound += 1
    print "Done from %s to %s / 66158\t at label = %s" % (start + 1, start + i, label)
def update_setting(self, setting):
    try:
        self.setting_list.remove(setting)
    except ValueError:
        pass

    while len(self.setting_list) == 0:
        result = self.collection.find().sort('speed', pymongo.ASCENDING)
        for one in result:
            setting = {
                # 'proxy': one,
                'cookies': "".join(random.sample(string.ascii_letters + string.digits, 11)),
                'agent': random.choice(AGENTS_ALL)
            }
            # self.collection.remove(one)
            self.setting_list.append(setting)
        if len(self.setting_list) == 0:
            log.info('update setting failed, sleep....')
            time.sleep(self.db_cycle_time)
        else:
            log.info('update setting succeed, get new setting {}.'.format(len(self.setting_list)))
def update_setting(self, setting):
    try:
        self.setting_list.remove(setting)
    except ValueError:
        pass

    while len(self.setting_list) == 0:
        result = self.collection.find().sort('speed', pymongo.ASCENDING)
        for one in result:
            setting = {
                # 'proxy': one,
                'cookies': "".join(random.sample(string.ascii_letters + string.digits, 11)),
                'agent': random.choice(AGENTS_ALL)
            }
            self.setting_list.append(setting)
        if len(self.setting_list) == 0:
            log.info('update setting failed, sleep....')
            time.sleep(self.db_cycle_time)
        else:
            log.info('update setting succeed, get new setting {}.'.format(len(self.setting_list)))
def get_activities(self, count=10, conversation_id=None, simple=False):
    last_id = self._get_last_id()
    if count == -1:
        first_id = 0
    else:
        first_id = last_id - count
    if first_id < 0:
        first_id = 0

    if count == -1:
        count_index = 0
    else:
        count_index = -count

    if conversation_id is None:
        # list = self._simplify_list(list(self.conversation_collection.find({'_id': {'$gt': first_id, '$lte': last_id}}).sort("_id", ASCENDING)), simple)
        return self._simplify_list(list(self.conversation_collection.find().sort("_id", ASCENDING)), simple)[count_index:]
    else:
        return self._simplify_list(list(self.conversation_collection.find({'conversation_id': conversation_id}).sort("_id", ASCENDING)), simple)[count_index:]
def getOneWeibo():
    reason = ['????','??','????????','????','??????','????','??????','????','????','?????']
    db = get_db()
    cl = db['retweet']
    rst = cl.find({'retweet': 0}).sort("mid", pymongo.ASCENDING)
    for rt in rst:
        cl.update({'_id': rt['_id']}, {'$set': {'retweet': 1}})
        print('update', rt['mid'], ' retweet=1')
        if len(rt['url']) > 4:
            print(rt['mid'], ' too many users to follow, find another weibo')
            continue
        rt['reason'] = reason[random.randint(0, len(reason) - 1)]
        if rt['friend'] > 0:
            cl = db['follow']
            rst = cl.find({'follow': 1})
            rint = random.randint(0, rst.count() - rt['friend'])
            for i in range(rint, rint + rt['friend']):
                rt['reason'] = rt['reason'] + '@' + rst[i]['nick'] + ' '
        return rt
    return False
def _get_jobs(self, conditions):
    jobs = []
    failed_job_ids = []
    for document in self.collection.find(conditions, ['_id', 'job_state'],
                                         sort=[('next_run_time', ASCENDING)]):
        try:
            jobs.append(self._reconstitute_job(document['job_state']))
        except:
            self._logger.exception('Unable to restore job "%s" -- removing it',
                                   document['_id'])
            failed_job_ids.append(document['_id'])

    # Remove all the jobs we failed to restore
    if failed_job_ids:
        self.collection.remove({'_id': {'$in': failed_job_ids}})

    return jobs
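The cleanup deletes every job whose pickled state could not be restored in a single round trip using an $in filter. In current PyMongo, Collection.remove has been superseded by delete_many, which takes the same filter. A hedged sketch with an illustrative collection and made-up ids:

import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
jobs = client["scheduler"]["jobs"]  # hypothetical collection

failed_job_ids = ["job-17", "job-42"]  # ids collected while restoring jobs
result = jobs.delete_many({"_id": {"$in": failed_job_ids}})
print("removed", result.deleted_count, "broken jobs")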