def index(self):
"""Get the list of objects.
.. :quickref: File; Get the list of objects
Response is paginated and will only contain 25 results. The most recent
objects appear first.
:query page: page number.
:type page: int
:>json list files: list of files (see :http:get:`/files/(id)` for details on the format of a file).
"""
page = int(request.args.get('page', 1))
files = current_user.files.find().sort('_id', DESCENDING).limit(PER_PAGE).skip((page - 1) * PER_PAGE)
pagination = Pagination(page=page, per_page=PER_PAGE, total=files.count(), css_framework='bootstrap3')
files = {'files': clean_files(list(files))}
return render(files, 'files/index.html', ctx={'data': files, 'pagination': pagination})
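
# A minimal sketch, assuming the same collection as in index() above: PyMongo
# deprecated Cursor.count() in 3.7 (removed in 4.0), so the pagination total can
# instead be computed with Collection.count_documents().
def count_for_pagination(files_collection):
    # An empty filter counts every document without fetching any of them.
    return files_collection.count_documents({})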
def get_last_oplog_timestamp(conn, db_name):
start_time = time.time()
    oplog = conn.local.oplog.rs
if not db_name:
curr = oplog.find().sort(
'$natural', pymongo.DESCENDING
).limit(1)
else:
#{'ns': {'$in': oplog_ns_set}}
reg="^"+db_name+"\."
curr = oplog.find(
{'ns': re.compile(reg)}
).sort('$natural', pymongo.DESCENDING).limit(1)
if curr.count(with_limit_and_skip=True) == 0:
return None
print_cost_time("get_last_oplog_timestamp ", start_time)
return curr[0]['ts']
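
# A minimal companion sketch, assuming the same connection layout as above (the
# function name is hypothetical): the earliest oplog entry can be read the same
# way by sorting $natural ascending, which gives the start of the oplog window.
import re

import pymongo

def get_first_oplog_timestamp(conn, db_name=None):
    oplog = conn.local.oplog.rs
    query = {'ns': re.compile("^" + re.escape(db_name) + r"\.")} if db_name else {}
    doc = oplog.find_one(query, sort=[('$natural', pymongo.ASCENDING)])
    return doc['ts'] if doc else None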
def get_mongos(self, force=False):
if not force and self.mongos_db:
return self.mongos_db
elif self.db.is_mongos():
return self.db
else:
db = self.connection['config']
for doc in db.mongos.find().sort('ping', DESCENDING):
try:
mongos_uri = MongoUri(doc['_id'])
logging.debug("Found cluster mongos: %s" % mongos_uri)
self.mongos_db = DB(mongos_uri, self.config, False, 'nearest')
logging.info("Connected to cluster mongos: %s" % mongos_uri)
return self.mongos_db
except DBConnectionError:
logging.debug("Failed to connect to mongos: %s, trying next available mongos" % mongos_uri)
raise OperationError('Could not connect to any mongos!')
def get_range_daily_data(self, code, start=None, end=None):
    print u'Fetching daily data for %s' % code
# date open high close low volume amount
df = fc.get_stock_daily_data(code, start, end)
if df is None:
return
#df.to_csv(str(code)+ "daily.csv")
    for row in range(0, df.shape[0]):
# daily data
dailybar = {
'date' : str(df.index[row].date()),
'open' : str(df.iat[row, 0]),
'high' : str(df.iat[row, 1]),
'low' : str(df.iat[row, 3]),
'close' : str(df.iat[row, 2]),
'volume' : str(df.iat[row, 4]),
'amount' : str(df.iat[row, 5])
}
        try:
            self.Daily_Db[code].insert(dailybar)
        except Exception:
            # Ignore insert failures (e.g. duplicate bars) and continue.
            pass
    self.Daily_Db[code].create_index([('date', pymongo.DESCENDING)])
def getTaginfo(tag):
    '''
    Return the latest record for the given tag, together with the
    anchors currently tagged with it.
    '''
if tag:
taginfo = db['kindRecord']
result = taginfo.find({'tag': tag}, {'_id': 0, 'date': 0}).sort(
'date', pymongo.DESCENDING).limit(1)
taganchor = db['Roominfo']
anchors = taganchor.find(
{'tag': tag}, {'_id': 0, 'img': 0, 'date': 0, 'tag': 0})
        anchorsinfo = []
        if anchors.count() != 0:
            anchorsinfo = [{'anchor': item['anchor'], 'audience': item['audience'],
                            'roomid': item['roomid'], 'roomtitle': item['roomtitle']}
                           for item in anchors]
if result.count() != 0:
return result[0], anchorsinfo
else:
return None
else:
return None
def findOne(db, resultantClass, **query):
    ''' (pymongo.database.Database, MongoORM) -> MongoORM
    Create an instance of resultantClass directly from the Mongo
    database db, using the given query arguments.
    '''
objectData = db[resultantClass.collection].find_one(
query,
# in case of tiebreakers, get the newest one
sort=[('_id', DESCENDING)]
)
if objectData:
return resultantClass(
db,
resultantClass.collection,
**objectData
)
    # non-existent objectId
raise KeyError('No such document in %s' % (
str(db[resultantClass.collection])
))
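
# A hedged usage sketch for findOne() above; `User` is a hypothetical MongoORM
# subclass shown only to illustrate the calling convention:
#
#     class User(MongoORM):
#         collection = 'users'
#
#     user = findOne(db, User, email='someone@example.com')  # newest match wins ties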
def enhance_problems_for_admin(problems):
for problem in problems:
query = {
'problem_id': problem['_id']
}
snapshot = _db.problem_ranking_snapshots.find_one(
query,
sort=[('snapshot_time', pymongo.DESCENDING)])
if snapshot is None:
problem["solution_count"] = 0
problem["perfect_solution_count"] = 0
else:
problem["solution_count"] = len(snapshot["ranking"])
perfect_solution_count = 0
for solution in snapshot["ranking"]:
if solution["resemblance_int"] == 1000000:
perfect_solution_count += 1
problem["perfect_solution_count"] = perfect_solution_count
def open_spider(self, spider):
    logging.warning('Opening spider')
try:
self.client = pymongo.MongoClient(self.mongo_uri)
self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('Failed to connect to MongoDB')
    # Create the collection and its indexes if it does not exist yet.
if self.mongo_col not in self.db.collection_names():
self.db[self.mongo_col].create_index(
[('created_at', pymongo.DESCENDING)])
self.db[self.mongo_col].create_index(
[('admin', pymongo.ASCENDING)], sparse=True)
self.db[self.mongo_col].create_index(
[('price', pymongo.ASCENDING)], sparse=True)
self.db[self.mongo_col].create_index(
[('mblogid', pymongo.ASCENDING)], unique=True)
else:
        # Otherwise, fetch the creation time of the most recent record without a title.
recent_row = list(self.db[self.mongo_col].find({'title': {'$eq': None}}, projection=['created_at'],
limit=1, sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']  # most recent creation time
            logging.warning("Most recent record was created at %s" % (
                self.recent + datetime.timedelta(hours=8)))  # convert UTC to UTC+8
def open_spider(self, spider):
    logging.warning('Opening spider')
try:
self.client = pymongo.MongoClient(self.mongo_uri)
self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('Failed to connect to MongoDB')
    # Create the collection and its indexes if it does not exist yet.
if self.mongo_col not in self.db.collection_names():
self.db[self.mongo_col].create_index(
[('created_at', pymongo.DESCENDING)])
self.db[self.mongo_col].create_index(
[('admin', pymongo.ASCENDING)], sparse=True)
self.db[self.mongo_col].create_index(
[('price', pymongo.ASCENDING)], sparse=True)
self.db[self.mongo_col].create_index(
[('mblogid', pymongo.ASCENDING)], unique=True)
else:
        # Otherwise, fetch the creation time of the most recent record that has a title.
recent_row = list(self.db[self.mongo_col].find({'title': {'$ne': None}}, projection=['created_at'],
limit=1, sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']  # most recent creation time
            logging.warning("Most recent record was created at %s" % (
                self.recent + datetime.timedelta(hours=8)))  # convert UTC to UTC+8
def open_spider(self, spider):
    logging.warning('Opening spider')
try:
self.client = pymongo.MongoClient(self.mongo_uri)
self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('Failed to connect to MongoDB')
    # Create the collection and its indexes if it does not exist yet.
if self.mongo_col not in self.db.collection_names():
self.db[self.mongo_col].create_index(
[('created_at', pymongo.DESCENDING)])
self.db[self.mongo_col].create_index(
[('admin', pymongo.ASCENDING)], sparse=True)
self.db[self.mongo_col].create_index(
[('price', pymongo.ASCENDING)], sparse=True)
self.db[self.mongo_col].create_index(
[('mblogid', pymongo.ASCENDING)], unique=True)
else:
        # Otherwise, fetch the creation time of the most recent record.
        recent_row = list(self.db[self.mongo_col].find(projection=['created_at', '_id'],
                                                       limit=1, sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']  # most recent creation time
            logging.warning("Most recent record was created at %s" % (
                self.recent + datetime.timedelta(hours=8)))  # convert UTC to UTC+8
def get_chat_message_list(user_id, skip=0, limit=None):
    '''Get the chat message list for the given user.'''
user_dbref = DBRef(UserDocument.meta['collection'], ObjectId(user_id))
query = {
'$or': [{'sender': user_dbref}, {'recipient': user_dbref}]
}
cursor = ChatMessageDocument.find(query).sort(
[('send_time', pymongo.DESCENDING)]
).skip(skip)
if limit is not None:
cursor = cursor.limit(limit)
chat_message_list = yield ChatMessageDocument.to_list(cursor)
chat_message_list = yield ChatMessageDocument.translate_dbref_in_document_list(
chat_message_list)
raise gen.Return(chat_message_list)
def get_history_messages(id_a, id_b, since):
    '''Get the history chat messages between two users.'''
limit = setting.history_messages_number_per_time
user_a = DBRef(UserDocument.meta['collection'], ObjectId(id_a))
user_b = DBRef(UserDocument.meta['collection'], ObjectId(id_b))
cursor = ChatMessageDocument.find(
{'$or': [{'between': [user_a, user_b]},
{'between': [user_b, user_a]}], 'send_time': {'$lt': since}}
).sort([('send_time', pymongo.DESCENDING)]).limit(limit)
result = yield ChatMessageDocument.to_list(cursor)
raise gen.Return(result[::-1])
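
# A hedged alternative sketch, assuming 'between' stores the two-user pair as an
# array: '$all' matches the pair regardless of stored order, which avoids the
# two-branch '$or' used above.
#
#     spec = {'between': {'$all': [user_a, user_b]}, 'send_time': {'$lt': since}}
#     cursor = ChatMessageDocument.find(spec)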
def get_like_list(share_id, skip=0, limit=None):
cursor = ShareLikeDocument.find({
'share': DBRef(
ShareDocument.meta['collection'],
ObjectId(share_id)
)
}).sort([('like_time', pymongo.DESCENDING)]).skip(skip)
if limit is not None:
cursor = cursor.limit(limit)
like_list = yield ShareLikeDocument.to_list(cursor)
for like in like_list:
like['liker'] = yield UserDocument.translate_dbref(like['liker'])
raise gen.Return(like_list)
def get_topic_list_by_someone(author_id, skip=0, limit=None):
    '''Get the topics published by the given author.'''
cursor = TopicDocument.find({
'author': DBRef(
UserDocument.meta['collection'], ObjectId(author_id)
)
}).sort([('publish_time', pymongo.DESCENDING)]).skip(skip)
if limit is not None:
cursor = cursor.limit(limit)
topic_list = yield TopicDocument.to_list(cursor)
for topic in topic_list:
topic['author'] = yield UserDocument.translate_dbref(
topic['author']
)
topic['last_comment'] = yield TopicCommentDocument.get_last_comment(
topic['_id']
)
for i, node in enumerate(topic['nodes']):
topic['nodes'][i] = yield NodeDocument.translate_dbref(node)
raise gen.Return(topic_list)
def get_friend_list(user_id, skip=0, limit=None):
    '''Get the friend list of the given user.

    :Parameters:
      - `user_id`: the id of the user whose friends are listed
    '''
owner = DBRef(UserDocument.meta['collection'], ObjectId(user_id))
cursor = FriendDocument.find({'owner': owner}).sort(
[('be_time', pymongo.DESCENDING)]
).skip(skip)
if limit is not None:
cursor = cursor.limit(limit)
friends = yield FriendDocument.to_list(cursor)
friend_list = yield FriendDocument._gen_friend_list(
friends, "friend"
)
raise gen.Return(friend_list)
def get_member(league_id, skip=0, limit=9):
    '''Get the member list of the given league.'''
cursor = LeagueMemberDocument.find({
'league': DBRef(
UserDocument.meta['collection'],
ObjectId(league_id)
)
}).sort([('time', pymongo.DESCENDING)]).skip(skip).limit(limit)
member_list = yield LeagueMemberDocument.to_list(cursor)
for member in member_list:
member['member'] = yield LeagueMemberDocument.translate_dbref(
member['member']
)
raise gen.Return(member_list)
def select(self, count=None, conditions=None):
if count:
count = int(count)
else:
count = 0
if conditions:
conditions = dict(conditions)
conditions_name = ['types', 'protocol']
for condition_name in conditions_name:
value = conditions.get(condition_name, None)
if value:
conditions[condition_name] = int(value)
else:
conditions = {}
items = self.proxys.find(conditions, limit=count).sort(
[("speed", pymongo.ASCENDING), ("score", pymongo.DESCENDING)])
results = []
for item in items:
result = (item['ip'], item['port'], item['score'])
results.append(result)
return results
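
# A minimal sketch (the index name is an assumption): the compound sort in
# select() above benefits from a matching compound index, otherwise MongoDB has
# to sort the candidate documents in memory.
import pymongo

def ensure_proxy_sort_index(proxys_collection):
    proxys_collection.create_index(
        [("speed", pymongo.ASCENDING), ("score", pymongo.DESCENDING)],
        name="idx_speed_asc_score_desc")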
def setup(self):
"""Setting up MongoDB collections, if they not exist."""
try:
db = await self.db
collections = await db.collection_names()
created = False
if self.table_name not in collections:
# create table
logger.info("Creating MongoDB collection [{}]".format(self.table_name))
await db.create_collection(self.table_name)
await db[self.table_name].create_index([("target_id", DESCENDING), ("post_id", DESCENDING)])
created = True
# create control collection if not already created.
if self.control_table_name not in collections:
# create table
logger.info("Creating MongoDB control data collection [{}]".format(self.control_table_name))
await db.create_collection(self.control_table_name)
created = True
return created
except Exception as exc:
logger.error("[DB] Error when setting up MongoDB collections: {}".format(exc))
return False
def fetch_existing_token_of_user(self, client_id, grant_type, user_id):
data = self.collection.find_one({"client_id": client_id,
"grant_type": grant_type,
"user_id": user_id},
sort=[("expires_at",
pymongo.DESCENDING)])
if data is None:
raise AccessTokenNotFound
return AccessToken(client_id=data.get("client_id"),
grant_type=data.get("grant_type"),
token=data.get("token"),
data=data.get("data"),
expires_at=data.get("expires_at"),
refresh_token=data.get("refresh_token"),
refresh_expires_at=data.get("refresh_expires_at"),
scopes=data.get("scopes"),
user_id=data.get("user_id"))
def getJudgementDetail():
jd_collection = db.JudgmentDoc_isExactlySame
query = jd_collection.find({},{'_id':0,'Id':1}).sort("Id",pymongo.ASCENDING)
idList = list(query)
length = len(idList)
collection = db.JudgementDetail
query = collection.find({},{'_id':0,'Id':1}).sort("_id",pymongo.DESCENDING).limit(3)
record_id = list(query)
print(record_id)
ii = idList.index(record_id[0])
for i in range(ii+1,ii+30000):
print("%d/%d\t%s"%(i,length,idList[i]['Id']))
ret = JudgementDetail(idList[i]['Id'])
if ret in err_code:
print('err_code: %s'%ret)
break
return ret
def getJudgementDetail():
jd_collection = db.JudgmentDoc_isExactlySame
query = jd_collection.find({},{'_id':0,'Id':1}).sort("Id",pymongo.ASCENDING)
idList = list(query)
length = len(idList)
collection = db.JudgementDetail
query = collection.find({},{'_id':0,'Id':1}).sort("_id",pymongo.DESCENDING).limit(10)
record_id = list(query)
ii = 0
    for record in idList:  # e.g. idList[:10] to limit the run
        ii = ii + 1
        print("%d/%d\t%s" % (ii, length, record['Id']))
        ret = JudgementDetail(record['Id'])
if ret in ['101','102','103','104','105','107','108','109','110','199']:
break
def ensure_index(cls):
super().ensure_index()
if not cls.COLLECTION_NAME:
return
collection = cls.collection()
collection.create_index(
[
("is_latest", pymongo.DESCENDING)
],
name="index_latest",
partialFilterExpression={"is_latest": True}
)
collection.create_index(
[
("model_id", pymongo.ASCENDING),
("version", pymongo.ASCENDING)
],
name="index_unique_version",
unique=True
)
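
# A hedged query sketch (the helper name is an assumption): the partial
# "index_latest" index above only covers documents where is_latest is True, so a
# query must include that predicate for the index to be eligible.
def find_latest(collection, model_id):
    # The filter matches the partialFilterExpression, so the index can be used.
    return collection.find_one({'model_id': model_id, 'is_latest': True})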
def get_history(asset_id, document_limit=10):
    history = []
    try:
        cursor = app.config['DB_COLL_HISTORY'].find(
            {'asset_id': asset_id}
        ).sort(
            "timestamp", pymongo.DESCENDING
        ).limit(int(document_limit))
    except pymongo.errors.AutoReconnect as e:
        # Return an empty result instead of falling through to an unbound cursor.
        logger.error(e)
        return dumps(history)
    except Exception as e:
        logger.error(e)
        return dumps(history)
    for document in cursor:
        doc = utilities.to_isoformat_datetime(document)
        history.append(doc)
    return dumps(history)
def setWeiboMongo(rst):
db=get_db()
cl=db['retweet']
    try:
        maxMid = cl.find().sort("mid", pymongo.DESCENDING)[0]['mid']
    except IndexError:
        # No retweet documents stored yet.
        maxMid = '0'
coll=[]
for c in rst:
dt={}
cm=re.findall('comment_txt.*?/p',c)[0]
dt['mid']=returnMid(c)
if dt['mid']<=maxMid:
break
dt['url']=returnUrl(c)
dt['friend']=returnFri(cm)
dt['retweet']=0
coll.append(dt)
    if coll:
        cid = cl.insert(coll)
        print('inserted', len(coll), 'retweet documents!')
return coll
def surialert(request, task_id):
report = results_db.analysis.find_one({"info.id": int(task_id)},{"suricata.alerts": 1},sort=[("_id", pymongo.DESCENDING)])
if not report:
return render(request, "error.html",
{"error": "The specified analysis does not exist"})
suricata = report["suricata"]
if settings.MOLOCH_ENABLED:
if settings.MOLOCH_BASE[-1] != "/":
settings.MOLOCH_BASE = settings.MOLOCH_BASE + "/"
suricata = gen_moloch_from_suri_alerts(suricata)
return render(request, "analysis/surialert.html",
{"analysis": report,
"config": enabledconf})
def surihttp(request, task_id):
report = results_db.analysis.find_one({"info.id": int(task_id)},{"suricata.http": 1},sort=[("_id", pymongo.DESCENDING)])
if not report:
return render(request, "error.html",
{"error": "The specified analysis does not exist"})
suricata = report["suricata"]
if settings.MOLOCH_ENABLED:
if settings.MOLOCH_BASE[-1] != "/":
settings.MOLOCH_BASE = settings.MOLOCH_BASE + "/"
suricata = gen_moloch_from_suri_http(suricata)
return render(request, "analysis/surihttp.html",
{"analysis": report,
"config": enabledconf})
def suritls(request, task_id):
report = results_db.analysis.find_one({"info.id": int(task_id)},{"suricata.tls": 1},sort=[("_id", pymongo.DESCENDING)])
if not report:
return render(request, "error.html",
{"error": "The specified analysis does not exist"})
suricata = report["suricata"]
if settings.MOLOCH_ENABLED:
if settings.MOLOCH_BASE[-1] != "/":
settings.MOLOCH_BASE = settings.MOLOCH_BASE + "/"
suricata = gen_moloch_from_suri_tls(suricata)
return render(request, "analysis/suritls.html",
{"analysis": report,
"config": enabledconf})
def surifiles(request, task_id):
report = results_db.analysis.find_one({"info.id": int(task_id)},{"info.id": 1,"suricata.files": 1},sort=[("_id", pymongo.DESCENDING)])
if not report:
return render(request, "error.html",
{"error": "The specified analysis does not exist"})
suricata = report["suricata"]
if settings.MOLOCH_ENABLED:
if settings.MOLOCH_BASE[-1] != "/":
settings.MOLOCH_BASE = settings.MOLOCH_BASE + "/"
suricata = gen_moloch_from_suri_file_info(suricata)
return render(request, "analysis/surifiles.html",
{"analysis": report,
"config": enabledconf})
def topcookies(cmd, message, args):
all_cookies = cmd.db[cmd.db.db_cfg.database].Cookies.find({}).sort('Cookies', pymongo.DESCENDING).limit(20)
cookie_count = cmd.db[cmd.db.db_cfg.database].Cookies.aggregate(
[{'$group': {
'_id': 'cookie_counter_cursor',
'cookie_count': {'$sum': '$Cookies'}
}}]
)
cookie_count = list(cookie_count)
cookie_count = cookie_count[0]['cookie_count']
cookie_list = []
for cookie_file in all_cookies:
user = discord.utils.find(lambda x: x.id == cookie_file['UserID'], cmd.bot.get_all_members())
if user:
unam = user.name
else:
unam = '{Unknown}'
cookie_list.append([unam, cookie_file['Cookies']])
cookie_table = boop(cookie_list, ['User', 'Cookies'])
top_text = f'A total of {cookie_count} cookies have been given.'
response = discord.Embed(color=0xd99e82)
response.add_field(name='Cookie Count', value=top_text, inline=False)
response.add_field(name='Cookie Leaderboard', value=f'```bat\n{cookie_table}\n```', inline=False)
await message.channel.send(embed=response)
def initialize_indexes(database):
"""Ensure the necessary indexes exist."""
submissions = database['submissions']
comments = database['comments']
index_id = pymongo.IndexModel('reddit_id')
index_created = pymongo.IndexModel([('created', pymongo.DESCENDING)])
index_text_title_and_body = pymongo.IndexModel([('title', pymongo.TEXT),
('body', pymongo.TEXT)])
index_text_body = pymongo.IndexModel([('body', pymongo.TEXT)])
submissions.create_indexes([index_id,
index_created,
index_text_title_and_body])
comments.create_indexes([index_id,
index_created,
index_text_body])
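
# A minimal usage sketch, assuming the collections created above: the text
# indexes enable $text queries, and sorting on the textScore meta field ranks
# results by relevance.
def search_submissions(database, phrase, limit=25):
    return list(database['submissions']
                .find({'$text': {'$search': phrase}},
                      {'score': {'$meta': 'textScore'}})
                .sort([('score', {'$meta': 'textScore'})])
                .limit(limit))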