def _get_jobs(self, conditions):
jobs = []
failed_job_ids = []
for document in self.collection.find(conditions, ['_id', 'job_state'],
sort=[('next_run_time', ASCENDING)]):
try:
jobs.append(self._reconstitute_job(document['job_state']))
        except Exception:
self._logger.exception('Unable to restore job "%s" -- removing it',
document['_id'])
failed_job_ids.append(document['_id'])
# Remove all the jobs we failed to restore
if failed_job_ids:
self.collection.remove({'_id': {'$in': failed_job_ids}})
return jobs
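The loader above is self-healing: any document whose pickled job_state fails to deserialize is queued for deletion instead of aborting the whole scan. A minimal usage sketch, where store stands in for an instance of the surrounding jobstore class and the filter shown is a hypothetical example:

import time

# Jobs due now, ordered oldest-first thanks to the ASCENDING sort on next_run_time.
due_jobs = store._get_jobs({'next_run_time': {'$lte': time.time()}})
# An empty filter returns every restorable job in the store.
all_jobs = store._get_jobs({})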
def _create_unique_index(self):
"""Create an index based on raw data reference fields.
Creates a compound index on the fields that contain the location
of the raw data from which a document was derived. This prevents
duplicate documents from being saved.
"""
data_key = _DISTILLERY_SETTINGS['RAW_DATA_KEY']
fields = [
'BACKEND_KEY',
'WAREHOUSE_KEY',
'COLLECTION_KEY',
'DOC_ID_KEY'
]
keys = ['%s.%s' % (data_key, _DISTILLERY_SETTINGS[field])
for field in fields]
formatted_keys = [(key, pymongo.ASCENDING) for key in keys]
return self._collection.create_index(formatted_keys, unique=True,
sparse=True)
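A quick sketch of the duplicate prevention this unique, sparse index provides, written with plain pymongo; the collection and key names below are invented stand-ins for the values resolved from _DISTILLERY_SETTINGS:

import pymongo
from pymongo.errors import DuplicateKeyError

coll = pymongo.MongoClient().demo_db.distilled_docs
coll.create_index([('_raw_data.collection', pymongo.ASCENDING),
                   ('_raw_data.doc_id', pymongo.ASCENDING)],
                  unique=True, sparse=True)
coll.insert_one({'_raw_data': {'collection': 'logs', 'doc_id': 1}})
try:
    coll.insert_one({'_raw_data': {'collection': 'logs', 'doc_id': 1}})
except DuplicateKeyError:
    pass  # same raw-data reference: the second save is rejected

Because the index is sparse, documents that lack the raw-data fields stay out of it entirely, so only genuine references are forced to be unique.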
def rebuild_people_indexes():
indexes = []
# indexes.append(IndexModel('pid', name='_pid'))
    indexes.append(IndexModel('PersonNameLastName', name='_LastName'))
    indexes.append(IndexModel('PersonNameFirstName', name='_FirstName'))
    indexes.append(IndexModel('BirthPlace.Place', name='_BirthPlace'))
    indexes.append(IndexModel('relatives.pid', name='_RelativesPid'))
# indexes.append(IndexModel('BirthDate', name= '_BirthDate'))
indexes.append(IndexModel([('BirthDate.Year', ASCENDING),
('BirthDate.Month', ASCENDING),
('BirthDate.Day', ASCENDING)],
name="_BirthDate"))
mc[write_table].create_indexes(indexes)
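Splitting the birth date into a compound (Year, Month, Day) index lets range scans and chronological sorts run off a single index; for example (a sketch, reusing the mc[write_table] handle from above with a hypothetical year range):

cursor = mc[write_table].find(
    {'BirthDate.Year': {'$gte': 1850, '$lt': 1900}}
).sort([('BirthDate.Year', ASCENDING),
        ('BirthDate.Month', ASCENDING),
        ('BirthDate.Day', ASCENDING)])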
def download_arch_security():
db = get_db()
collection = db.arch_security_updates
collection.create_index([('package', ASCENDING),
('announced_at', ASCENDING)], unique=True)
for package, dt, source in rss_feed():
try:
collection.insert_one({'package': package,
'announced_at': dt,
'source': source})
except DuplicateKeyError:
return
else:
log.info('Identified Arch security update for {}, '
'announced at {}', package, dt)
yield (package, dt)
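Because the function is a generator that returns at the first DuplicateKeyError, i.e. the first update it has already stored, a caller iterates over only the genuinely new advisories; a usage sketch, where the handler name is a hypothetical downstream consumer:

for package, announced_at in download_arch_security():
    notify_subscribers(package, announced_at)  # hypothetical handler for new advisories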
def init_db(self, sut_fuzzer_pairs):
"""
    Creates a 'fuzzinator_issues' collection with appropriate indexes (if
    they do not exist already), and initializes a 'fuzzinator_stats' collection
    for (sut, fuzzer) pairs (with 0 exec and crash counts if entries do not
    exist already).
"""
db = self._db
issues = db.fuzzinator_issues
issues.create_index([('sut', ASCENDING), ('id', ASCENDING)])
stats = db.fuzzinator_stats
for sut, fuzzer in sut_fuzzer_pairs:
if stats.find({'sut': sut, 'fuzzer': fuzzer}).count() == 0:
stats.insert_one({'sut': sut, 'fuzzer': fuzzer, 'exec': 0, 'crashes': 0})
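The stats documents seeded above are meant to be bumped atomically as fuzzing progresses; a minimal sketch of such an update (the update_one call is an assumption for illustration, not code from the surrounding project):

stats.update_one(
    {'sut': 'my-sut', 'fuzzer': 'my-fuzzer'},  # hypothetical pair
    {'$inc': {'exec': 1}})                     # one more test case executed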
def get_api_exceptions(result_limit=50, sort_direction=pymongo.DESCENDING):
"""
Retrieve api exceptions.
Args:
result_limit: the maximum number of exceptions to return.
sort_direction: pymongo.ASCENDING or pymongo.DESCENDING
"""
db = api.common.get_conn()
results = db.exceptions.find({"visible": True}).sort([("time", sort_direction)]).limit(result_limit)
return list(results)
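With the defaults this returns the 50 newest visible exceptions; passing pymongo.ASCENDING flips the order, e.g.:

oldest_first = get_api_exceptions(result_limit=10,
                                  sort_direction=pymongo.ASCENDING)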
def _index_list(key_or_list, direction=None):
"""Helper to generate a list of (key, direction) pairs.
Takes such a list, or a single key, or a single key and direction.
"""
if direction is not None:
return [(key_or_list, direction)]
else:
if isinstance(key_or_list, string_type):
return [(key_or_list, ASCENDING)]
elif not isinstance(key_or_list, (list, tuple)):
raise TypeError("if no direction is specified, "
"key_or_list must be an instance of list")
return key_or_list
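The helper normalizes all three accepted call shapes to a list of (key, direction) pairs:

from pymongo import ASCENDING, DESCENDING

_index_list('age')                    # -> [('age', ASCENDING)]
_index_list('age', DESCENDING)        # -> [('age', DESCENDING)]
_index_list([('age', DESCENDING)])    # already a list: returned unchanged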
def QA_SU_save_stock_day(client=QA_Setting.client):
stock_list = QA_fetch_get_stock_time_to_market()
coll_stock_day = client.quantaxis.stock_day
coll_stock_day.create_index(
[("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
err = []
def __saving_work(code, coll_stock_day):
try:
QA_util_log_info(
'##JOB01 Now Saving STOCK_DAY==== %s' % (str(code)))
ref = coll_stock_day.find({'code': str(code)[0:6]})
end_date = str(now_time())[0:10]
if ref.count() > 0:
                # records exist for this code: resume from the most recent saved date
start_date = ref[ref.count() - 1]['date']
else:
start_date = '1990-01-01'
            QA_util_log_info(' UPDATE_STOCK_DAY \n Trying to update %s from %s to %s' %
                             (code, start_date, end_date))
if start_date != end_date:
coll_stock_day.insert_many(
QA_util_to_json_from_pandas(
QA_fetch_get_stock_day(str(code), start_date, end_date, '00')[1::]))
        except Exception:
err.append(str(code))
for item in range(len(stock_list)):
QA_util_log_info('The %s of Total %s' %
(item, len(stock_list)))
QA_util_log_info('DOWNLOAD PROGRESS %s ' % str(
float(item / len(stock_list) * 100))[0:4] + '%')
__saving_work(stock_list.index[item], coll_stock_day)
if len(err) < 1:
QA_util_log_info('SUCCESS')
else:
QA_util_log_info('ERROR CODE \n ')
QA_util_log_info(err)
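The resume-from-last-date logic above generalizes to any append-only time series; a condensed sketch of the same pattern, where fetch is a hypothetical stand-in for a QA_fetch_get_stock_day-style function:

def incremental_save(coll, code, fetch):
    # Resume from the newest stored bar, or from scratch for a new code.
    last = coll.find_one({'code': code}, sort=[('date', pymongo.DESCENDING)])
    start_date = last['date'] if last else '1990-01-01'
    end_date = str(now_time())[0:10]
    if start_date != end_date:
        # [1:] drops the first fetched row, which duplicates the already-saved bar.
        coll.insert_many(QA_util_to_json_from_pandas(
            fetch(code, start_date, end_date)[1:]))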
def QA_SU_save_stock_xdxr(client=QA_Setting.client):
client.quantaxis.drop_collection('stock_xdxr')
stock_list = QA_fetch_get_stock_time_to_market()
coll = client.quantaxis.stock_xdxr
coll.create_index([('code', pymongo.ASCENDING),
('date', pymongo.ASCENDING)])
err = []
def __saving_work(code, coll):
QA_util_log_info('##JOB02 Now Saving XDXR INFO ==== %s' % (str(code)))
try:
coll.insert_many(
QA_util_to_json_from_pandas(
QA_fetch_get_stock_xdxr(str(code))))
        except Exception:
err.append(str(code))
for i_ in range(len(stock_list)):
#__saving_work('000001')
QA_util_log_info('The %s of Total %s' % (i_, len(stock_list)))
QA_util_log_info('DOWNLOAD PROGRESS %s ' % str(
float(i_ / len(stock_list) * 100))[0:4] + '%')
__saving_work(stock_list.index[i_], coll)
if len(err) < 1:
QA_util_log_info('SUCCESS')
else:
QA_util_log_info('ERROR CODE \n ')
QA_util_log_info(err)
def QA_SU_save_index_day(client=QA_Setting.client):
__index_list = QA_fetch_get_stock_list('index')
coll = client.quantaxis.index_day
coll.create_index([('code', pymongo.ASCENDING),
('date_stamp', pymongo.ASCENDING)])
err = []
def __saving_work(code, coll):
try:
ref_ = coll.find({'code': str(code)[0:6]})
end_time = str(now_time())[0:10]
if ref_.count() > 0:
start_time = ref_[ref_.count() - 1]['date']
else:
start_time = '1990-01-01'
            QA_util_log_info('##JOB04 Now Saving INDEX_DAY==== \n Trying to update %s from %s to %s' %
                             (code, start_time, end_time))
if start_time != end_time:
coll.insert_many(
QA_util_to_json_from_pandas(
QA_fetch_get_index_day(str(code), start_time, end_time)[1::]))
        except Exception:
err.append(str(code))
for i_ in range(len(__index_list)):
#__saving_work('000001')
QA_util_log_info('The %s of Total %s' % (i_, len(__index_list)))
QA_util_log_info('DOWNLOAD PROGRESS %s ' % str(
float(i_ / len(__index_list) * 100))[0:4] + '%')
__saving_work(__index_list.index[i_][0], coll)
if len(err) < 1:
QA_util_log_info('SUCCESS')
else:
QA_util_log_info('ERROR CODE \n ')
QA_util_log_info(err)
def _index_list(key_or_list, direction=None):
"""Helper to generate a list of (key, direction) pairs.
Takes such a list, or a single key, or a single key and direction.
"""
if direction is not None:
return [(key_or_list, direction)]
else:
if isinstance(key_or_list, basestring):
return [(key_or_list, pymongo.ASCENDING)]
elif not isinstance(key_or_list, (list, tuple)):
raise TypeError("if no direction is specified, "
"key_or_list must be an instance of list")
return key_or_list
def _ensure_index(self):
if not object.__getattribute__(self, "_ensured_index"):
self._coll.chunks.ensure_index(
[("files_id", ASCENDING), ("n", ASCENDING)],
unique=True)
object.__setattr__(self, "_ensured_index", True)
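The unique (files_id, n) pair mirrors how GridFS addresses data: n is the zero-based sequence number of a chunk within one file, so reading chunks back in ASCENDING n order reassembles the file. A sketch of that read path (a hypothetical helper, plain pymongo):

from pymongo import ASCENDING

def read_file_bytes(chunks, file_id):
    # Concatenate a file's chunks in sequence order; the (files_id, n)
    # index makes both the filter and the sort cheap.
    return b''.join(doc['data'] for doc in
                    chunks.find({'files_id': file_id}).sort('n', ASCENDING))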
def __init__(self, rmpids, interval):
"""
Constructor for RateMyProfessors to set the RMP schools to request and the interval
:param rmpids: **list** List of rmp ids to scrape for
:param interval: **int** Seconds to wait in between scraping
:return:
"""
threading.Thread.__init__(self)
        # Pass in a list that contains the ids to fetch
self.ids = rmpids
# The amount of seconds to wait before scraping RMP again
self.interval = interval
# Establish db connection
self.db = pymongo.MongoClient().ScheduleStorm
log.info("Ensuring MongoDB indexes exist")
self.db.RateMyProfessors.create_index(
[("school", pymongo.ASCENDING)]
)
self.db.RateMyProfessors.create_index(
[("id", pymongo.ASCENDING),
("school", pymongo.ASCENDING)],
unique=True
)
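With the unique (id, school) index in place, repeated scrapes can write professor records idempotently via upserts; a hedged sketch (the ids and document payload are invented for illustration):

db = pymongo.MongoClient().ScheduleStorm
db.RateMyProfessors.update_one(
    {'id': 12345, 'school': 678},  # hypothetical professor and school ids
    {'$set': {'rating': 4.2}},     # hypothetical scraped payload
    upsert=True)                   # insert on first sight, update thereafter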
def __init__(self, settings):
super().__init__(settings)
self.db = pymongo.MongoClient().ScheduleStorm
self.db.UAlbertaProfessor.create_index([("uid", pymongo.ASCENDING)], unique=True)