def api_people(entity=None):
    """
    API route for people.
    :param entity: id of a person, or None
    :return: JSON with results, or 404
    """
    try:
        person_id = request.args.get('id')
        if entity is None and person_id is None:
            return get_all_from_category(Person)
        else:
            if entity is not None:
                data = db.session.query(Person).filter_by(
                    person_id=entity).one()
            else:
                data = db.session.query(Person).filter_by(
                    person_id=person_id).one()
            return json.dumps(data.dictionary())
    except SQLAlchemyError:
        print("Get people failed")
        abort(404)

def api_companies(entity=None):
    """
    API route for companies.
    :param entity: id of a company, or None
    :return: JSON with results, or 404
    """
    try:
        company_id = request.args.get('id')
        if entity is None and company_id is None:
            return get_all_from_category(Company)
        else:
            if entity is not None:
                data = db.session.query(Company).filter_by(
                    company_id=entity).one()
            else:
                data = db.session.query(Company).filter_by(
                    company_id=company_id).one()
            return json.dumps(data.dictionary())
    except SQLAlchemyError:
        print("Get companies failed")
        abort(404)

def api_financialorgs(entity=None):
    """
    API route for financial organizations.
    :param entity: id of a financial org, or None
    :return: JSON with results, or 404
    """
    try:
        finorg_id = request.args.get('id')
        if entity is None and finorg_id is None:
            return get_all_from_category(FinancialOrg)
        else:
            if entity is not None:
                data = db.session.query(FinancialOrg).filter_by(
                    financial_org_id=entity).one()
            else:
                data = db.session.query(FinancialOrg).filter_by(
                    financial_org_id=finorg_id).one()
            return json.dumps(data.dictionary())
    except SQLAlchemyError:
        print("Get financial orgs failed")
        abort(404)

def api_cities(entity=None):
    """
    API route for cities.
    :param entity: id of a city, or None
    :return: JSON with results, or 404
    """
    try:
        city_id = request.args.get('id')
        if entity is None and city_id is None:
            return get_all_from_category(City)
        else:
            if entity is not None:
                data = db.session.query(City).filter_by(city_id=entity).one()
            else:
                data = db.session.query(City).filter_by(city_id=city_id).one()
            return json.dumps(data.dictionary())
    except SQLAlchemyError:
        print("Get cities failed")
        abort(404)

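The four routes above differ only in the model class and the primary-key column they filter on. Below is a minimal sketch of a shared helper that captures the common pattern; the lookup_entity name and the id_column parameter are hypothetical, while db, get_all_from_category, and the models' dictionary() method are assumed to exist in the application, as the snippets above imply.

import json

from flask import abort, request
from sqlalchemy.exc import SQLAlchemyError


def lookup_entity(model, id_column, entity=None):
    """Return all rows of `model`, or a single row selected by id, as JSON."""
    try:
        requested_id = entity if entity is not None else request.args.get('id')
        if requested_id is None:
            # No id given: fall back to the application's "list everything" helper.
            return get_all_from_category(model)
        row = db.session.query(model).filter_by(**{id_column: requested_id}).one()
        return json.dumps(row.dictionary())
    except SQLAlchemyError:
        # .one() raises NoResultFound (a SQLAlchemyError subclass) when nothing matches.
        abort(404)


# Hypothetical usage: api_people() would reduce to a one-liner.
# def api_people(entity=None):
#     return lookup_entity(Person, 'person_id', entity)
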
def get_analysis_count(self, ecosystem, package, version):
    """Get count of previously scheduled analyses for the given EPV triplet.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :param version: str, Package version
    :return: analysis count
    """
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)
    try:
        count = PostgresBase.session.query(Analysis).\
            join(Version).join(Package).join(Ecosystem).\
            filter(Ecosystem.name == ecosystem).\
            filter(Package.name == package).\
            filter(Version.identifier == version).\
            count()
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
    return count

def get_finished_task_names(analysis_id):
    """Get names of tasks that finished in the analysis.

    :param analysis_id: analysis id for which task names should be retrieved
    :return: a list of task names
    """
    try:
        task_names = PostgresBase.session.query(WorkerResult.worker).\
            join(Analysis).\
            filter(Analysis.id == analysis_id).\
            filter(WorkerResult.error.is_(False)).\
            all()
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
    return list(chain(*task_names))

def get_analysed_versions(ecosystem, package):
    """Return all already analysed versions for the given package.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :return: a list of package version identifiers of already analysed versions
    """
    try:
        return chain(*PostgresBase.session.query(Version.identifier).
                     join(Analysis).join(Package).join(Ecosystem).
                     filter(Ecosystem.name == ecosystem).
                     filter(Package.name == package).
                     filter(Analysis.finished_at.isnot(None)).
                     distinct().all())
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise

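Each of the storage helpers above and below repeats the same guard: on SQLAlchemyError, roll the session back and re-raise. As a minimal sketch (an editorial consolidation, not part of the project), that boilerplate could be factored into a small context manager:

from contextlib import contextmanager

from sqlalchemy.exc import SQLAlchemyError


@contextmanager
def rollback_on_error(session):
    """Yield the session; roll it back and re-raise on any SQLAlchemy error."""
    try:
        yield session
    except SQLAlchemyError:
        session.rollback()
        raise


# Hypothetical rewrite of a query helper using the context manager:
# with rollback_on_error(PostgresBase.session) as session:
#     task_names = session.query(WorkerResult.worker).join(Analysis).all()
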
package_postgres.py (project: fabric8-analytics-worker, author: fabric8-analytics)
def get_analysis_by_id(self, analysis_id):
    """Get result of previously scheduled analysis.

    :param analysis_id: str, ID of analysis
    :return: analysis result
    """
    try:
        return PostgresBase.session.query(PackageAnalysis).\
            filter(PackageAnalysis.id == analysis_id).\
            one()
    except (NoResultFound, MultipleResultsFound):
        raise
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise

package_postgres.py (project: fabric8-analytics-worker, author: fabric8-analytics)
def get_analysis_count(self, ecosystem, package):
    """Get count of previously scheduled analyses for given ecosystem-package.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :return: analysis count
    """
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)
    try:
        count = PostgresBase.session.query(PackageAnalysis).\
            join(Package).join(Ecosystem).\
            filter(Ecosystem.name == ecosystem).\
            filter(Package.name == package).\
            count()
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
    return count

package_postgres.py (project: fabric8-analytics-worker, author: fabric8-analytics)
def get_finished_task_names(analysis_id):
    """Get names of tasks that finished in the package analysis.

    :param analysis_id: analysis id for which task names should be retrieved
    :return: a list of task names
    """
    try:
        task_names = PostgresBase.session.query(PackageWorkerResult.worker).\
            join(PackageAnalysis).\
            filter(PackageAnalysis.id == analysis_id).\
            filter(PackageWorkerResult.error.is_(False)).\
            all()
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
    return list(chain(*task_names))

result_collector.py (project: fabric8-analytics-worker, author: fabric8-analytics)
def do_run(self, arguments, s3, postgres, results):
    for worker_result in results.raw_analyses:
        # We don't want to store tasks that do book-keeping for Selinon's
        # Dispatcher (their names start uppercase).
        if worker_result.worker[0].isupper():
            continue
        if not postgres.is_real_task_result(worker_result.task_result):
            # Do not overwrite results stored on S3 with references to
            # their version - this can occur on selective task runs.
            continue
        version_id = s3.store_task_result(arguments, worker_result.worker,
                                          worker_result.task_result)
        # Substitute the task's result with the version id we got from S3.
        worker_result.task_result = {'version_id': version_id}
    try:
        postgres.session.commit()
    except SQLAlchemyError:
        postgres.session.rollback()
        raise
    s3.store_base_file_record(arguments, results.to_dict())

manifest_keeper.py (project: fabric8-analytics-worker, author: fabric8-analytics)
def execute(self, arguments):
    self._strict_assert(arguments.get('external_request_id'))
    postgres = StoragePool.get_connected_storage('BayesianPostgres')
    try:
        results = postgres.session.query(StackAnalysisRequest)\
            .filter(StackAnalysisRequest.id == arguments.get('external_request_id'))\
            .first()
    except SQLAlchemyError:
        postgres.session.rollback()
        raise
    manifests = []
    if results is not None:
        row = results.to_dict()
        request_json = row.get("requestJson", {})
        manifests = request_json.get('manifest', [])
    return {'manifest': manifests}

def run(self, arguments):
    self._strict_assert(arguments.get('document_id'))
    try:
        record = self.storage.session.query(Analysis).\
            filter(Analysis.id == arguments['document_id']).one()
        record.finished_at = json_serial(datetime.datetime.utcnow())
        record.release = '{}:{}:{}'.format(arguments.get('ecosystem'),
                                           arguments.get('name'),
                                           arguments.get('version'))
        self.storage.session.commit()
    except SQLAlchemyError:
        self.storage.session.rollback()
        raise
    # Commented out for now since we want to sync to S3
    # if self.task_name.endswith('Error'):
    #     raise RuntimeError("Flow %s failed" % self.flow_name)

def get_package_dependents_count(ecosystem_backend, package, db_session=None):
    """Get number of GitHub projects dependent on the `package`.

    :param ecosystem_backend: str, Ecosystem backend from `f8a_worker.enums.EcosystemBackend`
    :param package: str, Package name
    :param db_session: obj, Database session to use for querying
    :return: number of dependent projects, or -1 if the information is not available
    """
    if not db_session:
        storage = StoragePool.get_connected_storage("BayesianPostgres")
        db_session = storage.session
    try:
        count = db_session.query(PackageGHUsage.count).filter(PackageGHUsage.name == package) \
            .filter(PackageGHUsage.ecosystem_backend == ecosystem_backend) \
            .order_by(desc(PackageGHUsage.timestamp)) \
            .first()
    except SQLAlchemyError:
        db_session.rollback()
        raise
    if count:
        return count[0]
    return -1

def get_latest_analysis(ecosystem, package, version, db_session=None):
    """Get latest analysis for the given EPV."""
    if not db_session:
        storage = StoragePool.get_connected_storage("BayesianPostgres")
        db_session = storage.session
    try:
        return db_session.query(Analysis).\
            filter(Ecosystem.name == ecosystem).\
            filter(Package.name == package).\
            filter(Version.identifier == version).\
            order_by(Analysis.started_at.desc()).\
            first()
    except SQLAlchemyError:
        db_session.rollback()
        raise

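A brief hypothetical usage of the two helpers above; the ecosystem, package, and version values are placeholders, not examples taken from the project.

# Hypothetical usage; 'npm', 'serve-static' and '1.7.1' are placeholder values.
dependents = get_package_dependents_count('npm', 'serve-static')
if dependents >= 0:
    print('dependent GitHub projects:', dependents)

latest = get_latest_analysis('npm', 'serve-static', '1.7.1')
if latest is not None:
    print('latest analysis started at:', latest.started_at)
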
def _edit_config(self, form_data):
    if form_data['cid'] != u'':
        # Editing an existing config.
        c = Config.query.get(int(form_data['cid']))
        c.name = str(form_data['name'])
    else:
        # Adding a new config.
        c = Config(name=form_data['name'])
        c.plugin = Plugin.query.filter(Plugin.name == self.name).first()
        c.role = "Runtime"
    c.args = dict()
    c.args['src'] = form_data['src']
    c.args['dst'] = form_data['dst']
    c.args['excl'] = form_data['excl']
    c.args['opts'] = form_data['opts']
    # Commit it.
    try:
        if c.id is None:
            db.session.add(c)
        db.session.commit()
    except SQLAlchemyError as e:
        self.logger.error(e)
        return False
    return True

# Takes form data and updates the config object's args property with a 'mkrt' field.
def _make_realtime(self, form_data):
    self.logger.debug(form_data)
    conf_id = int(form_data['conf_id'])
    c = Config.query.get(conf_id)
    if c is None:
        return False
    new_args = copy.deepcopy(c.args)
    if form_data.get('enabled', False):
        new_args['mkrt'] = True
        self.add_watch(new_args)
    else:
        new_args['mkrt'] = False
        self.remove_watch(new_args)
    c.args = new_args
    try:
        db.session.commit()
    except SQLAlchemyError as e:
        self.logger.error(e)
        return False
    return True

def delete_tag(self, tag_name, sha256):
    session = self.Session()
    try:
        # First remove the tag from the sample.
        malware_entry = session.query(Malware).filter(Malware.sha256 == sha256).first()
        tag = session.query(Tag).filter(Tag.tag == tag_name).first()
        try:
            malware_entry.tag.remove(tag)
            session.commit()
        except Exception:
            print_error("Tag {0} does not exist for this sample".format(tag_name))
        # If the tag has no remaining entries, drop it from the database.
        count = len(self.find('tag', tag_name))
        if count == 0:
            session.delete(tag)
            session.commit()
            print_warning("Tag {0} has no additional entries, dropping it from the database".format(tag_name))
    except SQLAlchemyError as e:
        print_error("Unable to delete tag: {0}".format(e))
        session.rollback()
    finally:
        session.close()

def rename(self, id, name):
    session = self.Session()
    if not name:
        return False
    try:
        malware = session.query(Malware).get(id)
        if not malware:
            print_error("The opened file doesn't appear to be in the database, have you stored it yet?")
            return False
        malware.name = name
        session.commit()
    except SQLAlchemyError as e:
        print_error("Unable to rename file: {}".format(e))
        session.rollback()
        return False
    finally:
        session.close()
    return True

def delete_file(self, id):
    session = self.Session()
    try:
        malware = session.query(Malware).get(id)
        if not malware:
            print_error("The opened file doesn't appear to be in the database, have you stored it yet?")
            return False
        session.delete(malware)
        session.commit()
    except SQLAlchemyError as e:
        print_error("Unable to delete file: {0}".format(e))
        session.rollback()
        return False
    finally:
        session.close()
    return True

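The three methods above all follow the same pattern: open a session, commit or roll back, and close in finally. Below is a minimal sketch of the standard SQLAlchemy session-scope context manager that would consolidate that bookkeeping; it is an editorial suggestion, assuming self.Session is a configured sessionmaker as the snippets imply.

from contextlib import contextmanager

from sqlalchemy.exc import SQLAlchemyError


@contextmanager
def session_scope(session_factory):
    """Commit on success, roll back on SQLAlchemy errors, always close the session."""
    session = session_factory()
    try:
        yield session
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()


# Hypothetical rewrite of delete_file() using the helper:
# with session_scope(self.Session) as session:
#     malware = session.query(Malware).get(file_id)
#     if malware:
#         session.delete(malware)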