def get_metrics(self, unit_type, sub_type):
    """
    Handler to get supported metrics for a unit_type.

    :param unit_type: unit type whose metrics are requested (used for logging)
    :param sub_type: sub type used to filter the metrics query
    :return: JSON string of the metric rows, or an error string on DB failure
    """
    log.info("Request received to get supported metrics for unit_type %s and subtype %s" % (unit_type, sub_type))
    conn = sql_helper.engine.connect()
    try:
        # Fix: pass sub_type as a bound parameter instead of interpolating it
        # into the SQL text — the old `sql_query % '"'+params+'"'` was both an
        # SQL-injection vector and an operator-precedence bug
        # ((sql_query % '"') + params + '"').
        result = conn.execute(query_list.GET_METRICS, (sub_type,))
        rows = result.fetchall()
    except SQLAlchemyError:
        # Fix: the old backslash continuation produced "databaseexception".
        return "Unable to get the metrics list because of database exception"
    finally:
        # Fix: the connection was never released.
        conn.close()
    log.info("Request to get supported metrics for %s successfully processed" % sub_type)
    return json.dumps(dict(rows))
# Python SQLAlchemyError() class — example source code
async def consumer(self):
    """
    Consume grouped events from the internal queue and persist them.

    Runs forever: each queue item is a (source, events, last_id) tuple.
    Events (if any) are written and the source's last-seen id is updated
    inside a single transaction. Database errors are logged and swallowed
    so the consumer keeps draining the queue.

    Fix: the coroutine was declared with a plain ``def`` while using
    ``await``, which is a SyntaxError — it must be ``async def``.
    """
    self.logger.info(f"{self.name}: consumer coroutine started!")
    while True:
        source, events, last_id = await self.queue.get()
        self.logger.info(f"{self.name}: got group of events from queue")
        try:
            with self.sql.transaction() as trans:
                if events is not None:
                    await self.write(trans, source, events)
                await self.update(trans, source, last_id)
        except SQLAlchemyError:
            self.logger.error(f"{self.name}: error during event write", exc_info=1)
        # Mark the item done even on failure so queue.join() can complete.
        self.queue.task_done()
def get_pdns_record(self, dn=None, ip=None,
                    start='2016-01-01', end=None):
    """
    Query passive-DNS records by domain name or by answer IP.

    :param dn: domain substring matched against the query column
    :param ip: IP substring matched against the answer column
    :param start: earliest first_seen date (inclusive)
    :param end: latest last_seen date (inclusive); defaults to today
    :return: query of matching Pdns_records, or None when neither dn nor
        ip is given or a database error occurs
    """
    # Fix: the old default `end=datetime.date.today()` was evaluated once at
    # import time, freezing the end date for the lifetime of the process.
    if end is None:
        end = datetime.date.today()
    session = self.Session()
    # Fix: `result` was unbound (NameError) when neither dn nor ip was given.
    result = None
    try:
        if dn:
            result = session.query(Pdns_records).filter(
                Pdns_records.query.like('%{}%'.format(dn)),
                Pdns_records.first_seen >= start,
                Pdns_records.last_seen <= '{} 23:59:59'.format(end))
        elif ip:
            # Fix: '%P{%' was a typo — ip was never interpolated.
            result = session.query(Pdns_records).filter(
                Pdns_records.answer.like('%{}%'.format(ip)),
                Pdns_records.first_seen >= start,
                Pdns_records.last_seen <= '{} 23:59:59'.format(end))
        return result
    except SQLAlchemyError as e:
        logging.info((str(e)))
    finally:
        # NOTE(review): the session is closed before the caller consumes the
        # lazy query object — confirm callers materialize results first.
        session.close()
def gen_commit_deco(DBSession):
    """Build a decorator that commits the DB session once per outermost call.

    A thread/context-local flag on ``db_ctx`` marks that a commit is already
    registered, so when decorated functions call each other only the
    outermost invocation flushes/commits (rolling back on error) and closes
    the session.
    """
    def wrap(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Outermost call iff no enclosing decorated call set the flag.
            is_outermost = not getattr(db_ctx, 'register_db_commit', False)
            if is_outermost:
                db_ctx.register_db_commit = True
            result = func(*args, **kwargs)
            if is_outermost:
                try:
                    DBSession().flush()
                    DBSession().commit()
                except SQLAlchemyError:
                    DBSession().rollback()
                    raise
                finally:
                    DBSession().close()
                    # Reset so the next top-level call starts fresh.
                    delattr(db_ctx, 'register_db_commit')
            return result
        return wrapper
    return wrap
def __init__(self, db_url):
    """
    Initialize the Peekaboo database handler.

    :param db_url: An RFC 1738 URL that points to the database.
    :raises PeekabooDatabaseError: if the database connection cannot be
        established.
    """
    self.__engine = create_engine(db_url)
    self.__db_con = None
    session_factory = sessionmaker(bind=self.__engine)
    # Thread-local sessions bound to this engine.
    self.__Session = scoped_session(session_factory)
    self.__lock = threading.RLock()
    try:
        self.__db_con = self.__engine.connect()
    except SQLAlchemyError as e:
        raise PeekabooDatabaseError(
            'Unable to connect to the database: %s' % e
        )
    # First run (no '_meta' table yet): create the schema. Otherwise drop
    # stale 'inProgress' entries left over from a previous, possibly
    # crashed, run.
    if not self.__db_con.dialect.has_table(self.__engine, '_meta'):
        self._init_db()
        logger.debug('Database schema created.')
    else:
        self.clear_in_progress()
def clear_in_progress(self):
    """ Remove all samples with the result 'inProgress'. """
    session = self.__Session()
    # Resolve the 'inProgress' analysis result row once.
    in_progress = PeekabooDatabase.__get(
        session, AnalysisResult, name='inProgress'
    )
    stale_samples = session.query(SampleInfo).filter_by(
        result=in_progress
    ).all()
    # Drop every journal entry that points at a stale sample.
    for stale in stale_samples:
        session.query(AnalysisJournal).filter_by(sample=stale).delete()
    try:
        session.commit()
        logger.debug('Cleared the database from "inProgress" entries.')
    except SQLAlchemyError as e:
        session.rollback()
        raise PeekabooDatabaseError(
            'Unable to clear the database from "inProgress" entries: %s' % e
        )
    finally:
        session.close()
def delete_search(search_id):
    """Delete one of the current user's saved searches; JSON response."""
    if current_user.is_anonymous:
        return json_failed("You need to be logged in to delete a search.")
    saved = SavedSearch.query.filter_by(id=search_id).first()
    # Reject unknown ids and searches belonging to somebody else alike.
    if not saved or saved.owner != current_user:
        return json_failed('Invalid search id. Either the given search id does'
                           ' not exist, or it does not belong to you.')
    try:
        db.session.delete(saved)
        db.session.commit()
        payload = {
            'status': 'success',
            'search_id': saved.id,
            'search_terms': saved.search_terms,
        }
        return jsonify(payload)
    except exc.SQLAlchemyError:
        # TODO log this
        return json_failed('Something went wrong while deleting your search '
                           'from our database.')
def delete_email_address():
    """Remove the current user's stored email address and (best effort)
    revoke the Facebook email permission for the account."""
    if current_user.is_anonymous:
        flash('You need to be logged in to do that')
        return redirect(url_for('index'))
    try:
        current_user.email = None
        db.session.commit()
    except exc.SQLAlchemyError:
        # TODO log this
        flash('Something went wrong while deleting your email from our database.')
        db.session.rollback()
    oauth = OAuthSignIn.get_provider('facebook')
    # Strip out the 'facebook$' at the start of the id.
    # Fix: raw string so '\d' is a regex digit class, not a (deprecated)
    # string escape sequence.
    user_id = re.findall(r'\d+', current_user.social_id)[0]
    permission_revoked = oauth.revoke_email_permission(user_id)
    if not permission_revoked:
        flash('There was a problem giving up the permission to access your email address. '
              'It may be re-added to your account here the next time you sign in. '
              'To permanently remove it, please use your privacy settings in Facebook.')
    return redirect(url_for('index'))
def import_channel_data(self):
    """Import every content model's rows via its schema mapping, committing
    once at the end; roll back and re-raise on any database error."""
    unflushed_rows = 0
    try:
        for model in self.content_models:
            mapping = self.schema_mapping.get(model, {})
            per_row = self.generate_row_mapper(mapping.get('per_row'))
            per_table = self.generate_table_mapper(mapping.get('per_table'))
            logging.info('Importing {model} data'.format(model=model.__name__))
            unflushed_rows = self.table_import(
                model, per_row, per_table, unflushed_rows)
        self.destination.session.commit()
    except SQLAlchemyError as e:
        # Rollback the transaction if any error occurs during the transaction
        self.destination.session.rollback()
        # Reraise the exception to prevent other errors occuring due to the non-completion
        raise e
def setup(self):
    """Configure the ProjectManager: read config options, build the DB
    engine (raising on failure), and create the backing table."""
    cfg = CONF.ProjectManager
    self.default_TTL = cfg.default_TTL
    self.default_share = cfg.default_share
    try:
        self.db_engine = create_engine(cfg.db_connection,
                                       pool_size=cfg.db_pool_size,
                                       pool_recycle=cfg.db_pool_recycle,
                                       max_overflow=cfg.db_max_overflow)
    except SQLAlchemyError as ex:
        LOG.error(ex)
        raise ex
    self.configured = False
    self.keystone_manager = self.getManager("KeystoneManager")
    self.createTable()
def _removeProject(self, project, force=False):
    """Remove a project from the in-memory map and from the database.

    :param project: project object to remove
    :param force: when True, skip the membership check
    :raises SynergyError: if the project is unknown or the delete fails
    """
    if not force:
        # Fix: `.keys()` was redundant — membership tests work on the dict.
        if project.getId() not in self.projects:
            raise SynergyError("project %s not found!" % project.getId())
    self.projects.pop(project.getId())
    connection = self.db_engine.connect()
    trans = connection.begin()
    try:
        QUERY = "delete from project where id=%s"
        connection.execute(QUERY, [project.getId()])
        trans.commit()
    except SQLAlchemyError as ex:
        trans.rollback()
        # Fix: exceptions have no .message attribute on Python 3.
        raise SynergyError(str(ex))
    finally:
        connection.close()
    LOG.info("removed project %r" % project.getName())
    self.notify(event_type="PROJECT_REMOVED", project=project)
def getUsage(self, prj_id):
    """Return the number of queue rows stored for the given project id.

    :param prj_id: project identifier
    :return: row count (0 when none)
    :raises SynergyError: wrapping any database error
    """
    result = 0
    connection = self.db_engine.connect()
    try:
        QUERY = "select count(*) from `%s` " % self.getName()
        QUERY += "where prj_id=%s"
        qresult = connection.execute(QUERY, [prj_id])
        row = qresult.fetchone()
        result = row[0]
    except SQLAlchemyError as ex:
        # Fix: exceptions have no .message attribute on Python 3.
        raise SynergyError(str(ex))
    finally:
        connection.close()
    return result
def _createTable(self):
    """Create the backing queue table (InnoDB) if it does not exist; no-op
    when no engine is configured.

    :raises SynergyError: wrapping any error raised by the DDL statement
    """
    if not self.db_engine:
        return
    TABLE = """CREATE TABLE IF NOT EXISTS `%s` (`id` BIGINT NOT NULL \
AUTO_INCREMENT PRIMARY KEY, `priority` INT DEFAULT 0, user_id CHAR(40) \
NOT NULL, prj_id CHAR(40) NOT NULL, `retry_count` INT DEFAULT 0, \
`creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, `last_update` \
TIMESTAMP NULL, `data` TEXT NOT NULL) ENGINE=InnoDB""" % self.getName()
    connection = self.db_engine.connect()
    try:
        connection.execute(TABLE)
    except Exception as ex:
        # Fix: the separate SQLAlchemyError handler was identical to this
        # one, so a single handler suffices; and exceptions have no
        # .message attribute on Python 3.
        raise SynergyError(str(ex))
    finally:
        connection.close()
def _getItemDataDB(self, item):
    """Load the JSON payload stored for *item* and attach it via setData().

    :param item: queue item exposing getId()/setData()
    :return: decoded payload, or None when item/engine is missing or no
        row exists for the item's id
    :raises SynergyError: wrapping any database error
    """
    if not item or not self.db_engine:
        return
    data = None
    connection = self.db_engine.connect()
    try:
        QUERY = "select data from `%s`" % self.getName()
        QUERY += " where id=%s"
        result = connection.execute(QUERY, [item.getId()])
        row = result.fetchone()
        # Fix: guard against a missing row — row[0] on None raised TypeError.
        if row is not None:
            data = json.loads(row[0])
    except SQLAlchemyError as ex:
        # Fix: exceptions have no .message attribute on Python 3.
        raise SynergyError(str(ex))
    finally:
        connection.close()
    item.setData(data)
    return data
def confirm_registration(username):
    """Confirm a user's registration if the posted secret code matches;
    responds 400 on unknown user or bad code, 500 on DB failure."""
    user = User.query.filter_by(username=username).first()
    secret_code = request.json.get('registrationCode')
    if user is None:
        abort(400)
    # Guard clause: wrong code — log and bail out early.
    if int(secret_code) != int(user.registration_code):
        app.logger.info(
            'User {0}: Secret code did not match.'.format(user.username)
        )
        abort(400)
    try:
        user.registration_confirmed = True
        db.session.add(user)
        db.session.commit()
    except SQLAlchemyError as ex:
        app.logger.info(
            'Could not commit user to database: {0}'.format(ex)
        )
        abort(500)
    return jsonify(user.serialize)
def get_latest_analysis_by_hash(algorithm, artifact_hash, projection=None):
    """Note: has to be called inside flask request context."""
    # Unsupported digest algorithms are rejected up front.
    if algorithm not in ('sha1', 'sha256', 'md5'):
        return None
    contains_dict = {'details': [{"artifact": True, algorithm: artifact_hash}]}
    try:
        query = (rdb.session.query(Analysis)
                 .join(WorkerResult)
                 .filter(WorkerResult.worker == 'digests')
                 .filter(WorkerResult.task_result.contains(contains_dict))
                 .order_by(Analysis.started_at.desc()))
        return query.first()
    except SQLAlchemyError:
        rdb.session.rollback()
        raise
def retrieve_worker_results(rdb, external_request_id):
    """Fetch all WorkerResult rows for an external request id, logging the
    elapsed time; returns None on no/ambiguous result, re-raises DB errors
    after rollback."""
    start = datetime.datetime.now()
    try:
        results = (rdb.session.query(WorkerResult)
                   .filter(WorkerResult.external_request_id == external_request_id)
                   .all())
    except (NoResultFound, MultipleResultsFound):
        return None
    except SQLAlchemyError:
        rdb.session.rollback()
        raise
    elapsed_seconds = (datetime.datetime.now() - start).total_seconds()
    current_app.logger.debug(
        "It took {t} seconds to retrieve "
        "all worker results for {r}.".format(t=elapsed_seconds,
                                             r=external_request_id))
    return results
def retrieve_worker_result(rdb, external_request_id, worker):
    """Fetch the single WorkerResult row for (external_request_id, worker)
    as a dict, logging the elapsed time; returns None on no/ambiguous
    result, re-raises DB errors after rollback."""
    start = datetime.datetime.now()
    try:
        result = (rdb.session.query(WorkerResult)
                  .filter(WorkerResult.external_request_id == external_request_id,
                          WorkerResult.worker == worker)
                  .one())
    except (NoResultFound, MultipleResultsFound):
        return None
    except SQLAlchemyError:
        rdb.session.rollback()
        raise
    result_dict = result.to_dict()
    elapsed_seconds = (datetime.datetime.now() - start).total_seconds()
    current_app.logger.debug(
        "It took {t} seconds to retrieve {w} "
        "worker results for {r}.".format(t=elapsed_seconds, w=worker,
                                         r=external_request_id))
    return result_dict
def spike_init():
    """Create the Spike schema and seed the default rulesets/whitelists."""
    logging.info("Initializing Spike")
    timestamp = int(time())
    app = create_app(__get_config_file())
    db.init_app(app)
    with app.app_context():
        db.create_all()
        for r in rulesets_seeds:
            logging.info("Adding ruleset: %s", r)
            rmks = "Ruleset for %s / auto-created %s" % (r, strftime("%F - %H:%M", localtime(time())))
            db.session.add(NaxsiRuleSets(r, rmks, timestamp))
        for w in whitelists_seeds:
            logging.info("Adding whitelistset: %s", w)
            # NOTE(review): the remark says "Ruleset for" even for whitelist
            # sets — looks like a copy-paste; confirm before changing the text.
            rmks = "Ruleset for %s / auto-created %s" % (w, strftime("%F - %H:%M", localtime(time())))
            db.session.add(NaxsiWhitelistSets(w, rmks, timestamp))
        try:
            db.session.commit()
        except SQLAlchemyError:
            # Fix: roll back the failed commit so the session is left usable,
            # and do not fall through to the success message.
            db.session.rollback()
            logging.error('It seems that the database was already initialized. Did you meant to run `%s run` instead?',
                          sys.argv[0])
            return
        logging.info('Spike initialization completed')
def display_edit(algorithm_id):
    """Edit an existing algorithm: GET renders the form pre-filled, a valid
    POST saves the new name/code and redirects to the settings index."""
    algorithm = Algorithm.query.filter_by(id=algorithm_id).first()
    form = CreateAlgorithmForm(request.form)
    if request.method == 'POST' and form.validate():
        name = form.name.data
        code = form.code.data
        current_app.logger.info('Edit a algorithm %s.', (name))
        algorithm.name = name
        algorithm.code = code
        try:
            db.session.commit()
            flash('Algorithm successfully edited.')
            return redirect(url_for('setting.display_index'))
        except exc.SQLAlchemyError as e:
            # Fix: roll back the failed commit so the session is usable
            # before we read the algorithm's attributes back into the form.
            db.session.rollback()
            flash('Algorithm was not edited.')
            current_app.logger.error(e)
    form.name.data = algorithm.name
    form.code.data = algorithm.code
    return render_template("setting_edit_algorithm.html", form=form)
def display_new_algorithm():
    """Create a new algorithm owned by the current user: GET renders the
    form, a valid POST persists it and redirects to the settings index."""
    form = CreateAlgorithmForm(request.form)
    if request.method == 'POST' and form.validate():
        name = form.name.data
        code = form.code.data
        current_app.logger.info('Adding a new algorithm %s.', (name))
        algorithm = Algorithm(name, current_user.id, code=code)
        try:
            db.session.add(algorithm)
            db.session.commit()
            flash('Algorithm successfully created.')
            return redirect(url_for('setting.display_index'))
        except exc.SQLAlchemyError as e:
            # Fix: roll back the failed commit so the session is left usable
            # for the rest of the request.
            db.session.rollback()
            flash('Algorithm was not created.')
            current_app.logger.error(e)
    return render_template('setting_new_algorithm.html', form=form)
def fetch(self, lock=True, machine=""):
    """Fetches a task waiting to be processed and locks it for running.
    @return: None or task
    """
    session = self.Session()
    try:
        # Build the pending-task query once; the machine filter is optional.
        pending = session.query(Task).filter_by(status=TASK_PENDING)
        if machine != "":
            pending = pending.filter_by(machine=machine)
        row = pending.order_by("priority desc, added_on").first()
        if row is None:
            return None
        if lock:
            self.set_status(task_id=row.id, status=TASK_RUNNING)
            session.refresh(row)
        return row
    except SQLAlchemyError as e:
        log.debug("Database error fetching task: {0}".format(e))
        session.rollback()
    finally:
        session.close()
def guest_start(self, task_id, name, label, manager):
    """Logs guest start.
    @param task_id: task identifier
    @param name: vm name
    @param label: vm label
    @param manager: vm manager
    @return: guest row id
    """
    session = self.Session()
    guest = Guest(name, label, manager)
    try:
        task = session.query(Task).get(task_id)
        task.guest = guest
        session.commit()
        # Refresh so the generated primary key is populated.
        session.refresh(guest)
        return guest.id
    except SQLAlchemyError as e:
        log.debug("Database error logging guest start: {0}".format(e))
        session.rollback()
        return None
    finally:
        session.close()
def guest_stop(self, guest_id):
    """Logs guest stop.
    @param guest_id: guest log entry id
    """
    session = self.Session()
    try:
        guest = session.query(Guest).get(guest_id)
        guest.shutdown_on = datetime.now()
        session.commit()
    except SQLAlchemyError as e:
        log.debug("Database error logging guest stop: {0}".format(e))
        session.rollback()
    except TypeError:
        log.warning("Data inconsistency in guests table detected, it might be a crash leftover. Continue")
        session.rollback()
    finally:
        session.close()
def get_file_types(self):
    """Get sample filetypes
    @return: A list of all available file types
    """
    session = self.Session()
    try:
        distinct_types = session.query(Sample.file_type).group_by(
            Sample.file_type)
        # One scalar per grouped row, sorted ascending.
        res = sorted(row[0] for row in distinct_types.all())
    except SQLAlchemyError as e:
        log.debug("Database error getting file_types: {0}".format(e))
        # Historical quirk preserved: errors yield 0, not an empty list.
        return 0
    finally:
        session.close()
    return res
def view_task(self, task_id, details=False):
    """Retrieve information on a task.
    @param task_id: ID of the task to query.
    @return: details on the task.
    """
    session = self.Session()
    try:
        query = session.query(Task)
        if details:
            # Eager-load the related collections in one round trip.
            query = query.options(joinedload("guest"), joinedload("errors"),
                                  joinedload("tags"))
        task = query.get(task_id)
    except SQLAlchemyError as e:
        log.debug("Database error viewing task: {0}".format(e))
        return None
    else:
        if task:
            # Detach so the task survives the session being closed.
            session.expunge(task)
        return task
    finally:
        session.close()
def delete_task(self, task_id):
    """Delete information on a task.
    @param task_id: ID of the task to query.
    @return: operation status.
    """
    session = self.Session()
    try:
        session.delete(session.query(Task).get(task_id))
        session.commit()
    except SQLAlchemyError as e:
        log.debug("Database error deleting task: {0}".format(e))
        session.rollback()
        return False
    finally:
        session.close()
    return True
def view_sample(self, sample_id):
    """Retrieve information on a sample given a sample id.
    @param sample_id: ID of the sample to query.
    @return: details on the sample used in sample: sample_id.
    """
    session = self.Session()
    try:
        sample = session.query(Sample).get(sample_id)
    except AttributeError:
        return None
    except SQLAlchemyError as e:
        log.debug("Database error viewing task: {0}".format(e))
        return None
    else:
        if sample is not None:
            # Detach so the sample survives the session being closed.
            session.expunge(sample)
        return sample
    finally:
        session.close()
def find_sample(self, md5=None, sha1=None, sha256=None):
    """Search samples by MD5, SHA1, or SHA256.
    @param md5: md5 string
    @param sha1: sha1 string
    @param sha256: sha256 string
    @return: matching sample or None
    """
    session = self.Session()
    # Fix: 'sample' was unbound (NameError) when no hash argument was
    # supplied; default to None so the method simply returns None.
    sample = None
    try:
        if md5:
            sample = session.query(Sample).filter_by(md5=md5).first()
        elif sha1:
            sample = session.query(Sample).filter_by(sha1=sha1).first()
        elif sha256:
            sample = session.query(Sample).filter_by(sha256=sha256).first()
    except SQLAlchemyError as e:
        log.debug("Database error searching sample: {0}".format(e))
        return None
    else:
        if sample:
            # Detach so the sample survives the session being closed.
            session.expunge(sample)
    finally:
        session.close()
    return sample
def prune_target(engine, target_name, target_type):
    """Delete an inactive target row; if user data references it (FK
    IntegrityError), mark it inactive instead. Other DB errors are logged
    and counted."""
    counter = 'users_purged' if target_type == 'user' else 'others_purged'
    metrics.incr(counter)
    try:
        engine.execute('''DELETE FROM `target` WHERE `name` = %s AND `type_id` = (SELECT `id` FROM `target_type` WHERE `name` = %s)''', (target_name, target_type))
        logger.info('Deleted inactive target %s', target_name)
    except IntegrityError:
        # The user has messages or some other user data which should be preserved.
        # Just mark as inactive.
        logger.info('Marking target %s inactive', target_name)
        engine.execute('''UPDATE `target` SET `active` = FALSE WHERE `name` = %s AND `type_id` = (SELECT `id` FROM `target_type` WHERE `name` = %s)''', (target_name, target_type))
    except SQLAlchemyError as e:
        logger.error('Deleting target %s failed: %s', target_name, e)
        metrics.incr('sql_errors')