def edit_view(self, request):
    client = self.client
    data = request.POST or None
    # load the persistent connection object from ZODB so its __dict__ is populated
    client.connection._p_activate()
    client_form = Client.Form(data, initial=client.__dict__)
    del client_form.fields['hostname']
    connection_form = RshClientConnection.Form(data, initial=client.connection.__dict__)
    if data and client_form.is_valid() and connection_form.is_valid():
        client._update(client_form.cleaned_data)
        client.connection._update(connection_form.cleaned_data)
        # annotate the transaction record, then persist both updates
        transaction.get().note('Edited client %s' % client.hostname)
        transaction.commit()
        return self.redirect_to()
    return self.render(request, 'core/client/edit.html', {
        'client': client,
        'client_form': client_form,
        'connection_form': connection_form,
    })
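All of the snippets on this page lean on the `transaction` package's thread-local transaction manager. As a minimal, self-contained sketch of that pattern (the `update_records`/`apply_change` names are purely illustrative, not part of any snippet here), the begin/note/commit/abort calls fit together like this:

import transaction

def update_records(records, apply_change):
    """Illustrative only: apply a change to each record inside one transaction."""
    txn = transaction.begin()                        # start a fresh transaction
    txn.note(u'Updated %d records' % len(records))   # the note is stored with the commit record
    try:
        for record in records:
            apply_change(record)
        transaction.commit()                         # commits every joined data manager (ZODB, zope.sqlalchemy, ...)
    except Exception:
        transaction.abort()                          # rolls back everything joined to the transaction
        raise

Nothing is persisted unless a data manager (a ZODB connection, a zope.sqlalchemy-registered session, ...) has joined the transaction; the calls themselves still run fine without one.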
def prune_archives(self, archives, repository):
    """
    Prune a list of (delete, archive) two-tuples, all of which must be in the same `Repository`.

    Return `Statistics`.
    """
    # TODO Maybe commit some stuff here after a while, because this can seriously take some time.
    stats = Statistics()
    with open_repository(repository) as borg_repository:
        manifest, key = Manifest.load(borg_repository)
        with Cache(borg_repository, key, manifest, lock_wait=1) as cache:
            for delete, archive in archives:
                assert archive.repository == repository
                if delete:
                    log.info('Deleting archive %s [%s]', archive.name, archive.id)
                    archive.delete(manifest, stats, cache)
                else:
                    log.info('Skipping archive %s [%s]', archive.name, archive.id)
            # Borg-level commits: persist the updated manifest and local cache
            manifest.write()
            borg_repository.commit()
            cache.commit()
    # commit the application's own transaction (separate from the Borg repository commit)
    transaction.commit()
    log.error(stats.summary.format(label='Deleted data:', stats=stats))
    return stats
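The TODO above hints at committing periodically during a long prune run. A minimal sketch of that idea with the `transaction` package; the helper name, batch size, and `on_commit` callback are assumptions for illustration, not part of the snippet above:

import transaction

def commit_every(iterable, batch_size=100, on_commit=None):
    """Illustrative helper: yield items and commit the current transaction every `batch_size` items."""
    for index, item in enumerate(iterable, start=1):
        yield item
        if index % batch_size == 0:
            transaction.commit()          # a new transaction begins implicitly on the next change
            if on_commit is not None:
                on_commit(index)

A caller could then wrap the loop source, e.g. `for delete, archive in commit_every(archives): ...`, so partial progress survives an interruption.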
def _add_completed_archive(self):
    log.debug('Saving archive metadata to database')
    archive = BorgArchive(self.repository, self._repository_key, self._manifest, self.job.archive_name, cache=self._cache)
    stats = archive.calc_stats(self._cache)
    duration = archive.ts_end - archive.ts
    ao = Archive(
        id=archive.fpr,
        repository=self.job.repository,
        name=archive.name,
        client=self.job.client,
        job=self.job,
        nfiles=stats.nfiles,
        original_size=stats.osize,
        compressed_size=stats.csize,
        deduplicated_size=stats.usize,
        duration=duration,
        timestamp=archive.ts,
        timestamp_end=archive.ts_end,
    )
    self.job.archive = ao
    transaction.get().note('Added completed archive %s for job %s' % (ao.id, self.job.id))
    transaction.commit()
    log.debug('Saved archive metadata')
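The `transaction.get().note(...)` call above stores the message in the ZODB transaction record. A small sketch of reading those notes back through the database's undo log; the `db` handle and file path are assumptions, standing in for any open `ZODB.DB` instance:

import ZODB

db = ZODB.DB('data.fs')                           # placeholder path, not the application's storage
for entry in db.undoLog(0, 20):                   # the 20 most recent transaction records
    print(entry['time'], entry['description'])    # 'description' holds the note text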
def setup_schema(command, conf, vars):
    """Place any commands to set up pyjobsweb here"""
    # Load the models
    # <websetup.websetup.schema.before.model.import>
    from pyjobsweb import model
    # <websetup.websetup.schema.after.model.import>
    # <websetup.websetup.schema.before.metadata.create_all>
    print("Creating tables")
    model.metadata.create_all(bind=config['tg.app_globals'].sa_engine)
    # <websetup.websetup.schema.after.metadata.create_all>
    transaction.commit()
    print('Initializing Migrations')
    import alembic.config
    alembic_cfg = alembic.config.Config()
    alembic_cfg.set_main_option("script_location", "migration")
    alembic_cfg.set_main_option("sqlalchemy.url", config['sqlalchemy.url'])
    import alembic.command
    alembic.command.stamp(alembic_cfg, "head")
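`transaction.commit()` only reaches the SQLAlchemy session because TurboGears quickstart projects tie the session to the `transaction` manager (historically via `ZopeTransactionExtension`). A rough sketch of the equivalent wiring with the current `zope.sqlalchemy` API; the engine URL and session name are placeholders, not pyjobsweb configuration:

import transaction
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import register

engine = create_engine('sqlite:///example.db')           # placeholder URL
DBSession = scoped_session(sessionmaker(bind=engine))
register(DBSession)                                       # join the session to the transaction manager

# After this, commits are driven by the transaction package:
#   DBSession.add(obj); transaction.commit()   -> flush + COMMIT
#   transaction.abort()                        -> ROLLBACK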
def process_oils(session_class):
    session = session_class()
    record_ids = [r.adios_oil_id for r in session.query(ImportedRecord)]
    session.close()

    logger.info('Adding Oil objects...')
    for record_id in record_ids:
        # Note: committing our transaction for every record slows the
        #       import job significantly.  But this is necessary if we
        #       want the option of rejecting oil records.
        session = session_class()
        transaction.begin()
        rec = (session.query(ImportedRecord)
               .filter(ImportedRecord.adios_oil_id == record_id)
               .one())
        try:
            add_oil(rec)
            transaction.commit()
        except OilRejected as e:
            logger.warning(repr(e))
            transaction.abort()
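The per-record commit above is what makes rejection possible. As a hedged alternative, a sketch using savepoints keeps a single transaction for the whole run and rolls back only the rejected record; this assumes the session's data manager supports savepoints (zope.sqlalchemy's does), and the function name is hypothetical:

import transaction

def process_oils_batched(session_class, record_ids):
    """Illustrative variant: one transaction for the run, one savepoint per record."""
    session = session_class()
    transaction.begin()
    for record_id in record_ids:
        sp = transaction.savepoint()      # cheap rollback point for just this record
        rec = (session.query(ImportedRecord)
               .filter(ImportedRecord.adios_oil_id == record_id)
               .one())
        try:
            add_oil(rec)
        except OilRejected as e:
            logger.warning(repr(e))
            sp.rollback()                 # undo this record only, keep the rest of the batch
    transaction.commit()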
def add_oil_object(session, file_columns, row_data):
    file_columns = [slugify_filename(c).lower()
                    for c in file_columns]
    row_dict = dict(zip(file_columns, row_data))
    fix_name(row_dict)
    fix_pour_point(row_dict)
    fix_flash_point(row_dict)
    fix_preferred_oils(row_dict)
    oil = ImportedRecord(**row_dict)
    add_synonyms(session, oil, row_dict)
    add_densities(oil, row_dict)
    add_kinematic_viscosities(oil, row_dict)
    add_dynamic_viscosities(oil, row_dict)
    add_distillation_cuts(oil, row_dict)
    add_toxicity_effective_concentrations(oil, row_dict)
    add_toxicity_lethal_concentrations(oil, row_dict)
    session.add(oil)
    transaction.commit()
def link_crude_medium_oils(session):
    # our category
    top, categories = get_categories_by_names(session, 'Crude',
                                               ('Medium',))
    oils = get_oils_by_api(session, 'Crude',
                           api_min=22.3, api_max=31.1)
    count = 0
    for o in oils:
        o.categories.extend(categories)
        count += 1
    logger.info('{0} oils added to {1} -> {2}.'
                .format(count, top.name, [n.name for n in categories]))
    transaction.commit()
def link_all_other_oils(session):
    '''
    Category Name:
    - Other
    Sample Oils:
    - Catalytic Cracked Slurry Oil
    - Fluid Catalytic Cracker Medium Cycle Oil
    Criteria:
    - Any oils that fell outside all the other Category Criteria
    '''
    _top, categories = get_categories_by_names(session, 'Other',
                                                ('Other',))
    oils = (session.query(Oil)
            .filter(Oil.categories == None)  # '== None' selects oils with no linked categories
            .all())
    count = 0
    for o in oils:
        o.categories.extend(categories)
        count += 1
    logger.info('{0} oils added to {1}.'
                .format(count, [n.name for n in categories]))
    transaction.commit()
def __disable_textversions(statement_uid, author_uid):
    """
    Disables the textversions of the given statement

    :param statement_uid: Statement.uid
    :param author_uid: User.uid
    :return: None
    """
    db_textversion = DBDiscussionSession.query(TextVersion).filter(and_(TextVersion.statement_uid == statement_uid,
                                                                        TextVersion.author_uid == author_uid)).all()  # TODO #432
    for textversion in db_textversion:
        logger('QueryHelper', '__disable_textversions', str(textversion.uid))
        textversion.set_disable(True)
        DBDiscussionSession.add(textversion)
    DBDiscussionSession.flush()
    transaction.commit()
def __transfer_textversion_to_new_author(statement_uid, old_author_uid, new_author_uid):
    """
    Sets a new author for the given textversion and creates a row in RevokedContentHistory

    :param statement_uid: Statement.uid
    :param old_author_uid: User.uid
    :param new_author_uid: User.uid
    :return: Boolean
    """
    logger('QueryHelper', '__transfer_textversion_to_new_author', 'Textversion of {} will change author from {} to {}'.format(statement_uid, old_author_uid, new_author_uid))
    db_textversion = DBDiscussionSession.query(TextVersion).filter(and_(TextVersion.statement_uid == statement_uid,
                                                                        TextVersion.author_uid == old_author_uid)).all()  # TODO #432
    if not db_textversion:
        return False
    for textversion in db_textversion:
        textversion.author_uid = new_author_uid
        DBDiscussionSession.add(textversion)
        DBDiscussionSession.add(RevokedContentHistory(old_author_uid, new_author_uid, textversion_uid=textversion.uid))
    DBDiscussionSession.flush()
    transaction.commit()
    return True
def set_reference(reference, url, nickname, statement_uid, issue_uid):
    """
    Creates a new reference

    :param reference: Text of the reference
    :param url: source URL of the reference
    :param nickname: nickname of the user
    :param statement_uid: statement uid of the linked statement
    :param issue_uid: current issue uid
    :return: Boolean
    """
    db_user = DBDiscussionSession.query(User).filter_by(nickname=str(nickname)).first()
    if not db_user:
        return False
    parsed_url = urlparse(url)
    host = parsed_url.scheme + '://' + parsed_url.netloc
    path = parsed_url.path
    author_uid = db_user.uid
    DBDiscussionSession.add(StatementReferences(reference, host, path, author_uid, statement_uid, issue_uid))
    DBDiscussionSession.flush()
    transaction.commit()
    return True
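A minimal usage sketch for `set_reference`; every value below is a placeholder for illustration, not real application data:

# Hypothetical call: link statement 42 in issue 1 to an external article
ok = set_reference('Example article title',
                   'https://example.com/news/article',
                   'some_nickname',
                   42,    # statement_uid (placeholder)
                   1)     # issue_uid (placeholder)
# ok is False when the nickname is unknown; nothing is stored in that case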
def save_issue_uid(issue_uid, nickname):
    """
    Saves the Issue.uid for a user

    :param issue_uid: Issue.uid
    :param nickname: User.nickname
    :return: Boolean
    """
    db_user = DBDiscussionSession.query(User).filter_by(nickname=nickname).first()
    if not db_user:
        return False
    db_settings = DBDiscussionSession.query(Settings).get(db_user.uid)
    if not db_settings:
        return False
    db_settings.set_last_topic_uid(issue_uid)
    transaction.commit()
    return True
def add_click_for_statement(statement_uid, nickname, supportive):
    """
    Adds a click for the given statement

    :param statement_uid: Statement.uid
    :param nickname: User.nickname
    :param supportive: boolean
    :return: Boolean
    """
    logger('VotingHelper', 'add_click_for_statement', 'increasing {} vote for statement {}'.format('up' if supportive else 'down', str(statement_uid)))
    if not is_integer(statement_uid):
        return False
    db_statement = DBDiscussionSession.query(Statement).get(statement_uid)
    db_user = DBDiscussionSession.query(User).filter_by(nickname=str(nickname)).first()
    if not db_user or not db_statement:
        return False
    __click_statement(db_statement, db_user, supportive)
    __statement_seen_by_user(db_user, statement_uid)
    transaction.commit()
    return True
def add_seen_statement(statement_uid, db_user):
    """
    Adds the uid of the statement into the seen_by list, mapped with the given user uid

    :param db_user: current user
    :param statement_uid: uid of the statement
    :return: Boolean
    """
    if not is_integer(statement_uid) or not isinstance(db_user, User):
        return False
    logger('VotingHelper', 'add_seen_statement', 'statement ' + str(statement_uid) + ', for user ' + str(db_user.uid))
    val = __statement_seen_by_user(db_user, statement_uid)
    if val:
        transaction.commit()
    return val
def test_update_projects(self):
    """testing update_projects() is working properly
    """
    # create a new Project (with Python)
    from stalker import db, Project
    new_project = Project(
        name='New Project',
        code='NP',
        repositories=[self.test_repo]
    )
    db.DBSession.add(new_project)
    db.DBSession.commit()

    from stalker_pyramid.testing import DummyRequest, DummyMultiDict
    request = DummyRequest()
    request.matchdict['id'] = self.test_user1.id

    # patch get_logged_in_user
    self.patch_logged_in_user(request)

    # and assign it to the new user (with the RESTful API)
    request.method = 'POST'
    request.params = DummyMultiDict()
    request.params['project_id[]'] = [self.test_project1.id,
                                      new_project.id]
    request.POST = request.params

    user_view = user.UserViews(request)
    response = user_view.update_projects()

    # check the user projects
    from stalker import User
    test_user1_db = User.query.get(self.test_user1.id)
    self.assertEqual(
        sorted(test_user1_db.projects),
        sorted([self.test_project1, new_project])
    )
def test_get_vacations_view_is_working_properly(self):
    """testing if GET: /api/users/{id}/vacations view is working properly
    """
    from stalker import db, Vacation
    import datetime
    vac1 = Vacation(
        user=self.test_user1,
        start=datetime.datetime(2016, 4, 24, 0, 0),
        end=datetime.datetime(2016, 4, 28, 0, 0)
    )
    vac2 = Vacation(
        user=self.test_user1,
        start=datetime.datetime(2016, 7, 1, 0, 0),
        end=datetime.datetime(2016, 7, 8, 0, 0)
    )
    db.DBSession.add_all([vac1, vac2])
    db.DBSession.flush()

    import transaction
    transaction.commit()

    from stalker import User
    user1 = User.query.filter(User.login == self.test_user1.login).first()

    response = self.test_app.get(
        '/api/users/%s/vacations' % self.test_user1.id
    )
    self.assertEqual(
        sorted(response.json_body),
        sorted([
            {
                'id': v.id,
                '$ref': '/api/vacations/%s' % v.id,
                'name': v.name,
                'entity_type': v.entity_type
            } for v in [user1.vacations[0], user1.vacations[1]]
        ])
    )
# TASKS