def remote_create(self, command_line):
    try:
        self.callx('create', command_line)
    except CalledProcessError as cpe:
        if cpe.returncode == 1:
            log.debug('remote create finished (warning)')
            with transaction.manager as txn:
                self.job.borg_warning = True
                txn.note('Set borg warning flag on job %s' % self.job.id)
        else:
            raise
    else:
        log.debug('remote create finished (success)')
    finally:
        transaction.begin()
        self.job.update_state(BackupJob.State.client_in_progress, BackupJob.State.client_done)

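# A minimal stand-alone sketch (illustrative, not taken from the project above)
# of the transaction-package API these examples rely on: an explicit begin(),
# a note attached to the transaction, and the manager used as a context
# manager, which commits on a clean exit and aborts on an exception.
import transaction

txn = transaction.begin()                   # start a fresh transaction
txn.note('explicit begin/commit example')   # metadata stored with the commit
transaction.commit()

with transaction.manager as txn:            # commits on success, aborts on error
    txn.note('context-manager example')
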
def process_oils(session_class):
    session = session_class()
    record_ids = [r.adios_oil_id for r in session.query(ImportedRecord)]
    session.close()

    logger.info('Adding Oil objects...')
    for record_id in record_ids:
        # Note: committing our transaction for every record slows the
        #       import job significantly.  But this is necessary if we
        #       want the option of rejecting oil records.
        session = session_class()
        transaction.begin()
        rec = (session.query(ImportedRecord)
               .filter(ImportedRecord.adios_oil_id == record_id)
               .one())
        try:
            add_oil(rec)
            transaction.commit()
        except OilRejected as e:
            logger.warning(repr(e))
            transaction.abort()

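# A self-contained sketch (an assumption, not code from the project above) of
# the same per-record commit/abort idea, wired with SQLAlchemy 1.4+ and
# zope.sqlalchemy so that transaction.commit()/abort() actually drives the
# database session; the Record model and the list of names are hypothetical.
import transaction
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker
from zope.sqlalchemy import register

Base = declarative_base()


class Record(Base):
    __tablename__ = 'records'
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
Session = scoped_session(sessionmaker(bind=engine))
register(Session)   # join the session to the global transaction manager

for name in ['ok-1', '', 'ok-2']:       # '' stands in for a rejected record
    transaction.begin()
    try:
        if not name:
            raise ValueError('rejected record')
        Session.add(Record(name=name))
        transaction.commit()            # one commit per record, as above
    except ValueError:
        transaction.abort()             # only this record's work is discarded
Session.remove()
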
def transaction(self, principal_id):
    if principal_id:
        transaction.begin()
        login_principal(get_principal(principal_id))
    try:
        yield
    except:
        transaction.abort()
        raise
    else:
        try:
            transaction.commit()
        except ZODB.POSException.ConflictError:
            log.warning('Conflict while publishing', exc_info=True)
            transaction.abort()
            raise
    finally:
        zope.security.management.endInteraction()

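# A stand-alone sketch (not the method above) of the same commit-on-success /
# abort-on-error shape, written as a contextlib context manager; the original
# generator is presumably wrapped with a similar decorator.
import contextlib
import transaction


@contextlib.contextmanager
def managed_txn():
    transaction.begin()
    try:
        yield
    except BaseException:
        transaction.abort()
        raise
    else:
        transaction.commit()


with managed_txn():
    pass    # work that should be committed atomically goes here
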
def test_transaction_record_rollback(session):
    import transaction
    import uuid
    from snovault.storage import Resource
    rid = uuid.uuid4()
    resource = Resource('test_item', {'': {}}, rid=rid)
    session.add(resource)
    transaction.commit()
    transaction.begin()
    sp = session.begin_nested()
    resource = Resource('test_item', {'': {}}, rid=rid)
    session.add(resource)
    with pytest.raises(Exception):
        sp.commit()
    sp.rollback()
    resource = Resource('test_item', {'': {}})
    session.add(resource)
    transaction.commit()

def conn(engine_url):
    from snovault.app import configure_engine
    from snovault.storage import Base

    engine_settings = {
        'sqlalchemy.url': engine_url,
    }

    engine = configure_engine(engine_settings)
    conn = engine.connect()
    tx = conn.begin()
    try:
        Base.metadata.create_all(bind=conn)
        yield conn
    finally:
        tx.rollback()
        conn.close()
        engine.dispose()

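# Presumably a pytest yield-fixture (the decorator is not shown in the
# snippet): the schema is created inside a single connection-level transaction
# that is rolled back at teardown, which relies on transactional DDL (as in
# PostgreSQL), so every test run starts from a clean database.
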
def idle(self):
    transaction.begin()
    hook.borgcubed_idle(apiserver=self)
    self.check_children()
    self.queue_new_jobs()
    self.check_queue()

def set_geolocation(cls, company_id, lat, lon):
    transaction.begin()
    DBSession.query(cls) \
        .filter(cls.id == company_id) \
        .update({'latitude': lat,
                 'longitude': lon,
                 'geolocation_is_valid': True})
    transaction.commit()

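# Assumptions about this helper and the similar ones below (not visible in the
# snippet): they are presumably @classmethod's on SQLAlchemy-mapped models, and
# DBSession is presumably a scoped_session registered with zope.sqlalchemy, so
# the explicit transaction.begin()/transaction.commit() pair is what flushes
# and commits the bulk UPDATE. A hypothetical call:
#
#     Company.set_geolocation(company_id=42, lat=48.85, lon=2.35)
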
def set_geolocation_is_valid(cls, company_id, is_valid):
    transaction.begin()
    DBSession.query(cls) \
        .filter(cls.id == company_id) \
        .update({'geolocation_is_valid': is_valid})
    transaction.commit()

def set_address_is_valid(cls, company_id, is_valid):
    transaction.begin()
    DBSession.query(cls) \
        .filter(cls.id == company_id) \
        .update({'address_is_valid': is_valid})
    transaction.commit()

def update_last_sync(cls, job_id, timestamp):
    transaction.begin()
    # Stamp last_sync while explicitly keeping last_modified at its current
    # value.
    DBSession.query(cls) \
        .filter(cls.id == job_id) \
        .update({'last_sync': timestamp,
                 'last_modified': cls.last_modified})
    # Bump last_modified only where it is still older than the sync timestamp.
    DBSession.query(cls) \
        .filter(cls.id == job_id) \
        .filter(cls.last_modified < timestamp) \
        .update({'last_modified': timestamp})
    transaction.commit()

def reset_last_sync(cls):
    transaction.begin()
    DBSession.query(cls) \
        .filter(cls.validated) \
        .update({'last_sync': base_time()})
    transaction.commit()

def set_geolocation(cls, offer_id, lat, lon):
    transaction.begin()
    DBSession.query(cls) \
        .filter(cls.id == offer_id) \
        .update({'latitude': lat,
                 'longitude': lon,
                 'geolocation_is_valid': True})
    transaction.commit()

def set_geolocation_is_valid(cls, offer_id, is_valid):
    transaction.begin()
    DBSession.query(cls) \
        .filter(cls.id == offer_id) \
        .update({'geolocation_is_valid': is_valid})
    transaction.commit()

def set_pushed_on_twitter(cls, offer_id, pushed_on_twitter):
    transaction.begin()
    DBSession.query(cls) \
        .filter(cls.id == offer_id) \
        .update({'pushed_on_twitter': pushed_on_twitter})
    transaction.commit()

def update_last_sync(cls, job_id, timestamp):
    transaction.begin()
    DBSession.query(cls) \
        .filter(cls.id == job_id) \
        .update({'last_sync': timestamp,
                 'last_modified': cls.last_modified})
    DBSession.query(cls) \
        .filter(cls.id == job_id) \
        .filter(cls.last_modified < timestamp) \
        .update({'last_modified': timestamp})
    transaction.commit()

def reset_last_sync(cls):
    transaction.begin()
    DBSession.query(cls) \
        .update({'last_sync': base_time()})
    transaction.commit()

def automatic_transaction_begin():
    """Starts a new transaction for every test.

    We want to start with an empty celery_session for each test.
    """
    transaction.begin()
    zope.security.management.endInteraction()

def begin(self):
    transaction.begin()

def transaction(request, external_tx, zsa_savepoints, check_constraints):
    import transaction
    transaction.begin()
    request.addfinalizer(transaction.abort)
    return transaction

def set_snapshot(xmin, snapshot_id):
    global current_xmin_snapshot_id
    if current_xmin_snapshot_id == (xmin, snapshot_id):
        return
    clear_snapshot()
    current_xmin_snapshot_id = (xmin, snapshot_id)

    while True:
        # Start a doomed transaction: it can never be committed, only aborted.
        txn = transaction.begin()
        txn.doom()
        if snapshot_id is not None:
            txn.setExtendedInfo('snapshot_id', snapshot_id)
        session = app.registry[DBSESSION]()
        connection = session.connection()
        db_xmin = connection.execute(
            "SELECT txid_snapshot_xmin(txid_current_snapshot());").scalar()
        if db_xmin >= xmin:
            break
        # The database snapshot is still behind the requested xmin; retry.
        transaction.abort()
        log.info('Waiting for xmin %r to reach %r', db_xmin, xmin)
        time.sleep(0.1)

    # Build an internal request bound to this snapshot and push it, together
    # with the registry, onto the threadlocal stack for downstream code.
    registry = app.registry
    request = app.request_factory.blank('/_indexing_pool')
    request.registry = registry
    request.datastore = 'database'
    apply_request_extensions(request)
    request.invoke_subrequest = app.invoke_subrequest
    request.root = app.root_factory(request)
    request._stats = {}
    manager.push({'request': request, 'registry': registry})

def zsa_savepoints(conn):
    """Place a savepoint at the start of the zope transaction.

    This means failed requests roll back to the db state at the point they
    began, rather than to the state at the start of the test.
    """
    from transaction.interfaces import ISynchronizer
    from zope.interface import implementer

    @implementer(ISynchronizer)
    class Savepoints(object):
        def __init__(self, conn):
            self.conn = conn
            self.sp = None
            self.state = None

        def beforeCompletion(self, transaction):
            pass

        def afterCompletion(self, transaction):
            # the txn may be aborted a second time in manager.begin();
            # there is no savepoint left to release then
            if self.sp is None:
                return
            if self.state == 'commit':
                self.state = 'completion'
                self.sp.commit()
            else:
                self.state = 'abort'
                self.sp.rollback()
            self.sp = None
            self.state = 'done'

        def newTransaction(self, transaction):
            self.state = 'new'
            self.sp = self.conn.begin_nested()
            self.state = 'begun'
            transaction.addBeforeCommitHook(self._registerCommit)

        def _registerCommit(self):
            self.state = 'commit'

    zsa_savepoints = Savepoints(conn)

    import transaction
    transaction.manager.registerSynch(zsa_savepoints)

    yield zsa_savepoints

    transaction.manager.unregisterSynch(zsa_savepoints)

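# A minimal stand-alone synchronizer (an illustrative sketch, not the fixture
# above) showing the three ISynchronizer hooks the Savepoints class plugs
# into: the transaction manager calls newTransaction() from begin(), and
# beforeCompletion()/afterCompletion() around every commit or abort.
import transaction
from transaction.interfaces import ISynchronizer
from zope.interface import implementer


@implementer(ISynchronizer)
class LoggingSynch(object):
    def newTransaction(self, txn):
        print('new transaction', txn)

    def beforeCompletion(self, txn):
        print('about to complete', txn)

    def afterCompletion(self, txn):
        print('completed', txn)


synch = LoggingSynch()
transaction.manager.registerSynch(synch)
try:
    transaction.begin()     # -> newTransaction
    transaction.commit()    # -> beforeCompletion, then afterCompletion
finally:
    transaction.manager.unregisterSynch(synch)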