def create_tenant(name, slug, extra_data, domains=None, user=None):
    """Create a Tenant, optional Sites for its domains, and an initial user link.

    Fix: the original used a mutable default argument (``domains=[]``), a
    classic Python pitfall; ``None`` is now the default and is normalized
    inside the function, which is backward compatible for all callers.

    :param name: human-readable tenant name (also used as each Site's name).
    :param slug: unique tenant slug.
    :param extra_data: extra data stored verbatim on the tenant.
    :param domains: optional iterable of domain names; one Site per domain.
    :param user: optional user that gets a relationship in the first default group.
    :returns: the created ``Tenant`` instance.
    """
    from shared_schema_tenants.models import Tenant
    with transaction.atomic():
        tenant = Tenant.objects.create(
            name=name, slug=slug, extra_data=extra_data)
        for domain in (domains or []):
            site = Site.objects.create(name=name, domain=domain)
            tenant.tenant_sites.create(site=site)
        if user:
            rel = tenant.relationships.create(user=user)
            rel.groups.add(create_default_tenant_groups()[0])
    return tenant
Python example source code using transaction.atomic()
def update_request_states_for_window_expiration():
    '''Update the state of all requests and user_requests to WINDOW_EXPIRED if their last window has passed'''
    now = timezone.now()
    states_changed = False
    # Walk every non-terminal user_request; a PENDING child request whose last
    # observing window has already closed can never run, so it is expired.
    for user_request in UserRequest.objects.exclude(state__in=TERMINAL_STATES):
        request_states_changed = False
        for request in user_request.requests.filter(state='PENDING').prefetch_related('windows'):
            if request.max_window_time < now:
                logger.info('Expiring request %s', request.id, extra={'tags': {'request_num': request.id}})
                # Re-fetch under a row lock and re-check the state so a
                # concurrent transition cannot be clobbered.
                with transaction.atomic():
                    req = Request.objects.select_for_update().get(pk=request.id)
                    if req.state == 'PENDING':
                        req.state = 'WINDOW_EXPIRED'
                        states_changed = True
                        request_states_changed = True
                        req.save()
        if request_states_changed:
            # Propagate the child-request changes up to the parent user_request.
            update_user_request_state(user_request)
    return states_changed
def _rename(self, apps, schema_editor, old_model, new_model):
    """Rename the ContentType row for ``self.app_label``/``old_model`` to ``new_model``.

    No-op when the router disallows migrating ContentType on this database
    or when no content type row exists for the old model name.
    """
    ContentType = apps.get_model('contenttypes', 'ContentType')
    db = schema_editor.connection.alias
    if not router.allow_migrate_model(db, ContentType):
        return
    try:
        content_type = ContentType.objects.db_manager(db).get_by_natural_key(self.app_label, old_model)
    except ContentType.DoesNotExist:
        # Nothing to rename.
        pass
    else:
        content_type.model = new_model
        try:
            with transaction.atomic(using=db):
                content_type.save(update_fields={'model'})
        except IntegrityError:
            # Gracefully fallback if a stale content type causes a
            # conflict as remove_stale_contenttypes will take care of
            # asking the user what should be done next.
            content_type.model = old_model
        else:
            # Clear the cache as the `get_by_natual_key()` call will cache
            # the renamed ContentType instance by its old model name.
            ContentType.objects.clear_cache()
def update_heartbeat(self, hostname, heartbeat, update_freq):
    """Record a worker heartbeat, throttled to one write per ``update_freq`` seconds."""
    with transaction.atomic():
        cutoff = Now() - timedelta(seconds=update_freq)
        fresh = self.filter(
            hostname=hostname,
            last_update__gte=cutoff,
        )
        if fresh.exists():
            # A row was already touched within the window: reuse it, no write.
            return fresh.get()
        # No recent update: lock (or create) the worker row and stamp it.
        worker, _ = self.select_for_update_or_create(
            hostname=hostname,
            defaults={'last_heartbeat': heartbeat},
        )
        return worker
def update_state(self, state, task_id, defaults):
    """Insert or update the task row for ``task_id`` under a row lock.

    ``defaults`` carries the candidate field values; when the incoming
    ``state`` would move the task backwards in the state precedence order,
    fields listed in the merge rules are protected from being overwritten.
    """
    with transaction.atomic():
        obj, created = self.select_for_update_or_create(
            task_id=task_id,
            defaults=defaults,
        )
        if created:
            # A fresh row already carries ``defaults``; nothing to merge.
            return obj
        if states.state(state) < states.state(obj.state):
            # Out-of-order event: keep the protected fields of the newer state.
            keep = Task.merge_rules[states.RECEIVED]
        else:
            keep = {}
        for key, value in defaults.items():
            if key not in keep:
                setattr(obj, key, value)
        obj.save(update_fields=tuple(defaults.keys()))
        return obj
def handle(self, host, user, password, no_progress, *args, **options):
    """Import city halls tagged ``7`` from the remote API, page by page.

    Fix: the original used ``with transaction.atomic() and tqdm(...)``.
    ``and`` evaluates ``atomic()`` but returns only its right operand, so
    only ``tqdm`` was ever entered and the import ran WITHOUT a transaction.
    Both context managers are now entered via the comma form.
    """
    self.s = requests.Session()
    self.s.auth = (user, password)
    self.host = host
    self.updated, self.inserted, self.errored, self.skipped = 0, 0, 0, 0
    count = self.get('institutions/?tags=7')['count']
    with transaction.atomic(), tqdm(total=count) as t:
        page_num = int(ceil(count / self.PER_PAGE))
        for page in range(1, page_num + 1):
            t.set_description("Page {} of {}".format(page, page_num))
            result = self.get('institutions/?tags=7&page={}'.format(page))
            for row in result['results']:
                self.update_row(row)
                t.update(1)
    self.stdout.write(
        "Processed {} city halls, which {} updated, {} skipped and {} inserted. but {} errored.".
        format(self.updated + self.inserted,
               self.updated,
               self.skipped,
               self.inserted,
               self.errored))
    total_count = CityHall.objects.count()
    self.stdout.write("There is {} city halls in total".format(total_count))
def handle(self, no_progress, update, *args, **options):
    """Print REGON matches per city hall; widen to the parent area when none found."""
    standard = try_extended = extended = 0
    with transaction.atomic():
        for cityhall in self.get_iter(self.get_queryset(update), no_progress):
            matches = REGON.objects.filter(regonjst__jst=cityhall.original_terc).exclude(data=None).order_by('name').all()
            self.stdout.write(cityhall.original_name)
            for entry in matches:
                entry_no = self.get_regon(entry.data)
                self.stdout.write("** {} - {}".format(normalize(entry.name), entry_no))
            if matches:
                standard += 1
            else:
                # No direct hit: search the whole parent administrative area.
                area_units = JednostkaAdministracyjna.objects.area(cityhall.original_terc.parent).all()
                wider = REGON.objects.filter(regonjst__jst__in=area_units).exclude(data=None).order_by('name').all()
                try_extended += 1
                if len(wider) < 20:
                    extended += 1
                    for entry in wider:
                        entry_no = self.get_regon(entry.data)
                        self.stdout.write("**** {} - {}".format(normalize(entry.name), entry_no))
            self.stdout.write("\n")
    print("Standard found {} time, extended {} times, no found {} times".format(standard, try_extended, extended))
def handle(self, comment, no_progress, dry_run, institutions_id, *args, **options):
    """Set each institution's name to the longer of its RESP and REGON names."""
    self.updated, self.skipped, self.errored = 0, 0, 0
    with transaction.atomic():
        for institution in self.get_iter(self.get_queryset(institutions_id), no_progress):
            resp_name = normalize(institution.resp.data.get('name'))
            regon_name = normalize(institution.regon_data.data.get('nazwa'))
            # Prefer the longer normalized name; ties go to the RESP name.
            best_name = regon_name if len(resp_name) < len(regon_name) else resp_name
            if institution.name == best_name:
                self.skipped += 1
                continue
            if dry_run:
                # Preview the would-be change instead of persisting it.
                pprint({'id': institution.id,
                        'best_name': best_name,
                        'current_x': institution.name,
                        'resp_name': institution.resp.data.get('name'),
                        'regn_name': institution.regon_data.data.get('nazwa')
                        })
            institution.name = best_name
            if not dry_run:
                institution.save(update_fields=['name'])
            self.updated += 1
    self.stdout.write(("There is {} institutions changed, which "
                       "{} updated and "
                       "{} skipped.").format(self.updated + self.skipped, self.updated, self.skipped))
def handle(self, comment, no_progress, infile, *args, **options):
    """Import courts from ``infile`` and deactivate courts absent from it.

    Fix: the original used ``with transaction.atomic() and
    reversion.create_revision()``. ``and`` returns only its right operand, so
    only the revision block was entered and the import ran WITHOUT a
    transaction. Both context managers are now entered via the comma form.
    """
    self.updated, self.inserted, self.skipped, self.deactivated = 0, 0, 0, 0
    self.cached_courts = {}
    with transaction.atomic(), reversion.create_revision():
        reversion.set_comment(comment)
        for item in self.get_iter(self.generate_data(infile), no_progress):
            self.process_item(item)
        # Any court not seen during this import is considered gone: deactivate it.
        for obj in Court.objects.exclude(pk__in=self.cached_courts.values()).all():
            obj.active = False
            obj.save()
    self.stdout.write("There is {} courts, which {} skipped, {} updated, {} inserted and {} deactivated.".format(
        self.updated + self.inserted + self.skipped,
        self.skipped,
        self.updated,
        self.inserted,
        self.deactivated))
def handle(self, comment, no_progress, verbose, update, *args, **options):
    """Synchronize court connections inside one revisioned transaction.

    Fixes: (1) ``with transaction.atomic() and reversion.create_revision()``
    never entered the transaction — ``and`` yields only its right operand —
    both managers are now entered via the comma form; (2) removed the
    leftover debug ``print(update)`` that polluted stdout on every run.
    """
    self.updated, self.inserted, self.skipped, self.deleted, self.missing = 0, 0, 0, 0, 0
    with transaction.atomic(), reversion.create_revision():
        reversion.set_comment(comment)
        for court in self.get_iter(self.get_queryset(update), no_progress):
            self.process_item(court, verbose)
    self.stdout.write(
        "There is {} connection, which {} skipped, {} updated, {} deleted, {} inserted, {} missing.".format(
            self.updated + self.inserted + self.skipped,
            self.skipped,
            self.updated,
            self.deleted,
            self.inserted,
            self.missing))
def assign(self, request, pk):
    # Claim an unassigned task for the requesting user and charge its reward.
    try:
        task = Task.objects.get(pk=pk, assignee=None)
    except Task.DoesNotExist:
        # Either the task does not exist or it already has an assignee.
        return Response(json.dumps({"message": "Already taken"}), status=status.HTTP_400_BAD_REQUEST)
    # get_or_create makes the expense idempotent across repeated assign calls.
    expense, created = TaskExpense.objects.get_or_create(
        task=task,
        executor_id=request.user.pk,
        money=task.money)
    if created:
        # NOTE(review): the balance update and the conditional assignment share a
        # transaction, but the filtered update may match zero rows if another
        # user claimed the task between the initial get() and this point — in
        # that case the expense and balance change still commit. Verify intended.
        with transaction.atomic():
            request.user.update_balance(u"???? ??????", task.money, task=task)
            Task.objects.filter(pk=pk, assignee=None).update(assignee=request.user)
    return Response(json.dumps({'message': "Taken"}), status=status.HTTP_200_OK)
def update(self, instance, validated_data):
    """Apply validated fields to *instance*, then sync nested work history / education."""
    with transaction.atomic():
        nested_fields = ('work_history', 'education')
        for field, new_value in validated_data.items():
            # Nested collections are handled by their own helpers below.
            if field not in nested_fields:
                setattr(instance, field, new_value)
        instance.save(update_image='image' in validated_data)
        if 'work_history' in self.initial_data:
            update_work_history(validated_data['work_history'], instance.id)
        if 'education' in self.initial_data:
            update_education(validated_data['education'], instance.id)
    return instance
def process_request(self, request, data):
    """Handle Robokassa's server-to-server ResultURL payment callback.

    Validates the signed payload, emits ``robokassa_result_received``, marks
    the invoice paid, and answers ``OK<InvId>`` as the protocol requires;
    an invalid signature yields an error body instead.
    """
    form = ResultURLForm(self.conf, data)
    if form.is_valid():
        with transaction.atomic():
            inv_id, out_sum = form.cleaned_data['InvId'], form.cleaned_data['OutSum']
            self.invoice = Invoice.objects.get(id=inv_id)
            # Notify listeners that a payment confirmation arrived.
            robokassa_result_received.send(
                sender=self.invoice.content_object.__class__,
                invoice=self.invoice,
                inv_id=inv_id,
                out_sum=out_sum,
                extra=form.extra_params()
            )
            # Record the payment on the invoice.
            self.invoice.payment_date = now()
            self.invoice.status_changed(options.STATUS_SUCCESS, '?????? ?????? ?? ResultURL')
        return HttpResponse('OK{}'.format(inv_id))  # protocol-mandated acknowledgement body
    return HttpResponse('Error: bad signature')
def process_request(self, request, data):
    """Handle the user's browser redirect to the Robokassa success page.

    Emits ``robokassa_success_page_visited`` and marks the invoice
    successful; raises ``ValidationError`` on an invalid signature.
    """
    form = SuccessRedirectForm(self.conf, data)
    if form.is_valid():
        with transaction.atomic():
            inv_id, out_sum = form.cleaned_data['InvId'], form.cleaned_data['OutSum']
            self.invoice = Invoice.objects.get(id=inv_id)
            # Notify listeners that the success page was visited.
            robokassa_success_page_visited.send(
                sender=self.invoice.content_object.__class__,
                invoice=self.invoice,
                inv_id=inv_id,
                out_sum=out_sum,
                extra=form.extra_params()
            )
            # Record the status change on the invoice.
            self.invoice.status_changed(options.STATUS_SUCCESS, '????????? ?? success url')
    else:
        raise ValidationError('Robokassa data not valid')
    return HttpResponse('??????? ?? ?????? :)')
def process_request(self, request, data):
    """Handle the user's browser redirect to the Robokassa failure page.

    Emits ``robokassa_fail_page_visited`` and marks the invoice failed;
    raises ``ValidationError`` on an invalid signature.
    """
    form = FailRedirectForm(self.conf, data)
    if form.is_valid():
        with transaction.atomic():
            inv_id, out_sum = form.cleaned_data['InvId'], form.cleaned_data['OutSum']
            self.invoice = Invoice.objects.get(id=inv_id)
            # Notify listeners that the failure page was visited.
            robokassa_fail_page_visited.send(
                sender=self.invoice.content_object.__class__,
                invoice=self.invoice,
                inv_id=inv_id,
                out_sum=out_sum,
                extra=form.extra_params()
            )
            # Record the status change on the invoice.
            self.invoice.status_changed(options.STATUS_FAIL, '????????? ?? fail url')
    else:
        raise ValidationError('Robokassa data not valid')
    return HttpResponse('?????? ????????, ????? ????????????? :(')
def _rename(self, apps, schema_editor, old_model, new_model):
    """Rename the ContentType row for ``self.app_label``/``old_model`` to ``new_model``.

    No-op when the router disallows migrating ContentType on this database
    or when no content type row exists for the old model name.
    """
    ContentType = apps.get_model('contenttypes', 'ContentType')
    db = schema_editor.connection.alias
    if not router.allow_migrate_model(db, ContentType):
        return
    try:
        content_type = ContentType.objects.db_manager(db).get_by_natural_key(self.app_label, old_model)
    except ContentType.DoesNotExist:
        # Nothing to rename.
        pass
    else:
        content_type.model = new_model
        try:
            with transaction.atomic(using=db):
                content_type.save(update_fields={'model'})
        except IntegrityError:
            # Gracefully fallback if a stale content type causes a
            # conflict as remove_stale_contenttypes will take care of
            # asking the user what should be done next.
            content_type.model = old_model
        else:
            # Clear the cache as the `get_by_natual_key()` call will cache
            # the renamed ContentType instance by its old model name.
            ContentType.objects.clear_cache()
def messages(request, room_id):
    """POST endpoint: persist a chat message and fan it out to the room's event stream."""
    if request.method != 'POST':
        return HttpResponseNotAllowed(['POST'])
    try:
        room = ChatRoom.objects.get(eid=room_id)
    except ChatRoom.DoesNotExist:
        try:
            room = ChatRoom(eid=room_id)
            room.save()
        except IntegrityError:
            # someone else made the room. no problem
            room = ChatRoom.objects.get(eid=room_id)
    sender = request.POST['from']
    text = request.POST['text']
    with transaction.atomic():
        msg = ChatMessage(room=room, user=sender, text=text)
        msg.save()
        send_event('room-%s' % room_id, 'message', msg.to_data())
    body = json.dumps(msg.to_data(), cls=DjangoJSONEncoder)
    return HttpResponse(body, content_type='application/json')
def save(self, *args, **kwargs):
    # Assign a per-channel, monotonically increasing event id on first save.
    if not self.eid:
        counter = EventCounter.get_or_create(self.channel)
        with transaction.atomic():
            # Lock the counter row so concurrent saves get distinct ids.
            counter = EventCounter.objects.select_for_update(
            ).get(id=counter.id)
            self.eid = counter.value + 1
            try:
                super(Event, self).save(*args, **kwargs)
            except Exception:
                # Reset the in-memory id so a retry re-allocates it cleanly.
                self.eid = 0
                raise
            counter.value = self.eid
            counter.save()
    else:
        # Already has an id: plain save.
        super(Event, self).save(*args, **kwargs)
def process_photograph_event(applicant, calendar_service, redis):
    """Processes the potential photograph event.
    It tries to submit the event to designated google calendar.
    And it commits to the database to prevent duplication event in the calendar.
    The function is atomic.
    """
    # Uses the best effort redis redlock assumed the network is quite ok for such task.
    with redis.lock("{}{}".format(_PHOTOGRAPH_EVENT_LOCK_KEY_PREFIX, applicant.id)):
        # Re-read under the lock and bail out if another worker already
        # created the calendar event for this applicant.
        applicant.refresh_from_db()
        if applicant.google_calendar_event_created_at:
            return
        with transaction.atomic():
            event = populate_event_for_submitting(applicant)
            result = submit_photograph_event_to_calendar(calendar_service, event)
            # Persist the created-at timestamp so the guard above short-circuits next time.
            commit_created_at_timestamp_in_db(applicant, result)
Source file: re_encrypt_transcript_credentials.py
Project: edx-video-pipeline
Author: edx
Views: 25 · Stars: 0 · Likes: 0 · Comments: 0
def handle(self, *args, **options):
    """Re-encrypt every TranscriptCredentials record with the newest fernet key."""
    LOGGER.info('[Transcript credentials re-encryption] Process started.')
    # Drop the cached fernet properties so the freshest keys are picked up.
    invalidate_fernet_cached_properties(TranscriptCredentials, ['api_key', 'api_secret'])
    try:
        with transaction.atomic():
            # Saving each record decrypts with the old key and re-encrypts
            # the fernet fields with the current one.
            for credentials in TranscriptCredentials.objects.all():
                credentials.save()
        LOGGER.info('[Transcript credentials re-encryption] Process completed.')
    except InvalidToken:
        LOGGER.exception(
            '[Transcript credentials re-encryption] No valid fernet key present to decrypt. Process halted.'
        )