def process_export():
    bucket = int(flask.request.values['bucket'])
    filename = '/ilps-search-log.appspot.com/search_log.%d.gz' % bucket
    with gcs.open(filename, 'w', 'text/plain',
                  {'content-encoding': 'gzip'}) as f:
        bucket_size = int(flask.request.values['bucket_size'])
        offset = bucket * bucket_size
        with gzip.GzipFile('', fileobj=f, mode='wb') as gz:
            ndb.get_context().clear_cache()
            for s in Session.query(Session.shared == True).iter(
                    batch_size=10, offset=offset, limit=bucket_size):
                ndb.get_context().clear_cache()
                gc.collect()
                s.user_id = ''
                print >>gz, json.dumps(s.to_dict(), default=util.default,
                                       ensure_ascii=False).encode('utf-8')
    response = 'Written: %s' % str(blobstore.create_gs_key('/gs' + filename))
    app.logger.info(response)
    return response, 200
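# A minimal fan-out sketch (an assumption, not from the original source):
# enqueue one push-queue task per bucket so each request exports a bounded
# slice of sessions. The '/export' URL, bucket count, and bucket size are
# illustrative only; assumes `from google.appengine.api import taskqueue`.
def start_export():
    bucket_size = 1000
    for bucket in range(10):
        taskqueue.add(url='/export',
                      params={'bucket': bucket, 'bucket_size': bucket_size})
    return 'Export queued', 200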
def test_delete_db_ndb_mixed(self):
    # Start empty
    storage_ndb = appengine.StorageByKeyName(
        appengine.CredentialsNDBModel, 'foo', 'credentials')
    storage = appengine.StorageByKeyName(
        appengine.CredentialsModel, 'foo', 'credentials')

    # First DB, then NDB
    self.assertEqual(None, storage.get())
    storage.put(self.credentials)
    self.assertNotEqual(None, storage.get())
    storage_ndb.delete()
    self.assertEqual(None, storage.get())

    # First NDB, then DB
    self.assertEqual(None, storage_ndb.get())
    storage_ndb.put(self.credentials)
    storage.delete()
    self.assertNotEqual(None, storage_ndb.get())
    # NDB uses memcache and an instance cache (Context)
    ndb.get_context().clear_cache()
    memcache.flush_all()
    self.assertEqual(None, storage_ndb.get())
def setUp(self):
    # First, create an instance of the Testbed class.
    self.testbed = testbed.Testbed()
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()
    # Next, declare which service stubs you want to use.
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    # Clear ndb's in-context cache between tests.
    # This prevents data from leaking between tests.
    # Alternatively, you could disable caching by
    # using ndb.get_context().set_cache_policy(False).
    ndb.get_context().clear_cache()
def setUp(self):
    super(BaseTest, self).setUp()
    root_path = '.'
    application_id = 'graphene-gae-test'

    # First, create an instance of the Testbed class.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.setup_env(app_id=application_id, overwrite=True)

    policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
        probability=self.datastore_probability)
    self.testbed.init_datastore_v3_stub(
        root_path=root_path, consistency_policy=policy, require_indexes=True)
    self.testbed.init_app_identity_stub()
    self.testbed.init_blobstore_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_taskqueue_stub(root_path=root_path)
    self.testbed.init_urlfetch_stub()
    self.storage = cloudstorage_stub.CloudStorageStub(
        self.testbed.get_stub('blobstore').storage)
    self.testbed.init_mail_stub()
    self.testbed.init_user_stub()
    self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)

    ndb.get_context().clear_cache()
    ndb.get_context().set_cache_policy(lambda x: True)
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()
def minimal_database():
    my_testbed = testbed.Testbed()
    my_testbed.activate()
    my_testbed.init_datastore_v3_stub()
    my_testbed.init_memcache_stub()
    # Clear ndb's in-context cache between tests.
    ndb.get_context().clear_cache()
def define_get_google():
    @ndb.tasklet
    def get_google():
        context = ndb.get_context()
        result = yield context.urlfetch("http://www.google.com/")
        if result.status_code == 200:
            raise ndb.Return(result.content)
        # else return None
    return get_google
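# Usage sketch (an assumption, not part of the original snippet): calling a
# tasklet returns an ndb.Future immediately; get_result() blocks until the
# underlying urlfetch completes.
get_google = define_get_google()
future = get_google()
content = future.get_result()  # None unless the fetch returned HTTP 200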
def get_first_ready():
    urls = ["http://www.google.com/", "http://www.blogspot.com/"]
    context = ndb.get_context()
    futures = [context.urlfetch(url) for url in urls]
    first_future = ndb.Future.wait_any(futures)
    return first_future.get_result().content
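# Hedged variant (not in the original source): wait for every fetch to finish
# rather than just the first, using ndb.Future.wait_all.
def get_all_ready():
    urls = ["http://www.google.com/", "http://www.blogspot.com/"]
    context = ndb.get_context()
    futures = [context.urlfetch(url) for url in urls]
    ndb.Future.wait_all(futures)
    return [future.get_result().content for future in futures]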
def test_set_in_process_cache_policy(testbed):
    def policy(key):
        return 1 == 1
    snippets.set_in_process_cache_policy(policy)
    assert policy == ndb.get_context().get_cache_policy()
def test_set_memcache_policy(testbed):
    def policy(key):
        return 1 == 2
    snippets.set_memcache_policy(policy)
    assert policy == ndb.get_context().get_memcache_policy()
def test_bypass_in_process_cache_for_account_entities(testbed):
    context = ndb.get_context()
    assert context.get_cache_policy() == context.default_cache_policy
    snippets.bypass_in_process_cache_for_account_entities()
    assert context.get_cache_policy() != context.default_cache_policy
def test_set_memcache_timeout_policy(testbed):
    def policy(key):
        return 1
    snippets.set_memcache_timeout_policy(policy)
    assert ndb.get_context().get_memcache_timeout_policy() == policy
def set_in_process_cache_policy(func):
    context = ndb.get_context()
    context.set_cache_policy(func)

def set_memcache_policy(func):
    context = ndb.get_context()
    context.set_memcache_policy(func)

def bypass_in_process_cache_for_account_entities():
    context = ndb.get_context()
    context.set_cache_policy(lambda key: key.kind() != 'Account')

def set_datastore_policy(func):
    context = ndb.get_context()
    context.set_datastore_policy(func)

def clear_cache():
    context = ndb.get_context()
    context.clear_cache()
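# Usage sketch (illustrative assumption): the helpers above compose; for
# example, keep 'Account' entities out of both the in-process cache and
# memcache for the rest of the request.
skip_accounts = lambda key: key.kind() != 'Account'
set_in_process_cache_policy(skip_accounts)
set_memcache_policy(skip_accounts)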
def setUp(self):
    # First, create an instance of the Testbed class.
    self.testbed = testbed.Testbed()
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()
    # Create a consistency policy that will simulate the High Replication
    # consistency model.
    self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
        probability=0)
    # Initialize the datastore stub with this policy.
    self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
    # Initialize the memcache stub too, since ndb also uses memcache.
    self.testbed.init_memcache_stub()
    # Clear the in-context cache before each test.
    ndb.get_context().clear_cache()
def _set_ndb_cache_policy():
    """Tell NDB to never cache anything in memcache or in-process.

    This ensures that entities fetched from Datastore input_readers via NDB
    will not bloat up the request memory size and Datastore Puts will avoid
    doing calls to memcache. Without this you get soft memory limit exits,
    which hurts overall throughput.
    """
    ndb_ctx = ndb.get_context()
    ndb_ctx.set_cache_policy(lambda key: False)
    ndb_ctx.set_memcache_policy(lambda key: False)
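# Illustrative call site (an assumption, not from the original source): run
# the helper once at the top of a handler that streams many entities, so
# neither cache grows with the number of rows touched.
def process_many_entities(query):  # `query` is a hypothetical ndb query
    _set_ndb_cache_policy()
    for entity in query.iter(batch_size=100):
        entity.put()  # this put skips memcache thanks to the policy above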
def setUp(self):
    """Set up the test bed and activate it."""
    # First, create an instance of the Testbed class.
    self.testbed = testbed.Testbed()
    self.testbed.setup_env(current_version_id='testbed.version')
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()
    # Next, declare which service stubs you want to use.
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    # Clear ndb's in-context cache between tests.
    ndb.get_context().clear_cache()
def DeleteAllData():
    entries = []
    entries.extend(Person.query().fetch(keys_only=True))
    entries.extend(Troop.query().fetch(keys_only=True))
    entries.extend(ScoutGroup.query().fetch(keys_only=True))
    entries.extend(Meeting.query().fetch(keys_only=True))
    entries.extend(TroopPerson.query().fetch(keys_only=True))
    entries.extend(Semester.query().fetch(keys_only=True))
    entries.extend(TaskProgress.query().fetch(keys_only=True))
    entries.extend(UserPrefs.query().fetch(keys_only=True))
    ndb.delete_multi(entries)
    ndb.get_context().clear_cache()  # clear NDB's in-context cache
def removeNDBCache(self, key):
    """Helper method to remove a key from NDB's memcache layer."""
    # key.delete(use_datastore=False)
    ndb.get_context()._clear_memcache((key,)).get_result()
def clearNDBCache(self):
    ndb.get_context().clear_cache()
def setUp(self, use_mysql=False):
    super(NdbTestBase, self).setUp(use_mysql=use_mysql)
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()
    self.doSetUp()
def urlfetch_async(self, url, method='GET', headers=None,
                   payload=None, deadline=None, callback=None,
                   follow_redirects=False):
    """Make an async urlfetch() call.

    This is an async wrapper around urlfetch(). It adds an authentication
    header.

    Args:
      url: the url to fetch.
      method: the method in which to fetch.
      headers: the http headers.
      payload: the data to submit in the fetch.
      deadline: the deadline in which to make the call.
      callback: the call to make once completed.
      follow_redirects: whether or not to follow redirects.

    Yields:
      This returns a Future despite not being decorated with @ndb.tasklet!
    """
    headers = {} if headers is None else dict(headers)
    headers.update(self.user_agent)
    try:
        self.token = yield self.get_token_async()
    except app_identity.InternalError as e:
        if os.environ.get('DATACENTER', '').endswith('sandman'):
            self.token = None
            logging.warning('Could not fetch an authentication token in '
                            'sandman based Appengine devel setup; '
                            'proceeding without one.')
        else:
            raise e
    if self.token:
        headers['authorization'] = 'OAuth ' + self.token
    deadline = deadline or self.retry_params.urlfetch_timeout
    ctx = ndb.get_context()
    resp = yield ctx.urlfetch(
        url, payload=payload, method=method,
        headers=headers, follow_redirects=follow_redirects,
        deadline=deadline, callback=callback)
    raise ndb.Return(resp)
def gaebed(gae, gae_sdk_path, gae_probability, mocker):
    """Prepare GAE testbed environment if necessary."""
    if gae:
        if gae_sdk_path not in sys.path:
            sys.path.insert(0, gae_sdk_path)
        try:
            import appengine_config
        except ImportError:
            pass
        mocker.patch('google.appengine.api.mail.send_mail', autospec=True)
        APP_ID = 'lostre-test-1637'
        os.environ['APPLICATION_ID'] = APP_ID

        from google.appengine.api import apiproxy_stub_map, datastore_file_stub
        from google.appengine.datastore import datastore_stub_util
        from google.appengine.api.memcache import memcache_stub
        from google.appengine.api import urlfetch_stub

        policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
            probability=gae_probability)
        apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
        stub = datastore_file_stub.DatastoreFileStub(
            APP_ID, datastore_file=None, consistency_policy=policy)
        apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
        apiproxy_stub_map.apiproxy.RegisterStub(
            'memcache', memcache_stub.MemcacheServiceStub())
        apiproxy_stub_map.apiproxy.RegisterStub(
            'urlfetch', urlfetch_stub.URLFetchServiceStub())

        import google.appengine.tools.os_compat
        from google.appengine.ext import testbed
        from google.appengine.api import memcache
        from google.appengine.ext import ndb

        bed = testbed.Testbed()
        bed.activate()
        bed.init_datastore_v3_stub()
        bed.init_taskqueue_stub(True)
        bed.init_memcache_stub()
        bed.init_user_stub()
        bed.init_urlfetch_stub()
        bed.init_app_identity_stub(enable=True)
        ndb.get_context().clear_cache()

        from google.appengine.tools.devappserver2 import application_configuration
        # Get the app id out of your app.yaml and stuff.
        configuration = application_configuration.ApplicationConfiguration(['.'])
        return bed
    else:
        return None