def run_on_appengine(gdata_service, store_tokens=True,
                     single_user_mode=False, deadline=None):
    """Modifies a GDataService object to allow it to run on App Engine.

    Swaps in an App Engine compatible HTTP client and a datastore-backed
    token store, then applies the token-handling policy flags.

    Args:
      gdata_service: An instance of AtomService, GDataService, or any
          of their subclasses which has an http_client member and a
          token_store member.
      store_tokens: Boolean, defaults to True. If True, the gdata_service
          will attempt to add each token to its token_store when
          SetClientLoginToken or SetAuthSubToken is called. If False
          the tokens will not automatically be added to the token_store.
      single_user_mode: Boolean, defaults to False. If True, the
          current_token member of gdata_service will be set when
          SetClientLoginToken or SetAuthSubToken is called. If set to
          True, the current_token is set in the gdata_service and anyone
          who accesses the object will use the same token.
          Note: If store_tokens is set to False and single_user_mode is
          set to False, all tokens will be ignored, since the library
          assumes the tokens should not be stored in the datastore and
          they should not be stored in the gdata_service object. This
          will make it impossible to make requests which require
          authorization.
      deadline: int (optional) The number of seconds to wait for a
          response before timing out on the HTTP request. If no deadline
          is specified, the default deadline for HTTP requests from App
          Engine is used. The maximum is currently 10 (for 10 seconds).
          The default deadline for App Engine is 5 seconds.

    Returns:
      The same gdata_service object, modified in place.
    """
    gdata_service.http_client = AppEngineHttpClient(deadline=deadline)
    gdata_service.token_store = AppEngineTokenStore()
    gdata_service.auto_store_tokens = store_tokens
    gdata_service.auto_set_current_token = single_user_mode
    return gdata_service
# Collected example usages of Python set() / memcache.set() (scraper artifact, kept as a comment).
def set_token(unique_key, token_str):
    """Saves the serialized auth token in the datastore.

    The token is also stored in memcache to speed up retrieval on a cache hit.

    Args:
      unique_key: The unique name for this token as a string. It is up to your
          code to ensure that this token value is unique in your application.
          Previous values will be silently overwritten.
      token_str: A serialized auth token as a string. I expect that this string
          will be generated by gdata.gauth.token_to_blob.

    Returns:
      True if the token was stored successfully, False if the token could not
      be safely cached (if an old value could not be cleared). If the token
      was set in memcache, but not in the datastore, this function will return
      None. However, in that situation an exception will likely be raised.

    Raises:
      Datastore exceptions may be raised from the App Engine SDK in the event
      of failure.
    """
    # First try to save in memcache.
    result = memcache.set(unique_key, token_str)
    # If memcache fails to save the value, clear the cached value.
    if not result:
        result = memcache.delete(unique_key)
        # If we could not clear the cached value for this token, refuse to
        # save: a stale cached token would otherwise mask the new datastore
        # value. (memcache.delete returns 0 on a network failure.)
        if result == 0:
            return False
    # Save to the datastore.
    if Token(key_name=unique_key, t=token_str).put():
        return True
    return None
def set(self, url, content):
    """Store *content* under *url* in memcache, expiring after _max_age seconds."""
    try:
        ttl = int(self._max_age)
        memcache.set(url, content, time=ttl, namespace=NAMESPACE)
    except Exception as exc:
        # Caching is best-effort: record the problem and carry on.
        LOGGER.warning(exc, exc_info=True)
def set(key=None, value=None, time=TIMEOUT):
    """Store *value* in memcache, split into memcache-sized chunks.

    The JSON-serialized value is broken into parts; the part count is
    stored under '<key>_parts' and each chunk under '<key><index>'.
    """
    chunks = _split_value(json.dumps(value))
    memcache.set(key=key + '_parts', value=len(chunks), time=time)
    for index, chunk in enumerate(chunks):
        logging.debug("Setting %s%d" % (key, index))
        memcache.set(key='%s%d' % (key, index), value=chunk, time=time)
def GetEntityViaMemcache(entity_key):
    """Get entity from memcache if available, from datastore if not."""
    cached = memcache.get(entity_key)
    if cached is not None:
        return cached
    entity = ndb.Key(urlsafe=entity_key).get()
    if entity is not None:
        # Populate the cache so subsequent lookups skip the datastore.
        memcache.set(entity_key, entity)
    return entity
def _put(self):
    """Saves the session and updates the memcache entry."""
    # Refresh the cached copy (keyed by the entity id) before persisting
    # through the parent model's put(), so readers never see stale data.
    memcache.set(self._key.id(), self.data)
    super(Session, self).put()
def save_session(self, response):
    """Persist the session to memcache and refresh the secure cookie.

    No-op when there is no session or it has not been modified.
    """
    session = self.session
    if session is None or not session.modified:
        return
    memcache.set(self.sid, dict(session))
    self.session_store.save_secure_cookie(
        response, self.name, {'_sid': self.sid}, **self.session_args)
def set(self, url, content):
    """Cache *content* for *url*; any error is logged and suppressed."""
    try:
        memcache.set(url, content,
                     time=int(self._max_age), namespace=NAMESPACE)
    except Exception as err:
        # A failed cache write must never break the request path.
        LOGGER.warning(err, exc_info=True)
def index():
    """Render the coffee listing page from memcache-backed queries."""
    coffees = memcache.get(ALL_COFFEES_KEY)
    if not coffees:
        coffees = Coffee.query(Coffee.active == True).fetch()
        # Images are too large for memcache's per-entry size limit,
        # so strip them before caching the list.
        for coffee in coffees:
            coffee.image = None
        memcache.set(ALL_COFFEES_KEY, coffees)
    roaster_rows = memcache.get(ALL_ROASTERS_KEY)
    if not roaster_rows:
        roaster_rows = Coffee.query(projection=["roaster"], distinct=True).fetch()
        memcache.set(ALL_ROASTERS_KEY, roaster_rows)
    roasters = [row.roaster for row in roaster_rows]
    return render_template('index.html', coffees=coffees, roasters=roasters)
def get_coffee_image(coffee_id):
    """Serve the image attached to the coffee, or the placeholder image."""
    coffee_int_id = int(coffee_id)
    cache_key = "coffee_image_{}".format(coffee_int_id)
    coffee = memcache.get(cache_key)
    if not coffee:
        coffee = Coffee.get_by_id(coffee_int_id)
        memcache.set(cache_key, coffee)
    if coffee and coffee.image:
        return send_file(io.BytesIO(coffee.image))
    # No coffee or no image: fall back to the bundled placeholder.
    return app.send_static_file('coffee.png')
def save(self, sync_only=False):
    """Pickle the session record and write it to memcache.

    The sync_only flag is accepted but not yet honored (see TODO).
    """
    # todo: implement sync only
    self._record = PicklableSession(
        self._expires, self._last_accessed, self._data)
    blob = pickle.dumps(self._record)
    memcache.set(self._sid, blob, namespace=NAMESPACE)
def get_sharded_config(cls, name):
    """Return the ShardedCounterConfig for *name*, caching it for a day."""
    cache_key = ShardedCounterConfig.cache_key(name)
    config = memcache.get(cache_key)
    if config:
        return config
    # Cache miss: fetch from the datastore, creating a default 20-shard
    # config on first use, and cache the result for 24 hours.
    config = ShardedCounterConfig.get_or_insert(name, name=name, shards=20)
    memcache.set(cache_key, config, time=86400)
    return config
def get_count(cls, name):
    """Total the crash-report counts for *name*, memoizing in memcache."""
    cache_key = CrashReport.count_cache_key(name)
    cached = memcache.get(cache_key)
    if cached is None:
        query = CrashReport.all()
        query.filter('name = ', name)
        total = sum(report.count for report in query.run())
        # Stored as a string; converted back to int on the way out.
        memcache.set(cache_key, str(total))
        return int(total)
    return int(cached)
def jinja2_environment(cls):
    """Lazily build and return the shared Jinja2 environment.

    Templates are loaded from the 'pages' directory next to this file;
    the custom filters used by the templates are registered once.
    """
    if not RRequest.environment:
        pages_dir = os.path.join(os.path.dirname(__file__), 'pages')
        env = jinja2.Environment(loader=jinja2.FileSystemLoader(pages_dir))
        env.filters.update(
            readable_date=readable_date,
            crash_uri=crash_uri,
            snippetize=snippetize,
            issue_url=issue_url,
        )
        RRequest.environment = env
    return RRequest.environment
def getUser_MakerSecret(id):
    """Return the IFTTT Maker secret for user *id*, via memcache then datastore.

    Returns None (after logging) when the record or the 'maker_secret'
    property cannot be retrieved.
    """
    secret = memcache.get(key="MakerSecret-" + id)
    if secret is None:  # was `== None`; identity test is the correct idiom
        try:
            key = Key.from_path('UserData', id)
            entity = datastore.Get(key)
            secret = entity['maker_secret']
            # Backfill the cache for subsequent lookups.
            memcache.set(key="MakerSecret-" + id, value=secret)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; datastore errors and a missing
            # property still fall through to return None.
            logging.debug(id + u"?IFTTT Maker Secret???????????")
    return secret
def setUser_MakerSecret(id, maker_secret):
    """Write the user's IFTTT Maker secret to the datastore and memcache."""
    entity = datastore.Get(Key.from_path('UserData', id))
    entity.update({'maker_secret': maker_secret})
    datastore.Put(entity)
    # Keep the cache in step with the datastore.
    memcache.set(key="MakerSecret-" + id, value=maker_secret)
def setUserByDashIdWithCurrentUser(dashId):
    """Assign *dashId* to the user currently in the registration flow.

    Returns:
      True when the Dash button id was saved on the user's record and the
      confirmation message was sent; False when there is no current user
      or the update failed.
    """
    result = False
    currentUser = _getCurrentUser()
    if currentUser is not None:  # was `!= None`
        logging.debug(dashId + u"?DashButton???????????")
        lineId = currentUser
        try:
            key = db.Key.from_path('UserData', lineId)
            userData = db.get(key)
            userData.dashId = dashId
            userData.put()
            _clearCurrentUser()
            send2Line.sendText(currentUser, "DashButton????????")
            result = True
        except Exception:
            # Narrowed from a bare `except:`; a missing or invalid user
            # record is logged and reported as failure. (Removed the
            # unused `found_lineId` local and dead commented-out code.)
            logging.warning(u"currentUser???????" + lineId + u"???????????")
    else:
        logging.warning(u"currentUser??????????????????????")
    return result
def __init(self):
    """Initialize every field of this daily-devotional record to None.

    NOTE(review): the name `__init` (missing trailing underscores) is
    probably a typo for `__init__`; left unchanged here because renaming
    would alter the (name-mangled) interface — confirm with callers.
    """
    # Date object storing the date for this entry (original comment was
    # truncated: "date object storing date for").
    self.date = None
    self.heading = None
    self.verse_reference = None
    self.bible_in_a_year = None
    self.verse_concise = None
    self.verse_full = None
    self.post = None
    # TODO: write get and set accessors (original author's note).
    # Links to the full verse — suffixes suggest Bible Gateway (bgw) and
    # YouVersion (yv); assumption from naming, confirm against usage.
    self.link_to_full_verse_bgw = None
    self.link_to_full_verse_yv = None
def save(cls, media):
    """Filter *media* to fresh items, persist them, and update dedupe caches.

    An item is kept only when its tags validate, its 'code' has not been
    seen recently, and its owner's id has not posted recently (including
    earlier items in this same batch).

    Returns:
      The number of items saved (0 when nothing new survived filtering).
    """
    recent_owner_ids = (memcache.get(cls.KEY_PRE_OWNER_IDS) or '').split()
    recent_codes = cls._get_recent_codes()
    codes_set = set(recent_codes)
    owner_ids_set = set(recent_owner_ids)
    new_media = []
    for item in media:
        if not TagValidator.is_valid_tags(item['tags']):
            continue
        if item['code'] in codes_set:
            continue
        if item['owner']['id'] in owner_ids_set:
            continue
        new_media.append(item)
        # Mark as seen so duplicates within this batch are also skipped.
        owner_ids_set.add(item['owner']['id'])
        codes_set.add(item['code'])
    if not new_media:
        return 0
    tag_text = cls()
    tag_text.text = '\n'.join(map(cls._to_line, new_media))
    tag_text.put()
    # NOTE(review): codes from ALL incoming media are remembered (even
    # rejected items), while owner ids come only from the saved ones —
    # presumably intentional; confirm.
    recent_codes.extend([x['code'] for x in media])
    recent_owner_ids.extend([x['owner']['id'] for x in new_media])
    cls._set_cache(recent_codes, cls.MAX_CODES_COUNT, cls.KEY_RECENT_CODES)
    cls._set_cache(recent_owner_ids, cls.MAX_OWNER_COUNT, cls.KEY_PRE_OWNER_IDS)
    # Set comprehension replaces reduce(lambda y,x: y+x['tags'], ...) —
    # same resulting set, without quadratic list concatenation or a
    # functools.reduce dependency on Python 3.
    tags = {tag for item in new_media for tag in item['tags']}
    cls._set_last_tags(tags)
    return len(new_media)
def _set_last_tags(cls, tags):
    """Cache the tag collection from the most recent save, space-joined.

    Note: callers pass a set, so the stored order is nondeterministic.
    """
    memcache.set('tag_text_last_tags', ' '.join(tags))