def save_auth_tokens(token_dict, user=None):
    """Associates the tokens with the current user and writes to the datastore.

    If there is no current user, the tokens are not written and this function
    returns None.

    Args:
        token_dict: Dictionary of auth tokens to persist.
        user: Optional user to store the tokens for; defaults to the
            currently signed-in user.

    Returns:
        The key of the datastore entity containing the user's tokens, or None if
        there was no current user.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return None
    # Serialize once; the original pickled the dict three separate times.
    pickled = pickle.dumps(token_dict)
    memcache.set('gdata_pickled_tokens:%s' % user, pickled)
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens:
        user_tokens.pickled_tokens = pickled
    else:
        user_tokens = TokenCollection(user=user, pickled_tokens=pickled)
    return user_tokens.put()
# Example source code for Python memcache set() usage (scraped section header)
def load_auth_tokens(user=None):
    """Reads a dictionary of the current user's tokens from the datastore.

    If there is no current user (a user is not signed in to the app) or the
    user does not have any tokens, an empty dictionary is returned.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return {}
    cache_key = 'gdata_pickled_tokens:%s' % user
    # Fast path: tokens were cached by a previous save/load.
    pickled_tokens = memcache.get(cache_key)
    if pickled_tokens:
        return pickle.loads(pickled_tokens)
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens is None:
        return {}
    # Repopulate the cache from the datastore copy before returning.
    memcache.set(cache_key, user_tokens.pickled_tokens)
    return pickle.loads(user_tokens.pickled_tokens)
def _update_memcached(cls, domain, time=3600 * 24, records=None):
    """
    Updates memcached with the latest data from the datastore
    and returns that data. By default stores entries to expire after
    24 hours.
    """
    # Fetch from the datastore unless the caller supplied records.
    if not records:
        records = cls._get_from_datastore(domain,
                                          cls._memcache_date_offset)
    memcache.set(
        key=cls._memcache_key,
        namespace="{}|".format(domain),
        value=json.dumps(records),
        time=time)
    return records
def update(cls, domain, time=0):
    """
    Updates the memcached stats for a given domain.

    This is used when a report is updated so that memcached
    has the current stats.

    Args:
        domain - str - The domain to use for the namespace
        time - int - The timeout in seconds for stored keys
            (default: 0, meaning the entries never expire)
    """
    namespace = "{}|{}".format('stats', domain)
    # Recount reports per status and cache one counter per status key.
    for status in VALID_STATUSES:
        count = EmailReport.query(EmailReport.reported_domain == domain,
                                  EmailReport.status == status).count()
        memcache.set(
            key=status, namespace=namespace, value=count, time=time)
def update(cls, domain, time=0):
    """
    Updates the memcached stats for a given domain.

    This is used when a report is updated so that memcached
    has the current stats.

    Args:
        domain - str - The domain to use for the namespace
        time - int - The timeout in seconds for stored keys
            (default: 0, meaning the entries never expire)
    """
    namespace = "{}|".format(domain)
    # Re-read the latest records and overwrite the cached JSON blob.
    records = cls._get_from_datastore(domain, cls._memcache_result_count)
    memcache.set(
        key=cls._memcache_key,
        namespace=namespace,
        value=json.dumps(records),
        time=time)
def get_by_sid(cls, sid):
    """Returns a ``Session`` instance by session id.

    :param sid:
        A session id.
    :returns:
        An existing ``Session`` entity.
    """
    data = memcache.get(sid)
    if data:
        return data
    # Cache miss: fall back to the datastore and repopulate memcache.
    session = model.Key(cls, sid).get()
    if session:
        data = session.data
        memcache.set(sid, data)
    return data
def new_crash_with_backoff(cls, crash_report):
    """
    There is a chance that we get a new crash before an issue was submitted
    for an earlier one; a memcache guard key provides the backoff.
    """
    cache_key = cls.backoff_crash_key_new_crash(crash_report)
    if memcache.get(cache_key):
        # task already in progress, backoff
        logging.info(
            'A GitHub task is already in progress. Waiting to the dust to settle for fingerprint {0}'
            .format(crash_report.fingerprint)
        )
        return
    # No task pending -- mark one in progress and enqueue the job.
    memcache.set(cache_key, "in_progress")
    deferred.defer(
        GithubOrchestrator.create_issue_job,
        crash_report.fingerprint, _queue=GithubOrchestrator.__QUEUE__)
    logging.info(
        'Enqueued job for new issue on GitHub for fingerprint {0}'.format(crash_report.fingerprint))
def new_comment_with_backoff(cls, crash_report):
    """
    There is a chance this is a hot issue with too many crashes coming in,
    so back off (via a memcache guard key) before posting a new comment.
    """
    cache_key = cls.backoff_crash_key_new_comment(crash_report)
    if memcache.get(cache_key):
        # task already in progress, backoff
        logging.info(
            'A GitHub task is already in progress. Waiting to the dust to settle for fingerprint {0}'
            .format(crash_report.fingerprint)
        )
        return
    # No task pending -- mark one in progress and enqueue the job.
    memcache.set(cache_key, "in_progress")
    deferred.defer(
        GithubOrchestrator.add_comment_job,
        crash_report.fingerprint, _queue=GithubOrchestrator.__QUEUE__)
    logging.info(
        'Enqueued job for new comment on GitHub for fingerprint {0}'.format(crash_report.fingerprint))
def _most_recent_property(
        cls, name, property_name, default_value=None, serialize=lambda x: x,
        deserialize=lambda x: x, ttl=120):
    """Returns ``property_name`` taken from the most recent crash report.

    The (serialized) value is cached in memcache for ``ttl`` seconds; on a
    cache miss every CrashReport with the given ``name`` is scanned and the
    value from the entity with the latest ``date_time`` wins.

    Args:
        name: Crash report name used to filter the query.
        property_name: Attribute to read off the winning entity.
        default_value: Returned (deserialized) when no entity matches.
        serialize/deserialize: Converters applied before caching / after
            reading from the cache.
        ttl: Cache lifetime in seconds.
    """
    cache_key = CrashReport.recent_crash_property_key(name, property_name)
    most_recent_value = memcache.get(cache_key)
    if most_recent_value is None:
        most_recent = 0
        most_recent_value = default_value
        q = CrashReport.all()
        q.filter('name = ', name)
        for entity in q.run():
            in_millis = to_milliseconds(entity.date_time)
            if most_recent <= in_millis:
                most_recent = in_millis
                # getattr is the idiomatic spelling of __getattribute__.
                most_recent_value = serialize(getattr(entity, property_name))
        memcache.set(cache_key, most_recent_value, ttl)
    return deserialize(most_recent_value)
def getAddress(longitude, latitude):
    """Reverse-geocodes a coordinate pair via the Google Geocoding API.

    Successful lookups are cached in memcache for an hour under a
    "gps-<lon>,<lat>" key.

    Returns:
        The geocoder's "results" list, or None when the API returned no
        results (previously this fell through and crashed with a TypeError
        on ``None["results"]``).
    """
    gps_key = "gps-" + str(longitude) + "," + str(latitude)
    resultData = memcache.get(key=gps_key)
    if resultData is None:
        # NOTE(review): the original code passes ``longitude`` as latlng's
        # first component; the API expects "lat,lng" -- argument order looks
        # swapped, preserved here pending confirmation against callers.
        url = "https://maps.googleapis.com/maps/api/geocode/json?language=ja&sensor=false&key=" + const.GOOGLE_API_KEY + "&latlng=" + str(
            longitude) + "," + str(latitude)
        logging.debug(url)
        result = urlfetch.fetch(
            url=url,
            method=urlfetch.GET,
            headers={
            }
        )
        # The response body is logged either way for debugging.
        if result.status_code == 200:
            logging.debug(result.content)
        else:
            logging.debug(result.content)
        jsonobj = json.loads(result.content)
        if len(jsonobj["results"]) > 0:
            memcache.set(key=gps_key, value=jsonobj, time=3600)
            resultData = jsonobj
        else:
            logging.debug(resultData)
            return None
    return resultData["results"]
def createUserData(lineId):
    """Creates and stores a UserData entity keyed by the LINE user id.

    The new user starts with a placeholder dash id and the current timestamp,
    and is then made the current user via _setCurrentUser.

    Args:
        lineId: LINE user identifier, used both as the entity key_name and
            the lineId property.
    """
    user_id = lineId
    userData = UserData(
        key_name=user_id,
        lineId=user_id,
        dashId='not_registered_yet',
        registrationTime=datetime.datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
        # NOTE(review): this literal is mojibake in the source; the original
        # (presumably Japanese) text could not be recovered, so it is kept
        # byte-identical.
        message=u'??????????????',
    )
    userData.put()
    _setCurrentUser(user_id)
def getSkel(self):
    """
    Returns a matching :class:`server.db.skeleton.Skeleton` instance for the
    current query.

    It's only possible to use this function if this query has been created
    using :func:`server.skeleton.Skeleton.all`.

    :returns: The Skeleton or None if the result-set is empty.
    :rtype: :class:`server.db.skeleton.Skeleton`
    """
    if self.srcSkel is None:
        raise NotImplementedError("This query has not been created using skel.all()")
    res = self.get()
    if res is None:
        return None
    # Populate the source skeleton in place from the fetched entity.
    self.srcSkel.setValues(res, key=res.key())
    return self.srcSkel
def __setitem__(self, name, value):
    """
    Implements the [] operator. Used to set property value(s).

    :param name: Name of the property to set.
    :type name: str
    :param value: Any value to set to the property.
    :raises: :exc:`BadPropertyError` if the property name is the \
        empty string or not a string.
    :raises: :exc:`BadValueError` if the value is not a supported type.
    """
    # Empty lists can't be stored, so they are normalized to None.
    # As "does not exist" queries aren't possible anyway, this makes
    # no observable difference.
    if isinstance(value, (list, tuple)) and not value:
        value = None
    super(Entity, self).__setitem__(name, value)
def set(self, key, value, indexed=True):
    """
    Sets a property.

    :param key: key of the property to set.
    :type key: str
    :param value: Any value to set to the property.
    :param indexed: Defines if the value is indexed.
    :type indexed: bool
    :raises: :exc:`BadPropertyError` if the property name is the \
        empty string or not a string.
    :raises: :exc:`BadValueError` if the value is not a supported type.
    """
    if not indexed:
        # Register the key as unindexed before assigning the value.
        unindexed = list(self.getUnindexedProperties())
        if key not in unindexed:
            self.setUnindexedProperties(unindexed + [key])
    self[key] = value
def __getitem__(self, key):
    """Returns the config value for *key*, refreshing the local copy first.

    At most once per ``self.updateInterval`` the locally held ``self.data``
    dict is refreshed: first from memcache, then from the datastore; if
    neither holds a config, the current in-memory defaults are written back
    to the datastore and memcache.
    """
    currTime = datetime.now()
    if currTime>self.ctime+self.updateInterval:
        data = memcache.get( self.keyName )
        if data: #Loaded successfully from Memcache
            self.data.update( data )
            self.ctime = currTime
        else:
            data = SharedConf.SharedConfData.get_by_key_name( self.keyName )
            if data:
                # Copy every dynamic property from the datastore entity.
                for k in data.dynamic_properties():
                    self.data[ k ] = getattr( data, k )
            else: #There isn't any config in the db nor the memcache
                data = SharedConf.SharedConfData( key_name=self.keyName )
                for k,v in self.data.items(): #Initialize the DB-Config
                    setattr( data, k, v )
                data.put()
            # Cache the (possibly just-initialized) config for a day.
            memcache.set( self.keyName, self.data, 60*60*24 )
    return( self.data[ key ] )
def save_auth_tokens(token_dict, user=None):
    """Associates the tokens with the current user and writes to the datastore.

    If there is no current user, the tokens are not written and this function
    returns None.

    Returns:
        The key of the datastore entity containing the user's tokens, or None
        if there was no current user.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return None
    memcache.set('gdata_pickled_tokens:%s' % user, pickle.dumps(token_dict))
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens is None:
        # First save for this user: create a fresh collection.
        user_tokens = TokenCollection(
            user=user,
            pickled_tokens=pickle.dumps(token_dict))
    else:
        user_tokens.pickled_tokens = pickle.dumps(token_dict)
    return user_tokens.put()
def load_auth_tokens(user=None):
    """Reads a dictionary of the current user's tokens from the datastore.

    If there is no current user (a user is not signed in to the app) or the user
    does not have any tokens, an empty dictionary is returned.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return {}
    # Fast path: tokens were cached by a previous save/load.
    pickled_tokens = memcache.get('gdata_pickled_tokens:%s' % user)
    if pickled_tokens:
        return pickle.loads(pickled_tokens)
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens:
        # Repopulate the cache from the datastore copy before returning.
        memcache.set('gdata_pickled_tokens:%s' % user, user_tokens.pickled_tokens)
        return pickle.loads(user_tokens.pickled_tokens)
    return {}
def add_values():
    """Demonstrates the basic memcache write operations (add/set_multi/incr)."""
    # [START add_values]
    # Add a value if it doesn't exist in the cache
    # with a cache expiration of 1 hour.
    memcache.add(key="weather_USA_98105", value="raining", time=3600)
    # Set several values, overwriting any existing values for these keys.
    memcache.set_multi(
        {"USA_98115": "cloudy", "USA_94105": "foggy", "USA_94043": "sunny"},
        key_prefix="weather_",
        time=3600
    )
    # Atomically increment an integer value.
    memcache.set(key="counter", value=0)
    memcache.incr("counter")
    memcache.incr("counter")
    memcache.incr("counter")
    # [END add_values]
def CreateXsrfToken(action):
    """Generate a token to be passed with a form for XSRF protection.

    Args:
        action: action to restrict token to

    Returns:
        suitably random token which is only valid for ten minutes and, if the
        user is authenticated, is only valid for the user that generated it.
    """
    import os
    user_str = _MakeUserStr()
    # os.urandom is a cryptographically secure source. The previous
    # random.random()-based generator was predictable (unsuitable for a
    # security token) and its int(random()*255) rounding could never
    # produce the byte value 255.
    token = base64.b64encode(os.urandom(64))
    memcache.set(token,
                 (user_str, action),
                 time=XSRF_VALIDITY_TIME,
                 namespace=MEMCACHE_NAMESPACE)
    return token
def _SetValue(self, key, type_, value):
"""Convert a string value and store the result in memcache.
Args:
key: String
type_: String, describing what type the value should have in the cache.
value: String, will be converted according to type_.
Returns:
Result of memcache.set(key, converted_value). True if value was set.
Raises:
ValueError: Value can't be converted according to type_.
"""
for _, converter, typestr in self.TYPES:
if typestr == type_:
value = converter(value)
break
else:
raise ValueError('Type %s not supported.' % type_)
return memcache.set(key, value)
def get_by_sid(cls, sid):
    """Returns a ``Session`` instance by session id.

    :param sid:
        A session id.
    :returns:
        An existing ``Session`` entity.
    """
    data = memcache.get(sid)
    if not data:
        # Cache miss: fall back to the datastore and repopulate memcache.
        session = model.Key(cls, sid).get()
        if session:
            data = session.data
            memcache.set(sid, data)
    return data
def save_auth_tokens(token_dict, user=None):
    """Associates the tokens with the current user and writes to the datastore.

    If there is no current user, the tokens are not written and this function
    returns None.

    Returns:
        The key of the datastore entity containing the user's tokens, or None if
        there was no current user.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return None
    # Keep the memcache copy in sync with what is written to the datastore.
    memcache.set('gdata_pickled_tokens:%s' % user, pickle.dumps(token_dict))
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens:
        user_tokens.pickled_tokens = pickle.dumps(token_dict)
        return user_tokens.put()
    else:
        # First save for this user: create a fresh collection.
        user_tokens = TokenCollection(
            user=user,
            pickled_tokens=pickle.dumps(token_dict))
        return user_tokens.put()
def load_auth_tokens(user=None):
    """Reads a dictionary of the current user's tokens from the datastore.

    If there is no current user (a user is not signed in to the app) or the
    user does not have any tokens, an empty dictionary is returned.
    """
    current = user if user is not None else users.get_current_user()
    if current is None:
        return {}
    cache_key = 'gdata_pickled_tokens:%s' % current
    cached = memcache.get(cache_key)
    if cached:
        return pickle.loads(cached)
    stored = TokenCollection.all().filter('user =', current).get()
    if stored:
        # Warm the cache with the datastore copy for the next lookup.
        memcache.set(cache_key, stored.pickled_tokens)
        return pickle.loads(stored.pickled_tokens)
    return {}
def get_access_token(force=False):
    """Tries to obtain access token from memcache and, if it fails,
    obtains a new set and stores in memcache.

    See https://dev.twitter.com/oauth/application-only.

    Deleting the memcache key `access_token` will trigger a token refresh.
    """
    token = memcache.get('access_token')
    if not force and token is not None:
        return token
    logging.warning('Needed to fetch access_token')
    credentials = base64.b64encode(
        "{}:{}".format(urllib.quote_plus(CUSTOMER_KEY),
                       urllib.quote_plus(CUSTOMER_SECRET)))
    response = urlfetch.fetch(
        'https://api.twitter.com/oauth2/token',
        payload='grant_type=client_credentials',
        method=urlfetch.POST,
        headers={'Authorization': 'Basic ' + credentials})
    if response.status_code == urlfetch.httplib.OK:
        token = json.loads(response.content)['access_token']
        memcache.set('access_token', token, 2592000)  # 30 days
    return token
def add_user_message(self, kind, msg, detail='', time=15 * 60):
    """
    Add a message to the current user to memcache.
    """
    # Messages are only tracked for signed-in Facebook users.
    if not self.facebook.uid:
        return
    key = 'messages:%s' % self.facebook.uid
    self._messages = memcache.get(key)
    message = {
        'kind': kind,
        'message': msg,
        'detail': detail,
    }
    if self._messages is None:
        self._messages = [message]
    else:
        self._messages.append(message)
    memcache.set(key, self._messages, time=time)
def get_request_token(user, callback):
    '''
    Get an Evernote OAuth request token and return the authorize URL.

    :param user: The user the token is requested for; the key id scopes the
        memcache entry holding the oauth token secret.
    :param callback: URL Evernote redirects to after authorization.
    :returns: The Evernote authorization URL for this request token.
    '''
    from settings import secrets
    client = EvernoteClient(
        consumer_key=secrets.EVERNOTE_CONSUMER_KEY,
        consumer_secret=secrets.EVERNOTE_CONSUMER_SECRET,
        sandbox=SANDBOX
    )
    request_token = client.get_request_token(callback)
    logging.debug(request_token)
    # Save secret so the OAuth handshake can be completed later.
    memcache.set(SECRET_MCK % user.key.id(), request_token['oauth_token_secret'])
    authorize_url = client.get_authorize_url(request_token)
    return authorize_url
def _goals_request(self):
    """Builds the spoken summary of the user's current goals.

    Monthly goals take precedence over annual ones; missing goal text and
    missing goals each get a dedicated message.
    """
    annual, monthly, longterm = Goal.Current(self.user)
    g = None
    if monthly:
        g = monthly
        speech = "Goals for %s. " % datetime.strftime(g.date, "%B %Y")
    elif annual:
        g = annual
        speech = "Goals for %s. " % g.date.year
    if g is None:
        return "You haven't set up any goals yet. " + GOAL.SET_INFO
    if not g.text:
        return "No goals yet"
    for i, text in enumerate(g.text):
        speech += "%d: %s. " % (i + 1, text)
    return speech
def set(self, url, content):
    """Stores ``content`` in memcache under ``url`` for ``self._max_age`` seconds.

    Best-effort: any failure (including a bad ``_max_age``) is logged and
    swallowed so a cache problem never breaks the caller.
    """
    try:
        memcache.set(url, content, time=int(self._max_age), namespace=NAMESPACE)
    except Exception as e:
        logging.warning(e, exc_info=True)
def post(self):
    """Handles a Pub/Sub push message carrying entity counts.

    Validates the subscription token, decodes the CSV payload
    ("<iso-timestamp>,<entity>,<count>,...") and, when the message is newer
    than the cached snapshot, stores the top-10 entities in memcache.
    """
    if pubsub_utils.SUBSCRIPTION_UNIQUE_TOKEN != self.request.get('token'):
        self.response.status = 404
        return
    # Decode the push envelope; the body is url-encoded with '=' padding.
    message = json.loads(urllib.unquote(self.request.body).rstrip('='))
    message_body = base64.b64decode(str(message['message']['data']))
    message = message_body.split(',')
    # First field is an ISO timestamp (fractional seconds stripped).
    d = datetime.strptime(message[0][:-5], '%Y-%m-%dT%H:%M:%S')
    timestamp = time.mktime(d.timetuple())
    message = message[1:]
    # Remaining fields alternate entity,count.
    entities = zip(message[::2], map(int, message[1::2]))
    data_raw = memcache.get(MC_OSCARS_TOP10)
    # Reuse the value already fetched; the original re-read the key, which
    # wasted an RPC and could crash if the entry expired between the calls.
    data = json.loads(data_raw) if data_raw else None
    if data is None or data['timestamp'] < timestamp:
        memcache.set(MC_OSCARS_TOP10, json.dumps({
            'timestamp': timestamp,
            'entities': entities
        }))
def query_or_cache(entity, start, stop, force=False):
    """Returns entity-frequency rows for (start, stop], memcache-backed.

    A 30-second guard key throttles datastore queries; cached data is
    extended incrementally when it is more than 30 seconds stale.

    Args:
        entity: Entity name to filter TwitterEntityFreq on.
        start, stop: Exclusive/inclusive timestamp bounds of the window.
        force: When True, bypass the cache and re-query the full window.
    """
    def _fetch(lower):
        # Query rows in (lower, stop], ordered by timestamp.
        rows = TwitterEntityFreq.query(
            TwitterEntityFreq.entity == entity,
            TwitterEntityFreq.timestamp > lower,
            TwitterEntityFreq.timestamp <= stop
        ).order(TwitterEntityFreq.timestamp)
        return [MockTWE(i) for i in rows]

    def _store(rows):
        # Arm the 30s guard so concurrent requests don't stampede the
        # datastore, then cache the rows themselves.
        memcache.set(MC_GUARD % entity, 1, 30)
        memcache.set(MC_KEY % entity, rows)

    if force:
        data = _fetch(start)
        _store(data)
        return list(data)
    nq = memcache.get(MC_GUARD % entity)
    data = memcache.get(MC_KEY % entity)
    if data:
        # Cache hit: extend with newer rows when stale and unguarded.
        if stop - data[-1].timestamp > timedelta(seconds=30) and not nq:
            data += _fetch(data[-1].timestamp)
            _store(data)
    elif not nq:
        data = _fetch(start)
        _store(data)
    # Guarded cache miss could leave data as None; previously this crashed
    # iterating None -- return an empty result instead.
    return [i for i in (data or []) if start < i.timestamp <= stop]