def save_func(self, obj):
    try:
        self.save_global(obj)
        return
    except pickle.PicklingError:
        pass
    assert type(obj) is types.FunctionType
    self.save(types.FunctionType)
    self.save((
        obj.func_code,
        obj.func_globals,
        obj.func_name,
        obj.func_defaults,
        obj.func_closure,
    ))
    self.write(pickle.REDUCE)
    if id(obj) not in self.memo:  # could already be memoized if we landed here recursively; see also pickle.Pickler.save_tuple()
        self.memoize(obj)
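
# The method above follows the generic REDUCE pattern: push a callable
# (types.FunctionType), push a tuple of arguments, then emit the REDUCE
# opcode so the unpickler calls the callable with those arguments.  A minimal
# way to observe the same opcode with nothing but the standard library
# (illustrative sketch, not part of the snippet above):
import pickle
import pickletools

# complex has a copy_reg-registered reducer, so its pickled form also applies
# a callable to an argument tuple via REDUCE; the disassembly shows the opcode.
pickletools.dis(pickle.dumps(complex(1, 2), protocol=2))
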
def save_function_tuple(self, func):
    """ Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.  Keep in mind that some of these pieces
    can contain a ref to the func itself.  Thus, a naive save on these
    pieces could trigger an infinite loop of save's.  To get around that,
    we first create a skeleton func object using just the code (this is
    safe, since this won't contain a ref to the func), and memoize it as
    soon as it's created.  The other stuff can then be filled in later.
    """
    save = self.save
    write = self.write

    code, f_globals, defaults, closure, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((code, closure, base_globals))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    save(f_globals)
    save(defaults)
    save(dct)
    save(func.__module__)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
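
# _fill_function and _make_skel_func are referenced above but not defined in
# this listing.  A plausible minimal sketch of what they need to do (Python 2
# attribute names; an assumption, not the original implementation):
import types

def _make_skel_func(code, closure, base_globals=None):
    # Build a bare function from just its code object so it can be memoized
    # before any of its (possibly self-referencing) pieces are pickled.
    if base_globals is None:
        base_globals = {}
    base_globals['__builtins__'] = __builtins__
    return types.FunctionType(code, base_globals, None, None, closure)

def _fill_function(func, func_globals, defaults, dct, module):
    # Patch the remaining attributes onto the skeleton created above.
    func.func_globals.update(func_globals)
    func.func_defaults = defaults
    func.func_dict = dct
    func.__module__ = module
    return func
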
def base64unpickle(value, unsafe=False):
    """
    Decodes value from Base64 to plain format and deserializes (with pickle) its content

    >>> base64unpickle('gAJVBmZvb2JhcnEBLg==')
    'foobar'
    """
    retVal = None

    def _(self):
        if len(self.stack) > 1:
            func = self.stack[-2]
            if func not in PICKLE_REDUCE_WHITELIST:
                raise Exception, "abusing reduce() is bad, Mkay!"
        self.load_reduce()

    def loads(str):
        f = StringIO.StringIO(str)
        if unsafe:
            unpickler = picklePy.Unpickler(f)
            unpickler.dispatch[picklePy.REDUCE] = _
        else:
            unpickler = pickle.Unpickler(f)
        return unpickler.load()

    try:
        retVal = loads(base64decode(value))
    except TypeError:
        retVal = loads(base64decode(bytes(value)))

    return retVal
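
# Illustrative usage of base64unpickle above.  PICKLE_REDUCE_WHITELIST,
# base64decode and picklePy (a patchable pure-Python pickle module) are
# assumed to be defined elsewhere in the project.
import base64
import os
import pickle

print base64unpickle('gAJVBmZvb2JhcnEBLg==')   # 'foobar' -- no REDUCE opcode involved

class Evil(object):
    # Classic pickle gadget: __reduce__ asks the unpickler to call os.system.
    def __reduce__(self):
        return (os.system, ('id',))

payload = base64.b64encode(pickle.dumps(Evil(), protocol=2))
base64unpickle(payload, unsafe=True)   # guarded path: raises "abusing reduce() is bad, Mkay!"
# The default (unsafe=False) path uses the stock Unpickler with no REDUCE check.
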
def save_function_tuple(self, func):
    """ Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.  Keep in mind that some of these pieces
    can contain a ref to the func itself.  Thus, a naive save on these
    pieces could trigger an infinite loop of save's.  To get around that,
    we first create a skeleton func object using just the code (this is
    safe, since this won't contain a ref to the func), and memoize it as
    soon as it's created.  The other stuff can then be filled in later.
    """
    save = self.save
    write = self.write

    code, f_globals, defaults, closure, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((code, closure, base_globals))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    save(f_globals)
    save(defaults)
    save(dct)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
def save_method(self, obj):
    try:
        self.save_global(obj)
        return
    except pickle.PicklingError:
        pass
    assert type(obj) is types.MethodType
    self.save(types.MethodType)
    self.save((obj.im_func, obj.im_self, obj.im_class))
    self.write(pickle.REDUCE)
    self.memoize(obj)
def save_code(self, obj):
    assert type(obj) is types.CodeType
    self.save(marshal.loads)
    self.save((marshal.dumps(obj),))
    self.write(pickle.REDUCE)
    self.memoize(obj)
def save_cell(self, obj):
    assert type(obj) is CellType
    self.save(makeFuncCell)
    self.save((obj.cell_contents,))
    self.write(pickle.REDUCE)
    self.memoize(obj)
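
# CellType and makeFuncCell are not defined in this listing.  A plausible
# Python 2 sketch (an assumption): the cell type has no public constructor,
# so a throwaway closure is used to manufacture one.
def makeFuncCell(value):
    def _inner():
        return value
    return _inner.func_closure[0]

CellType = type(makeFuncCell(None))
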
def intellisave_dict(self, obj):
    modname = getModNameForModDict(obj)
    if modname:
        self.save(getModuleDict)
        self.save((modname,))
        self.write(pickle.REDUCE)
        self.memoize(obj)
        return
    self.save_dict(obj)
def save_buffer(self, obj):
    self.save(buffer)
    self.save((str(obj),))
    self.write(pickle.REDUCE)
def save_function(self, obj, name=None):
    """ Registered with the dispatch to handle all function types.

    Determines what kind of function obj is (e.g. lambda, defined at
    interactive prompt, etc) and handles the pickling appropriately.
    """
    write = self.write

    if name is None:
        name = obj.__name__
    try:
        # whichmodule() could fail, see
        # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
        modname = pickle.whichmodule(obj, name)
    except Exception:
        modname = None
    # print('which gives %s %s %s' % (modname, obj, name))
    try:
        themodule = sys.modules[modname]
    except KeyError:
        # eval'd items such as namedtuple give invalid items for their function __module__
        modname = '__main__'

    if modname == '__main__':
        themodule = None

    if themodule:
        self.modules.add(themodule)
        if getattr(themodule, name, None) is obj:
            return self.save_global(obj, name)

    # if func is lambda, def'ed at prompt, is in main, or is nested, then
    # we'll pickle the actual function object rather than simply saving a
    # reference (as is done in default pickler), via save_function_tuple.
    if islambda(obj) or obj.__code__.co_filename == '<stdin>' or themodule is None:
        # print("save global", islambda(obj), obj.__code__.co_filename, modname, themodule)
        self.save_function_tuple(obj)
        return
    else:
        # func is nested
        klass = getattr(themodule, name, None)
        if klass is None or klass is not obj:
            self.save_function_tuple(obj)
            return

    if obj.__dict__:
        # essentially save_reduce, but workaround needed to avoid recursion
        self.save(_restore_attr)
        write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
        self.memoize(obj)
        self.save(obj.__dict__)
        write(pickle.TUPLE + pickle.REDUCE)
    else:
        write(pickle.GLOBAL + modname + '\n' + name + '\n')
        self.memoize(obj)
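
# A simplified illustration of the dispatch decision above: importable
# module-level functions are pickled by reference (save_global), while
# lambdas, functions living in __main__, and functions their module does not
# actually expose are pickled by value (save_function_tuple).  islambda is
# assumed to be the usual one-line helper; this sketch ignores the
# nested-function re-check that the full method also performs.
import os

def islambda(func):
    return getattr(func, '__name__', None) == '<lambda>'

def make_nested():
    def nested():
        return 42
    return nested

for f in (os.path.join, lambda x: x + 1, make_nested()):
    by_value = (islambda(f)
                or f.__code__.co_filename == '<stdin>'
                or f.__module__ == '__main__')
    print f.__name__, '-> by value' if by_value else '-> by reference'
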
def save_reduce(self, func, args, state=None,
                listitems=None, dictitems=None, obj=None):
    """Modified to support __transient__ on new objects
    Change only affects protocol level 2 (which is always used by PiCloud)"""
    # args from __reduce__ must be a tuple
    if not isinstance(args, tuple):
        raise pickle.PicklingError("args from reduce() should be a tuple")

    # func must be callable
    if not hasattr(func, '__call__'):
        raise pickle.PicklingError("func from reduce should be callable")

    save = self.save
    write = self.write

    # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
    if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
        # Added fix to allow transient
        cls = args[0]
        if not hasattr(cls, "__new__"):
            raise pickle.PicklingError(
                "args[0] from __newobj__ args has no __new__")
        if obj is not None and cls is not obj.__class__:
            raise pickle.PicklingError(
                "args[0] from __newobj__ args has the wrong class")
        args = args[1:]
        save(cls)

        # Don't pickle transient entries
        if hasattr(obj, '__transient__'):
            transient = obj.__transient__
            state = state.copy()
            for k in list(state.keys()):
                if k in transient:
                    del state[k]

        save(args)
        write(pickle.NEWOBJ)
    else:
        save(func)
        save(args)
        write(pickle.REDUCE)

    if obj is not None:
        self.memoize(obj)

    # More new special cases (that work with older protocols as
    # well): when __reduce__ returns a tuple with 4 or 5 items,
    # the 4th and 5th item should be iterators that provide list
    # items and dict items (as (key, value) tuples), or None.
    if listitems is not None:
        self._batch_appends(listitems)

    if dictitems is not None:
        self._batch_setitems(dictitems)

    if state is not None:
        save(state)
        write(pickle.BUILD)
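
# The __transient__ handling above is a PiCloud extension, not part of stock
# pickle.  A sketch of the convention (the class below is illustrative and
# relies on the default protocol-2 __reduce_ex__, which routes new-style
# instances through the __newobj__ branch above): attributes named in
# __transient__ are stripped from the state dict before it is written, so
# they are simply absent after a round trip and must be rebuilt by the caller.
class Connection(object):
    __transient__ = ('_socket',)

    def __init__(self):
        self.host = 'example.com'
        self._socket = open('/dev/null')   # runtime-only state, not worth pickling

# Pickled with the pickler above at protocol 2, the restored instance keeps
# .host but has no ._socket attribute.
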
def save_function(self, obj, name=None):
    """ Registered with the dispatch to handle all function types.

    Determines what kind of function obj is (e.g. lambda, defined at
    interactive prompt, etc) and handles the pickling appropriately.
    """
    write = self.write

    if name is None:
        name = obj.__name__
    modname = pickle.whichmodule(obj, name)
    # print('which gives %s %s %s' % (modname, obj, name))
    try:
        themodule = sys.modules[modname]
    except KeyError:
        # eval'd items such as namedtuple give invalid items for their function __module__
        modname = '__main__'

    if modname == '__main__':
        themodule = None

    if themodule:
        self.modules.add(themodule)
        if getattr(themodule, name, None) is obj:
            return self.save_global(obj, name)

    # if func is lambda, def'ed at prompt, is in main, or is nested, then
    # we'll pickle the actual function object rather than simply saving a
    # reference (as is done in default pickler), via save_function_tuple.
    if islambda(obj) or obj.__code__.co_filename == '<stdin>' or themodule is None:
        # print("save global", islambda(obj), obj.__code__.co_filename, modname, themodule)
        self.save_function_tuple(obj)
        return
    else:
        # func is nested
        klass = getattr(themodule, name, None)
        if klass is None or klass is not obj:
            self.save_function_tuple(obj)
            return

    if obj.__dict__:
        # essentially save_reduce, but workaround needed to avoid recursion
        self.save(_restore_attr)
        write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
        self.memoize(obj)
        self.save(obj.__dict__)
        write(pickle.TUPLE + pickle.REDUCE)
    else:
        write(pickle.GLOBAL + modname + '\n' + name + '\n')
        self.memoize(obj)
def save_function_tuple(self, func):
    """ Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.  Keep in mind that some of these pieces
    can contain a ref to the func itself.  Thus, a naive save on these
    pieces could trigger an infinite loop of save's.  To get around that,
    we first create a skeleton func object using just the code (this is
    safe, since this won't contain a ref to the func), and memoize it as
    soon as it's created.  The other stuff can then be filled in later.
    """
    if is_tornado_coroutine(func):
        self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                         obj=func)
        return

    save = self.save
    write = self.write

    code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    self._save_subimports(
        code,
        itertools.chain(f_globals.values(), closure_values or ()),
    )

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((
        code,
        len(closure_values) if closure_values is not None else -1,
        base_globals,
    ))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    save(f_globals)
    save(defaults)
    save(dct)
    save(func.__module__)
    save(closure_values)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
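
# is_tornado_coroutine and _rebuild_tornado_coroutine are not shown in this
# listing.  A plausible sketch along the lines of cloudpickle's helpers
# (assumption: Tornado >= 4.5, which exposes gen.is_coroutine_function).  The
# coroutine wrapper itself is not pickled; only the wrapped function is, and
# the decorator is re-applied on load.
import sys

def is_tornado_coroutine(func):
    # Only meaningful if tornado.gen has already been imported by the caller.
    if 'tornado.gen' not in sys.modules:
        return False
    gen = sys.modules['tornado.gen']
    if not hasattr(gen, 'is_coroutine_function'):
        return False
    return gen.is_coroutine_function(func)

def _rebuild_tornado_coroutine(func):
    from tornado import gen
    return gen.coroutine(func)
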
def save_reduce(self, func, args, state=None,
                listitems=None, dictitems=None, obj=None):
    # args from __reduce__ must be a tuple
    if not isinstance(args, tuple):
        raise pickle.PicklingError("args from reduce() should be a tuple")

    # func must be callable
    if not hasattr(func, '__call__'):
        raise pickle.PicklingError("func from reduce should be callable")

    save = self.save
    write = self.write

    # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
    if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
        cls = args[0]
        if not hasattr(cls, "__new__"):
            raise pickle.PicklingError(
                "args[0] from __newobj__ args has no __new__")
        if obj is not None and cls is not obj.__class__:
            raise pickle.PicklingError(
                "args[0] from __newobj__ args has the wrong class")
        args = args[1:]
        save(cls)
        save(args)
        write(pickle.NEWOBJ)
    else:
        save(func)
        save(args)
        write(pickle.REDUCE)

    if obj is not None:
        self.memoize(obj)

    # More new special cases (that work with older protocols as
    # well): when __reduce__ returns a tuple with 4 or 5 items,
    # the 4th and 5th item should be iterators that provide list
    # items and dict items (as (key, value) tuples), or None.
    if listitems is not None:
        self._batch_appends(listitems)

    if dictitems is not None:
        self._batch_setitems(dictitems)

    if state is not None:
        save(state)
        write(pickle.BUILD)
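
# A small, standard-library illustration of the 4- and 5-item reduce tuples
# handled at the end of save_reduce: the 4th item feeds _batch_appends (list
# items), the 5th feeds _batch_setitems ((key, value) pairs), and any state is
# applied last via the BUILD opcode.
import pickle

class Bag(list):
    def __init__(self, items=(), label=None):
        list.__init__(self, items)
        self.label = label

    def __reduce__(self):
        # (callable, args, state, listitems, dictitems)
        return (Bag, (), {'label': self.label}, iter(self), None)

restored = pickle.loads(pickle.dumps(Bag([1, 2, 3], label='demo'), protocol=2))
print list(restored), restored.label   # [1, 2, 3] demo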