import pickle
import six

def pickle_loads(s):
    # On Python 3, decode Python 2 byte strings with latin-1 so raw bytes survive the round trip.
    if six.PY3:
        return pickle.loads(s, encoding='iso-8859-1')
    else:
        return pickle.loads(s)
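A minimal usage sketch (not from the original project; the payload bytes below are hand-built for illustration): this is what Python 2's pickle.dumps('\xff\xfe binary', protocol=2) emits, and routing it through the helper above loads it on both interpreters, because iso-8859-1 maps every byte value to a character, whereas Python 3's default ASCII codec would raise UnicodeDecodeError on the high bytes.

# Hypothetical payload: the protocol-2 pickle of the Python 2 str '\xff\xfe binary'.
py2_payload = b'\x80\x02U\x09\xff\xfe binaryq\x00.'
obj = pickle_loads(py2_payload)
print(repr(obj))  # Python 3: the latin-1 decoded text; Python 2: the original byte str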
Example source code for Python's pickle.loads()
File: test_pickle_unpickle_theano_fn.py (project: Theano-Deep-learning, author: GeekLiB)

def test_pickle_unpickle_with_reoptimization():
    mode = theano.config.mode
    if mode in ["DEBUG_MODE", "DebugMode"]:
        mode = "FAST_RUN"
    x1 = T.fmatrix('x1')
    x2 = T.fmatrix('x2')
    x3 = theano.shared(numpy.ones((10, 10), dtype=floatX))
    x4 = theano.shared(numpy.ones((10, 10), dtype=floatX))
    y = T.sum(T.sum(T.sum(x1 ** 2 + x2) + x3) + x4)

    updates = OrderedDict()
    updates[x3] = x3 + 1
    updates[x4] = x4 + 1
    f = theano.function([x1, x2], y, updates=updates, mode=mode)
    # now pickle the compiled theano fn
    string_pkl = pickle.dumps(f, -1)

    in1 = numpy.ones((10, 10), dtype=floatX)
    in2 = numpy.ones((10, 10), dtype=floatX)

    # test unpickle with optimization
    default = theano.config.reoptimize_unpickled_function
    try:
        # the default is True
        theano.config.reoptimize_unpickled_function = True
        f_ = pickle.loads(string_pkl)
        assert f(in1, in2) == f_(in1, in2)
    finally:
        theano.config.reoptimize_unpickled_function = default

def test_pickle_unpickle_without_reoptimization():
    mode = theano.config.mode
    if mode in ["DEBUG_MODE", "DebugMode"]:
        mode = "FAST_RUN"
    x1 = T.fmatrix('x1')
    x2 = T.fmatrix('x2')
    x3 = theano.shared(numpy.ones((10, 10), dtype=floatX))
    x4 = theano.shared(numpy.ones((10, 10), dtype=floatX))
    y = T.sum(T.sum(T.sum(x1**2 + x2) + x3) + x4)

    updates = OrderedDict()
    updates[x3] = x3 + 1
    updates[x4] = x4 + 1
    f = theano.function([x1, x2], y, updates=updates, mode=mode)
    # now pickle the compiled theano fn
    string_pkl = pickle.dumps(f, -1)

    # compute f value
    in1 = numpy.ones((10, 10), dtype=floatX)
    in2 = numpy.ones((10, 10), dtype=floatX)

    # test unpickle without optimization
    default = theano.config.reoptimize_unpickled_function
    try:
        # the default is True
        theano.config.reoptimize_unpickled_function = False
        f_ = pickle.loads(string_pkl)
        assert f(in1, in2) == f_(in1, in2)
    finally:
        theano.config.reoptimize_unpickled_function = default

def test_pickle_bug(self):
    # Regression test for bug fixed in 24d4fd291054.
    o = Prod()
    s = pickle.dumps(o, protocol=-1)
    o = pickle.loads(s)
    pickle.dumps(o)

def test_none_Constant():
    """Test equals.

    We had an error in the past with unpickling.
    """
    o1 = Constant(NoneTypeT(), None, name='NoneConst')
    o2 = Constant(NoneTypeT(), None, name='NoneConst')
    assert o1.equals(o2)
    assert NoneConst.equals(o1)
    assert o1.equals(NoneConst)
    assert NoneConst.equals(o2)
    assert o2.equals(NoneConst)

    # This triggers the equals that returned the wrong answer in the past.
    import six.moves.cPickle as pickle
    import theano
    from theano import tensor

    x = tensor.vector('x')
    y = tensor.argmax(x)
    kwargs = {}
    # We can't pickle DebugMode
    if theano.config.mode in ["DebugMode", "DEBUG_MODE"]:
        kwargs = {'mode': 'FAST_RUN'}
    f = theano.function([x], [y], **kwargs)
    pickle.loads(pickle.dumps(f))

def test_pickle(self):
    a = T.scalar()  # the 'a' is for 'anonymous' (un-named).
    x, s = T.scalars('xs')
    f = function([x, In(a, value=1.0, name='a'),
                  In(s, value=0.0, update=s + a * x, mutable=True)],
                 s + a * x)

    try:
        # Note that here we also test protocol 0 on purpose, since it
        # should work (even though one should not use it).
        g = pickle.loads(pickle.dumps(f, protocol=0))
        g = pickle.loads(pickle.dumps(f, protocol=-1))
    except NotImplementedError as e:
        # str(e) works on both Python 2 and 3; indexing the exception does not.
        if str(e).startswith('DebugMode is not picklable'):
            return
        else:
            raise

    # if they both return, assume that they return equivalent things.
    # print [(k, id(k)) for k in f.finder.keys()]
    # print [(k, id(k)) for k in g.finder.keys()]
    self.assertFalse(g.container[0].storage is f.container[0].storage)
    self.assertFalse(g.container[1].storage is f.container[1].storage)
    self.assertFalse(g.container[2].storage is f.container[2].storage)
    self.assertFalse(x in g.container)
    self.assertFalse(x in g.value)
    self.assertFalse(g.value[1] is f.value[1])  # should not have been copied
    self.assertFalse(g.value[2] is f.value[2])  # should have been copied because it is mutable
    self.assertFalse((g.value[2] != f.value[2]).any())  # its contents should be identical

    self.assertTrue(f(2, 1) == g(2))  # they should be in sync, default value should be copied
    self.assertTrue(f(2, 1) == g(2))  # they should be in sync, default value should be copied
    f(1, 2)  # put them out of sync
    self.assertFalse(f(1, 2) == g(1, 2))  # they should not be equal anymore

def test_consistent_inner_fct(self):
    # Test that scan does not falsely detect inconsistencies in a valid
    # inner graph.
    rs = theano.sandbox.rng_mrg.MRG_RandomStreams(use_cuda=True)
    output, _ = theano.scan(lambda: rs.uniform((3,), dtype="float32"),
                            n_steps=3)
    pickle.loads(pickle.dumps(output))

    # Also ensure that, after compilation, the Scan has been moved to the GPU.
    fct = theano.function([], output, mode=self.mode_with_gpu)
    scan_nodes = scan_nodes_from_fct(fct)
    assert len(scan_nodes) == 1
    assert self.is_scan_on_gpu(scan_nodes[0])

def test_pickle(self):
    self.test_file_name_property()
    name = "file"
    file1 = os.path.join(self.tmp_dir, name)
    wrap = FileWrapper(file1)
    pickled_data = pickle.dumps(wrap)
    wrap2 = pickle.loads(pickled_data)
    print(wrap2.file_path)

def test_pickle(self):
    rpm_version = [int(v) for v in getattr(rpm, '__version__', '0.0').split('.')]
    if rpm_version[0:2] < [4, 10]:
        warnings.warn('RPM header pickling unsupported in rpm %s' % rpm_version)
        return
    wrap = RpmWrapper(self.file_path)
    pickled_data = pickle.dumps(wrap)
    wrap2 = pickle.loads(pickled_data)
    self.assertEqual(wrap.name, wrap2.name)

def test_pickle(self):
    wrap = SimpleRpmWrapper(self.file_path)
    pickled_data = pickle.dumps(wrap)
    wrap2 = pickle.loads(pickled_data)
    self.assertEqual(wrap.name, wrap2.name)

def _deserialize_function_sandbox(sandbox):
    '''
    environment : dictionary
        created by `serialize_sandbox`
    '''
    import marshal
    import importlib

    environment = {}
    defined_function = []
    main_func = None
    # first pass: deserialize everything except function types
    for name, (typ, val) in sandbox.items():
        if isinstance(typ, string_types):
            if typ == 'None':
                val = None
            elif typ == 'edward_distribution':
                try:
                    import edward
                    val = getattr(edward.models, val)
                except ImportError:
                    raise ImportError("Cannot import 'edward' library to deserialize "
                                      "the function.")
                # exec("from edward.models import %s as %s" % (val, name))
            elif typ == 'function_type':
                val = types.FunctionType
            elif typ == 'Mapping':
                val = cPickle.loads(val)
            elif typ == 'ndarray':
                val = np.fromstring(val[0], dtype=val[1])
            elif typ == 'module':
                val = importlib.import_module(val)
            elif 'imported_function' == typ:
                val = getattr(importlib.import_module(val[1]), val[0])
                if '_main' in typ: main_func = val
            elif 'defined_function' in typ:
                val = str_to_func(val, globals())
                if '_main' in typ: main_func = val
                defined_function.append(name)
        elif builtins.any(isinstance(typ, i) for i in _primitives):
            pass
        else:
            raise ValueError('Unsupported deserializing type: {}, '
                             'value: {}'.format(typ, val))
        environment[name] = val
    # ====== create all defined functions ====== #
    # second pass: rebind each defined function's globals to the new environment
    for name in defined_function:
        func = environment[name]
        func.__globals__.update(environment)
    return main_func, environment

def inference(label_fn, bboxes=False, flags=None):
    """
    Used to run inference against an nsr bbox model or an nsr model.

    Args:
        label_fn: Accepts (label, metadata_bbox) and returns the real label.
        bboxes: Use the metadata bbox if False; do not crop the image if None;
            otherwise crop each image to the given bounding box.
        flags: Optional flags object; defaults to _FLAGS.
    Returns:
        The labels returned by model.infer for the input files.
    """
    flags = flags or _FLAGS
    # Build the inference graph.
    g = tf.Graph()
    with g.as_default(), tf.device('/cpu:0'):
        model = create_model(flags, 'inference')
        model.build()
        saver = tf.train.Saver()
    g.finalize()

    model_path = tf.train.latest_checkpoint(flags.checkpoint_dir)
    if not model_path:
        tf.logging.info("Skipping inference. No checkpoint found in: %s",
                        flags.checkpoint_dir)
        return

    with tf.Session(graph=g) as sess:
        # Load the model from checkpoint.
        tf.logging.info("Loading model from checkpoint: %s", flags.checkpoint_dir)
        saver.restore(sess, model_path)

        files = [s.strip() for s in flags.input_files.split(',')]
        metadata = pickle.loads(open(flags.metadata_file_path, 'rb').read())
        real_labels = []
        sep_bboxes = []
        file_paths = [os.path.join(flags.data_dir_path, f) for f in files]
        data = []
        for i, f in enumerate(files):
            metadata_idx = metadata['filenames'].index(f)
            label, metadata_bbox = metadata['labels'][metadata_idx], metadata['bboxes'][metadata_idx]
            sep_bboxes.append(metadata['sep_bboxes'][metadata_idx])
            real_labels.append(label_fn(label, metadata_bbox))
            bbox = (bboxes[i] if bboxes is not None else None) if bboxes is not False else metadata_bbox
            data.append(inputs.read_img(file_paths[i], bbox, flags.bbox_expand))

        labels = model.infer(sess, data)
        for i in range(len(files)):
            tf.logging.info('inferred image %s(%s, %s): %s',
                            files[i], real_labels[i], sep_bboxes[i], labels[i])
        correct_inferences = filter(lambda i: real_labels[i] == labels[i][0], range(len(files)))
        correct_count = len(list(correct_inferences))
        tf.logging.info('correct count: %s, rate: %.4f', correct_count, correct_count / len(files))
        return labels