def promise_executor_job(job, promise):
"""
Toil job that runs a promise with an executor. Executes the executor and
rejects/resolves the promise.
Returns the promise's success result and error, as a pair.
"""
executor = dill.loads(promise.executor_dill)
# Run the executor, and handle resolution/rejection, possibly scheduling
# child jobs
executor(lambda result: promise.handle_resolve(job, result),
lambda err: promise.handle_reject(job, err))
# Grab the cached result and return it
return (promise.result, promise.err)
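For context, a minimal hedged sketch of building such an executor (the executor_dill field name comes from the snippet above; everything else is illustrative):
import dill

def executor(resolve, reject):
    # Any callable taking (resolve, reject) works; dill, unlike plain
    # pickle, can also serialize closures and lambdas.
    try:
        resolve(21 * 2)
    except Exception as err:
        reject(err)

executor_dill = dill.dumps(executor)  # later restored by promise_executor_job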
def _handle_loopback(self, message):
"""
Handle incoming messages in the loopback socket.
"""
header, data = dill.loads(message)
if header == 'EXECUTE_METHOD':
method, args, kwargs = data
try:
response = getattr(self, method)(*args, **kwargs)
except Exception as error:
yield format_method_exception(error, method, args, kwargs)
raise
yield response or True
else:
error = 'Unrecognized loopback message: {} {}'.format(header, data)
self.log_error(error)
yield error
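The sending side is not shown here; a hedged sketch of a message this handler accepts (the method name and arguments are illustrative):
import dill

# (header, data) pair, as unpacked by _handle_loopback()
message = dill.dumps(('EXECUTE_METHOD', ('log_error', ('boom',), {})))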
async def handler(reader, writer):
    queries = await reader.read(-1)
    try:
        queries = dill.loads(queries)
        shelf = queries.pop(0)
        result = QueryHandler(db, shelf, queries).run()
        result = dill.dumps(result)
    except Exception:
        print("Unexpected error:", sys.exc_info()[1])
        # send the exception itself back to the client before re-raising
        result = dill.dumps(sys.exc_info()[1])
        writer.write(result)
        await writer.drain()
        writer.close()
        raise
    writer.write(result)
    await writer.drain()
    writer.close()
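A hedged sketch of serving this coroutine with asyncio's stream API (host and port are placeholders; db and QueryHandler are assumed to exist as in the snippet):
import asyncio

async def main():
    # handler() reads until EOF (read(-1)), so clients must half-close the
    # socket (write_eof) after sending their dill-dumped query list.
    server = await asyncio.start_server(handler, '127.0.0.1', 8888)
    async with server:
        await server.serve_forever()

asyncio.run(main())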
def deepcopy(self):
""" Return a deep copy of the batch.
Constructs a new ``Batch`` instance and then recursively copies all
the objects found in the original batch, except the ``pipeline``,
which remains unchanged.
Returns
-------
Batch
"""
pipeline = self.pipeline
self.pipeline = None
dump_batch = dill.dumps(self)
self.pipeline = pipeline
restored_batch = dill.loads(dump_batch)
restored_batch.pipeline = pipeline
return restored_batch
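A short usage sketch (assuming a populated batch instance): the data is duplicated, while the pipeline is shared by reference:
copy = batch.deepcopy()
assert copy is not batch                  # contents deep-copied via dill
assert copy.pipeline is batch.pipeline    # pipeline deliberately shared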
def test_pickle(self):
import sys
if sys.version_info < (3, 4):
import dill as pickle
else:
import pickle
states = ['A', 'B', 'C', 'D']
# Define with list of dictionaries
transitions = [
{'trigger': 'walk', 'source': 'A', 'dest': 'B'},
{'trigger': 'run', 'source': 'B', 'dest': 'C'},
{'trigger': 'sprint', 'source': 'C', 'dest': 'D'}
]
m = Machine(states=states, transitions=transitions, initial='A')
m.walk()
dump = pickle.dumps(m)
self.assertIsNotNone(dump)
m2 = pickle.loads(dump)
self.assertEqual(m.state, m2.state)
m2.run()
def p_b_ot():
params = json.loads(cherrypy.request.headers['params'])
body = cherrypy.request.body.read()
event_stream = False
try:
body = json.loads(body)
if 'js_func' in body and body['js_func']:
js_func = True
else:
js_func = False
if 'event_stream' in body and body['event_stream']:
event_stream = True
except json.JSONDecodeError:
body = pickle.loads(body)
js_func = False
if 'type' in params:
obj_type = params['type']
else:
obj_type = None
return params, body, obj_type, js_func, event_stream
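For reference, a hedged client-side sketch of a request this parser accepts (the endpoint URL is hypothetical; the params header carries JSON, the body is either JSON or a pickle):
import json
import requests

requests.post(
    'http://localhost:8080/update',  # hypothetical endpoint
    headers={'params': json.dumps({'type': 'group', 'obj_id': 'g1'})},
    data=json.dumps({'update': {'name': 'renamed'}, 'js_func': False}),
)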
def restore_state(self, path):
"""Returns loaded state"""
try:
with open(path, 'rb') as f:
if self.encrypt:
state = pickle.loads(self.decrypt_data(pickle.load(f)))
else:
state = pickle.load(f)
LOG.debug("Restoring state successs")
except Exception as e:
LOG.debug("Restoring state from %s failed with %s" % (
path, e))
state = StateMachine(self.bot, state_path=path)
LOG.debug("Successfully inicialized new state.")
return state
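The writer is not shown; a hedged counterpart sketch that round-trips with the load path above (note the double pickling when encryption is on, mirroring pickle.loads(self.decrypt_data(pickle.load(f)))):
def save_state(self, state, path):
    """Hypothetical counterpart to restore_state()."""
    with open(path, 'wb') as f:
        if self.encrypt:
            # inner dumps -> encrypt -> outer dump: the exact inverse of
            # the load() / decrypt_data() / loads() chain above
            pickle.dump(self.encrypt_data(pickle.dumps(state)), f)
        else:
            pickle.dump(state, f)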
def restore_state(self, path):
"""Returns loaded state"""
tmp_file = io.BytesIO()
self.bucket.download_fileobj(path, tmp_file)
    if self.encrypt:
        # decrypt (not encrypt) on the way back in
        data = self.decrypt_data(tmp_file.getvalue())
    else:
        data = tmp_file.getvalue()
    # pickle.loads() expects bytes, so pass the raw payload straight through
    state = pickle.loads(data)
state.bot = self.bot
return state
def deserialize(name_d, func_code_d, args_d, clos_d, type_obj):
"""A function to deserialize an object serialized with the serialize
function.
Args:
name_d(unicode): the dumped name of the object
func_code_d(unicode): the dumped byte code of the function
args_d(unicode): the dumped information about the arguments
        clos_d(unicode): the dumped information about the function closure
        type_obj(str): 'func' for a plain function; anything else loads the
            object whole with dill
    Returns:
        a deserialized object"""
if type_obj == 'func':
name = pickle.loads(name_d.encode('raw_unicode_escape'))
code = dill.loads(func_code_d.encode('raw_unicode_escape'))
args = pickle.loads(args_d.encode('raw_unicode_escape'))
clos = dill.loads(clos_d.encode('raw_unicode_escape'))
loaded_obj = types.FunctionType(code, globals(), name, args, clos)
else: # pragma: no cover
loaded_obj = dill.loads(func_code_d.encode('raw_unicode_escape'))
return loaded_obj
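The serialize function referenced in the docstring is not shown; a hedged reconstruction of its 'func' branch, inverting the .encode('raw_unicode_escape') calls above:
import pickle
import dill

def serialize(func):
    """Hypothetical inverse of deserialize() for plain functions."""
    name_d = pickle.dumps(func.__name__).decode('raw_unicode_escape')
    func_code_d = dill.dumps(func.__code__).decode('raw_unicode_escape')
    args_d = pickle.dumps(func.__defaults__).decode('raw_unicode_escape')
    clos_d = dill.dumps(func.__closure__).decode('raw_unicode_escape')
    return name_d, func_code_d, args_d, clos_d, 'func'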
# Serialization utilities
def run_dill_encoded(what):
fun, args = dill.loads(what)
return fun(*args)
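This helper is the usual workaround for multiprocessing's reliance on plain pickle, which cannot handle lambdas or closures. A hedged sketch of the submitting side (the wrapper name is made up):
import dill
from multiprocessing import Pool

def apply_async_dill(pool, fun, args):
    # Ship a single dill-dumped (fun, args) blob; the worker then only has
    # to unpickle run_dill_encoded's one bytes argument.
    return pool.apply_async(run_dill_encoded, (dill.dumps((fun, args)),))

if __name__ == '__main__':
    with Pool(4) as pool:
        print(apply_async_dill(pool, lambda x: x * x, (7,)).get())  # 49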
def download_var_cloud(data_name):
if not isinstance(data_name, str):
print("data_name must be a string")
return
user_hash = settings.API_KEY
url = 'http://%s/api/save/getDownloadUrl' % settings.CATALEARN_URL
r = requests.post(url, data={
'type': 'variable',
'user_hash': user_hash,
'file_name': data_name
})
if r.status_code != 200:
raise RuntimeError(r.text)
    presigned_url = r.content.decode('utf-8')
    # Now send a GET request to the presigned URL and stream the payload down
    res = requests.get(presigned_url, stream=True)
raw = io.BytesIO()
download_progress(res, raw, data_name)
result = dill.loads(raw.getvalue())
return result
def promise_then_job(job, promise, prev_promise_returned):
"""
Toil job that runs a promise created with a then handler instead of an
executor.
Takes the promise and the (resolve value, reject value) pair from the
previous promise.
Returns the promise's success result and error, as a pair.
"""
then_handler = dill.loads(promise.then_dill)
resolved, rejected = prev_promise_returned
if rejected is None:
# Actually run this child promise
try:
# Get the result from the then handler and resolve with it
result = then_handler(resolved)
promise.handle_resolve(job, result)
except Exception as e:
# Reject with an error if there is one
Logger.error("".join(traceback.format_exception(*sys.exc_info())))
promise.handle_reject(job, e)
else:
# Parent promise rejected so we should not run
# Bubble up the error
promise.handle_reject(job, rejected)
return (promise.result, promise.err)
def _handle_loopback_safe(self, data):
"""
Handle incoming messages in the _loopback_safe socket.
"""
method, args, kwargs = dill.loads(data)
try:
response = getattr(self, method)(*args, **kwargs)
except Exception as error:
yield format_method_exception(error, method, args, kwargs)
raise
yield response
def _execute_tasks(self):
self._poll()
while not self._queue.empty():
task = dill.loads(self._queue.get())
_try_callback(self, task)
def import_object(obj):
import dill as pickle
import base64
# if obj is None:
# obj = sys.stdin.read().strip().encode('utf-8')
    if isinstance(obj, str):
obj = obj.strip().encode('utf-8')
return pickle.loads(gzip.zlib.decompress(base64.b64decode(obj)))
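The exporting direction is not shown; a hedged sketch of a matching helper (name assumed) that produces the base64-of-zlib-of-dill payload import_object expects:
def export_object(obj):
    import dill as pickle
    import base64
    import gzip
    # dill-pickle, deflate, then base64 so the blob survives text-only
    # channels such as stdin/stdout.
    return base64.b64encode(gzip.zlib.compress(pickle.dumps(obj)))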
def load(self):
"""a private method that loads an estimator object from the filesystem"""
if self.is_file_persisted:
self.object_file.open()
temp = dill.loads(self.object_file.read())
self.set_object(temp)
self.object_file.close()
def deserialize(data):
return dill.loads(data)
From mlengine_prediction_summary.py in apache/incubator-airflow-old:
def decode(self, x):
return json.loads(x)
def _load_blosc(self, ix, src=None, components=None):
""" Load data from a blosc packed file """
file_name = self._get_file_name(ix, src, 'blosc')
with open(file_name, 'rb') as f:
data = dill.loads(blosc.decompress(f.read()))
        if self.components is None:
            components = (next(iter(data)),)  # dict views aren't indexable in Python 3
else:
components = tuple(components or self.components)
item = tuple(data[i] for i in components)
return item
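The writing side is not shown; a hedged counterpart sketch (the method and the component layout are assumed to mirror _load_blosc):
import blosc
import dill

def _dump_blosc(self, ix, dst=None, components=None):
    """Hypothetical inverse of _load_blosc(): dill a dict of component
    arrays, blosc-compress it, and write it to the matching file."""
    file_name = self._get_file_name(ix, dst, 'blosc')
    components = tuple(components or self.components)
    data = {comp: getattr(self, comp) for comp in components}
    with open(file_name, 'wb') as f:
        f.write(blosc.compress(dill.dumps(data)))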
def decompress(compressed_workload_state):
return pickle.loads(zlib.decompress(compressed_workload_state))
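A hedged sketch of the matching compressor (assuming pickle here is dill aliased, as elsewhere on this page):
import zlib
import dill as pickle

def compress(workload_state):
    # Inverse of decompress() above: pickle first, then deflate.
    return zlib.compress(pickle.dumps(workload_state))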
From test_threading.py in thundergolfer/sudkamp-langs-machines-python:
def test_pickle(self):
import sys
if sys.version_info < (3, 4):
import dill as pickle
else:
import pickle
# go to non initial state B
self.stuff.to_B()
# pickle Stuff model
dump = pickle.dumps(self.stuff)
self.assertIsNotNone(dump)
stuff2 = pickle.loads(dump)
self.assertTrue(stuff2.machine.is_state("B"))
# check if machines of stuff and stuff2 are truly separated
stuff2.to_A()
self.stuff.to_C()
self.assertTrue(stuff2.machine.is_state("A"))
thread = Thread(target=stuff2.forward)
thread.start()
# give thread some time to start
time.sleep(0.01)
# both objects should be in different states
# and also not share locks
begin = time.time()
# stuff should not be locked and execute fast
self.assertTrue(self.stuff.machine.is_state("C"))
fast = time.time()
# stuff2 should be locked and take about 1 second
# to be executed
self.assertTrue(stuff2.machine.is_state("B"))
blocked = time.time()
self.assertAlmostEqual(fast-begin, 0, delta=0.1)
self.assertAlmostEqual(blocked-begin, 1, delta=0.1)
# Same as TestLockedTransition but with LockedHierarchicalMachine
From test_nesting.py in thundergolfer/sudkamp-langs-machines-python:
def test_pickle(self):
import sys
if sys.version_info < (3, 4):
import dill as pickle
else:
import pickle
states = ['A', 'B', {'name': 'C', 'children': ['1', '2', {'name': '3', 'children': ['a', 'b', 'c']}]},
'D', 'E', 'F']
transitions = [
{'trigger': 'walk', 'source': 'A', 'dest': 'B'},
{'trigger': 'run', 'source': 'B', 'dest': 'C'},
{'trigger': 'sprint', 'source': 'C', 'dest': 'D'}
]
m = self.stuff.machine_cls(states=states, transitions=transitions, initial='A')
m.walk()
dump = pickle.dumps(m)
self.assertIsNotNone(dump)
m2 = pickle.loads(dump)
self.assertEqual(m.state, m2.state)
m2.run()
if State.separator in '_':
m2.to_C_3_a()
m2.to_C_3_b()
else:
m2.to_C.s3.a()
m2.to_C.s3.b()
def MDLSTM_train(params):
    func, args = dill.loads(params)
    X_arr, model, dict_conv_param, grd_truth_seq, reg = args[0]
    return func(X_arr, model, dict_conv_param, grd_truth_seq, reg)
def MDLSTM_val(params):
    func, args = dill.loads(params)
    X_arr, model, dict_conv_param, rand_no, grd_truth_seq, reg = args[0]
    return func(X_arr, model, dict_conv_param, grd_truth_seq, reg), rand_no
def load(cls, filepath):
tmpdir = tempfile.mkdtemp()
error = None
logger.debug('extracting archive to: {}'.format(tmpdir))
try:
with tarfile.open(filepath, 'r:*') as ar:
ar.extractall(tmpdir)
        with open(os.path.join(tmpdir, 'metadata'), 'rb') as meta_file:
            payload = meta_file.read()
meta = dill.loads(payload)
instance = cls()
for attr_name, attr_val in meta.items():
setattr(instance, attr_name, attr_val)
if os.path.exists(os.path.join(tmpdir, 'vectors')):
logger.debug('loading word vectors')
import h5py
with h5py.File(os.path.join(tmpdir, 'vectors'), 'r') as h5:
setattr(instance, '_W', h5['vectors'][:])
else:
logger.debug('no word vectors found in archive')
except Exception as e:
logger.error('encountered error: {}'.format(e))
error = e
finally:
logger.debug('cleaning up {}'.format(tmpdir))
shutil.rmtree(tmpdir)
if error is not None:
raise error
return instance
def _get_function_and_execute(f_dill, *args):
f = dill.loads(f_dill)
return f(*args)
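A hedged usage sketch with concurrent.futures (the helper name is made up): pre-dilling the callable means the executor only has to pickle a bytes blob plus plain arguments:
import dill
from concurrent.futures import ProcessPoolExecutor

def submit_dill(executor, f, *args):
    return executor.submit(_get_function_and_execute, dill.dumps(f), *args)

if __name__ == '__main__':
    with ProcessPoolExecutor() as ex:
        print(submit_dill(ex, lambda a, b: a + b, 2, 3).result())  # 5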
def update(self, g_id, dbid, head, conn):
params, body, obj_type, js_func, event_stream = p_b_ot()
if obj_type + 's' in acceptable_types:
obj_id, uid, id_quote = id_or_uid(obj_type, params['obj_id'])
update = body['update']
msg = errors['Nonexistence'][obj_type](g_id, obj_id)
qu = r.db(dbid).table(obj_type + 's')
if not uid:
try:
dd = auto_reql(qu.get(obj_id).update(update), conn)
except r.ReqlNonExistenceError:
return json.dumps({'error': msg})
else:
dd = auto_reql(qu.get_all(obj_id, index='uid').update(update), conn)
return json.dumps(dd)
elif obj_type in acceptable_types:
update = body['update']
def literalize(d):
        for k, v in d.items():
if isinstance(v, dict):
literalize(v)
else:
m = literal_check.search(v)
if m:
d[k] = r.literal(json.loads(m.group('json_doc')))
literalize(update)
qu = r.db(dbid).table(obj_type)
if 'get_all' in body:
if 'index' in body:
qu = qu.get_all(*body['get_all'], index=body['index'])
else:
qu = qu.get_all(*body['get_all'])
if 'filter' in body:
filt_func = body['filter']
if js_func:
filt_func = r.js(filt_func)
qu = qu.filter(filt_func)
d = auto_reql(qu.update(update), conn)
return json.dumps(d)