Python dumps() usage examples from open-source projects

Source: agent.py (project: osbrain, author: opensistemas-hub)
def safe_call(self, method, *args, **kwargs):
        """
        A safe call to a method.

        A safe call is simply sent to be executed by the main thread.

        Parameters
        ----------
        method : str
            Method name to be executed by the main thread.
        *args : arguments
            Method arguments.
        **kwargs : keyword arguments
            Method keyword arguments.
        """
        if not self.running:
            raise RuntimeError(
                'Agent must be running to safely execute methods!')
        data = dill.dumps((method, args, kwargs))
        return self._loopback_reqrep('inproc://_loopback_safe', data)
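
On the receiving side, the main thread presumably reverses this with dill.loads and dispatches the call. A minimal sketch of such a receiver, assuming a hypothetical helper (this is not osbrain's actual loopback handler):

import dill

def handle_safe_call(agent, data):
    # Hypothetical receiver: unpack the dill payload and dispatch the call
    # on the agent in the main thread, returning the method's result.
    method, args, kwargs = dill.loads(data)
    return getattr(agent, method)(*args, **kwargs)
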
Source: server.py (project: shelfdb, author: nitipit)
async def handler(reader, writer):
    queries = await reader.read(-1)
    try:
        queries = dill.loads(queries)
        shelf = queries.pop(0)
        result = QueryHandler(db, shelf, queries).run()
        result = dill.dumps(result)
    except:
        print("Unexpected error:", sys.exc_info()[1])
        result = dill.dumps(sys.exc_info()[1])
        writer.write(result)
        await writer.drain()
        writer.close()
        raise
    writer.write(result)
    await writer.drain()
    writer.close()
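
A client talking to this handler would dill-serialize a list whose first element is the shelf name followed by the query chain, then read back a dill payload. The sketch below assumes that payload shape; the host, port, and query contents are illustrative only:

import asyncio
import dill

async def send_query(host='127.0.0.1', port=17000):
    reader, writer = await asyncio.open_connection(host, port)
    # Hypothetical payload: shelf name first, then the query chain the server pops and runs.
    writer.write(dill.dumps(['users', {'filter': lambda user: user.get('age', 0) > 30}]))
    writer.write_eof()                      # the server reads until EOF (read(-1))
    await writer.drain()
    result = dill.loads(await reader.read(-1))
    writer.close()
    return result
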
Source: batch.py (project: dataset, author: analysiscenter)
def deepcopy(self):
        """ Return a deep copy of the batch.

        Constructs a new ``Batch`` instance and then recursively copies all
        the objects found in the original batch, except the ``pipeline``,
        which remains unchanged.

        Returns
        -------
        Batch
        """
        pipeline = self.pipeline
        self.pipeline = None
        dump_batch = dill.dumps(self)
        self.pipeline = pipeline

        restored_batch = dill.loads(dump_batch)
        restored_batch.pipeline = pipeline
        return restored_batch
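
The same trick, temporarily detaching a reference you do not want copied before dill.dumps and re-attaching it afterwards, works outside the Batch class as well. A standalone sketch with a hypothetical Holder class:

import dill

class Holder:
    def __init__(self, data, resource=None):
        self.data = data
        self.resource = resource        # e.g. an open connection that must not be copied

    def deepcopy(self):
        resource = self.resource
        self.resource = None            # detach before serializing
        clone = dill.loads(dill.dumps(self))
        self.resource = resource        # restore on the original
        clone.resource = resource       # the copy shares the same resource object
        return clone
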
Source: test_core.py (project: sudkamp-langs-machines-python, author: thundergolfer)
def test_pickle(self):
        import sys
        if sys.version_info < (3, 4):
            import dill as pickle
        else:
            import pickle

        states = ['A', 'B', 'C', 'D']
        # Define with list of dictionaries
        transitions = [
            {'trigger': 'walk', 'source': 'A', 'dest': 'B'},
            {'trigger': 'run', 'source': 'B', 'dest': 'C'},
            {'trigger': 'sprint', 'source': 'C', 'dest': 'D'}
        ]
        m = Machine(states=states, transitions=transitions, initial='A')
        m.walk()
        dump = pickle.dumps(m)
        self.assertIsNotNone(dump)
        m2 = pickle.loads(dump)
        self.assertEqual(m.state, m2.state)
        m2.run()
Source: __init__.py (project: covertutils, author: operatorequals)
def __form_stage_from_function( init, work ) :
    ret = {}
    dict_ = {'init' : init, 'work' : work}
    try:                    # Python 3
        code = {'init' : init.__code__, 'work' : work.__code__}
    except AttributeError:  # Python 2
        code = {'init' : init.func_code, 'work' : work.func_code}
    ret['object'] = dict_
    ret['python'] = code
    try :
        marshaled = marshal.dumps(code)
    except ValueError:
        marshaled = None

    try :
        import dill
        dilled = dill.dumps(code)
    except ImportError:
        dilled = None
    ret['dill'] = dilled
    ret['marshal'] = marshaled

    return ret
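
The marshal branch only captures code objects, so the receiving side has to rebuild callables from them. A hedged sketch of how such a stage could be reconstituted with the standard library (rebuild_stage is illustrative and not part of covertutils):

import marshal
import types

def rebuild_stage(marshaled):
    # Undo marshal.dumps({'init': ..., 'work': ...}) and turn the code objects
    # back into callables.
    code = marshal.loads(marshaled)
    init = types.FunctionType(code['init'], globals(), 'init')
    work = types.FunctionType(code['work'], globals(), 'work')
    return init, work
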
Source: test_queue.py (project: kq, author: joowani)
def test_enqueue_call(producer, logger):
    producer_cls, producer_inst = producer

    queue = Queue(hosts='host:7000', topic='foo', timeout=300)
    job = queue.enqueue(success_func, 1, 2, c=[3, 4, 5])

    assert isinstance(job, Job)
    assert isinstance(job.id, str)
    assert isinstance(job.timestamp, int)
    assert job.topic == 'foo'
    assert job.func == success_func
    assert job.args == (1, 2)
    assert job.kwargs == {'c': [3, 4, 5]}
    assert job.timeout == 300

    producer_inst.send.assert_called_with('foo', dill.dumps(job), key=None)
    logger.info.assert_called_once_with('Enqueued: {}'.format(job))
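
The producer sends dill.dumps(job) as the Kafka message value, so a worker on the consuming side would presumably reverse that with dill.loads before running the function. A sketch of that consuming step (not kq's actual worker code):

import dill

def process_message(value):
    # value is the raw bytes the producer sent, i.e. dill.dumps(job);
    # rebuild the Job and execute it.
    job = dill.loads(value)
    return job.func(*job.args, **job.kwargs)
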
Source: test_queue.py (project: kq, author: joowani)
def test_enqueue_call_with_key(producer, logger):
    producer_cls, producer_inst = producer

    queue = Queue(hosts='host:7000', topic='foo', timeout=300)
    job = queue.enqueue_with_key('bar', success_func, 1, 2, c=[3, 4, 5])

    assert isinstance(job, Job)
    assert isinstance(job.id, str)
    assert isinstance(job.timestamp, int)
    assert job.topic == 'foo'
    assert job.func == success_func
    assert job.args == (1, 2)
    assert job.kwargs == {'c': [3, 4, 5]}
    assert job.timeout == 300
    assert job.key == 'bar'

    producer_inst.send.assert_called_with('foo', dill.dumps(job), key='bar')
    logger.info.assert_called_once_with('Enqueued: {}'.format(job))
Source: decorators.py (project: lambdify, author: ZhukovAlexander)
def invoke(self, event, context, inv_type=None, log_type='None', version=None):
        """Invoke the lambda function This is basically a low-level lambda interface.
        In most cases, you won't need to use this by yourself.

        :param event: lambda input
        :param context: lambda execution client context
        :param inv_type: invocation type
        :param log_type: log type
        :param version: version
        """
        if not self._was_updated and self.create_options & UPDATE_LAZY == UPDATE_LAZY:
            self._create_or_update()

        params = dict(
                FunctionName=self.name,
                InvocationType=inv_type or self._inv_type,
                LogType=log_type,
                ClientContext=json.dumps(context),
                Payload=json.dumps(event),
        )
        if version:
            params['Qualifier'] = version

        return self.client.invoke(**params)
Source: child_processes.py (project: artemis, author: QUVA-Lab)
def pickle_dumps_without_main_refs(obj):
    """
    Yeah this is horrible, but it allows you to pickle an object in the main module so that it can be reloaded in another
    module.
    :param obj:
    :return:
    """
    currently_run_file = sys.argv[0]
    module_path = file_path_to_absolute_module(currently_run_file)
    try:
        pickle_str = pickle.dumps(obj, protocol=0)
    except:
        print("Using Dill")
        # TODO: @petered There is something very fishy going on here that I don't understand.
        import dill
        pickle_str = dill.dumps(obj, protocol=0)

    pickle_str = pickle_str.replace('__main__', module_path)  # Hack!
    return pickle_str
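
The helper file_path_to_absolute_module is not shown in this snippet; it only needs to turn the running script's path into a dotted module path so the '__main__' reference in the pickle stream can be rewritten. A plausible sketch under that assumption (not artemis's actual implementation):

import os

def file_path_to_absolute_module(file_path):
    # '/project/pkg/script.py' -> 'pkg.script', walking up the directory tree
    # for as long as __init__.py files are present.
    path, _ = os.path.splitext(os.path.abspath(file_path))
    parts = [os.path.basename(path)]
    parent = os.path.dirname(path)
    while os.path.isfile(os.path.join(parent, '__init__.py')):
        parts.insert(0, os.path.basename(parent))
        parent = os.path.dirname(parent)
    return '.'.join(parts)
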
Source: DataIO.py (project: kaggle, author: RankingAI)
def SaveToPklFile(Data,OutputDir):

        df_train,df_test = Data

        if not os.path.exists(OutputDir):
            os.makedirs(OutputDir)

        with open('%s/train.pkl' % OutputDir, 'wb') as o_file:
            pickle.dump(df_train, o_file, -1)

        # df_test is too large for a single pickle.dump/write call,
        # so serialize it to bytes and write the stream out in chunks.
        max_bytes = 2 ** 31 - 1
        bytes_out = pickle.dumps(df_test)
        n_bytes = len(bytes_out)
        with open('%s/test.pkl' % OutputDir, 'wb') as o_file:
            for idx in range(0, n_bytes, max_bytes):
                o_file.write(bytes_out[idx:idx + max_bytes])

        # with open('%s/test.csv' % OutputDir, 'w') as o_file:
        #     o_file.write('%s\n' % (','.join(list(df_test.columns))))
        #     for idx in df_test.index:
        #         rec = [str(v) for v in df_test.ix[idx].values]
        #         o_file.write('%s\n' % (','.join(rec)))
        # o_file.close()
Source: __init__.py (project: onefl-deduper, author: ufbmi)
def apply_async(pool, fun, args, run_dill_encoded=run_dill_encoded):
    return pool.apply_async(run_dill_encoded, (dill.dumps((fun, args)),))
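
This is the standard recipe for pushing dill-only picklables through a multiprocessing.Pool: the payload is dill-encoded on the parent side, and run_dill_encoded (referenced above and in the chxanalys snippets below, but not shown) decodes and calls it in the worker. A sketch of that counterpart, assuming the usual form of the recipe:

import dill

def run_dill_encoded(payload):
    # Runs inside the pool worker: undo dill.dumps((fun, args)) and call the function.
    fun, args = dill.loads(payload)
    return fun(*args)
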
Source: saver.py (project: catalearn, author: Catalearn)
def save_var_cloud(data_var, data_name):
    if not isinstance(data_name, str):
        print("data_name must be a string")
        return

    user_hash = settings.API_KEY
    data_buffer = io.BytesIO(dill.dumps(data_var))
    print('Uploading %s...' % data_name)

    url = 'http://%s/api/save/getUploadUrl' % settings.CATALEARN_URL
    r = requests.post(url, data={
        'type': 'variable',
        'user_hash': user_hash,
        'file_name': data_name
    })
    if r.status_code != 200:
        raise RuntimeError(r.text)

    presigned_url = r.content

    r = requests.put(presigned_url, data=data_buffer)

    if (r.status_code != 200):
        print("Error saving %s\: %s" % (data_name, r.content))
    else:
        print("Successfully uploaded %s" % data_name)
    return
Source: toilpromise.py (project: hgvm-builder, author: BD2KGenomics)
def set_executor(self, executor):
        """
        Set the given function to run in the promise. It will call its first
        argument with its result, or its second argument with an error.
        """

        # Pickle the function and save it
        self.executor_dill = dill.dumps(executor)
Source: toilpromise.py (project: hgvm-builder, author: BD2KGenomics)
def set_then_handler(self, then_handler):
        """
        Set the then handler for this promise. When the prev promise resolves,
        the then handler will be called with the result.

        """

        # Pickle the function and save it
        self.then_dill = dill.dumps(then_handler)
Source: agent.py (project: osbrain, author: opensistemas-hub)
def _loopback(self, header, data=None):
        """
        Send a message to the loopback socket.
        """
        if not self.running:
            raise NotImplementedError()
        data = dill.dumps((header, data))
        return self._loopback_reqrep('inproc://loopback', data)
Source: __init__.py (project: gax-python, author: googleapis)
def add_done_callback(self, fn):  # pylint: disable=invalid-name
        """Enters a polling loop on OperationsClient.get_operation, and once the
        operation is done or cancelled, calls the function with this
        _OperationFuture. Added callables are called in the order that they were
        added.
        """
        if self._operation.done:
            _try_callback(self, fn)
        else:
            self._queue.put(dill.dumps(fn))
            if self._process is None:
                self._process = mp.Process(target=self._execute_tasks)
                self._process.start()
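
The callbacks are dill-pickled because they have to cross a process boundary: the child process presumably drains the queue, rebuilds each callable with dill.loads, and invokes it once the operation finishes. A rough sketch of such a loop (illustrative only, not gax-python's actual _execute_tasks):

import dill

def execute_queued_callbacks(queue, future):
    # Hypothetical worker-side loop: rebuild each queued callback with dill
    # and call it with the finished future, in the order it was enqueued.
    while not queue.empty():
        fn = dill.loads(queue.get())
        fn(future)
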
Source: utils.py (project: hakkuframework, author: 4shadoww)
def export_object(obj):
    import dill as pickle
    import base64
    return base64.b64encode(gzip.zlib.compress(pickle.dumps(obj,4),9)).decode('utf-8')
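
The natural inverse decodes the base64 text, decompresses it, and unpickles with dill. A sketch of that counterpart, assuming the same encoding chain (import_object is a hypothetical name):

def import_object(encoded):
    import dill as pickle
    import base64
    import zlib
    # Reverse of export_object: base64-decode, zlib-decompress, then unpickle.
    return pickle.loads(zlib.decompress(base64.b64decode(encoded)))
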
Source: base.py (project: django-estimators, author: fridiculous)
def persist(self):
        """a private method that persists an estimator object to the filesystem"""
        if self.object_hash:
            data = dill.dumps(self.object_property)
            f = ContentFile(data)
            self.object_file.save(self.object_hash, f, save=False)
            f.close()
            self._persisted = True
        return self._persisted
Source: _memoization.py (project: pydecor, author: mplanchard)
def hashable(item):
    """Get return a hashable version of an item

    If the item is natively hashable, return the item itself. If
    it is not, return it dumped to a pickle string.
    """
    try:
        hash(item)
    except TypeError:
        item = pickle.dumps(item)
    return item
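
In a memoization decorator this lets unhashable arguments such as lists and dicts take part in the cache key. A minimal usage sketch, with the standard pickle module standing in for whatever pickle refers to above:

import pickle

def hashable(item):
    # Same logic as above.
    try:
        hash(item)
        return item
    except TypeError:
        return pickle.dumps(item)

cache = {}
key = (hashable(42), hashable({'page': 1, 'tags': ['a', 'b']}))
cache[key] = 'expensive result'   # pickled bytes are hashable, so dicts can join the key
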
Source: serializer.py (project: Pandas-Farm, author: medo)
def serialize(obj):
    return dill.dumps(obj)
Source: chx_compress_debug.py (project: chxanalys, author: yugangzhang)
def apply_async(pool, fun, args, callback=None):    
    return pool.apply_async( run_dill_encoded, (dill.dumps((fun, args)),), callback= callback)
Source: chx_compress_debug.py (project: chxanalys, author: yugangzhang)
def map_async(pool, fun, args ):    
    return pool.map_async(run_dill_encoded,  (dill.dumps((fun, args)),))
Source: chx_compress.py (project: chxanalys, author: yugangzhang)
def apply_async(pool, fun, args, callback=None):    
    return pool.apply_async( run_dill_encoded, (dill.dumps((fun, args)),), callback= callback)
Source: chx_compress.py (project: chxanalys, author: yugangzhang)
def map_async(pool, fun, args ):    
    return pool.map_async(run_dill_encoded,  (dill.dumps((fun, args)),))
Source: mlengine_prediction_summary.py (project: incubator-airflow-old, author: apache)
def encode(self, x):
        return json.dumps(x)
Source: comp_simple.py (project: dataset, author: analysiscenter)
def mpc_some(item):
    print("some:",)
    dill.dumps(item)
    #print(type(item), item.images.ndim)



# Example of custom Batch class which defines some actions
Source: comp_simple.py (project: dataset, author: analysiscenter)
def some(self, item=None):
        print("some:", type(item))
        print(item)
        print("len", len(dill.dumps(item.as_tuple())))
        return mpc_some
Source: batch.py (project: dataset, author: analysiscenter)
def _dump_blosc(self, ix, dst, components=None):
        """ Save blosc packed data to file """
        file_name = self._get_file_name(ix, dst, 'blosc')
        with open(file_name, 'w+b') as f:
            if self.components is None:
                components = (None,)
                item = (self[ix],)
            else:
                components = tuple(components or self.components)
                item = self[ix].as_tuple(components)
            data = dict(zip(components, item))
            f.write(blosc.compress(dill.dumps(data)))
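
Loading such a file reverses the pipeline: read the bytes, blosc-decompress, then dill.loads back into the component dict. A minimal sketch of the reading side (load_blosc is a hypothetical helper, not the project's actual loader):

import blosc
import dill

def load_blosc(file_name):
    # Reverse of _dump_blosc: bytes -> blosc.decompress -> dill.loads -> {component: data}.
    with open(file_name, 'rb') as f:
        return dill.loads(blosc.decompress(f.read()))
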
Source: utils.py (project: trex-http-proxy, author: alwye)
def export_object(obj):
    import dill as pickle
    import base64
    return base64.b64encode(gzip.zlib.compress(pickle.dumps(obj,4),9)).decode('utf-8')
Source: serialize_mixin.py (project: elm, author: ContinuumIO)
def dumps(self, protocol=None, byref=None, fmode=None, recurse=None):
        '''pickle (dill) an object to a string
        '''
        getattr(self, '_close', lambda: [])()
        return dill.dumps(self, protocol=protocol,
                          byref=byref, fmode=fmode, recurse=recurse)
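
Because the mixin's dumps simply delegates to dill, restoring the object is just a plain dill.loads on the returned string. A minimal round-trip sketch with a hypothetical stand-in class:

import dill

class Model(object):
    # Stand-in for a class that would mix in the dumps() method above.
    def __init__(self, weights):
        self.weights = weights

serialized = dill.dumps(Model([0.1, 0.2, 0.3]))
restored = dill.loads(serialized)
assert restored.weights == [0.1, 0.2, 0.3]
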

