Example source code for Python's load()

flora.py (project: flora, author: Lamden)
def generate(location):
    # cli wizard for creating a new contract from a template
    if directory_has_smart_contract(location):
        example_payload = json.load(open(glob.glob(os.path.join(location, '*.json'))[0]))
        print(example_payload)
        for k, v in example_payload.items():
            value = input(k + ':')
            if value != '':
                example_payload[k] = value
        print(example_payload)

        code_path = glob.glob(os.path.join(location, '*.tsol'))
        tsol.compile(open(code_path[0]), example_payload)
        print('Code compiles with new payload.')
        selection = ''
        while True:
            selection = input('(G)enerate Solidity contract or (E)xport implementation:')
            if selection.lower() == 'g':
                output_name = input('Name your contract file without an extension:')
                code = tsol.generate_code(open(code_path[0]).read(), example_payload)
                open(os.path.join(location, '{}.sol'.format(output_name)), 'w').write(code)
                break

            if selection.lower() == 'e':
                output_name = input('Name your implementation file without an extension:')
                json.dump(example_payload, open(os.path.join(location, '{}.json'.format(output_name)), 'w'))
                break
    else:
        print('Provided directory does not contain a *.tsol and *.json or does not compile.')
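
A minimal invocation sketch for the wizard above, assuming a template directory that contains one *.tsol file and one *.json payload (the path below is hypothetical); the function then prompts interactively for payload values:

    # Hypothetical template directory containing crowdfund.tsol and crowdfund.json
    generate('templates/crowdfund')
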
workflow.py (project: alfred-mpd, author: deanishe)
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer
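
A usage sketch for the registration API above, assuming ``manager`` is an instance of the serializer-manager class that defines ``register()``; the serializer class here is illustrative and only needs ``load()`` and ``dump()`` methods:

    import json

    class PrettyJSONSerializer(object):
        """Illustrative serializer exposing the required load()/dump() interface."""

        @classmethod
        def load(cls, file_obj):
            return json.load(file_obj)

        @classmethod
        def dump(cls, obj, file_obj):
            json.dump(obj, file_obj, indent=2)

    # 'prettyjson' also becomes the file extension of the saved files
    manager.register('prettyjson', PrettyJSONSerializer)
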
workflow.py (project: alfred-mpd, author: deanishe)
def load(cls, file_obj):
        """Load serialized object from open JSON file.

        .. versionadded:: 1.8

        :param file_obj: file handle
        :type file_obj: ``file`` object
        :returns: object loaded from JSON file
        :rtype: object

        """
        return json.load(file_obj)
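
A short usage sketch; the class name ``JSONSerializer`` is assumed here for illustration (the pickle-based variant below is used the same way):

    with open('settings.json', 'rb') as fp:   # hypothetical file
        settings = JSONSerializer.load(fp)
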
workflow.py (project: alfred-mpd, author: deanishe)
def load(cls, file_obj):
        """Load serialized object from open pickle file.

        .. versionadded:: 1.8

        :param file_obj: file handle
        :type file_obj: ``file`` object
        :returns: object loaded from pickle file
        :rtype: object

        """
        return cPickle.load(file_obj)
workflow.py (project: alfred-mpd, author: deanishe)
def _load(self):
        """Load cached settings from JSON file `self._filepath`."""
        self._nosave = True
        d = {}
        with open(self._filepath, 'rb') as file_obj:
            for key, value in json.load(file_obj, encoding='utf-8').items():
                d[key] = value
        self.update(d)
        self._original = deepcopy(d)
        self._nosave = False
workflow.py (project: alfred-mpd, author: deanishe)
def cached_data(self, name, data_func=None, max_age=60):
        """Return cached data if younger than ``max_age`` seconds.

        Retrieve data from cache or re-generate and re-cache data if
        stale/non-existent. If ``max_age`` is 0, return cached data no
        matter how old.

        :param name: name of datastore
        :param data_func: function to (re-)generate data.
        :type data_func: ``callable``
        :param max_age: maximum age of cached data in seconds
        :type max_age: ``int``
        :returns: cached data, return value of ``data_func`` or ``None``
            if ``data_func`` is not set

        """
        serializer = manager.serializer(self.cache_serializer)

        cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
        age = self.cached_data_age(name)

        if (age < max_age or max_age == 0) and os.path.exists(cache_path):

            with open(cache_path, 'rb') as file_obj:
                self.logger.debug('Loading cached data from : %s',
                                  cache_path)
                return serializer.load(file_obj)

        if not data_func:
            return None

        data = data_func()
        self.cache_data(name, data)

        return data
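
A usage sketch for the caching helper above, assuming ``wf`` is an instance of the workflow class that defines it; the cache name and callable are illustrative:

    def fetch_playlist():
        """Hypothetical expensive call, e.g. querying an MPD server."""
        return ['track-a', 'track-b']

    # Re-use the cached list if it is younger than 10 minutes, otherwise regenerate it
    tracks = wf.cached_data('playlist', fetch_playlist, max_age=600)
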
utils.py (project: DeepAnomaly, author: adiyoss)
def read_data(path):
    with open(path, 'rb') as f:
        data = pickle.load(f)
    return data
all_convnet.py (project: GELUs, author: hendrycks)
def unpickle(file):
    fo = open(file, 'rb')
    d = pickle.load(fo, encoding='latin1')
    fo.close()
    return {'x': np.cast[th.config.floatX]((-127.5 + d['data'].reshape((10000,3,32,32)))/128.), 'y': np.array(d['labels']).astype(np.uint8)}
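
An illustrative call, assuming a standard CIFAR-10 batch file from the Python pickle distribution of the dataset (the path is hypothetical):

    batch = unpickle('cifar-10-batches-py/data_batch_1')
    print(batch['x'].shape, batch['y'].shape)  # expected: (10000, 3, 32, 32) (10000,)
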
SGDR_WRNs_gelu.py (project: GELUs, author: hendrycks)
def unpickle(file):
    import pickle
    fo = open(file, 'rb')
    d = pickle.load(fo, encoding='latin1')  # short local name; avoids shadowing the built-in dict
    fo.close()
    return d
load_cifar10.py (project: GELUs, author: hendrycks)
def to_categorical(y, nb_classes):
    y = np.asarray(y, dtype='int32')
    if not nb_classes:
        nb_classes = np.max(y)+1
    Y = np.zeros((len(y), nb_classes))
    for i in range(len(y)):
        Y[i, y[i]] = 1.
    return Y

# load training and testing data
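
A small worked example of the one-hot encoding above (output shown approximately):

    print(to_categorical([0, 2, 3], 4))
    # [[1. 0. 0. 0.]
    #  [0. 0. 1. 0.]
    #  [0. 0. 0. 1.]]
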
load_cifar10.py (project: GELUs, author: hendrycks)
def load_batch(fpath):
    with open(fpath, 'rb') as f:
        d = pickle.load(f, encoding='latin1')
    data = d["data"]
    labels = d["labels"]
    return data, labels
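
Putting the two helpers from this file together, a loading sketch (the dataset path is assumed):

    data, labels = load_batch('cifar-10-batches-py/data_batch_1')
    Y = to_categorical(labels, 10)        # one-hot targets for the 10 CIFAR-10 classes
    X = data.reshape((-1, 3, 32, 32))     # raw pixel rows back into image tensors
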
preposition_model.py (project: onto-lstm, author: pdasigi)
def load_model(self, epoch=None):
        '''
        Loads a saved model. If an epoch id is provided, loads the corresponding
        model; otherwise, loads the best model.
        '''
        if not epoch:
            self.model = load_model("%s.model" % self.model_name_prefix,
                                    custom_objects=self.custom_objects)
        else:
            self.model = load_model("%s_%d.model" % (self.model_name_prefix, epoch),
                                    custom_objects=self.custom_objects)
        self.model.summary()
        self.data_processor = pickle.load(open("%s.dataproc" % self.model_name_prefix, "rb"))
model_pp_relation.py 文件源码 项目:onto-lstm 作者: pdasigi 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def load_model(self, epoch=None):
        self.label_map = pickle.load(open("%s.label_map" % self.model_name_prefix, "rb"))
        super(PPRelationModel, self).load_model(epoch)
model_entailment.py (project: onto-lstm, author: pdasigi)
def load_model(self, epoch=None):
        '''
        Loads a saved model. If an epoch id is provided, loads the corresponding
        model; otherwise, loads the best model.
        '''
        if not epoch:
            self.model = load_model("%s.model" % self.model_name_prefix,
                                    custom_objects=self.custom_objects)
        else:
            self.model = load_model("%s_%d.model" % (self.model_name_prefix, epoch),
                                    custom_objects=self.custom_objects)
        self.data_processor = pickle.load(open("%s.dataproc" % self.model_name_prefix, "rb"))
        self.label_map = pickle.load(open("%s.labelmap" % self.model_name_prefix, "rb"))
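
A call sketch, assuming ``model`` is an instance of one of the model classes above and that the files written at training time ('<prefix>.model', '<prefix>.dataproc', '<prefix>.labelmap') exist on disk:

    model.load_model()         # restore the best saved model
    model.load_model(epoch=5)  # or restore the checkpoint saved after epoch 5
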
data.py (project: DREAM, author: LaceyChen17)
def get_users_orders(self, prior_or_train):
        '''
            get users' prior detailed orders
        '''
        if os.path.exists(self.cache_dir + 'users_orders.pkl'):
            with open(self.cache_dir + 'users_orders.pkl', 'rb') as f:
                users_orders = pickle.load(f)
        else:
            orders = self.get_orders()
            order_products_prior = self.get_orders_items(prior_or_train)
            users_orders = pd.merge(order_products_prior, orders[['user_id', 'order_id', 'order_number', 'days_up_to_last']], 
                        on = ['order_id'], how = 'left')
            with open(self.cache_dir + 'users_orders.pkl', 'wb') as f:
                pickle.dump(users_orders, f, pickle.HIGHEST_PROTOCOL)
        return users_orders
data.py (project: DREAM, author: LaceyChen17)
def get_users_products(self, prior_or_train):
        '''
            get users' all purchased products
        '''
        if os.path.exists(self.cache_dir + 'users_products.pkl'):
            with open(self.cache_dir + 'users_products.pkl', 'rb') as f:
                users_products = pickle.load(f)
        else:
            users_products = self.get_users_orders(prior_or_train)[['user_id', 'product_id']].drop_duplicates()
            users_products['product_id'] = users_products.product_id.astype(int)
            users_products['user_id'] = users_products.user_id.astype(int)
            users_products = users_products.groupby(['user_id'])['product_id'].apply(list).reset_index()
            with open(self.cache_dir + 'users_products.pkl', 'wb') as f:
                pickle.dump(users_products, f, pickle.HIGHEST_PROTOCOL)
        return users_products
data.py (project: DREAM, author: LaceyChen17)
def get_baskets(self, prior_or_train, reconstruct = False, reordered = False, none_idx = 49689):
        '''
            get users' baskets
        '''
        if reordered:
            filepath = self.cache_dir + './reorder_basket_' + prior_or_train + '.pkl'
        else:
            filepath = self.cache_dir + './basket_' + prior_or_train + '.pkl'

        if (not reconstruct) and os.path.exists(filepath):
            with open(filepath, 'rb') as f:
                up_basket = pickle.load(f)
        else:          
            up = self.get_users_orders(prior_or_train).sort_values(['user_id', 'order_number', 'product_id'], ascending = True)
            uid_oid = up[['user_id', 'order_number']].drop_duplicates()
            up = up[up.reordered == 1][['user_id', 'order_number', 'product_id']] if reordered else up[['user_id', 'order_number', 'product_id']]
            up_basket = up.groupby(['user_id', 'order_number'])['product_id'].apply(list).reset_index()
            up_basket = pd.merge(uid_oid, up_basket, on = ['user_id', 'order_number'], how = 'left')
            for row in up_basket.loc[up_basket.product_id.isnull(), 'product_id'].index:
                up_basket.at[row, 'product_id'] = [none_idx]
            up_basket = up_basket.sort_values(['user_id', 'order_number'], ascending = True).groupby(['user_id'])['product_id'].apply(list).reset_index()
            up_basket.columns = ['user_id', 'reorder_basket'] if reordered else ['user_id', 'basket']
            #pdb.set_trace()
            with open(filepath, 'wb') as f:
                pickle.dump(up_basket, f, pickle.HIGHEST_PROTOCOL)
        return up_basket
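
A usage sketch, assuming ``ds`` is an instance of this data class with ``cache_dir`` pointing at a writable directory:

    baskets = ds.get_baskets('prior', reconstruct=False)
    print(baskets.columns.tolist())  # expected: ['user_id', 'basket']
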
parse_indepexpends.py (project: SuperPACs, author: SpencerNorris)
def donations(filename='donationdata.pickle'):

    try:
        print("donation data pickled already. Grabbing data from donationdata.picke")
        with open(filename, 'rb') as handle:
            donations = pickle.load(handle)
        return donations
    except (IOError, EOFError):  # cache file missing or incomplete
        print("donation data not pickled, grabbing directly from FEC and ProPublica APIs")
        donations = donations_helper()

        with open(filename, 'wb') as handle:
            pickle.dump(donations, handle, protocol=pickle.HIGHEST_PROTOCOL)

        return donations
config_map.py (project: xr-telemetry-m2m-web, author: cisco)
def __init__(self, sh_run):
        self.sh_run = sh_run.split('\n')
        self.push_back = None
        self.index = 0
        self.cache_misses = []
        self.saved_cli_lines = []
        try:
            self.cache = pickle.load(open(CACHE_PATH, 'rb'))
            print('LOADED {} items from the MAP CACHE'.format(len(self.cache)))
        except Exception:
            self.cache = {}
util.py (project: cognitive-system-postagger, author: made2591)
def get_obj_from_file(file_name):
    """
    This method loads an object from file_name and returns it.
    If an error occurs, it returns -1.
    :param file_name: file to load the object from
    :return: object loaded from the file
    """
    try:
        file = open(file_name,'rb')
        object_file = pickle.load(file)
        file.close()
        return object_file
    except Exception as e:
        print(e)
        return -1
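
A round-trip sketch, assuming a pickle file written by a matching save helper elsewhere in the module (the file name is hypothetical):

    tagger_state = get_obj_from_file('tagger_state.pkl')
    if tagger_state == -1:
        print('could not load the saved object')
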

