Python dump() example source code

hplc.py (project: astrobase, author: waqasbhatti)
def lcdict_to_pickle(lcdict, outfile=None):
    '''This just writes the lcdict to a pickle.

    If outfile is None, the function tries to get the name from
    lcdict['objectid'] and writes to <objectid>-hplc.pkl. If that fails, it
    writes to a file named hplc.pkl.

    '''

    if not outfile and lcdict['objectid']:
        outfile = '%s-hplc.pkl' % lcdict['objectid']
    elif not outfile and not lcdict['objectid']:
        outfile = 'hplc.pkl'

    with open(outfile,'wb') as outfd:
        pickle.dump(lcdict, outfd, protocol=pickle.HIGHEST_PROTOCOL)

    if os.path.exists(outfile):
        LOGINFO('lcdict for object: %s -> %s OK' % (lcdict['objectid'],
                                                    outfile))
        return outfile
    else:
        LOGERROR('could not make a pickle for this lcdict!')
        return None
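
Reading such a pickle back is a single pickle.load; a minimal sketch follows, with a hypothetical file name in the usage comment:

import pickle

def read_lcdict_pickle(picklefile):
    # Load the lcdict previously written by lcdict_to_pickle.
    with open(picklefile, 'rb') as infd:
        return pickle.load(infd)

# hypothetical usage:
# lcdict = read_lcdict_pickle('OBJECTID-hplc.pkl')
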
checkpoint.py (project: deep-summarization, author: harpribot)
def __init__(self, model_nm, cell_nm, attention_type):
        """

        :param model_nm: name of the model
        :param cell_nm: name of the cell used by the model
        :param attention_type: type of attention used by the model
        """
        self.model_nm = model_nm
        self.cell_nm = cell_nm
        self.attention_type = attention_type
        self.last_ckpt = None
        self.last_id = 0
        self.step_save_location = 'steps.p'
        self.data_save_location = 'data'
        self.mapper_save_location = 'mapper.p'
        self.steps_per_ckpt = None
        self.num_steps_per_prediction = None
        self.present_checkpoints = None
        self.outfile = None
        # initialize the steps if not initialized
        if self.step_save_location not in os.listdir(self.get_checkpoint_location()):
            with open(self.get_step_file(), 'wb') as step_file:
                pickle.dump(0, step_file)
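
A sketch of how the step counter persisted above could be read back and advanced between checkpoints; the helper below is illustrative and assumes the path returned by get_step_file() points at the 'steps.p' pickle initialized here:

import pickle

def advance_step_counter(step_file_path):
    # Read the persisted step count, increment it, and write it back.
    with open(step_file_path, 'rb') as f:
        step = pickle.load(f)
    step += 1
    with open(step_file_path, 'wb') as f:
        pickle.dump(step, f)
    return step
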
workflow.py (project: alfred-mpd, author: deanishe)
def save(self):
        """Save settings to JSON file specified in ``self._filepath``.

        If you're using this class via :attr:`Workflow.settings`, which
        you probably are, ``self._filepath`` will be ``settings.json``
        in your workflow's data directory (see :attr:`~Workflow.datadir`).
        """
        if self._nosave:
            return
        data = {}
        data.update(self)
        # for key, value in self.items():
        #     data[key] = value
        with LockFile(self._filepath):
            with atomic_writer(self._filepath, 'wb') as file_obj:
                json.dump(data, file_obj, sort_keys=True, indent=2,
                          encoding='utf-8')

    # dict methods
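
The atomicity here comes from the library's LockFile and atomic_writer helpers. A rough stand-alone sketch of the same write-to-temp-then-rename idea, written for Python 3 (so without the encoding keyword used above); the function name and temp-file suffix are illustrative:

import json
import os
import tempfile

def atomic_json_dump(data, filepath):
    # Write to a temporary file in the same directory, then atomically
    # replace the target so readers never see a half-written file.
    dirpath = os.path.dirname(filepath) or '.'
    fd, tmp_path = tempfile.mkstemp(dir=dirpath, suffix='.tmp')
    try:
        with os.fdopen(fd, 'w') as f:
            json.dump(data, f, sort_keys=True, indent=2)
        os.replace(tmp_path, filepath)
    except Exception:
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
        raise
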
__init__.py (project: sampleRNN_ICLR2017, author: soroushmehr)
def save_training_info(values, path):
    """
    Takes a dict of values and appends each value to its list in the log file
    stored at <path>/train_log.pkl.
    """
    file_name = os.path.join(path, __train_log_file_name)
    try:
        with open(file_name, "rb") as f:
            log = pickle.load(f)
    except IOError:  # first time
        log = {}
        for k in values.keys():
            log[k] = []
    for k, v in values.items():
        log[k].append(v)
    with open(file_name, "wb") as f:
        pickle.dump(log, f)
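
Reading the accumulated log back is a single pickle.load; the sketch below is illustrative, and its default file name comes from the docstring rather than the module's actual __train_log_file_name:

import os
import pickle

def read_training_log(path, log_file_name='train_log.pkl'):
    # Return the dict of metric lists written by save_training_info.
    with open(os.path.join(path, log_file_name), 'rb') as f:
        return pickle.load(f)
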
workflow.py (project: Gank-Alfred-Workflow, author: hujiaweibujidao)
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """

        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer
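
register() only duck-types the serializer by checking for load() and dump() attributes. A minimal sketch of an object that would pass this check (the workflow library ships its own serializers; this class and the registration call are purely illustrative):

import json

class JSONSerializer(object):
    """Toy serializer exposing the load()/dump() interface checked above."""

    @classmethod
    def load(cls, file_obj):
        return json.load(file_obj)

    @classmethod
    def dump(cls, obj, file_obj):
        return json.dump(obj, file_obj, indent=2)

# hypothetical registration:
# manager.register('json', JSONSerializer)
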
workflow.py (project: Gank-Alfred-Workflow, author: hujiaweibujidao)
def dump(cls, obj, file_obj):
        """Serialize object ``obj`` to open pickle file.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: Python object
        :param file_obj: file handle
        :type file_obj: ``file`` object

        """

        return pickle.dump(obj, file_obj, protocol=-1)


# Set up default manager and register built-in serializers
workflow.py (project: Gank-Alfred-Workflow, author: hujiaweibujidao)
def save(self):
        """Save settings to JSON file specified in ``self._filepath``

        If you're using this class via :attr:`Workflow.settings`, which
        you probably are, ``self._filepath`` will be ``settings.json``
        in your workflow's data directory (see :attr:`~Workflow.datadir`).
        """
        if self._nosave:
            return
        data = {}
        data.update(self)
        # for key, value in self.items():
        #     data[key] = value
        with LockFile(self._filepath):
            with atomic_writer(self._filepath, 'wb') as file_obj:
                json.dump(data, file_obj, sort_keys=True, indent=2,
                          encoding='utf-8')

    # dict methods
cache.py (project: Flask_Blog, author: sugarguo)
def set(self, key, value, timeout=None):
        if timeout is None:
            timeout = int(time() + self.default_timeout)
        elif timeout != 0:
            timeout = int(time() + timeout)
        filename = self._get_filename(key)
        self._prune()
        try:
            fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
                                       dir=self._path)
            with os.fdopen(fd, 'wb') as f:
                pickle.dump(timeout, f, 1)
                pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
            rename(tmp, filename)
            os.chmod(filename, self._mode)
        except (IOError, OSError):
            return False
        else:
            return True
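
The two pickle.dump calls above write the expiry timestamp first and the value second, so a reader must unpickle them in the same order. A simplified sketch of the read path, assuming the file layout written by set() (this is not the library's actual get()):

import pickle
from time import time

def read_cache_entry(filename):
    # Return the cached value, or None if the entry is missing or expired.
    # Mirrors the write order used in set(): timeout first, then value.
    try:
        with open(filename, 'rb') as f:
            timeout = pickle.load(f)
            if timeout != 0 and timeout < time():
                return None
            return pickle.load(f)
    except (IOError, OSError, EOFError):
        return None
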
networks.py (project: comprehend, author: Fenugreek)
def save_params(self, tofile):
        """
        Save params to disk, compatible with the fromfile option of the constructor.
        If the argument is a string, a new file with that name is created and written to.
        If the argument is a file handle, data is written to it.
        """

        is_handle = type(tofile) == file
        save_file = tofile if is_handle else open(tofile, 'wb')

        for variable in self.params.values():
            cPickle.dump(variable.eval(), save_file, -1)

        for attr in type(self).attr_names:
            cPickle.dump(getattr(self, attr), save_file, -1)

        if not is_handle: save_file.close()
recipe-576380.py (project: code, author: ActiveState)
def ith_prime(i):
    """
    gets prime(i)
    input: number
    return: number
    """
    global primes, interval
    while i >= len(primes):
        a = ((primes[-1] + 2) // 6) * 6 - 1
        b = a + interval
        c = a + 2
        d = b + 2
        try:
            primes.extend(filter(isprime, xrange(a, b, 6)))
            primes.extend(filter(isprime, xrange(c, d, 6)))
            primes = sorted(list(set(primes)))
            mpp = open(fn, 'wb')
            cPickle.dump(primes, mpp, protocol = -1)
            mpp.close()
            print 'Prime[%s] = %s' % (fmt_n(len(primes)), fmt_n(primes[-1]))
        except ValueError:
            interval = interval // 2
    return primes[i]
recipe-576523.py (project: code, author: ActiveState)
def make_gc_snapShot(filename, name):
        """Append the signatures to a file, giving them the given
        'name'. A signature is a pair object_id / type_name"""
    global first_time
    if first_time:
        gc.collect()
        first_time = False
    contents = []
    for o in gc.get_objects():
        try:
            tname = o.__class__.__name__
        except AttributeError:
            tname = str(type(o))
        contents.append((id(o), tname))
        del tname
    f = open(filename, 'a')
    pickle.dump((name, contents), f)
    f.close()
    del contents
    del f
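
Because every call appends another (name, contents) pair with its own pickle.dump, reading the snapshot file back means calling pickle.load repeatedly until the stream runs out. A minimal sketch (the function name is illustrative, not part of the recipe):

import pickle

def read_gc_snapshots(filename):
    # Yield the (name, contents) pairs in the order they were appended.
    with open(filename, 'rb') as f:
        while True:
            try:
                yield pickle.load(f)
            except EOFError:
                break
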
pascal_voc.py (project: dpl, author: ppengtang)
def gt_roidb(self):
        """
        Return the database of ground-truth regions of interest.

        This function loads/saves from/to a cache file to speed up future calls.
        """
        cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = cPickle.load(fid)
            print '{} gt roidb loaded from {}'.format(self.name, cache_file)
            return roidb

        # gt_roidb = [self._load_pascal_annotation(index)
        gt_roidb = [self._load_pascal_labels(index)
                    for index in self.image_index]
        with open(cache_file, 'wb') as fid:
            cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
        print 'wrote gt roidb to {}'.format(cache_file)

        return gt_roidb
pascal_voc.py (project: dpl, author: ppengtang)
def selective_search_IJCV_roidb(self):
        """
        Return the database of selective search regions of interest.
        Ground-truth ROIs are also included.

        This function loads/saves from/to a cache file to speed up future calls.
        """
        cache_file = os.path.join(self.cache_path,
                '{:s}_selective_search_IJCV_top_{:d}_roidb.pkl'.
                format(self.name, self.config['top_k']))

        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = cPickle.load(fid)
            print '{} ss roidb loaded from {}'.format(self.name, cache_file)
            return roidb

        gt_roidb = self.gt_roidb()
        ss_roidb = self._load_selective_search_IJCV_roidb(gt_roidb)
        roidb = datasets.imdb.merge_roidbs(gt_roidb, ss_roidb)
        with open(cache_file, 'wb') as fid:
            cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL)
        print 'wrote ss roidb to {}'.format(cache_file)

        return roidb
cache.py (project: swjtu-pyscraper, author: Desgard)
def set(self, key, value, timeout=None):
        if timeout is None:
            timeout = int(time() + self.default_timeout)
        elif timeout != 0:
            timeout = int(time() + timeout)
        filename = self._get_filename(key)
        self._prune()
        try:
            fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
                                       dir=self._path)
            with os.fdopen(fd, 'wb') as f:
                pickle.dump(timeout, f, 1)
                pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
            rename(tmp, filename)
            os.chmod(filename, self._mode)
        except (IOError, OSError):
            return False
        else:
            return True
netbase.py (project: deep-prior, author: moberweger)
def save(self, filename):
        """
        Save the state of this network to a pickle file on disk.
        :param filename: path of the pickle file to write. If this name ends in
               ".gz" then the output will automatically be gzipped; otherwise the output will be a "raw" pickle.
        :return: None
        """

        state = dict([('class', self.__class__.__name__), ('network', self.__str__())])
        for layer in self.layers:
            key = '{}-values'.format(layer.layerNum)
            state[key] = [p.get_value() for p in layer.params]
        opener = gzip.open if filename.lower().endswith('.gz') else open
        handle = opener(filename, 'wb')
        cPickle.dump(state, handle, -1)
        handle.close()
        print 'Saved model parameters to {}'.format(filename)
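
A sketch of the reverse operation, in the same Python 2 style as the snippet above: it assumes the state dict layout written by save() (the 'class' and 'network' keys plus one '<layerNum>-values' entry per layer) and is an illustration, not the project's actual loader:

import cPickle
import gzip

def load_state(filename):
    # Load the state dict written by save(), following the .gz convention.
    opener = gzip.open if filename.lower().endswith('.gz') else open
    with opener(filename, 'rb') as handle:
        return cPickle.load(handle)
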
rotational_first_multipoint_backup.py (project: sail, author: GemHunt)
def create_seed_and_test_random(factor, start_id):
    # Only use 1/factor of the crop images.
    # For example, if there are 10000 crops and a factor of 100,
    # then only 100 of them would be the random seed and test images.
    # A factor of 0 would be 100%.
    # This should be changed to a percentage!
    crops = []
    image_ids = []
    for filename in glob.iglob(crop_dir + '*.png'):
        crops.append(filename)

    for filename in crops:
        renamed = filename.replace("_", "")
        image_id = int(renamed.replace('.png', '').replace('/home/pkrush/cents/', ''))
        if image_id < start_id:
            continue
        renamed = crop_dir + str(image_id) + '.png'
        os.rename(filename, renamed)
        rand_int = random.randint(0, factor)
        if rand_int == 0:
            image_ids.append(image_id)
    pickle.dump(image_ids, open(data_dir + 'seed_image_ids.pickle', "wb"))
    pickle.dump(image_ids, open(data_dir + 'test_image_ids.pickle', "wb"))
rotational_temp.py (project: sail, author: GemHunt)
def create_seed_and_test_random(factor, start_id):
    # Only use 1/factor of the crop images.
    # For example, if there are 10000 crops and a factor of 100,
    # then only 100 of them would be the random seed and test images.
    # A factor of 0 would be 100%.
    # This should be changed to a percentage!
    crops = []
    image_ids = []
    for filename in glob.iglob(crop_dir + '*.png'):
        crops.append(filename)

    for filename in crops:
        renamed = filename.replace("_", "")
        image_id = int(renamed.replace('.png', '').replace('/home/pkrush/cents/', ''))
        if image_id < start_id:
            continue
        renamed = crop_dir + str(image_id) + '.png'
        os.rename(filename, renamed)
        rand_int = random.randint(0, factor)
        if rand_int == 0:
            image_ids.append(image_id)
    pickle.dump(image_ids, open(data_dir + 'seed_image_ids.pickle', "wb"))
    pickle.dump(image_ids, open(data_dir + 'test_image_ids.pickle', "wb"))
rotational.py (project: sail, author: GemHunt)
def create_new_indexes(total_new_seed_imgs, total_new_test_imgs):
    seeds = pickle.load(open(data_dir + 'seed_data.pickle', "rb"))
    seed_image_ids = []
    test_image_ids = []
    count = 0

    for seed_image_id, values in seeds.iteritems():
        values.sort(key=lambda x: x[0], reverse=False)
        # seed_image_ids.append(values[0:total_new_seed_imgs][2])
        # test_image_ids.append(values[total_new_seed_imgs:total_new_seed_imgs+total_new_test_imgs][2])

        for max_value, angle, image_id in values:
            count += 1
            if count < total_new_seed_imgs:
                seed_image_ids.append(image_id)
            else:
                if count < total_new_seed_imgs + total_new_test_imgs:
                    test_image_ids.append(image_id)
        count = 0
    pickle.dump(seed_image_ids, open(data_dir + 'seed_image_ids.pickle', "wb"))
    pickle.dump(test_image_ids, open(data_dir + 'test_image_ids.pickle', "wb"))
rotational.py (project: sail, author: GemHunt)
def get_ground_truth_dates(total_coin_results):
    #ground_truth_dates = pickle.load(open(data_dir + 'get_ground_truth_dates.pickle', "rb"))
    ground_truth_date_dict = {}
    for seed_id, values in total_coin_results.iteritems():
        for coin_id, result in values.iteritems():
            if coin_id not in ground_truth_date_dict:
                ground_truth_date_dict[coin_id] = [seed_id, 0]
            if result > ground_truth_date_dict[coin_id][1]:
                ground_truth_date_dict[coin_id] = [seed_id, result]

    #it bugs me I am not using a more pythonic way here:
    ground_truth_date_array = []
    for coin_id, values in ground_truth_date_dict.iteritems():
        seed_id = values[0]
        result = values[1]
        ground_truth_date_array.append([seed_id, coin_id, result, 0, False, False])

    ground_truth_date_array = sorted(ground_truth_date_array, key=lambda x: x[2], reverse=True)
    ground_truth_date_array = sorted(ground_truth_date_array, key=lambda x: x[0])

    pickle.dump(ground_truth_date_array, open(data_dir + 'ground_truth_dates.pickle', "wb"))
    return ground_truth_date_array
pascal_voc.py (project: mx-rfcn, author: giorking)
def gt_roidb(self):
        """
        return ground truth image regions database
        :return: imdb[image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
        """
        cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = cPickle.load(fid)
            print '{} gt roidb loaded from {}'.format(self.name, cache_file)
            return roidb

        gt_roidb = [self.load_pascal_annotation(index) for index in self.image_set_index]
        with open(cache_file, 'wb') as fid:
            cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
        print 'wrote gt roidb to {}'.format(cache_file)

        return gt_roidb
pascal_voc.py (project: mx-rfcn, author: giorking)
def selective_search_roidb(self, gt_roidb):
        """
        get selective search roidb and ground truth roidb
        :param gt_roidb: ground truth roidb
        :return: roidb of selective search (ground truth included)
        """
        cache_file = os.path.join(self.cache_path, self.name + '_ss_roidb.pkl')
        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = cPickle.load(fid)
            print '{} ss roidb loaded from {}'.format(self.name, cache_file)
            return roidb

        if self.image_set != 'test':
            ss_roidb = self.load_selective_search_roidb(gt_roidb)
            roidb = IMDB.merge_roidbs(gt_roidb, ss_roidb)
        else:
            roidb = self.load_selective_search_roidb(None)
        with open(cache_file, 'wb') as fid:
            cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL)
        print 'wrote ss roidb to {}'.format(cache_file)

        return roidb
detection_list.py (project: mx-rfcn, author: giorking)
def gt_roidb(self):
        """
        return ground truth image regions database
        :return: imdb[image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
        """
        cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = cPickle.load(fid)
            print '{} gt roidb loaded from {}'.format(self.name, cache_file)
            return roidb

        gt_roidb = [self.load_annotation(index) for index in self.image_set_index]
        with open(cache_file, 'wb') as fid:
            cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
        print 'wrote gt roidb to {}'.format(cache_file)

        return gt_roidb
pascal_voc.py (project: adversarial-frcnn, author: xiaolonw)
def gt_roidb(self):
        """
        Return the database of ground-truth regions of interest.

        This function loads/saves from/to a cache file to speed up future calls.
        """
        cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = cPickle.load(fid)
            print '{} gt roidb loaded from {}'.format(self.name, cache_file)
            return roidb

        gt_roidb = [self._load_pascal_annotation(index)
                    for index in self.image_index]
        with open(cache_file, 'wb') as fid:
            cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
        print 'wrote gt roidb to {}'.format(cache_file)

        return gt_roidb
coco.py (project: adversarial-frcnn, author: xiaolonw)
def _write_coco_results_file(self, all_boxes, res_file):
        # [{"image_id": 42,
        #   "category_id": 18,
        #   "bbox": [258.15,41.29,348.26,243.78],
        #   "score": 0.236}, ...]
        results = []
        for cls_ind, cls in enumerate(self.classes):
            if cls == '__background__':
                continue
            print 'Collecting {} results ({:d}/{:d})'.format(cls, cls_ind,
                                                          self.num_classes - 1)
            coco_cat_id = self._class_to_coco_cat_id[cls]
            results.extend(self._coco_results_one_category(all_boxes[cls_ind],
                                                           coco_cat_id))
        print 'Writing results json to {}'.format(res_file)
        with open(res_file, 'w') as fid:
            json.dump(results, fid)
workflow.py (project: workflows.kyoyue, author: wizyoung)
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer
workflow.py (project: workflows.kyoyue, author: wizyoung)
def save(self):
        """Save settings to JSON file specified in ``self._filepath``.

        If you're using this class via :attr:`Workflow.settings`, which
        you probably are, ``self._filepath`` will be ``settings.json``
        in your workflow's data directory (see :attr:`~Workflow.datadir`).
        """
        if self._nosave:
            return
        data = {}
        data.update(self)
        # for key, value in self.items():
        #     data[key] = value
        with LockFile(self._filepath):
            with atomic_writer(self._filepath, 'wb') as file_obj:
                json.dump(data, file_obj, sort_keys=True, indent=2,
                          encoding='utf-8')

    # dict methods
services.py (project: abusehelper, author: Exploit-install)
def run(self):
        state = self._get(None)
        self._put(None, None)

        try:
            state = yield self.errors | self.kill_sessions() | self.main(state)
        except Stop:
            state = None
        finally:
            self._put(None, state)

            if self.file is not None:
                self.file.seek(0)
                self.file.truncate(0)
                pickle.dump(self.state, self.file, pickle.HIGHEST_PROTOCOL)

                self.file.flush()
                unlock_file(self.file)
                self.file.close()
da.py (project: structured-output-ae, author: sbelharbi)
def save_params(self, weights_file, catched=False):
        """Save the model's parameters."""
        f_dump = open(weights_file, "wb")
        params_vls = []
        if catched:
            if self.catched_params != []:
                params_vls = self.catched_params
            else:
                raise ValueError(
                    "You asked to save catched params," +
                    "but you didn't catch any!!!!!!!")
        else:
            for param in self.params:
                params_vls.append(param.get_value())
        pkl.dump(params_vls, f_dump, protocol=pkl.HIGHEST_PROTOCOL)
        f_dump.close()
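
A matching loader is sketched below; it assumes the parameters are Theano-style shared variables with set_value(), mirroring the get_value() calls above, and that they are passed in the same order as when the file was written. The helper name is ours, not the project's:

import pickle as pkl

def load_params(weights_file, params):
    # Read the list of parameter values written by save_params and push
    # each value back into the corresponding shared variable, in order.
    with open(weights_file, "rb") as f_dump:
        params_vls = pkl.load(f_dump)
    for param, value in zip(params, params_vls):
        param.set_value(value)
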

