Python pickle.HIGHEST_PROTOCOL: example source code
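pickle.HIGHEST_PROTOCOL is the newest pickle protocol the running interpreter supports. Passing it to pickle.dump/pickle.dumps usually produces smaller files that load faster than the default protocol, but the result may not be readable by older Python versions. A minimal sketch of the pattern the snippets below all share (the file name and payload are placeholders):

import pickle

data = {'train': [1, 2, 3], 'labels': [0, 1, 0]}  # hypothetical payload
with open('data.pickle', 'wb') as f:
    pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)

with open('data.pickle', 'rb') as f:
    restored = pickle.load(f)
assert restored == data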

data_serialization.py (project: logodetect, author: munibasad)
def maybe_pickle(data_dirs, force=False):
    dataset_names = []
    for data_dir in data_dirs:  # avoid shadowing the built-in dir()
        set_filename = data_dir + '.pickle'
        dataset_names.append(set_filename)
        if os.path.exists(set_filename) and not force:
            # You may overwrite by setting force=True
            print('%s already present - Skipping pickling.' % set_filename)
        else:
            print('Pickling %s.' % set_filename)
            dataset = load_logo(data_dir)
            try:
                with open(set_filename, 'wb') as f:
                    pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
            except Exception as e:
                print('Unable to save data to', set_filename, ':', e)
    return dataset_names
data_serialization.py (project: logodetect, author: munibasad)
def save_pickle(train_dataset, train_labels, valid_dataset, valid_labels,
                test_dataset, test_labels):
    try:
        save = {
            'train_dataset': train_dataset,
            'train_labels': train_labels,
            'valid_dataset': valid_dataset,
            'valid_labels': valid_labels,
            'test_dataset': test_dataset,
            'test_labels': test_labels,
        }
        # A with-block closes the file even if pickle.dump raises.
        with open(PICKLE_FILENAME, 'wb') as f:
            pickle.dump(save, f, pickle.HIGHEST_PROTOCOL)
    except Exception as e:
        print('Unable to save data to', PICKLE_FILENAME, ':', e)
        raise
mypeda.py (project: mgtools, author: miyagaw61)
def save_snapshot(self, filename=None):
        """
        Save a snapshot of the current process to file.
        Warning: this is not thread-safe; do not use it with multithreaded programs.

        Args:
            - filename: target file to save snapshot

        Returns:
            - Bool
        """
        if not filename:
            filename = self.get_config_filename("snapshot")

        snapshot = self.take_snapshot()
        if not snapshot:
            return False
        # dump to file
        with open(filename, "wb") as fd:
            pickle.dump(snapshot, fd, pickle.HIGHEST_PROTOCOL)

        return True
image_processing.py (project: ML-Project, author: Shiam-Chowdhury)
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  folders_list = os.listdir(data_folders)
  for folder in folders_list:

    #print(os.path.join(data_folders, folder))
    curr_folder_path = os.path.join(data_folders, folder)
    if os.path.isdir(curr_folder_path):
        set_filename = curr_folder_path + '.pickle'
        dataset_names.append(set_filename)
        if os.path.exists(set_filename) and not force:
          # You may override by setting force=True.
          print('%s already present - Skipping pickling.' % set_filename)
        else:
          print('Pickling %s.' % set_filename)
          dataset = load_letter(curr_folder_path, min_num_images_per_class) # load and normalize the data
          try:
            with open(set_filename, 'wb') as f:
                pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
          except Exception as e:
            print('Unable to save data to', set_filename, ':', e)

  return dataset_names
read_PascalVocData.py (project: FCN-GoogLeNet, author: DeepSegment)
def read_dataset(data_dir):
    pickle_filename = "PascalVoc.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        PascalVoc_folder = "VOCdevkit"
        result = create_image_lists(os.path.join(data_dir, PascalVoc_folder))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records
read_MITSceneParsingData.py (project: FCN-GoogLeNet, author: DeepSegment)
def read_dataset(data_dir):
    pickle_filename = "MITSceneParsing.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        SceneParsing_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, SceneParsing_folder))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records
read_celebADataset.py (project: EBGAN.tensorflow, author: shekkizh)
def read_dataset(data_dir):
    pickle_filename = "celebA.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        celebA_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, celebA_folder))
        print ("Training set: %d" % len(result['train']))
        print ("Test set: %d" % len(result['test']))
        print ("Validation set: %d" % len(result['validation']))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_images = result['train']
        testing_images = result['test']
        validation_images = result['validation']

        del result
    return training_images, testing_images, validation_images
data_extractor.py (project: tensorflow_image_tutorial, author: ybenoit)
def maybe_pickle(self, data_folders, min_num_images_per_class, force=False):
        dataset_names = []
        for folder in data_folders:
            set_filename = folder + '.pickle'
            dataset_names.append(set_filename)
            if os.path.exists(set_filename) and not force:
                # You may override by setting force=True.
                print('%s already present - Skipping pickling.' % set_filename)
            else:
                print('Pickling %s.' % set_filename)
                dataset = self.load_letter(folder, min_num_images_per_class, self.image_size, self.pixel_depth)
                try:
                    with open(set_filename, 'wb') as f:
                        pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
                except Exception as e:
                    print('Unable to save data to', set_filename, ':', e)

        return dataset_names
eye_preprocess.py (project: drowsy_detection, author: thandongtb)
def save_train_and_test_set(dataset, labels, ratio, pickle_file):
    split = int(len(dataset) * ratio)
    train_dataset = dataset[:split]
    train_labels = labels[:split]
    test_dataset = dataset[split:]
    test_labels = labels[split:]

    try:
        f = open(pickle_file, 'wb')
        save = {
            'train_dataset': train_dataset,
            'train_labels': train_labels,
            'test_dataset': test_dataset,
            'test_labels': test_labels,
        }
        pickle.dump(save, f, pickle.HIGHEST_PROTOCOL)
        f.close()
    except Exception as e:
        print('Unable to save data to', pickle_file, ':', e)
        raise

    statinfo = os.stat(pickle_file)
    print('Pickle file size:', statinfo.st_size)

# Main
1_notmnist.py (project: udacity-deep-learning, author: hankcs)
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
    dataset_names = []
    for folder in data_folders:
        set_filename = folder + '.pickle'
        dataset_names.append(set_filename)
        if os.path.exists(set_filename) and not force:
            # You may override by setting force=True.
            print('%s already present - Skipping pickling.' % set_filename)
        else:
            print('Pickling %s.' % set_filename)
            dataset = load_letter(folder, min_num_images_per_class)
            try:
                with open(set_filename, 'wb') as f:
                    pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
            except Exception as e:
                print('Unable to save data to', set_filename, ':', e)

    return dataset_names
peda.py (project: vuln, author: mikaelkall)
def save_snapshot(self, filename=None):
        """
        Save a snapshot of the current process to file.
        Warning: this is not thread-safe; do not use it with multithreaded programs.

        Args:
            - filename: target file to save snapshot

        Returns:
            - Bool
        """
        if not filename:
            filename = self.get_config_filename("snapshot")

        snapshot = self.take_snapshot()
        if not snapshot:
            return False
        # dump to file
        with open(filename, "wb") as fd:
            pickle.dump(snapshot, fd, pickle.HIGHEST_PROTOCOL)

        return True
1_notmnist.py (project: udacity-deep-learning, author: runhani)
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  for folder in data_folders:
    set_filename = folder + '.pickle'
    dataset_names.append(set_filename)
    if os.path.exists(set_filename) and not force:
      # You may override by setting force=True.
      print('%s already present - Skipping pickling.' % set_filename)
    else:
      print('Pickling %s.' % set_filename)
      dataset = load_letter(folder, min_num_images_per_class)
      try:
        with open(set_filename, 'wb') as f:
          pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
      except Exception as e:
        print('Unable to save data to', set_filename, ':', e)

  return dataset_names
read_MITSceneParsingData.py (project: streetview, author: ydnaandy123)
def read_dataset(data_dir):
    pickle_filename = "MITSceneParsing.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        SceneParsing_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, SceneParsing_folder))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records
save_test.py (project: odin, author: imito)
def test_load_save3(self):
        X = K.placeholder(shape=(None, 28, 28))
        ops = N.Sequence([
            N.Dimshuffle(pattern=(0, 1, 2, 'x')),
            N.Conv(8, (3, 3), strides=(1, 1), pad='same', activation=K.relu),
            K.pool2d,
            N.Flatten(outdim=2),
            N.Dense(64, activation=K.relu),
            N.Dense(10, activation=K.softmax)
        ])
        y = ops(X)
        f1 = K.function(X, y)

        ops_ = cPickle.loads(cPickle.dumps(ops, protocol=cPickle.HIGHEST_PROTOCOL))
        y_ = ops_(X)
        f2 = K.function(X, y_)

        x = np.random.rand(32, 28, 28)
        self.assertEqual(np.sum(f1(x) - f2(x)), 0.)
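The test above round-trips a network through cPickle and checks that the restored copy computes identical outputs. Stripped of the odin specifics, the same round-trip check looks like this (a generic sketch, not the odin API):

import pickle
import numpy as np

obj = {'weights': np.arange(6).reshape(2, 3)}  # stand-in for model state
restored = pickle.loads(pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL))
assert np.array_equal(obj['weights'], restored['weights'])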
peda-arm.py (project: peda-arm, author: alset0326)
def save_snapshot(self, filename=None):
        """
        Save a snapshot of the current process to file.
        Warning: this is not thread-safe; do not use it with multithreaded programs.

        Args:
            - filename: target file to save snapshot

        Returns:
            - Bool
        """
        if not filename:
            filename = self.get_config_filename("snapshot")

        snapshot = self.take_snapshot()
        if not snapshot:
            return False
        # dump to file
        with open(filename, "wb") as fd:
            pickle.dump(snapshot, fd, pickle.HIGHEST_PROTOCOL)

        return True
read_LaMemDataset.py (project: Colorization.tensorflow, author: shekkizh)
def read_dataset(data_dir):
    pickle_filename = "lamem.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        lamem_folder = (DATA_URL.split("/")[-1]).split(os.path.extsep)[0]
        result = {'images': create_image_lists(os.path.join(data_dir, lamem_folder))}
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['images']
        del result

    return training_records
cmodule.py (project: Theano-Deep-learning, author: GeekLiB)
def save_pkl(self):
        """
        Dump this object into its `key_pkl` file.

        May raise a cPickle.PicklingError if such an exception is raised at
        pickle time (in which case a warning is also displayed).

        """
        # Note that writing in binary mode is important under Windows.
        try:
            with open(self.key_pkl, 'wb') as f:
                pickle.dump(self, f, protocol=pickle.HIGHEST_PROTOCOL)
        except pickle.PicklingError:
            _logger.warning("Cache leak due to unpickle-able key data %s",
                            self.keys)
            os.remove(self.key_pkl)
            raise
sqlite_recorder.py (project: OpenMDAO, author: OpenMDAO)
def record_metadata_solver(self, recording_requester):
        """
        Record solver metadata.

        Parameters
        ----------
        recording_requester: <Solver>
            The Solver that would like to record its metadata.
        """
        path = recording_requester._system.pathname
        solver_class = type(recording_requester).__name__
        if not path:
            path = 'root'
        id = "{}.{}".format(path, solver_class)

        solver_options = pickle.dumps(recording_requester.options,
                                      pickle.HIGHEST_PROTOCOL)

        with self.con:
            self.con.execute(
                "INSERT INTO solver_metadata(id, solver_options, solver_class) "
                "VALUES(?,?,?)", (id, sqlite3.Binary(solver_options), solver_class))
Model.py (project: cxr_classification, author: harishanand95)
def save(self, dataset_filename="CXR_png.pickle", overwrite=False):
        if self._dataset is None:
            print("Dataset is empty. Run load_images before saving.")
            return

        data = {"dataset": self._dataset,
                "labels": self._labels,
                "valid_images_count": self._valid_images_count,
                "width": self._image_width,
                "height": self._image_height,
                "convert_to_gray": self._convert_to_gray,
                "folder": self._folder,
                "test_dataset": self._test_dataset,
                "test_labels": self._test_labels,
                "test_data_size": self._test_data_size}

        if overwrite is True:
            if os.path.isfile(dataset_filename):
                os.remove(dataset_filename)
        try:
            with open(dataset_filename, 'wb') as f:
                pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
        except Exception as e:
            print('Unable to save data to', dataset_filename, ':', e)
processor.py (project: nuts-flow, author: maet3608)
def __rrshift__(self, iterable):
        """
        Return elements in iterable.

        :param iterable iterable: Any iterable
        :return: Generator over same elements as input iterable.
        :rtype: Generator
        """
        if self.path or (self._cachepath and not self._clearcache):
            for e in self.__iter__():
                yield e
        else:
            self._create_cache()
            for i, e in enumerate(iterable):
                with open(self._fpath(i), 'wb') as f:
                    pickle.dump(e, f, pickle.HIGHEST_PROTOCOL)
                yield e
extract_params.py (project: deeplab_v1_tf1.0, author: automan000)
def main():
    """Extract and save network skeleton with the corresponding weights.

    Raises:
      ImportError: PyCaffe module is not found."""
    args = get_arguments()
    sys.path.append(args.pycaffe_path)
    try:
        import caffe
    except ImportError:
        raise
    # Load net definition.
    net = caffe.Net('./util/deploy.prototxt', args.caffemodel, caffe.TEST)

    # Check the existence of output_dir.
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # Net skeleton with parameters names and shapes.
    # In TF, the filter shape is as follows: [ks, ks, input_channels, output_channels],
    # while in Caffe it looks like this: [output_channels, input_channels, ks, ks].
    net_skeleton = list() 
    for name, item in net.params.iteritems():
        net_skeleton.append([name + '/w', item[0].data.shape[::-1]]) # See the explanation on filter formats above.
        net_skeleton.append([name + '/b', item[1].data.shape])

    with open(os.path.join(args.output_dir, 'net_skeleton.ckpt'), 'wb') as f:
        cPickle.dump(net_skeleton, f, protocol=cPickle.HIGHEST_PROTOCOL)

    # Net weights. 
    net_weights = dict()
    for name, item in net.params.iteritems():
        net_weights[name + '/w'] = item[0].data.transpose(2, 3, 1, 0) # See the explanation on filter formats above.
        net_weights[name + '/b'] = item[1].data
    with open(os.path.join(args.output_dir,'net_weights.ckpt'), 'wb') as f:
        cPickle.dump(net_weights, f, protocol=cPickle.HIGHEST_PROTOCOL)
    del net, net_skeleton, net_weights
data_handlers.py (project: feagen, author: ianlini)
def write_data(self, result_dict):
        for key, val in six.viewitems(result_dict):
            pickle_path = os.path.join(self.pickle_dir, key + ".pkl")
            with SimpleTimer("Writing generated data %s to pickle file" % key,
                             end_in_new_line=False), \
                    open(pickle_path, "wb") as fp:
                cPickle.dump(val, fp, protocol=cPickle.HIGHEST_PROTOCOL)
read_celebADataset.py (project: WassersteinGAN.tensorflow, author: shekkizh)
def read_dataset(data_dir):
    pickle_filename = "celebA.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        # utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        celebA_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        dir_path = os.path.join(data_dir, celebA_folder)
        if not os.path.exists(dir_path):
            print ("CelebA dataset needs to be downloaded and unzipped manually")
            print ("Download from: %s" % DATA_URL)
            raise ValueError("Dataset not found")

        result = create_image_lists(dir_path)
        print ("Training set: %d" % len(result['train']))
        print ("Test set: %d" % len(result['test']))
        print ("Validation set: %d" % len(result['validation']))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        celebA = CelebA_Dataset(result)
        del result
    return celebA
utils.py (project: MIL.pytorch, author: gujiuxiang)
def save_variables(pickle_file_name, var, info, overwrite=False):
    if os.path.exists(pickle_file_name) and not overwrite:
        raise Exception('{:s} exists and overwrite is false.'.format(pickle_file_name))
    # Construct the dictionary
    assert type(var) == list
    assert type(info) == list
    d = {}
    for i in xrange(len(var)):
        d[info[i]] = var[i]
    with open(pickle_file_name, 'wb') as f:
        cPickle.dump(d, f, cPickle.HIGHEST_PROTOCOL)
gpickle.py (project: pybel, author: pybel)
def to_bytes(graph, protocol=HIGHEST_PROTOCOL):
    """Converts a graph to bytes with pickle. Note that the pickle module has some incompatibilities between Python
    2 and 3. To export a universally importable pickle, choose 0, 1, or 2.

    :param BELGraph graph: A BEL network
    :param int protocol: Pickling protocol to use
    :return: Pickled bytes representing the graph
    :rtype: bytes

    .. seealso:: https://docs.python.org/3.6/library/pickle.html#data-stream-format
    """
    raise_for_not_bel(graph)
    return dumps(graph, protocol=protocol)
gpickle.py (project: pybel, author: pybel)
def to_pickle(graph, file, protocol=HIGHEST_PROTOCOL):
    """Writes this graph to a pickle object with :func:`networkx.write_gpickle`.  Note that the pickle module has some
    incompatibilities between Python 2 and 3. To export a universally importable pickle, choose 0, 1, or 2.

    :param BELGraph graph: A BEL graph
    :param str or file: A file or filename to write to
    :param int protocol: Pickling protocol to use

    .. seealso:: https://docs.python.org/3.6/library/pickle.html#data-stream-format
    """
    raise_for_not_bel(graph)
    write_gpickle(graph, file, protocol=protocol)
extract_params.py (project: tensorflow-deeplab-lfov, author: DrSleep)
def main():
    """Extract and save network skeleton with the corresponding weights.

    Raises:
      ImportError: PyCaffe module is not found."""
    args = get_arguments()
    sys.path.append(args.pycaffe_path)
    try:
        import caffe
    except ImportError:
        raise
    # Load net definition.
    net = caffe.Net('./util/deploy.prototxt', args.caffemodel, caffe.TEST)

    # Check the existence of output_dir.
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # Net skeleton with parameters names and shapes.
    # In TF, the filter shape is as follows: [ks, ks, input_channels, output_channels],
    # while in Caffe it looks like this: [output_channels, input_channels, ks, ks].
    net_skeleton = list() 
    for name, item in net.params.iteritems():
        net_skeleton.append([name + '/w', item[0].data.shape[::-1]]) # See the explanation on filter formats above.
        net_skeleton.append([name + '/b', item[1].data.shape])

    with open(os.path.join(args.output_dir, 'net_skeleton.ckpt'), 'wb') as f:
        cPickle.dump(net_skeleton, f, protocol=cPickle.HIGHEST_PROTOCOL)

    # Net weights. 
    net_weights = dict()
    for name, item in net.params.iteritems():
        net_weights[name + '/w'] = item[0].data.transpose(2, 3, 1, 0) # See the explanation on filter formats above.
        net_weights[name + '/b'] = item[1].data
    with open(os.path.join(args.output_dir,'net_weights.ckpt'), 'wb') as f:
        cPickle.dump(net_weights, f, protocol=cPickle.HIGHEST_PROTOCOL)
    del net, net_skeleton, net_weights
shortcuts.py (project: serialtime, author: ianlini)
def save_pklgz(obj, path, log_description=None, logger=None,
               logging_level=logging.INFO, verbose_start=True,
               verbose_end=True, end_in_new_line=True, log_prefix="..."):
    if log_description is None:
        log_description = "Pickling to " + (path)
    with SimpleTimer(log_description, logger, logging_level, verbose_start,
                     verbose_end, end_in_new_line, log_prefix):
        pkl = cPickle.dumps(obj, protocol=cPickle.HIGHEST_PROTOCOL)
        with gzip.open(path, "wb") as fp:
            fp.write(pkl)
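save_pklgz above compresses the pickled bytes with gzip before writing. The matching read path is not shown in this snippet; a standard-library counterpart would be (a hypothetical helper, named load_pklgz here for symmetry):

import gzip
import pickle  # cPickle on Python 2

def load_pklgz(path):
    # Decompress, then unpickle; the reverse of save_pklgz.
    with gzip.open(path, "rb") as fp:
        return pickle.load(fp)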
shortcuts.py (project: serialtime, author: ianlini)
def save_pkl(obj, path, log_description=None, logger=None,
             logging_level=logging.INFO, verbose_start=True,
             verbose_end=True, end_in_new_line=True, log_prefix="..."):
    if log_description is None:
        log_description = "Pickling to " + (path)
    with open(path, "wb") as fp, \
            SimpleTimer(log_description, logger, logging_level, verbose_start,
                        verbose_end, end_in_new_line, log_prefix):
        cPickle.dump(obj, fp, protocol=cPickle.HIGHEST_PROTOCOL)
read_FlowersDataset.py (project: EBGAN.tensorflow, author: shekkizh)
def read_dataset(data_dir):
    pickle_filename = "flowers_data.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        flower_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, flower_folder))
        print "Training set: %d" % len(result['train'])
        print "Test set: %d" % len(result['test'])
        print "Validation set: %d" % len(result['validation'])
        print "Pickling ..."
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print "Found pickle file!"

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_images = result['train']
        testing_images = result['test']
        validation_images = result['validation']

        del result

    print ("Training: %d, Validation: %d, Test: %d" % (
        len(training_images), len(validation_images), len(testing_images)))
    return training_images, testing_images, validation_images

