def writeToFile(self, filename, saveOpts=False):
    """Write the distortion coefficients to an .npz file.

    filename -- target path; '.<self.ftype>' is appended if missing
    saveOpts -- whether to also save the calibration options
                (and not just the results)

    Returns the filename actually written (with extension).
    Raises Exception if the camera has not been calibrated yet,
    i.e. ``self.coeffs`` does not exist.
    """
    try:
        ext = '.%s' % self.ftype
        if not filename.endswith(ext):
            filename += ext
        payload = {'coeffs': self.coeffs}
        if saveOpts:
            payload['opts'] = self.opts
        np.savez(filename, **payload)
        return filename
    except AttributeError:
        # Missing self.coeffs/self.ftype means calibration never ran.
        raise Exception(
            'need to calibrate camera before calibration can be saved to file')
Example source code for Python's savez()
def _save_model(self, bleu_score):
    # Persist the model parameters if this BLEU score qualifies for the
    # tracked n-best list; the worst previously-saved model is evicted
    # (and its file deleted) when the list is full.
    if self._is_valid_to_save(bleu_score):
        model = ModelInfo(bleu_score, self.config['saveto'])
        # Manage n-best model list first
        if len(self.best_models) >= self.track_n_models:
            # The list is kept sorted ascending by bleu_score, so
            # index 0 is the worst tracked model.
            old_model = self.best_models[0]
            if old_model.path and os.path.isfile(old_model.path):
                logger.info("Deleting old model %s" % old_model.path)
                os.remove(old_model.path)
            self.best_models.remove(old_model)
        self.best_models.append(model)
        self.best_models.sort(key=operator.attrgetter('bleu_score'))
        # Save the model here. SIGINT is ignored for the duration of the
        # write so that a Ctrl-C cannot leave a half-written parameter
        # file on disk; the previous handler is restored afterwards.
        s = signal.signal(signal.SIGINT, signal.SIG_IGN)
        logger.info("Saving new model {}".format(model.path))
        numpy.savez(
            model.path, **self.main_loop.model.get_parameter_dict())
        numpy.savez(
            os.path.join(self.config['saveto'], 'val_bleu_scores.npz'),
            bleu_scores=self.val_bleu_curve)
        signal.signal(signal.SIGINT, s)
def save_training(self, filename):
    """Persist this model's training set and labels as an .npz archive.

    :param filename: filename of the new data .npz; the file is written
        under the ``TRAIN_DATA`` ('train/') directory.
    """
    target = os.path.join(TRAIN_DATA, filename)
    payload = {
        'train_set': self.train_set,
        'train_labels': self.train_labels,
    }
    np.savez(target, **payload)
def end(self, _session):
    """Dump every accumulated beam-search array into the configured .npz file."""
    output_path = self.params["file"]
    np.savez(output_path, **self._beam_accum)
def end(self, _session):
    """Write accumulated attention scores to <output_dir>/attention_scores.npz."""
    out_path = os.path.join(self.params["output_dir"],
                            "attention_scores.npz")
    np.savez(out_path, *self._attention_scores_accum)
    tf.logging.info("Wrote %s", out_path)
def save_path(self, path):
    """Save the model, its feature scaler, and pessimism factors next to *path*.

    The scaler goes to ``<path>.feature_scaler`` and the two pessimism
    factors to ``<path>.pessimism`` as an .npz archive.
    """
    self._model.save(path)
    self._feature_scaler.save_path(path.with_suffix('.feature_scaler'))
    pessimism_file = path.with_suffix('.pessimism')
    with open(pessimism_file, 'wb') as out:
        np.savez(
            out,
            aim_pessimism_factor=self._aim_pessimism_factor,
            accuracy_pessimism_factor=self._accuracy_pessimism_factor,
        )
def save_path(self, path):
    """Serialize scaler state (ndim, mean, std) to *path* as an .npz archive."""
    payload = {
        'ndim': len(self._axes) + 1,
        'mean': self.mean,
        'std': self.std,
    }
    with open(path, 'wb') as out:
        np.savez(out, **payload)
def dataset_to_file(dataset, ngrams, filename='dataset'):
    """Save a dataset to a file.

    Args:
        dataset (:class:`np.ndarray`): the dataset to save (built with
            :func:`dataset_tools.build_dataset`)
        ngrams (list of strings): the ngrams used to compute the features
        filename (string): the filename without extension (will be .npz)
    """
    num_samples, num_entries, num_features = dataset.shape
    # Reshape the ndarray from 3-D to 2-D so it can be stored as a sparse
    # CSR matrix: one row per cited paper, each row holding the
    # `num_entries` sets of features.
    flat = dataset.reshape(num_samples * num_entries, num_features)
    dataset_sp = sparse.csr_matrix(flat)
    np.savez(filename,
             num_entries=np.array([num_entries]),
             data=dataset_sp.data,
             indices=dataset_sp.indices,
             indptr=dataset_sp.indptr,
             shape=dataset_sp.shape,
             ngrams=ngrams)
def save_sparse_csr(filename, array):
    """Persist a scipy CSR matrix as an .npz archive of its raw components."""
    components = {
        'data': array.data,
        'indices': array.indices,
        'indptr': array.indptr,
        'shape': array.shape,
    }
    np.savez(filename, **components)
def secure_numpy_save(params_dict, path):
    """Atomically save *params_dict* as an .npz archive at *path*.

    The data is first written to a temporary file in the destination
    directory and then moved into place, so a crash mid-write can never
    leave a truncated file at *path*. Errors are logged, not raised
    (best-effort, matching the original contract).
    """
    temp_name = None
    try:
        dirname = os.path.dirname(path)
        with tempfile.NamedTemporaryFile(delete=False, dir=dirname) as temp:
            temp_name = temp.name
            numpy.savez(temp, **params_dict)
        # Move only after the file is closed and fully flushed.
        shutil.move(temp_name, path)
    except Exception as e:
        # Remove the orphaned temporary file instead of leaking it.
        if temp_name and os.path.exists(temp_name):
            os.remove(temp_name)
        logger.error(" Error {0}".format(str(e)))
def _save_params(self, model, params):
# Rename accordingly for blocks compatibility
params_to_save = dict(
(k.replace('/', '-'), v) for k, v in params.items())
numpy.savez(model.path, **params_to_save)
def _save_bleu_scores(self):
numpy.savez(
os.path.join(
self.saveto,
'val_bleu_scores{}_{}.npz'.format(self.enc_id, self.dec_id)),
bleu_scores=self.val_bleu_curve)
def encode_npz(subvol):
    """Compress *subvol* as zlib-deflated .npy bytes.

    This file format is unrelated to np.savez: the array is serialized
    with np.save and the result compressed with zlib. Unlike
    np.tobytes, the .npy format carries shape and dtype metadata.
    A 3-D input gains a leading singleton axis before serialization.
    """
    if subvol.ndim == 3:
        subvol = subvol[np.newaxis, ...]
    buffer = io.BytesIO()
    np.save(buffer, subvol)
    return zlib.compress(buffer.getvalue())
def save_np(reader, output_filename):
    """Read the full stack from *reader* and save timestamps + data as .npz."""
    timestamps, stack = reader.read_stack()
    np.savez(output_filename, timestamps=timestamps, data=stack)
def write_test_file(self, variable='v', check=False):
    """Build test data for *variable*, save it as .npz, and optionally
    verify that the file round-trips exactly."""
    data, metadata = self.build_test_data(variable)
    sorted_meta = np.array(sorted(metadata.items()))
    np.savez(self.test_file, data=data, metadata=sorted_meta)
    if not check:
        return
    data1, metadata1 = read_test_file(self.test_file)
    assert metadata == metadata1, "%s != %s" % (metadata, metadata1)
    assert data.shape == data1.shape == (505, 2), \
        "%s, %s, (505, 2)" % (data.shape, data1.shape)
    assert (data == data1).all()
    assert metadata["n"] == 505
def _write_file_contents(self, data, metadata):
    # The string dtype is fixed explicitly so that save/load round-trips
    # preserve the file contents exactly.
    lengths = chain((len(key) for key in metadata),
                    (len(str(value)) for value in metadata.values()))
    width = max(lengths)
    dtype = ("S%d" if PY2 else "U%d") % width
    metadata_array = numpy.array(sorted(metadata.items()), dtype)
    numpy.savez(self.filename, data=data, metadata=metadata_array)
def write_test_file(self, variable='v', check=False):
    """Generate and persist test data; when *check* is set, re-read the
    file and validate the round-trip."""
    data, metadata = self.build_test_data(variable)
    meta_arr = np.array(sorted(metadata.items()))
    np.savez(self.test_file, data=data, metadata=meta_arr)
    if check:
        data_rt, metadata_rt = read_test_file(self.test_file)
        assert metadata == metadata_rt, "%s != %s" % (metadata, metadata_rt)
        assert data.shape == data_rt.shape == (505, 2), \
            "%s, %s, (505, 2)" % (data.shape, data_rt.shape)
        assert (data == data_rt).all()
        assert metadata["n"] == 505
def _write_file_contents(self, data, metadata):
    # Fix the dtype width explicitly so a save/load round-trip reproduces
    # the file contents byte-for-byte.
    widths = [len(key) for key in metadata.keys()]
    widths.extend(len(str(value)) for value in metadata.values())
    fmt = "S%d" if PY2 else "U%d"
    metadata_array = numpy.array(sorted(metadata.items()), fmt % max(widths))
    numpy.savez(self.filename, data=data, metadata=metadata_array)
Source file: tensorFlowNetwork.py
Project: PersonalizedMultitaskLearning
Author: mitmedialab
Views: 29 · Favorites: 0 · Likes: 0 · Comments: 0
def save_model(self, file_name, directory):
    """Saves a checkpoint of the model and a .npz file with stored rewards.

    Args:
        file_name: String name to use for the checkpoint and rewards files.
        directory: Base path (assumed to end with a separator) under which
            a new sub-directory named *file_name* is created.
    """
    # Parenthesized print works on both Python 2 and Python 3.
    if self.verbose:
        print("Saving model...")
    save_dir = directory + file_name
    os.mkdir(save_dir)
    directory = save_dir + '/'
    save_loc = os.path.join(directory, file_name + '.ckpt')
    # Each logged validation row corresponds to accuracy_logged_every_n epochs.
    training_epochs = len(self.training_val_results) * self.accuracy_logged_every_n
    self.saver.save(self.session, save_loc, global_step=training_epochs)
    npz_name = os.path.join(directory, file_name + '-' + str(training_epochs))
    if not self.print_per_task:
        np.savez(npz_name,
                 training_val_results=self.training_val_results,
                 l2_beta=self.l2_beta,
                 dropout=self.dropout,
                 hidden_sizes_shared=self.hidden_sizes_shared,
                 hidden_size_task=self.hidden_size_task)
Source file: tensorFlowNetworkMultiTask.py
Project: PersonalizedMultitaskLearning
Author: mitmedialab
Views: 25 · Favorites: 0 · Likes: 0 · Comments: 0
def save_model(self, file_name, directory):
    """Saves a checkpoint of the model and a .npz file with stored rewards.

    Args:
        file_name: String name to use for the checkpoint and rewards files.
        directory: Base path (assumed to end with a separator) under which
            a new sub-directory named *file_name* is created.
    """
    # Parenthesized print works on both Python 2 and Python 3.
    if self.verbose:
        print("Saving model...")
    save_dir = directory + file_name
    os.mkdir(save_dir)
    directory = save_dir + '/'
    save_loc = os.path.join(directory, file_name + '.ckpt')
    # Each logged validation row corresponds to accuracy_logged_every_n epochs.
    training_epochs = len(self.training_val_results) * self.accuracy_logged_every_n
    self.saver.save(self.session, save_loc, global_step=training_epochs)
    npz_name = os.path.join(directory, file_name + '-' + str(training_epochs))
    if not self.print_per_task:
        np.savez(npz_name,
                 training_val_results=self.training_val_results,
                 l2_beta=self.l2_beta,
                 dropout=self.dropout,
                 hidden_sizes_shared=self.hidden_sizes_shared,
                 hidden_size_task=self.hidden_size_task)
    else:
        # Per-task tracking enabled: also store the per-task curves.
        np.savez(npz_name,
                 training_val_results=self.training_val_results,
                 training_val_results_per_task=self.training_val_results_per_task,
                 l2_beta=self.l2_beta,
                 dropout=self.dropout,
                 hidden_sizes_shared=self.hidden_sizes_shared,
                 hidden_size_task=self.hidden_size_task)