def testPathsWithParse(self):
    base_dir = os.path.join(tf.test.get_temp_dir(), "paths_parse")
    self.assertFalse(gfile.Exists(base_dir))
    for p in xrange(3):
        gfile.MakeDirs(os.path.join(base_dir, "%d" % p))
    # add a base_directory to ignore
    gfile.MakeDirs(os.path.join(base_dir, "ignore"))

    # create a simple parser that pulls the export_version from the directory.
    def parser(path):
        match = re.match("^" + base_dir + "/(\\d+)$", path.path)
        if not match:
            return None
        return path._replace(export_version=int(match.group(1)))

    self.assertEquals(
        gc.get_paths(base_dir, parser=parser),
        [gc.Path(os.path.join(base_dir, "0"), 0),
         gc.Path(os.path.join(base_dir, "1"), 1),
         gc.Path(os.path.join(base_dir, "2"), 2)])
Python gfile.MakeDirs() usage examples (source code)
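Before the remaining project snippets, a minimal standalone sketch of what MakeDirs itself does (assuming TensorFlow 1.x, where gfile is importable from tensorflow.python.platform; the /tmp path is a hypothetical placeholder): it creates a directory along with any missing parents and succeeds if the directory already exists.

import os

from tensorflow.python.platform import gfile

demo_dir = os.path.join("/tmp", "makedirs_demo", "a", "b")  # hypothetical path
gfile.MakeDirs(demo_dir)       # creates all intermediate directories
print(gfile.Exists(demo_dir))  # True
gfile.MakeDirs(demo_dir)       # calling it again on an existing directory is not an error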
def testFinalOpsOnEvaluationLoop(self):
    value_op, update_op = slim.metrics.streaming_accuracy(
        self._predictions, self._labels)
    init_op = tf.group(tf.initialize_all_variables(),
                       tf.initialize_local_variables())

    # Create Checkpoint and log directories
    chkpt_dir = os.path.join(self.get_temp_dir(), 'tmp_logs/')
    gfile.MakeDirs(chkpt_dir)
    logdir = os.path.join(self.get_temp_dir(), 'tmp_logs2/')
    gfile.MakeDirs(logdir)

    # Save initialized variables to checkpoint directory
    saver = tf.train.Saver()
    with self.test_session() as sess:
        init_op.run()
        saver.save(sess, os.path.join(chkpt_dir, 'chkpt'))

    # Now, run the evaluation loop:
    accuracy_value = slim.evaluation.evaluation_loop(
        '', chkpt_dir, logdir, eval_op=update_op, final_op=value_op,
        max_number_of_evaluations=1)
    self.assertAlmostEqual(accuracy_value, self._expected_accuracy)
def maybe_download(filename, work_directory, source_url):
    """Download the data from source url, unless it's already here.

    Args:
        filename: string, name of the file in the directory.
        work_directory: string, path to working directory.
        source_url: url to download from if file doesn't exist.

    Returns:
        Path to resulting file.
    """
    if not gfile.Exists(work_directory):
        gfile.MakeDirs(work_directory)
    filepath = os.path.join(work_directory, filename)
    if not gfile.Exists(filepath):
        with tempfile.NamedTemporaryFile() as tmpfile:
            temp_file_name = tmpfile.name
            urllib.request.urlretrieve(source_url, temp_file_name)
            gfile.Copy(temp_file_name, filepath)
            with gfile.GFile(filepath) as f:
                size = f.size()
            print('Successfully downloaded', filename, size, 'bytes.')
    return filepath
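A hedged usage sketch for maybe_download; the filename, directory, and URL below are hypothetical placeholders, not taken from the snippet above.

# Hypothetical call: downloads the file once and reuses it on later calls.
data_path = maybe_download(
    "train-images.gz",                               # hypothetical filename
    "/tmp/demo_data",                                # hypothetical work directory
    "https://example.com/datasets/train-images.gz")  # hypothetical URL
print(data_path)  # -> /tmp/demo_data/train-images.gz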
def testFinalOpsOnEvaluationLoop(self):
    value_op, update_op = slim.metrics.streaming_accuracy(
        self._predictions, self._labels)
    init_op = tf.group(tf.global_variables_initializer(),
                       tf.local_variables_initializer())

    # Create Checkpoint and log directories
    chkpt_dir = os.path.join(self.get_temp_dir(), 'tmp_logs/')
    gfile.MakeDirs(chkpt_dir)
    logdir = os.path.join(self.get_temp_dir(), 'tmp_logs2/')
    gfile.MakeDirs(logdir)

    # Save initialized variables to checkpoint directory
    saver = tf.train.Saver()
    with self.test_session() as sess:
        init_op.run()
        saver.save(sess, os.path.join(chkpt_dir, 'chkpt'))

    # Now, run the evaluation loop:
    accuracy_value = slim.evaluation.evaluation_loop(
        '', chkpt_dir, logdir, eval_op=update_op, final_op=value_op,
        max_number_of_evaluations=1)
    self.assertAlmostEqual(accuracy_value, self._expected_accuracy)
def maybe_download(filename, work_directory, source_url):
    """Download the data from source url, unless it's already here.

    Args:
        filename: string, name of the file in the directory.
        work_directory: string, path to working directory.
        source_url: url to download from if file doesn't exist.

    Returns:
        Path to resulting file.
    """
    if not gfile.Exists(work_directory):
        gfile.MakeDirs(work_directory)
    filepath = os.path.join(work_directory, filename)
    if not gfile.Exists(filepath):
        temp_file_name, _ = urlretrieve_with_retry(source_url)
        gfile.Copy(temp_file_name, filepath)
        with gfile.GFile(filepath) as f:
            size = f.size()
        print('Successfully downloaded', filename, size, 'bytes.')
    return filepath
def output_predict(depths, images, output_dir):
    print("output predict into %s" % output_dir)
    if not gfile.Exists(output_dir):
        gfile.MakeDirs(output_dir)
    for i, (image, depth) in enumerate(zip(images, depths)):
        pilimg = Image.fromarray(np.uint8(image))
        image_name = "%s/%05d_org.png" % (output_dir, i)
        pilimg.save(image_name)

        depth = depth.transpose(2, 0, 1)
        if np.max(depth) != 0:
            ra_depth = (depth / np.max(depth)) * 255.0
        else:
            ra_depth = depth * 255.0
        depth_pil = Image.fromarray(np.uint8(ra_depth[0]), mode="L")
        depth_name = "%s/%05d_dep.png" % (output_dir, i)
        depth_pil.save(depth_name)
def _create_tfrecord_dataset(tmpdir):
    if not gfile.Exists(tmpdir):
        gfile.MakeDirs(tmpdir)

    data_sources = test_utils.create_tfrecord_files(tmpdir, num_files=1)

    keys_to_features = {
        'image/encoded': tf.FixedLenFeature(
            shape=(), dtype=dtypes.string, default_value=''),
        'image/format': tf.FixedLenFeature(
            shape=(), dtype=dtypes.string, default_value='jpeg'),
        'image/class/label': tf.FixedLenFeature(
            shape=[1], dtype=dtypes.int64,
            default_value=array_ops.zeros([1], dtype=dtypes.int64)),
    }

    items_to_handlers = {
        'image': tfslim.tfexample_decoder.Image(),
        'label': tfslim.tfexample_decoder.Tensor('image/class/label'),
    }

    decoder = TFExampleDecoder(keys_to_features, items_to_handlers)

    return Dataset(
        data_sources=data_sources, reader=tf.TFRecordReader,
        decoder=decoder, num_samples=100)
gc_test.py (project: DeepLearning_VirtualReality_BigData_Project, author: rashmitripathi)
def testPathsWithParse(self):
    base_dir = os.path.join(test.get_temp_dir(), "paths_parse")
    self.assertFalse(gfile.Exists(base_dir))
    for p in xrange(3):
        gfile.MakeDirs(os.path.join(base_dir, "%d" % p))
    # add a base_directory to ignore
    gfile.MakeDirs(os.path.join(base_dir, "ignore"))

    # create a simple parser that pulls the export_version from the directory.
    def parser(path):
        match = re.match("^" + base_dir + "/(\\d+)$", path.path)
        if not match:
            return None
        return path._replace(export_version=int(match.group(1)))

    self.assertEquals(
        gc.get_paths(base_dir, parser=parser), [
            gc.Path(os.path.join(base_dir, "0"), 0),
            gc.Path(os.path.join(base_dir, "1"), 1),
            gc.Path(os.path.join(base_dir, "2"), 2)
        ])
base.py (project: DeepLearning_VirtualReality_BigData_Project, author: rashmitripathi)
def maybe_download(filename, work_directory, source_url):
    """Download the data from source url, unless it's already here.

    Args:
        filename: string, name of the file in the directory.
        work_directory: string, path to working directory.
        source_url: url to download from if file doesn't exist.

    Returns:
        Path to resulting file.
    """
    if not gfile.Exists(work_directory):
        gfile.MakeDirs(work_directory)
    filepath = os.path.join(work_directory, filename)
    if not gfile.Exists(filepath):
        temp_file_name, _ = urlretrieve_with_retry(source_url)
        gfile.Copy(temp_file_name, filepath)
        with gfile.GFile(filepath) as f:
            size = f.size()
        print('Successfully downloaded', filename, size, 'bytes.')
    return filepath
def _write_plugin_assets(self, graph):
    plugin_assets = plugin_asset.get_all_plugin_assets(graph)
    logdir = self.event_writer.get_logdir()
    for asset_container in plugin_assets:
        plugin_name = asset_container.plugin_name
        plugin_dir = os.path.join(logdir, _PLUGINS_DIR, plugin_name)
        gfile.MakeDirs(plugin_dir)
        assets = asset_container.assets()
        for (asset_name, content) in assets.items():
            asset_path = os.path.join(plugin_dir, asset_name)
            with gfile.Open(asset_path, "w") as f:
                f.write(content)
def gfile_copy_callback(files_to_copy, export_dir_path):
    """Callback to copy files using `gfile.Copy` to an export directory.

    This method is used as the default `assets_callback` in `Exporter.init` to
    copy assets from the `assets_collection`. It can also be invoked directly
    to copy additional supplementary files into the export directory (in which
    case it is not a callback).

    Args:
        files_to_copy: A dictionary that maps original file paths to desired
            basename in the export directory.
        export_dir_path: Directory to copy the files to.
    """
    logging.info("Write assets into: %s using gfile_copy.", export_dir_path)
    gfile.MakeDirs(export_dir_path)
    for source_filepath, basename in files_to_copy.items():
        new_path = os.path.join(
            compat.as_bytes(export_dir_path), compat.as_bytes(basename))
        logging.info("Copying asset %s to path %s.", source_filepath, new_path)

        if gfile.Exists(new_path):
            # Guard against being restarted while copying assets, and the file
            # existing and being in an unknown state.
            # TODO(b/28676216): Do some file checks before deleting.
            logging.info("Removing file %s.", new_path)
            gfile.Remove(new_path)

        gfile.Copy(source_filepath, new_path)
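A hedged sketch of invoking gfile_copy_callback directly rather than as a callback; all paths below are hypothetical placeholders.

# Hypothetical direct invocation: copy two local asset files into an export
# directory, keyed by their desired basenames.
assets_to_copy = {
    "/tmp/assets/vocab.txt": "vocab.txt",
    "/tmp/assets/labels.txt": "labels.txt",
}
gfile_copy_callback(assets_to_copy, "/tmp/my_model/export/assets")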
def _copy_dir(dir_in, dir_out):
    gfile.MakeDirs(dir_out)
    for name in gfile.ListDirectory(dir_in):
        name_in = os.path.join(dir_in, name)
        name_out = os.path.join(dir_out, name)
        if gfile.IsDirectory(name_in):
            gfile.MakeDirs(name_out)
            _copy_dir(name_in, name_out)
        else:
            gfile.Copy(name_in, name_out, overwrite=True)
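A hedged usage sketch for _copy_dir; both paths are hypothetical placeholders.

# Hypothetical call: recursively mirror one directory tree into another,
# overwriting files that already exist at the destination.
_copy_dir("/tmp/run_outputs", "/tmp/run_outputs_backup")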
def testEvaluationLoopTimeout(self):
    _, update_op = slim.metrics.streaming_accuracy(
        self._predictions, self._labels)
    init_op = tf.group(tf.global_variables_initializer(),
                       tf.local_variables_initializer())

    # Create checkpoint and log directories.
    chkpt_dir = os.path.join(self.get_temp_dir(), 'tmp_logs/')
    gfile.MakeDirs(chkpt_dir)
    logdir = os.path.join(self.get_temp_dir(), 'tmp_logs2/')
    gfile.MakeDirs(logdir)

    # Save initialized variables to checkpoint directory.
    saver = tf.train.Saver()
    with self.test_session() as sess:
        init_op.run()
        saver.save(sess, os.path.join(chkpt_dir, 'chkpt'))

    # Run the evaluation loop with a timeout.
    with self.test_session() as sess:
        start = time.time()
        slim.evaluation.evaluation_loop(
            '', chkpt_dir, logdir, eval_op=update_op,
            eval_interval_secs=2.0, timeout=6.0)
        end = time.time()

        # Check that we've waited for the timeout.
        self.assertGreater(end - start, 6.0)
        # Then check that the timeout kicked in and stopped the loop.
        self.assertLess(end - start, 8.0)
def main(argv=None):
    if not gfile.Exists(COARSE_DIR):
        gfile.MakeDirs(COARSE_DIR)
    if not gfile.Exists(REFINE_DIR):
        gfile.MakeDirs(REFINE_DIR)
    if TEST:
        test()
    elif TRAIN:
        train()
def check_path_exist(self):
    if not gfile.Exists(self.output_summary_path):
        gfile.MakeDirs(self.output_summary_path)
    if not gfile.Exists(self.output_check_point_path):
        gfile.MakeDirs(self.output_check_point_path)
    if not gfile.Exists(self.output_train_predict_depth_path):
        gfile.MakeDirs(self.output_train_predict_depth_path)
    if not gfile.Exists(self.output_eval_predict_depth_path):
        gfile.MakeDirs(self.output_eval_predict_depth_path)
    if not gfile.Exists(self.output_test_predict_depth_path):
        gfile.MakeDirs(self.output_test_predict_depth_path)
def save(images, depths, predict_depths, global_step, target_path,
         batch_number=None, mode='train'):
    output_dir = os.path.join(target_path, str(global_step))
    if not gfile.Exists(output_dir):
        gfile.MakeDirs(output_dir)
    for i, (image, depth, predict_depth) in enumerate(
            zip(images, depths, predict_depths)):
        if batch_number is None:
            image_name = "%s/%05d_rgb.png" % (output_dir, i)
            depth_name = "%s/%05d_depth.png" % (output_dir, i)
            predict_depth_name = "%s/%05d_predict.png" % (output_dir, i)
        else:
            image_name = "%s/%d_%05d_rgb.png" % (output_dir, batch_number, i)
            depth_name = "%s/%d_%05d_depth.png" % (output_dir, batch_number, i)
            predict_depth_name = "%s/%d_%05d_predict.png" % (output_dir, batch_number, i)

        pilimg = Image.fromarray(np.uint8(image))
        pilimg.save(image_name)

        depth = depth.transpose(2, 0, 1)
        if np.max(depth) != 0:
            ra_depth = (depth / np.max(depth)) * 255.0
        else:
            ra_depth = depth * 255.0
        depth_pil = Image.fromarray(np.uint8(ra_depth[0]), mode="L")
        depth_pil.save(depth_name)

        predict_depth = predict_depth.transpose(2, 0, 1)
        if np.max(predict_depth) != 0:
            ra_depth = (predict_depth / np.max(predict_depth)) * 255.0
        else:
            ra_depth = predict_depth * 255.0
        depth_pil = Image.fromarray(np.uint8(ra_depth[0]), mode="L")
        depth_pil.save(predict_depth_name)
def output_predict_test(true_depths, depths, images, filenames,
                        depth_filenames, output_dir, current_test_number):
    # print images.shape
    print("output predict into %s" % output_dir)
    if not gfile.Exists(output_dir):
        gfile.MakeDirs(output_dir)
    for i, (image, depth, true_depth, filename) in enumerate(
            zip(images, depths, true_depths, filenames)):
        # print filenames
        img_info = re.sub(r'/', '_', re.findall(
            r'data/[a-zA-Z0-9_]+/[a-zA-Z0-9_]+/[a-zA-Z0-9]+', filename)[0])[0]

        pilimg = Image.fromarray(np.uint8(image))
        image_name = "%s/%s_org.png" % (output_dir, img_info)
        pilimg.save(image_name)

        depth = depth.transpose(2, 0, 1)
        if np.max(depth) != 0:
            ra_depth = (depth / np.max(depth)) * 255.0
        else:
            ra_depth = depth * 255.0
        depth_pil = Image.fromarray(np.uint8(ra_depth[0]), mode="L")
        depth_name = "%s/%s_dep.png" % (output_dir, img_info)
        depth_pil.save(depth_name)

        true_depth = true_depth.transpose(2, 0, 1)
        if np.max(true_depth) != 0:
            ra_true_depth = (true_depth / np.max(true_depth)) * 255.0
        else:
            ra_true_depth = true_depth * 255.0
        true_depth_pil = Image.fromarray(np.uint8(ra_true_depth[0]), mode="L")
        true_depth_name = "%s/%s_true.png" % (output_dir, img_info)
        true_depth_pil.save(true_depth_name)
def main(argv=None):  # pylint: disable=unused-argument
    cifar10.maybe_download_and_extract()
    if gfile.Exists(FLAGS.train_dir):
        gfile.DeleteRecursively(FLAGS.train_dir)
    gfile.MakeDirs(FLAGS.train_dir)
    train()
def main(argv=None):  # pylint: disable=unused-argument
    cifar10.maybe_download_and_extract()
    if gfile.Exists(FLAGS.train_dir):
        gfile.DeleteRecursively(FLAGS.train_dir)
    else:
        gfile.MakeDirs(FLAGS.train_dir)
    train()
def main(argv=None):  # pylint: disable=unused-argument
    cifar10.maybe_download_and_extract()
    if gfile.Exists(FLAGS.eval_dir):
        gfile.DeleteRecursively(FLAGS.eval_dir)
    gfile.MakeDirs(FLAGS.eval_dir)
    evaluate()
cifar10_multi_gpu_train_highway.py (project: Gating-types-for-Residual-Networks, author: luong-vinh)
def main(argv=None):  # pylint: disable=unused-argument
    setConfig()
    config = network_config.getConfig()
    train_dir = config['train_dir']
    cifar10.maybe_download_and_extract()
    if gfile.Exists(train_dir):
        gfile.DeleteRecursively(train_dir)
    gfile.MakeDirs(train_dir)
    train()
cifar10_train_highway.py (project: Gating-types-for-Residual-Networks, author: luong-vinh)
def main(argv=None):  # pylint: disable=unused-argument
    # Have to set config first.
    # TODO: remove the need for this, will check how Python initializes a module.
    setConfig()
    cifar10.maybe_download_and_extract()
    config = network_config.getConfig()
    train_dir = config['train_dir']
    if gfile.Exists(train_dir):
        gfile.DeleteRecursively(train_dir)
    gfile.MakeDirs(train_dir)
    train()
cifar10_multi_gpu_train.py (project: Gating-types-for-Residual-Networks, author: luong-vinh)
def main(argv=None):  # pylint: disable=unused-argument
    setConfig()
    config = network_config.getConfig()
    train_dir = config['train_dir']
    cifar10.maybe_download_and_extract()
    if gfile.Exists(train_dir):
        gfile.DeleteRecursively(train_dir)
    gfile.MakeDirs(train_dir)
    train()
cifar10_train.py (project: Gating-types-for-Residual-Networks, author: luong-vinh)
def main(argv=None):  # pylint: disable=unused-argument
    # Have to set config first.
    # TODO: remove the need for this, will check how Python initializes a module.
    setConfig()
    cifar10.maybe_download_and_extract()
    config = network_config.getConfig()
    train_dir = config['train_dir']
    if gfile.Exists(train_dir):
        gfile.DeleteRecursively(train_dir)
    gfile.MakeDirs(train_dir)
    train()
cifar10_train_myway.py (project: Gating-types-for-Residual-Networks, author: luong-vinh)
def main(argv=None):  # pylint: disable=unused-argument
    # Have to set config first.
    # TODO: remove the need for this, will check how Python initializes a module.
    setConfig()
    cifar10.maybe_download_and_extract()
    config = network_config.getConfig()
    train_dir = config['train_dir']
    if gfile.Exists(train_dir):
        gfile.DeleteRecursively(train_dir)
    gfile.MakeDirs(train_dir)
    train()
def main(argv=None):  # pylint: disable=unused-argument
    if not gfile.Exists(FLAGS.checkpoint_dir):
        # gfile.DeleteRecursively(FLAGS.checkpoint_dir)
        gfile.MakeDirs(FLAGS.checkpoint_dir)
    model_file = os.path.join('models', FLAGS.model + '.py')
    assert gfile.Exists(model_file), 'no model file named: ' + model_file
    gfile.Copy(model_file, FLAGS.checkpoint_dir + '/model.py')
    m = importlib.import_module('.' + FLAGS.model, 'models')
    data = get_data_provider(FLAGS.dataset, training=True)
    train(m.model, data,
          batch_size=FLAGS.batch_size,
          checkpoint_dir=FLAGS.checkpoint_dir,
          log_dir=FLAGS.log_dir,
          num_epochs=FLAGS.num_epochs)