def create_inception_graph():
    """Creates a graph from a saved GraphDef file and returns a Graph object.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Session() as sess:
        model_filename = os.path.join(
            FLAGS.model_dir, 'classify_image_graph_def.pb')
        with gfile.FastGFile(model_filename, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, jpeg_data_tensor, resized_input_tensor = (
                tf.import_graph_def(graph_def, name='', return_elements=[
                    BOTTLENECK_TENSOR_NAME, JPEG_DATA_TENSOR_NAME,
                    RESIZED_INPUT_TENSOR_NAME]))
    return sess.graph, bottleneck_tensor, jpeg_data_tensor, resized_input_tensor
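A minimal usage sketch for the function above, assuming FLAGS and the *_TENSOR_NAME constants are defined as in the surrounding retraining script; the image path is purely illustrative:

import numpy as np

graph, bottleneck_tensor, jpeg_data_tensor, resized_input_tensor = (
    create_inception_graph())
with tf.Session(graph=graph) as sess:
    # Feed raw JPEG bytes and read back the bottleneck activations.
    jpeg_data = gfile.FastGFile('/tmp/example.jpg', 'rb').read()
    bottleneck_values = np.squeeze(sess.run(
        bottleneck_tensor, feed_dict={jpeg_data_tensor: jpeg_data}))
print(bottleneck_values.shape)  # typically (2048,) for Inception v3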
def build_from_pb(self):
    with tf.gfile.FastGFile(self.FLAGS.pbLoad, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())

    tf.import_graph_def(
        graph_def,
        name=""
    )

    with open(self.FLAGS.metaLoad, 'r') as fp:
        self.meta = json.load(fp)
    self.framework = create_framework(self.meta, self.FLAGS)

    # Placeholders
    self.inp = tf.get_default_graph().get_tensor_by_name('input:0')
    self.feed = dict()  # other placeholders
    self.out = tf.get_default_graph().get_tensor_by_name('output:0')

    self.setup_meta_ops()
def load_model(model):
    # Check if the model is a model directory (containing a metagraph and a checkpoint file)
    # or if it is a protobuf file with a frozen graph
    model_exp = os.path.expanduser(model)
    if os.path.isfile(model_exp):
        print('Model filename: %s' % model_exp)
        with gfile.FastGFile(model_exp, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            tf.import_graph_def(graph_def, name='')
    else:
        print('Model directory: %s' % model_exp)
        meta_file, ckpt_file = get_model_filenames(model_exp)

        print('Metagraph file: %s' % meta_file)
        print('Checkpoint file: %s' % ckpt_file)

        saver = tf.train.import_meta_graph(os.path.join(model_exp, meta_file))
        saver.restore(tf.get_default_session(), os.path.join(model_exp, ckpt_file))
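A hedged usage sketch for load_model above; the tensor names ('input:0', 'embeddings:0', 'phase_train:0') describe a typical face-embedding graph and are assumptions, not something the function guarantees:

import numpy as np
import tensorflow as tf

with tf.Graph().as_default():
    with tf.Session() as sess:
        load_model('/path/to/model')  # frozen .pb file or checkpoint directory (illustrative)
        images = tf.get_default_graph().get_tensor_by_name('input:0')
        embeddings = tf.get_default_graph().get_tensor_by_name('embeddings:0')
        phase_train = tf.get_default_graph().get_tensor_by_name('phase_train:0')
        batch = np.zeros((1, 160, 160, 3), dtype=np.float32)  # dummy batch; shape is an assumption
        emb = sess.run(embeddings, feed_dict={images: batch, phase_train: False})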
# Source: run_freeze_graph.py from the convolutional-pose-machines-tensorflow project (author: timctho)
def load_graph(frozen_graph_filename):
    # We load the protobuf file from the disk and parse it to retrieve the
    # unserialized graph_def
    with tf.gfile.GFile(frozen_graph_filename, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())

    with tf.Graph().as_default() as graph:
        tf.import_graph_def(
            graph_def,
            input_map=None,
            return_elements=None,
            name="prefix",
            op_dict=None,
            producer_op_list=None
        )
    return graph
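Because the graph above is imported under name="prefix", every tensor has to be looked up with that prefix. A minimal sketch; the node names and input shape are hypothetical:

import numpy as np

graph = load_graph('frozen_model.pb')  # illustrative filename
x = graph.get_tensor_by_name('prefix/input:0')    # assumed input node name
y = graph.get_tensor_by_name('prefix/output:0')   # assumed output node name
with tf.Session(graph=graph) as sess:
    result = sess.run(y, feed_dict={x: np.zeros((1, 28, 28, 1), dtype=np.float32)})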
def __init__(self, config, graph, model_scope, model_dir, model_file):
    self.config = config

    frozen_model = os.path.join(model_dir, model_file)
    with tf.gfile.GFile(frozen_model, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())

    # This model_scope adds a prefix to all the nodes in the graph
    tf.import_graph_def(graph_def, input_map=None, return_elements=None,
                        name="{}/".format(model_scope))

    # Uncomment the two lines below to list the names of all the operations in the graph:
    # for op in graph.get_operations():
    #     print(op.name)
    # Use that listing to find the tensor names of the input and output nodes,
    # or figure them out from your original model if you named them explicitly.
    self.input_tensor = graph.get_tensor_by_name("{}/input_1:0".format(model_scope))
    self.output_tensor = graph.get_tensor_by_name("{}/s1_output0:0".format(model_scope))
def load_frozen_graph(graph_dir, fix_nodes=True, entry=None, output=None):
    with gfile.FastGFile(graph_dir, "rb") as file:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(file.read())

    if fix_nodes:
        # Common workaround for frozen graphs that still contain training-time ops
        # (e.g. batch-norm moving averages): rewrite RefSwitch -> Switch and
        # AssignSub -> Sub so the graph can be imported for inference.
        for node in graph_def.node:
            if node.op == 'RefSwitch':
                node.op = 'Switch'
                for index in range(len(node.input)):
                    if 'moving_' in node.input[index]:
                        node.input[index] = node.input[index] + '/read'
            elif node.op == 'AssignSub':
                node.op = 'Sub'
                if 'use_locking' in node.attr:
                    del node.attr['use_locking']

    tf.import_graph_def(graph_def, name="")
    if entry is not None:
        entry = tf.get_default_graph().get_tensor_by_name(entry)
    if output is not None:
        output = tf.get_default_graph().get_tensor_by_name(output)
    return entry, output
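A usage sketch, assuming the frozen graph really contains tensors named 'input:0' and 'logits:0' (both hypothetical names):

import numpy as np

entry, output = load_frozen_graph('frozen.pb', fix_nodes=True,
                                  entry='input:0', output='logits:0')  # illustrative names/path
dummy_batch = np.zeros((1, 224, 224, 3), dtype=np.float32)  # input shape is an assumption
with tf.Session() as sess:
    logits = sess.run(output, feed_dict={entry: dummy_batch})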
def create_inception_graph():
    """Creates a graph from a saved GraphDef file and returns a Graph object.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Graph().as_default() as graph:
        model_filename = os.path.join(
            FLAGS.model_dir, 'classify_image_graph_def.pb')
        with gfile.FastGFile(model_filename, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, jpeg_data_tensor, resized_input_tensor = (
                tf.import_graph_def(graph_def, name='', return_elements=[
                    BOTTLENECK_TENSOR_NAME, JPEG_DATA_TENSOR_NAME,
                    RESIZED_INPUT_TENSOR_NAME]))
    return graph, bottleneck_tensor, jpeg_data_tensor, resized_input_tensor
def import_protobuf(self, pb_file, verbose=False):
    """
    Imports graph_def from protobuf file to ngraph.

    Arguments:
        pb_file: Protobuf file path.
        verbose: Prints graph_def at each node if True.
    """
    # read graph_def
    graph_def = tf.GraphDef()
    if mimetypes.guess_type(pb_file)[0] == 'text/plain':
        with open(pb_file, 'r') as f:
            text_format.Merge(f.read(), graph_def)
    else:
        with open(pb_file, 'rb') as f:
            graph_def.ParseFromString(f.read())

    self.import_graph_def(graph_def, verbose=verbose)
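The text/binary branch above covers the two forms a GraphDef can be serialized in. For reference (not part of the importer itself), TensorFlow's tf.train.write_graph produces either form:

import tensorflow as tf

graph_def = tf.get_default_graph().as_graph_def()
tf.train.write_graph(graph_def, '/tmp', 'graph.pbtxt', as_text=True)   # text format, read back with text_format.Merge
tf.train.write_graph(graph_def, '/tmp', 'graph.pb', as_text=False)     # binary format, read back with ParseFromString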
def inference():
    graph = tf.Graph()

    with graph.as_default():
        with tf.gfile.FastGFile(FLAGS.input, 'rb') as f:
            image_data = f.read()
            input_image = tf.image.decode_jpeg(image_data, channels=3)
            input_image = tf.image.resize_images(input_image, size=(FLAGS.image_size, FLAGS.image_size))
            input_image = utils.convert2float(input_image)
            input_image.set_shape([FLAGS.image_size, FLAGS.image_size, 3])

        with tf.gfile.FastGFile(FLAGS.model, 'rb') as model_file:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(model_file.read())
        [output_image] = tf.import_graph_def(graph_def,
                                             input_map={'input_image': input_image},
                                             return_elements=['output_image:0'],
                                             name='output')

    with tf.Session(graph=graph) as sess:
        generated = output_image.eval()
    with open(FLAGS.output, 'wb') as f:
        f.write(generated)
def gnr_optimize_graph(graph_path, optimized_graph_path):
    """
    Optimize a frozen graph for inference.
    :param graph_path: path of the input (frozen) graph
    :param optimized_graph_path: path to write the optimized graph
    :return: None
    """
    # Read the serialized GraphDef.
    input_graph_def = tf.GraphDef()
    with tf.gfile.Open(graph_path, "rb") as f:
        data = f.read()
        input_graph_def.ParseFromString(data)

    # Optimize for inference; the original note says this shrinks the model to roughly 1/4 of its size.
    output_graph_def = optimize_for_inference_lib.optimize_for_inference(
        input_graph_def,
        ["I"],  # an array of the input node(s)
        ["O"],  # an array of output nodes
        tf.float32.as_datatype_enum)

    # Write the optimized graph.
    with tf.gfile.FastGFile(optimized_graph_path, "wb") as f:
        f.write(output_graph_def.SerializeToString())
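A call sketch; note that the input/output node names are hard-coded as "I" and "O" inside the function, so it only applies to graphs that actually use those names. The import shown is the one the function relies on:

from tensorflow.python.tools import optimize_for_inference_lib  # needed by gnr_optimize_graph

gnr_optimize_graph('frozen_graph.pb', 'optimized_graph.pb')  # illustrative paths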
def Get_Pre_Trained_Weights(input_vars, name):
    with open("vgg16.tfmodel", mode='rb') as f:
        fileContent = f.read()

    graph_def = tf.GraphDef()
    graph_def.ParseFromString(fileContent)
    images = tf.placeholder(tf.float32, shape=(None, 64, 64, 3), name=name)
    tf.import_graph_def(graph_def, input_map={"images": images})
    print("graph loaded from disk")

    graph = tf.get_default_graph()
    with tf.Session() as sess:
        init = tf.initialize_all_variables()
        sess.run(init)
        # batch = np.reshape(input_vars, (-1, 224, 224, 3))
        n_timewin = 7
        convnets = []
        for i in range(n_timewin):
            feed_dict = {images: input_vars[:, i, :, :, :]}
            pool_tensor = graph.get_tensor_by_name("import/pool5:0")
            pool_tensor = sess.run(pool_tensor, feed_dict=feed_dict)
            convnets.append(tf.contrib.layers.flatten(pool_tensor))
        convpool = tf.pack(convnets, axis=1)
        return convpool
def fromGraphDef(cls, graph_def, feed_names, fetch_names):
    """
    Construct a TFInputGraph from a tf.GraphDef object.

    :param graph_def: :py:class:`tf.GraphDef`, a serializable object containing the topology and
                      computation units of the TensorFlow graph.
    :param feed_names: list, names of the input tensors.
    :param fetch_names: list, names of the output tensors.
    """
    assert isinstance(graph_def, tf.GraphDef), \
        ('expect tf.GraphDef type but got', type(graph_def))

    graph = tf.Graph()
    with tf.Session(graph=graph) as sess:
        tf.import_graph_def(graph_def, name='')
        return _build_with_feeds_fetches(sess=sess, graph=graph, feed_names=feed_names,
                                         fetch_names=fetch_names)
def load_model(sess, model_path):
    if os.path.isfile(model_path):
        # A protobuf file with a frozen graph
        print('Model filename: %s' % model_path)
        with gfile.FastGFile(model_path, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            tf.import_graph_def(graph_def, name='')
    else:
        # A directory containing a metagraph file and a checkpoint file
        print('Model directory: %s' % model_path)
        meta_file, ckpt_file = get_model_filenames(model_path)

        print('Metagraph file: %s' % meta_file)
        print('Checkpoint file: %s' % ckpt_file)

        saver = tf.train.import_meta_graph(os.path.join(model_path, meta_file), clear_devices=True)
        saver.restore(sess, os.path.join(model_path, ckpt_file))
def create_inception_graph():
    """
    Brief:
        Creates a graph from a saved GraphDef file and returns a Graph object.
    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Graph().as_default() as graph:
        model_filename = os.path.join(FLAGS.model_dir, 'classify_image_graph_def.pb')
        with gfile.FastGFile(model_filename, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, jpeg_data_tensor, resized_input_tensor = (
                tf.import_graph_def(graph_def, name='', return_elements=[
                    BOTTLENECK_TENSOR_NAME, JPEG_DATA_TENSOR_NAME,
                    RESIZED_INPUT_TENSOR_NAME]))
    return graph, bottleneck_tensor, jpeg_data_tensor, resized_input_tensor
def __init__(self, name, input, i, j, k):
    """
    :param input: A 4D-tensor of shape [batchSize, 224, 224, 3]
                  [0:i, :, :, :] holds i style images,
                  [i:i+j, :, :, :] holds j content images,
                  [i+j:i+j+k, :, :, :] holds k synthesized images
    """
    self.name = name
    self.num_style = i
    self.num_content = j
    self.num_synthesized = k

    with open("models/vgg16.tfmodel", mode='rb') as f:
        file_content = f.read()
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(file_content)
    tf.import_graph_def(graph_def, input_map={"images": input}, name=self.name)
def load_graph(frozen_graph_filename):
    """Load the graph/model to be used."""
    logging.info('Loading frozen model-graph: ' + frozen_graph_filename)

    # We load the protobuf file from the disk and parse it to retrieve the
    # unserialized graph_def
    logging.debug('Reading model file')
    with tf.gfile.GFile(frozen_graph_filename, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())

    # Then we use the convenient built-in function to import the graph_def into the
    # current default Graph
    logging.debug('Importing graph')
    with tf.Graph().as_default() as graph:
        tf.import_graph_def(
            graph_def,
            input_map=None,
            return_elements=None,
            name="prefix",
            op_dict=None,
            producer_op_list=None
        )
    return graph
def test_get_graph_def_from_url_tarball(self):
    """Test `get_graph_def_from_url_tarball`."""
    # Write dummy binary GraphDef to tempfile.
    with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
        tmp_file.write(_get_dummy_graphdef().SerializeToString())
    relative_path = os.path.relpath(tmp_file.name)

    # Create gzip tarball.
    tar_dir = tempfile.mkdtemp()
    tar_filename = os.path.join(tar_dir, 'tmp.tar.gz')
    with tarfile.open(tar_filename, 'w:gz') as tar:
        tar.add(relative_path)

    with mock.patch.object(gan_metrics, 'urllib') as mock_urllib:
        mock_urllib.request.urlretrieve.return_value = tar_filename, None
        graph_def = gan_metrics.get_graph_def_from_url_tarball(
            'unused_url', relative_path)

    self.assertIsInstance(graph_def, tf.GraphDef)
    self.assertEqual(_get_dummy_graphdef(), graph_def)
def load_frozen_graph(frozen_graph):
    with tf.gfile.GFile(frozen_graph, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())

    try:
        with tf.Graph().as_default() as graph:
            tf.import_graph_def(
                graph_def,
                input_map=None,
                return_elements=None,
                name='model',
                op_dict=None,
                producer_op_list=None
            )
        return graph
    except Exception as e:
        print(e)
def create_model_graph(model_info):
    """Creates a graph from a saved GraphDef file and returns a Graph object.

    Args:
        model_info: Dictionary containing information about the model architecture.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Graph().as_default() as graph:
        model_path = os.path.join(FLAGS.model_dir, model_info['model_file_name'])
        with gfile.FastGFile(model_path, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, resized_input_tensor = (tf.import_graph_def(
                graph_def,
                name='',
                return_elements=[
                    model_info['bottleneck_tensor_name'],
                    model_info['resized_input_tensor_name'],
                ]))
    return graph, bottleneck_tensor, resized_input_tensor
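A sketch of the model_info dictionary this function expects; the values below correspond to the classic Inception v3 graph used elsewhere on this page and should be treated as assumptions for any other model:

model_info = {
    'model_file_name': 'classify_image_graph_def.pb',
    'bottleneck_tensor_name': 'pool_3/_reshape:0',
    'resized_input_tensor_name': 'Mul:0',
}
graph, bottleneck_tensor, resized_input_tensor = create_model_graph(model_info)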
def load_graph(frozen_graph_filename):
    # We parse the graph_def file
    with tf.gfile.GFile(frozen_graph_filename, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())

    # We load the graph_def in the default graph
    with tf.Graph().as_default() as graph:
        tf.import_graph_def(
            graph_def,
            input_map=None,
            return_elements=None,
            name="prefix",
            op_dict=None,
            producer_op_list=None
        )
    return graph
def __init__(self, graph_file_path, initializer_node_name, input_node_name, output_node_name):
    self.graph = tf.Graph()
    self.session = tf.Session(graph=self.graph)

    graph_def = tf.GraphDef()
    with open(graph_file_path, 'rb') as f:
        graph_def.ParseFromString(f.read())
    with self.graph.as_default():
        tf.import_graph_def(graph_def)

    if initializer_node_name:
        self.initializer = self.graph.get_operation_by_name('import/' + initializer_node_name)
    self.input = self.graph.get_tensor_by_name('import/%s:0' % input_node_name)
    self.output = self.graph.get_tensor_by_name('import/%s:0' % output_node_name)

    if initializer_node_name:
        self.session.run(self.initializer)
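Assuming the constructor above belongs to a class named, say, FrozenModel (the real class name is not shown here), usage might look like the following; node names and input shape are illustrative:

import numpy as np

model = FrozenModel('model.pb',                      # illustrative path
                    initializer_node_name=None,      # or an init op name, if the graph has one
                    input_node_name='images',        # assumed input node
                    output_node_name='predictions')  # assumed output node
dummy = np.zeros((1, 224, 224, 3), dtype=np.float32)
preds = model.session.run(model.output, feed_dict={model.input: dummy})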