def templatemethod(name_):
"""This decorator wraps a method with `tf.make_template`. For example,
@templatemethod("my_method")
def my_method():
# Create variables
"""
def template_decorator(func):
"""Inner decorator function"""
def func_wrapper(*args, **kwargs):
"""Inner wrapper function"""
templated_func = tf.make_template(name_, func)
return templated_func(*args, **kwargs)
return func_wrapper
return template_decorator
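A minimal usage sketch of the decorator above; the Encoder class, method name and shapes are hypothetical, and TF 1.x is assumed (tf.make_template, tf.get_variable):

import tensorflow as tf

class Encoder(object):
    """Hypothetical class whose method is wrapped by templatemethod."""

    @templatemethod("embeddings")
    def _build_embeddings(self, word_ids):
        # tf.make_template places the variables created here under an
        # "embeddings" variable scope.
        embedding_matrix = tf.get_variable("W", shape=[1000, 64])
        return tf.nn.embedding_lookup(embedding_matrix, word_ids)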
def setUp(self):
super(TestVirtualAdversarialMethod, self).setUp()
import tensorflow as tf
import tensorflow.contrib.slim as slim
def dummy_model(x):
net = slim.fully_connected(x, 60)
return slim.fully_connected(net, 10, activation_fn=None)
self.sess = tf.Session()
self.sess.as_default()
self.model = tf.make_template('dummy_model', dummy_model)
self.attack = VirtualAdversarialMethod(self.model, sess=self.sess)
# initialize model
with tf.name_scope('dummy_model'):
self.model(tf.placeholder(tf.float32, shape=(None, 1000)))
self.sess.run(tf.global_variables_initializer())
def setUp(self):
super(TestSaliencyMapMethod, self).setUp()
import tensorflow as tf
import tensorflow.contrib.slim as slim
def dummy_model(x):
net = slim.fully_connected(x, 60)
return slim.fully_connected(net, 10, activation_fn=None)
self.sess = tf.Session()
self.sess.as_default()
self.model = tf.make_template('dummy_model', dummy_model)
self.attack = SaliencyMapMethod(self.model, sess=self.sess)
# initialize model
with tf.name_scope('dummy_model'):
self.model(tf.placeholder(tf.float32, shape=(None, 1000)))
self.sess.run(tf.global_variables_initializer())
self.attack = SaliencyMapMethod(self.model, sess=self.sess)
train_lstm_multivariate.py (source file)
Project: TensorFlow-Time-Series-Examples
Author: hzy46
def initialize_graph(self, input_statistics):
"""Save templates for components, which can then be used repeatedly.
This method is called every time a new graph is created. It's safe to start
adding ops to the current default graph here, but the graph should be
constructed from scratch.
Args:
input_statistics: A math_utils.InputStatistics object.
"""
super(_LSTMModel, self).initialize_graph(input_statistics=input_statistics)
self._lstm_cell = tf.nn.rnn_cell.LSTMCell(num_units=self._num_units)
# Create templates so we don't have to worry about variable reuse.
self._lstm_cell_run = tf.make_template(
name_="lstm_cell",
func_=self._lstm_cell,
create_scope_now_=True)
# Transforms LSTM output into mean predictions.
self._predict_from_lstm_output = tf.make_template(
name_="predict_from_lstm_output",
func_=lambda inputs: tf.layers.dense(inputs=inputs, units=self.num_features),
create_scope_now_=True)
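A standalone sketch of the same pattern outside this class, showing why the templates are convenient: the wrapped cell and output layer can be called repeatedly without manual variable-reuse bookkeeping. TF 1.x is assumed and the sizes and names are illustrative only:

import tensorflow as tf

cell = tf.nn.rnn_cell.LSTMCell(num_units=16)
cell_run = tf.make_template(name_="lstm_cell", func_=cell, create_scope_now_=True)
predict = tf.make_template(
    name_="predict_from_lstm_output",
    func_=lambda inputs: tf.layers.dense(inputs=inputs, units=1),
    create_scope_now_=True)

inputs = tf.zeros([8, 4])
state = cell.zero_state(batch_size=8, dtype=tf.float32)
output, state = cell_run(inputs=inputs, state=state)  # first call creates the variables
output, state = cell_run(inputs=inputs, state=state)  # later calls reuse them
prediction = predict(output)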
def reuse(scope):
"""
A decorator for transparent reuse of tensorflow
`Variables <https://www.tensorflow.org/api_docs/python/tf/Variable>`_ in a
function. The decorated function will automatically create variables the
first time they are called and reuse them thereafter.
.. note::
This decorator is internally implemented by tensorflow's
:func:`make_template` function. See `its doc
<https://www.tensorflow.org/api_docs/python/tf/make_template>`_
for requirements on the target function.
:param scope: A string. The scope name passed to tensorflow
`variable_scope()
<https://www.tensorflow.org/api_docs/python/tf/variable_scope>`_.
"""
return lambda f: tf.make_template(scope, f)
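A sketch of how this decorator might be applied; the linear function below is hypothetical and TF 1.x is assumed:

import tensorflow as tf

@reuse("linear")
def linear(x):
    # `reuse` wraps the whole function in a single tf.make_template, so the
    # weight is created on the first call and shared by every later call.
    w = tf.get_variable("w", shape=[4, 10])
    return tf.matmul(x, w)

h1 = linear(tf.zeros([32, 4]))  # creates linear/w
h2 = linear(tf.zeros([32, 4]))  # reuses linear/w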
def templatemethod(name_):
"""This decorator wraps a method with `tf.make_template`. For example,
@templatemethod("my_method")
def my_method():
# Create variables
"""
def template_decorator(func):
"""Inner decorator function"""
def func_wrapper(*args, **kwargs):
"""Inner wrapper function"""
templated_func = tf.make_template(name_, func)
return templated_func(*args, **kwargs)
return func_wrapper
return template_decorator
def __init__(self, summaries=None, summary_labels=None):
"""
Creates a new optimizer instance.
"""
self.variables = dict()
self.summaries = summaries
if summary_labels is None:
self.summary_labels = dict()
else:
self.summary_labels = summary_labels
def custom_getter(getter, name, registered=False, **kwargs):
variable = getter(name=name, registered=True, **kwargs)
if not registered:
assert not kwargs.get('trainable', False)
self.variables[name] = variable
return variable
# TensorFlow function
self.step = tf.make_template(
name_='step',
func_=self.tf_step,
custom_getter_=custom_getter
)
def __init__(self, max_iterations, unroll_loop=False):
"""
Creates a new iterative solver instance.
Args:
max_iterations: Maximum number of iterations before termination.
unroll_loop: Unrolls the TensorFlow while loop if true.
"""
assert max_iterations >= 0
self.max_iterations = max_iterations
assert isinstance(unroll_loop, bool)
self.unroll_loop = unroll_loop
super(Iterative, self).__init__()
# TensorFlow functions
self.initialize = tf.make_template(name_='initialize', func_=self.tf_initialize)
self.step = tf.make_template(name_='step', func_=self.tf_step)
self.next_step = tf.make_template(name_='next-step', func_=self.tf_next_step)
def __init__(self, scope='exploration', summary_labels=None):
self.summary_labels = set(summary_labels or ())
self.variables = dict()
self.summaries = list()
def custom_getter(getter, name, registered=False, **kwargs):
variable = getter(name=name, registered=True, **kwargs)
if not registered:
self.variables[name] = variable
return variable
self.explore = tf.make_template(
name_=(scope + '/explore'),
func_=self.tf_explore,
custom_getter_=custom_getter
)
def __init__(self, scope='preprocessor', summary_labels=None):
self.summary_labels = set(summary_labels or ())
self.variables = dict()
self.summaries = list()
def custom_getter(getter, name, registered=False, **kwargs):
variable = getter(name=name, registered=True, **kwargs)
if not registered:
self.variables[name] = variable
return variable
self.process = tf.make_template(
name_=(scope + '/process'),
func_=self.tf_process,
custom_getter_=custom_getter
)
def initialize(self, custom_getter):
super(QDemoModel, self).initialize(custom_getter=custom_getter)
# Demonstration loss
self.fn_demo_loss = tf.make_template(
name_='demo-loss',
func_=self.tf_demo_loss,
custom_getter_=custom_getter
)
# Demonstration optimization
self.fn_demo_optimization = tf.make_template(
name_='demo-optimization',
func_=self.tf_demo_optimization,
custom_getter_=custom_getter
)
def initialize(self, custom_getter):
super(DistributionModel, self).initialize(custom_getter)
# Network
self.network = Network.from_spec(
spec=self.network_spec,
kwargs=dict(summary_labels=self.summary_labels)
)
# Distributions
self.distributions = self.create_distributions()
# Network internals
self.internals_input.extend(self.network.internals_input())
self.internals_init.extend(self.network.internals_init())
# KL divergence function
self.fn_kl_divergence = tf.make_template(
name_=(self.scope + '/kl-divergence'),
func_=self.tf_kl_divergence,
custom_getter_=custom_getter
)
def templatemethod(name_):
"""This decorator wraps a method with `tf.make_template`. For example,
@templatemethod("my_method")
def my_method():
# Create variables
"""
def template_decorator(func):
"""Inner decorator function"""
def func_wrapper(*args, **kwargs):
"""Inner wrapper function"""
templated_func = tf.make_template(name_, func)
return templated_func(*args, **kwargs)
return func_wrapper
return template_decorator
def __init__(self, name):
"""Performs the initialisation necessary for all AbstractModule instances.
Every subclass of AbstractModule must begin their constructor with a call to
this constructor, i.e. `super(MySubModule, self).__init__(name=name)`.
Avoid instantiating sub-modules in __init__ where possible, as they will not
be defined under the module's scope. Instead, instantiate sub-modules in
`build`.
Args:
name: Name of this module. Used to construct the Templated build function.
Raises:
ValueError: If name is not specified.
"""
if not isinstance(name, string_types):
raise ValueError("Name must be a string.")
self._is_connected = False
self._template = tf.make_template(name, self._build,
create_scope_now_=True)
# Update __call__ and the object docstrings to enable better introspection
self.__doc__ = self._build.__doc__
self.__call__.__func__.__doc__ = self._build.__doc__
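A minimal sketch of a hypothetical subclass of this module pattern. It assumes the class's __call__ (not shown in the snippet) forwards to self._template, and TF 1.x:

import tensorflow as tf

class Linear(AbstractModule):
    """Hypothetical sub-module: its variables are created lazily in _build."""

    def __init__(self, output_size, name="linear"):
        super(Linear, self).__init__(name=name)
        self._output_size = output_size

    def _build(self, inputs):
        # _build runs inside the module's template, so calling the module
        # several times reuses the same "w" instead of creating new ones.
        input_size = int(inputs.shape[-1])
        w = tf.get_variable("w", shape=[input_size, self._output_size])
        return tf.matmul(inputs, w)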
def __init__(self, name):
"""
Initialize the module. Each subclass must call this constructor with a name.
Args:
name: Name of this module. Used for `tf.make_template`.
"""
self.name = name
self._template = tf.make_template(name, self._build, create_scope_now_=True)
# Docstrings for the class should be the docstring for the _build method
self.__doc__ = self._build.__doc__
# pylint: disable=E1101
self.__call__.__func__.__doc__ = self._build.__doc__
def __init__(self, arch, is_training=False):
'''
Variational auto-encoder implemented in 2D convolutional neural nets
Input:
`arch`: network architecture (`dict`)
`is_training`: currently unused; kept for historical reasons (for `BatchNorm`)
'''
self.arch = arch
self._sanity_check()
self.is_training = is_training
with tf.name_scope('SpeakerRepr'):
self.y_emb = self._l2_regularized_embedding(
self.arch['y_dim'],
self.arch['z_dim'],
'y_embedding')
self._generate = tf.make_template(
'Generator',
self._generator)
self._encode = tf.make_template(
'Encoder',
self._encoder)
self.generate = self.decode # for VAE-GAN extension
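A sketch of the kind of arch dict this constructor expects. Only y_dim and z_dim are read in the code shown above; every other entry, and the class name in the commented-out line, is a hypothetical placeholder:

arch = {
    'y_dim': 10,   # e.g. number of speaker identities (cf. the 'SpeakerRepr' scope)
    'z_dim': 64,   # latent dimensionality
    # ... further entries consumed by _sanity_check(), _generator() and
    #     _encoder(), which are not shown in this snippet.
}
# model = ConvVAE(arch, is_training=True)  # hypothetical class name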
def __init__(self, arch, is_training=False):
self.arch = arch
self.is_training = is_training
self._decode = tf.make_template('Decoder', self._generator)
self._encode = tf.make_template('Encoder', self._encoder)
def __init__(self, name):
"""
Initialize the module. Each subclass must call this constructor with a name.
Args:
name: Name of this module. Used for `tf.make_template`.
"""
self.name = name
self._template = tf.make_template(name, self._build, create_scope_now_=True)
# Docstrings for the class should be the docstring for the _build method
self.__doc__ = self._build.__doc__
# pylint: disable=E1101
self.__call__.__func__.__doc__ = self._build.__doc__
def __init__(self, name):
"""
Initialize the module. Each subclass must call this constructor with a name.
Args:
name: Name of this module. Used for `tf.make_template`.
"""
self.name = name
self._template = tf.make_template(
name, self._build, create_scope_now_=True)
# Docstrings for the class should be the docstring for the _build method
self.__doc__ = self._build.__doc__
self.__call__.__func__.__doc__ = self._build.__doc__
def test_variable_reuse_with_template(self):
tmpl1 = tf.make_template('test',
tf.contrib.layers.legacy_fully_connected,
num_output_units=8)
output1 = tmpl1(self.input)
output2 = tmpl1(self.input)
with tf.Session() as sess:
tf.initialize_all_variables().run()
out_value1, out_value2 = sess.run([output1, output2])
self.assertAllClose(out_value1, out_value2)
def test_variable_reuse_with_template(self):
tmpl1 = tf.make_template('test',
tf.contrib.layers.legacy_fully_connected,
num_output_units=8)
output1 = tmpl1(self.input)
output2 = tmpl1(self.input)
with tf.Session() as sess:
tf.global_variables_initializer().run()
out_value1, out_value2 = sess.run([output1, output2])
self.assertAllClose(out_value1, out_value2)
def __init__(self):
"""
Creates a new solver instance.
"""
# TensorFlow function
self.solve = tf.make_template(name_='solver', func_=self.tf_solve)
def __init__(self, scope='baseline', summary_labels=None):
self.summary_labels = set(summary_labels or ())
self.variables = dict()
self.all_variables = dict()
self.summaries = list()
def custom_getter(getter, name, registered=False, **kwargs):
variable = getter(name=name, registered=True, **kwargs)
if not registered:
self.all_variables[name] = variable
if kwargs.get('trainable', True) and not name.startswith('optimization'):
self.variables[name] = variable
if 'variables' in self.summary_labels:
summary = tf.summary.histogram(name=name, values=variable)
self.summaries.append(summary)
return variable
self.predict = tf.make_template(
name_=(scope + '/predict'),
func_=self.tf_predict,
custom_getter_=custom_getter
)
self.loss = tf.make_template(
name_=(scope + '/loss'),
func_=self.tf_loss,
custom_getter_=custom_getter
)
self.regularization_loss = tf.make_template(
name_=(scope + '/regularization-loss'),
func_=self.tf_regularization_loss,
custom_getter_=custom_getter
)
def __init__(self, num_internals=0, scope='layer', summary_labels=None):
self.num_internals = num_internals
self.summary_labels = set(summary_labels or ())
self.named_tensors = dict()
self.variables = dict()
self.all_variables = dict()
self.summaries = list()
def custom_getter(getter, name, registered=False, **kwargs):
variable = getter(name=name, registered=True, **kwargs)
if not registered:
self.all_variables[name] = variable
if kwargs.get('trainable', True) and not name.startswith('optimization'):
self.variables[name] = variable
if 'variables' in self.summary_labels:
summary = tf.summary.histogram(name=name, values=variable)
self.summaries.append(summary)
return variable
self.apply = tf.make_template(
name_=(scope + '/apply'),
func_=self.tf_apply,
custom_getter_=custom_getter
)
self.regularization_loss = tf.make_template(
name_=(scope + '/regularization-loss'),
func_=self.tf_regularization_loss,
custom_getter_=custom_getter
)
def __init__(self, scope='network', summary_labels=None):
self.summary_labels = set(summary_labels or ())
self.variables = dict()
self.all_variables = dict()
self.summaries = list()
def custom_getter(getter, name, registered=False, **kwargs):
variable = getter(name=name, registered=True, **kwargs)
if not registered:
self.all_variables[name] = variable
if kwargs.get('trainable', True) and not name.startswith('optimization'):
self.variables[name] = variable
if 'variables' in self.summary_labels:
summary = tf.summary.histogram(name=name, values=variable)
self.summaries.append(summary)
return variable
self.apply = tf.make_template(
name_=(scope + '/apply'),
func_=self.tf_apply,
custom_getter_=custom_getter
)
self.regularization_loss = tf.make_template(
name_=(scope + '/regularization-loss'),
func_=self.tf_regularization_loss,
custom_getter_=custom_getter
)
def initialize(self, custom_getter):
super(PGProbRatioModel, self).initialize(custom_getter)
# Model comparison functions
self.reference = tf.make_template(
name_='reference',
func_=self.tf_reference,
custom_getter_=custom_getter
)
self.compare = tf.make_template(
name_='compare',
func_=self.tf_compare,
custom_getter_=custom_getter
)
def __init__(self, name):
"""
Initialize the module. Each subclass must call this constructor with a name.
Args:
name: Name of this module. Used for `tf.make_template`.
"""
self.name = name
self._template = tf.make_template(name, self._build, create_scope_now_=True)
# Docstrings for the class should be the docstring for the _build method
self.__doc__ = self._build.__doc__
# pylint: disable=E1101
self.__call__.__func__.__doc__ = self._build.__doc__
def __init__(self, arch, is_training=False):
self.arch = arch
self._sanity_check()
self.is_training = is_training
self._generate = tf.make_template(
'Generator',
self._generator)
self._discriminate = tf.make_template(
'Discriminator',
self._discriminator)
self._encode = tf.make_template(
'Encoder',
self._encoder)
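One common reason (not stated in the snippet, but standard GAN practice) for giving the generator, discriminator and encoder separate templates is that their variables land under distinct scopes, so each optimizer can be handed its own variable list. A hedged sketch, assuming the templates above have already been called at least once:

import tensorflow as tf

g_vars = [v for v in tf.trainable_variables() if v.name.startswith('Generator')]
d_vars = [v for v in tf.trainable_variables() if v.name.startswith('Discriminator')]
e_vars = [v for v in tf.trainable_variables() if v.name.startswith('Encoder')]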