def __init__(self, input_type=None, output_type=None, name_or_scope=None):
  """Creates the layer.

  Args:
    input_type: A type.
    output_type: A type.
    name_or_scope: A string or variable scope. If a string, a new variable
      scope will be created by calling
      [`create_variable_scope`](#create_variable_scope), with defaults
      inherited from the current variable scope. If no caching device is set,
      it will be set to `lambda op: op.device`. This is because `tf.while` can
      be very inefficient if the variables it uses are not cached locally.
  """
  if name_or_scope is None: name_or_scope = type(self).__name__
  if isinstance(name_or_scope, tf.VariableScope):
    self._vscope = name_or_scope
    name = str(self._vscope.name)
  elif isinstance(name_or_scope, six.string_types):
    self._vscope = create_variable_scope(name_or_scope)
    name = name_or_scope
  else:
    raise TypeError('name_or_scope must be a tf.VariableScope or a string: '
                    '%s' % (name_or_scope,))
  if self._vscope.caching_device is None:
    self._vscope.set_caching_device(lambda op: op.device)
  super(Layer, self).__init__(input_type, output_type, name)
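  # Fall back to default constructor metadata unless these attributes have
  # already been set (e.g. by a subclass constructor).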
  if not hasattr(self, '_constructor_name'):
    self._constructor_name = '__.%s' % self.__class__.__name__
  if not hasattr(self, '_constructor_args'):
    self._constructor_args = None
  if not hasattr(self, '_constructor_kwargs'):
    self._constructor_kwargs = None
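
# A minimal usage sketch, not part of the original source: it assumes this
# __init__ sits on a `Layer` base class in a module that already has
# `import tensorflow as tf`, `import six`, and a `create_variable_scope`
# helper. The subclass name `FullyConnected` and its `num_units` argument
# are hypothetical.
#
# class FullyConnected(Layer):
#   def __init__(self, num_units, name_or_scope=None):
#     super(FullyConnected, self).__init__(name_or_scope=name_or_scope)
#     self._num_units = num_units
#
# fc = FullyConnected(64)                        # scope defaults to 'FullyConnected'
# fc2 = FullyConnected(64, name_or_scope='fc2')  # new scope created from a string
# with tf.variable_scope('shared') as vs:
#   pass
# fc3 = FullyConnected(64, name_or_scope=vs)     # pass an existing tf.VariableScope directly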