def __call__(self, inputs, is_train=True, is_debug=False):
    """Build the network graph: three conv3d stages followed by a linear head.

    Args:
        inputs: input tensor (or array convertible to one); exact shape is
            dictated by ``self.configs.conv_info`` — TODO confirm with caller.
        is_train: passed to ``conv3d`` and to dropout's ``training`` flag.
        is_debug: when True (and on the first, non-reused build), print each
            scope's output tensor for inspection.

    Returns:
        A tuple ``(tf.nn.sigmoid(logits), logits)`` where ``logits`` has
        shape ``[batch_size, 1]``.

    Side effects:
        Populates ``self.net['conv{1,2,3}_outputs']`` and
        ``self.net['fc_outputs']``, sets ``self.reuse = True`` so later calls
        share variables, and caches trainable variables in ``self.variables``.
    """
    self.is_train = is_train
    self.is_debug = is_debug
    outputs = tf.convert_to_tensor(inputs)  # Check if necessary
    # assert input shape
    with tf.variable_scope(self.name, reuse=self.reuse) as scope:
        print_message(scope.name)

        # The three conv stages are identical except for scope name and
        # layer config, so build them in a loop instead of copy-pasting.
        conv_infos = [self.configs.conv_info.l1,
                      self.configs.conv_info.l2,
                      self.configs.conv_info.l3]
        for idx, conv_info in enumerate(conv_infos, start=1):
            with tf.variable_scope('conv{}'.format(idx)) as vscope:
                outputs = conv3d(outputs, [self.batch_size] + conv_info,
                                 is_train=self.is_train, with_w=True)
                if is_debug and not self.reuse:
                    print(vscope.name, outputs)
                outputs = tf.layers.dropout(outputs, rate=self.configs.dropout,
                                            training=self.is_train, name='outputs')
                self.net['conv{}_outputs'.format(idx)] = outputs

        with tf.variable_scope('fc') as vscope:
            # Flatten to [batch, prod(last conv layer dims)] for the linear head.
            fc_dim = reduce(mul, self.configs.conv_info.l3, 1)
            outputs = tf.reshape(outputs, [self.batch_size, fc_dim], name='reshape')
            outputs = linear(outputs, 1)
            if is_debug and not self.reuse:
                print(vscope.name, outputs)
            self.net['fc_outputs'] = outputs

    # After the first build, subsequent calls reuse the same variables.
    self.reuse = True
    self.variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                       scope=self.name)
    return tf.nn.sigmoid(outputs), outputs
# NOTE(review): removed non-code page-scrape artifacts here
# ("评论列表" / "文章目录" — CSDN blog navigation text, not part of the program).