def create_base(self, inputs, is_training):
    """Build the forward (inference) part of the model.

    No gradients, losses or summaries are created here — only the layer
    graph: two ReLU fully-connected layers with dropout, then a linear
    projection to class scores.

    Args:
        inputs: Input tensor fed to the first fully-connected layer.
            (Assumed shape (batch, feature_dim) — TODO confirm with caller.)
        is_training: Python bool or bool tensor; dropout is applied only
            when true.

    Returns:
        A `(logits, predicted_classes)` tuple: raw per-class scores of
        shape (batch, num_classes), and the int32 argmax class index per
        example (op name 'y').
    """
    with tf.name_scope('Model'):
        # All fully-connected layers default to ReLU activation unless
        # explicitly overridden (the final logits layer below).
        with slim.arg_scope([slim.fully_connected], activation_fn=tf.nn.relu):
            # First hidden layer + dropout (active only while training).
            net = slim.fully_connected(
                inputs, self._config.mlp_params['hidden_sizes'][0], scope='fc1')
            net = slim.dropout(
                net, self._config.keep_prob, is_training=is_training,
                scope='dropout1')
            # Second hidden layer + dropout.
            net = slim.fully_connected(
                net, self._config.mlp_params['hidden_sizes'][1], scope='fc2')
            net = slim.dropout(
                net, self._config.keep_prob, is_training=is_training,
                scope='dropout2')
            # Final linear layer: no activation, raw logits.
            logits = slim.fully_connected(
                net, self._config.num_classes, activation_fn=None, scope='fc3')
        with tf.name_scope('output'):
            # tf.argmax's `dimension` kwarg and tf.to_int32 are deprecated;
            # `axis` and tf.cast are the supported equivalents (same result).
            predicted_classes = tf.cast(
                tf.argmax(logits, axis=1), tf.int32, name='y')
    return logits, predicted_classes