def make_fc_layer(
        self, inp_lyr, name_fc_lyr,
        name_w, shp_w, name_b=None, shp_b=None,
        initializer=None
):
    """Build a fully-connected layer: matmul + bias, optionally batch-normed.

    Args:
        inp_lyr: input tensor to the layer.
        name_fc_lyr: name for the resulting op (matmul op gets
            `name_fc_lyr + '_matmul'`).
        name_w, shp_w: name and shape passed to `make_wbkernels` for the
            weight matrix.
        name_b, shp_b: name and shape passed to `make_wbkernels` for the
            bias vector.
        initializer: weight initializer; defaults to Xavier (normal) when
            None.

    Returns:
        The layer output tensor (after batch norm when
        `self.use_batch_norm` is set).

    TODO: regularize batch norm params?
    """
    # Default lazily: a call in the signature would run xavier_init() at
    # class-definition time and share one initializer object across every
    # invocation of this method.
    if initializer is None:
        initializer = xavier_init(uniform=False)
    W = self.make_wbkernels(name_w, shp_w, initializer=initializer)
    b = self.make_wbkernels(
        name_b, shp_b, initializer=tf.zeros_initializer()
    )
    fc_lyr = tf.nn.bias_add(
        tf.matmul(inp_lyr, W, name=name_fc_lyr+'_matmul'), b,
        data_format=self.data_format, name=name_fc_lyr,
    )
    if self.use_batch_norm:
        # NOTE(review): batch_norm with center=True learns its own shift,
        # which makes the explicit bias `b` above redundant — consider
        # skipping bias_add when normalizing. Left as-is to preserve the
        # existing variable set / checkpoints.
        fc_lyr = tf.contrib.layers.batch_norm(
            fc_lyr, decay=self.batch_norm_decay, center=True, scale=True,
            data_format=self.data_format, is_training=self.is_training
        )
    return fc_lyr