import tensorflow as tf  # TF 1.x API (tf.variable_scope, tf.cond, ExponentialMovingAverage)

def make_bn(input, phase, axis=-1, epsilon=0.001, mask=None, num_updates=None, name=None):
    """Batch normalization that tracks moving averages of the batch statistics.

    `phase` is a boolean tensor: True selects the batch statistics (training),
    False selects the exponential moving averages (inference).
    """
    # GraphCNNGlobal, make_variable and make_bias_variable are helpers defined
    # elsewhere in this repository.
    default_decay = GraphCNNGlobal.BN_DECAY
    with tf.variable_scope(name, default_name='BatchNorm'):
        input_size = input.get_shape()[axis].value
        if axis == -1:
            axis = len(input.get_shape()) - 1
        # Compute moments over every axis except the feature axis.
        axis_arr = [i for i in range(len(input.get_shape())) if i != axis]
        if mask is None:
            batch_mean, batch_var = tf.nn.moments(input, axis_arr)
        else:
            batch_mean, batch_var = tf.nn.weighted_moments(input, axis_arr, mask)
        gamma = make_variable('gamma', input_size, initializer=tf.constant_initializer(1))
        beta = make_bias_variable('bias', input_size)
        ema = tf.train.ExponentialMovingAverage(decay=default_decay, num_updates=num_updates)

        def mean_var_with_update():
            # Update the moving averages, then pass the batch statistics through.
            ema_apply_op = ema.apply([batch_mean, batch_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)

        mean, var = tf.cond(phase, mean_var_with_update,
                            lambda: (ema.average(batch_mean), ema.average(batch_var)))
        # Use the epsilon argument rather than the hard-coded 1e-3.
        return tf.nn.batch_normalization(input, mean, var, beta, gamma, epsilon)
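For reference, here is a minimal usage sketch in TF 1.x graph mode; the placeholder names, the feature dimension, and the use of a global step for `num_updates` are illustrative assumptions, not part of the original code:

is_training = tf.placeholder(tf.bool, name='phase')   # hypothetical: True during training steps
features = tf.placeholder(tf.float32, [None, 64])     # hypothetical input batch
global_step = tf.train.get_or_create_global_step()    # slows the EMA decay early in training
normed = make_bn(features, is_training, num_updates=global_step)

Passing a step counter as `num_updates` makes the moving average use the smaller of `decay` and `(1 + num_updates) / (10 + num_updates)`, so the running statistics adapt faster at the start of training.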