import numpy as np
import tensorflow as tf
from tensorflow.contrib import layers


def fully_connected(inputs, outputs_dim, he_init=False, activation_fn=None, regularization_scale=0.0):
    x = layers.flatten(inputs)
    fan_in = x.get_shape()[-1].value
    fan_out = outputs_dim
    avg_fan = (fan_in + fan_out) / 2
    # He initialization doubles the variance to compensate for ReLU units;
    # otherwise fall back to Xavier-style 1/avg_fan.
    if he_init:
        var = 2.0 / avg_fan
    else:
        var = 1.0 / avg_fan
    # For a zero-mean uniform distribution on [-b, b]: var = (2b)**2 / 12, so b = sqrt(3 * var).
    upper_bound = np.sqrt(12.0 * var) * 0.5
    weights_initializer = tf.random_uniform_initializer(-upper_bound, upper_bound, seed=None, dtype=tf.float32)
    weights_regularizer = layers.l2_regularizer(scale=regularization_scale)
    return layers.fully_connected(x, outputs_dim,
                                  weights_initializer=weights_initializer,
                                  activation_fn=activation_fn,
                                  weights_regularizer=weights_regularizer)
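
As a quick sanity check, the wrapper can be called like any other layer factory. The shapes and layer sizes below are illustrative only and assume a TF 1.x graph with placeholders; they are not part of the original snippet.

# Illustrative usage (hypothetical shapes, TF 1.x style graph):
images = tf.placeholder(tf.float32, shape=[None, 28, 28, 1])
hidden = fully_connected(images, 256, he_init=True, activation_fn=tf.nn.relu,
                         regularization_scale=1e-4)
logits = fully_connected(hidden, 10, he_init=False, activation_fn=None)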