def activation_based_init(nonlinearity):
    """Return a weight initializer matched to the given nonlinearity.

    Args:
        nonlinearity: An activation function object (e.g. ``tf.nn.relu``,
            ``tf.nn.elu``, or the module-level ``selu``). Compared by
            identity against known activations.

    Returns:
        A TensorFlow initializer:
            * ReLU  -> Xavier (Glorot) initializer.
            * ELU   -> variance-scaling initializer (library defaults).
            * SELU  -> variance-scaling with ``factor=1.0, mode='FAN_IN'``
              (lecun_normal-style, as required by self-normalizing nets).
            * otherwise -> uniform unit scaling (fallback default).
    """
    # Fallback for activations we don't recognize.
    init = tf.uniform_unit_scaling_initializer()
    # `is` (not `==`) — we are matching the specific function object.
    if nonlinearity is tf.nn.relu:
        init = tf.contrib.layers.xavier_initializer()
    elif nonlinearity is tf.nn.elu:
        init = tf.contrib.layers.variance_scaling_initializer()
    elif nonlinearity is selu:
        # SELU needs variance 1/fan_in to keep activations self-normalizing.
        init = tf.contrib.layers.variance_scaling_initializer(
            factor=1.0, mode='FAN_IN')
    return init