def __init__(self, input_size, output_size, activation):
    """Fully-connected deterministic layer.

    Stores the layer sizes, resolves the activation function from its
    name, and creates the trainable weight/bias variables.

    Parameters
    ----------
    input_size : int
        Number of input units (rows of the weight matrix).
    output_size : int
        Number of output units (columns of the weight matrix and
        length of the bias vector).
    activation : str
        One of: 'softplus', 'relu', 'sigmoid', 'tanh', 'linear',
        'softmax'.

    Raises
    ------
    ValueError
        If `activation` is not a recognized name.  (The original code
        silently left ``self._activation`` unset in that case, which
        deferred the failure to an AttributeError at call time.)
    """
    self.input_size = input_size
    self.output_size = output_size
    # Keep the human-readable activation name for introspection/logging.
    self.name = activation
    # Dispatch table replaces the original chain of six independent
    # `if` statements: one lookup, and unknown names fail fast.
    activations = {
        'softplus': tf.nn.softplus,
        'relu': tf.nn.relu,
        'sigmoid': tf.sigmoid,
        'tanh': tf.tanh,
        'linear': lambda x: x,  # identity: no nonlinearity
        'softmax': tf.nn.softmax,
    }
    try:
        self._activation = activations[activation]
    except KeyError:
        raise ValueError(
            "unknown activation %r; expected one of %s"
            % (activation, sorted(activations))
        ) from None
    # Trainable parameters: weight matrix W (input_size x output_size)
    # and bias b initialized to zeros.  `init_weights` is a project
    # helper defined elsewhere in this file/package.
    W = tf.Variable(init_weights(input_size, output_size))
    b = tf.Variable(tf.zeros([output_size]))
    self.params = [W, b]
deterministic_layer.py — source file
python
Views: 25
Favorites: 0
Likes: 0
Comments: 0
Comment list
Table of contents