layer_normalization_RNN.py source code


Project: New_Layers-Keras-Tensorflow (author: WeidiXie)
```python
# Only __init__ appears in this snippet; the imports and class statement are
# reconstructed so it parses. The base class is not shown here; Recurrent is
# an assumption that matches the Keras 1.x constructor pattern used below.
from keras import activations, initializations, regularizers
from keras.layers.recurrent import Recurrent


class LN_SimpleRNN(Recurrent):

    def __init__(self, output_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 activation='tanh', beta_init='zero', gamma_init='one',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 gamma_regularizer=None, beta_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.activation = activations.get(activation)
        # Initializers: input kernel (W), recurrent kernel (U), and the
        # layer-normalization shift (beta) and scale (gamma) parameters.
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.beta_init = initializations.get(beta_init)
        self.gamma_init = initializations.get(gamma_init)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.gamma_regularizer = regularizers.get(gamma_regularizer)
        self.beta_regularizer = regularizers.get(beta_regularizer)
        # Dropout rates on the input (W) and recurrent (U) connections;
        # either one being nonzero makes the layer learning-phase dependent.
        self.dropout_W = dropout_W
        self.dropout_U = dropout_U
        # Small constant added to the variance for numerical stability.
        self.epsilon = 1e-5
        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(LN_SimpleRNN, self).__init__(**kwargs)
```
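
For context, a minimal usage sketch, assuming Keras 1.x and that `LN_SimpleRNN` is called like the stock `SimpleRNN` layer. Only `output_dim`, `dropout_W`, and `dropout_U` come from the constructor above; the model around it (input shape, `Dense` head, optimizer) is purely illustrative:

```python
# Assumptions: Keras 1.x Sequential API; LN_SimpleRNN accepts the same call
# pattern as the stock SimpleRNN layer. The shapes below are made up.
from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
# 64 recurrent units over sequences of 20 timesteps with 32 features each;
# dropout_W / dropout_U drop input and recurrent connections respectively.
model.add(LN_SimpleRNN(64, input_shape=(20, 32),
                       dropout_W=0.2, dropout_U=0.2))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='adam', loss='binary_crossentropy')
```

The appeal of layer normalization here is that the beta/gamma statistics are computed per sample at each timestep, so, unlike batch normalization, the layer needs no running averages across a minibatch of variable-length sequences.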