def __init__(self, d_model, d_k, d_v, sequence_length, h, batch_size, Q, K_s, layer_index,
             decoder_sent_length, type="attention", mask=None, dropout_keep_prob=None):
    """
    :param d_model: dimensionality of the model (embedding size)
    :param d_k: dimensionality of the keys
    :param d_v: dimensionality of the values
    :param sequence_length: length of the input (encoder-side) sequence
    :param h: number of attention heads
    :param batch_size: batch size
    :param Q: query, taken from the decoder
    :param K_s: keys/values, taken from the encoder output
    :param layer_index: index of the current layer in the stack
    :param decoder_sent_length: length of the decoder-side sentence
    :param type: attention variant to use (default "attention")
    :param mask: optional mask applied to the attention scores
    :param dropout_keep_prob: keep probability for dropout
    """
    # Initialize the shared multi-head attention parameters in the base class.
    super(AttentionEncoderDecoder, self).__init__(d_model, d_k, d_v, sequence_length, h, batch_size)
    self.Q = Q
    self.K_s = K_s
    self.layer_index = layer_index
    self.type = type
    self.decoder_sent_length = decoder_sent_length
    self.initializer = tf.random_normal_initializer(stddev=0.1)
    self.mask = mask
    self.dropout_keep_prob = dropout_keep_prob
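
For context, a minimal construction sketch follows. Only the constructor signature comes from the source file; the hyper-parameter values, placeholder shapes, and tensor names below are illustrative assumptions (TensorFlow 1.x style, matching tf.random_normal_initializer above).

# Minimal usage sketch (not from the original file): values and shapes are assumptions.
import tensorflow as tf

d_model, d_k, d_v, h = 512, 64, 64, 8
sequence_length, decoder_sent_length, batch_size = 30, 30, 8

# Q comes from the decoder, K_s from the encoder output; both shaped [batch, length, d_model].
Q = tf.placeholder(tf.float32, [batch_size, decoder_sent_length, d_model], name="decoder_query")
K_s = tf.placeholder(tf.float32, [batch_size, sequence_length, d_model], name="encoder_output")

cross_attention = AttentionEncoderDecoder(d_model, d_k, d_v, sequence_length, h, batch_size,
                                          Q, K_s, layer_index=0,
                                          decoder_sent_length=decoder_sent_length,
                                          dropout_keep_prob=0.9)

The object then carries everything needed to compute attention between the decoder queries and the encoder outputs; the method that actually runs that computation is defined elsewhere in the class and is not shown here.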
Source file: a2_attention_between_enc_dec.py