# Keras 1.x imports (the `initializations` module was renamed `initializers` in Keras 2)
from keras import initializations
from keras.engine import InputSpec, Layer


class AttenLayer(Layer):
    def __init__(self, h, output_dim,
                 init='glorot_uniform', **kwargs):
        self.init = initializations.get(init)
        self.h = h
        self.output_dim = output_dim
        # regularizers and dropout have been removed from this layer
        super(AttenLayer, self).__init__(**kwargs)
        # an InputSpec with ndim=3 is needed so the layer accepts
        # 3-D input of shape (samples, timesteps, features)
        self.input_spec = [InputSpec(ndim=3)]
Source file: attentionlayer.py
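For context, a minimal usage sketch of where a layer with this constructor would sit in a model. It assumes the rest of the AttenLayer class (its build and call methods, not shown in this snippet) is implemented, and it uses the Keras 1.x Sequential API; the layer sizes, input shape, and constructor arguments below are illustrative only.

# Hypothetical usage sketch; assumes the full AttenLayer implementation
# and the Keras 1.x API. All sizes below are illustrative.
from keras.models import Sequential
from keras.layers import LSTM, Dense

model = Sequential()
# return_sequences=True keeps the whole sequence, so AttenLayer receives the
# 3-D input (samples, timesteps, features) required by its InputSpec(ndim=3)
model.add(LSTM(64, return_sequences=True, input_shape=(30, 128)))
model.add(AttenLayer(h=64, output_dim=64))  # h and output_dim values are illustrative
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='adam', loss='binary_crossentropy')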