def build(self, input_shapes):
    '''
    build defines the attention weights:
        U_a: transformation matrix from the input x into the attention space
        U_m: transformation matrix from attention_vec into the attention space
        U_s: transformation matrix from the attention activation to the softmax scores
    '''
    input_shape = input_shapes[0]
    super(AttentionLSTM, self).build(input_shape)
    self.input_spec = [InputSpec(shape=input_shapes[0]),
                       InputSpec(shape=input_shapes[1])]
    # attention_dim = self.input_spec[1].shape[1]
    attention_dim = self.att_dim
    input_dim = input_shape[2]
    # attention weights
    self.U_a = self.inner_init((input_dim, self.output_dim),
                               name='{}_U_a'.format(self.name))
    self.b_a = K.zeros((self.output_dim,), name='{}_b_a'.format(self.name))
    self.U_m = self.inner_init((attention_dim, self.output_dim),
                               name='{}_U_m'.format(self.name))
    self.b_m = K.zeros((self.output_dim,), name='{}_b_m'.format(self.name))
    if self.single_attention_param:
        self.U_s = self.inner_init((self.output_dim, 1),
                                   name='{}_U_s'.format(self.name))
        self.b_s = K.zeros((1,), name='{}_b_s'.format(self.name))
    else:
        self.U_s = self.inner_init((self.output_dim, self.output_dim),
                                   name='{}_U_s'.format(self.name))
        self.b_s = K.zeros((self.output_dim,), name='{}_b_s'.format(self.name))
    self.trainable_weights += [self.U_a, self.U_m, self.U_s,
                               self.b_a, self.b_m, self.b_s]
    if self.initial_weights is not None:
        self.set_weights(self.initial_weights)
        del self.initial_weights
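
# --- Not part of the original layer: a minimal, self-contained NumPy sketch
# --- (an assumption based only on the weight shapes built above, not the
# --- author's actual step/call code) of how U_a, U_m and U_s are typically
# --- consumed. x and attention_vec are projected into the attention space,
# --- combined, and turned into softmax scores that reweight the LSTM output h.
import numpy as np

def _softmax(z):
    e = np.exp(z - z.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)

batch, input_dim, attention_dim, output_dim = 2, 8, 5, 4  # hypothetical sizes
rng = np.random.default_rng(0)

x = rng.normal(size=(batch, input_dim))                  # input at one timestep
attention_vec = rng.normal(size=(batch, attention_dim))  # second input to the layer
h = rng.normal(size=(batch, output_dim))                 # LSTM output at this step

U_a = rng.normal(size=(input_dim, output_dim));     b_a = np.zeros(output_dim)
U_m = rng.normal(size=(attention_dim, output_dim)); b_m = np.zeros(output_dim)
U_s = rng.normal(size=(output_dim, output_dim));    b_s = np.zeros(output_dim)

m = np.tanh(x @ U_a + b_a + attention_vec @ U_m + b_m)  # joint attention activation
s = _softmax(m @ U_s + b_s)                             # attention scores
h_attended = h * s                                      # reweighted output, shape (2, 4)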