def __init__(
        self, incomings, num_units,
        W_g=init.Normal(0.1),
        W_h=init.Normal(0.1),
        W_v=init.Normal(0.1),
        W_s=init.Normal(0.1),
        W_p=init.Normal(0.1),
        nonlinearity=nonlinearities.tanh,
        nonlinearity_atten=nonlinearities.softmax,
        **kwargs
):
    super(AttenLayer, self).__init__(incomings, **kwargs)
    # Shapes are read from the incoming layers registered by the MergeLayer base class.
    self.batch_size = self.input_shapes[0][0]   # None: symbolic (variable) batch size
    num_inputs = self.input_shapes[2][1]        # k: number of attended feature vectors
    feature_dim = self.input_shapes[0][1]       # d: dimensionality of each feature
    self.num_units = num_units
    self.nonlinearity = nonlinearity
    self.nonlinearity_atten = nonlinearity_atten

    # Attention weights, registered via add_param so Lasagne tracks them as trainable.
    self.W_h_to_attenGate = self.add_param(
        W_h, (num_inputs, 1),
        name='W_h_to_atten'
    )
    self.W_g_to_attenGate = self.add_param(
        W_g,
        (feature_dim, num_inputs),
        name='W_g_to_atten'
    )
    self.W_v_to_attenGate = self.add_param(
        W_v,
        (feature_dim, num_inputs),
        name='W_v_to_atten'
    )
    self.W_s_to_attenGate = self.add_param(
        W_s,
        (feature_dim, num_inputs),
        name='W_s_to_atten'
    )
    self.W_p = self.add_param(
        W_p,
        (feature_dim, num_units),
        name='W_p_to_atten'
    )
    self.num_inputs = num_inputs
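For context, here is a minimal usage sketch. It assumes AttenLayer subclasses lasagne.layers.MergeLayer and takes three incoming layers (a hidden state of shape (None, d), a second (None, d) input such as a sentinel vector, and a (None, k, d) block of visual features, so that input_shapes[2][1] gives k). The names l_h, l_s, l_v and the concrete sizes are illustrative assumptions, not taken from the original code.

# Sketch only: layer order and shapes are assumptions, not from the original post.
import lasagne
from lasagne.layers import InputLayer

d, k = 512, 49                     # assumed feature dimensionality and number of regions
l_h = InputLayer((None, d))        # e.g. decoder hidden state
l_s = InputLayer((None, d))        # e.g. visual sentinel / second context vector
l_v = InputLayer((None, k, d))     # e.g. spatial image features over k regions

l_atten = AttenLayer([l_h, l_s, l_v], num_units=512)
params = lasagne.layers.get_all_params(l_atten, trainable=True)  # includes the W_* above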