attention_layers.py source code

python

Project: Keras_note    Author: LibCorner
def build(self, input_shapes):
        '''
        build: create the attention weight matrices.
        U_a: projects the input x into the attention space
        U_m: projects attention_vec into the attention space
        U_s: projects the attention activation to the softmax scores
        '''
        input_shape = input_shapes[0]
        super(AttentionLSTM, self).build(input_shape)
        self.input_spec = [InputSpec(shape=input_shapes[0]), InputSpec(shape=input_shapes[1])]
        # attention_dim = self.input_spec[1].shape[1]
        attention_dim = self.att_dim
        input_dim = input_shape[2]
        # attention weights
        self.U_a = self.inner_init((input_dim, self.output_dim),
                                   name='{}_U_a'.format(self.name))
        self.b_a = K.zeros((self.output_dim,), name='{}_b_a'.format(self.name))

        self.U_m = self.inner_init((attention_dim, self.output_dim),
                                   name='{}_U_m'.format(self.name))
        self.b_m = K.zeros((self.output_dim,), name='{}_b_m'.format(self.name))

        if self.single_attention_param:
            self.U_s = self.inner_init((self.output_dim, 1),
                                       name='{}_U_s'.format(self.name))
            self.b_s = K.zeros((1,), name='{}_b_s'.format(self.name))
        else:
            self.U_s = self.inner_init((self.output_dim, self.output_dim),
                                       name='{}_U_s'.format(self.name))
            self.b_s = K.zeros((self.output_dim,), name='{}_b_s'.format(self.name))

        self.trainable_weights += [self.U_a, self.U_m, self.U_s,
                                   self.b_a, self.b_m, self.b_s]
        if self.initial_weights is not None:
            self.set_weights(self.initial_weights)
            del self.initial_weights
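
The snippet only shows how the weights are created, not how they are used in the recurrent step. Below is a minimal NumPy sketch of how matrices with these shapes are typically combined into attention scores: the input x and the external attention_vec are projected into a shared space, passed through tanh, and turned into per-unit weights via a softmax. The dimensions, variable names, and the exact gating formula are assumptions for illustration, not the layer's actual step function. Note that when single_attention_param is True, U_s has shape (output_dim, 1), so the layer produces a single scalar gate per timestep instead of per-unit weights.

    import numpy as np

    def softmax(z):
        # numerically stable softmax over the last axis
        e = np.exp(z - z.max(axis=-1, keepdims=True))
        return e / e.sum(axis=-1, keepdims=True)

    # assumed toy dimensions, mirroring input_dim / att_dim / output_dim in build()
    input_dim, attention_dim, output_dim = 8, 6, 4
    rng = np.random.default_rng(0)

    U_a = rng.normal(size=(input_dim, output_dim));     b_a = np.zeros(output_dim)
    U_m = rng.normal(size=(attention_dim, output_dim)); b_m = np.zeros(output_dim)
    U_s = rng.normal(size=(output_dim, output_dim));    b_s = np.zeros(output_dim)

    x = rng.normal(size=(input_dim,))                  # one timestep of the input sequence
    attention_vec = rng.normal(size=(attention_dim,))  # the external attention vector

    # project both sources into a shared space, then map the combined
    # activation to attention weights that sum to 1 over the output units
    m = np.tanh(x @ U_a + b_a + attention_vec @ U_m + b_m)
    s = softmax(m @ U_s + b_s)                         # shape (output_dim,)
    print(s)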