ja_lstm_tagger.py source code

Python. Project: depccg, author: masashi-y.
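Only the `__call__` method appears in this excerpt. For context, the following is a minimal sketch, under stated assumptions, of the Chain the method implies: the link names (emb_word, emb_char, conv_char, lstm_f, lstm_b, linear1, linear2) and `_init_state` are taken from the calls below, but every hyper-parameter, the convolution window, and the state initialization are placeholders, not depccg's actual values. The `train=` keyword arguments mark this as the old Chainer 1.x API.

from functools import reduce  # __call__ below uses reduce (needed on Python 3)

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L

IGNORE = -1  # assumed padding label id, skipped when computing accuracy


class JaLSTMTagger(chainer.Chain):
    """Hypothetical skeleton; only the link names come from __call__."""

    def __init__(self, word_vocab, char_vocab, word_dim=100, char_dim=50,
                 hidden_dim=200, n_labels=500, dropout_ratio=0.5):
        super(JaLSTMTagger, self).__init__(
            emb_word=L.EmbedID(word_vocab, word_dim),
            emb_char=L.EmbedID(char_vocab, char_dim),
            # (3, char_dim) window over char embeddings; pad=(1, 0) keeps
            # the word-length axis, matching the shape comments in __call__
            conv_char=L.Convolution2D(1, char_dim, (3, char_dim), pad=(1, 0)),
            lstm_f=L.NStepLSTM(1, word_dim + char_dim, hidden_dim, 0.0),
            lstm_b=L.NStepLSTM(1, word_dim + char_dim, hidden_dim, 0.0),
            linear1=L.Linear(2 * hidden_dim, hidden_dim),
            linear2=L.Linear(hidden_dim, n_labels))
        self.char_dim = char_dim
        self.hidden_dim = hidden_dim
        self.dropout_ratio = dropout_ratio
        self.train = True

    def _init_state(self, batchsize):
        # zero initial states: cx_f, hx_f, cx_b, hx_b, one layer each
        return [chainer.Variable(np.zeros(
                    (1, batchsize, self.hidden_dim), dtype=np.float32))
                for _ in range(4)]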
def __call__(self, xs):
        """
        xs [(w,s,p,y), ..., ]
        w: word, c: char, l: length, y: label
        """
        batchsize = len(xs)
        ws, cs, ls, ts = zip(*xs)
        # cs: [(sentence length, max word length)]
        ws = [self.emb_word(w) for w in ws]  # embed words: [(sentence length, word_dim)]
        # after char pooling, cs: [(sentence length, char dim)]
        # cs = [F.sum(self.emb_char(c), 1) / l for c, l in zip(cs, ls)]  # mean pooling
        # cs = [F.reshape(F.average_pooling_2d(
        #         F.expand_dims(self.emb_char(c), 0), (l, 1)), (-1, self.char_dim))
        #             for c, l in zip(cs, ls)]
        # before conv: (sent len, 1, max word len, char_size)
        # after conv: (sent len, char_size, max word len, 1)
        # after max_pool: (sent len, char_size, 1, 1)
        cs = [F.squeeze(
            F.max_pooling_2d(
                self.conv_char(
                    F.expand_dims(
                        self.emb_char(c), 1)), (l, 1)))
                    for c, l in zip(cs, ls)]
        # [(sentence length, (word_dim + char_dim))]
        xs_f = [F.dropout(F.concat([w, c]),
            self.dropout_ratio, train=self.train) for w, c in zip(ws, cs)]
        xs_b = [x[::-1] for x in xs_f]  # reversed copies for the backward LSTM
        cx_f, hx_f, cx_b, hx_b = self._init_state(batchsize)
        _, _, hs_f = self.lstm_f(hx_f, cx_f, xs_f, train=self.train)
        _, _, hs_b = self.lstm_b(hx_b, cx_b, xs_b, train=self.train)
        hs_b = [x[::-1] for x in hs_b]  # back to original token order
        # ys: [(sentence length, number of category)]
        ys = [self.linear2(F.relu(self.linear1(F.concat([h_f, h_b]))))
                    for h_f, h_b in zip(hs_f, hs_b)]
        # ys = [self.linear2(F.relu(
        #         self.linear1(
        #             F.squeeze(
        #                 F.transpose(
        #                     F.relu(self.conv1(
        #                         F.reshape(
        #                             F.concat([h_f, h_b]),
        #                             (1, 1, -1, 2 * self.hidden_dim))), (0, 3, 2, 1))
        #                 )))))
        #             for h_f, h_b in zip(hs_f, hs_b)]
        # sum per-sentence losses and accuracies over the batch
        loss = reduce(lambda x, y: x + y,
            [F.softmax_cross_entropy(y, t) for y, t in zip(ys, ts)])
        acc = reduce(lambda x, y: x + y,
            [F.accuracy(y, t, ignore_label=IGNORE) for y, t in zip(ys, ts)])

        acc /= batchsize  # mean per-sentence accuracy for reporting
        chainer.report({
            "loss": loss,
            "accuracy": acc
            }, self)
        return loss
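The shape comments above the pooling call (before conv, after conv, after max_pool) can be verified in isolation. A minimal sketch, assuming the same hypothetical (3, char_dim) window with vertical padding 1 as in the skeleton above:

import numpy as np
import chainer.functions as F
import chainer.links as L

sent_len, max_word_len, char_dim = 7, 12, 50   # arbitrary toy sizes
emb_char = L.EmbedID(1000, char_dim)           # hypothetical char vocab
conv_char = L.Convolution2D(1, char_dim, (3, char_dim), pad=(1, 0))

c = np.random.randint(0, 1000, (sent_len, max_word_len)).astype(np.int32)
e = emb_char(c)                    # (sent_len, max_word_len, char_dim)
e = F.expand_dims(e, 1)            # (sent_len, 1, max_word_len, char_dim)
h = conv_char(e)                   # (sent_len, char_dim, max_word_len, 1)
h = F.max_pooling_2d(h, (max_word_len, 1))   # (sent_len, char_dim, 1, 1)
h = F.squeeze(h)                   # (sent_len, char_dim): one vector per word
print(h.shape)                     # -> (7, 50)

Each word thus gets a fixed-size character-level vector that is concatenated with its word embedding before entering the bidirectional LSTM.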