def __init__(self,
             vocab_size,
             sequence_size,
             setting=None,
             checkpoint_path="",
             temperature=10,
             tying=False):
    super().__init__(vocab_size, sequence_size, setting, checkpoint_path)
    self.temperature = temperature
    self.tying = tying
    self.gamma = self.setting.gamma
    if tying:
        self.model.pop()  # remove the softmax activation
        self.model.pop()  # remove the output projection (the embedding weights are reused instead)
        # tie the output layer to the transposed embedding matrix: logits = h . E^T
        self.model.add(Lambda(lambda x: K.dot(x, K.transpose(self.embedding.embeddings))))
        self.model.add(Activation("softmax"))
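For context, here is a minimal, self-contained sketch of the same weight-tying idea: instead of a separate Dense output layer, the logits are computed against the transposed embedding matrix, so the input and output embeddings share weights. It assumes Keras with the TensorFlow backend, and the model shape (an LSTM whose hidden size equals the embedding dimension) and all names are illustrative, not taken from the original class.

# Sketch of output-embedding weight tying (illustrative names/shapes).
from keras.models import Sequential
from keras.layers import Embedding, LSTM, Lambda, Activation
from keras import backend as K

vocab_size, embed_dim, sequence_size = 1000, 128, 20

# The embedding layer whose weight matrix E (vocab_size x embed_dim) will be reused.
embedding = Embedding(vocab_size, embed_dim, input_length=sequence_size)

model = Sequential()
model.add(embedding)
# The hidden size must equal embed_dim so that h . E^T is well defined.
model.add(LSTM(embed_dim, return_sequences=True))
# Tied output projection: logits = h . E^T, then softmax over the vocabulary.
model.add(Lambda(lambda x: K.dot(x, K.transpose(embedding.embeddings))))
model.add(Activation("softmax"))
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")

Note that tying constrains the recurrent layer's output dimension to match the embedding dimension; otherwise the dot product with E^T would not be defined.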