def _build(self):
    """Build the CBOW-embedding + multi-window CNN classification graph.

    Side effects (attributes set on self):
        self.inputs      -- int32 placeholder [B, S] of word ids.
        self.is_training -- bool placeholder (e.g. for batch norm mode).
        self.logit       -- pre-softmax class scores, shape [B, C].
    """
    V = self.V                       # vocabulary size
    M = self.flags.embedding_size    # embedding dimension (e.g. 64)
    C = self.flags.classes           # number of output classes
    W = self.flags.window_size       # base convolution window width
    S = self.flags.seq_len * 2 + 1   # context length: seq_len tokens on each side + center
    H = 32                           # conv filters per branch

    is_training = tf.placeholder(dtype=tf.bool)

    netname = "CBOW"
    with tf.variable_scope(netname):
        # Each element of `inputs` is a word id.
        self.inputs = tf.placeholder(dtype=tf.int32, shape=[None, S])  # [B, S]
        layer_name = "{}/embedding".format(netname)
        x = self._get_embedding(layer_name, self.inputs, V, M, reuse=False)  # [B, S, M]

    netname = "BaoBaoMiaoCnn"
    with tf.variable_scope(netname):
        x = tf.expand_dims(x, axis=3)  # [B, S, M, 1] -- NHWC layout for conv2d
        # Three parallel conv + max-pool branches with different window widths
        # (W, 2W, W//2), each producing H feature maps pooled to a single cell.
        net1 = self.conv_maxpool(x, W, M, S, H, "%s/conv1" % netname, 1)      # [B, 1, 1, H]
        net2 = self.conv_maxpool(x, W * 2, M, S, H, "%s/conv2" % netname, 1)  # [B, 1, 1, H]
        net3 = self.conv_maxpool(x, W // 2, M, S, H, "%s/conv3" % netname, 1) # [B, 1, 1, H]
        net = tf.concat([net1, net2, net3], axis=3)  # [B, 1, 1, 3H]
        net = self._batch_normalization(net, layer_name='%s/batch_norm1' % (netname))
        # Squeeze only the two spatial dims. A bare tf.squeeze(net) would also
        # drop the batch dimension when the batch size is 1, yielding a rank-1
        # tensor and breaking the fully-connected layer below.
        net = tf.squeeze(net, axis=[1, 2])  # [B, 3H]
        net = self._fc(net, fan_in=H * 3, fan_out=C, layer_name="%s/fc1" % netname, activation=None)

    self.logit = net
    self.is_training = is_training
# (page-scrape residue, kept as comments so the file parses)
# 评论列表  -- "comment list"
# 文章目录  -- "table of contents"