def buildSiameseNN(self, left_nn, right_nn):
    """Join the two Siamese branches and map them to class probabilities.

    The outputs of the left and right branches are concatenated along the
    feature axis, batch-normalized, passed through a fully connected
    layer, and squashed with softmax.

    Args:
        left_nn: output tensor of the left branch
            (presumably shape (batch, nfeatures) — the 2*nfeatures concat
            width suggests this; TODO confirm against the branch builders).
        right_nn: output tensor of the right branch, same shape as left_nn.

    Returns:
        A float64 tensor of shape (batch, n_classes) holding softmax
        probabilities over the classes.
    """
    print(self.nfeatures)

    # Trainable parameters of the final fully connected layer; float64 to
    # match the dtype used throughout the network.
    weights = {
        'out': tf.Variable(
            tf.random_normal([2 * self.nfeatures, self.n_classes],
                             dtype=tf.float64),
            dtype=tf.float64),
    }
    biases = {
        'out': tf.Variable(
            tf.random_normal([self.n_classes], dtype=tf.float64),
            dtype=tf.float64),
    }

    # Fuse both branch outputs along the feature dimension (axis=1).
    merged = tf.concat([left_nn, right_nn], 1)
    print("joint layer-->" + str(merged))

    # Batch-normalize the fused features, then apply the dense layer.
    normalized = self.insertBatchNNLayer(merged, [0], [2 * self.nfeatures])
    logits = tf.matmul(normalized, weights['out']) + biases['out']

    # Softmax turns the logits into class probabilities.
    return tf.nn.softmax(logits)
# Provenance note: this snippet was extracted from siamese_lstm_network1.py
# (originally scraped from a code-sharing page; the page's view/like/comment
# counters and table-of-contents labels have been removed as non-code residue).