from theano import tensor

def _p(prefix, name):
    # Standard Theano-tutorial helper: _p('mlp_layer', 'W1') -> 'mlp_layer_W1'
    return '%s_%s' % (prefix, name)

def mlp_layer_softmax(tparams, layer1_input, prefix='mlp_layer'):
    """One-hidden-layer MLP with a softmax output.

    Shapes (example sizes): layer1_input is n_sample x n_feature (64 x 20),
    W1 is n_hidden x n_feature (200 x 20), V1 is n_label x n_hidden (2 x 200),
    and the returned y_recons is n_sample x n_label (64 x 2).
    """
    # Hidden layer: sigmoid(x W1^T + b1) -> n_sample x n_hidden (64 x 200)
    hidden_2_out = tensor.nnet.sigmoid(tensor.dot(layer1_input, tparams[_p(prefix, 'W1')].T) + tparams[_p(prefix, 'b1')])
    # Output logits: hidden V1^T + c1 -> n_sample x n_label (64 x 2)
    y_recons = tensor.dot(hidden_2_out, tparams[_p(prefix, 'V1')].T) + tparams[_p(prefix, 'c1')]
    # Alternatives left commented out in the original:
    # y_recons = tensor.tanh(y_recons) * 10      # squash logits to avoid numerical issues / label smoothing
    # y_recons = tensor.nnet.softmax(y_recons)   # built-in softmax, 64 x 2

    # Numerically stable softmax: subtracting the row-wise max before exp
    # leaves the result unchanged but prevents overflow in tensor.exp.
    max_w = tensor.max(y_recons, axis=1, keepdims=True)
    e0 = tensor.exp(y_recons - max_w)
    y_recons = e0 / tensor.sum(e0, axis=1, keepdims=True)
    return y_recons
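
# Minimal usage sketch (an assumption, not from the original post): tparams is
# taken to be a plain dict of theano.shared variables keyed by the
# _p(prefix, name) convention, using the example sizes from the docstring.
import numpy as np
import theano
from theano import tensor

n_feature, n_hidden, n_label = 20, 200, 2
rng = np.random.RandomState(0)
tparams = {
    'mlp_layer_W1': theano.shared(0.01 * rng.randn(n_hidden, n_feature).astype('float32')),
    'mlp_layer_b1': theano.shared(np.zeros(n_hidden, dtype='float32')),
    'mlp_layer_V1': theano.shared(0.01 * rng.randn(n_label, n_hidden).astype('float32')),
    'mlp_layer_c1': theano.shared(np.zeros(n_label, dtype='float32')),
}

x = tensor.matrix('x')                 # symbolic n_sample x n_feature input
probs = mlp_layer_softmax(tparams, x)  # symbolic n_sample x n_label output
f = theano.function([x], probs)

batch = rng.randn(64, n_feature).astype('float32')
print(f(batch).shape)        # (64, 2)
print(f(batch).sum(axis=1))  # each row of the softmax output sums to 1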