import tensorflow as tf
from tensorflow.contrib import learn
from tensorflow.contrib.learn.python.learn.estimators import model_fn


def fnn_model_fn(features, labels, mode):
    """Single-hidden-layer feed-forward network for binary classification."""
    # Hidden layer with ReLU activation; nhidden is a module-level hyperparameter.
    dense = tf.layers.dense(features, units=nhidden,
                            activation=tf.nn.relu, use_bias=True)
    # Single output unit: one logit for the positive class.
    logits = tf.layers.dense(dense, units=1, use_bias=True)

    loss = None
    train_op = None
    # Loss is needed in TRAIN and EVAL, but labels are absent in INFER.
    if mode != learn.ModeKeys.INFER:
        # With a single logit, sigmoid cross-entropy is the appropriate loss.
        # (softmax_cross_entropy over depth-1 one-hot labels is degenerate:
        # the softmax of one logit is always 1, so the loss is constantly 0.)
        output_labels = tf.reshape(tf.cast(labels, tf.float32), [-1, 1])
        loss = tf.losses.sigmoid_cross_entropy(
            multi_class_labels=output_labels, logits=logits)
    if mode == learn.ModeKeys.TRAIN:
        train_op = tf.contrib.layers.optimize_loss(
            loss=loss,
            global_step=tf.contrib.framework.get_global_step(),
            learning_rate=learning_rate,
            optimizer="SGD")

    probabilities = tf.nn.sigmoid(logits, name="sigmoid_tensor")
    predictions = {
        # Threshold the predicted probability at 0.5 to get the class label.
        "classes": tf.round(probabilities),
        "probabilities": probabilities,
    }
    return model_fn.ModelFnOps(
        mode=mode, predictions=predictions, loss=loss, train_op=train_op)
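
For context, a minimal usage sketch: wiring the model_fn into a tf.contrib.learn.Estimator and fitting it on AND-gate data, which the file name suggests is the intended task. The nhidden and learning_rate values are illustrative assumptions; the original snippet does not show them.

import numpy as np
from tensorflow.contrib import learn

nhidden = 4            # assumed hyperparameter (not shown in the snippet)
learning_rate = 0.1    # assumed hyperparameter (not shown in the snippet)

# Truth table of the AND gate: inputs and binary target labels.
x_train = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
y_train = np.array([0, 0, 0, 1], dtype=np.float32)

classifier = learn.Estimator(model_fn=fnn_model_fn)
classifier.fit(x=x_train, y=y_train, batch_size=4, steps=1000)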
Source file: logic_gate_linear_regressor.py (Python)