def sdca_classifier_model_fn(features, targets, mode, params):
  """Linear classifier model_fn for Estimators trained with the SDCA optimizer.

  Args:
    features: `dict` mapping feature-column names to input `Tensor`s.
    targets: label `Tensor`.
    mode: one of `estimator.ModeKeys` (TRAIN / EVAL / INFER).
    params: `dict` with keys "feature_columns", "optimizer",
      "weight_column_name" and "loss_type".

  Returns:
    A `(predictions, loss, train_op)` tuple. `loss` is `None` in INFER mode
    and `train_op` is `None` outside TRAIN mode.

  Raises:
    ValueError: if the supplied optimizer is not an `SDCAOptimizer`.
  """
  feature_columns = params["feature_columns"]
  optimizer = params["optimizer"]
  weight_column_name = params["weight_column_name"]
  loss_type = params["loss_type"]

  if not isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
    raise ValueError("Optimizer must be of type SDCAOptimizer")

  # Dispatch the requested loss name onto its implementation.
  loss_fn = {"logistic_loss": _log_loss_with_two_classes,
             "hinge_loss": _hinge_loss}[loss_type]

  logits, columns_to_variables, bias = (
      layers.weighted_sum_from_feature_columns(
          columns_to_tensors=features,
          feature_columns=feature_columns,
          num_outputs=1))
  _add_bias_column(feature_columns, features, bias, targets,
                   columns_to_variables)

  loss = None
  if mode != estimator.ModeKeys.INFER:
    loss = math_ops.reduce_mean(loss_fn(logits, targets), name="loss")
    logging_ops.scalar_summary("loss", loss)

  train_op = None
  if mode == estimator.ModeKeys.TRAIN:
    global_step = contrib_variables.get_global_step()
    train_op = optimizer.get_train_step(
        columns_to_variables, weight_column_name, loss_type, features,
        targets, global_step)

  # Sigmoid of the single raw logit is P(class=1); padding a zero column
  # and taking softmax/argmax yields the two-class probability vector and
  # the predicted class index.
  predictions = {_LOGISTIC: math_ops.sigmoid(logits)}
  two_class_logits = array_ops.concat(
      1, [array_ops.zeros_like(logits), logits])
  predictions[_PROBABILITIES] = nn.softmax(two_class_logits)
  predictions[_CLASSES] = math_ops.argmax(two_class_logits, 1)
  return predictions, loss, train_op
# Ensures consistency with LinearComposableModel.