import tensorflow as tf


def diet_adam_optimizer_params():
  """Default hyperparameters for a DietAdamOptimizer.

  Returns:
    a hyperparameters object.
  """
  return tf.contrib.training.HParams(
      quantize=int(True),  # use 16-bit fixed-point
      quantization_scale=10.0 / tf.int16.max,
      optimizer="DietAdam",
      learning_rate=1.0,
      learning_rate_warmup_steps=2000,
      learning_rate_decay_scheme="noam",  # "noam" or "none"
      epsilon=1e-10,
      beta1=0.0,  # we can save memory if beta1=0.0
      beta2=0.98,
      factored_second_moment_accumulator=int(True),  # this saves memory
  )
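

# Usage sketch (illustrative, not part of the original module): the returned
# object is a standard tf.contrib.training.HParams (TensorFlow 1.x), so its
# fields are plain attributes and defaults can be overridden with the usual
# comma-separated `parse` syntax before constructing the optimizer.
hparams = diet_adam_optimizer_params()
print(hparams.learning_rate)  # 1.0
hparams.parse("learning_rate=0.5,beta2=0.999")  # override selected defaults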