import theano as t
import theano.tensor as tt

# `optimizers` and `dim_to_var` are project-level helpers assumed to be
# importable here: `optimizers.optimizer` builds the update rules and
# `dim_to_var(n)` returns a symbolic tensor variable with `n` dimensions.


def optimize_function(model, solver_params, config=None):
    """
    Create a compiled update function that receives gradients.

    Parameters:
        model         - model exposing its trainable parameters as `model.params`
        solver_params - optimizer hyperparameters forwarded to `optimizers.optimizer`
        config        - optional training configuration

    Returns:
        A compiled Theano function that takes a learning rate followed by one
        gradient array per model parameter and applies the parameter updates.
    """
    # One symbolic gradient variable per model parameter, matching its rank.
    gradients_ = [dim_to_var(p.ndim) for p in model.params]
    lr_ = tt.fscalar('lr_')
    updates = optimizers.optimizer(lr=lr_,
                                   model=model,
                                   gradients=gradients_,
                                   solver_params=solver_params)
    return t.function(inputs=[lr_] + gradients_, outputs=[], updates=updates)
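

# A minimal usage sketch, assuming `model.params` holds the trainable parameters
# and that the caller has already computed one gradient array per parameter
# (e.g. from a separately compiled loss/gradient function). The names
# `compute_gradients` and `batch` below are hypothetical placeholders:
#
#   train_step = optimize_function(model, solver_params)
#   grads = compute_gradients(batch)   # hypothetical: numpy arrays, one per param
#   train_step(0.01, *grads)           # learning rate first, then the gradients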