def compile(
        self, s_inputs_, s_loss_, v_params_, s_grads_=None, s_reg_=0,
        fetches_=None, updates_=None, givens_=None,
        trunc_grad_=None, profile_=False
):
    '''
    compile the optimizer against a specific model
    Args:
        s_inputs_: list of symbolic input tensors, including the label
        s_loss_: optimization loss, a symbolic scalar
        v_params_: list of shared parameters to optimize
        s_grads_: list of gradients to apply, in the same order as v_params_;
            default None (use automatic differentiation)
        s_reg_: symbolic regularization term, default 0 (no regularization)
        updates_: extra updates to shared values applied after each optimization
            step, typically RNN states; takes the form [(v_var, s_new_var), ...]
        trunc_grad_: float threshold for element-wise gradient clipping; each
            gradient is clipped to [-trunc_grad_, trunc_grad_]; default None
            (no clipping)
    Returns: None
    '''
    self.s_loss = s_loss_
    self.s_reg = s_reg_
    if s_grads_ is None:
        # differentiate the regularized loss w.r.t. every parameter
        s_grads_ = T.grad(
            self.s_loss + self.s_reg, list(v_params_),
            disconnected_inputs='warn')
    if isinstance(trunc_grad_, float):
        # clip each gradient element-wise to [-trunc_grad_, trunc_grad_]
        self.s_grads = [T.clip(g, -trunc_grad_, trunc_grad_) for g in s_grads_]
    else:
        self.s_grads = s_grads_
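# ----------------------------------------------------------------------------
# Usage sketch (not from the original source): how `compile` might be called
# on a small softmax classifier. It assumes `import theano` and
# `import theano.tensor as T` are available, and that `compile` belongs to an
# optimizer class; `SGDOptimizer` and its constructor below are hypothetical
# placeholder names.
# ----------------------------------------------------------------------------
import numpy as np
import theano
import theano.tensor as T

s_x = T.matrix('x')    # input features, shape (batch, 4)
s_y = T.ivector('y')   # integer class labels
v_w = theano.shared(np.zeros((4, 3), dtype=theano.config.floatX), name='w')
v_b = theano.shared(np.zeros((3,), dtype=theano.config.floatX), name='b')

s_prob = T.nnet.softmax(T.dot(s_x, v_w) + v_b)
s_loss = T.mean(T.nnet.categorical_crossentropy(s_prob, s_y))
s_reg = 1e-4 * T.sum(T.sqr(v_w))   # simple L2 penalty on the weights

opt = SGDOptimizer(lr=0.1)         # hypothetical optimizer class
opt.compile(
    [s_x, s_y], s_loss, [v_w, v_b],
    s_reg_=s_reg,
    trunc_grad_=1.0                # clip each gradient to [-1.0, 1.0]
)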