def build_bprop_graph(self):
    """Build the backpropagation (training) graph and store the algorithm.

    The costs fetched from the ``'costs'`` link come in one of two
    mutually exclusive flavors:

    * every cost is a ``ParametersLink`` binding it to specific
      parameters -- gradients are computed here, per cost, and passed
      to ``GradientDescent`` via ``gradients=``; or
    * no cost is bound -- the costs are summed into a single scalar and
      ``GradientDescent`` differentiates it itself via ``cost=``.

    Side effects:
        Sets ``self.algorithm`` to the configured ``GradientDescent``
        instance.

    Raises:
        AssertionError: if only some costs are bound to parameters;
            mixing bound and unbound costs is ambiguous.
    """
    optimizer = self.get_optimizer()
    # There are either costs assigned to specific params, OR we let
    # GradientDescent take the gradient of one summed cost itself.
    costs = self.link_here('costs').keys()
    isinstance_check = [isinstance(c, ParametersLink) for c in costs]
    if any(isinstance_check):
        assert all(isinstance_check), "Some costs have parameters associated "+\
                "to them and others don't. All costs need to be binded."
        grads = OrderedDict()
        for cost in costs:
            # BUGFIX: differentiate w.r.t. the same parameter list used
            # as the zip keys. The original passed ``cost.params`` to
            # theano.grad while zipping against ``cost.parameters``,
            # which misaligns (or breaks) the parameter->gradient map.
            grads.update(zip(cost.parameters,
                             theano.grad(cost.model_var, cost.parameters)))
        # Gradients are supplied explicitly, so no scalar cost is given.
        cost = None
    else:
        # No parameter bindings: optimize the sum of all costs and let
        # GradientDescent compute the gradients.
        cost = sum(costs)
        grads = None
    algorithm = GradientDescent(
        cost=cost, gradients=grads,
        parameters=self.model_parameters,
        step_rule=optimizer)
    self.algorithm = algorithm