def __call__(self, inputs, outputs, disable=(), train=True,
             tuning_layer='fc1000'):
"""Executes a sub-network of the network.
This function acts as an interpreter of the network definition for
Caffe. On execution, it interprets each layer one by one, and if the
bottom blobs are already computed, then emulates the layer and stores
output blobs as :class:`~chainer.Variable` objects.
Args:
inputs (dict): A dictionary whose key-value pairs indicate initial
correspondences between blob names and
:class:`~chainer.Variable` objects.
outputs (Iterable): A list of blob names whose corresponding
:class:`~chainer.Variable` objects are returned.
disable (Iterable): A list of layer names that will be ignored
during the forward computation.
train (bool): If ``True``, this function emulates the TRAIN phase
of the Caffe layers. Otherwise, it emulates the TEST phase.
Returns:
tuple: A tuple of output :class:`~chainer.Variable` objects
corresponding to elements of the `outputs` argument.
"""
    # Layers before the tuning layer are emulated in the TEST phase;
    # ``self.train`` is switched to ``True`` once the tuning layer is reached.
    self.train = False
    variables = dict(inputs)
    self.cleargrads()
    for func_name, bottom, top in self.layers:
        # Skip disabled layers, layers with no registered forward function,
        # and layers whose bottom blobs have not been computed yet.
        if (func_name in disable or
                func_name not in self.forwards or
                any(blob not in variables for blob in bottom)):
            continue
        func = self.forwards[func_name]
        input_vars = tuple(variables[blob] for blob in bottom)
        if func_name == tuning_layer:
            # Re-create the input variables from their raw data arrays.
            # This cuts the backward graph, so gradients never reach the
            # layers before the tuning layer. ``volatile`` ('off'/'on')
            # controls whether a graph is built from here onward.
            volatile = 'off' if train else 'on'
            new_input_vars = []
            for blob in input_vars:
                new_input_vars.append(
                    chainer.Variable(blob.data, volatile=volatile))
            input_vars = new_input_vars
            # From the tuning layer on, emulate the TRAIN phase.
            self.train = True
        output_vars = func(*input_vars)
        if not isinstance(output_vars, collections.Iterable):
            output_vars = output_vars,
        for var, name in zip(output_vars, top):
            variables[name] = var
    self.variables = variables
    return tuple(variables[blob] for blob in outputs)
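
Below is a minimal usage sketch of the modified __call__, assuming the method belongs to a CaffeFunction-style link loaded from a pretrained .caffemodel under the Chainer v1 API (where Variable still accepts volatile). The class name FineTunedCaffeFunction, the model file name, the blob names 'data' and 'fc8', and the input size are placeholders for illustration, not part of the original code.

import numpy as np
import chainer
import chainer.functions as F
from chainer import optimizers

# Hypothetical subclass carrying the __call__ above; the model file and
# the blob/layer names ('data', 'fc8') are assumptions for illustration.
model = FineTunedCaffeFunction('bvlc_reference_caffenet.caffemodel')
optimizer = optimizers.MomentumSGD(lr=0.001)
optimizer.setup(model)

x = chainer.Variable(np.zeros((1, 3, 227, 227), dtype=np.float32))
t = np.zeros((1,), dtype=np.int32)

# Everything before 'fc8' runs in the TEST phase, and the inputs of 'fc8'
# are detached, so only 'fc8' and later layers receive gradients.
# Note that __call__ already calls self.cleargrads() internally.
y, = model({'data': x}, ['fc8'], train=True, tuning_layer='fc8')
loss = F.softmax_cross_entropy(y, t)

loss.backward()      # stops at the detached inputs of the tuning layer
optimizer.update()   # only 'fc8' and later layers received gradients

Because the inputs of the tuning layer are rebuilt from their raw data arrays, loss.backward() cannot propagate past that point, so the pretrained layers in front stay frozen while the tail of the network is fine-tuned.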