def _invert_Conv2DLayer(self, layer, feeder):
    """Build the inverse (deconvolution) of a Lasagne ``Conv2DLayer``.

    Parameters
    ----------
    layer : the forward ``Conv2DLayer`` being inverted.
    feeder : the layer feeding the inverse pass (the signal flowing
        backwards through the network).

    Returns
    -------
    A Lasagne layer implementing the transposed convolution of *layer*.

    Raises
    ------
    RuntimeError
        If the forward layer's padding cannot be mapped to an inverse
        padding of 'same' or 'full'.
    """
    def _check_padding_same():
        # True iff each (filter_size, pad) pair is equivalent to 'same'
        # padding: odd filter size with pad == filter_size // 2.
        for size, pad_amount in zip(layer.filter_size, layer.pad):
            if size % 2 != 1 or size // 2 != pad_amount:
                return False
        return True

    # Warning: rectifiers are swapped here (the inversion walks the
    # graph backwards, so the rectifier is applied to the feeder).
    feeder = self._put_rectifiers(feeder, layer)
    filter_size = layer.filter_size

    # Map the forward padding to the inverse convolution's padding:
    # 'same' inverts to 'same'; 'valid' (or an explicit zero pad)
    # inverts to 'full'.
    if layer.pad == 'same' or _check_padding_same():
        pad = 'same'
    elif layer.pad == 'valid' or layer.pad == (0, 0):
        pad = 'full'
    else:
        raise RuntimeError("Define your padding as full or same.")

    # The inverse conv produces as many channels as the forward layer's
    # input had. flip_filters=True is required for a proper
    # deconvolution (convolution rather than cross-correlation).
    num_filters = L.get_output_shape(layer.input_layer)[1]

    def _make_deconv(incoming):
        # Single shared construction of the stride-1 inverse convolution
        # (previously duplicated in all three branches below).
        return L.Conv2DLayer(incoming,
                             num_filters=num_filters,
                             filter_size=filter_size,
                             stride=1, pad=pad,
                             nonlinearity=None, b=None,
                             flip_filters=True)

    if layer.stride == (4, 4):
        # Todo: clean this!
        print("Applying alexnet hack.")
        # Undo the forward stride by zero-dilating the input.
        feeder = L.Upscale2DLayer(feeder, layer.stride, mode='dilate')
        conv_layer = _make_deconv(feeder)
        print("Applying alexnet hack part 2.")
        # Crop the trailing stride-1 (= 3) rows/columns introduced by
        # the dilation on both spatial axes.
        output_layer = L.SliceLayer(L.SliceLayer(conv_layer,
                                                 slice(0, -3), axis=3),
                                    slice(0, -3), axis=2)
        # NOTE(review): attaching W to the SliceLayer mirrors the
        # original hack; presumably downstream code reads .W off the
        # returned layer — confirm before removing.
        output_layer.W = conv_layer.W
    elif layer.stride == (2, 2):
        # Todo: clean this! Same dilation trick as AlexNet above,
        # but without the cropping step.
        print("Applying GoogLeNet hack.")
        feeder = L.Upscale2DLayer(feeder, layer.stride, mode='dilate')
        output_layer = _make_deconv(feeder)
    else:
        output_layer = _make_deconv(feeder)
    return output_layer