def _invert_layer(self, layer, feeder):
    """Route inversion of `layer` to the handler matching its exact class.

    The feeder is first reshaped (batch dimension kept free) whenever its
    output shape disagrees with the layer's, then dispatched by the layer's
    concrete type to one of the `_invert_*` methods.  Unrecognized layer
    classes fall through to `_invert_UnknownLayer`.
    """
    # Align the feeder's shape with the layer being inverted.
    if L.get_output_shape(feeder) != L.get_output_shape(layer):
        feeder = L.ReshapeLayer(feeder, (-1,) + L.get_output_shape(layer)[1:])

    # Exact-type dispatch table (both max-pool variants share one handler).
    handlers = {
        L.InputLayer: self._invert_InputLayer,
        L.FlattenLayer: self._invert_FlattenLayer,
        L.DenseLayer: self._invert_DenseLayer,
        L.Conv2DLayer: self._invert_Conv2DLayer,
        L.DropoutLayer: self._invert_DropoutLayer,
        L.MaxPool2DLayer: self._invert_MaxPoolingLayer,
        L.MaxPool1DLayer: self._invert_MaxPoolingLayer,
        L.PadLayer: self._invert_PadLayer,
        L.SliceLayer: self._invert_SliceLayer,
        L.LocalResponseNormalization2DLayer: self._invert_LocalResponseNormalisation2DLayer,
        L.GlobalPoolLayer: self._invert_GlobalPoolLayer,
    }
    invert = handlers.get(type(layer), self._invert_UnknownLayer)
    return invert(layer, feeder)
# Example snippets using the FlattenLayer() class
def create_network():
    """Build the two-branch classification network (2-way softmax output).

    The input of shape (None, 1, 4, l + 1024) is split along the last axis:
    the first `l` columns form a sequence branch fed through three
    conv/conv/conv/max-pool stages, while the trailing 1024 columns are
    flattened and passed through a small dense layer.  Both branches are
    concatenated, regularized with dropout, and classified.
    """
    seq_len = 1000
    pool_width = 5
    filter_widths = (13, 7, 5)    # per-stage convolution widths
    stage_kernels = (128, 128, 128)

    inputs = InputLayer(shape=(None, 1, 4, seq_len + 1024))
    seq_branch = SliceLayer(inputs, indices=slice(0, seq_len), axis=-1)
    aux_branch = SliceLayer(inputs, indices=slice(seq_len, None), axis=-1)
    aux_branch = SliceLayer(aux_branch, indices=slice(0, 4), axis=-2)
    aux_flat = FlattenLayer(aux_branch)

    conv = seq_branch
    first_stage = True
    for width, kernels in zip(filter_widths, stage_kernels):
        # Only the very first convolution spans all four input rows.
        conv = Conv2DLayer(conv, num_filters=kernels,
                           filter_size=(4 if first_stage else 1, width))
        first_stage = False
        conv = Conv2DLayer(conv, num_filters=kernels, filter_size=(1, width))
        conv = Conv2DLayer(conv, num_filters=kernels, filter_size=(1, width))
        conv = MaxPool2DLayer(conv, pool_size=(1, pool_width))

    conv_dense = DenseLayer(conv, num_units=256)
    aux_dense = DenseLayer(aux_flat, num_units=128)
    merged = ConcatLayer([conv_dense, aux_dense])
    merged = DropoutLayer(merged, p=0.5)
    merged = DenseLayer(merged, num_units=256)
    return DenseLayer(merged, num_units=2, nonlinearity=softmax)
#random search to initialize the weights
def create_network():
    """Build the two-branch regression network (single linear output unit).

    Same topology as the classification variant: the (None, 1, 4, l + 1024)
    input is split into a sequence branch (three conv stages) and a
    flattened auxiliary branch, then concatenated.  Dropout is deliberately
    disabled here and the head is a single unit with no nonlinearity.
    """
    seq_len = 1000
    pool_width = 5
    filter_widths = (13, 7, 5)    # per-stage convolution widths
    stage_kernels = (128, 128, 128)

    inputs = InputLayer(shape=(None, 1, 4, seq_len + 1024))
    seq_branch = SliceLayer(inputs, indices=slice(0, seq_len), axis=-1)
    aux_branch = SliceLayer(inputs, indices=slice(seq_len, None), axis=-1)
    aux_branch = SliceLayer(aux_branch, indices=slice(0, 4), axis=-2)
    aux_flat = FlattenLayer(aux_branch)

    conv = seq_branch
    first_stage = True
    for width, kernels in zip(filter_widths, stage_kernels):
        # Only the very first convolution spans all four input rows.
        conv = Conv2DLayer(conv, num_filters=kernels,
                           filter_size=(4 if first_stage else 1, width))
        first_stage = False
        conv = Conv2DLayer(conv, num_filters=kernels, filter_size=(1, width))
        conv = Conv2DLayer(conv, num_filters=kernels, filter_size=(1, width))
        conv = MaxPool2DLayer(conv, pool_size=(1, pool_width))

    conv_dense = DenseLayer(conv, num_units=256)
    aux_dense = DenseLayer(aux_flat, num_units=128)
    merged = ConcatLayer([conv_dense, aux_dense])
    # Dropout intentionally omitted in this variant.
    merged = DenseLayer(merged, num_units=256)
    return DenseLayer(merged, num_units=1, nonlinearity=None)
#random search to initialize the weights
def _initialize_network(self, img_input_shape, misc_len, output_size, img_input, misc_input=None, **kwargs):
    """Build the plain DQN tower: three conv layers, optional misc branch, two dense layers.

    When ``self.misc_state_included`` is set, the conv output is flattened
    and concatenated with a misc-state input (optionally passed through an
    extra dense layer sized by ``kwargs["additional_misc_layer"]``).

    Returns the tuple (output_layer, input_layers, input_vars).
    """
    input_layers = []
    inputs = [img_input]
    # weights_init = lasagne.init.GlorotUniform("relu")
    w_init = lasagne.init.HeNormal("relu")
    b_init = lasagne.init.Constant(0.1)

    network = ls.InputLayer(shape=img_input_shape, input_var=img_input)
    input_layers.append(network)
    # Standard DQN conv trunk: (filters, size, stride) per layer.
    for n_filters, f_size, f_stride in ((32, 8, 4), (64, 4, 2), (64, 3, 1)):
        network = ls.Conv2DLayer(network, num_filters=n_filters, filter_size=f_size,
                                 nonlinearity=rectify, W=w_init, b=b_init,
                                 stride=f_stride)

    if self.misc_state_included:
        inputs.append(misc_input)
        network = ls.FlattenLayer(network)
        misc_input_layer = ls.InputLayer(shape=(None, misc_len), input_var=misc_input)
        input_layers.append(misc_input_layer)
        if "additional_misc_layer" in kwargs:
            misc_input_layer = ls.DenseLayer(misc_input_layer,
                                             int(kwargs["additional_misc_layer"]),
                                             nonlinearity=rectify,
                                             W=w_init, b=b_init)
        network = ls.ConcatLayer([network, misc_input_layer])

    network = ls.DenseLayer(network, 512, nonlinearity=rectify, W=w_init, b=b_init)
    network = ls.DenseLayer(network, output_size, nonlinearity=None,
                            b=lasagne.init.Constant(.1))
    return network, input_layers, inputs
def _initialize_network(self, img_input_shape, misc_len, output_size, img_input, misc_input=None, **kwargs):
    """Build the dueling DQN tower: shared conv trunk, then advantage and value heads.

    After the (optionally misc-augmented) trunk, two 256-unit branches are
    created — one producing per-action advantages, one a scalar state
    value — and merged by ``DuellingMergeLayer``.

    Returns the tuple (output_layer, input_layers, input_vars).
    """
    input_layers = []
    inputs = [img_input]
    # weights_init = lasagne.init.GlorotUniform("relu")
    w_init = lasagne.init.HeNormal("relu")
    b_init = lasagne.init.Constant(.1)

    network = ls.InputLayer(shape=img_input_shape, input_var=img_input)
    input_layers.append(network)
    # Standard DQN conv trunk: (filters, size, stride) per layer.
    for n_filters, f_size, f_stride in ((32, 8, 4), (64, 4, 2), (64, 3, 1)):
        network = ls.Conv2DLayer(network, num_filters=n_filters, filter_size=f_size,
                                 nonlinearity=rectify, W=w_init, b=b_init,
                                 stride=f_stride)

    if self.misc_state_included:
        inputs.append(misc_input)
        network = ls.FlattenLayer(network)
        misc_input_layer = ls.InputLayer(shape=(None, misc_len), input_var=misc_input)
        input_layers.append(misc_input_layer)
        if "additional_misc_layer" in kwargs:
            misc_input_layer = ls.DenseLayer(misc_input_layer,
                                             int(kwargs["additional_misc_layer"]),
                                             nonlinearity=rectify,
                                             W=w_init, b=lasagne.init.Constant(0.1))
        network = ls.ConcatLayer([network, misc_input_layer])

    # Duelling here: separate advantage and state-value branches.
    advantages = ls.DenseLayer(network, 256, nonlinearity=rectify, W=w_init, b=b_init)
    advantages = ls.DenseLayer(advantages, output_size, nonlinearity=None, b=b_init)
    state_value = ls.DenseLayer(network, 256, nonlinearity=rectify, W=w_init, b=b_init)
    state_value = ls.DenseLayer(state_value, 1, nonlinearity=None, b=b_init)
    network = DuellingMergeLayer([advantages, state_value])
    return network, input_layers, inputs
def _initialize_network(self, img_input_shape, misc_len, output_size, img_input, misc_input=None, **kwargs):
    """Build the DQN tower with the first four misc entries one-hot encoded.

    Each of the first ``health_inputs`` misc columns is shifted to 0-based
    and expanded to a 100-way one-hot vector fed in through its own
    InputLayer; the remaining misc columns are fed in unchanged.  All misc
    layers are concatenated with the flattened conv output.

    Returns the tuple (output_layer, input_layers, input_vars).
    """
    input_layers = []
    inputs = [img_input]
    # weights_init = lasagne.init.GlorotUniform("relu")
    w_init = lasagne.init.HeNormal("relu")
    b_init = lasagne.init.Constant(0.1)

    network = ls.InputLayer(shape=img_input_shape, input_var=img_input)
    input_layers.append(network)
    # Standard DQN conv trunk: (filters, size, stride) per layer.
    for n_filters, f_size, f_stride in ((32, 8, 4), (64, 4, 2), (64, 3, 1)):
        network = ls.Conv2DLayer(network, num_filters=n_filters, filter_size=f_size,
                                 nonlinearity=rectify, W=w_init, b=b_init,
                                 stride=f_stride)
    network = ls.FlattenLayer(network)

    if self.misc_state_included:
        health_inputs = 4
        units_per_health_input = 100
        layers_for_merge = []
        for i in range(health_inputs):
            # Misc values are 1-based; shift before one-hot encoding.
            oh_input = lasagne.utils.one_hot(misc_input[:, i] - 1, units_per_health_input)
            health_input_layer = ls.InputLayer(shape=(None, units_per_health_input),
                                               input_var=oh_input)
            inputs.append(oh_input)
            input_layers.append(health_input_layer)
            layers_for_merge.append(health_input_layer)

        misc_input_layer = ls.InputLayer(shape=(None, misc_len - health_inputs),
                                         input_var=misc_input[:, health_inputs:])
        input_layers.append(misc_input_layer)
        layers_for_merge.append(misc_input_layer)
        inputs.append(misc_input[:, health_inputs:])

        layers_for_merge.append(network)
        network = ls.ConcatLayer(layers_for_merge)

    network = ls.DenseLayer(network, 512, nonlinearity=rectify, W=w_init, b=b_init)
    network = ls.DenseLayer(network, output_size, nonlinearity=None,
                            b=lasagne.init.Constant(.1))
    return network, input_layers, inputs
def _initialize_network(self, img_input_shape, misc_len, output_size, img_input, misc_input=None, **kwargs):
    """Build the DQN tower with dense sub-networks for the health misc entries.

    Each of the first ``health_inputs`` misc columns is fed, as a single
    scalar, through two 100-unit dense layers before being merged; the
    remaining misc columns are fed in unchanged alongside the flattened
    conv output.

    Returns the tuple (output_layer, input_layers, input_vars).
    """
    input_layers = []
    inputs = [img_input]
    # weights_init = lasagne.init.GlorotUniform("relu")
    w_init = lasagne.init.HeNormal("relu")
    b_init = lasagne.init.Constant(0.1)

    network = ls.InputLayer(shape=img_input_shape, input_var=img_input)
    input_layers.append(network)
    # Standard DQN conv trunk: (filters, size, stride) per layer.
    for n_filters, f_size, f_stride in ((32, 8, 4), (64, 4, 2), (64, 3, 1)):
        network = ls.Conv2DLayer(network, num_filters=n_filters, filter_size=f_size,
                                 nonlinearity=rectify, W=w_init, b=b_init,
                                 stride=f_stride)
    network = ls.FlattenLayer(network)

    if self.misc_state_included:
        health_inputs = 4
        units_per_health_input = 100
        layers_for_merge = []
        for i in range(health_inputs):
            health_input_layer = ls.InputLayer(shape=(None, 1),
                                               input_var=misc_input[:, i:i + 1])
            # Two-layer dense embedding of the scalar health value.
            health_layer = ls.DenseLayer(health_input_layer, units_per_health_input,
                                         nonlinearity=rectify, W=w_init, b=b_init)
            health_layer = ls.DenseLayer(health_layer, units_per_health_input,
                                         nonlinearity=rectify, W=w_init, b=b_init)
            inputs.append(misc_input[:, i:i + 1])
            input_layers.append(health_input_layer)
            layers_for_merge.append(health_layer)

        misc_input_layer = ls.InputLayer(shape=(None, misc_len - health_inputs),
                                         input_var=misc_input[:, health_inputs:])
        input_layers.append(misc_input_layer)
        layers_for_merge.append(misc_input_layer)
        inputs.append(misc_input[:, health_inputs:])

        layers_for_merge.append(network)
        network = ls.ConcatLayer(layers_for_merge)

    network = ls.DenseLayer(network, 512, nonlinearity=rectify, W=w_init, b=b_init)
    network = ls.DenseLayer(network, output_size, nonlinearity=None,
                            b=lasagne.init.Constant(.1))
    return network, input_layers, inputs
def _initialize_network(self, img_input_shape, misc_len, output_size, img_input, misc_input=None, **kwargs):
    """Build the DQN tower with one-hot-encoded health and time misc entries.

    Misc columns 0-3 are one-hot encoded with 100 units each (health),
    columns 4-7 with 525 units each (time); any remaining misc columns
    are fed in unchanged.  Everything is concatenated with the flattened
    conv output before the dense head.

    Returns the tuple (output_layer, input_layers, input_vars).
    """
    input_layers = []
    inputs = [img_input]
    # weights_init = lasagne.init.GlorotUniform("relu")
    w_init = lasagne.init.HeNormal("relu")
    b_init = lasagne.init.Constant(0.1)

    network = ls.InputLayer(shape=img_input_shape, input_var=img_input)
    input_layers.append(network)
    # Standard DQN conv trunk: (filters, size, stride) per layer.
    for n_filters, f_size, f_stride in ((32, 8, 4), (64, 4, 2), (64, 3, 1)):
        network = ls.Conv2DLayer(network, num_filters=n_filters, filter_size=f_size,
                                 nonlinearity=rectify, W=w_init, b=b_init,
                                 stride=f_stride)
    network = ls.FlattenLayer(network)

    if self.misc_state_included:
        layers_for_merge = []

        health_inputs = 4
        units_per_health_input = 100
        for i in range(health_inputs):
            # Misc values are 1-based; shift before one-hot encoding.
            oh_input = lasagne.utils.one_hot(misc_input[:, i] - 1, units_per_health_input)
            health_input_layer = ls.InputLayer(shape=(None, units_per_health_input),
                                               input_var=oh_input)
            inputs.append(oh_input)
            input_layers.append(health_input_layer)
            layers_for_merge.append(health_input_layer)

        time_inputs = 4
        # TODO set this somewhere else cause it depends on skiprate and timeout ....
        units_pertime_input = 525
        for i in range(health_inputs, health_inputs + time_inputs):
            oh_input = lasagne.utils.one_hot(misc_input[:, i] - 1, units_pertime_input)
            time_input_layer = ls.InputLayer(shape=(None, units_pertime_input),
                                             input_var=oh_input)
            inputs.append(oh_input)
            input_layers.append(time_input_layer)
            layers_for_merge.append(time_input_layer)

        other_misc_input = misc_input[:, health_inputs + time_inputs:]
        other_misc_shape = (None, misc_len - health_inputs - time_inputs)
        other_misc_input_layer = ls.InputLayer(shape=other_misc_shape,
                                               input_var=other_misc_input)
        input_layers.append(other_misc_input_layer)
        layers_for_merge.append(other_misc_input_layer)
        inputs.append(other_misc_input)

        layers_for_merge.append(network)
        network = ls.ConcatLayer(layers_for_merge)

    network = ls.DenseLayer(network, 512, nonlinearity=rectify, W=w_init, b=b_init)
    network = ls.DenseLayer(network, output_size, nonlinearity=None,
                            b=lasagne.init.Constant(.1))
    return network, input_layers, inputs