def get_layer_names(self):
    """
    :return: Names of all the layers kept by Keras
    """
    layer_names = [x.name for x in self.model.layers]
    return layer_names
def fprop(self, x):
    """
    Exposes all the layers of the model returned by get_layer_names.
    :param x: A symbolic representation of the network input
    :return: A dictionary mapping layer names to the symbolic
             representation of their output.
    """
    from keras.models import Model as KerasModel

    if self.keras_model is None:
        # Get the input layer
        new_input = self.model.get_input_at(0)
        # Make a new model that returns each of the layers as output
        out_layers = [x_layer.output for x_layer in self.model.layers]
        self.keras_model = KerasModel(new_input, out_layers)

    # and get the outputs for that model on the input x
    outputs = self.keras_model(x)

    # Keras returns a list only when there are two or more outputs; if the
    # model has a single layer, wrap the lone output in a list
    if len(self.model.layers) == 1:
        outputs = [outputs]

    # compute the dict to return
    fprop_dict = dict(zip(self.get_layer_names(), outputs))
    return fprop_dict
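# Hedged usage sketch for fprop (the enclosing wrapper class is assumed to be
# built around a compiled Keras `model` and to initialise `keras_model` to
# None; the names below are illustrative, not part of the snippet above):
#
#   x = tf.placeholder(tf.float32, shape=(None,) + input_shape)
#   per_layer = wrapper.fprop(x)
#   last_name = wrapper.get_layer_names()[-1]
#   logits = per_layer[last_name]   # symbolic output of the final layer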
def _build_block_model(inputs, block):
    if isinstance(inputs, list) and len(inputs) == 1:
        inputs = inputs[0]
    if block.input_layers:
        for layer in block.input_layers:
            inputs = _build_layer_model(inputs, layer)
    for layer in block.layers:
        inputs = _build_layer_model(inputs, layer)
    return inputs
def _get_layer_model(layer_type):
    if is_custom_layer(layer_type):
        return get_custom_layer(layer_type)[0]
    modules = [keras.layers, keras.layers.normalization]
    for module in modules:
        # getattr with a default avoids the AttributeError a bare getattr
        # raises when the module lacks the attribute
        model = getattr(module, layer_type, None)
        if model:
            return model
    return None
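# Quick illustrative checks of the lookup, assuming a stock Keras install
# where 'Dense' is a standard layer name:
#
#   _get_layer_model('Dense')        # -> keras.layers.Dense
#   _get_layer_model('NoSuchLayer')  # -> None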
def copy_weights(teacher_model, student_model, layer_names):
    '''Copy weights from teacher_model to student_model,
    for layers with names listed in layer_names
    '''
    for name in layer_names:
        weights = teacher_model.get_layer(name=name).get_weights()
        student_model.get_layer(name=name).set_weights(weights)
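# A minimal sketch of copy_weights in use, assuming the teacher and student
# share identically named (and identically shaped) layers; the two toy models
# below are illustrative only.
from keras.models import Sequential
from keras.layers import Dense

teacher = Sequential([Dense(32, input_dim=8, name='shared_dense'),
                      Dense(10, name='teacher_head')])
student = Sequential([Dense(32, input_dim=8, name='shared_dense'),
                      Dense(4, name='student_head')])
copy_weights(teacher, student, layer_names=['shared_dense'])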
# methods to construct teacher_model and student_models
def __init__(self, cp):
    print("Building network ...")
    # First, we build the network, starting with an input layer.
    # Recurrent layers expect input of shape
    # (batch size, SEQ_LENGTH, num_features);
    # this is the placeholder tensor for the input sequences
    sequence = Input(shape=(maxlen,), dtype='int32')
    # this embedding layer will transform the sequences of integers
    # into vectors of size 128
    embedded = Embedding(max_features, 128, input_length=maxlen)(sequence)
    # apply forwards LSTM
    forwards = LSTM(64)(embedded)
    # apply backwards LSTM
    backwards = LSTM(64, go_backwards=True)(embedded)
    # concatenate the outputs of the 2 LSTMs (Keras 2 API; the removed Keras 1
    # call was merge([forwards, backwards], mode='concat', concat_axis=-1))
    merged = concatenate([forwards, backwards], axis=-1)
    after_dp = Dropout(0.5)(merged)
    output = Dense(1, activation='sigmoid')(after_dp)
    self.model = Model(inputs=sequence, outputs=output)
    # try using different optimizers and different optimizer configs
    self.model.compile('adam', 'binary_crossentropy', metrics=['accuracy'])
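# An equivalent formulation, offered as a hedged sketch: Keras 2's
# Bidirectional wrapper expresses the forwards/backwards LSTM pair plus the
# concatenation in a single layer. maxlen and max_features are assumed to be
# module-level constants, as above.
from keras.layers import Input, Embedding, LSTM, Bidirectional, Dropout, Dense
from keras.models import Model

sequence = Input(shape=(maxlen,), dtype='int32')
embedded = Embedding(max_features, 128, input_length=maxlen)(sequence)
# merge_mode='concat' reproduces the explicit concatenation above
bidir = Bidirectional(LSTM(64), merge_mode='concat')(embedded)
output = Dense(1, activation='sigmoid')(Dropout(0.5)(bidir))
model = Model(inputs=sequence, outputs=output)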
def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a time distributed `keras.models.Model` according to the ResNet18 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> y = keras_resnet.models.TimeDistributedResNet18(x)
        >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output)
        >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y)
        >>> model = keras.models.Model(x, y)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [2, 2, 2, 2]

    return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_basic_2d, include_top=include_top, classes=classes, *args, **kwargs)
def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a time distributed `keras.models.Model` according to the ResNet34 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> y = keras_resnet.models.TimeDistributedResNet34(x)
        >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output)
        >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y)
        >>> model = keras.models.Model(x, y)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [3, 4, 6, 3]

    return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_basic_2d, include_top=include_top, classes=classes, *args, **kwargs)
def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a time distributed `keras.models.Model` according to the ResNet50 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> y = keras_resnet.models.TimeDistributedResNet50(x)
        >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output)
        >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y)
        >>> model = keras.models.Model(x, y)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [3, 4, 6, 3]

    return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs)
def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a time distributed `keras.models.Model` according to the ResNet101 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> y = keras_resnet.models.TimeDistributedResNet101(x)
        >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output)
        >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y)
        >>> model = keras.models.Model(x, y)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [3, 4, 23, 3]

    return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs)
def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a time distributed `keras.models.Model` according to the ResNet152 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> y = keras_resnet.models.TimeDistributedResNet152(x)
        >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output)
        >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y)
        >>> model = keras.models.Model(x, y)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [3, 8, 36, 3]

    return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs)
def ResNet18(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a `keras.models.Model` according to the ResNet18 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> model = keras_resnet.models.ResNet18(x, classes=classes)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [2, 2, 2, 2]

    return ResNet(inputs, blocks, block=keras_resnet.blocks.basic_2d, include_top=include_top, classes=classes, *args, **kwargs)
def ResNet34(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a `keras.models.Model` according to the ResNet34 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> model = keras_resnet.models.ResNet34(x, classes=classes)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [3, 4, 6, 3]

    return ResNet(inputs, blocks, block=keras_resnet.blocks.basic_2d, include_top=include_top, classes=classes, *args, **kwargs)
def ResNet50(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a `keras.models.Model` according to the ResNet50 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> model = keras_resnet.models.ResNet50(x, classes=classes)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [3, 4, 6, 3]
    numerical_names = [False, False, False, False]

    return ResNet(inputs, blocks, numerical_names=numerical_names, block=keras_resnet.blocks.bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs)
def ResNet101(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a `keras.models.Model` according to the ResNet101 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> model = keras_resnet.models.ResNet101(x, classes=classes)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [3, 4, 23, 3]
    numerical_names = [False, True, True, False]

    return ResNet(inputs, blocks, numerical_names=numerical_names, block=keras_resnet.blocks.bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs)
def ResNet152(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs):
    """
    Constructs a `keras.models.Model` according to the ResNet152 specifications.

    :param inputs: input tensor (e.g. an instance of `keras.layers.Input`)
    :param blocks: the network’s residual architecture
    :param include_top: if true, includes classification layers
    :param classes: number of classes to classify (include_top must be true)

    :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`)

    Usage:
        >>> import keras_resnet.models
        >>> shape, classes = (224, 224, 3), 1000
        >>> x = keras.layers.Input(shape)
        >>> model = keras_resnet.models.ResNet152(x, classes=classes)
        >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])
    """
    if blocks is None:
        blocks = [3, 8, 36, 3]
    numerical_names = [False, True, True, False]

    return ResNet(inputs, blocks, numerical_names=numerical_names, block=keras_resnet.blocks.bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs)
def __init__(self, n_in, hidden_layer_size, n_out, hidden_layer_type, output_type='linear', dropout_rate=0.0, loss_function='mse', optimizer='adam'):
    """ This function initialises a neural network.

    :param n_in: dimensionality of input features
    :param hidden_layer_size: the layer size for each hidden layer
    :param n_out: dimensionality of output features
    :param hidden_layer_type: the activation type of each hidden layer, e.g. TANH, LSTM, GRU, BLSTM
    :param output_type: the activation type of the output layer; the default, 'linear', gives linear regression
    :param dropout_rate: probability of dropout, a float between 0 and 1
    :param loss_function: the loss to minimise, e.g. 'mse'
    :param optimizer: the optimiser to train with, e.g. 'adam'
    :type n_in: Integer
    :type hidden_layer_size: A list of integers
    :type n_out: Integer
    """
    self.n_in = int(n_in)
    self.n_out = int(n_out)

    self.n_layers = len(hidden_layer_size)
    self.hidden_layer_size = hidden_layer_size
    self.hidden_layer_type = hidden_layer_type
    assert len(self.hidden_layer_size) == len(self.hidden_layer_type)

    self.output_type = output_type
    self.dropout_rate = dropout_rate
    self.loss_function = loss_function
    self.optimizer = optimizer

    # create model
    self.model = Sequential()
def define_feedforward_model(self):
    seed = 12345
    np.random.seed(seed)

    # add hidden layers, each followed by dropout
    for i in range(self.n_layers):
        if i == 0:
            input_size = self.n_in
        else:
            input_size = self.hidden_layer_size[i - 1]
        self.model.add(Dense(
            units=self.hidden_layer_size[i],
            activation=self.hidden_layer_type[i],
            kernel_initializer="normal",
            input_dim=input_size))
        self.model.add(Dropout(self.dropout_rate))

    # add output layer (Sequential.add returns None, so there is nothing
    # useful to assign from this call)
    self.model.add(Dense(
        units=self.n_out,
        activation=self.output_type.lower(),
        kernel_initializer="normal",
        input_dim=self.hidden_layer_size[-1]))

    # Compile the model
    self.compile_model()
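# Hypothetical end-to-end usage sketch (the enclosing class name and its
# compile_model method are not shown in this snippet; `FeedForwardNet` is an
# assumed name for illustration only):
#
#   net = FeedForwardNet(n_in=425, hidden_layer_size=[512, 512], n_out=187,
#                        hidden_layer_type=['tanh', 'tanh'])
#   net.define_feedforward_model()
#   net.model.summary()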
def concatenate(x):
    if hasattr(keras.layers, 'Concatenate'):
        # Keras 2 API
        return keras.layers.Concatenate()(x)
    else:
        # Keras 1 API
        return keras.layers.merge(x, mode='concat')


def add(x):
    if hasattr(keras.layers, 'Add'):
        # Keras 2 API
        return keras.layers.Add()(x)
    else:
        # Keras 1 API
        return keras.layers.merge(x, mode='sum')
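# A short usage sketch for the version-agnostic helpers above: either branch
# yields the same tensors, whichever Keras API is installed.
import keras

a = keras.layers.Input(shape=(16,))
b = keras.layers.Input(shape=(16,))
summed = add([a, b])           # elementwise sum, shape (None, 16)
stacked = concatenate([a, b])  # concatenation along the last axis, shape (None, 32)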