def get_model(input_shape, output_shape, params):
    """Build and compile a Keras (1.x API) CNN from hyperparameters.

    Parameters
    ----------
    input_shape : tuple
        Batch input shape, e.g. ``(n, channels, h, w)``; ``input_shape[1:]``
        feeds the first layer and ``input_shape[-1]`` is taken as the
        spatial edge length.
    output_shape : tuple
        Batch target shape; ``output_shape[1]`` is the number of classes.
    params : dict
        Hyperparameters: 'cl' number of conv layers, 'k' feature-map
        multiplier, 'fp' feature-map growth policy (via get_FeatureMaps),
        'a' LeakyReLU alpha, 'pt' pooling type ('Avg'/'Max'), 'pf' pooling
        fraction, 'do' dropout rate, 'opt' optimizer name, 'obj' objective
        key resolved through get_Obj.

    Returns
    -------
    keras.models.Sequential
        The compiled model.

    Exits the process (sys.exit) on an invalid pooling type, optimizer,
    or objective.
    """
    print('compiling model...')

    # Fail fast: validate configuration BEFORE spending time building layers
    # (the original validated only after full model construction).
    if params['opt'] not in ['Adam', 'Adagrad', 'SGD']:
        sys.exit('Wrong optimizer: Please select one of the following. Adam, Adagrad, SGD')
    if get_Obj(params['obj']) not in ['MSE', 'categorical_crossentropy']:
        sys.exit('Wrong Objective: Please select one of the following. MSE, categorical_crossentropy')

    # Edge length of the last convolutional feature map: each 2x2 'valid'
    # convolution shrinks the map by 1 (e.g. 32x32 input, 5 conv layers -> 27).
    fm_size = input_shape[-1] - params['cl']
    # Pooling window for the final pooling layer, as a fraction of fm_size
    # (computed once instead of twice).
    pool_edge = int(np.round(fm_size * params['pf']))
    pool_siz = (pool_edge, pool_edge)

    model = Sequential()

    # Convolutional stack: 2x2 kernels, orthogonal init, LeakyReLU activations.
    model.add(Convolution2D(params['k'] * get_FeatureMaps(1, params['fp']), 2, 2,
                            init='orthogonal', input_shape=input_shape[1:]))
    model.add(LeakyReLU(params['a']))
    for i in range(2, params['cl'] + 1):
        model.add(Convolution2D(params['k'] * get_FeatureMaps(i, params['fp']), 2, 2,
                                init='orthogonal'))
        model.add(LeakyReLU(params['a']))

    # Pooling and flattening.
    if params['pt'] == 'Avg':
        model.add(AveragePooling2D(pool_size=pool_siz))
    elif params['pt'] == 'Max':
        model.add(MaxPooling2D(pool_size=pool_siz))
    else:
        sys.exit("Wrong type of Pooling layer")
    model.add(Flatten())
    model.add(Dropout(params['do']))

    # Dense head. BUG FIX: the original wrote int(k*fm)/pf*6, i.e. it applied
    # int() BEFORE the true-division by the float 'pf', so Dense received a
    # float unit count. Wrap the whole expression in int() instead.
    last_fm = params['k'] * get_FeatureMaps(params['cl'], params['fp'])
    model.add(Dense(int(last_fm / params['pf'] * 6), init='he_uniform'))
    model.add(LeakyReLU(0))
    model.add(Dropout(params['do']))
    model.add(Dense(int(last_fm / params['pf'] * 2), init='he_uniform'))
    model.add(LeakyReLU(0))
    model.add(Dropout(params['do']))
    model.add(Dense(output_shape[1], init='he_uniform', activation='softmax'))

    # Compile with the validated optimizer and objective.
    model.compile(optimizer=params['opt'], loss=get_Obj(params['obj']))
    return model