def __prepare_model(self):
    """Build and compile the Keras classification model.

    Architecture (old Keras 1.x API):
      1. ``TimeDistributedDense`` — a sigmoid projection applied independently
         at every timestep: (input_length, input_dim) -> (input_length, hidden_cnt).
      2. ``Lambda`` averaging over the time axis (axis=1) — a stand-in for
         ``TimeDistributedMerge(mode='ave')``, which could no longer be imported.
      3. ``Dropout(0.93755)`` — NOTE(review): an unusually aggressive dropout
         rate; presumably tuned empirically — confirm it is intentional.
      4. A tanh hidden layer followed by a softmax output over the classes.

    Returns:
        The compiled ``Sequential`` model (categorical cross-entropy loss,
        Adagrad optimizer with lr=0.01).
    """
    print('Build model...')
    model = Sequential()
    # Per-timestep sigmoid projection. On newer Keras versions this would be
    # spelled TimeDistributed(Dense(...)); this file targets the 1.x API.
    model.add(TimeDistributedDense(output_dim=self.hidden_cnt,
                                   input_dim=self.input_dim,
                                   input_length=self.input_length,
                                   activation='sigmoid'))
    # Average over the time axis, collapsing (batch, time, features) to
    # (batch, features); replaces the removed TimeDistributedMerge(mode='ave').
    model.add(Lambda(function=lambda x: K.mean(x, axis=1),
                     output_shape=lambda shape: (shape[0],) + shape[2:]))
    # NOTE(review): very high dropout probability — verify this magic value.
    model.add(Dropout(0.93755))
    model.add(Dense(self.hidden_cnt, activation='tanh'))
    model.add(Dense(self.output_dim, activation='softmax'))
    print('Compile model...')
    # Adagrad replaced an earlier SGD(lr=0.1, momentum=0.9) configuration.
    adagrad = keras.optimizers.Adagrad(lr=0.01, epsilon=1e-08, decay=0.0)
    model.compile(loss='categorical_crossentropy', optimizer=adagrad)
    return model
# Source: perceptron.py (scraped blog-page metadata removed:
# "source code / python / 25 reads / 0 favorites / 0 likes / 0 comments")