def main(resume=None):
    """Train the convolutional network on the 'ubiquitous' dataset.

    Loads the hickle-serialized training set, shuffles it, builds a
    1-D convolutional network (expressed as Conv2D over a (1, L) axis),
    and fits it with Adam under a decaying learning rate. Final weights
    are written to ./models/net_params.pkl.

    Parameters
    ----------
    resume : str or None
        Optional path to a pickled parameter file; if given, training
        resumes from those weights instead of a fresh initialization.
    """
    seq_len = 300  # input sequence length; 4 channels — presumably one-hot DNA (A/C/G/T), TODO confirm
    dataset = './data/ubiquitous_train.hkl'
    print('Loading dataset {}...'.format(dataset))
    X_train, y_train = hkl.load(dataset)
    # Reshape to NCHW with a dummy height of 1 so Conv2D layers act as 1-D convolutions.
    X_train = X_train.reshape(-1, 4, 1, seq_len).astype(floatX)
    y_train = np.array(y_train, dtype='int32')
    # Shuffle samples and labels with a single shared permutation.
    indices = np.arange(X_train.shape[0])
    np.random.shuffle(indices)
    X_train = X_train[indices]
    y_train = y_train[indices]
    print('X_train shape: {}, y_train shape: {}'.format(X_train.shape, y_train.shape))
    layers = [
        (InputLayer, {'shape': (None, 4, 1, seq_len)}),
        (Conv2DLayer, {'num_filters': 64, 'filter_size': (1, 4)}),
        (Conv2DLayer, {'num_filters': 64, 'filter_size': (1, 3)}),
        (Conv2DLayer, {'num_filters': 64, 'filter_size': (1, 3)}),
        (MaxPool2DLayer, {'pool_size': (1, 2)}),
        (Conv2DLayer, {'num_filters': 64, 'filter_size': (1, 2)}),
        (Conv2DLayer, {'num_filters': 64, 'filter_size': (1, 2)}),
        (Conv2DLayer, {'num_filters': 64, 'filter_size': (1, 2)}),
        (MaxPool2DLayer, {'pool_size': (1, 2)}),
        (DenseLayer, {'num_units': 64}),
        (DropoutLayer, {}),
        (DenseLayer, {'num_units': 64}),
        (DenseLayer, {'num_units': 2, 'nonlinearity': softmax})]
    # Shared variable so AdjustVariable can anneal the learning rate between epochs.
    lr = theano.shared(np.float32(1e-4))
    net = NeuralNet(
        layers=layers,
        max_epochs=100,
        update=adam,
        update_learning_rate=lr,
        train_split=TrainSplit(eval_size=0.1),
        on_epoch_finished=[
            AdjustVariable(lr, target=1e-8, half_life=20)],
        verbose=4)
    if resume is not None:  # fix: identity comparison, not != None
        net.load_params_from(resume)
    net.fit(X_train, y_train)
    net.save_params_to('./models/net_params.pkl')
# NOTE(review): the two lines below are scraped-page navigation residue
# ("评论列表" = comment list, "文章目录" = table of contents), not code.
# Commented out so the module parses; safe to delete entirely.
# 评论列表
# 文章目录