def model(data, hidden_layers, hidden_neurons, output_file, validation_split=0.9):
    """Train a denoising autoencoder on `data` and return the encoded representation.

    Builds a symmetric autoencoder: Gaussian noise on the input, `hidden_layers`
    ReLU encoder layers sized by `hidden_neurons`, more noise on the bottleneck,
    then a mirrored ReLU decoder and a sigmoid reconstruction layer. The best
    model (by validation loss) is checkpointed to
    'data/bestmodel<output_file>.hdf5'.

    NOTE(review): this uses the legacy Keras 1.x API (`Model(input=, output=)`,
    `fit_generator`, `samples_per_epoch`, `nb_epoch`) — do not run under Keras 2+
    without porting.

    Args:
        data: 2-D array of samples, shape (n_samples, n_features); values are
            presumably scaled to [0, 1] since the output activation is sigmoid —
            TODO confirm against the caller.
        hidden_layers: number of encoder layers; assumed >= 2, since the decoder
            starts from hidden_neurons[-2] — TODO confirm.
        hidden_neurons: sequence of layer widths, one per encoder layer
            (len == hidden_layers assumed).
        output_file: suffix used to name the checkpoint file.
        validation_split: fraction of `data` (from the front) used for training;
            the remainder is used for validation. Defaults to 0.9.

    Returns:
        Encoding of all of `data` (including the validation rows), as produced
        by the noisy-bottleneck encoder sub-model.
    """
    train_n = int(validation_split * len(data))
    batch_size = 50
    # Front slice trains, tail slice validates — data is assumed pre-shuffled.
    train_data = data[:train_n, :]
    val_data = data[train_n:, :]

    # --- Encoder: input noise, then stacked ReLU layers, then bottleneck noise.
    input_sh = Input(shape=(data.shape[1],))
    encoded = noise.GaussianNoise(0.2)(input_sh)
    for i in range(hidden_layers):
        encoded = Dense(hidden_neurons[i], activation='relu')(encoded)
    encoded = noise.GaussianNoise(0.2)(encoded)

    # --- Decoder: mirror the encoder widths (skip the bottleneck width itself),
    # ending with a sigmoid layer that reconstructs the original feature space.
    decoded = Dense(hidden_neurons[-2], activation='relu')(encoded)
    for j in range(hidden_layers - 3, -1, -1):
        decoded = Dense(hidden_neurons[j], activation='relu')(decoded)
    decoded = Dense(data.shape[1], activation='sigmoid')(decoded)

    autoencoder = Model(input=input_sh, output=decoded)
    autoencoder.compile(optimizer='adadelta', loss='mse')

    # Keep only the best-validation-loss weights; stop after 15 stale epochs.
    checkpointer = ModelCheckpoint(filepath='data/bestmodel' + output_file + ".hdf5",
                                   verbose=1, save_best_only=True)
    earlystopper = EarlyStopping(monitor='val_loss', patience=15, verbose=1)

    # Autoencoder target == input: each generator is fit on (x, x).
    train_generator = DataGenerator(batch_size)
    train_generator.fit(train_data, train_data)
    val_generator = DataGenerator(batch_size)
    val_generator.fit(val_data, val_data)

    autoencoder.fit_generator(train_generator,
                              samples_per_epoch=len(train_data),
                              nb_epoch=100,
                              validation_data=val_generator,
                              nb_val_samples=len(val_data),
                              max_q_size=batch_size,
                              callbacks=[checkpointer, earlystopper])

    # Encoder sub-model shares the trained layers; predict() does not require
    # compile(), so the redundant enco.compile() call was dropped.
    enco = Model(input=input_sh, output=encoded)
    reprsn = enco.predict(data)
    return reprsn
# (removed scraped web-page artifacts: "评论列表" [comment list] / "文章目录" [article TOC])