def __init__(self, hidden_layer_shape=None, weight_decay=1e-4,
             batch_normalization=True, activation='relu', save_fname=None,
             patience=6, lr=2e-3, min_lr=2e-6, verbose=2, mu=None,
             refit=False, gpu_list=None, optimizer=None, nb_epochs=1000,
             kernel_initializer='glorot_normal', lr_patience=3):
    """Configure a Keras ``Sequential`` model wrapper.

    Parameters
    ----------
    hidden_layer_shape : list of int, optional
        Units per hidden layer; defaults to ``[64]``. The default is now a
        ``None`` sentinel instead of a literal list, avoiding the shared
        mutable-default-argument pitfall.
    weight_decay : float
        Weight-decay (regularization) coefficient, stored on ``self.wd``.
    batch_normalization : bool
        Whether batch normalization is enabled (stored on ``self.bn``).
    activation : str
        Hidden-layer activation name.
    save_fname : str or None
        Filename used for saving the model, if any.
    patience : int
        Early-stopping patience, in epochs.
    lr, min_lr : float
        Initial and minimum learning rates.
    verbose : int
        Verbosity level passed through to training.
    mu : optional
        Stored as-is; semantics not visible here — presumably a data
        mean/target used elsewhere in the class (TODO confirm).
    refit : bool
        Not implemented; passing ``True`` raises ``NotImplementedError``.
    gpu_list : list or None
        GPUs to use, if any (stored on ``self.gpus``).
    optimizer : keras optimizer instance or None
        Optimizer to train with; ``None`` selects ``Nadam(lr)``.
    nb_epochs : int
        Maximum number of training epochs.
    kernel_initializer : str
        Keras kernel-initializer name (stored on ``self.ki``).
    lr_patience : int
        Epochs without improvement before the learning rate is reduced.

    Raises
    ------
    NotImplementedError
        If ``refit`` is True.
    """
    # Fail fast: don't build any state for a mode that is unsupported.
    # (Same exception and trigger as before, just raised earlier.)
    if refit:
        raise NotImplementedError('I have not implemented the refit functionality yet.')
    self.model = Sequential()
    # None sentinel -> fresh default list per call (mutable-default fix).
    self.hidden = [64] if hidden_layer_shape is None else hidden_layer_shape
    self.wd = weight_decay
    self.bn = batch_normalization
    self.activation = activation
    self.fname = save_fname
    self.patience = patience
    self.lr = lr
    self.min_lr = min_lr
    self.verbose = verbose
    self.mu = mu
    self.epochs = nb_epochs
    self.refit = refit
    self.gpus = gpu_list
    self.ki = kernel_initializer
    self.lr_patience = lr_patience
    # BUG FIX: the original assigned self.opt only when optimizer was None,
    # silently discarding a caller-supplied optimizer and leaving self.opt
    # unset in that case. Honor the argument when given.
    self.opt = Nadam(self.lr) if optimizer is None else optimizer
# (removed non-code blog-scrape residue: "评论列表" [comment list] /
#  "文章目录" [table of contents] — these lines were not Python)