def search_model(experiment_label, steps, batch_size=32):
""" This is where we put everythin together.
We get the dataset, build the Training and Experiment objects, and run the experiment.
The experiments logs are generated in ~/minos/experiment_label
We use the CpuEnvironment to have the experiment run on the cpu, with 2 parralel processes.
We could use GpuEnvironment to use GPUs, and specify which GPUs to use, and how many tasks
per GPU
"""
    batch_iterator, test_batch_iterator, nb_classes = get_reuters_dataset(batch_size, max_words)
    layout = build_layout(max_words, nb_classes)
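    # Training definition: loss objective, optimizer, evaluation metric,
    # stopping condition, and batch size.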
    training = Training(
        Objective('categorical_crossentropy'),
        Optimizer(optimizer='Adam'),
        Metric('categorical_accuracy'),
        epoch_stopping_condition(),
        batch_size)
    parameters = custom_experiment_parameters()
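    # The Experiment bundles the layout, training definition, data iterators,
    # execution environment, and search parameters.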
    experiment = Experiment(
        experiment_label,
        layout,
        training,
        batch_iterator,
        test_batch_iterator,
        CpuEnvironment(n_jobs=1),
        parameters=parameters)
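    # Run the genetic-algorithm search: evolve a population of 100 candidate
    # models for `steps` generations.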
    run_ga_search_experiment(
        experiment,
        population_size=100,
        generations=steps,
        resume=False,
        log_level='DEBUG')
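

# A minimal usage sketch. The experiment label and step count below are
# illustrative values, not taken from the original example:
if __name__ == '__main__':
    search_model('reuters_experiment', steps=100)

# To search on GPUs instead, swap CpuEnvironment for GpuEnvironment as the
# docstring suggests. The argument names below are an assumption about the
# minos API, not confirmed by the source; check the minos code for the
# exact signature:
#     GpuEnvironment(devices=['gpu:0', 'gpu:1'], n_jobs=2)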