def train(args):
    """Train an EmbeddingTagger on CCGBank data with the Chainer Trainer loop.

    Expects ``args`` to carry: ``model`` (model/output directory), ``embed``
    (pretrained embedding file), ``initmodel`` (optional .npz checkpoint to
    resume from), ``train``/``val`` (dataset files), ``batchsize`` and
    ``epoch``.  Side effects: writes snapshots, logs, and a computation-graph
    dump under ``args.model``.
    """
    # Embedding sizes (word=50, suffix=20, caps=30?) are hard-coded here —
    # NOTE(review): confirm what each dimension means in EmbeddingTagger.
    model = EmbeddingTagger(args.model, 50, 20, 30)
    model.setup_training(args.embed)
    if args.initmodel:
        # Resume from a previously serialized checkpoint.
        print('Load model from', args.initmodel)
        chainer.serializers.load_npz(args.initmodel, model)

    # Renamed from `train`/`val`: the original local `train` shadowed this
    # function's own name, which breaks any recursive/reentrant reference
    # and confuses readers.
    train_data = CCGBankDataset(args.model, args.train)
    train_iter = chainer.iterators.SerialIterator(train_data, args.batchsize)
    val_data = CCGBankDataset(args.model, args.val)
    # Validation must see each example exactly once, in order.
    val_iter = chainer.iterators.SerialIterator(
        val_data, args.batchsize, repeat=False, shuffle=False)

    optimizer = chainer.optimizers.SGD(lr=0.01)
    optimizer.setup(model)
    updater = training.StandardUpdater(train_iter, optimizer)
    trainer = training.Trainer(updater, (args.epoch, 'epoch'), args.model)

    # Trigger tuples: (count, unit) as Chainer expects.
    val_interval = 5000, 'iteration'
    log_interval = 200, 'iteration'

    # Evaluate on a copy of the model so evaluation-time state (e.g. train
    # mode flags) does not perturb the training model.
    val_model = model.copy()
    trainer.extend(extensions.Evaluator(val_iter, val_model),
                   trigger=val_interval)
    trainer.extend(extensions.dump_graph('main/loss'))
    # Full-trainer snapshot (for resuming) plus a model-only snapshot
    # (for inference), both every `val_interval`.
    trainer.extend(extensions.snapshot(), trigger=val_interval)
    trainer.extend(extensions.snapshot_object(
        model, 'model_iter_{.updater.iteration}'), trigger=val_interval)
    trainer.extend(extensions.LogReport(trigger=log_interval))
    trainer.extend(extensions.PrintReport([
        'epoch', 'iteration', 'main/loss', 'validation/main/loss',
        'main/accuracy', 'validation/main/accuracy',
    ]), trigger=log_interval)
    trainer.extend(extensions.ProgressBar(update_interval=10))
    trainer.run()
# (removed stray web-page residue that was pasted into the source:
#  "评论列表" = "comment list", "文章目录" = "article table of contents" —
#  bare non-Python text here would raise NameError at import time)