from typing import Iterator

import numpy as np
from chainer import Variable, optimizers


def train(dataset: DataSet, n_iter: int = 3000, batch_size: int = 25) -> Iterator[AutoEncoder]:
    n = dataset.size
    input_dimension = dataset.input.shape[1]
    hidden_dimension = 2
    model = AutoEncoder(input_dimension, hidden_dimension)
    optimizer = optimizers.Adam()
    optimizer.setup(model)
    for j in range(n_iter):
        # Visit the training set in a fresh random order each epoch.
        shuffled = np.random.permutation(n)
        for i in range(0, n, batch_size):
            indices = shuffled[i:i + batch_size]
            x = Variable(dataset.input[indices])
            # Clear accumulated gradients, compute the reconstruction loss,
            # backpropagate, and apply one Adam update.
            model.cleargrads()
            loss = model(x)
            loss.backward()
            optimizer.update()
        # Yield the model once per epoch so the caller can track progress.
        yield model
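
Because train is a generator, the caller drives the training loop by iterating over it. The following is a minimal usage sketch, assuming a DataSet instance built as described earlier in the article; the epoch logging interval is an arbitrary choice for illustration.

# Usage sketch: consume the generator to run training.
# `dataset` is assumed to be a DataSet prepared earlier.
for epoch, model in enumerate(train(dataset, n_iter=1000, batch_size=25), start=1):
    if epoch % 100 == 0:
        print(f"finished epoch {epoch}")
# After the loop, `model` holds the AutoEncoder from the final epoch.

Yielding the model after every epoch keeps the training loop free of logging or plotting code; intermediate snapshots can be inspected, saved, or discarded entirely at the call site.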