def train(self, sentences, iterations=1000):
    """Train the neural probabilistic language model (NPLM) on raw sentences.

    Builds the vocabulary, converts sentences to index sequences, derives
    (context, next-word) training pairs, and fits the flattened parameter
    vector by stochastic gradient descent.

    Parameters
    ----------
    sentences : iterable
        Corpus to train on; consumed by ``build_dictionary`` / ``to_indices``.
    iterations : int, optional
        Number of gradient-descent iterations (default 1000).

    Returns
    -------
    cost_history : list-like
        Per-iteration cost values from ``gradient_descent``.

    Side effects: sets ``self.dictionary``, ``self.reverse_dictionary`` and
    ``self.parameters``.
    """
    # Preprocess sentences to create indices of context and next words
    self.dictionary = build_dictionary(sentences, self.vocabulary_size)
    indices = to_indices(sentences, self.dictionary)
    self.reverse_dictionary = {index: word for word, index in self.dictionary.items()}
    inputs, outputs = self.create_context(indices)

    # Create cost and gradient function for gradient descent over the
    # flattened concatenation of all parameter matrices.
    shapes = [self.W_shape, self.U_shape, self.H_shape, self.C_shape]
    flatten_nplm_cost_gradient = flatten_cost_gradient(nplm_cost_gradient, shapes)
    cost_gradient = bind_cost_gradient(flatten_nplm_cost_gradient, inputs, outputs,
                                       sampler=get_stochastic_sampler(10))

    # Total parameter count = sum of element counts of each matrix.
    # FIX: the original used np.sum over a generator with np.product —
    # NumPy cannot vectorize a generator, and np.product was removed in
    # NumPy 2.0; use builtin sum with np.prod instead.
    parameters_size = sum(int(np.prod(shape)) for shape in shapes)
    initial_parameters = np.random.normal(size=parameters_size)

    # Train neural network
    self.parameters, cost_history = gradient_descent(cost_gradient, initial_parameters, iterations)
    return cost_history
# NOTE(review): removed stray scraped-page text ("评论列表" / "文章目录",
# i.e. "comment list" / "table of contents") — it was not valid Python.