def __init__(self, training_filename: str, hyperparameters: dict, combination_type='eqnet'):
    self.__hyperparameters = hyperparameters
    # Extract the tree dataset from the training file and build the encoder RNN over it.
    self.__dataset_extractor = TreeDatasetExtractor(training_filename)
    self.__rng = RandomStreams()
    self.__rnn = RNN(self.__hyperparameters['memory_size'], self.__hyperparameters, self.__rng,
                     self.__dataset_extractor, combination_type=combination_type)
    check_hyperparameters(self.REQUIRED_HYPERPARAMETERS | self.__rnn.required_hyperparameters,
                          self.__hyperparameters)
    # One embedding column per equivalence class, scaled by 10 ** log_init_scale_embedding.
    target_embeddings = np.random.randn(self.__hyperparameters['memory_size'],
                                        self.__dataset_extractor.num_equivalent_classes) \
                        * 10 ** self.__hyperparameters['log_init_scale_embedding']
    self.__target_embeddings = theano.shared(target_embeddings.astype(theano.config.floatX),
                                             name="target_embeddings")
    self.__target_embeddings_dropout = dropout(self.__hyperparameters['dropout_rate'], self.__rng,
                                               self.__target_embeddings, True)
    # Bias each class by the log of its empirical frequency in the training data.
    self.__target_bias = np.log(self.__dataset_extractor.training_empirical_distribution)
    self.__trainable_params = list(self.__rnn.get_params().values()) + [self.__target_embeddings]
    self.__compiled_methods = None
    self.__trained_parameters = None
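
For context, a minimal usage sketch of this constructor follows. The enclosing class name (Encoder), the training filename, and the concrete hyperparameter values are assumptions made for illustration; only the keys the constructor actually reads ('memory_size', 'log_init_scale_embedding', 'dropout_rate') are grounded in the code above, and check_hyperparameters may require additional keys for the chosen RNN.

# Hypothetical usage sketch; class name, filename, and values are assumptions.
hyperparameters = {
    'memory_size': 64,                # dimensionality of the learned representations
    'log_init_scale_embedding': -1,   # target embeddings start at scale 10 ** -1
    'dropout_rate': 0.1,              # dropout applied to the target embeddings
    # ...plus whatever check_hyperparameters() requires for the chosen RNN
}
encoder = Encoder('trainset.json.gz', hyperparameters, combination_type='eqnet')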