def __init__(self, memory_size: int, num_node_types: int, max_num_children: int, hyperparameters: dict,
             rng: RandomStreams, name: str = "single_layer_combination"):
    """Initialize a single-layer combination module.

    Creates per-node-type weight and bias tensors as Theano shared
    variables, plus a dropout-masked view of the weights for training.

    :param memory_size: dimensionality of each node representation
    :param num_node_types: number of distinct node types (one W/b per type)
    :param max_num_children: maximum children per node; the weight maps the
        concatenation of all child representations back to memory_size
    :param hyperparameters: must contain "log_init_scale_embedding" (log10 of
        the init scale) and "dropout_rate"
    :param rng: Theano random stream used for the dropout mask
    :param name: prefix for the shared-variable names
    """
    self.__memory_size = memory_size
    self.__rng = rng
    self.__name = name
    self.__hyperparameters = hyperparameters

    # Initialization scale is specified in log10 in the hyperparameters.
    init_scale = 10 ** self.__hyperparameters["log_init_scale_embedding"]

    # One weight matrix per node type: (memory_size, max_num_children * memory_size).
    weights = np.random.randn(num_node_types, memory_size, max_num_children * memory_size) * init_scale
    self.__w = theano.shared(weights.astype(theano.config.floatX), name=name + ":w")

    # One bias vector per node type.
    biases = np.random.randn(num_node_types, memory_size) * init_scale
    self.__bias = theano.shared(biases.astype(theano.config.floatX), name=name + ":b")

    # Dropout-masked weights used during training (flag True = training mode
    # — NOTE(review): confirm against the project's dropout() helper).
    self.__w_with_dropout = dropout(
        self.__hyperparameters['dropout_rate'], self.__rng, self.__w, True)