import theano.tensor as T


def propUp(self, vis):
    """Propagate the visible units' activations upward to the hidden units.

    Note that we also return the pre-sigmoid activation of the
    layer. As it will turn out later, due to how Theano deals with
    optimizations, this symbolic variable will be needed to write
    down a more numerically stable computational graph (see details
    in the reconstruction cost function).
    """
    pre_sigmoid_activation = T.dot(vis, self.w) + self.hbias
    return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]
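
# A minimal usage sketch (illustration only, not part of the original
# class): `_TinyRBM` is a hypothetical stand-in that carries just the two
# parameters propUp reads, self.w of shape (n_visible, n_hidden) and
# self.hbias of shape (n_hidden,), with made-up sizes. Returning the
# pre-sigmoid term is what later lets Theano rewrite log(sigmoid(x)) into
# a stable form when the reconstruction cost is built.
import numpy
import theano


class _TinyRBM(object):
    def __init__(self, n_visible=6, n_hidden=4, seed=0):
        rng = numpy.random.RandomState(seed)
        # Shared parameters with hypothetical sizes, for illustration only.
        self.w = theano.shared(
            rng.randn(n_visible, n_hidden).astype(theano.config.floatX),
            name='w')
        self.hbias = theano.shared(
            numpy.zeros(n_hidden, dtype=theano.config.floatX), name='hbias')

    propUp = propUp  # reuse the function defined above as a method


v = T.matrix('v')
rbm = _TinyRBM()
pre_sigmoid_h, h_mean = rbm.propUp(v)
# Compile and evaluate the hidden-unit probabilities for a small batch.
get_hidden = theano.function([v], h_mean)
print(get_hidden(numpy.ones((3, 6), dtype=theano.config.floatX)))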