def propDown(self, hid):
    """Propagate hidden-unit activations down to the visible layer.

    Computes the linear pre-activation ``hid . W^T + vbias`` and its
    squashed value via ``activation``.

    Both the pre-sigmoid value and the activation are returned: Theano's
    graph optimizations need the symbolic pre-sigmoid expression to build
    a numerically stable reconstruction-cost graph (see the cost
    function for details).

    :param hid: symbolic hidden-layer activations.
    :return: ``[pre_sigmoid, activation(pre_sigmoid)]``.
    """
    linear_term = T.dot(hid, self.w.T)
    pre_sigmoid = linear_term + self.vbias
    return [pre_sigmoid, activation(pre_sigmoid)]