def __init__(self, x, y, n_x, n_y):
    """Build the symbolic graph for a softmax (multi-class logistic regression) classifier.

    Parameters
    ----------
    x : symbolic matrix, shape (n_examples, n_x)
        Input minibatch.
    y : symbolic int vector, shape (n_examples,)
        Correct class index for each example.
    n_x : int
        Number of input features.
    n_y : int
        Number of output classes.
    """
    # Initialize the weights to zeros, shape (n_in, n_out).  With softmax
    # this yields a uniform initial prediction, and the NLL cost is convex,
    # so a zero start is fine.
    self.w = theano.shared(
        value=numpy.zeros((n_x, n_y), dtype=theano.config.floatX),
        name='w',
        borrow=True
    )
    # Initialize the biases b as a vector of n_out zeros.
    self.b = theano.shared(
        value=numpy.zeros((n_y,), dtype=theano.config.floatX),
        name='b',
        borrow=True
    )
    # Trainable parameters, in the order expected by gradient updates.
    self.params = [self.w, self.b]
    # Save the symbolic inputs.
    self.x = x
    self.y = y
    # Class-membership probabilities: softmax over the linear scores.
    # Stored on self (previously a local) so callers can compile a
    # prediction function without rebuilding the graph.
    self.p_y_given_x = T.nnet.softmax(T.dot(self.x, self.w) + self.b)
    # Predicted class = index of the maximal probability (also exposed).
    self.y_pred = T.argmax(self.p_y_given_x, axis=1)
    # Mean zero-one error over the minibatch.
    self.error = T.mean(T.neq(self.y_pred, self.y))
    # Cost: mean negative log-likelihood of the correct classes.
    self.cost = -T.mean(T.log(self.p_y_given_x)[T.arange(self.y.shape[0]), self.y])
# (web-page extraction residue, not code: "评论列表" = comment list, "文章目录" = table of contents)