def _buildModel(self):
self.updates_ack= True
X = T.matrix('X', dtype=config.floatX)
Y = T.matrix('Y', dtype=config.floatX)
X.tag.test_value, Y.tag.test_value = self._fakeData()
#output_params_t= T.nnet.sigmoid(self._LinearNL(self.tWeights['W_lr'], self.tWeights['b_lr'], X, onlyLinear=True))
output_params_t= T.nnet.sigmoid(self._BNlayer(self.tWeights['W_lr'], self.tWeights['b_lr'], X, validation=False, onlyLinear=True))
nll_t = T.nnet.binary_crossentropy(output_params_t, Y).sum()
#output_params_e = T.nnet.sigmoid(self._LinearNL(self.tWeights['W_lr'], self.tWeights['b_lr'], X, onlyLinear=True))
output_params_e= T.nnet.sigmoid(self._BNlayer(self.tWeights['W_lr'], self.tWeights['b_lr'], X, validation=True, onlyLinear=True))
nll_e = T.nnet.binary_crossentropy(output_params_e, Y).sum()
if not self.params['validate_only']:
model_params = self._getModelParams()
print len(self.updates),' extraneous updates'
optimizer_up, norm_list = self._setupOptimizer(nll_t,
model_params,
lr=self.params['lr'],
divide_grad = T.cast(X.shape[0],config.floatX))
optimizer_up+=self.updates
self.train = theano.function([X,Y], [nll_t,self.tWeights['_lr_BN_running_mean'], self.tWeights['_lr_BN_running_var']], updates = optimizer_up)
self.evaluate = theano.function([X,Y],nll_e)
# NOTE(review): removed stray page-scrape residue ("评论列表" / "文章目录" —
# blog navigation text, not Python) that made the file syntactically invalid.