def forward(self, x=None, t=None):
    # default to the tensor currently on the Deel context stack
    if x is None:
        x = Tensor.context
    xp = Deel.xp  # numpy on CPU, cupy on GPU
    # skip building the backprop graph when not training
    volatile = 'off' if Deel.train else 'on'
    # xp.asarray (rather than np.asarray) keeps the data on the same device as the model
    h = Variable(xp.asarray(x.value, dtype=xp.float32), volatile=volatile)
    self.optimizer.zero_grads()
    # walk the layers in order: linear transform, activation, then dropout
    for i in range(len(self.layers)):
        h = F.dropout(self.activation(self.layers['l' + str(i)](h)), train=Deel.train)
    # wrap the Chainer Variable back into a Deel tensor and push it onto the context
    h = ChainerTensor(h)
    h.use()
    return h
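For context, here is a minimal, self-contained sketch of the same forward-pass pattern outside Deel, assuming Chainer v1-style APIs (Variable(..., volatile=...), F.dropout(..., train=...), and parameterized F.Linear functions). The layer names, sizes, and the standalone forward helper below are hypothetical; they only illustrate how a dictionary of layers keyed 'l0', 'l1', ... is walked as in the method above.

import numpy as np
from chainer import Variable
import chainer.functions as F

# three fully connected layers keyed 'l0', 'l1', 'l2', as the loop above expects
layers = {
    'l0': F.Linear(784, 256),
    'l1': F.Linear(256, 128),
    'l2': F.Linear(128, 10),
}

def forward(x, train=True):
    volatile = 'off' if train else 'on'
    h = Variable(np.asarray(x, dtype=np.float32), volatile=volatile)
    for i in range(len(layers)):
        # activation then dropout after every layer, mirroring the Deel code above
        h = F.dropout(F.relu(layers['l' + str(i)](h)), train=train)
    return h

# usage: one flattened 28x28 image as a batch of size 1
y = forward(np.random.rand(1, 784))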