def pretraining_function(self, train_set_x, batch_size):
    '''Build one Theano pretraining function per dA layer.

    Each returned function trains its corresponding denoising autoencoder
    for one step on a minibatch selected by index, and takes optional
    corruption level and learning rate arguments.

    :param train_set_x: theano.tensor.TensorType, shared variable holding
        the datapoints used for training the dA layers
    :param batch_size: int, size of a [mini]batch
    :return: list of compiled Theano functions, one per layer in
        self.dA_layers, each with signature (index, corruption=0.2, lr=0.1)
        returning the training cost for that minibatch
    '''
    # Symbolic minibatch index and the two tunable hyperparameters;
    # corruption/lr are symbolic so each call may override the defaults.
    index = T.lscalar('index')
    corruption_level = T.scalar('corruption')  # fraction of input to corrupt
    learning_rate = T.scalar('lr')             # SGD step size

    # Slice boundaries of minibatch `index` within the training set
    batch_begin = index * batch_size
    batch_end = batch_begin + batch_size

    pretrain_fns = []
    for dA in self.dA_layers:  # one training function per dA layer
        # Symbolic cost and parameter updates for one SGD step on this layer
        cost, updates = dA.get_cost_updates(corruption_level,
                                            learning_rate)
        # Compile the step function. NOTE(review): theano.Param is the
        # legacy spelling of theano.In — kept as-is for compatibility with
        # the Theano version this code targets.
        fn = theano.function(
            inputs=[index,
                    theano.Param(corruption_level, default=0.2),
                    theano.Param(learning_rate, default=0.1)],
            outputs=cost,
            updates=updates,
            givens={self.x: train_set_x[batch_begin:batch_end]})
        pretrain_fns.append(fn)
    return pretrain_fns
# (page-scrape residue, translated) Comment list
# (page-scrape residue, translated) Article table of contents