Layer.py source code

python

Project: Theano-NN_Starter    Author: nightinwhite
def CTC_train(self):
    # Assumed module-level imports (not shown in this snippet):
    #   import numpy as np
    #   import theano
    #   import theano.tensor as T
    #   from theano.tensor.shared_randomstreams import RandomStreams
    # Mean CTC loss over the batch, cast to float32.
    CTC_LOSSs = T.cast(T.mean(self.CTC_LOSS(), axis=0), "float32")
    train_data_d = []    # (possibly dropped) gradient for each parameter
    train_data_m = []    # symbolic squared-gradient accumulators for this step
    train_data_m_s = []  # shared variables holding the running accumulators
    learning_rate = T.scalar()
    decay = T.scalar()
    seed = np.random.randint(int(1e7))
    rng = RandomStreams(seed=seed)
    grad_rate = 0.8  # probability of keeping a parameter's gradient this step
    for data in self.train_data:
        # Zero out the whole gradient of this parameter with probability
        # 1 - grad_rate (a coarse form of gradient dropout).
        data_d = rng.binomial((1,), p=grad_rate, dtype="float32")[0] * T.grad(CTC_LOSSs, data)
        train_data_d.append(data_d)
        # Shared accumulator of squared gradients, initialised to zeros.
        data_m_s = theano.shared(np.zeros(data.get_value().shape).astype(np.float32))
        train_data_m_s.append(data_m_s)
        # RMSProp-style moving average of squared gradients for this step.
        data_m = data_m_s * decay + (1 - decay) * data_d ** 2
        train_data_m.append(data_m)
    #self.grad_test = theano.function([self.X, self.Y], train_data_d[-4])
    #self.data_d_print = theano.function([self.X, self.Y], train_data_d[0][0])
    #upd = [(d, d - learning_rate*d_d) for d, d_d in zip(self.train_data, train_data_d)]
    # Parameter update: theta <- theta - lr * g / sqrt(E[g^2] + 1e-4).
    upd = [(d, d - learning_rate * d_d / T.sqrt(d_m + 1e-4))
           for d, d_d, d_m in zip(self.train_data, train_data_d, train_data_m)]
    # Persist the accumulator update into the shared variables.
    upd1 = [(d_m_s, decay * d_m_s + (1 - decay) * d_d ** 2)
            for d_m_s, d_d in zip(train_data_m_s, train_data_d)]
    upd += upd1
    #self.test = theano.function([self.X, self.Y], train_data_d[0])
    self.sgd_train = theano.function([self.X, self.Y, learning_rate, decay],
                                     [],
                                     updates=upd)
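
The compiled self.sgd_train function takes a batch of inputs, the targets, a learning rate, and the decay factor, and applies all parameter and accumulator updates in place. Below is a minimal sketch of how a training loop might call it; the class name Layer, the load_batches iterator, and the hyperparameter values are illustrative assumptions, only the (X, Y, learning_rate, decay) call signature comes from the snippet above.

# Hypothetical training loop for the compiled update function above.
# `Layer`, `load_batches`, X_batch and Y_batch are placeholder names;
# only the call signature is taken from the snippet itself.
model = Layer()          # assumed class that defines CTC_train()
model.CTC_train()        # compiles model.sgd_train

learning_rate = 1e-3     # illustrative value, not from the source
decay = 0.9              # typical RMSProp-style decay, not from the source

for epoch in range(10):
    for X_batch, Y_batch in load_batches():   # hypothetical data iterator
        # One update step: modifies the shared parameters and the
        # squared-gradient accumulators registered in `updates=upd`.
        model.sgd_train(X_batch, Y_batch, learning_rate, decay)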