dca.py file source code

python

Project: dca    Author: BenjoCowley
import numpy as np
from scipy.linalg import orth

# Helper functions used below -- get_recentered_matrix, get_total_dcov, and
# get_total_dcov_randomlysampled -- are defined elsewhere in dca.py.

def initialization(Xs, u_0s, results):
    # results is a dictionary from preprocessing
    # initialize U, U_orth, and dcovs
    # U_orth keeps track of the orthogonal space of U

    ### for the first dimension, initialize u with either user input or randomly
    num_datasets = results['num_datasets']
    u = []
    for iset in range(num_datasets):
        num_vars = Xs[iset].shape[0]
        if u_0s != [] and u_0s[iset].shape[0] == num_vars:
            # user supplied initial weights for the first dim: take the first column as a column vector
            u.append(u_0s[iset][:, 0].reshape(-1, 1))
        else:
            # random unit-norm column vector
            u.append(orth(np.random.randn(num_vars, 1)))

    ### get initial recentered matrices for each dataset based on u
    R = []
    if results['num_stoch_batch_samples'] == 0:  # only for full gradient descent
        for iset in range(num_datasets):
            R.append(get_recentered_matrix(u[iset], Xs[iset]))

        total_dcov = get_total_dcov(R, results['D_given'])
        total_dcov_old = total_dcov * 0.5  # set old value to half, so the first convergence check passes

    ### stochastic gradient descent initialization
    momented_gradf = []
    stoch_learning_rate = 1  # initial learning rate for SGD
    if results['num_stoch_batch_samples'] > 0:
        for iset in range(num_datasets):
            momented_gradf.append(np.zeros(u[iset].shape))  # momentum buffer, one per dataset

        total_dcov = get_total_dcov_randomlysampled(u, Xs, results['D_given'], results)
        total_dcov_old = total_dcov * 0.5

    return u, momented_gradf, R, total_dcov, total_dcov_old, stoch_learning_rate, results
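
For reference, a minimal calling sketch (not part of the original repository): it assumes the rest of dca.py is importable so that get_recentered_matrix and get_total_dcov are defined, that each dataset in Xs is shaped (num_vars, num_samples) as implied by Xs[iset].shape[0], and that an empty list is an acceptable placeholder for results['D_given'] when no precomputed distance matrices are supplied. Only the keys actually read by initialization() are filled in; the real dictionary comes from the preprocessing step and may contain more entries.

import numpy as np

# Two synthetic datasets, each shaped (num_vars, num_samples) -- assumed convention.
Xs = [np.random.randn(10, 200), np.random.randn(8, 200)]

# No user-supplied initial weights, so u is drawn randomly for each dataset.
u_0s = []

results = {
    'num_datasets': len(Xs),
    'num_stoch_batch_samples': 0,  # 0 selects the full-gradient branch
    'D_given': [],                 # assumed placeholder for "no given distance matrices"
}

u, momented_gradf, R, total_dcov, total_dcov_old, lr, results = \
    initialization(Xs, u_0s, results)

Setting 'num_stoch_batch_samples' to a positive value would instead exercise the stochastic branch, which skips building R and estimates the total distance covariance from randomly sampled batches.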