import time

import numpy as np

# dim_num, max_iters, learning_rate, dis_coef, alpha, lamda, model_dir and the
# sigmoid helper are module-level settings defined elsewhere in the post.
def embedding_learning(train_data, user_dic, artist_dic, context_list, n_users, n_items):
    # User embeddings, initialised with small Gaussian noise
    UC = np.random.normal(0.0, 0.01, (n_users, dim_num))
    # Item (artist) embeddings
    IC = np.random.normal(0.0, 0.01, (n_items, dim_num))
    try:
        for iteration in range(max_iters):
            print 'loading...iteration: %d' % iteration
            t = time.time()
            for each_data in train_data:
                u_i, i, w_i = each_data        # user id, artist id, raw weight
                w_i = w_i ** dis_coef          # dampen the raw weight
                for u_j in context_list[u_i]:  # context users of u_i
                    # Nudge the artist embedding along the difference of the two
                    # user embeddings, with L2 shrinkage on the artist vector
                    IC[artist_dic[i]] += learning_rate * ((1 - sigmoid(w_i)) * 2 * alpha * (UC[user_dic[u_i]] - UC[user_dic[u_j]]) - 2 * lamda * IC[artist_dic[i]])
                    # Pull both user embeddings towards the artist embedding,
                    # again with L2 shrinkage
                    UC[user_dic[u_i]] += learning_rate * ((1 - sigmoid(w_i)) * 2 * alpha * (IC[artist_dic[i]] - UC[user_dic[u_i]]) - 2 * lamda * UC[user_dic[u_i]])
                    UC[user_dic[u_j]] += learning_rate * ((1 - sigmoid(w_i)) * 2 * alpha * (IC[artist_dic[i]] - UC[user_dic[u_j]]) - 2 * lamda * UC[user_dic[u_j]])
            print 'Iter: %d elapsed: %f seconds' % (iteration, time.time() - t)
    finally:
        # Persist the embeddings even if training is interrupted
        np.save(model_dir + 'Item_Emb', IC)
        np.save(model_dir + 'User_Emb', UC)
        np.savetxt(model_dir + 'Item_Emb.txt', IC)
        np.savetxt(model_dir + 'User_Emb.txt', UC)
        print 'Model saved...'
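For completeness, here is a minimal sketch of how the function could be driven end to end, assuming it sits in the same script. The sigmoid helper, hyperparameter values, and toy dictionaries below are illustrative placeholders, not the settings or data used in the post; in practice train_data, user_dic, artist_dic and context_list would be built from the real play-count data.

# Illustrative setup (placeholder values; the real ones are defined elsewhere in the post)
import numpy as np

def sigmoid(x):
    # standard logistic function, assumed to match the helper used above
    return 1.0 / (1.0 + np.exp(-x))

dim_num = 32          # embedding dimension
max_iters = 5         # passes over train_data
learning_rate = 0.01
dis_coef = 0.5        # exponent that dampens the raw weight w_i
alpha = 1.0           # strength of the attraction terms
lamda = 0.01          # L2 regularisation coefficient
model_dir = './'      # where the .npy / .txt embeddings are written

# Toy data: two users who listen to one artist and appear in each other's context
user_dic = {'u1': 0, 'u2': 1}                # user id   -> row index in UC
artist_dic = {'a1': 0}                       # artist id -> row index in IC
context_list = {'u1': ['u2'], 'u2': ['u1']}  # context users for each user
train_data = [('u1', 'a1', 3.0), ('u2', 'a1', 1.0)]  # (user, artist, weight)

embedding_learning(train_data, user_dic, artist_dic, context_list,
                   n_users=len(user_dic), n_items=len(artist_dic))

After training, the saved embeddings can be reloaded with np.load(model_dir + 'User_Emb.npy') and np.load(model_dir + 'Item_Emb.npy') for downstream recommendation.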