import numpy as np
import numpy.linalg as la
import torch

def semi_nmf(x, iter=30):
    '''
    Semi-Nonnegative Matrix Factorization.
    Returns a feature matrix F and a representation matrix G by minimizing the
    Frobenius norm ||X - FG^T||^2. The only constraint is that the elements of G be nonnegative.
    Args:
        x: input matrix X (a torch tensor, n * m)
        iter: number of iterations of the optimization algorithm
    Returns:
        f: feature matrix F
        g: representation matrix G
    '''
    x = x.numpy()  # n * m
    f, g, p = svd_initialization(x)
    if p < 2:
        raise ValueError("The number of components (r) has to be >= 2.")
    for i in range(iter):
        # Update F with the closed-form least-squares solution F = X G (G^T G)^+
        f = np.dot(x, np.dot(g, la.pinv(np.dot(g.T, g))))
        f = np.nan_to_num(f)
        # Split X^T F and G F^T F into their positive and negative parts
        Ap = (abs(np.dot(x.T, f)) + np.dot(x.T, f)) / 2  # m * r
        An = (abs(np.dot(x.T, f)) - np.dot(x.T, f)) / 2
        Bp = (abs(np.dot(g, np.dot(f.T, f))) + np.dot(g, np.dot(f.T, f))) / 2
        Bn = (abs(np.dot(g, np.dot(f.T, f))) - np.dot(g, np.dot(f.T, f))) / 2
        # Guard the denominator against exact zeros before dividing
        C = An + Bp
        C[C == 0] += 0.0001
        # Multiplicative update keeps G nonnegative: G <- G * sqrt((Ap + Bn) / (An + Bp))
        g = g * np.sqrt((Ap + Bn) / C)
        g = np.nan_to_num(g)
    return torch.from_numpy(f), torch.from_numpy(g)
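
The helper svd_initialization is called above but not shown. The sketch below is a minimal guess at what it could look like, based only on how it is used: it must return a feature matrix F (n * r), a nonnegative representation matrix G (m * r), and the number of components. The parameter name r, its default value, and the truncated-SVD recipe are assumptions, not part of the original code.

def svd_initialization(x, r=2):
    # x: n * m data matrix; r: number of components (name and default are assumptions)
    u, s, vt = la.svd(x, full_matrices=False)
    f = u[:, :r] * s[:r]            # n * r feature matrix from the truncated SVD
    g = np.maximum(vt[:r, :].T, 0)  # m * r representation matrix, clipped to be nonnegative
    g = g + 0.0001                  # keep entries strictly positive for the multiplicative update
    return f, g, r

With such a helper in place, a call might look like this (shapes and iteration count are purely illustrative):

x = torch.rand(100, 40).double()  # hypothetical n * m input
f, g = semi_nmf(x, iter=50)
print(f.shape, g.shape)           # expected: (n, r) and (m, r)
err = torch.norm(x - f @ g.T)     # Frobenius reconstruction error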