def train(self):
    """Fit the reduced-rank discriminant transform.

    Runs the parent training step, then builds the projection matrix
    ``self.A`` (shape (L, p)) so the parent's ``predict`` can be reused:
    the within-class covariance W is whitened through its eigendecomposition,
    the between-class scatter B is diagonalized in the whitened space, and
    the leading L discriminant directions are stored as rows of ``self.A``.
    """
    super().train()

    W = self.Sigma_hat          # pooled within-class covariance, (p, p)
    Pi = self.Pi                # class prior probabilities, (K, 1)
    Mu = self.Mu                # class centroids, (K, p)
    p = self.p                  # feature dimension
    K = self.n_class            # number of classes
    L = self.L                  # target (reduced) dimension

    # Overall mean: prior-weighted average of the centroids, shape (p,).
    grand_mean = np.sum(Pi * Mu, axis=0)

    # Between-class scatter: B = sum_k Pi_k (mu_k - mu)(mu_k - mu)^T.
    # np.outer(d, d) builds the (p, p) rank-one matrix d d^T.
    B = np.zeros((p, p))
    for k in range(K):
        centered = Mu[k] - grand_mean
        B += Pi[k] * np.outer(centered, centered)

    # Eigendecompose W. NOTE: np.linalg.eigh returns eigenvalues in
    # ASCENDING order (opposite of R's eigen), so flip to descending.
    eigvals_w, eigvecs_w = LA.eigh(W)
    eigvals_w = eigvals_w[::-1]
    eigvecs_w = eigvecs_w[:, ::-1]

    # Whitening transform W^{-1/2} via pseudo-inverse of diag(sqrt(d)) U^T.
    # NOTE(review): `self.math` is set elsewhere — presumably a linalg
    # module exposing `pinv`; confirm against the class constructor.
    W_half = self.math.pinv(np.diagflat(np.sqrt(eigvals_w)) @ eigvecs_w.T)

    # Between-class scatter in the whitened coordinates.
    B_star = W_half.T @ B @ W_half

    # Principal directions of B*; flip eigenvectors to descending order too.
    _, V = LA.eigh(B_star)
    V = V[:, ::-1]

    # Map the top-L whitened directions back: row l of A is W^{-1/2} v_l.
    # Overwrites `self.A` so the parent class's `predict` works unchanged.
    self.A = np.stack([W_half @ V[:, j] for j in range(L)], axis=0)