import logging

import numpy as np
from scipy.special import betaincinv
from sklearn.linear_model import LogisticRegressionCV

logger = logging.getLogger(__name__)


def logistic_fidelity(self):
    # Group data and assign state labels.
    gnd_features = np.hstack([np.real(self.ground_data.T),
                              np.imag(self.ground_data.T)])
    ex_features = np.hstack([np.real(self.excited_data.T),
                             np.imag(self.excited_data.T)])
    # liblinear wants arrays in C order.
    features = np.ascontiguousarray(np.vstack([gnd_features, ex_features]))
    state = np.ascontiguousarray(np.hstack([np.zeros(self.ground_data.shape[1]),
                                            np.ones(self.excited_data.shape[1])]))
    # Set up logistic regression with cross-validation using liblinear.
    # Cs sets the inverse of the regularization strength, which will be
    # optimized through cross-validation. Uses the default stratified
    # K-folds CV generator, with 3 folds.
    # This is set up to be as consistent with the MATLAB implementation
    # as I can make it. --GJR
    Cs = np.logspace(-1, 2, 5)
    logreg = LogisticRegressionCV(Cs=Cs, cv=3, solver='liblinear')
    logreg.fit(features, state)  # fit the model
    predictions = logreg.predict(features)  # in-sample classification
    score = logreg.score(features, state)  # mean accuracy of classification
    N = len(predictions)
    S = np.sum(predictions == state)  # how many we got right
    # Now calculate confidence intervals.
    c = 0.95
    flo = betaincinv(S + 1, N - S + 1, (1 - c) / 2.)
    fhi = betaincinv(S + 1, N - S + 1, (1 + c) / 2.)
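    # Note (added comment): betaincinv inverts the regularized incomplete
    # beta function, i.e. the Beta CDF. With S successes in N trials and a
    # flat prior, the posterior over the true fidelity is Beta(S+1, N-S+1),
    # so flo and fhi are its 2.5% and 97.5% quantiles: a 95% credible
    # interval around the in-sample accuracy.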
    logger.info("In-sample logistic regression fidelity: "
                "{:.2f}% ({:.2f}, {:.2f})".format(100 * score, 100 * flo, 100 * fhi))