# Imports this method relies on (assumed to sit at the top of signal_extractor.py);
# distcorr and QualityMeasure.calc_nwp are helpers defined elsewhere in the file.
import numpy as np
from minepy import MINE
from sklearn.feature_selection import chi2
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import LeaveOneOut
from sklearn.metrics import accuracy_score


def basic_quality(self, target, feature_vector):
    """Score feature_vector against target; lower values indicate a stronger feature."""
    assert len(target) == len(feature_vector)
    if self.quality == 'NWP':
        # Sort the targets by ascending and by descending feature value,
        # score each ordering, and keep the better (smaller) of the two.
        sort_data_p = np.array([t for (f, t) in sorted(zip(feature_vector, target), key=lambda pair: pair[0])])
        sort_data_n = np.array([t for (f, t) in sorted(zip(-1.0 * feature_vector, target), key=lambda pair: pair[0])])
        p_nwp = QualityMeasure.calc_nwp(sort_data_p)
        n_nwp = QualityMeasure.calc_nwp(sort_data_n)
        return min(n_nwp, p_nwp)
    if self.quality == 'corrcoef':
        # Absolute Pearson correlation between feature and target.
        return 1 - abs(np.corrcoef(target, feature_vector)[0][1])
    if self.quality == 'mutual_info':
        # Maximal information coefficient (MIC) from minepy.
        m = MINE()
        m.compute_score(target, feature_vector)
        return 1.0 - m.mic()
    if self.quality == 'chi2':
        # Chi-squared statistic; the feature is made non-negative, as chi2 requires.
        return 1 - chi2(abs(feature_vector.reshape(len(feature_vector), 1)), target)[0][0]
    if self.quality == 'distcorr':
        # Distance correlation between feature and target.
        return 1 - distcorr(target, feature_vector)
    if self.quality == 'distree':
        # Importance of the feature relative to a random baseline feature
        # (self.random_feature) in a shallow decision tree.
        data = np.column_stack((feature_vector, self.random_feature))
        clf = DecisionTreeClassifier(max_depth=5, random_state=0)
        clf.fit(data, target)
        return 1.0 - clf.feature_importances_[0]
    if self.quality == 'knnscore':
        # Leave-one-out accuracy of a k-NN classifier trained on the single feature.
        scores = []
        data = np.array([feature_vector]).transpose()
        loo = LeaveOneOut()
        for train, test in loo.split(data):
            clf = KNeighborsClassifier()
            clf.fit(data[train], target[train])
            scores.append(accuracy_score(target[test], clf.predict(data[test])))
        return 1.0 - np.mean(scores)
    raise ValueError('unknown quality measure: {!r}'.format(self.quality))
Source: signal_extractor.py (Python)
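The distcorr helper called in the 'distcorr' branch is defined elsewhere in signal_extractor.py and is not shown in this excerpt. For reference, below is a minimal sketch of the distance-correlation statistic it presumably computes (Székely's dCor); the name distcorr_sketch and the implementation details are assumptions, not the author's code.

import numpy as np

def distcorr_sketch(x, y):
    # Distance correlation between two 1-D samples (Szekely's dCor); this is a
    # sketch, not the distcorr helper actually used in signal_extractor.py.
    x = np.asarray(x, dtype=float).reshape(-1, 1)
    y = np.asarray(y, dtype=float).reshape(-1, 1)
    a = np.abs(x - x.T)                                           # pairwise distances within x
    b = np.abs(y - y.T)                                           # pairwise distances within y
    A = a - a.mean(axis=0) - a.mean(axis=1)[:, None] + a.mean()   # double-centred distance matrix
    B = b - b.mean(axis=0) - b.mean(axis=1)[:, None] + b.mean()
    dcov2 = max((A * B).mean(), 0.0)                              # squared distance covariance
    dvar2 = (A * A).mean() * (B * B).mean()                       # product of squared distance variances
    return 0.0 if dvar2 == 0 else np.sqrt(dcov2) / dvar2 ** 0.25

If an extra dependency is acceptable, the dcor package on PyPI provides a maintained implementation of the same statistic.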
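For orientation, here is a hedged usage sketch. Only the basic_quality signature and the quality / random_feature attributes come from the code above; the QualityMeasure constructor and the synthetic data are assumptions made for illustration.

import numpy as np

rng = np.random.RandomState(0)
target = rng.randint(0, 2, size=200)        # binary labels
feature = target + 0.5 * rng.randn(200)     # a feature correlated with the labels

qm = QualityMeasure()                       # assumed: a no-argument constructor
qm.quality = 'corrcoef'                     # any of the names handled above
qm.random_feature = rng.randn(200)          # baseline used by the 'distree' branch

score = qm.basic_quality(target, feature)   # closer to 0 means a stronger feature
print(score)

Under these assumptions, the informative feature should score well below an uninformative column such as qm.random_feature.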