def _evaluation_test_helper(self, class_labels, use_probability_estimates, allow_slow, allowed_prob_delta=0.00001):
    """Fit NuSVC models over a grid of kernel/non-kernel parameters, convert each
    with scikit_converter, and check the converted spec against sklearn's output.

    Parameters:
        class_labels: sequence of labels to sample targets from; each label is
            also forced to appear early in y so the converter sees every class.
        use_probability_estimates: if True, compare per-class probabilities
            (within allowed_prob_delta); otherwise compare hard predictions.
        allow_slow: if False, only the first parameter combination is tested.
        allowed_prob_delta: maximum tolerated absolute probability error.
    """
    # Parameters to test
    kernel_parameters = [{}, {'kernel': 'rbf', 'gamma': 1.2},
                         {'kernel': 'linear'},
                         {'kernel': 'poly'}, {'kernel': 'poly', 'degree': 2}, {'kernel': 'poly', 'gamma': 0.75},
                         {'kernel': 'poly', 'degree': 0, 'gamma': 0.9, 'coef0': 2},
                         {'kernel': 'sigmoid'}, {'kernel': 'sigmoid', 'gamma': 1.3}, {'kernel': 'sigmoid', 'coef0': 0.8},
                         {'kernel': 'sigmoid', 'coef0': 0.8, 'gamma': 0.5}
                         ]
    non_kernel_parameters = [{}, {'nu': 0.75}, {'nu': 0.25, 'shrinking': True}, {'shrinking': False}]

    # Generate some random data; fixed seed keeps the test deterministic.
    x, y = [], []
    random.seed(42)
    for _ in range(50):
        x.append([random.gauss(200, 30), random.gauss(-100, 22), random.gauss(100, 42)])
        y.append(random.choice(class_labels))
    column_names = ['x1', 'x2', 'x3']
    # Make sure first label is seen first, second is seen second, and so on,
    # so the fitted model's class ordering matches class_labels.
    for i, val in enumerate(class_labels):
        y[i] = val
    df = pd.DataFrame(x, columns=column_names)

    # Test every combination of non-kernel and kernel parameters.
    for param1 in non_kernel_parameters:
        for param2 in kernel_parameters:
            cur_params = param1.copy()
            cur_params.update(param2)
            cur_params['probability'] = use_probability_estimates
            cur_params['max_iter'] = 10  # Don't want test to take too long

            cur_model = NuSVC(**cur_params)
            cur_model.fit(x, y)
            spec = scikit_converter.convert(cur_model, column_names, 'target')

            if use_probability_estimates:
                probability_lists = cur_model.predict_proba(x)
                df['classProbability'] = [dict(zip(cur_model.classes_, cur_vals)) for cur_vals in probability_lists]
                metrics = evaluate_classifier_with_probabilities(spec, df, probabilities='classProbability')
                # assertEquals is a deprecated alias (removed in Python 3.12);
                # use assertEqual instead.
                self.assertEqual(metrics['num_key_mismatch'], 0)
                self.assertLess(metrics['max_probability_error'], allowed_prob_delta)
            else:
                df['prediction'] = cur_model.predict(x)
                metrics = evaluate_classifier(spec, df, verbose=False)
                self.assertEqual(metrics['num_errors'], 0)

            if not allow_slow:
                break
        if not allow_slow:
            break