def _print_ml_analytics_results_random_forest(self):
try:
final_model_obj = self.trained_final_model.named_steps['final_model']
except:
final_model_obj = self.trained_final_model
print('\n\nHere are the results from our ' + final_model_obj.model_name)
if self.name is not None:
print(self.name)
print('predicting ' + self.output_column)
# XGB's Classifier has a proper .feature_importances_ property, while the XGBRegressor does not.
if final_model_obj.model_name in ['XGBRegressor', 'XGBClassifier']:
self._get_xgb_feat_importances(final_model_obj.model)
else:
trained_feature_names = self._get_trained_feature_names()
try:
trained_feature_importances = final_model_obj.model.feature_importances_
except AttributeError as e:
# There was a version of LightGBM that had this misnamed to miss the "s" at the end
trained_feature_importances = final_model_obj.model.feature_importance_
feature_infos = zip(trained_feature_names, trained_feature_importances)
sorted_feature_infos = sorted(feature_infos, key=lambda x: x[1])
print('Here are the feature_importances from the tree-based model:')
print('The printed list will only contain at most the top 50 features.')
for feature in sorted_feature_infos[-50:]:
print(feature[0] + ': ' + str(round(feature[1], 4)))