def makEnsemble( X, xlist, Y ):
    """Train an ensemble of classifiers on two views of the training data.

    Models cheap enough to run on a reduced feature set are fit on ``xlist``
    (the feature-selected matrix) and appended to the module-level list
    ``featureSelectModel``; tree-based models are fit on the full feature
    matrix ``X`` and appended to the module-level list ``wholeFeatureModel``.

    Parameters
    ----------
    X : array-like
        Full training feature matrix.
    xlist : array-like
        Feature-selected training matrix (same rows as ``X``).
    Y : array-like
        Target labels shared by both matrices.

    Side effects: mutates the module-level lists ``featureSelectModel`` and
    ``wholeFeatureModel``; returns None.
    """
    # --- models fit on the feature-selected matrix ---
    # Naive Bayes
    clf = MultinomialNB()
    clf.fit( xlist, Y )
    featureSelectModel.append (clf)
    # K nearest neighbours
    clf = KNeighborsClassifier()
    clf.fit( xlist, Y )
    featureSelectModel.append (clf)
    # Logistic regression
    clf = LogisticRegression(C=1)
    clf.fit( xlist, Y )
    featureSelectModel.append (clf)
    # --- models fit on the full feature matrix ---
    # Random forest
    clf = RandomForestClassifier(n_estimators = 400)
    clf.fit( X, Y )
    wholeFeatureModel.append (clf)
    # Extremely randomized trees
    clf = ExtraTreesClassifier(n_estimators = 400)
    clf.fit( X, Y )
    wholeFeatureModel.append (clf)
    # Decision tree
    # NOTE: min_samples_split must be >= 2 (or a float fraction) in
    # scikit-learn >= 0.18; the original value of 1 raises ValueError.
    clf = DecisionTreeClassifier(max_depth=None, min_samples_split=2, random_state=0)
    clf.fit( X, Y )
    wholeFeatureModel.append (clf)
    # Gradient boosting (same min_samples_split fix as above)
    params = {'n_estimators': 500, 'max_depth': 4, 'min_samples_split': 2,
                    'learning_rate': 0.01}
    clf = GradientBoostingClassifier(**params)
    clf.fit( X, Y )
    wholeFeatureModel.append (clf)
# (stray blog-page text, commented out so the file parses)
# 评论列表 — "comment list"
# 文章目录 — "article table of contents"