# ClassificationUniformBlending.py
# Expected module-level imports for this snippet:
#   from sklearn import linear_model, neighbors
#   from sklearn.tree import DecisionTreeClassifier
#   from sklearn.ensemble import AdaBoostClassifier, RandomForestClassifier
#   from sklearn.linear_model import Perceptron
#   import lasagne
#   from lasagne import layers
#   from lasagne.updates import nesterov_momentum
#   from nolearn.lasagne import NeuralNet

def __init__(self, isTrain, isOutlierRemoval=0):
    super(ClassificationUniformBlending, self).__init__(isTrain, isOutlierRemoval)
    # data preprocessing
    self.dataPreprocessing()

    # create logistic regression object (L1-regularized)
    self.logreg = linear_model.LogisticRegression(tol=1e-6, penalty='l1', C=0.0010985411419875584)

    # create AdaBoost object built on a depth-10 decision tree base estimator
    self.dt_stump = DecisionTreeClassifier(max_depth=10)
    self.ada = AdaBoostClassifier(
        base_estimator=self.dt_stump,
        learning_rate=1,
        n_estimators=5,
        algorithm="SAMME.R")

    # create KNN object (2 nearest neighbours, uniform weights)
    self.knn = neighbors.KNeighborsClassifier(2, weights='uniform')

    # create decision tree object
    self.decisiontree = DecisionTreeClassifier(max_depth=45, max_features='log2')

    # create neural network object
    self.net1 = NeuralNet(
        layers=[  # three layers: one hidden layer
            ('input', layers.InputLayer),
            ('hidden', layers.DenseLayer),
            # ('hidden2', layers.DenseLayer),
            ('output', layers.DenseLayer),
        ],
        # layer parameters:
        input_shape=(None, 12),  # input dimension is 12
        hidden_num_units=6,      # number of units in the hidden layer
        # hidden2_num_units=3,   # number of units in a second hidden layer (disabled)
        output_nonlinearity=lasagne.nonlinearities.sigmoid,  # output layer uses the sigmoid function
        output_num_units=1,      # output dimension is 1
        # optimization method:
        update=nesterov_momentum,
        update_learning_rate=0.002,
        update_momentum=0.9,
        regression=True,  # flag to indicate we're dealing with a regression-style (continuous) output
        max_epochs=25,    # train for this many epochs
        verbose=0,
    )

    # create PLA (perceptron learning algorithm) object
    self.pla = Perceptron()

    # create random forest object
    self.rf = RandomForestClassifier(max_features='log2', n_estimators=20, max_depth=30)
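
# Hedged sketch (not in the original file): one way the uniform blending named by this
# class could combine the base models at prediction time, i.e. an equal-weight majority
# vote over their class predictions. The method name `predict_blend`, the 0/1 binary-label
# assumption, and the 0.5 threshold applied to the NeuralNet's sigmoid output are
# illustrative assumptions, not the author's confirmed implementation.
def predict_blend(self, X):
    import numpy as np

    # scikit-learn estimators: each returns a vector of 0/1 class predictions
    sk_models = [self.logreg, self.ada, self.knn,
                 self.decisiontree, self.pla, self.rf]
    votes = [m.predict(X) for m in sk_models]

    # the nolearn NeuralNet is configured with regression=True and a sigmoid output,
    # so threshold its continuous output at 0.5 to obtain a 0/1 vote (assumption)
    votes.append((self.net1.predict(X).ravel() > 0.5).astype(int))

    # uniform blending: every model gets one equal vote, the majority decides
    votes = np.vstack(votes)
    return (votes.mean(axis=0) > 0.5).astype(int)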