import time

from sklearn.preprocessing import normalize
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import KFold


def rede_neural(X, y):
    print("Starting neural network training")
    X2 = normalize(X)
    clf = MLPClassifier(hidden_layer_sizes=(100, 50), activation='tanh', solver='adam', alpha=1e-5,
                        learning_rate='constant', tol=1e-8, learning_rate_init=0.0002,
                        early_stopping=True, validation_fraction=0.2)
    # 3-fold cross-validation over the normalized data
    kf = KFold(n_splits=3)
    i = 0
    for train, test in kf.split(X2):
        start = time.time()
        i = i + 1
        print("Training fold", i)
        # split the dataset into train and test folds
        # X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=1)
        X_train, X_test, y_train, y_test = X2[train], X2[test], y[train], y[test]
        # fit
        clf.fit(X_train, y_train)
        print("score:", clf.score(X_test, y_test), "(", (time.time() - start) / 60.0, "minutes )")
    return clf
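
# A minimal usage sketch, not part of the original snippet: it assumes X is a
# numeric feature matrix and y the matching label vector. make_classification
# is used here only to generate illustrative data.
if __name__ == "__main__":
    from sklearn.datasets import make_classification

    X, y = make_classification(n_samples=1000, n_features=20, random_state=1)
    model = rede_neural(X, y)
    print("Trained model:", model)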