from sklearn.preprocessing import RobustScaler

def scale_data(X, scaler=None):
    """Scale X with robust scaling.

    Args:
        X (np.array): feature matrix indexed by binID.
        scaler (RobustScaler): pre-trained scaler. Defaults to None.

    Returns:
        np.array: normalized feature matrix.
        RobustScaler: robust scaler fitted on the training data;
            only returned when no pre-trained scaler is supplied.
    """
    if scaler is not None:
        return scaler.transform(X)
    else:
        scaler = RobustScaler(copy=False)
        scaler.fit(X)
        return scaler.transform(X), scaler
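A minimal usage sketch, assuming X_train and X_test are hypothetical NumPy feature matrices: the scaler fitted on the training split is reused for the held-out split.

import numpy as np

X_train = np.random.rand(200, 8)   # hypothetical training feature matrix
X_test = np.random.rand(50, 8)     # hypothetical held-out feature matrix

# Fit on the training split, then reuse the fitted scaler for the test split.
X_train_scaled, scaler = scale_data(X_train)
X_test_scaled = scale_data(X_test, scaler=scaler)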
Example source code using Python's RobustScaler() class
def choose_best_lag(seq, pre_period, lags=range(1, 30), Kmax=200):
    """Scan candidate input lags for the lazzy model and return the one
    with the lowest leave-one-out prediction error.
    """
    models = []
    # Standardize the sequence.
    std_sca = StandardScaler().fit(np.array(seq).reshape(-1, 1))
    # rob_sca = RobustScaler().fit(np.array(seq).reshape(-1, 1))
    seq = std_sca.transform(np.array(seq).reshape(-1, 1))
    # Build a model for every candidate lag and record its prediction error.
    for input_lag in lags:
        # window = input_lag + pre_period
        X, Y = create_dataset(seq.flatten(), input_lag, pre_period)
        lazzy_models = lazzy_loo(X[-1], X[0:-1], Y[:-1], Kmax)
        y_pred = lazzy_prediction(X[-1], X[0:-1], Y[:-1], lazzy_models)
        err = err_evaluation(y_pred, Y[-1])
        lazzy_models.sort()
        models.append((err, input_lag, lazzy_models[0][1]))
    models.sort()
    best_lag = models[0][1]
    best_k = models[0][2]
    return models, best_lag, best_k
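A hedged usage sketch: assuming the helpers create_dataset, lazzy_loo, lazzy_prediction and err_evaluation are defined elsewhere in the same module, a univariate series can be scanned for its best input lag like this (the series and horizon below are illustrative, not from the original project).

import numpy as np

series = np.sin(np.linspace(0, 20, 300))   # hypothetical univariate sequence
models, best_lag, best_k = choose_best_lag(series, pre_period=7, lags=range(1, 30), Kmax=200)
print(best_lag, best_k)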
def test_large_grid():
    """In this test, we purposely overfit a RandomForest to completely random data
    in order to assert that the test error will far exceed the train error.
    """
    if not SK18:
        custom_cv = KFold(n=y_train.shape[0], n_folds=3, shuffle=True, random_state=42)
    else:
        custom_cv = KFold(n_splits=3, shuffle=True, random_state=42)
    # define the pipe
    pipe = Pipeline([
        ('scaler', SelectiveScaler()),
        ('pca', SelectivePCA(weight=True)),
        ('rf', RandomForestClassifier(random_state=42))
    ])
    # define the hyperparameters
    hp = {
        'scaler__scaler': [StandardScaler(), RobustScaler(), MinMaxScaler()],
        'pca__whiten': [True, False],
        'pca__weight': [True, False],
        'pca__n_components': uniform(0.75, 0.15),
        'rf__n_estimators': randint(5, 10),
        'rf__max_depth': randint(5, 15)
    }
    # define the grid
    grid = RandomizedSearchCV(pipe, hp, n_iter=2, scoring='accuracy', n_jobs=1, cv=custom_cv, random_state=42)
    # this will fail because we haven't fit yet
    assert_fails(grid.score, (ValueError, AttributeError), X_train, y_train)
    # fit the grid
    grid.fit(X_train, y_train)
    # score for coverage -- this might warn...
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        grid.score(X_train, y_train)
    # coverage:
    assert grid._estimator_type == 'classifier'
    # get predictions
    tr_pred, te_pred = grid.predict(X_train), grid.predict(X_test)
    # evaluate scores (SHOULD be better than random...)
    accuracy_score(y_train, tr_pred), accuracy_score(y_test, te_pred)
    # grid score reports:
    # assert fails for bad percentile
    assert_fails(report_grid_score_detail, ValueError, **{'random_search': grid, 'percentile': 0.0})
    assert_fails(report_grid_score_detail, ValueError, **{'random_search': grid, 'percentile': 1.0})
    # assert fails for bad y_axis
    assert_fails(report_grid_score_detail, ValueError, **{'random_search': grid, 'y_axis': 'bad_axis'})
    # assert passes otherwise
    report_grid_score_detail(grid, charts=True, percentile=0.95)  # just ensure percentile works
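If the same search were run interactively, the winning configuration could be read back from the standard attributes RandomizedSearchCV exposes after fitting; these lines are an illustrative addition, not part of the original test.

# Hypothetical follow-up on a fitted RandomizedSearchCV instance:
print(grid.best_score_)    # mean CV accuracy of the best sampled configuration
print(grid.best_params_)   # e.g. which of StandardScaler/RobustScaler/MinMaxScaler won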
Source file: custom_transformers.py, project: pandas-pipelines-custom-transformers, author: jem1031
def fit(self, X, y=None):
    # Fit a RobustScaler on the DataFrame and keep the centering/scaling
    # statistics as pandas Series indexed by the original column names.
    self.rs = RobustScaler()
    self.rs.fit(X)
    self.center_ = pd.Series(self.rs.center_, index=X.columns)
    self.scale_ = pd.Series(self.rs.scale_, index=X.columns)
    return self
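The snippet shows only fit. A minimal transform counterpart is sketched below under the assumption that the class follows the usual scikit-learn transformer contract and should return a DataFrame with the original index and columns; it is not taken from the source project.

def transform(self, X, y=None):
    # Apply the fitted RobustScaler and rebuild a DataFrame with the
    # original index and column names (assumed behaviour, not the original code).
    Xt = self.rs.transform(X)
    return pd.DataFrame(Xt, index=X.index, columns=X.columns)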
def keras_mlp1(train2, y, test2, v, z):
    from keras import layers
    from keras import models
    from keras import optimizers
    cname = sys._getframe().f_code.co_name
    num_splits = 9
    scaler = preprocessing.RobustScaler()
    train3 = scaler.fit_transform(train2)
    test3 = scaler.transform(test2)
    input_dims = train3.shape[1]
    def build_model():
        input_ = layers.Input(shape=(input_dims,))
        model = layers.Dense(256, kernel_initializer='Orthogonal')(input_)
        #model = layers.BatchNormalization()(model)
        #model = layers.advanced_activations.PReLU()(model)
        model = layers.Activation('selu')(model)
        #model = layers.Dropout(0.7)(model)
        model = layers.Dense(64, kernel_initializer='Orthogonal')(model)
        #model = layers.BatchNormalization()(model)
        model = layers.Activation('selu')(model)
        #model = layers.advanced_activations.PReLU()(model)
        #model = layers.Dropout(0.9)(model)
        model = layers.Dense(16, kernel_initializer='Orthogonal')(model)
        #model = layers.BatchNormalization()(model)
        model = layers.Activation('selu')(model)
        #model = layers.advanced_activations.PReLU()(model)
        model = layers.Dense(1, activation='sigmoid')(model)
        model = models.Model(input_, model)
        model.compile(loss='binary_crossentropy', optimizer=optimizers.Nadam())
        #print(model.summary(line_length=120))
        return model
    keras_common(train3, y, test3, v, z, num_splits, cname, build_model)
def keras_mlp2(train2, y, test2, v, z):
    from keras import layers
    from keras import models
    from keras import optimizers
    cname = sys._getframe().f_code.co_name
    num_splits = 9
    scaler = preprocessing.RobustScaler()
    train3 = scaler.fit_transform(train2)
    test3 = scaler.transform(test2)
    input_dims = train3.shape[1]
    def build_model():
        input_ = layers.Input(shape=(input_dims,))
        model = layers.Dense(1024, kernel_initializer='Orthogonal')(input_)
        model = layers.Activation('selu')(model)
        model = layers.Dense(128, kernel_initializer='Orthogonal')(model)
        model = layers.Activation('selu')(model)
        model = layers.Dense(16, kernel_initializer='Orthogonal')(model)
        model = layers.Activation('selu')(model)
        model = layers.Dense(1, activation='sigmoid')(model)
        model = models.Model(input_, model)
        model.compile(loss='binary_crossentropy', optimizer=optimizers.RMSprop())
        #print(model.summary(line_length=120))
        return model
    keras_common(train3, y, test3, v, z, num_splits, cname, build_model)
def keras_mlp3(train2, y, test2, v, z):
    from keras import layers
    from keras import models
    from keras import optimizers
    cname = sys._getframe().f_code.co_name
    num_splits = 9
    scaler = preprocessing.RobustScaler()
    train3 = scaler.fit_transform(train2)
    test3 = scaler.transform(test2)
    input_dims = train3.shape[1]
    def build_model():
        input_ = layers.Input(shape=(input_dims,))
        model = layers.Dense(256, kernel_initializer='Orthogonal')(input_)
        model = layers.Activation('selu')(model)
        model = layers.Dense(32, kernel_initializer='Orthogonal')(model)
        model = layers.Activation('selu')(model)
        model = layers.Dense(4, kernel_initializer='Orthogonal')(model)
        model = layers.Activation('selu')(model)
        model = layers.Dense(1, activation='sigmoid')(model)
        model = models.Model(input_, model)
        model.compile(loss='binary_crossentropy', optimizer=optimizers.SGD(nesterov=True))
        #print(model.summary(line_length=120))
        return model
    keras_common(train3, y, test3, v, z, num_splits, cname, build_model)
def keras_mlp3(train2, y, test2, v, z):
    from keras import layers
    from keras import models
    from keras import optimizers
    cname = sys._getframe().f_code.co_name
    num_splits = 9
    scaler = preprocessing.RobustScaler()
    train3 = scaler.fit_transform(train2)
    test3 = scaler.transform(test2)
    input_dims = train3.shape[1]
    def build_model():
        input_ = layers.Input(shape=(input_dims,))
        model = layers.Dense(512, kernel_initializer='Orthogonal')(input_)
        model = layers.Activation('selu')(model)
        model = layers.Dense(256, kernel_initializer='Orthogonal')(model)
        model = layers.Activation('selu')(model)
        model = layers.Dense(32, kernel_initializer='Orthogonal')(model)
        model = layers.Activation('selu')(model)
        model = layers.Dense(1, activation='sigmoid')(model)
        model = models.Model(input_, model)
        model.compile(loss='binary_crossentropy', optimizer=optimizers.SGD(nesterov=True))
        #print(model.summary(line_length=120))
        return model
    keras_common(train3, y, test3, v, z, num_splits, cname, build_model)
def keras_base(train2, y, test2, v, z, build_model, N_splits, cname, base_seed=42):
    v[cname], z[cname] = 0, 0
    scores = []
    scaler = preprocessing.RobustScaler()
    train3 = scaler.fit_transform(train2)
    test3 = scaler.transform(test2)
    model = build_model(train3.shape[1])
    model.summary(line_length=120)
    model_path = '../data/working/' + cname + base_data_name() + '_keras_model.h5'
    num_splits = N_splits
    ss = model_selection.StratifiedKFold(n_splits=num_splits, random_state=base_seed)
    for n, (itrain, ival) in enumerate(ss.split(train3, y)):
        model = build_model(train3.shape[1])
        xtrain, xval = train3[itrain], train3[ival]
        ytrain, yval = y[itrain], y[ival]
        model.fit(
            xtrain, ytrain,
            epochs=10000,
            batch_size=256,
            validation_data=(xval, yval),
            verbose=0,
            callbacks=keras_fit_callbacks(model_path),
            shuffle=True
        )
        model.load_weights(model_path)
        p = model.predict(xval)
        v.loc[ival, cname] += pconvert(p).ravel()
        score = metrics.log_loss(y[ival], p)
        print(cname, 'fold %d: ' % (n + 1), score, now())
        scores.append(score)
        z[cname] += pconvert(model.predict(test3).ravel())
        del model
        os.remove(model_path)
    cv = np.array(scores)
    print(cv, cv.mean(), cv.std())
    z[cname] /= num_splits
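keras_base relies on a keras_fit_callbacks helper that is not shown here. Given the very large epoch budget and the weight reload from model_path, it presumably wires up early stopping plus best-weight checkpointing; the sketch below is an assumption about that helper, not the original code, and the patience value is illustrative.

def keras_fit_callbacks(model_path):
    from keras import callbacks
    # Stop when validation loss stalls and keep only the best weights on disk,
    # so model.load_weights(model_path) restores the best fold model.
    return [
        callbacks.EarlyStopping(monitor='val_loss', patience=20),
        callbacks.ModelCheckpoint(model_path, monitor='val_loss',
                                  save_best_only=True, save_weights_only=True),
    ]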