def tsinalis(input_shape, n_classes):
    """Build and compile the 'Tsinalis' 1D/2D CNN classifier.

    Input size should be [batch, 1d, 2d, ch] = (None, 1, 15000, 1)

    Args:
        input_shape: shape of one input sample, handed to the first Conv1D.
        n_classes: number of output classes for the softmax head.

    Returns:
        A compiled Keras ``Sequential`` model (categorical crossentropy, SGD).
    """
    # Fix: removed leftover debug print(model.input_shape/output_shape)
    # statements that polluted stdout on every model construction.
    model = Sequential(name='Tsinalis')
    # Wide temporal convolution over the raw signal.
    model.add(Conv1D(kernel_size=200, filters=20, input_shape=input_shape,
                     activation='relu'))
    model.add(MaxPooling1D(pool_size=20, strides=10))
    # Re-stack the 20 feature maps as rows of a single-channel 2-D map so the
    # next layer convolves jointly across feature maps and time.
    model.add(keras.layers.core.Reshape([20, -1, 1]))
    model.add(Conv2D(kernel_size=(20, 30), filters=400, activation='relu'))
    model.add(MaxPooling2D(pool_size=(1, 10), strides=(1, 2)))
    model.add(Flatten())
    model.add(Dense(500, activation='relu'))
    model.add(Dense(500, activation='relu'))
    # L2 activity regularization on the softmax output layer.
    model.add(Dense(n_classes, activation='softmax',
                    activity_regularizer=keras.regularizers.l2()))
    model.compile(loss='categorical_crossentropy',
                  optimizer=keras.optimizers.SGD(),
                  metrics=[keras.metrics.categorical_accuracy])
    return model
Python Conv1D() class — usage examples source code
def first_block(tensor_input, filters, kernel_size=3, pooling_size=1, dropout=0.5):
    """Entry residual block.

    Main branch: 1x1 conv -> BN -> ReLU -> dropout -> k-wide conv.
    Shortcut: max-pooled input. The two are summed element-wise.
    """
    k1, k2 = filters
    branch = Conv1D(k1, 1, padding='same')(tensor_input)
    branch = BatchNormalization()(branch)
    branch = Activation('relu')(branch)
    branch = Dropout(dropout)(branch)
    branch = Conv1D(k2, kernel_size, padding='same')(branch)
    shortcut = MaxPooling1D(pooling_size, padding='same')(tensor_input)
    return add([branch, shortcut])
def test_keras_import(self):
    """Check ZeroPadding1D/2D/3D layers each map to the expected 'pad_w' result."""
    # Pad 1D
    model = Sequential()
    model.add(ZeroPadding1D(2, input_shape=(224, 3)))
    model.add(Conv1D(32, 7, strides=2))
    model.build()
    self.pad_test(model, 'pad_w', 2)
    # Pad 2D
    model = Sequential()
    model.add(ZeroPadding2D(2, input_shape=(224, 224, 3)))
    model.add(Conv2D(32, 7, strides=2))
    model.build()
    self.pad_test(model, 'pad_w', 2)
    # Pad 3D
    model = Sequential()
    model.add(ZeroPadding3D(2, input_shape=(224, 224, 224, 3)))
    model.add(Conv3D(32, 7, strides=2))
    model.build()
    self.pad_test(model, 'pad_w', 2)
# ********** Export json tests **********
# ********** Data Layers Test **********
def rcnn(input_shape, n_classes):
    """
    Input size should be [batch, 1d, ch] = (XXX, 3000, 1)
    """
    model = Sequential(name='RCNN test')
    # batch_input_shape (fixed batch size) is used because the LSTM below is
    # stateful, which requires a known batch dimension.
    model.add(Conv1D (kernel_size = (200), filters = 20, batch_input_shape=input_shape, activation='elu'))
    model.add(MaxPooling1D(pool_size = (20), strides=(10)))
    model.add(Conv1D (kernel_size = (20), filters = 200, activation='elu'))
    model.add(MaxPooling1D(pool_size = (10), strides=(3)))
    model.add(Conv1D (kernel_size = (20), filters = 200, activation='elu'))
    model.add(MaxPooling1D(pool_size = (10), strides=(3)))
    # Dense layers here are applied per time step (input is still 3-D).
    model.add(Dense (512, activation='elu'))
    model.add(Dense (512, activation='elu'))
    # NOTE(review): model.output_shape[1] is the time dimension, not the
    # flattened feature count — this reshape only succeeds for particular
    # input lengths; confirm against the intended input_shape.
    model.add(Reshape((1,model.output_shape[1])))
    model.add(LSTM(256, stateful=True, return_sequences=False))
    model.add(Dropout(0.3))
    # NOTE(review): 'sigmoid' paired with categorical_crossentropy is unusual
    # (softmax is the conventional pairing) — confirm this is intentional.
    model.add(Dense(n_classes, activation = 'sigmoid'))
    model.compile(loss='categorical_crossentropy', optimizer=Adadelta())
    return model
def repeated_block(x, filters, kernel_size=3, pooling_size=1, dropout=0.5):
    """Pre-activation residual block that downsamples by a factor of 4.

    Main branch: two BN/ReLU/conv stages, each with stride 2 (4x total).
    Shortcut: max pooling with stride 4 so both branches align for the sum.
    """
    k1, k2 = filters
    branch = BatchNormalization()(x)
    branch = Activation('relu')(branch)
    branch = Conv1D(k1, kernel_size, strides=2, padding='same')(branch)
    branch = BatchNormalization()(branch)
    branch = Activation('relu')(branch)
    branch = Dropout(dropout)(branch)
    branch = Conv1D(k2, kernel_size, strides=2, padding='same')(branch)
    shortcut = MaxPooling1D(pooling_size, strides=4, padding='same')(x)
    return add([branch, shortcut])
def ResidualBlock1D_helper(layers, kernel_size, filters, final_stride=1):
    """Return a closure that applies a 1-D residual block.

    The closure stacks `layers` ELU + Conv1D steps on its input, adds the
    identity shortcut, and strides the sum by `final_stride`.
    """
    def block(tensor):
        out = tensor
        for _ in range(layers):
            # BatchNormalization deliberately omitted: known Keras bug with
            # TimeDistributed (https://github.com/fchollet/keras/issues/5221).
            out = ELU()(out)
            out = Conv1D(filters, kernel_size, kernel_initializer='he_normal',
                         kernel_regularizer=l2(1.e-4), padding='same')(out)
        # pool_size=1 means this strides without averaging.
        return AveragePooling1D(pool_size=1, strides=final_stride)(Add()([tensor, out]))
    return block
def layer_test_helper_1d_global(layer, channel_index):
    """Delete channels from a Conv1D layer and verify the downstream Dense
    layer's weights were pruned on the matching input rows.

    This should test that the output is the correct shape so it should pass
    into a Dense layer rather than a Conv layer. The weighted layer is the
    previous layer.
    """
    # Create model
    # NOTE(review): `random` here looks like numpy.random (randint with a
    # `size=` keyword) — confirm against the file's imports.
    main_input = Input(shape=list(random.randint(10, 20, size=2)))
    x = Conv1D(3, 3)(main_input)
    x = layer(x)
    main_output = Dense(5)(x)
    model = Model(inputs=main_input, outputs=main_output)
    # Delete channels
    del_layer_index = 1
    next_layer_index = 3
    del_layer = model.layers[del_layer_index]
    new_model = operations.delete_channels(model, del_layer, channel_index)
    new_w = new_model.layers[next_layer_index].get_weights()
    # Calculate next layer's correct weights
    channel_count = getattr(del_layer, utils.get_channels_attr(del_layer))
    # Wrap negative / out-of-range indices into the valid channel range.
    channel_index = [i % channel_count for i in channel_index]
    correct_w = model.layers[next_layer_index].get_weights()
    # Dense kernel is (input_dim, units): deleting channels removes rows (axis 0).
    correct_w[0] = np.delete(correct_w[0], channel_index, axis=0)
    assert weights_equal(correct_w, new_w)
def test_conv1d_lstm(self):
    """Convert a Conv1D + LSTM + Dense model to Core ML and verify the spec."""
    from keras.layers import Conv1D, LSTM, Dense
    model = Sequential()
    # input_shape = (time_step, dimensions)
    model.add(Conv1D(32,3,padding='same',input_shape=(10,8)))
    # conv1d output shape = (None, 10, 32)
    model.add(LSTM(24))
    model.add(Dense(1, activation='sigmoid'))
    input_names = ['input']
    output_names = ['output']
    spec = keras.convert(model, input_names, output_names).get_spec()
    self.assertIsNotNone(spec)
    self.assertTrue(spec.HasField('neuralNetwork'))
    # Test the inputs and outputs
    # The spec carries 2 extra inputs/outputs beyond those named — presumably
    # the LSTM's recurrent state tensors; verify against the converter docs.
    self.assertEquals(len(spec.description.input), len(input_names) + 2)
    self.assertEquals(len(spec.description.output), len(output_names) + 2)
    # Test the layer parameters.
    layers = spec.neuralNetwork.layers
    self.assertIsNotNone(layers[0].convolution)
    self.assertIsNotNone(layers[1].simpleRecurrent)
    self.assertIsNotNone(layers[2].innerProduct)
def test_tiny_conv1d_dilated_random(self):
    """Round-trip a dilated Conv1D model with random weights through the converter."""
    np.random.seed(1988)
    shape = (20, 1)
    num_kernels, width = 2, 3
    # Build a one-layer model with dilation.
    model = Sequential()
    model.add(Conv1D(num_kernels, kernel_size=width, padding='valid',
                     input_shape=shape, dilation_rate=3))
    # Randomize the weights so the numerical comparison is non-trivial.
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    # Exercise the conversion/comparison harness.
    self._test_keras_model(model)
def test_tiny_conv_upsample_1d_random(self):
    """Round-trip a Conv1D + UpSampling1D model with random weights."""
    np.random.seed(1988)
    length, dim = 10, 2
    width, kernels = 3, 4
    model = Sequential()
    model.add(Conv1D(kernels, kernel_size=width, padding='same',
                     input_shape=(length, dim)))
    model.add(UpSampling1D(size=2))
    # Randomize the weights so the numerical comparison is non-trivial.
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    # Exercise the conversion/comparison harness.
    self._test_keras_model(model)
def test_tiny_conv_crop_1d_random(self, model_precision=_MLMODEL_FULL_PRECISION):
    """Round-trip a Conv1D + Cropping1D model with random weights."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 10
    filter_length = 3
    nb_filters = 4
    model = Sequential()
    model.add(Conv1D(nb_filters, kernel_size = filter_length, padding='same',
                     input_shape=(input_length, input_dim)))
    # Symmetric crop of 2 steps on the temporal axis.
    model.add(Cropping1D(cropping = 2))
    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    # Test the keras model
    self._test_keras_model(model, model_precision=model_precision)
def test_tiny_conv_pad_1d_random(self, model_precision=_MLMODEL_FULL_PRECISION):
    """Round-trip a Conv1D + ZeroPadding1D model with random weights."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 10
    filter_length = 3
    nb_filters = 4
    model = Sequential()
    model.add(Conv1D(nb_filters, kernel_size = filter_length, padding='same',
                     input_shape=(input_length, input_dim)))
    # Symmetric zero-padding of 2 steps on the temporal axis.
    model.add(ZeroPadding1D(padding = 2))
    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    # Test the keras model
    self._test_keras_model(model, model_precision=model_precision)
def __call__(self, inputs):
    """Assemble the graph: one conv/pool stage, flatten, one hidden dense
    layer with dropout, then finish via self._build."""
    seq = inputs[0]
    # Each weighted layer gets its own L1/L2 regularizer instance.
    conv_reg = kr.L1L2(self.l1_decay, self.l2_decay)
    seq = kl.Conv1D(128, 11,
                    kernel_initializer=self.init,
                    kernel_regularizer=conv_reg)(seq)
    seq = kl.Activation('relu')(seq)
    seq = kl.MaxPooling1D(4)(seq)
    seq = kl.Flatten()(seq)
    dense_reg = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    seq = kl.Dense(self.nb_hidden,
                   kernel_initializer=self.init,
                   kernel_regularizer=dense_reg)(seq)
    seq = kl.Activation('relu')(seq)
    seq = kl.Dropout(self.dropout)(seq)
    return self._build(inputs, seq)
def __call__(self, inputs):
    """Assemble the graph: two conv/pool stages feeding a bidirectional GRU
    with dropout, then finish via self._build."""
    x = inputs[0]
    # Each weighted layer gets its own L1/L2 regularizer instance.
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(128, 11,
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)
    x = kl.Activation('relu')(x)
    x = kl.MaxPooling1D(4)(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(256, 7,
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)
    x = kl.Activation('relu')(x)
    x = kl.MaxPooling1D(4)(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    # NOTE(review): kl.recurrent.GRU is the legacy access path; newer Keras
    # exposes this as kl.GRU — confirm the pinned Keras version.
    gru = kl.recurrent.GRU(256, kernel_regularizer=kernel_regularizer)
    x = kl.Bidirectional(gru)(x)
    x = kl.Dropout(self.dropout)(x)
    return self._build(inputs, x)
kaggleQQCharCNNPlus.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 19
Favorites: 0
Likes: 0
Comments: 0
def createBaseNetworkSmall(inputDim, inputLength):
    """Build the small character-level CNN base network.

    Layout: Embedding front-end, six valid Conv1D(256) layers (max-pooled
    after the 1st, 2nd and 6th), then two Dense(1024) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    net.add(Embedding(input_dim=inputDim,
                      output_dim=inputDim, input_length=inputLength))
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(1024, activation='relu'))
        net.add(Dropout(0.5))
    return net
kaggleQQCharCNNPlus.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 20
Favorites: 0
Likes: 0
Comments: 0
def createBaseNetworkLarge(inputDim, inputLength):
    """Build the large character-level CNN base network.

    Layout: Embedding front-end, six valid Conv1D(1024) layers (max-pooled
    after the 1st, 2nd and 6th), then two Dense(2048) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.02)

    net = Sequential()
    net.add(Embedding(input_dim=inputDim,
                      output_dim=inputDim, input_length=inputLength))
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(1024, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(2048, activation='relu'))
        net.add(Dropout(0.5))
    return net
kaggleQQDistRMS_CL.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 22
Favorites: 0
Likes: 0
Comments: 0
def createBaseNetworkSmall(inputDim, inputLength):
    """Build the small character-level CNN base network.

    Layout: Embedding front-end, six valid Conv1D(256) layers (max-pooled
    after the 1st, 2nd and 6th), then two Dense(1024) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    net.add(Embedding(input_dim=inputDim,
                      output_dim=inputDim, input_length=inputLength))
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(1024, activation='relu'))
        net.add(Dropout(0.5))
    return net
kaggleQQDistRMS_CL.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 22
Favorites: 0
Likes: 0
Comments: 0
def createBaseNetworkLarge(inputDim, inputLength):
    """Build the large character-level CNN base network.

    Layout: Embedding front-end, six valid Conv1D(1024) layers (max-pooled
    after the 1st, 2nd and 6th), then two Dense(2048) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.02)

    net = Sequential()
    net.add(Embedding(input_dim=inputDim,
                      output_dim=inputDim, input_length=inputLength))
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(1024, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(2048, activation='relu'))
        net.add(Dropout(0.5))
    return net
kaggleQQSigmoid_SG_smallerAlphabet.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 21
Favorites: 0
Likes: 0
Comments: 0
def createBaseNetworkSmall(inputLength, inputDim):
    """Build the small character-level CNN on dense (inputLength, inputDim) input.

    Six valid Conv1D(256) layers (max-pooled after the 1st, 2nd and 6th)
    feed two Dense(1024) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for idx, (width, pooled) in enumerate(plan):
        # Only the first layer declares the input shape.
        extra = {'input_shape': (inputLength, inputDim)} if idx == 0 else {}
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init(), **extra))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(1024, activation='relu'))
        net.add(Dropout(0.5))
    return net
kaggleQQSigmoid_SG_BCE.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 22
Favorites: 0
Likes: 0
Comments: 0
def createBaseNetworkSmall(inputLength, inputDim):
    """Build the small character-level CNN on dense (inputLength, inputDim) input.

    Six valid Conv1D(256) layers (max-pooled after the 1st, 2nd and 6th)
    feed two Dense(1024) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for idx, (width, pooled) in enumerate(plan):
        # Only the first layer declares the input shape.
        extra = {'input_shape': (inputLength, inputDim)} if idx == 0 else {}
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init(), **extra))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(1024, activation='relu'))
        net.add(Dropout(0.5))
    return net
kaggleQQSigmoid_SG_BCE.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 26
Favorites: 0
Likes: 0
Comments: 0
def createBaseNetworkLarge(inputLength, inputDim):
    """Build the large character-level CNN on dense (inputLength, inputDim) input.

    Six valid Conv1D(1024) layers (max-pooled after the 1st, 2nd and 6th)
    feed two Dense(2048) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.02)

    net = Sequential()
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for idx, (width, pooled) in enumerate(plan):
        # Only the first layer declares the input shape.
        extra = {'input_shape': (inputLength, inputDim)} if idx == 0 else {}
        net.add(Conv1D(1024, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init(), **extra))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(2048, activation='relu'))
        net.add(Dropout(0.5))
    return net
def createBaseNetworkSmall(inputDim, inputLength):
    """Build the small character-level CNN base network.

    Layout: Embedding front-end, six valid Conv1D(256) layers (max-pooled
    after the 1st, 2nd and 6th), then two Dense(1024) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    net.add(Embedding(input_dim=inputDim,
                      output_dim=inputDim, input_length=inputLength))
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(1024, activation='relu'))
        net.add(Dropout(0.5))
    return net
def createBaseNetworkLarge(inputDim, inputLength):
    """Build the large character-level CNN base network.

    Layout: Embedding front-end, six valid Conv1D(1024) layers (max-pooled
    after the 1st, 2nd and 6th), then two Dense(2048) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.02)

    net = Sequential()
    net.add(Embedding(input_dim=inputDim,
                      output_dim=inputDim, input_length=inputLength))
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(1024, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(2048, activation='relu'))
        net.add(Dropout(0.5))
    return net
def createBaseNetworkLarge(inputDim, inputLength):
    """Build the large character-level CNN base network.

    NOTE: the parameters are unused here and no input shape is declared —
    the network's shape is fixed on first call, matching the original.
    Six valid Conv1D(1024) layers (max-pooled after the 1st, 2nd and 6th)
    feed two Dense(2048) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.02)

    net = Sequential()
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(1024, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(2048, activation='relu'))
        net.add(Dropout(0.5))
    return net
testSigmoidSmaller.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 26
Favorites: 0
Likes: 0
Comments: 0
def netSigmoid(inputLength, inputDim):
    """Build the small character-level CNN on dense (inputLength, inputDim) input.

    Six valid Conv1D(256) layers (max-pooled after the 1st, 2nd and 6th)
    feed two Dense(1024) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for idx, (width, pooled) in enumerate(plan):
        # Only the first layer declares the input shape.
        extra = {'input_shape': (inputLength, inputDim)} if idx == 0 else {}
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init(), **extra))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(1024, activation='relu'))
        net.add(Dropout(0.5))
    return net
def createBaseNetworkSmall(inputLength, inputDim):
    """Build the small character-level CNN on dense (inputLength, inputDim) input.

    Six valid Conv1D(256) layers (max-pooled after the 1st, 2nd and 6th)
    feed two Dense(1024) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for idx, (width, pooled) in enumerate(plan):
        # Only the first layer declares the input shape.
        extra = {'input_shape': (inputLength, inputDim)} if idx == 0 else {}
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init(), **extra))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(1024, activation='relu'))
        net.add(Dropout(0.5))
    return net
def createBaseNetworkSmall(inputLength, inputDim):
    """Build a reduced character-level CNN: three pooled Conv1D(256) layers
    and two Dense(128) blocks with light dropout."""
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    for idx, width in enumerate((7, 7, 3)):
        # Only the first layer declares the input shape.
        extra = {'input_shape': (inputLength, inputDim)} if idx == 0 else {}
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init(), **extra))
        net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(128, activation='relu'))
        net.add(Dropout(0.2))
    return net
def createBaseNetworkSmall(inputDim, inputLength):
    """Build the small character-level CNN base network.

    Layout: Embedding front-end, six valid Conv1D(256) layers (max-pooled
    after the 1st, 2nd and 6th), then two Dense(1024) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    net.add(Embedding(input_dim=inputDim, output_dim=inputDim,
                      input_length=inputLength))
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(1024, activation='relu'))
        net.add(Dropout(0.5))
    return net
def createBaseNetworkLarge(inputDim, inputLength):
    """Build the large character-level CNN base network.

    Layout: Embedding front-end, six valid Conv1D(1024) layers (max-pooled
    after the 1st, 2nd and 6th), then two Dense(2048) blocks with dropout.
    """
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.02)

    net = Sequential()
    net.add(Embedding(input_dim=inputDim, output_dim=inputDim,
                      input_length=inputLength))
    # (kernel width, pool after this conv?)
    plan = ((7, True), (7, True), (3, False), (3, False), (3, False), (3, True))
    for width, pooled in plan:
        net.add(Conv1D(1024, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init()))
        if pooled:
            net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(2048, activation='relu'))
        net.add(Dropout(0.5))
    return net
kaggleQQ_Euc_Small.py — source file
Project: kaggle-quora-question-pairs
Author: voletiv
Project source
File source
Views: 18
Favorites: 0
Likes: 0
Comments: 0
def createBaseNetworkSmall(inputLength, inputDim):
    """Build a reduced character-level CNN: three pooled Conv1D(256) layers
    and two Dense(128) blocks with light dropout."""
    def init():
        # A fresh RandomNormal per layer, matching the original construction.
        return RandomNormal(mean=0.0, stddev=0.05)

    net = Sequential()
    for idx, width in enumerate((7, 7, 3)):
        # Only the first layer declares the input shape.
        extra = {'input_shape': (inputLength, inputDim)} if idx == 0 else {}
        net.add(Conv1D(256, width, strides=1, padding='valid',
                       activation='relu', kernel_initializer=init(),
                       bias_initializer=init(), **extra))
        net.add(MaxPooling1D(pool_size=3, strides=3))
    net.add(Flatten())
    for _ in range(2):
        net.add(Dense(128, activation='relu'))
        net.add(Dropout(0.2))
    return net