def test_srelu():
    from keras.utils.test_utils import layer_test
    from keras.layers.advanced_activations import SReLU
    layer_test(SReLU, kwargs={},
               input_shape=(2, 3, 4))
Example source code for the Python SReLU() class
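For reference, SReLU (the S-shaped rectified linear unit of Jin et al., 2016) learns a piecewise-linear activation from four trainable parameter sets: a left threshold/slope pair (t_left, a_left) and a right pair (t_right, a_right). A minimal NumPy sketch of the forward pass with scalar parameters (the Keras layer learns one parameter set per feature unless shared_axes is given):

import numpy as np

def srelu_forward(x, t_left=0.0, a_left=0.2, t_right=1.0, a_right=1.0):
    # y = t_left  + a_left  * (x - t_left)    for x <= t_left
    # y = x                                   for t_left < x < t_right
    # y = t_right + a_right * (x - t_right)   for x >= t_right
    return np.where(
        x >= t_right, t_right + a_right * (x - t_right),
        np.where(x <= t_left, t_left + a_left * (x - t_left), x),
    )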
def buildFeatures(self, type='shared'):
    assert self.checkTensor('q-channels')
    assert self.checkTensor('a-channels')
    srelu = lambda name: SReLU(name=name)  # unused here; this variant uses tanh
    if type == 'shared':
        q_features = self.linkFeature('q-channels', 'shared-convolution', activation='tanh')
        a_features = self.linkFeature('a-channels', 'shared-convolution', activation='tanh')
    else:
        raise NotImplementedError('Not Supported')
    print('q-features', q_features._keras_shape, K.ndim(q_features))
    print('a-features', a_features._keras_shape, K.ndim(a_features))
    self.tensors['q-features'] = q_features
    self.tensors['a-features'] = a_features
def buildFeatures(self, type='shared'):
    assert self.checkTensor('q+')
    assert self.checkTensor('q-')
    assert self.checkTensor('a+')
    assert self.checkTensor('a-')
    srelu = lambda name: SReLU(name=name)  # factory: a fresh named SReLU per call
    if type == 'shared':
        q_features = self.doubleFeature('q+', 'q-', 'shared-convolution', activation=srelu)
        a_features = self.doubleFeature('a+', 'a-', 'shared-convolution', activation=srelu)
    else:
        raise NotImplementedError('Not Supported')
    print('q-features', q_features._keras_shape)
    print('a-features', a_features._keras_shape)
    self.tensors['q-features'] = q_features
    self.tensors['a-features'] = a_features
def buildFeatures(self, type='shared'):
    assert self.checkTensor('q+')
    assert self.checkTensor('q-')
    assert self.checkTensor('a+')
    assert self.checkTensor('a-')
    srelu = lambda name: SReLU(name=name)  # factory: a fresh named SReLU per call
    if type == 'shared':
        q_features = Merge(
            mode='concat',
            name='q-features',
        )([
            self.linkFeature('q+', 'shared-convolution', activation=srelu),
            self.linkFeature('q-', 'shared-convolution', activation=srelu),
        ])
        a_features = Merge(
            mode='concat',
            name='a-features',
        )([
            self.linkFeature('a+', 'shared-convolution', activation=srelu),
            self.linkFeature('a-', 'shared-convolution', activation=srelu),
        ])
    else:
        raise NotImplementedError('Not Supported')
    self.tensors['q-features'] = q_features
    self.tensors['a-features'] = a_features
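The pattern above can be reproduced standalone. A hedged sketch assuming the Keras 1.x functional API (the shapes and layer names are made up; `linkFeature` in the original presumably applies the named shared convolution followed by the activation factory):

from keras.layers import Input, Convolution1D, merge
from keras.layers.advanced_activations import SReLU
from keras.models import Model

srelu = lambda name: SReLU(name=name)  # one fresh, named SReLU per branch

q_pos = Input(shape=(20, 50), name='q_pos')  # hypothetical shapes
q_neg = Input(shape=(20, 50), name='q_neg')
shared_conv = Convolution1D(64, 3, border_mode='same', name='shared-convolution')

# The convolution weights are shared across both branches; each branch gets
# its own trainable SReLU, and the results are concatenated as with Merge above.
pos = srelu('srelu_pos')(shared_conv(q_pos))
neg = srelu('srelu_neg')(shared_conv(q_neg))
q_features = merge([pos, neg], mode='concat', name='q-features')

model = Model(input=[q_pos, q_neg], output=q_features)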
def test_srelu_share():
    from keras.utils.test_utils import layer_test
    from keras.layers.advanced_activations import SReLU
    layer_test(SReLU, kwargs={'shared_axes': 1},
               input_shape=(2, 3, 4))
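shared_axes ties the four SReLU parameter sets along the listed axes. With input_shape=(2, 3, 4) above (axis 0 is the batch), shared_axes=1 reuses one parameter set across the 3 positions on axis 1, leaving one set per position on the last axis of size 4. A minimal sketch:

from keras.layers.advanced_activations import SReLU
from keras.models import Sequential

model = Sequential()
# Parameters are shared across axis 1 (e.g. time steps), so only the last
# dimension of size 4 gets its own (t_left, a_left, t_right, a_right).
model.add(SReLU(shared_axes=1, input_shape=(3, 4)))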
def create_model():
    # advanced activation: instantiated but not wired into the model yet
    srelu = advanced_activations.SReLU(
        t_left_init='zero',
        a_left_init='glorot_uniform',
        t_right_init='glorot_uniform',
        a_right_init='one',
    )
    # create and return the model (input_dim / output_dim are module-level)
    model = Sequential()
    model.add(Dense(256, input_dim=input_dim, activation='sigmoid'))
    model.add(Dense(256, activation='sigmoid'))
    model.add(Dense(output_dim, activation='sigmoid'))
    return model
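Since the srelu above is created but never added, one hedged way to actually use it is to drop the built-in activations and follow each Dense layer with its own SReLU instance (each instance holds separate trainable parameters, so one cannot be reused across layers). A hypothetical variant, not from the original:

def create_model_with_srelu():
    model = Sequential()
    model.add(Dense(256, input_dim=input_dim))
    model.add(advanced_activations.SReLU(
        t_left_init='zero',
        a_left_init='glorot_uniform',
        t_right_init='glorot_uniform',
        a_right_init='one',
    ))
    model.add(Dense(256))
    model.add(advanced_activations.SReLU())  # second instance, own parameters
    model.add(Dense(output_dim, activation='sigmoid'))
    return model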
# Keras 1.x imports used by this snippet
from keras.layers import Input, Embedding, Flatten, Dense, Dropout, merge
from keras.layers.advanced_activations import SReLU
from keras.models import Model
from keras.optimizers import Nadam, SGD

def build_model(X, dim=128):
    # num_q / num_e are module-level vocabulary sizes for the p and d ids
    inputs_p = Input(shape=(1,), dtype='int32')
    embed_p = Embedding(
        num_q,
        dim,
        dropout=0.2,
        input_length=1,
    )(inputs_p)
    inputs_d = Input(shape=(1,), dtype='int32')
    embed_d = Embedding(
        num_e,
        dim,
        dropout=0.2,
        input_length=1,
    )(inputs_d)
    flatten_p = Flatten()(embed_p)
    flatten_d = Flatten()(embed_d)
    flatten = merge([
        flatten_p,
        flatten_d,
    ], mode='concat')
    fc1 = Dense(512)(flatten)
    fc1 = SReLU()(fc1)  # learned piecewise-linear activation
    dp1 = Dropout(0.7)(fc1)
    outputs = Dense(1, activation='sigmoid', name='outputs')(dp1)
    inputs = [
        inputs_p,
        inputs_d,
    ]
    model = Model(input=inputs, output=outputs)
    nadam = Nadam()
    sgd = SGD(lr=1e-3, decay=1e-6, momentum=0.9, nesterov=True)  # unused alternative
    model.compile(
        optimizer=nadam,
        loss='binary_crossentropy',
    )
    return model
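A hedged usage sketch: num_q and num_e are module-level vocabulary sizes in the original, and the ids and labels below are random placeholders for illustration only.

import numpy as np

num_q, num_e = 1000, 500              # assumed vocabulary sizes
model = build_model(X=None, dim=128)  # X is unused by build_model

p_ids = np.random.randint(0, num_q, size=(32, 1))
d_ids = np.random.randint(0, num_e, size=(32, 1))
labels = np.random.randint(0, 2, size=(32, 1))
model.fit([p_ids, d_ids], labels, nb_epoch=1, batch_size=32)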