test_functional_concatenate_model.py source code

python

Project: deeplift    Author: kundajelab

import numpy as np
import keras
import theano
import theano.tensor
# compile_func is assumed to come from deeplift.util, as used by the deeplift test suite
from deeplift.util import compile_func


def setUp(self):
        if not hasattr(keras, '__version__'):
            self.keras_version = 0.2  # didn't have the __version__ tag
        else:
            self.keras_version = float(keras.__version__[0:3])
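        # Two random batches of 10 examples, transposed to shape (10, 51, 10):
        # 51 positions by 10 channels, matching the Input layers defined below.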
        self.inp1 = (np.random.randn(10*10*51)
                    .reshape(10,10,51).transpose(0,2,1))
        self.inp2 = (np.random.randn(10*10*51)
                    .reshape(10,10,51).transpose(0,2,1))
        self.run_functional_tests = True
        if (self.keras_version < 1.0):
            self.run_functional_tests = False
            return #skip setup
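        # Keras >= 1.0: build a functional-API graph in which the two inputs
        # share the same convolution and max-pooling layers.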
        inp1 = keras.layers.Input(shape=(51,10), name="inp1")
        inp2 = keras.layers.Input(shape=(51,10), name="inp2")
        conv = keras.layers.convolutional.Convolution1D(
                 nb_filter=2, filter_length=4,
                 subsample_length=2, activation="relu")
        maxpool = keras.layers.convolutional.MaxPooling1D(pool_length=4, stride=2)
        conv1_out = conv(inp1)
        conv2_out = conv(inp2)
        maxpool1_out = maxpool(conv1_out)
        maxpool2_out = maxpool(conv2_out)
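        # Concatenate the two pooled feature maps along the filter (channel) axis.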
        merge_out = keras.layers.merge([maxpool1_out, maxpool2_out],
                                       mode='concat', concat_axis=2)
        flatten_out = keras.layers.core.Flatten()(merge_out)
        dense1_out = keras.layers.core.Dense(output_dim=5)(flatten_out)
        dense1relu_out = keras.layers.core.Activation("relu")(dense1_out)
        output_preact = keras.layers.core.Dense(
                         output_dim=1, name="output_preact")(dense1relu_out)
        output = keras.layers.core.Activation("sigmoid",
                        name="output_postact")(output_preact)
        self.keras_model = keras.models.Model(input=[inp1, inp2],
                                              output=output)
        self.keras_model.compile(optimizer='rmsprop',
                              loss='binary_crossentropy',
                              metrics=['accuracy'])

        if (self.keras_version <= 0.3): 
            pass
        else:
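            # Compile reference forward-prop and gradient functions through Theano,
            # run in test mode (learning_phase=False); the keras_version < 1.0 case
            # already returned from setUp above.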
            keras_output_fprop_func = compile_func(
                [inp1, inp2, keras.backend.learning_phase()],
                self.keras_model.layers[-1].output)
            self.keras_output_fprop_func =\
                lambda x,y: keras_output_fprop_func(x,y,False)
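            # Gradient of the summed pre-activation output with respect to both
            # inputs, also evaluated in test mode.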
            grad = theano.grad(theano.tensor.sum(output_preact[:,0]),
                               [inp1, inp2])
            grad_func = theano.function(
                    [inp1, inp2, keras.backend.learning_phase()],
                    grad, allow_input_downcast=True, on_unused_input='ignore')
            self.grad_func = lambda x,y: grad_func(x,y,False)
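
For orientation, here is a minimal sketch of how a test method in the same class might call the helpers prepared by this setUp. The method name and assertions are hypothetical (they are not part of this file); only the expected shapes follow from the model defined above.

def test_compiled_helpers_sketch(self):
        # Hypothetical example; skip when the installed Keras predates the
        # functional API, mirroring the early return in setUp.
        if not self.run_functional_tests:
            return
        # Forward prop in test mode: a batch of 10 gives a sigmoid output of shape (10, 1).
        keras_out = self.keras_output_fprop_func(self.inp1, self.inp2)
        assert keras_out.shape == (10, 1)
        # Gradients of the pre-activation output, one array per input.
        grads = self.grad_func(self.inp1, self.inp2)
        assert len(grads) == 2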