Python examples of `__version__` usage (collected source snippets)

Source file: topology.py · project: keras · author: NVIDIA
def _updated_config(self):
        """Build the serializable model description shared by all save formats.

        Returns a dict with the class name, the layer config from
        ``get_config()``, and the Keras version it was produced with.
        """
        from keras import __version__ as keras_version

        return {
            'class_name': self.__class__.__name__,
            'config': self.get_config(),
            'keras_version': keras_version,
        }
Source file: app.py · project: Iris-Classification-with-Heroku · author: gaborvecsei
def homepage():
    """Return the landing-page HTML, including the running Keras version."""
    keras_ver = keras.__version__
    page = (
        "\n"
        "    <h1>Welcome at Iris classification by Gabor Vecsei!</h1>\n"
        "\n"
        "    <h4>Keras ver: {0}</h4>\n"
        "    "
    ).format(keras_ver)
    return page
Source file: topology.py · project: InnerOuterRNN · author: Chemoinformatics
def _updated_config(self):
        """Assemble the config dict shared between serialization methods.

        Combines the subclass name, ``get_config()`` output, and the
        installed Keras version into one serializable mapping.
        """
        from keras import __version__ as keras_version

        model_config = {}
        model_config['class_name'] = self.__class__.__name__
        model_config['config'] = self.get_config()
        model_config['keras_version'] = keras_version
        return model_config
Source file: test_conv2d_model_tensorflow_ordering.py · project: deeplift · author: kundajelab
def setUp(self):
        """Build a small Conv2D -> MaxPool -> AvgPool -> Dense Keras model
        using TensorFlow dim ordering ('tf', channels-last), plus compiled
        forward-prop and gradient functions for the deeplift comparison tests.

        On keras <= 0.2 the whole setup is skipped ('tf' ordering support
        and the functional hooks used below are unavailable there).
        """
        if (hasattr(keras, '__version__')==False):
            self.keras_version = 0.2 #didn't have the __version__ tag
        else:
            # Parse the "major.minor" prefix, e.g. "1.2.2" -> 1.2.
            # BUGFIX: was keras.__version__[0:2], which truncates "0.3.0"
            # to "0." (float 0.0) and wrongly took the skip branch below;
            # [0:3] matches the sibling test files in this project.
            self.keras_version = float(keras.__version__[0:3])
        if (self.keras_version <= 0.2): 
            pass
        else:
            # 10 samples of 10 channels x 51 x 51, transposed to
            # channels-last (N, H, W, C) for 'tf' dim ordering.
            self.inp = (np.random.randn(10*10*51*51)
                        .reshape(10,10,51,51).transpose(0,2,3,1))
            self.keras_model = keras.models.Sequential()
            conv_layer = keras.layers.convolutional.Convolution2D(
                            nb_filter=2, nb_row=4, nb_col=4, subsample=(2,2),
                            activation="relu", input_shape=(51,51,10),
                            dim_ordering='tf')
            self.keras_model.add(conv_layer)
            self.keras_model.add(keras.layers.convolutional.MaxPooling2D(
                                 pool_size=(4,4), strides=(2,2),
                                 dim_ordering='tf')) 
            self.keras_model.add(keras.layers.convolutional.AveragePooling2D(
                                 pool_size=(4,4), strides=(2,2),
                                 dim_ordering='tf')) 
            self.keras_model.add(keras.layers.core.Flatten())
            self.keras_model.add(keras.layers.core.Dense(output_dim=1))
            self.keras_model.add(keras.layers.core.Activation("sigmoid"))
            self.keras_model.compile(loss="mse", optimizer="sgd")

            if (self.keras_version <= 0.3): 
                # Old-style API: layers expose get_output(train) directly.
                self.keras_output_fprop_func = compile_func(
                                [self.keras_model.layers[0].input],
                                self.keras_model.layers[-1].get_output(False))
                # Gradient of the pre-sigmoid output w.r.t. the input.
                grad = theano.grad(theano.tensor.sum(
                           self.keras_model.layers[-2].get_output(False)[:,0]),
                           self.keras_model.layers[0].input)
                self.grad_func = theano.function(
                             [self.keras_model.layers[0].input],
                             grad, allow_input_downcast=True,
                             on_unused_input='ignore')
            else:
                # Newer API: .output tensors plus an explicit learning-phase
                # input; wrap so callers pass only the data (phase fixed to
                # False, i.e. test mode).
                keras_output_fprop_func = compile_func(
                    [self.keras_model.layers[0].input,
                     keras.backend.learning_phase()],
                    self.keras_model.layers[-1].output)
                self.keras_output_fprop_func =\
                    lambda x: keras_output_fprop_func(x,False)
                grad = theano.grad(theano.tensor.sum(
                           self.keras_model.layers[-2].output[:,0]),
                           self.keras_model.layers[0].input)
                grad_func = theano.function(
                             [self.keras_model.layers[0].input,
                              keras.backend.learning_phase()],
                             grad, allow_input_downcast=True,
                             on_unused_input='ignore')
                self.grad_func = lambda x: grad_func(x, False)
Source file: test_conv1d_model.py · project: deeplift · author: kundajelab
def setUp(self):
        """Build a small Conv1D -> MaxPool (-> AvgPool) -> Dense Keras model
        and compile forward-prop and gradient functions for comparison tests,
        branching on the detected Keras version for the pre/post-1.0 APIs.
        """
        if (hasattr(keras, '__version__')==False):
            self.keras_version = 0.2 #didn't have the __version__ tag
        else:
            # "major.minor" prefix of the version string, e.g. "1.2.2" -> 1.2
            self.keras_version = float(keras.__version__[0:3])
        # 10 samples of 10 channels x 51 steps, transposed to channels-last
        self.inp = (np.random.randn(10*10*51)
                    .reshape(10,10,51).transpose(0,2,1))
        self.keras_model = keras.models.Sequential()
        conv_layer = keras.layers.convolutional.Convolution1D(
                        nb_filter=2, filter_length=4, subsample_length=2,
                        #re. input_shape=(51,10), that is, putting the channel
                        #axis last; this is actually due to the bug
                        #that seems to date back to v0.2.0...
                        #https://github.com/fchollet/keras/blob/0.2.0/keras/layers/convolutional.py#L88
                        activation="relu", input_shape=(51,10))
        self.keras_model.add(conv_layer)
        self.keras_model.add(keras.layers.convolutional.MaxPooling1D(
                             pool_length=4, stride=2)) 
        if (self.keras_version > 0.2):
            self.keras_model.add(keras.layers.convolutional.AveragePooling1D(
                             pool_length=4, stride=2))
        else:
            pass #there was no average pooling in 0.2.0 it seems
        self.keras_model.add(keras.layers.core.Flatten())
        self.keras_model.add(keras.layers.core.Dense(output_dim=1))
        self.keras_model.add(keras.layers.core.Activation("sigmoid"))
        self.keras_model.compile(loss="mse", optimizer="sgd")

        if (self.keras_version <= 0.3): 
            # Old-style API: layers expose get_output(train) directly.
            self.keras_output_fprop_func = compile_func(
                            [self.keras_model.layers[0].input],
                            self.keras_model.layers[-1].get_output(False))
            # Gradient of the pre-sigmoid output w.r.t. the network input.
            grad = theano.grad(theano.tensor.sum(
                       self.keras_model.layers[-2].get_output(False)[:,0]),
                       self.keras_model.layers[0].input)
            self.grad_func = theano.function(
                         [self.keras_model.layers[0].input],
                         grad, allow_input_downcast=True)
        else:
            # Newer API: .output tensors plus an explicit learning-phase
            # input; wrap so callers pass only data (phase fixed to False,
            # i.e. test mode).
            keras_output_fprop_func = compile_func(
                [self.keras_model.layers[0].input,
                 keras.backend.learning_phase()],
                self.keras_model.layers[-1].output)
            self.keras_output_fprop_func =\
                lambda x: keras_output_fprop_func(x,False)
            grad = theano.grad(theano.tensor.sum(
                       self.keras_model.layers[-2].output[:,0]),
                       self.keras_model.layers[0].input)
            grad_func = theano.function(
                         [self.keras_model.layers[0].input,
                          keras.backend.learning_phase()],
                         grad, allow_input_downcast=True,
                         on_unused_input='ignore')
            self.grad_func = lambda x: grad_func(x, False)
Source file: test_conv2d_model.py · project: deeplift · author: kundajelab
def setUp(self):
        """Build a small Conv2D (+pooling) -> Dense Keras model using Theano
        dim ordering ('th', channels-first) and compile forward-prop and
        gradient functions, branching on the detected Keras version.
        """
        if (hasattr(keras, '__version__')==False):
            self.keras_version = 0.2 #didn't have the __version__ tag
        else:
            # "major.minor" prefix of the version string, e.g. "1.2.2" -> 1.2
            self.keras_version = float(keras.__version__[0:3])

        # 10 samples of 10 channels x 51 x 51, channels-first (N, C, H, W)
        self.inp = (np.random.randn(10*10*51*51)
                    .reshape(10,10,51,51))
        self.keras_model = keras.models.Sequential()
        conv_layer = keras.layers.convolutional.Convolution2D(
                        nb_filter=2, nb_row=4, nb_col=4, subsample=(2,2),
                        activation="relu", input_shape=(10,51,51),
                        dim_ordering='th')
        self.keras_model.add(conv_layer)
        if (self.keras_version > 0.2):
            self.keras_model.add(keras.layers.convolutional.MaxPooling2D(
                             pool_size=(4,4), strides=(2,2),
                             dim_ordering='th')) 
            self.keras_model.add(keras.layers.convolutional.AveragePooling2D(
                             pool_size=(4,4), strides=(2,2),
                             dim_ordering='th'))
        else:
            print(self.keras_version)
            # 0.2.0 API: 'stride' keyword, no dim_ordering argument
            self.keras_model.add(keras.layers.convolutional.MaxPooling2D(
                             pool_size=(4,4), stride=(2,2)))  
            #There is no average pooling in version 0.2.0
        self.keras_model.add(keras.layers.core.Flatten())
        self.keras_model.add(keras.layers.core.Dense(output_dim=1))
        self.keras_model.add(keras.layers.core.Activation("sigmoid"))
        self.keras_model.compile(loss="mse", optimizer="sgd")
        if (self.keras_version <= 0.3): 
            # Old-style API: layers expose get_output(train) directly.
            self.keras_output_fprop_func = compile_func(
                            [self.keras_model.layers[0].input],
                            self.keras_model.layers[-1].get_output(False))
            # Gradient of the pre-sigmoid output w.r.t. the network input.
            grad = theano.grad(theano.tensor.sum(
                       self.keras_model.layers[-2].get_output(False)[:,0]),
                       self.keras_model.layers[0].input)
            self.grad_func = theano.function(
                         [self.keras_model.layers[0].input],
                         grad, allow_input_downcast=True,
                         on_unused_input='ignore')
        else:
            # Newer API: .output tensors plus an explicit learning-phase
            # input; wrap so callers pass only data (phase fixed to False,
            # i.e. test mode).
            keras_output_fprop_func = compile_func(
                [self.keras_model.layers[0].input,
                 keras.backend.learning_phase()],
                self.keras_model.layers[-1].output)
            self.keras_output_fprop_func =\
                lambda x: keras_output_fprop_func(x,False)
            grad = theano.grad(theano.tensor.sum(
                       self.keras_model.layers[-2].output[:,0]),
                       self.keras_model.layers[0].input)
            grad_func = theano.function(
                         [self.keras_model.layers[0].input,
                          keras.backend.learning_phase()],
                         grad, allow_input_downcast=True,
                         on_unused_input='ignore')
            self.grad_func = lambda x: grad_func(x, False)
Source file: test_functional_concatenate_model.py · project: deeplift · author: kundajelab
def setUp(self):
        """Build a two-input functional Keras model whose branches share a
        Conv1D and a MaxPooling1D layer, concatenate them, and finish with
        dense layers; compile forward-prop and gradient functions.

        Requires the functional API (keras >= 1.0); on older versions the
        tests are flagged to be skipped via ``self.run_functional_tests``.
        """
        if (hasattr(keras, '__version__')==False):
            self.keras_version = 0.2 #didn't have the __version__ tag
        else:
            # "major.minor" prefix of the version string, e.g. "1.2.2" -> 1.2
            self.keras_version = float(keras.__version__[0:3])
        # Two batches of 10 samples, 10 channels x 51 steps, channels-last
        self.inp1 = (np.random.randn(10*10*51)
                    .reshape(10,10,51).transpose(0,2,1))
        self.inp2 = (np.random.randn(10*10*51)
                    .reshape(10,10,51).transpose(0,2,1))
        self.run_functional_tests = True
        if (self.keras_version < 1.0):
            self.run_functional_tests = False
            return #skip setup
        inp1 = keras.layers.Input(shape=(51,10), name="inp1")
        inp2 = keras.layers.Input(shape=(51,10), name="inp2")
        # The same conv and pooling layer instances are applied to both
        # inputs, so the two branches share weights.
        conv = keras.layers.convolutional.Convolution1D(
                 nb_filter=2, filter_length=4,
                 subsample_length=2, activation="relu")
        maxpool = keras.layers.convolutional.MaxPooling1D(pool_length=4, stride=2)
        conv1_out = conv(inp1)
        conv2_out = conv(inp2)
        maxpool1_out = maxpool(conv1_out)
        maxpool2_out = maxpool(conv2_out)
        # Concatenate the two pooled branches along the channel axis
        merge_out = keras.layers.merge([maxpool1_out, maxpool2_out],
                                       mode='concat', concat_axis=2)
        flatten_out = keras.layers.core.Flatten()(merge_out)
        dense1_out = keras.layers.core.Dense(output_dim=5)(flatten_out)
        dense1relu_out = keras.layers.core.Activation("relu")(dense1_out)
        output_preact = keras.layers.core.Dense(
                         output_dim=1, name="output_preact")(dense1relu_out)
        output = keras.layers.core.Activation("sigmoid",
                        name="output_postact")(output_preact)
        self.keras_model = keras.models.Model(input=[inp1, inp2],
                                              output=output)
        self.keras_model.compile(optimizer='rmsprop',
                              loss='binary_crossentropy',
                              metrics=['accuracy'])

        if (self.keras_version <= 0.3): 
            # Unreachable here: versions <= 0.3 already returned above.
            pass
        else:
            # Wrap the compiled functions so callers pass only the two data
            # arrays (learning phase fixed to False, i.e. test mode).
            keras_output_fprop_func = compile_func(
                [inp1, inp2, keras.backend.learning_phase()],
                self.keras_model.layers[-1].output)
            self.keras_output_fprop_func =\
                lambda x,y: keras_output_fprop_func(x,y,False)
            # Gradient of the pre-sigmoid output w.r.t. both inputs.
            grad = theano.grad(theano.tensor.sum(output_preact[:,0]),
                               [inp1, inp2])
            grad_func = theano.function(
                    [inp1, inp2, keras.backend.learning_phase()],
                    grad, allow_input_downcast=True, on_unused_input='ignore')
            self.grad_func = lambda x,y: grad_func(x,y,False)


Questions


Interview experiences


Articles

WeChat
Official account

Scan the QR code to follow the official account