ncg.py 文件源码

python
阅读 16 收藏 0 点赞 0 评论 0

项目:neural-coarse-graining 作者: arayabrain 项目源码 文件源码
def build_model():
    """Build the neural coarse-graining network as a dict of named layers.

    The network has two stages: a "transformer" that convolves the raw input
    in place down to ``CLASSES`` coarse-grained channels, and a "predictor"
    that forecasts those channels at an offset of DISTANCE.

    Relies on module-level globals: ``lasagne``, ``data``, ``invar1``,
    ``args``, ``CLASSES``, ``leakyReLU``, ``convSoftmax``, ``batch_norm``.

    Returns:
        dict: layer name -> Lasagne layer, keys "input1", "transform1..3",
        "predictor1..3".
    """
    def conv1d(src, n_filters, fsize, nonlin):
        # Shared constructor for the "same"-padded 1-D convolutions.
        # NOTE(review): gain='relu' is also used for the tanh/softmax output
        # layers below — confirm that initialization gain is intentional.
        return lasagne.layers.Conv1DLayer(
            incoming=src,
            num_filters=n_filters,
            filter_size=fsize,
            pad="same",
            nonlinearity=nonlin,
            W=lasagne.init.GlorotUniform(gain='relu'),
        )

    layers = {}

    # Batch size and sequence length are both left unconstrained (None).
    layers["input1"] = lasagne.layers.InputLayer(
        shape=(None, data.shape[1], None), input_var=invar1)

    # Transformer: in-place convolutions mapping the input onto the new
    # coarse-grained classes.
    layers["transform1"] = batch_norm(
        conv1d(layers["input1"], args.tr_filt1, args.tr_fs1, leakyReLU))
    layers["transform2"] = batch_norm(
        conv1d(layers["transform1"], args.tr_filt2, args.tr_fs2, leakyReLU))

    # Continuous CG variables get a tanh output; otherwise softmax so the
    # CLASSES channels form a probability distribution.
    out_nonlin = lasagne.nonlinearities.tanh if args.continuous else convSoftmax
    layers["transform3"] = conv1d(layers["transform2"], CLASSES, 1, out_nonlin)

    # Predictor: take the coarse-grained classes and predict them at an
    # offset of DISTANCE.
    layers["predictor1"] = batch_norm(
        conv1d(layers["transform3"], args.pr_filt1, args.pr_fs1, leakyReLU))
    layers["predictor2"] = batch_norm(
        conv1d(layers["predictor1"], args.pr_filt2, args.pr_fs2, leakyReLU))
    layers["predictor3"] = conv1d(layers["predictor2"], CLASSES, 1, out_nonlin)

    return layers
评论列表
文章目录


问题


面经


文章

微信
公众号

扫码关注公众号