FaceAlignment.py source code (Python)


Project: DeepAlignmentNetwork · Author: MarekKowalski. The addDANStage method below appends one Deep Alignment Network (DAN) stage to a dictionary of Lasagne layers.
# Requires lasagne (Conv2DLayer and batch_norm from lasagne.layers,
# GlorotUniform from lasagne.init) plus the project's custom layers:
# TransformParamsLayer, AffineTransformLayer, LandmarkTransformLayer, LandmarkImageLayer.
def addDANStage(self, stageIdx, net):
        prevStage = 's' + str(stageIdx - 1)
        curStage = 's' + str(stageIdx)

        # CONNECTION LAYERS OF PREVIOUS STAGE
        # Estimate the affine transform mapping the previous stage's landmark
        # estimate to the canonical shape, then warp the input image with it.
        net[prevStage + '_transform_params'] = TransformParamsLayer(net[prevStage + '_landmarks'], self.initLandmarks)
        net[prevStage + '_img_output'] = AffineTransformLayer(net['input'], net[prevStage + '_transform_params'])

        # Move the landmarks into the canonical frame and rasterize them into
        # a landmark heatmap image.
        net[prevStage + '_landmarks_affine'] = LandmarkTransformLayer(net[prevStage + '_landmarks'], net[prevStage + '_transform_params'])
        net[prevStage + '_img_landmarks'] = LandmarkImageLayer(net[prevStage + '_landmarks_affine'], (self.imageHeight, self.imageWidth), self.landmarkPatchSize)

        # Project the previous stage's fc1 features onto a 56x56 grid, then
        # upscale 2x so the feature image matches the input resolution.
        net[prevStage + '_img_feature'] = lasagne.layers.DenseLayer(net[prevStage + '_fc1'], num_units=56 * 56, W=GlorotUniform('relu'))
        net[prevStage + '_img_feature'] = lasagne.layers.ReshapeLayer(net[prevStage + '_img_feature'], (-1, 1, 56, 56))
        net[prevStage + '_img_feature'] = lasagne.layers.Upscale2DLayer(net[prevStage + '_img_feature'], 2)

        # CURRENT STAGE
        # Concatenate the warped image, landmark heatmap, and feature image
        # along the channel axis to form this stage's input.
        net[curStage + '_input'] = batch_norm(lasagne.layers.ConcatLayer([net[prevStage + '_img_output'], net[prevStage + '_img_landmarks'], net[prevStage + '_img_feature']], 1))

        # Four VGG-style conv blocks (64, 128, 256, 512 channels), each
        # followed by 2x2 max pooling; all 3x3 convolutions are zero-padded
        # to preserve spatial size.
        net[curStage + '_conv1_1'] = batch_norm(Conv2DLayer(net[curStage + '_input'], 64, 3, pad='same', W=GlorotUniform('relu')))
        net[curStage + '_conv1_2'] = batch_norm(Conv2DLayer(net[curStage + '_conv1_1'], 64, 3, pad='same', W=GlorotUniform('relu')))
        net[curStage + '_pool1'] = lasagne.layers.Pool2DLayer(net[curStage + '_conv1_2'], 2)

        net[curStage + '_conv2_1'] = batch_norm(Conv2DLayer(net[curStage + '_pool1'], 128, 3, pad='same', W=GlorotUniform('relu')))
        net[curStage + '_conv2_2'] = batch_norm(Conv2DLayer(net[curStage + '_conv2_1'], 128, 3, pad='same', W=GlorotUniform('relu')))
        net[curStage + '_pool2'] = lasagne.layers.Pool2DLayer(net[curStage + '_conv2_2'], 2)

        net[curStage + '_conv3_1'] = batch_norm(Conv2DLayer(net[curStage + '_pool2'], 256, 3, pad='same', W=GlorotUniform('relu')))
        net[curStage + '_conv3_2'] = batch_norm(Conv2DLayer(net[curStage + '_conv3_1'], 256, 3, pad='same', W=GlorotUniform('relu')))
        net[curStage + '_pool3'] = lasagne.layers.Pool2DLayer(net[curStage + '_conv3_2'], 2)

        net[curStage + '_conv4_1'] = batch_norm(Conv2DLayer(net[curStage + '_pool3'], 512, 3, pad='same', W=GlorotUniform('relu')))
        net[curStage + '_conv4_2'] = batch_norm(Conv2DLayer(net[curStage + '_conv4_1'], 512, 3, pad='same', W=GlorotUniform('relu')))
        net[curStage + '_pool4'] = lasagne.layers.Pool2DLayer(net[curStage + '_conv4_2'], 2)

        # Flatten (note: this reuses the '_pool4' key), then apply dropout and
        # a 256-unit fully connected layer.
        net[curStage + '_pool4'] = lasagne.layers.FlattenLayer(net[curStage + '_pool4'])
        net[curStage + '_fc1_dropout'] = lasagne.layers.DropoutLayer(net[curStage + '_pool4'], p=0.5)

        net[curStage + '_fc1'] = batch_norm(lasagne.layers.DenseLayer(net[curStage + '_fc1_dropout'], num_units=256, W=GlorotUniform('relu')))

        # Predict a landmark update (136 = 68 x,y coordinates) and add it to
        # the previous landmarks in the canonical frame.
        net[curStage + '_output'] = lasagne.layers.DenseLayer(net[curStage + '_fc1'], num_units=136, nonlinearity=None)
        net[curStage + '_landmarks'] = lasagne.layers.ElemwiseSumLayer([net[prevStage + '_landmarks_affine'], net[curStage + '_output']])

        # Map the landmarks back from the canonical frame to the original
        # image (the trailing True flag requests the inverse transform).
        net[curStage + '_landmarks'] = LandmarkTransformLayer(net[curStage + '_landmarks'], net[prevStage + '_transform_params'], True)
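
Each call consumes the previous stage's '_landmarks' and '_fc1' entries from the net dictionary, so a driver only needs to build the first stage once and can then stack further stages in a loop. A minimal usage sketch; buildFirstStage and buildDAN are hypothetical helpers, not the project's actual API:

def buildDAN(self, nStages):
    # Hypothetical driver: stage 1 layers ('input', 's1_fc1', 's1_landmarks')
    # are assumed to be created by a separate first-stage builder.
    net = {}
    net['input'] = lasagne.layers.InputLayer(
        shape=(None, 1, self.imageHeight, self.imageWidth))
    self.buildFirstStage(net)  # hypothetical helper: fills the s1_* entries
    for stageIdx in range(2, nStages + 1):
        self.addDANStage(stageIdx, net)  # each stage refines s{i-1}_landmarks
    return net['s' + str(nStages) + '_landmarks']

Keeping every layer in one flat dict keyed by stage prefix makes it straightforward to tap intermediate outputs, e.g. the per-stage landmark estimates.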