def _res_unit(self, inputs, nb_filter, size=3, stride=1, stage=1, block=1):
    """Bottleneck residual unit: 1x1 reduce -> size-wide conv -> 1x1 expand,
    with a projection shortcut when the shape of `inputs` changes."""
    name = '%02d-%02d/' % (stage, block)
    id_name = '%sid_' % (name)
    res_name = '%sres_' % (name)

    # Residual branch
    # 1x1 down-sample conv (pre-activation: BN -> ReLU -> conv)
    x = kl.BatchNormalization(name=res_name + 'bn1')(inputs)
    x = kl.Activation('relu', name=res_name + 'act1')(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(nb_filter[0], 1,
                  name=res_name + 'conv1',
                  strides=stride,
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)

    # size x size conv (main convolution of the bottleneck)
    x = kl.BatchNormalization(name=res_name + 'bn2')(x)
    x = kl.Activation('relu', name=res_name + 'act2')(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(nb_filter[1], size,
                  name=res_name + 'conv2',
                  padding='same',
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)

    # 1x1 up-sample conv (restore the output channel count)
    x = kl.BatchNormalization(name=res_name + 'bn3')(x)
    x = kl.Activation('relu', name=res_name + 'act3')(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(nb_filter[2], 1,
                  name=res_name + 'conv3',
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)

    # Identity branch: project with a strided 1x1 conv when the channel count
    # or the sequence length changes; otherwise pass `inputs` through unchanged.
    if nb_filter[-1] != inputs._keras_shape[-1] or stride > 1:
        kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
        identity = kl.Conv1D(nb_filter[2], 1,
                             name=id_name + 'conv1',
                             strides=stride,
                             kernel_initializer=self.init,
                             kernel_regularizer=kernel_regularizer)(inputs)
    else:
        identity = inputs

    # Element-wise sum of the two branches (Keras 2: merge() was removed)
    x = kl.add([identity, x], name=name + 'merge')
    return x
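
# A minimal usage sketch (not from the original post): residual units like the
# one above are typically chained into a stage, where the first block may
# change the filter count and down-sample via `stride`, and the remaining
# blocks keep stride 1 so the identity branch is a plain pass-through.
# The `_res_stage` helper below is an illustrative assumption.
def _res_stage(self, inputs, nb_filter, size=3, stride=2, stage=1, blocks=3):
    x = self._res_unit(inputs, nb_filter, size=size, stride=stride,
                       stage=stage, block=1)
    for block in range(2, blocks + 1):
        x = self._res_unit(x, nb_filter, size=size, stride=1,
                           stage=stage, block=block)
    return x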