def __init__(self, ch, bn=True, activation=F.relu, k_size=3):
    """Residual block: two ch->ch convolutions with optional batch norm.

    Registers the child links (convolutions and, when enabled, batch
    normalizations) on the superclass; ``activation`` is only stored here,
    presumably for use in the forward pass (defined elsewhere).

    Args:
        ch (int): number of input and output channels (both convs are ch->ch).
        bn (bool): if True, also register a BatchNormalization link per conv.
        activation: activation function kept on the instance.
        k_size (int): convolution kernel size; with an odd k_size the padding
            k_size // 2 preserves spatial dimensions ("same" padding).
    """
    self.bn = bn
    self.activation = activation
    layers = {}
    # "same" padding for odd kernel sizes.
    pad = k_size // 2
    # Fix: the original hard-coded kernel size 3 here while still computing
    # pad from k_size, so any k_size != 3 was silently ignored and the
    # padding no longer matched the kernel. Use k_size consistently.
    layers['c0'] = L.Convolution2D(ch, ch, k_size, 1, pad)
    layers['c1'] = L.Convolution2D(ch, ch, k_size, 1, pad)
    if bn:
        layers['bn0'] = L.BatchNormalization(ch)
        layers['bn1'] = L.BatchNormalization(ch)
    super(ResBlock, self).__init__(**layers)
# (removed non-code page-scrape residue: "评论列表" [comment list] / "文章目录" [article table of contents])