import mxnet as mx


def conv_act_layer(from_layer, name, num_filter, kernel=(3, 3), pad=(1, 1),
                   stride=(1, 1), act_type="relu", use_batchnorm=True):
    """
    wrapper for a small Convolution group: Convolution -> (BatchNorm) -> activation

    Parameters:
    ----------
    from_layer : mx.symbol
        continue on which layer
    name : str
        base name of the new layers
    num_filter : int
        how many filters to use in the Convolution layer
    kernel : tuple (int, int)
        kernel size (h, w)
    pad : tuple (int, int)
        padding size (h, w)
    stride : tuple (int, int)
        stride size (h, w)
    act_type : str
        activation type, one of 'relu', 'sigmoid', 'softrelu', 'tanh',
        'elu', 'leaky', 'prelu', 'rrelu'
    use_batchnorm : bool
        whether to use batch normalization

    Returns:
    ----------
    relu : mx.symbol
        output symbol of the activation layer
    """
    conv = mx.symbol.Convolution(data=from_layer, kernel=kernel, pad=pad,
                                 stride=stride, num_filter=num_filter,
                                 name="{}".format(name))
    if use_batchnorm:
        conv = mx.symbol.BatchNorm(data=conv, name="bn_{}".format(name))
    if act_type in ['elu', 'leaky', 'prelu', 'rrelu']:
        # these variants are handled by LeakyReLU rather than Activation
        relu = mx.symbol.LeakyReLU(data=conv, act_type=act_type,
                                   name="{}_{}".format(act_type, name), slope=0.1)
    elif act_type in ['relu', 'sigmoid', 'softrelu', 'tanh']:
        relu = mx.symbol.Activation(data=conv, act_type=act_type,
                                    name="{}_{}".format(act_type, name))
    else:
        raise ValueError("Invalid activation type: " + str(act_type))
    return relu
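
As a rough usage sketch (the layer name "conv4_1", the 64-filter setting, and the 224x224 input shape below are illustrative values, not taken from the original code), the wrapper chains into a symbolic graph like any other MXNet symbol:

# a minimal usage sketch; names and shapes are made up for illustration
data = mx.symbol.Variable(name="data")
# 3x3 convolution with 64 filters, batch norm, then ReLU, all named after "conv4_1"
relu = conv_act_layer(data, name="conv4_1", num_filter=64,
                      kernel=(3, 3), pad=(1, 1), stride=(1, 1),
                      act_type="relu", use_batchnorm=True)
# with pad=(1, 1) and stride=(1, 1) the spatial size is preserved:
# (1, 3, 224, 224) -> (1, 64, 224, 224)
_, out_shapes, _ = relu.infer_shape(data=(1, 3, 224, 224))
print(out_shapes)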