def build(self, input_shape):
    # Determine which axis holds the channels.
    if self.data_format == 'channels_first':
        channel_axis = 1
    else:
        channel_axis = -1
    if input_shape[channel_axis] is None:
        raise ValueError('The channel dimension of the inputs '
                         'should be defined. Found `None`.')
    input_dim = input_shape[channel_axis]
    kernel_shape = self.kernel_size + (input_dim, self.filters)

    # Receptive-field size, used for the Glorot-style scaling below.
    base = self.kernel_size[0] * self.kernel_size[1]
    if self.H == 'Glorot':
        # Glorot/Xavier-style bound H for the binarized weights.
        nb_input = int(input_dim * base)
        nb_output = int(self.filters * base)
        self.H = np.float32(np.sqrt(1.5 / (nb_input + nb_output)))
        # print('Glorot H: {}'.format(self.H))
    if self.kernel_lr_multiplier == 'Glorot':
        # Scale the kernel learning rate by the inverse of the Glorot bound.
        nb_input = int(input_dim * base)
        nb_output = int(self.filters * base)
        self.kernel_lr_multiplier = np.float32(1. / np.sqrt(1.5 / (nb_input + nb_output)))
        # print('Glorot learning rate multiplier: {}'.format(self.kernel_lr_multiplier))

    # Keep the real-valued weights in [-H, H] and initialize them uniformly in that range.
    self.kernel_constraint = Clip(-self.H, self.H)
    self.kernel_initializer = initializers.RandomUniform(-self.H, self.H)
    self.kernel = self.add_weight(shape=kernel_shape,
                                  initializer=self.kernel_initializer,
                                  name='kernel',
                                  regularizer=self.kernel_regularizer,
                                  constraint=self.kernel_constraint)

    if self.use_bias:
        self.lr_multipliers = [self.kernel_lr_multiplier, self.bias_lr_multiplier]
        self.bias = self.add_weight(shape=(self.filters,),
                                    initializer=self.bias_initializer,
                                    name='bias',
                                    regularizer=self.bias_regularizer,
                                    constraint=self.bias_constraint)
    else:
        self.lr_multipliers = [self.kernel_lr_multiplier]
        self.bias = None

    # Set input spec.
    self.input_spec = InputSpec(ndim=4, axes={channel_axis: input_dim})
    self.built = True
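The kernel_constraint above relies on a Clip constraint that keeps the real-valued shadow weights inside [-H, H]. The class is not shown in this excerpt; a minimal sketch of such a constraint (assuming the Keras backend is imported as K; the actual implementation in the accompanying code may differ) could look like this:

from keras import backend as K
from keras import constraints

class Clip(constraints.Constraint):
    """Clips weights element-wise to the interval [min_value, max_value]."""
    def __init__(self, min_value, max_value=None):
        self.min_value = min_value
        # If only one bound is given, assume a symmetric interval [-v, v].
        self.max_value = max_value if max_value is not None else -min_value
        if self.min_value > self.max_value:
            self.min_value, self.max_value = self.max_value, self.min_value

    def __call__(self, p):
        return K.clip(p, self.min_value, self.max_value)

    def get_config(self):
        return {'min_value': self.min_value, 'max_value': self.max_value}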