Example source code for Python constant()
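For orientation, a minimal sketch of what init.constant does. Note that since PyTorch 0.4 the in-place initializers carry a trailing underscore (init.constant_ and friends); the underscore-free spellings used throughout the snippets below are the older, deprecated forms.

import torch.nn as nn
from torch.nn import init

layer = nn.Linear(4, 2)
init.constant(layer.bias, 0.0)     # deprecated spelling, as used in these snippets
# init.constant_(layer.bias, 0.0)  # modern equivalent (PyTorch >= 0.4)
print(layer.bias)                  # every element is now 0.0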
def test_constant(self):
    for as_variable in [True, False]:
        for dims in [1, 2, 4]:
            input_tensor = self._create_random_nd_tensor(dims, size_min=1, size_max=5, as_variable=as_variable)
            val = self._random_float(1, 10)
            init.constant(input_tensor, val)
            if as_variable:
                input_tensor = input_tensor.data
            self.assertEqual(input_tensor, input_tensor.clone().fill_(val))
def reset_parameters(self):
    init.constant(self.weight, self.gamma)
def weights_init(self, module):
    for m in module.modules():
        if isinstance(m, nn.Conv2d):
            init.xavier_uniform(m.weight, gain=np.sqrt(2))
            init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            m.weight.data.fill_(1)
            m.bias.data.zero_()
def msra_init(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.kaiming_normal(m.weight)
            if m.bias is not None:  # `if m.bias:` raises for multi-element tensors
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
def xavier_init(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.xavier_normal(m.weight)
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
def gauss_init(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.normal(m.weight, 0.0, 0.01)  # original call omitted the tensor argument
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
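The three helpers above each walk a whole network via net.modules(). A minimal usage sketch (the tiny model here is hypothetical, not from the original source):

import torch.nn as nn

net = nn.Sequential(
    nn.Conv2d(3, 8, kernel_size=3),
    nn.BatchNorm2d(8),
)
msra_init(net)  # or xavier_init(net) / gauss_init(net)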
def weights_init_normal(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.normal(m.weight.data, 0.0, 0.02)
    elif classname.find('Linear') != -1:
        init.normal(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
def weights_init_xavier(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif classname.find('Linear') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
def weights_init_kaiming(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
    elif classname.find('Linear') != -1:
        init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
def weights_init_orthogonal(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.orthogonal(m.weight.data, gain=1)
    elif classname.find('Linear') != -1:
        init.orthogonal(m.weight.data, gain=1)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
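Unlike the *_init(net) helpers above, these weights_init_* functions take a single module and dispatch on its class name, so they are meant to be passed to Module.apply, which calls the function on every submodule. A usage sketch with a hypothetical model:

import torch.nn as nn

net = nn.Sequential(nn.Conv2d(3, 8, kernel_size=3), nn.BatchNorm2d(8))
net.apply(weights_init_kaiming)  # any of the four functions above fits here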
def reset_parameters(self):
    """
    Initialize parameters
    """
    init.uniform(self.thetaA, a=-0.1, b=0.1)
    init.uniform(self.thetaB, a=-0.1, b=0.1)
    init.uniform(self.U, a=-0.1, b=0.1)
    init.constant(self.bias.data, val=0)
def reset_parameters(self):
    """
    Initialize parameters (TODO)
    """
    init.uniform(self.thetaA, a=-0.1, b=0.1)
    init.uniform(self.thetaB, a=-0.1, b=0.1)
    init.uniform(self.U, a=-0.1, b=0.1)
    init.orthogonal(self.gate_U.data)
    gate_W_data = torch.eye(self.hidden_size)
    gate_W_data = gate_W_data.repeat(1, 2)
    self.gate_W.data.set_(gate_W_data)
    init.constant(self.bias.data, val=0)
    init.constant(self.gate_bias.data, val=0)
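For reference, the gate_W initialization above tiles the identity horizontally: torch.eye(h).repeat(1, 2) produces an (h, 2h) block matrix [I | I], so each gate initially weights its two input blocks identically. A quick check:

import torch

w = torch.eye(3).repeat(1, 2)
print(w.shape)  # torch.Size([3, 6]): two 3x3 identity blocks side by side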
def reset_parameters(self):
    if hasattr(self, 'sigma_weight'):  # only init after all params are added (otherwise super().__init__() fails)
        init.uniform(self.weight, -math.sqrt(3 / self.in_features), math.sqrt(3 / self.in_features))
        init.uniform(self.bias, -math.sqrt(3 / self.in_features), math.sqrt(3 / self.in_features))
        init.constant(self.sigma_weight, self.sigma_init)
        init.constant(self.sigma_bias, self.sigma_init)
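Context sketch for the guard above (an assumed layout, not from the original source): nn.Linear.__init__ calls reset_parameters() before any extra parameters have been registered, hence the hasattr check. A NoisyNet-style constructor consistent with it might look like this, with the reset_parameters method above defined on the same class:

import torch
import torch.nn as nn

class NoisyLinear(nn.Linear):
    def __init__(self, in_features, out_features, sigma_init=0.017):  # sigma_init value is an assumption
        self.sigma_init = sigma_init
        super().__init__(in_features, out_features)  # calls reset_parameters(); the guard makes it a no-op
        self.sigma_weight = nn.Parameter(torch.full((out_features, in_features), sigma_init))
        self.sigma_bias = nn.Parameter(torch.full((out_features,), sigma_init))
        self.reset_parameters()  # all parameters exist now, so the real init runs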
def initWeight(self, init_forget_bias=1):
    # See details in https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/rnn.py
    for name, params in self.named_parameters():
        # Xavier-initialize the weights
        if 'weight' in name:
            init.xavier_uniform(params)
        # Set the forget-gate biases (b_iz, b_hz in the GRU) to init_forget_bias
        elif 'gru.bias_ih_l' in name:
            b_ir, b_iz, b_in = params.chunk(3, 0)
            init.constant(b_iz, init_forget_bias)
        elif 'gru.bias_hh_l' in name:
            b_hr, b_hz, b_hn = params.chunk(3, 0)
            init.constant(b_hz, init_forget_bias)
        # Initialize all other biases to 0
        else:
            init.constant(params, 0)
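A note on the chunking above: PyTorch stores each GRU bias as the concatenation (b_ir | b_iz | b_in), so params.chunk(3, 0) splits it into the reset-, update-, and new-gate parts in that order; raising b_iz / b_hz biases the update gate, the closest GRU analogue of an LSTM forget-gate bias. A usage sketch with a hypothetical model:

import torch.nn as nn
from torch.nn import init

class TinyGRUNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.gru = nn.GRU(input_size=8, hidden_size=16)

TinyGRUNet.initWeight = initWeight  # attach the initializer above as a method
net = TinyGRUNet()
net.initWeight(init_forget_bias=1)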