def test_modspec_gradcheck():
    static_dim = 12
    T = 16
    torch.manual_seed(1234)
    inputs = (Variable(torch.rand(T, static_dim), requires_grad=True),)
    n = 16
    for norm in [None, "ortho"]:
        assert gradcheck(ModSpec(n=n, norm=norm), inputs, eps=1e-4, atol=1e-4)
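The pattern above generalizes to any differentiable module: seed the RNG, build a tuple of inputs with requires_grad=True, and run gradcheck for each configuration. A minimal sketch, assuming a stock torch.nn.Linear as a stand-in for the repository-specific ModSpec, and using double precision because gradcheck relies on finite differences:

import torch
from torch.autograd import gradcheck

def test_linear_gradcheck_sketch():
    torch.manual_seed(1234)
    # double precision keeps the finite-difference error well below atol
    layer = torch.nn.Linear(12, 4).double()
    inputs = (torch.rand(16, 12, dtype=torch.double, requires_grad=True),)
    assert gradcheck(layer, inputs, eps=1e-6, atol=1e-4)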
def eval_model(model_path, mode='dev'):
    torch.manual_seed(6)
    snli_d, mnli_d, embd = data_loader.load_data_sm(
        config.DATA_ROOT, config.EMBD_FILE, reseversed=False, batch_sizes=(32, 32, 32, 32, 32), device=0)
    m_train, m_dev_m, m_dev_um, m_test_m, m_test_um = mnli_d

    m_dev_um.shuffle = False
    m_dev_m.shuffle = False
    m_dev_um.sort = False
    m_dev_m.sort = False

    m_test_um.shuffle = False
    m_test_m.shuffle = False
    m_test_um.sort = False
    m_test_m.sort = False

    model = StackBiLSTMMaxout()
    model.Embd.weight.data = embd

    if torch.cuda.is_available():
        embd.cuda()
        model.cuda()

    criterion = nn.CrossEntropyLoss()
    model.load_state_dict(torch.load(model_path))
    model.max_l = 150

    m_pred = model_eval(model, m_dev_m, criterion)
    um_pred = model_eval(model, m_dev_um, criterion)

    print("dev_mismatched_score (acc, loss):", um_pred)
    print("dev_matched_score (acc, loss):", m_pred)
def run_tests():
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--seed', type=int, default=123)
    args, remaining = parser.parse_known_args()
    torch.manual_seed(args.seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(args.seed)
    remaining = [sys.argv[0]] + remaining
    unittest.main(argv=remaining)
def _async_set_seed(self, rank, device_id, seed):
    torch.manual_seed(seed)
def set_seed(seed, use_cuda):
    """
    Set the seed for controlling randomness in this example.

    :param seed: seed value (int)
    :param use_cuda: whether to also seed torch.cuda
    :return: None
    """
    if seed is not None:
        torch.manual_seed(seed)
        np.random.seed(seed)
        if use_cuda:
            torch.cuda.manual_seed(seed)
def set_rng_seed(rng_seed):
    """
    Sets seeds of torch, numpy, and torch.cuda (if available).

    :param int rng_seed: The seed value.
    """
    torch.manual_seed(rng_seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(rng_seed)
    np.random.seed(rng_seed)
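A brief usage sketch for the helper above (the call site is hypothetical, not taken from the original project): seeding must happen before any random tensors are drawn, and reseeding with the same value replays the exact same draws.

import numpy as np
import torch

set_rng_seed(42)                  # defined above; seeds torch, CUDA and numpy
first = torch.randn(3)

set_rng_seed(42)                  # reseeding replays the same sequence
second = torch.randn(3)
assert torch.equal(first, second)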
def manual_seed(seed):
    return torch.manual_seed(seed)
def init_random_seed(manual_seed):
    """Init random seed."""
    seed = None
    if manual_seed is None:
        seed = random.randint(1, 10000)
    else:
        seed = manual_seed
    print("use random seed: {}".format(seed))
    random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
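A short usage note on the helper above (call sites are hypothetical): passing None draws a fresh seed and prints it, so a run can be reproduced after the fact, while passing an integer pins the run up front.

init_random_seed(None)   # prints e.g. "use random seed: 4821"; rerun with 4821 to reproduce
init_random_seed(1234)   # fully pinned run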
def train(self, dataset):
    self.model.train()
    self.embedding_model.train()
    self.embedding_model.zero_grad()
    self.optimizer.zero_grad()
    loss, k = 0.0, 0
    # torch.manual_seed(789)
    indices = torch.randperm(len(dataset))
    for idx in tqdm(range(len(dataset)), desc='Training epoch ' + str(self.epoch + 1) + ''):
        tree, sent, label = dataset[indices[idx]]
        input = Var(sent)
        target = Var(torch.LongTensor([int(label)]))
        if self.args.cuda:
            input = input.cuda()
            target = target.cuda()
        emb = F.torch.unsqueeze(self.embedding_model(input), 1)
        output, err, _, _ = self.model.forward(tree, emb, training=True)
        # params = self.model.childsumtreelstm.getParameters()
        # params_norm = params.norm()
        err = err / self.args.batchsize  # + 0.5*self.args.reg*params_norm*params_norm # custom bias
        loss += err.data[0]
        err.backward()
        k += 1
        if k == self.args.batchsize:
            # manual SGD step on the embedding table, then the optimizer step for the rest
            for f in self.embedding_model.parameters():
                f.data.sub_(f.grad.data * self.args.emblr)
            self.optimizer.step()
            self.embedding_model.zero_grad()
            self.optimizer.zero_grad()
            k = 0
    self.epoch += 1
    return loss / len(dataset)
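The inner update above is a hand-rolled SGD step on the embedding table (each parameter is decremented by lr * grad). A minimal equivalent sketch using torch.optim.SGD, with hypothetical names (embedding_model, emblr) standing in for the attributes used above:

import torch

embedding_model = torch.nn.Embedding(1000, 300)   # stand-in embedding table
emblr = 0.1                                       # stand-in embedding learning rate
emb_optimizer = torch.optim.SGD(embedding_model.parameters(), lr=emblr)

# ... after err.backward() has populated gradients ...
emb_optimizer.step()        # same effect as f.data.sub_(f.grad.data * emblr)
emb_optimizer.zero_grad()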
# helper function for testing
def _async_set_seed(self, rank, device_id, seed):
    torch.manual_seed(seed)
def set_seed(seed):
    """Sets random seed everywhere."""
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(seed)
    random.seed(seed)
    np.random.seed(seed)
def set_random_seed(seed):
    global random_seed
    random_seed = seed
    np.random.seed(seed)
    torch.manual_seed(random_seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(random_seed)
def init_random_seed(manual_seed):
    """Init random seed."""
    seed = None
    if manual_seed is None:
        seed = random.randint(1, 10000)
    else:
        seed = manual_seed
    print("use random seed: {}".format(seed))
    random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
def init_random_seed():
    """Init random seed."""
    seed = None
    if params.manual_seed is None:
        seed = random.randint(1, 10000)
    else:
        seed = params.manual_seed
    print("use random seed: {}".format(seed))
    random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
def init_random_seed():
    """Init random seed."""
    seed = None
    if manual_seed is None:
        seed = random.randint(1, 10000)
    else:
        seed = manual_seed
    print("use random seed: {}".format(seed))
    random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
def init_random_seed():
    """Init random seed."""
    seed = None
    if params.manual_seed is None:
        seed = random.randint(1, 10000)
    else:
        seed = params.manual_seed
    print("use random seed: {}".format(seed))
    random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
def setUp(self):
    random.seed(123)
    torch.manual_seed(123)
def run_tests():
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--seed', type=int, default=123)
    args, remaining = parser.parse_known_args()
    torch.manual_seed(args.seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(args.seed)
    remaining = [sys.argv[0]] + remaining
    unittest.main(argv=remaining)
def test_rand(self):
    torch.manual_seed(123456)
    res1 = torch.rand(SIZE, SIZE)
    res2 = torch.Tensor()
    torch.manual_seed(123456)
    torch.rand(SIZE, SIZE, out=res2)
    self.assertEqual(res1, res2)
def test_randn(self):
    torch.manual_seed(123456)
    res1 = torch.randn(SIZE, SIZE)
    res2 = torch.Tensor()
    torch.manual_seed(123456)
    torch.randn(SIZE, SIZE, out=res2)
    self.assertEqual(res1, res2)
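Both tests above reseed the global RNG to show that the same seed reproduces the same draws. The same check can be written against an explicit torch.Generator instead of the global state; a minimal sketch (assumes a reasonably recent PyTorch, and SIZE is just an example value):

import torch

SIZE = 100
gen = torch.Generator().manual_seed(123456)
res1 = torch.rand(SIZE, SIZE, generator=gen)
gen.manual_seed(123456)
res2 = torch.rand(SIZE, SIZE, generator=gen)
assert torch.equal(res1, res2)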