# Module-level imports needed by this method (DeviceWarning is the warning
# class from the library's exceptions module, skorch.exceptions in skorch):
import tempfile
import warnings

import torch

from skorch.exceptions import DeviceWarning


def __setstate__(self, state):
    disable_cuda = False
    for key in self.cuda_dependent_attributes_:
        if key not in state:
            continue
        # CUDA-dependent attributes were pickled as raw byte dumps; write
        # each dump to a temporary file so torch.load can restore it.
        dump = state.pop(key)
        with tempfile.SpooledTemporaryFile() as f:
            f.write(dump)
            f.seek(0)
            if state['use_cuda'] and not torch.cuda.is_available():
                # The model was saved with CUDA but no CUDA device is
                # available now: map every storage onto the CPU instead.
                disable_cuda = True
                val = torch.load(
                    f, map_location=lambda storage, loc: storage)
            else:
                val = torch.load(f)
        state[key] = val

    if disable_cuda:
        warnings.warn(
            "Model configured to use CUDA but no CUDA devices "
            "available. Loading on CPU instead.",
            DeviceWarning)
        state['use_cuda'] = False

    self.__dict__.update(state)
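For context, __setstate__ is invoked by pickle.load, so the CPU fallback above matters when a model pickled on a GPU machine is later restored on a CPU-only machine. A minimal sketch of that round trip, assuming an already-fitted estimator named net of the class above (the variable name and file path are illustrative, not part of the original code):

import pickle

# On a machine with CUDA:
with open('net.pkl', 'wb') as f:
    pickle.dump(net, f)        # __getstate__ serializes CUDA tensors as byte dumps

# Later, on a CPU-only machine:
with open('net.pkl', 'rb') as f:
    net = pickle.load(f)       # __setstate__ above runs and remaps tensors to CPU
# A DeviceWarning is emitted and net.use_cuda is now False.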