import torch
from functools import wraps


def default_tensor_type(type):
    """Decorator that temporarily switches the default tensor type while fn runs."""
    type_str = torch.typename(type)

    def decorator(fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            # Remember the current default so it can be restored afterwards.
            old_type = torch.typename(torch.Tensor())
            torch.set_default_tensor_type(type_str)
            try:
                return fn(*args, **kwargs)
            finally:
                # Restore the previous default even if fn raises.
                torch.set_default_tensor_type(old_type)
        return wrapper
    return decorator
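
A minimal usage sketch, not part of the original snippet: the function name run_on_doubles and the choice of torch.DoubleTensor are illustrative assumptions. It shows that the default tensor type changes only for the duration of the decorated call.

@default_tensor_type(torch.DoubleTensor)
def run_on_doubles():
    # Hypothetical example: tensors created here default to torch.DoubleTensor.
    return torch.zeros(3).dtype

print(run_on_doubles())          # torch.float64
print(torch.zeros(3).dtype)      # previous default restored, e.g. torch.float32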
from contextlib import contextmanager


@contextmanager
def tensors_default_to(host):
    """
    Context manager to temporarily use CPU or CUDA tensors in PyTorch.

    :param str host: Either "cuda" or "cpu".
    """
    assert host in ('cpu', 'cuda'), host
    old_module = torch.Tensor.__module__
    name = torch.Tensor.__name__
    new_module = 'torch.cuda' if host == 'cuda' else 'torch'
    torch.set_default_tensor_type('{}.{}'.format(new_module, name))
    try:
        yield
    finally:
        # Restore the original default tensor type, even after an exception.
        torch.set_default_tensor_type('{}.{}'.format(old_module, name))
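
A hedged usage sketch, assuming a CUDA device is available; the variable names are illustrative only. Allocations inside the with block default to CUDA tensors, and the previous default is restored on exit.

with tensors_default_to('cuda'):
    a = torch.ones(2, 3)         # allocated as a torch.cuda.FloatTensor
b = torch.ones(2, 3)             # back on the previous (CPU) default
print(a.is_cuda, b.is_cuda)      # True False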
def use_cuda(enabled, device_id=0):
    """Verifies that CUDA is available and sets the default device to device_id."""
    if not enabled:
        return None
    assert torch.cuda.is_available(), 'CUDA is not available'
    torch.set_default_tensor_type('torch.cuda.FloatTensor')
    torch.cuda.set_device(device_id)
    return device_id
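
For illustration only, a sketch of how use_cuda might be called from a script; the enabled value is an assumption and nothing below comes from the original snippet.

device_id = use_cuda(enabled=torch.cuda.is_available())
if device_id is None:
    print('Running on CPU')                # CUDA disabled or unavailable
else:
    print('Using CUDA device', device_id)  # default tensors are now CUDA floats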