def init_master_worker(backend, init_method='env://', **kwargs):
    warnings.warn("""
    ================================================================================
                                       WARNING
    ================================================================================
    Master-worker mode is still experimental. The API will change without
    notice and we can't guarantee full correctness and expected performance yet.
    We'll announce it once it's ready.
    """)
    world_size = kwargs.pop('world_size', -1)
    group_name = kwargs.pop('group_name', '')
    rank = kwargs.pop('rank', -1)
    assert len(kwargs) == 0, "got unexpected keyword arguments: %s" % ",".join(kwargs.keys())
    if not is_available():
        raise RuntimeError("PyTorch built without distributed support")
    global _initialized
    if _initialized:
        raise RuntimeError("trying to initialize torch.distributed twice!")
    # Hand off to the C extension to set up the master-worker process group.
    torch._C._dist_init_master_worker(backend, init_method, world_size,
                                      group_name, rank)
    _initialized = _INITIALIZED_MW
    # Pull the collective ops and remote tensor types into this module's namespace.
    import torch.distributed.collectives as collectives
    import torch.distributed.remote_types as remote_types
    _extend_scope(collectives)
    _extend_scope(remote_types)
    if not torch._C._dist_init_extension(True, reduce_op, group):
        raise RuntimeError("distributed module initialization failed")
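
# A minimal usage sketch (not from the source): it assumes this module is
# importable as torch.distributed, that the chosen backend supports
# master-worker mode, and that the 'env://' init method reads MASTER_ADDR /
# MASTER_PORT from the environment. The address, port, backend name, and
# world size below are hypothetical example values.
#
#   import os
#   import torch.distributed as dist
#
#   os.environ['MASTER_ADDR'] = '127.0.0.1'
#   os.environ['MASTER_PORT'] = '29500'
#   # rank 0 acts as the master; workers pass their own rank in [1, world_size)
#   dist.init_master_worker('tcp', init_method='env://', world_size=2, rank=0)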