def init_master_worker(backend):
    global _initialized
    if _initialized:
        raise RuntimeError("trying to initialize torch.distributed twice!")
    # Hand off to the C extension to bring up the master-worker runtime.
    torch._C._dist_init_master_worker(backend)
    _initialized = True
    # Import the Python-side submodules and splice their public names
    # into the torch.distributed namespace via extend_scope.
    import torch.distributed.collectives as collectives
    import torch.distributed.remote_types as remote_types
    extend_scope(collectives)
    extend_scope(remote_types)
    # reduce_op and group are module-level objects defined alongside this
    # function; the extension registers them for use by collective calls.
    assert torch._C._dist_init_extension(True, reduce_op, group)
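A minimal usage sketch, assuming a legacy THD build of PyTorch in which torch.distributed still exposed init_master_worker. The "tcp" backend string and the MASTER_ADDR/MASTER_PORT environment variables are illustrative assumptions about how early THD located peers, not details taken from the snippet above:

import os
import torch.distributed as dist

# Hypothetical rendezvous settings; values are assumed examples.
os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
os.environ.setdefault("MASTER_PORT", "29500")

# Backend name is an assumed example value ("tcp").
dist.init_master_worker("tcp")

# A second call now raises RuntimeError, since _initialized has been set.

Note that the double-initialization guard relies on the module-global _initialized flag, which is why the function declares it with `global` before checking and setting it.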