def new_group(ranks=None):
    """Creates a new distributed group.

    This function requires that all processes in the main group (i.e. all
    processes that are part of the distributed job) enter this function, even
    if they are not going to be members of the group. Additionally, groups
    should be created in the same order in all processes.

    Arguments:
        ranks (list[int]): List of ranks of group members.

    Returns:
        A handle of distributed group that can be given to collective calls.
    """
    assert torch.distributed._initialized == _INITIALIZED_PG, \
        "collective only supported in process-group mode"

    if ranks is None:
        ranks = list(range(get_world_size()))
    return torch._C._dist_new_group(ranks)
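For context, a minimal usage sketch of this API. The backend, init_method, rank, and world_size values below are illustrative assumptions; the key point from the docstring is that every process calls new_group(), even processes that are not members of the new group.

import os

import torch
import torch.distributed as dist

# Assumed initialization parameters, for illustration only.
dist.init_process_group(backend="gloo",
                        init_method="tcp://127.0.0.1:23456",
                        rank=int(os.environ["RANK"]),
                        world_size=4)

# Every process in the job executes this call, in the same order,
# but only ranks 0 and 1 become members of the new group.
group = dist.new_group(ranks=[0, 1])

t = torch.ones(1)
if dist.get_rank() in (0, 1):
    # The returned handle is passed to collective calls so the
    # collective runs only among the group's members.
    dist.all_reduce(t, group=group)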