def recv(
communicator, rank, delegate_variable=None, tag=0, device=-1,
force_tuple=False):
"""Receive elements from target process.
This function returns data received from target process. If ``backward()``
is invoked, it will try to send gradients to the target process.
.. note::
If you define non-connected computational graph on one process,
you have to use ``delegate_variable`` to specify the output of
previous computational graph component.
Otherwise ``backward()`` does not work well.
Please refer ``chainermn.functions.pseudo_connect`` for detail.
Args:
communicator (chainer.communicators.CommunicatorBase):
ChainerMN communicator.
rank (int): Target process specifier.
delegate_variable (chainer.Variable):
Pointer to the other non-connected component.
tag (int): Optional message ID (MPI feature).
device (int): Target device specifier.
force_tuple (bool): If ``False`` (the default) a Variable will be
returned when the number of outputs is one. Otherwise, this
method returns a tuple even when the number of outputs is one.
Returns:
~chainer.Variable:
Data received from target process. If ``backward()`` is invoked
by this variable, it will send gradients to the target process.
"""
chainer.utils.experimental('chainermn.functions.recv')
if rank == communicator.rank:
raise ValueError(
'rank must be different from communicator rank, '
'otherwise deadlock occurs')
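    # When ``delegate_variable`` is given, it is passed to ``Recv`` as an
    # input so that the preceding (otherwise disconnected) graph component is
    # chained into this receive and ``backward()`` can traverse it.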
if delegate_variable is None:
res = Recv(
communicator,
peer_rank=rank,
peer_tag=tag,
device=device)()
else:
delegate_variable.name = 'delegate_variable'
res = Recv(
communicator,
peer_rank=rank,
peer_tag=tag,
device=device)(delegate_variable)
if force_tuple and not isinstance(res, tuple):
return tuple([res])
else:
return res
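
# A minimal usage sketch (not part of the original module): it assumes an MPI
# launch with at least two processes, the ``naive`` ChainerMN communicator,
# and a matching ``chainermn.functions.send`` call on the peer process. The
# toy input and the squared-sum loss below are hypothetical placeholders.
if __name__ == '__main__':
    import numpy
    import chainer
    import chainer.functions as F
    import chainermn
    import chainermn.functions

    comm = chainermn.create_communicator('naive')

    if comm.rank == 0:
        # Sender: ``send`` returns a dummy "delegate" variable; invoking
        # ``backward()`` on it waits for gradients coming back from rank 1.
        x = chainer.Variable(
            numpy.arange(6, dtype=numpy.float32).reshape(2, 3))
        phi = chainermn.functions.send(x, comm, rank=1)
        phi.backward()
        print('rank 0: gradient received from rank 1:', x.grad)
    elif comm.rank == 1:
        # Receiver: ``recv`` blocks until the matching ``send`` on rank 0
        # arrives; ``loss.backward()`` sends gradients back through Recv.
        y = chainermn.functions.recv(comm, rank=0)
        loss = F.sum(y * y)
        loss.backward()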