Example No. 1
def _apply_reduce(reduction, tensors):
  """Helper function for reduce_* functions."""
  if not tensors:
    raise ValueError('Must pass >0 tensors to reduce operations')

  for t in tensors:
    _check_device(t)
  result = gen_nccl_ops.nccl_reduce(input=tensors, reduction=reduction)
  try:
    next(t for t in tensors if t.device == result.device)
  except StopIteration:
    raise ValueError('One input tensor must be assigned to current device')
  return result
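
This and the next example appear to be the same helper, excerpted from TensorFlow's internal NCCL op wrappers. The flow is: verify that every input tensor is explicitly placed on a GPU (_check_device), build a single gen_nccl_ops.nccl_reduce op over all of the inputs, and then confirm that at least one input lives on the device chosen for the result, since the fused reduction has to be colocated with one of its operands.
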
Example No. 2
def _apply_reduce(reduction, tensors):
    """Helper function for reduce_* functions."""
    if not tensors:
        raise ValueError('Must pass >0 tensors to reduce operations')

    for t in tensors:
        _check_device(t)
    result = gen_nccl_ops.nccl_reduce(input=tensors, reduction=reduction)
    try:
        next(t for t in tensors if t.device == result.device)
    except StopIteration:
        raise ValueError('One input tensor must be assigned to current device')
    return result
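
A minimal usage sketch, not taken from the listings above: it assumes graph-mode TensorFlow, at least two visible GPUs, and that _apply_reduce is defined in the enclosing module; the reduce_sum wrapper name here is purely illustrative.

# Usage sketch only.  Assumptions: graph-mode TensorFlow, >= 2 GPUs, and that
# _apply_reduce (Examples No. 1-2) is defined in the enclosing module.
import tensorflow.compat.v1 as tf

tf.disable_eager_execution()


def reduce_sum(tensors):
  # Illustrative thin wrapper around the helper shown above.
  return _apply_reduce('sum', tensors)


# One tensor per GPU; NCCL requires every input to be pinned to a GPU device.
inputs = []
for i in range(2):
  with tf.device('/gpu:%d' % i):
    inputs.append(tf.ones([4]) * (i + 1))

# The result is placed on one of the input devices, which is why the helper
# checks that at least one input already lives on the result's device.
total = reduce_sum(inputs)  # evaluates to [3., 3., 3., 3.]
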
Example No. 3
def _broadcast_grad(op, accumulated_grad):
  """The gradients for input `Operation` of `broadcast`.

  Args:
    op: The `broadcast send` `Operation` that we are differentiating.
    accumulated_grad: Accumulated gradients with respect to the output of the
      `broadcast` op.

  Returns:
    Gradients with respect to the input of `broadcast`.
  """
  # Grab inputs of accumulated_grad and replace accumulation with reduce_sum.
  grads = [t for t in accumulated_grad.op.inputs]
  for t in grads:
    _check_device(t)

  with ops.device(op.device):
    return gen_nccl_ops.nccl_reduce(input=grads, reduction='sum')
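
Conceptually, broadcast copies one tensor to several devices, so the gradient with respect to the broadcast input is the sum of the per-device gradients. TensorFlow's autodiff hands this function the already-accumulated gradient (typically the output of an AddN over the per-device terms); the code unpacks that accumulation's inputs and replaces it with a single NCCL sum-reduction placed on the broadcast op's own device.
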
Example No. 4
def _broadcast_grad(op, accumulated_grad):
    """The gradients for input `Operation` of `broadcast`.

    Args:
      op: The `broadcast send` `Operation` that we are differentiating.
      accumulated_grad: Accumulated gradients with respect to the output of the
        `broadcast` op.

    Returns:
      Gradients with respect to the input of `broadcast`.
    """
    # Grab inputs of accumulated_grad and replace accumulation with reduce_sum.
    grads = [t for t in accumulated_grad.op.inputs]
    for t in grads:
        _check_device(t)

    with ops.device(op.device):
        return gen_nccl_ops.nccl_reduce(input=grads, reduction='sum')
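
For context, a sketch of how such a gradient function is typically registered in graph-mode TensorFlow. The op name 'NcclBroadcast' is an assumption rather than something stated in the listings above, and running this where a gradient for that op is already registered (for example inside TensorFlow itself) would raise an error.

# Registration sketch only.  Assumption: the broadcast op's registered type
# name is 'NcclBroadcast'; RegisterGradient is how tf.gradients finds the
# Python gradient function for a given op type.
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_nccl_ops


@ops.RegisterGradient('NcclBroadcast')  # assumed op name
def _broadcast_grad(op, accumulated_grad):
  # Same body as Examples No. 3-4: sum the per-device gradients with one
  # NCCL reduce pinned to the broadcast op's device.
  grads = list(accumulated_grad.op.inputs)
  with ops.device(op.device):
    return gen_nccl_ops.nccl_reduce(input=grads, reduction='sum')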