Example #1
0
  def assign_sub(self, delta, use_locking=None, name=None, read_value=True):
    """Decrement this variable by `delta`.

    Args:
      delta: A `Tensor` convertible to this variable's dtype; the amount to
        subtract.
      use_locking: If `True`, use locking during the operation.
      name: Optional name for the generated operation.
      read_value: A `bool`. If `True`, read back and return the updated
        variable value.

    Returns:
      The updated value of the variable when `read_value` is `True`;
      otherwise the assignment `Operation` in graph mode, or `None` in
      eager mode.
    """
    # TODO(apassos): this here and below is not atomic. Consider making it
    # atomic if there's a way to do so without a performance cost for those who
    # don't need it.
    with _handle_graph(self.handle), self._assign_dependencies():
      delta_tensor = ops.convert_to_tensor(delta, dtype=self.dtype)
      update_op = gen_resource_variable_ops.assign_sub_variable_op(
          self.handle, delta_tensor, name=name)
    return self._lazy_read(update_op) if read_value else update_op
Example #2
0
 def assign_sub(self, delta, use_locking=None, name=None):
   """Subtract `delta` from this variable and lazily read the result.

   Args:
     delta: A `Tensor` convertible to this variable's dtype.
     use_locking: If `True`, use locking during the operation (not
       referenced by this body).
     name: Optional name for the generated operation.

   Returns:
     The result of `self._lazy_read` applied to the assign-sub op.
   """
   # TODO(apassos): this here and below is not atomic. Consider making it
   # atomic if there's a way to do so without a performance cost for those who
   # don't need it.
   delta_tensor = ops.convert_to_tensor(delta, dtype=self.dtype)
   update_op = gen_resource_variable_ops.assign_sub_variable_op(
       self.handle, delta_tensor, name=name)
   return self._lazy_read(update_op)
Example #3
0
 def assign_sub(self, delta, use_locking=None, name=None, read_value=True):
   """Subtract `delta` from this variable.

   Args:
     delta: A `Tensor` convertible to this variable's dtype.
     use_locking: Ignored; accepted for signature compatibility.
     name: Optional name for the generated operation.
     read_value: If `True`, return a fresh read of the variable instead of
       the assignment op.

   Returns:
     `self._read_variable_op()` when `read_value` is `True`; otherwise the
     assign-sub op.
   """
   del use_locking  # Unused in this implementation.
   with _handle_graph(self.handle), self._assign_dependencies():
     update_op = gen_resource_variable_ops.assign_sub_variable_op(
         self.handle,
         ops.convert_to_tensor(delta, dtype=self.dtype),
         name=name)
   if not read_value:
     return update_op
   return self._read_variable_op()
Example #4
0
 def _assign_moving_average(self, variable, value, one_minus_decay):
   """Apply one moving-average step to `variable`.

   Computes `variable -= (variable - value) * one_minus_decay`.

   Args:
     variable: The variable to update in place.
     value: The new observation being averaged in.
     one_minus_decay: Weight applied to the difference.

   Returns:
     The op that performs the in-place subtraction.
   """
   with ops.name_scope(None, 'AssignMovingAvg',
                       [variable, value, one_minus_decay]) as scope:
     with ops.colocate_with(variable):
       delta = (variable.read_value() - value) * one_minus_decay
       if not isinstance(variable, resource_variable_ops.ResourceVariable):
         return state_ops.assign_sub(variable, delta, name=scope)
       # state_ops.assign_sub does an extra read_variable_op after the
       # assign. We avoid that here.
       return gen_resource_variable_ops.assign_sub_variable_op(
           variable.handle, delta, name=scope)
Example #5
0
 def _assign_moving_average(self, variable, value, one_minus_decay):
     """Apply one exponential-moving-average step to `variable`.

     Subtracts `(variable - value) * one_minus_decay` from `variable`.

     Args:
         variable: The variable to update.
         value: The target value being averaged in.
         one_minus_decay: Weight applied to the difference.

     Returns:
         The op performing the in-place subtraction.
     """
     with ops.name_scope(None, 'AssignMovingAvg',
                         [variable, value, one_minus_decay]) as scope:
         with ops.colocate_with(variable):
             diff = math_ops.subtract(variable.read_value(), value)
             update_delta = math_ops.multiply(diff, one_minus_decay)
             if isinstance(variable,
                           resource_variable_ops.ResourceVariable):
                 # state_ops.assign_sub does an extra read_variable_op
                 # after the assign; the raw op below avoids that.
                 return gen_resource_variable_ops.assign_sub_variable_op(
                     variable.handle, update_delta, name=scope)
             return state_ops.assign_sub(variable,
                                         update_delta,
                                         name=scope)
  def assign_sub(self, delta, use_locking=None, name=None, read_value=True):
    """Subtracts a value from this variable.

    Args:
      delta: A `Tensor`. The amount to subtract from this variable.
      use_locking: If `True`, use locking during the operation.
      name: The name to use for the operation.
      read_value: A `bool`; whether to read back and return the variable's
          new value.

    Returns:
      The new value of the variable when `read_value` is `True`; otherwise
      the assignment `Operation` in graph mode, or `None` in eager mode.
    """
    # TODO(apassos): this here and below is not atomic. Consider making it
    # atomic if there's a way to do so without a performance cost for those who
    # don't need it.
    update_op = gen_resource_variable_ops.assign_sub_variable_op(
        self.handle,
        ops.convert_to_tensor(delta, dtype=self.dtype),
        name=name)
    if not read_value:
      return update_op
    return self._lazy_read(update_op)