# `ops` refers to TensorFlow's framework module, which provides Tensor,
# IndexedSlices, and control_dependencies.
from tensorflow.python.framework import ops


def update_op_asynchronous(self, optimizer, g, index):
  if isinstance(g, ops.Tensor):
    return optimizer._apply_dense(g, self._v, index)
  else:
    assert isinstance(g, ops.IndexedSlices), ("Gradient ", g, " is neither a "
                                              "tensor nor IndexedSlices.")
    # pylint: disable=protected-access
    return optimizer._apply_sparse_duplicate_indices(g, self._v, index)
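
# Illustrative sketch (not part of the original source): the isinstance
# dispatch above exists because TensorFlow delivers some gradients as dense
# Tensors and others as ops.IndexedSlices. Gradients flowing through
# tf.gather arrive as IndexedSlices, possibly with duplicate indices, which
# is why the sparse branch uses _apply_sparse_duplicate_indices. Assumes
# TensorFlow 1.x graph mode; the variable names below are made up.
import tensorflow as tf

embedding = tf.Variable(tf.random_normal([10, 4]), name="embedding")
weights = tf.Variable(tf.random_normal([4, 1]), name="weights")

rows = tf.gather(embedding, [1, 1, 3])  # duplicate index on purpose
loss = tf.reduce_sum(tf.matmul(rows, weights))

grads = tf.gradients(loss, [embedding, weights])
print(type(grads[0]))  # IndexedSlices -> sparse branch of update_op
print(type(grads[1]))  # Tensor        -> dense branch of update_op

# apply_gradients routes each (grad, var) pair through a variable processor
# whose update_op performs this dense/sparse dispatch.
train_op = tf.train.GradientDescentOptimizer(0.1).apply_gradients(
    list(zip(grads, [embedding, weights])))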

def update_op(self, optimizer, loss, grad, global_step):
  if isinstance(grad, ops.Tensor):
    update_op = optimizer._apply_dense(loss, grad, self.var, global_step)
    if self.var.constraint is not None:
      with ops.control_dependencies([update_op]):
        return self.var.assign(self.var.constraint(self.var))
    else:
      return update_op
  else:
    assert isinstance(grad, ops.IndexedSlices), (
        "Gradient ", grad, " is neither a tensor nor IndexedSlices.")
    if self.var.constraint is not None:
      raise RuntimeError(
          "Cannot use a constraint function on a sparse variable.")
    return optimizer._apply_sparse_duplicate_indices(grad, self.var)

def update_op(self, optimizer, g):
  if isinstance(g, ops.Tensor):
    # pylint: disable=protected-access
    update_op = optimizer._apply_dense(g, self._v)
    if self._v.constraint is not None:
      with ops.control_dependencies([update_op]):
        return self._v.assign(self._v.constraint(self._v))
    else:
      return update_op
  else:
    assert isinstance(g, ops.IndexedSlices), ("Gradient ", g, " is neither a "
                                              "tensor nor IndexedSlices.")
    if self._v.constraint is not None:
      raise RuntimeError(
          "Cannot use a constraint function on a sparse variable.")
    # pylint: disable=protected-access
    return optimizer._apply_sparse_duplicate_indices(g, self._v)
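
# Illustrative sketch (not part of the original source) of the constraint
# handling in the dense branch above: a variable built with a `constraint`
# function has that function re-applied after every optimizer step, via the
# assign chained under a control dependency. Assumes TensorFlow 1.x graph
# mode; the values and learning rate are arbitrary.
import tensorflow as tf

v = tf.Variable([2.0, -3.0],
                constraint=lambda x: tf.clip_by_value(x, -1.0, 1.0))
loss = tf.reduce_sum(tf.square(v))
train_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  sess.run(train_op)
  # The raw gradient step would land at [1.6, -2.4]; the constraint assign
  # chained after _apply_dense clips the variable back to [1.0, -1.0].
  print(sess.run(v))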