def call(self, *args, **kwargs) -> Any:
    """
    Do not call this directly, use `__call__`:
    ```
    my_lambda(*args, **kwargs)
    ```

    Unwraps every tensor argument to its native counterpart, invokes the
    wrapped native module, and re-wraps the outputs as trainable tensors
    while preserving the output structure (namedtuple, dict, list or a
    single value).
    """
    # Unwrap wrapper tensors into the values the native module expects.
    native_args = [tensor.native for tensor in args]
    native_kwargs = {name: value.native for name, value in kwargs.items()}

    # Run the underlying native module.
    outputs = self.native_module(*native_args, **native_kwargs)

    # Re-wrap outputs, mirroring the container type of the native result.
    if _isnamedtupleinstance(outputs):
        wrapped_fields = {
            name: Tensor(data=value, trainable=True)
            for name, value in outputs._asdict().items()
        }
        # Rebuild the same namedtuple type from the wrapped fields.
        return type(outputs)(**wrapped_fields)
    if isinstance(outputs, dict):
        return {name: Tensor(data=value, trainable=True) for name, value in outputs.items()}
    if isinstance(outputs, list):
        return [Tensor(data=value, trainable=True) for value in outputs]
    # Single (non-container) result.
    return Tensor(data=outputs, trainable=True)
def log(tensor: ITensor) -> ITensor:
    """
    Compute the elementwise natural logarithm of a tensor.

    :param tensor: The tensor of which the log should be computed.
    :return: The log tensor (wrapped as a trainable Tensor).
    """
    # FIX: `tf.log` was removed in TensorFlow 2.x. `tf.math.log` is the
    # equivalent op and is also available in TF 1.x (>= 1.5), so this is
    # backward-compatible with both major versions.
    return Tensor(data=tf.math.log(tensor.native), trainable=True)
def pow(tensor: ITensor, exponent: ITensor) -> ITensor:
    """
    Raise a tensor elementwise to a given power.

    :param tensor: The tensor of which the pow should be computed.
    :param exponent: The exponent to take the power with.
    :return: The pow tensor (wrapped as a trainable Tensor).
    """
    # NOTE: `pow` shadows the builtin, but the name is part of this
    # module's public API and must stay unchanged.
    native_result = tf.pow(tensor.native, exponent.native)
    return Tensor(data=native_result, trainable=True)
def clip(tensor: ITensor, min, max) -> ITensor:
    """
    Clip a tensor elementwise to a value range.

    :param tensor: The tensor which should be clipped.
    :param min: The minimum value to clip to.
    :param max: The maximum value to clip to.
    :return: The clipped tensor (wrapped as a trainable Tensor).
    """
    # FIX: this function called torch.clamp while the sibling ops in this
    # module (log, pow) use TensorFlow — almost certainly a copy-paste
    # from a pytorch backend. tf.clip_by_value is the TF equivalent, so
    # the result stays in the same framework/graph as the rest of the
    # module. (`min`/`max` shadow builtins, but the parameter names are
    # part of the public interface and are kept for compatibility.)
    return Tensor(data=tf.clip_by_value(tensor.native, min, max), trainable=True)