def _parse_inputs(inputs: Iterable[Any]) -> List[Tensor]:
    """Normalize a mixed input sequence to graph nodes.

    Items that are already ``_Node`` instances pass through unchanged;
    any other value (ndarray, list, scalar, ...) is wrapped in a new
    ``Tensor`` whose name is the value's string representation.
    """
    parsed: List[Tensor] = []
    for item in inputs:
        if isinstance(item, _Node):
            parsed.append(item)
        else:
            parsed.append(Tensor(item, name=str(item)))
    return parsed
def forward(self, input: Tensor, target: Tensor) -> Tensor:
    """Compute the (reduced) negative log-likelihood of ``input`` w.r.t. ``target``.

    ``input.value`` is first clamped into the open interval (0, 1) so that
    ``log`` never receives an exact 0 (which would be -inf).

    NOTE(review): the clamp rewrites ``input.value`` in place, mutating the
    caller's tensor — confirm this side effect is intentional.
    """
    # Keep probabilities strictly inside (0, 1) so log() stays finite.
    input.value = clip(input.value, 1e-16, 1 - 1e-16)
    # Per-sample score: sum over the class axis of target * log(prediction).
    per_sample = sum(target * log(input), dim=1, keepdim=True)
    return -self.reduction_fn(per_sample, dim=self.dim, keepdim=self.keepdim)
def randint(*shape: int, low=0, high=100, diff=False, name='Tensor[randint]') -> Tensor:
    """Return a Tensor of random integers drawn from ``[low, high)``.

    ``shape`` gives the output dimensions; ``diff`` and ``name`` are passed
    straight to the Tensor constructor.
    """
    data = np_randint(low, high=high, size=shape)
    return Tensor(data, diff=diff, name=name)
def __init__(self, *children: Union[Tensor, ndarray, List[Number], Number]): super(Function, self).__init__(*_parse_inputs(children), name=self.__class__.__name__) # This output placeholder is reused when possible self._output_placeholder = Tensor( None, diff=any(x.diff for x in self.children) and modes.DIFF_ENABLED, creator=self if modes.DIFF_ENABLED else None, name=self._generate_tensor_name()) if modes.DIFF_ENABLED: # If graph building is enabled. # Allocate space for parent's output (output placeholder) for child in self.children: child.parents_outputs.append(self._output_placeholder)
def randn(*shape: int, diff=False, name='Tensor[randn]') -> Tensor:
    """Return a Tensor sampled from the standard normal distribution.

    ``shape`` gives the output dimensions; ``diff`` and ``name`` are passed
    straight to the Tensor constructor.
    """
    data = np_randn(*shape)
    return Tensor(data, diff=diff, name=name)
def rand(*shape: int, diff=False, name='Tensor[rand]') -> Tensor:
    """Return a Tensor of uniform random values in ``[0, 1)`` with the given shape.

    ``diff`` and ``name`` are passed straight to the Tensor constructor.
    """
    data = np_rand(*shape)
    return Tensor(data, diff=diff, name=name)
def __init__(self, name):
    """Create an Add1 node holding the constant Tensor ``1``."""
    super().__init__(name=name)
    self.one = Tensor(1)
def __init__(self, name):
    """Create a Mul2 node holding the constant Tensor ``2``."""
    super().__init__(name=name)
    self.two = Tensor(2)
def __init__(self, name):
    """Create a CustomFlow node holding the constant Tensors ``2`` and ``42``."""
    super().__init__(name=name)
    self.two = Tensor(2)
    # NOTE(review): attribute name spells "fourty" (sic) — kept as-is because
    # external code may reference it.
    self.fourty_two = Tensor(42)