Example #1
 def __init__(self, *layers: Union[Layer, List[Layer], Tuple[Layer], Generator[Layer, None, None]]):
     self.layers = to_flat_tuple(layers)
     super().__init__(
         n_in=len(to_flat_tuple(self.layers[0].inputs)),
         n_out=len(to_flat_tuple(self.layers[-1].outputs)),
         inputs=self.layers[0].inputs,
         outputs=self.layers[-1].outputs,
     )
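
A note on the helper used throughout: every example on this page calls deepr's to_flat_tuple, which none of the snippets define. As a point of reference, here is a minimal sketch of its contract, assuming (as the call sites suggest) that it recursively flattens nested lists, tuples and generators into a flat tuple while treating strings and other scalars as single elements:

import inspect
from typing import Any, Tuple

def to_flat_tuple(items: Any) -> Tuple[Any, ...]:
    """Recursively flatten nested lists / tuples / generators into a flat tuple"""
    if isinstance(items, (list, tuple)) or inspect.isgenerator(items):
        return tuple(it for item in items for it in to_flat_tuple(item))
    return (items,)

# Strings are not iterated, so single tensor names stay intact
assert to_flat_tuple("a") == ("a",)
assert to_flat_tuple((["a", "b"], "c")) == ("a", "b", "c")
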
Example #2
File: base.py Project: mindis/deepr
    def __init__(
        self,
        n_in: int = None,
        n_out: int = None,
        inputs: Union[str, Tuple[str, ...], List[str]] = None,
        outputs: Union[str, Tuple[str, ...], List[str]] = None,
        name: str = None,
    ):
        # Assert either number of inputs or names of inputs are given
        if n_in is None and inputs is None:
            raise ValueError(
                "You must set either n_in or inputs (both are None)")
        if n_out is None and outputs is None:
            raise ValueError(
                "You must set either n_out or outputs (both are None)")

        def _default_names(num: int, prefix: str = "t_"):
            if num == 1:
                return f"{prefix}0"
            else:
                return tuple(f"{prefix}{idx}" for idx in range(num))

        # Resolve n_in / inputs from arguments
        if n_in is None and inputs is not None:
            n_in = len(to_flat_tuple(inputs))
        elif n_in is not None and inputs is None:
            inputs = _default_names(n_in)

        # Resolve n_out / outputs from arguments
        if n_out is None and outputs is not None:
            n_out = len(to_flat_tuple(outputs))
        elif n_out is not None and outputs is None:
            outputs = _default_names(n_out)

        # For mypy
        assert isinstance(n_in, int)
        assert isinstance(n_out, int)

        # Store attributes
        self.n_in = n_in
        self.n_out = n_out
        self.inputs = inputs
        self.outputs = outputs
        self.name = name

        # Assert coherent attributes
        if self.n_in == 1 and not isinstance(self.inputs, str):
            msg = f"Layer {self} inputs should be a string (n_in = 1)"
            raise ValueError(msg)
        if self.n_out == 1 and not isinstance(self.outputs, str):
            msg = f"Layer {self} outputs should be a string (n_out = 1)"
            raise ValueError(msg)
        if len(to_flat_tuple(self.inputs)) != self.n_in:
            msg = f"{self}: `inputs` inconsistent with `n_in` (n_in={self.n_in}, inputs='{self.inputs})'"
            raise ValueError(msg)
        if len(to_flat_tuple(self.outputs)) != self.n_out:
            msg = f"{self}: `outputs` inconsistent with `n_out` (n_out={self.n_out}, outputs='{self.outputs}'')"
            raise ValueError(msg)
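
A quick illustration of the resolution logic above, using a hypothetical minimal subclass (the forward override is only there to make the class concrete):

class Identity(Layer):  # hypothetical subclass, for illustration only
    def forward(self, tensors, mode: str = None):
        return tensors

# Names default to "t_0", "t_1", ... when only the arities are given
layer = Identity(n_in=2, n_out=2)
assert layer.inputs == ("t_0", "t_1")

# Conversely, arities are inferred from explicit names
layer = Identity(inputs=("a", "b"), outputs="c")
assert layer.n_in == 2 and layer.n_out == 1
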
Example #3
 def __init__(self, *layers: Union[Layer, List[Layer], Tuple[Layer], Generator[Layer, None, None]]):
     self.layers = to_flat_tuple(layers)
     n_in = sum(layer.n_in for layer in self.layers)
     n_out = sum(layer.n_out for layer in self.layers)
     inputs = to_flat_tuple([layer.inputs for layer in self.layers])
     outputs = to_flat_tuple([layer.outputs for layer in self.layers])
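     # Keep the collected names only if they are all distinct; otherwise
     # fall back to None so the base class generates default names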
     inputs = (inputs if n_in > 1 else inputs[0]) if len(set(inputs)) == n_in else None
     outputs = (outputs if n_out > 1 else outputs[0]) if len(set(outputs)) == n_out else None
     super().__init__(n_in=n_in, n_out=n_out, inputs=inputs, outputs=outputs)
Example #4
 def forward(self, tensors, mode: str = None):
     """Forward method of the layer"""
     tensors = to_flat_tuple(tensors)
     new_tensors, idx = [], 0
     for layer in self.layers:
         tensors_in = tensors[idx] if layer.n_in == 1 else tuple(tensors[idx : idx + layer.n_in])
         new_tensors.append(layer.forward(tensors_in, mode=mode))
         idx += layer.n_in
     result = to_flat_tuple(new_tensors)
     return result if self.n_out > 1 else result[0]
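
The index arithmetic above distributes a flat tuple of inputs across the sub-layers according to their n_in. A standalone trace of that splitting, with plain values standing in for tensors and hypothetical arities (1, 2):

tensors = ("x", "y", "z")
splits, idx = [], 0
for n_in in (1, 2):  # hypothetical sub-layer arities
    splits.append(tensors[idx] if n_in == 1 else tensors[idx:idx + n_in])
    idx += n_in
assert splits == ["x", ("y", "z")]
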
Example #5
 def __init__(
     self,
     *preprocessors: Union[Prepro, Tuple[Prepro], List[Prepro],
                           Generator[Prepro, None, None]],
     fuse: bool = True,
     num_parallel_calls: int = None,
 ):
     super().__init__()
     self.preprocessors = (_fuse(*to_flat_tuple(preprocessors),
                                 num_parallel_calls=num_parallel_calls)
                           if fuse else to_flat_tuple(preprocessors))
     self.fuse = fuse
     self.num_parallel_calls = num_parallel_calls
Example #6
def _fuse(*preprocessors: Prepro,
          num_parallel_calls: int = None) -> Tuple[Prepro, ...]:
    """Group Map and Filter in _FusedMap and _FusedFilter"""
    def _flatten(prepros):
        for prepro in prepros:
            if isinstance(prepro, Serial):
                yield from _flatten(prepro.preprocessors)
            else:
                yield prepro

    def _prepro_type(prepro: Prepro) -> str:
        if isinstance(prepro, core.Map):
            return "map"
        elif isinstance(prepro, core.Filter):
            return "filter"
        else:
            return "other"

    def _gen():
        for prepro_type, prepros in itertools.groupby(_flatten(preprocessors),
                                                      _prepro_type):
            if prepro_type == "map":
                yield _FusedMap(*list(prepros),
                                num_parallel_calls=num_parallel_calls)
            elif prepro_type == "filter":
                yield _FusedFilter(*list(prepros))
            else:
                yield list(prepros)

    return to_flat_tuple(_gen())
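
Note that itertools.groupby only groups consecutive items, so a Map separated from another Map by a Filter is not fused with it. A minimal illustration on plain strings standing in for prepro types:

import itertools

pipeline = ["map", "map", "filter", "map"]
groups = [(key, list(group)) for key, group in itertools.groupby(pipeline)]
assert groups == [("map", ["map", "map"]), ("filter", ["filter"]), ("map", ["map"])]
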
Example #7
 def forward(self, tensors, mode: str = None):
     """Forward method of the layer"""
     tensors = to_flat_tuple(tensors)
     result = tuple(tensors[idx] for idx in self.indices)
     if len(result) == 1:
         return result[0]
     else:
         return result
Example #8
 def __init__(
     self,
     inputs: Union[str, Tuple[str, ...], List[str]] = None,
     outputs: Union[str, Tuple[str, ...], List[str]] = None,
     indices: Union[int, Tuple[int]] = None,
     n_in: int = None,
 ):
     if n_in is None and inputs is None:
         msg = "`n_in` and `inputs` cannot both be `None`"
         raise ValueError(msg)
     if n_in is None:
         n_in = len(to_flat_tuple(inputs))
     self.indices = to_flat_tuple(
         indices) if indices is not None else tuple(range(n_in))
     if inputs is not None and outputs is None:
         outputs_tuple = tuple(
             to_flat_tuple(inputs)[idx] for idx in self.indices)
         outputs = outputs_tuple if len(
             outputs_tuple) > 1 else outputs_tuple[0]
     super().__init__(n_in=n_in,
                      n_out=len(self.indices),
                      inputs=inputs,
                      outputs=outputs)
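
When outputs is omitted, it is derived from inputs by picking the selected indices (a bare name rather than a 1-tuple when a single index is selected). A standalone sketch of that resolution with hypothetical names:

inputs, indices = ("a", "b", "c"), (2, 0)
outputs = tuple(inputs[idx] for idx in indices)
outputs = outputs if len(outputs) > 1 else outputs[0]
assert outputs == ("c", "a")
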
Example #9
    def __init__(
        self,
        *preprocessors: Union[Prepro, Tuple[Prepro], List[Prepro],
                              Generator[Prepro, None, None]],
        fuse: bool = True,
        num_parallel_calls: int = None,
    ):
        super().__init__()
        self.preprocessors = to_flat_tuple(preprocessors)
        self.fuse = fuse
        self.num_parallel_calls = num_parallel_calls

        # Iterable of preprocessors used by the apply method
        self._preprocessors = (_fuse(*self.preprocessors,
                                     num_parallel_calls=num_parallel_calls)
                               if fuse else self.preprocessors)
Example #10
 def __init__(
     self,
     inputs: Union[str, Tuple[str, ...], List[str]] = None,
     outputs: Union[str, Tuple[str, ...], List[str]] = None,
     indices: List[int] = None,
     n_in: int = None,
 ):
     if n_in is None and inputs is None:
         msg = "`n_in` and `inputs` cannot both be `None`"
         raise ValueError(msg)
     if n_in is None:
         n_in = len(inputs.split(",")) if isinstance(inputs, str) else len(inputs)  # type: ignore
     if inputs is not None and outputs is None:
         outputs = inputs
      self.indices = to_flat_tuple(indices) if indices is not None else tuple(range(n_in))
     super().__init__(n_in=n_in, n_out=len(self.indices), inputs=inputs, outputs=outputs)
Example #11
 def __init__(
     self,
     layer: Layer,
     mode: Union[str, Tuple[str, ...]] = None,
     inputs: Union[str, Tuple[str, ...], List[str]] = None,
     outputs: Union[str, Tuple[str, ...], List[str]] = None,
 ):
     if inputs is None:
         inputs = layer.inputs
     if outputs is None:
         outputs = layer.outputs
     super().__init__(n_in=layer.n_in,
                      n_out=layer.n_out,
                      inputs=inputs,
                      outputs=outputs,
                      name=layer.name)
     self.layer = layer
     self.mode = to_flat_tuple(mode)
     if self.n_in != self.n_out:
         raise ValueError("Number of inputs / outputs must be the same/")
Example #12
 def __init__(self, *layers: Union[Layer, List[Layer], Tuple[Layer],
                                   Generator[Layer, None, None]]):
     self.layers = to_flat_tuple(layers)
     super().__init__(
         n_in=len(to_flat_tuple(self.layers[0].inputs)),
         n_out=len(to_flat_tuple(self.layers[-1].outputs)),
         inputs=self.layers[0].inputs,
         outputs=self.layers[-1].outputs,
     )
     # Check consistency of inputs / outputs of intermediate layers
     keys = set(to_flat_tuple(self.inputs))
     for layer in self.layers:
         for key in to_flat_tuple(layer.inputs):
             if key not in keys:
                 raise ValueError(
                     f"Input '{key}' of layer {layer} not found")
         keys.update(to_flat_tuple(layer.outputs))
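
The consistency check above effectively walks the sequence like a small dataflow graph: each layer may only read keys that are either inputs of the sequence or outputs of an earlier layer. A standalone sketch of that walk with hypothetical names:

keys = {"a"}  # inputs of the first layer
for layer_inputs, layer_outputs in [({"a"}, {"b"}), ({"a", "b"}, {"c"})]:
    missing = layer_inputs - keys
    assert not missing, f"Inputs {missing} not found"
    keys |= layer_outputs
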
Example #13
    def __call__(
        self,
        tensors: Union[tf.Tensor, Dict[str, tf.Tensor], Tuple[tf.Tensor, ...]],
        mode: str = None,
        reuse: bool = False,
    ) -> Union[tf.Tensor, Dict[str, tf.Tensor], Tuple[tf.Tensor, ...]]:
        """Forward as tuple or dictionary depending on tensors type.

        Wraps the layer call in a variable scope so that variables can
        be reused via the ``reuse`` argument, and adds a tf.identity
        operator to each output tensor using self.outputs.

        If tensors is a Dict, it returns a dictionary whose keys are
        defined by self.outputs.

        Otherwise, the expected input type depends on the arity:
            - n_in = 1: a single tensor (NOT wrapped in a tuple)
            - n_in > 1: a tuple of tensors
        and the output type follows the same convention:
            - n_out = 1: a single tensor (NOT wrapped in a tuple)
            - n_out > 1: a tuple of tensors

        NOTE: Each call to this method performs inspection on the inputs
        and outputs type, which can be costly in terms of computation.

        This is not an issue when building graphs with tf.estimator as
        the graph is compiled once and for all.

        However, when using a :class:`~Layer` to preprocess a :class:`~tf.data.Dataset`
        (e.g. with a ``map`` transformation), this method will be called
        for each example and might cause a slowdown. It is recommended to
        explicitly use ``forward`` or ``forward_as_dict`` in that case.

        Parameters
        ----------
        tensors : Union[tf.Tensor, Dict[str, tf.Tensor], Tuple[tf.Tensor, ...]]
            Input tensors
        mode : str, optional
            One of tf.estimator.ModeKeys
        reuse : bool, optional
            Encapsulates layer call in a variable scope with reuse=reuse
        """
        with tf.variable_scope(tf.get_variable_scope(), reuse=reuse):
            if isinstance(tensors, dict):
                # Check that tensors is coherent with self.inputs
                if not set(to_flat_tuple(self.inputs)) <= set(tensors):
                    msg = f"Missing inputs: {set(to_flat_tuple(self.inputs)) - set(tensors)}"
                    raise KeyError(msg)

                # Call forward_as_dict to get output tensors
                tensors_dict = self.forward_as_dict(tensors, mode)

                # Check that tensors_dict is coherent with self.outputs
                if not set(to_flat_tuple(self.outputs)) <= set(tensors_dict):
                    msg = f"Missing outputs: {set(to_flat_tuple(self.outputs)) - set(tensors_dict)}"
                    raise KeyError(msg)

                return tensors_dict
            else:
                # Check that tensors is coherent with self.n_in
                if self.n_in == 1 and isinstance(tensors, tuple):
                    msg = f"Expected 1 input, but got {tensors} (should not be a tuple)"
                    raise KeyError(msg)
                if self.n_in > 1 and len(to_flat_tuple(tensors)) != self.n_in:
                    msg = f"Expected {self.n_in} inputs, but got {tensors}"
                    raise KeyError(msg)

                # Call forward and convert to tuple
                tensors_tuple = self.forward(tensors, mode)

                # Check that tensors_tuple is coherent with outputs
                if len(to_flat_tuple(tensors_tuple)) != self.n_out:
                    raise IndexError(
                        f"Expected {self.n_out} outputs but got {tensors_tuple}"
                    )

                return tensors_tuple
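
A hypothetical usage sketch, reusing the Identity subclass from Example #2's illustration and a TF1-style tensorflow import as in the snippet above (the dict path additionally assumes, as in deepr, a default forward_as_dict implemented in terms of forward):

import tensorflow as tf  # TF1-style API

layer = Identity(inputs=("a", "b"), outputs=("x", "y"))  # hypothetical subclass

# Tuple in -> tuple out (a bare tensor instead of a 1-tuple when n_out == 1)
x, y = layer((tf.constant(0.0), tf.constant(1.0)))

# Dict in -> dict out, keyed by layer.outputs
outputs_dict = layer({"a": tf.constant(0.0), "b": tf.constant(1.0)})
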
Example #14
 def __init__(self, *args: Union[str, pathlib.Path, "Path"]):
     self.path = os.path.join(*[str(arg) for arg in to_flat_tuple(args)])
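
A hypothetical usage note, assuming the enclosing class is the Path wrapper named in the type hint and a POSIX-style os.path.join:

import pathlib

path = Path("models", pathlib.Path("checkpoints"), "model.ckpt")  # hypothetical call
assert path.path == "models/checkpoints/model.ckpt"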