Esempio n. 1
0
 def broadcast(cls_or_self, *others: tp.Union[tp.ArrayLike, "BaseAccessor"],
               **kwargs) -> reshape_fns.BCRT:
     """Broadcast this object together with `others`.

     Any `BaseAccessor` instances in `others` are unwrapped to their
     underlying objects; everything is then delegated to
     `vectorbt.base.reshape_fns.broadcast`.
     """
     unwrapped = tuple(
         other.obj if isinstance(other, BaseAccessor) else other
         for other in others
     )
     if isinstance(cls_or_self, type):
         # Called on the class: broadcast only the given objects
         return reshape_fns.broadcast(*unwrapped, **kwargs)
     # Called on an instance: the wrapped object participates first
     return reshape_fns.broadcast(cls_or_self.obj, *unwrapped, **kwargs)
Esempio n. 2
0
 def broadcast(self_or_cls, *others, **kwargs):
     """Broadcast this object together with `others`.

     Unwraps any `Base_Accessor` instances and delegates to
     `vectorbt.base.reshape_fns.broadcast`.
     """
     unwrapped = tuple(
         other._obj if isinstance(other, Base_Accessor) else other
         for other in others
     )
     if isinstance(self_or_cls, type):
         # Class-level call: broadcast only the given objects
         return reshape_fns.broadcast(*unwrapped, **kwargs)
     # Instance-level call: include the wrapped object first
     return reshape_fns.broadcast(self_or_cls._obj, *unwrapped, **kwargs)
Esempio n. 3
0
    def concat(self_or_cls, *others, keys=None, broadcast_kwargs=None):
        """Concatenate with `others` along columns.

        All arguments will be broadcast using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`. Use `keys` as the outermost level.

        ## Example

        ```python-repl
        >>> import vectorbt as vbt
        >>> import pandas as pd

        >>> sr = pd.Series([1, 2], index=['x', 'y'])
        >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])
        >>> sr.vbt.concat(df, keys=['c', 'd'])
              c     d
           a  b  a  b
        x  1  1  3  4
        y  2  2  5  6
        ```
        """
        # Use a None sentinel instead of a mutable `{}` default: a shared
        # dict default would leak state across calls if it were ever mutated.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        # Unwrap accessors so only raw pandas/array objects are broadcast
        others = tuple(map(lambda x: x._obj if isinstance(x, Base_Accessor) else x, others))
        if isinstance(self_or_cls, type):
            # Called on the class: concatenate only `others`
            objs = others
        else:
            # Called on an instance: prepend the wrapped object
            objs = (self_or_cls._obj,) + others
        broadcasted = reshape_fns.broadcast(*objs, **broadcast_kwargs)
        broadcasted = tuple(map(reshape_fns.to_2d, broadcasted))
        out = pd.concat(broadcasted, axis=1, keys=keys)
        # Replace the all-zero default column labels with a clean RangeIndex
        if not isinstance(out.columns, pd.MultiIndex) and np.all(out.columns == 0):
            out.columns = pd.RangeIndex(start=0, stop=len(out.columns), step=1)
        return out
Esempio n. 4
0
    def concat(self_or_cls, *others, keys=None, broadcast_kwargs=None):
        """Concatenate with `others` along columns.

        All arguments will be broadcasted using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`. Use `keys` as the outermost level.

        Example:
            ```python-repl
            >>> import vectorbt as vbt
            >>> import pandas as pd
            >>> sr = pd.Series([1, 2], index=['x', 'y'])
            >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

            >>> sr.vbt.concat(df, keys=['c', 'd'])
                  c     d
               a  b  a  b
            x  1  1  3  4
            y  2  2  5  6
            ```"""
        # Use a None sentinel instead of a mutable `{}` default: a shared
        # dict default would leak state across calls if it were ever mutated.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        # Unwrap accessors so only raw pandas/array objects are broadcast
        others = tuple(map(lambda x: x._obj if isinstance(x, Base_Accessor) else x, others))
        if isinstance(self_or_cls, type):
            # Called on the class: concatenate only `others`
            objs = others
        else:
            # Called on an instance: prepend the wrapped object
            objs = (self_or_cls._obj,) + others
        broadcasted = reshape_fns.broadcast(*objs, **broadcast_kwargs)
        broadcasted = tuple(map(reshape_fns.to_2d, broadcasted))
        concatenated = pd.concat(broadcasted, axis=1)
        if keys is not None:
            # Stack `keys` on top of the broadcast columns
            concatenated.columns = index_fns.combine_indexes(keys, broadcasted[0].columns)
        return concatenated
Esempio n. 5
0
    def map_reduce_between(self,
                           other=None,
                           map_func_nb=None,
                           map_args=None,
                           reduce_func_nb=None,
                           reduce_args=None,
                           broadcast_kwargs=None):
        """See `vectorbt.signals.nb.map_reduce_between_nb`.

        If `other` specified, see `vectorbt.signals.nb.map_reduce_between_two_nb`.
        Both will be broadcast using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        Note that `map_args` and `reduce_args` won't be broadcast.

        ## Example

        Get average distance between signals in `sig`:
        ```python-repl
        >>> distance_map_nb = njit(lambda from_i, to_i, col: to_i - from_i)
        >>> mean_reduce_nb = njit(lambda col, a: np.nanmean(a))

        >>> sig.vbt.signals.map_reduce_between(
        ...     map_func_nb=distance_map_nb,
        ...     reduce_func_nb=mean_reduce_nb)
        a    NaN
        b    2.0
        c    1.0
        dtype: float64
        ```
        """
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        # Both Numba callbacks are mandatory
        checks.assert_not_none(map_func_nb)
        checks.assert_not_none(reduce_func_nb)
        checks.assert_numba_func(map_func_nb)
        checks.assert_numba_func(reduce_func_nb)
        if map_args is None:
            map_args = ()
        if reduce_args is None:
            reduce_args = ()

        if other is None:
            # One input array
            result = nb.map_reduce_between_nb(self.to_2d_array(), map_func_nb,
                                              map_args, reduce_func_nb,
                                              reduce_args)
            if isinstance(self._obj, pd.Series):
                return result[0]
            return pd.Series(result, index=self.wrapper.columns)
        else:
            # Two input arrays
            obj, other = reshape_fns.broadcast(self._obj, other,
                                               **broadcast_kwargs)
            # np.bool_ instead of np.bool: the `np.bool` alias was deprecated
            # in NumPy 1.20 and removed in 1.24.
            checks.assert_dtype(other, np.bool_)
            result = nb.map_reduce_between_two_nb(obj.vbt.to_2d_array(),
                                                  other.vbt.to_2d_array(),
                                                  map_func_nb, map_args,
                                                  reduce_func_nb, reduce_args)
            return obj.vbt.wrapper.wrap_reduced(result)
Esempio n. 6
0
    def generate_random_exits(self, prob=None, seed=None, wait=1):
        """Generate exit signals randomly.

        With `prob` as None, delegates to
        `vectorbt.signals.nb.generate_rand_ex_nb` (exactly one exit per entry);
        otherwise delegates to `vectorbt.signals.nb.generate_rand_ex_by_prob_nb`
        (exit at each step with probability `prob`).
        """
        if prob is None:
            # One exit per entry, placed randomly after `wait` steps
            exits = nb.generate_rand_ex_nb(self.to_2d_array(), wait, seed=seed)
            return self.wrapper.wrap(exits)
        # Probability-based exits: broadcast `prob` against the entries,
        # keeping it raw so the Numba function receives a plain array
        obj, prob = reshape_fns.broadcast(self._obj, prob, keep_raw=[False, True])
        exits = nb.generate_rand_ex_by_prob_nb(
            obj.vbt.to_2d_array(), prob, wait, obj.ndim == 2, seed=seed)
        return obj.vbt.wrapper.wrap(exits)
Esempio n. 7
0
    def clean(self_or_cls, *args, entry_first=True, broadcast_kwargs=None, wrap_kwargs=None):
        """Clean signals.

        With a single array, equivalent to `SignalsAccessor.first`.
        With two arrays (entries and exits), delegates to
        `vectorbt.signals.nb.clean_enex_nb`."""
        if not isinstance(self_or_cls, type):
            # Instance call: the wrapped object counts as the first array
            args = (self_or_cls._obj, *args)
        n_args = len(args)
        if n_args == 1:
            obj = args[0]
            if not isinstance(obj, (pd.Series, pd.DataFrame)):
                # Wrap raw arrays into pandas before using the accessor
                wrapper = ArrayWrapper.from_shape(np.asarray(obj).shape)
                obj = wrapper.wrap(obj, **merge_dicts({}, wrap_kwargs))
            return obj.vbt.signals.first()
        if n_args == 2:
            if broadcast_kwargs is None:
                broadcast_kwargs = {}
            entries, exits = reshape_fns.broadcast(*args, **broadcast_kwargs)
            entries_out, exits_out = nb.clean_enex_nb(
                entries.vbt.to_2d_array(),
                exits.vbt.to_2d_array(),
                entry_first
            )
            wrapped_entries = entries.vbt.wrapper.wrap(entries_out, **merge_dicts({}, wrap_kwargs))
            wrapped_exits = exits.vbt.wrapper.wrap(exits_out, **merge_dicts({}, wrap_kwargs))
            return wrapped_entries, wrapped_exits
        raise ValueError("Either one or two arrays must be passed")
Esempio n. 8
0
    def generate_stop_loss_exits(self,
                                 ts,
                                 stops,
                                 trailing=False,
                                 first=True,
                                 keys=None,
                                 broadcast_kwargs=None):
        """See `vectorbt.signals.nb.generate_stop_loss_exits_nb`.

        Arguments will be broadcasted using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`. Argument `stops` can be either a single number, an array of 
        numbers, or a 3D array, where each matrix corresponds to a single configuration. 
        Use `keys` as the outermost level.

        Example:
            For each entry in `sig`, set stop loss for 10% and 50% below the entry price:

            ```python-repl
            >>> ts = pd.Series([1, 2, 3, 2, 1])
            >>> print(sig.vbt.signals.generate_stop_loss_exits(ts, [0.1, 0.5]))
            stop_loss                   0.1                  0.5
                            a      b      c      a      b      c
            2020-01-01  False  False  False  False  False  False
            2020-01-02  False  False  False  False  False  False
            2020-01-03  False  False  False  False  False  False
            2020-01-04  False   True   True  False  False  False
            2020-01-05  False  False  False  False  False   True
            >>> print(sig.vbt.signals.generate_stop_loss_exits(ts, [0.1, 0.5], trailing=True))
            trail_stop                  0.1                  0.5
                            a      b      c      a      b      c
            2020-01-01  False  False  False  False  False  False
            2020-01-02  False  False  False  False  False  False
            2020-01-03  False  False  False  False  False  False
            2020-01-04   True   True   True  False  False  False
            2020-01-05  False  False  False   True  False   True
            ```"""
        # Use a None sentinel instead of a mutable `{}` default: a shared
        # dict default would leak state across calls if it were ever mutated.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        entries = self._obj
        checks.assert_type(ts, (pd.Series, pd.DataFrame))

        # Numba requires writable arrays
        entries, ts = reshape_fns.broadcast(entries,
                                            ts,
                                            **broadcast_kwargs,
                                            writeable=True)
        # Expand `stops` so each element becomes one full configuration matrix
        stops = reshape_fns.broadcast_to_array_of(stops,
                                                  entries.vbt.to_2d_array())
        exits = nb.generate_stop_loss_exits_nb(entries.vbt.to_2d_array(),
                                               ts.vbt.to_2d_array(),
                                               stops,
                                               trailing=trailing,
                                               first=first)

        # Build column hierarchy: one outer level per stop configuration
        if keys is not None:
            param_columns = keys
        else:
            name = 'trail_stop' if trailing else 'stop_loss'
            param_columns = index_fns.index_from_values(stops, name=name)
        columns = index_fns.combine_indexes(param_columns, entries.vbt.columns)
        return entries.vbt.wrap(exits, columns=columns)
Esempio n. 9
0
    def rank(self,
             reset_by=None,
             after_false=False,
             allow_gaps=False,
             broadcast_kwargs=None,
             wrap_kwargs=None):
        """See `vectorbt.signals.nb.rank_nb`.

        Assigns each True value its rank within its partition; `reset_by`
        (broadcast against the signals) restarts ranking, `after_false`
        requires a preceding False, and `allow_gaps` lets ranks continue
        across gaps.
        """
        broadcast_kwargs = {} if broadcast_kwargs is None else broadcast_kwargs
        if reset_by is None:
            obj = self._obj
        else:
            # Broadcast the reset array against the signals first
            obj, reset_by = reshape_fns.broadcast(self._obj, reset_by,
                                                  **broadcast_kwargs)
            reset_by = reset_by.vbt.to_2d_array()
        ranked = nb.rank_nb(
            obj.vbt.to_2d_array(),
            reset_by=reset_by,
            after_false=after_false,
            allow_gaps=allow_gaps)
        return obj.vbt.wrapper.wrap(ranked, **merge_dicts({}, wrap_kwargs))
Esempio n. 10
0
    def generate_stop_exits(self, ts, stop, trailing=False, entry_wait=1, exit_wait=1,
                            first=True, iteratively=False, broadcast_kwargs=None, wrap_kwargs=None):
        """Generate exits based on when `ts` hits the stop.

        For argument semantics, see `vectorbt.signals.nb.stop_choice_nb`.
        With `iteratively` set to True, delegates to
        `vectorbt.signals.nb.generate_stop_ex_iter_nb` (returns new entries
        and exits); otherwise to `vectorbt.signals.nb.generate_stop_ex_nb`.

        The entries, `ts` and `stop` are broadcast together using
        `vectorbt.base.reshape_fns.broadcast` with `broadcast_kwargs`.
        """
        if broadcast_kwargs is None:
            broadcast_kwargs = {}

        # Numba requires writable arrays; keep ts/stop raw for speed
        broadcast_kwargs = merge_dicts(dict(require_kwargs=dict(requirements='W')), broadcast_kwargs)
        entries, ts, stop = reshape_fns.broadcast(
            self._obj, ts, stop, **broadcast_kwargs, keep_raw=(False, True, True))

        is_2d = entries.ndim == 2
        if not iteratively:
            exits = nb.generate_stop_ex_nb(
                entries.vbt.to_2d_array(), ts, stop, trailing, exit_wait, first, is_2d)
            return entries.vbt.wrapper.wrap(exits, **merge_dicts({}, wrap_kwargs))
        # Iterative mode regenerates the entries along with the exits
        new_entries, exits = nb.generate_stop_ex_iter_nb(
            entries.vbt.to_2d_array(), ts, stop, trailing, entry_wait, exit_wait, is_2d)
        wrapped_entries = entries.vbt.wrapper.wrap(new_entries, **merge_dicts({}, wrap_kwargs))
        wrapped_exits = entries.vbt.wrapper.wrap(exits, **merge_dicts({}, wrap_kwargs))
        return wrapped_entries, wrapped_exits
Esempio n. 11
0
    def combine_with(self,
                     other,
                     *args,
                     combine_func=None,
                     to_2d=False,
                     broadcast_kwargs=None,
                     wrap_kwargs=None,
                     **kwargs):
        """Combine this object with `other` using `combine_func` into a
        Series/DataFrame of the same shape.

        Both objects are broadcast together using
        `vectorbt.base.reshape_fns.broadcast` with `broadcast_kwargs`.
        `*args` and `**kwargs` are forwarded to `combine_func`. With `to_2d`
        set to True, `combine_func` receives 2-dimensional NumPy arrays.

        !!! note
            The resulting array must have the same shape as the broadcast
            input arrays.
        """
        if isinstance(other, BaseAccessor):
            other = other._obj
        checks.assert_not_none(combine_func)
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        if checks.is_numba_func(combine_func):
            # Numba functions require writable input arrays
            broadcast_kwargs = merge_dicts(
                dict(require_kwargs=dict(requirements='W')), broadcast_kwargs)
        new_obj, new_other = reshape_fns.broadcast(self._obj, other,
                                                   **broadcast_kwargs)
        # Feed either 2d raw arrays or the plain array views to the combiner
        if to_2d:
            lhs = reshape_fns.to_2d(new_obj, raw=True)
            rhs = reshape_fns.to_2d(new_other, raw=True)
        else:
            lhs = np.asarray(new_obj)
            rhs = np.asarray(new_other)
        result = combine_func(lhs, rhs, *args, **kwargs)
        return new_obj.vbt.wrapper.wrap(result, **merge_dicts({}, wrap_kwargs))
Esempio n. 12
0
    def generate_take_profit_exits(self, ts, stops, first=True, iteratively=False, keys=None, broadcast_kwargs=None):
        """Generate take profit exits.

        See `vectorbt.signals.nb.generate_tp_ex_iter_nb` if `iteratively` is `True`, otherwise see
        `vectorbt.signals.nb.generate_tp_ex_nb`.

        Arguments will be broadcasted using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`. Argument `stops` can be either a single number, an array of
        numbers, or a 3D array, where each matrix corresponds to a single configuration.
        Use `keys` as the outermost level.

        Example:
            For each entry in `sig`, set take profit for 10% and 50% above the entry price:
            ```python-repl
            >>> ts = pd.Series([1, 2, 3, 4, 5])
            >>> sig.vbt.signals.generate_take_profit_exits(ts, [0.1, 0.5])
            take_profit                  0.1                  0.5
                             a      b      c      a      b      c
            2020-01-01   False  False  False  False  False  False
            2020-01-02    True   True  False   True   True  False
            2020-01-03   False  False  False  False  False  False
            2020-01-04   False   True   True  False  False  False
            2020-01-05   False  False  False  False  False   True
            ```"""
        # Use a None sentinel instead of a mutable `{}` default: a shared
        # dict default would leak state across calls if it were ever mutated.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        entries = self._obj
        checks.assert_type(ts, (pd.Series, pd.DataFrame))

        # Numba requires writable arrays
        broadcast_kwargs = merge_kwargs(dict(require_kwargs=dict(requirements='W')), broadcast_kwargs)
        entries, ts = reshape_fns.broadcast(entries, ts, **broadcast_kwargs)
        # Expand `stops` so each element becomes one full configuration matrix
        stops = reshape_fns.broadcast_to_array_of(stops, entries.vbt.to_2d_array())

        # Build column hierarchy: one outer level per stop configuration
        if keys is not None:
            param_columns = keys
        else:
            param_columns = index_fns.index_from_values(stops, name='take_profit')
        columns = index_fns.combine_indexes(param_columns, entries.vbt.columns)

        # Perform generation
        if iteratively:
            # Iterative mode regenerates the entries along with the exits
            new_entries, exits = nb.generate_tp_ex_iter_nb(
                entries.vbt.to_2d_array(),
                ts.vbt.to_2d_array(),
                stops)
            return entries.vbt.wrap(new_entries, columns=columns), entries.vbt.wrap(exits, columns=columns)
        else:
            exits = nb.generate_tp_ex_nb(
                entries.vbt.to_2d_array(),
                ts.vbt.to_2d_array(),
                stops,
                first=first)
            return entries.vbt.wrap(exits, columns=columns)
Esempio n. 13
0
    def map_reduce_between(self,
                           *args,
                           other=None,
                           map_func_nb=None,
                           reduce_func_nb=None,
                           broadcast_kwargs=None):
        """See `vectorbt.signals.nb.map_reduce_between_nb`. 

        If `other` specified, see `vectorbt.signals.nb.map_reduce_between_two_nb`.

        Arguments will be broadcasted using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        Example:
            Get average distance between signals in `sig`:

            ```python-repl
            >>> distance_map_nb = njit(lambda col, from_i, to_i: to_i - from_i)
            >>> mean_reduce_nb = njit(lambda col, a: np.nanmean(a))

            >>> print(sig.vbt.signals.map_reduce_between(
            ...     map_func_nb=distance_map_nb, reduce_func_nb=mean_reduce_nb))
            a    NaN
            b    2.0
            c    1.0
            dtype: float64
            ```"""
        # Use a None sentinel instead of a mutable `{}` default: a shared
        # dict default would leak state across calls if it were ever mutated.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        # Both Numba callbacks are mandatory
        checks.assert_not_none(map_func_nb)
        checks.assert_not_none(reduce_func_nb)
        checks.assert_numba_func(map_func_nb)
        checks.assert_numba_func(reduce_func_nb)

        if other is None:
            # One input array
            result = nb.map_reduce_between_nb(self.to_2d_array(), map_func_nb,
                                              reduce_func_nb, *args)
            if isinstance(self._obj, pd.Series):
                return result[0]
            return pd.Series(result, index=self.columns)
        else:
            # Two input arrays: use the broadcast `obj`, not `self` — the
            # original passed `self.to_2d_array()` and ignored the broadcast
            # result, which breaks when broadcasting changes the shape.
            obj, other = reshape_fns.broadcast(self._obj, other,
                                               **broadcast_kwargs)
            checks.assert_dtype(other, np.bool_)
            result = nb.map_reduce_between_two_nb(obj.vbt.to_2d_array(),
                                                  other.vbt.to_2d_array(),
                                                  map_func_nb, reduce_func_nb,
                                                  *args)
            return obj.vbt.wrap_reduced(result)
Esempio n. 14
0
    def rank_partitions(self,
                        reset_by: tp.Optional[tp.ArrayLike] = None,
                        after_false: bool = False,
                        broadcast_kwargs: tp.KwargsLike = None,
                        wrap_kwargs: tp.KwargsLike = None) -> tp.SeriesFrame:
        """See `vectorbt.signals.nb.rank_partitions_nb`.

        Assigns each partition of True values a rank; `reset_by` (broadcast
        against the signals) restarts the ranking, and `after_false`
        requires a preceding False value.
        """
        broadcast_kwargs = {} if broadcast_kwargs is None else broadcast_kwargs
        if reset_by is None:
            obj = self._obj
        else:
            # Broadcast the reset array against the signals first
            obj, reset_by = reshape_fns.broadcast(self._obj, reset_by,
                                                  **broadcast_kwargs)
            reset_by = reset_by.vbt.to_2d_array()
        ranked = nb.rank_partitions_nb(
            obj.vbt.to_2d_array(),
            reset_by=reset_by,
            after_false=after_false)
        return obj.vbt.wrapper.wrap(ranked, **merge_dicts({}, wrap_kwargs))
Esempio n. 15
0
    def generate_random_exits(
            self,
            prob: tp.Optional[tp.ArrayLike] = None,
            seed: tp.Optional[int] = None,
            wait: int = 1,
            wrap_kwargs: tp.KwargsLike = None) -> tp.SeriesFrame:
        """Generate exit signals randomly.

        With `prob` as None, delegates to
        `vectorbt.signals.nb.generate_rand_ex_nb` (exactly one exit per entry);
        otherwise delegates to `vectorbt.signals.nb.generate_rand_ex_by_prob_nb`
        (exit at each step with probability `prob`).
        """
        if prob is None:
            # One exit per entry, placed randomly after `wait` steps
            exits = nb.generate_rand_ex_nb(self.to_2d_array(), wait, seed=seed)
            return self.wrapper.wrap(exits, **merge_dicts({}, wrap_kwargs))
        # Probability-based exits: broadcast `prob` against the entries,
        # keeping it raw so the Numba function receives a plain array
        obj, prob = reshape_fns.broadcast(self._obj, prob, keep_raw=[False, True])
        exits = nb.generate_rand_ex_by_prob_nb(
            obj.vbt.to_2d_array(), prob, wait, obj.ndim == 2, seed=seed)
        return obj.vbt.wrapper.wrap(exits, **merge_dicts({}, wrap_kwargs))
Esempio n. 16
0
    def rank_partitions(self,
                        reset_by=None,
                        after_false=False,
                        broadcast_kwargs=None):
        """See `vectorbt.signals.nb.rank_partitions_nb`.

        Example:
            Rank each partition of `True` values in `sig`:

            ```python-repl
            >>> print(sig.vbt.signals.rank_partitions())
                        a  b  c
            2020-01-01  1  1  1
            2020-01-02  0  0  1
            2020-01-03  0  2  1
            2020-01-04  0  0  0
            2020-01-05  0  3  0
            >>> print(sig.vbt.signals.rank_partitions(after_false=True))
                        a  b  c
            2020-01-01  0  0  0
            2020-01-02  0  0  0
            2020-01-03  0  1  0
            2020-01-04  0  0  0
            2020-01-05  0  2  0
            >>> print(sig.vbt.signals.rank_partitions(reset_by=sig))
                        a  b  c
            2020-01-01  1  1  1
            2020-01-02  0  0  1
            2020-01-03  0  1  1
            2020-01-04  0  0  0
            2020-01-05  0  1  0
            ```"""
        # Use a None sentinel instead of a mutable `{}` default: a shared
        # dict default would leak state across calls if it were ever mutated.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        if reset_by is not None:
            # Broadcast the reset array against the signals first
            obj, reset_by = reshape_fns.broadcast(self._obj, reset_by,
                                                  **broadcast_kwargs)
            reset_by = reset_by.vbt.to_2d_array()
        else:
            obj = self._obj
        ranked = nb.rank_partitions_nb(obj.vbt.to_2d_array(),
                                       reset_by=reset_by,
                                       after_false=after_false)
        return obj.vbt.wrap(ranked)
Esempio n. 17
0
    def concat(cls_or_self,
               *others: tp.ArrayLike,
               broadcast_kwargs: tp.KwargsLike = None,
               keys: tp.Optional[tp.IndexLike] = None) -> tp.Frame:
        """Concatenate with `others` along columns.

        Args:
            *others (array_like): List of objects to be concatenated with this array.
            broadcast_kwargs (dict): Keyword arguments passed to `vectorbt.base.reshape_fns.broadcast`.
            keys (index_like): Outermost column level.
        """
        # Unwrap accessors so only raw pandas/array objects are broadcast
        unwrapped = tuple(
            other.obj if isinstance(other, BaseAccessor) else other
            for other in others)
        if isinstance(cls_or_self, type):
            # Class-level call: concatenate only `others`
            objs = unwrapped
        else:
            # Instance-level call: the wrapped object goes first
            objs = (cls_or_self.obj,) + unwrapped
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        broadcasted = reshape_fns.broadcast(*objs, **broadcast_kwargs)
        frames = tuple(reshape_fns.to_2d(b) for b in broadcasted)
        out = pd.concat(frames, axis=1, keys=keys)
        # Replace the all-zero default column labels with a clean RangeIndex
        has_default_cols = (not isinstance(out.columns, pd.MultiIndex)
                            and np.all(out.columns == 0))
        if has_default_cols:
            out.columns = pd.RangeIndex(start=0, stop=len(out.columns), step=1)
        return out
Esempio n. 18
0
    def map_reduce_between(
            self,
            other: tp.Optional[tp.ArrayLike] = None,
            map_func_nb: tp.Optional[tp.SignalMapFunc] = None,
            map_args: tp.Optional[tp.Args] = None,
            reduce_func_nb: tp.Optional[tp.SignalReduceFunc] = None,
            reduce_args: tp.Optional[tp.Args] = None,
            broadcast_kwargs: tp.KwargsLike = None,
            wrap_kwargs: tp.KwargsLike = None) -> tp.MaybeSeries:
        """See `vectorbt.signals.nb.map_reduce_between_nb`.

        If `other` is given, delegates to
        `vectorbt.signals.nb.map_reduce_between_two_nb`; both objects are
        broadcast together using `vectorbt.base.reshape_fns.broadcast` with
        `broadcast_kwargs`.

        Note that `map_args` and `reduce_args` won't be broadcast.
        """
        # Both Numba callbacks are mandatory
        checks.assert_not_none(map_func_nb)
        checks.assert_not_none(reduce_func_nb)
        checks.assert_numba_func(map_func_nb)
        checks.assert_numba_func(reduce_func_nb)
        broadcast_kwargs = {} if broadcast_kwargs is None else broadcast_kwargs
        map_args = () if map_args is None else map_args
        reduce_args = () if reduce_args is None else reduce_args

        wrap_kwargs = merge_dicts(dict(name_or_index='map_reduce_between'),
                                  wrap_kwargs)
        if other is None:
            # One input array
            result = nb.map_reduce_between_nb(
                self.to_2d_array(), map_func_nb, map_args,
                reduce_func_nb, reduce_args)
            return self.wrapper.wrap_reduced(result, **wrap_kwargs)
        # Two input arrays: broadcast first, then map-reduce pairwise
        obj, other = reshape_fns.broadcast(self._obj, other, **broadcast_kwargs)
        checks.assert_dtype(other, np.bool_)
        result = nb.map_reduce_between_two_nb(
            obj.vbt.to_2d_array(), other.vbt.to_2d_array(),
            map_func_nb, map_args, reduce_func_nb, reduce_args)
        return obj.vbt.wrapper.wrap_reduced(result, **wrap_kwargs)
Esempio n. 19
0
    def from_signals(cls,
                     main_price,
                     entries,
                     exits,
                     size=np.inf,
                     size_type=SizeType.Shares,
                     entry_price=None,
                     exit_price=None,
                     init_capital=None,
                     fees=None,
                     fixed_fees=None,
                     slippage=None,
                     accumulate=False,
                     broadcast_kwargs=None,
                     freq=None,
                     **kwargs):
        """Build portfolio from entry and exit signals.

        For each signal in `entries`, buys `size` of shares for `entry_price` to enter
        a position. For each signal in `exits`, sells everything for `exit_price`
        to exit the position. Accumulation of orders is disabled by default.

        For more details, see `vectorbt.portfolio.nb.simulate_from_signals_nb`.

        Args:
            main_price (pandas_like): Main price of the asset, such as close. Will broadcast.
            entries (array_like): Boolean array of entry signals. Will broadcast.
            exits (array_like): Boolean array of exit signals. Will broadcast.
            size (float or array_like): The amount of shares to order. Will broadcast.

                To buy/sell everything, set the size to `np.inf`.
            size_type (int or array_like): See `vectorbt.portfolio.enums.SizeType`.

                Only `SizeType.Shares` and `SizeType.Cash` are supported.
            entry_price (array_like): Entry price. Defaults to `main_price`. Will broadcast.
            exit_price (array_like): Exit price. Defaults to `main_price`. Will broadcast.
            init_capital (float or array_like): The initial capital. Will broadcast.

                Allowed is either a single value or value per column.
            fees (float or array_like): Fees in percentage of the order value. Will broadcast.
            fixed_fees (float or array_like): Fixed amount of fees to pay per order. Will broadcast.
            slippage (float or array_like): Slippage in percentage of price. Will broadcast.
            accumulate (bool): If `accumulate` is `True`, entering the market when already
                in the market will be allowed to increase a position.
            broadcast_kwargs (dict): Keyword arguments passed to `vectorbt.base.reshape_fns.broadcast`.
                Defaults to an empty dict.
            freq (any): Index frequency in case `main_price.index` is not datetime-like.
            **kwargs: Keyword arguments passed to the `__init__` method.

        For defaults, see `vectorbt.defaults.portfolio`.

        All time series will be broadcasted together using `vectorbt.base.reshape_fns.broadcast`.
        At the end, they will have the same metadata.

        Example:
            Portfolio from various signal sequences:
            ```python-repl
            >>> entries = pd.DataFrame({
            ...     'a': [True, False, False, False, False],
            ...     'b': [True, False, True, False, True],
            ...     'c': [True, True, True, True, True]
            ... }, index=index)
            >>> exits = pd.DataFrame({
            ...     'a': [False, False, False, False, False],
            ...     'b': [False, True, False, True, False],
            ...     'c': [True, True, True, True, True]
            ... }, index=index)
            >>> portfolio = vbt.Portfolio.from_signals(
            ...     price, entries, exits, size=10,
            ...     init_capital=100, fees=0.0025, fixed_fees=1., slippage=0.001)

            >>> portfolio.orders.records
               col  idx  size  price      fees  side
            0    0    0  10.0  1.001  1.025025     0
            1    1    0  10.0  1.001  1.025025     0
            2    1    1  10.0  1.998  1.049950     1
            3    1    2  10.0  3.003  1.075075     0
            4    1    3  10.0  1.998  1.049950     1
            5    1    4  10.0  1.001  1.025025     0
            6    2    0  10.0  1.001  1.025025     0
            >>> portfolio.equity
                                 a           b           c
            2020-01-01   98.964975   98.964975   98.964975
            2020-01-02  108.964975  107.895025  108.964975
            2020-01-03  118.964975  106.789950  118.964975
            2020-01-04  108.964975   95.720000  108.964975
            2020-01-05   98.964975   94.684975   98.964975
            ```
        """
        # Get defaults
        # Use None as sentinel instead of a mutable {} default, which would be
        # shared across all calls of this method
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        if entry_price is None:
            entry_price = main_price
        if exit_price is None:
            exit_price = main_price
        if init_capital is None:
            init_capital = defaults.portfolio['init_capital']
        if size is None:
            size = defaults.portfolio['size']
        if size_type is None:
            size_type = defaults.portfolio['size_type']
        if fees is None:
            fees = defaults.portfolio['fees']
        if fixed_fees is None:
            fixed_fees = defaults.portfolio['fixed_fees']
        if slippage is None:
            slippage = defaults.portfolio['slippage']

        # Perform checks
        checks.assert_type(main_price, (pd.Series, pd.DataFrame))
        checks.assert_dtype(entries, np.bool_)
        checks.assert_dtype(exits, np.bool_)

        # Broadcast inputs
        # Only main_price is broadcasted, others can remain unchanged thanks to flexible indexing
        keep_raw = (False, True, True, True, True, True, True, True, True,
                    True, True)
        main_price, entries, exits, size, size_type, entry_price, \
            exit_price, fees, fixed_fees, slippage, init_capital = \
            reshape_fns.broadcast(
                main_price, entries, exits, size, size_type, entry_price, exit_price, fees,
                fixed_fees, slippage, init_capital, **broadcast_kwargs,
                writeable=True, keep_raw=keep_raw)
        # Simulation always works on a 2-dim shape; a Series counts as one column
        target_shape = (main_price.shape[0],
                        main_price.shape[1] if main_price.ndim > 1 else 1)

        # Perform calculation
        order_records, cash, shares = nb.simulate_from_signals_nb(
            target_shape,
            init_capital,
            entries,
            exits,
            size,
            size_type,
            entry_price,
            exit_price,
            fees,
            fixed_fees,
            slippage,
            accumulate,
            is_2d=main_price.ndim == 2)

        # Bring to the same meta
        cash = main_price.vbt.wrap(cash)
        shares = main_price.vbt.wrap(shares)
        orders = Orders(order_records, main_price, freq=freq)
        if checks.is_series(main_price):
            # Single column: reduce initial capital to a scalar
            init_capital = init_capital.item(0)
        else:
            # One initial capital value per column
            init_capital = np.broadcast_to(init_capital, (target_shape[1], ))
            init_capital = main_price.vbt.wrap_reduced(init_capital)

        return cls(main_price,
                   init_capital,
                   orders,
                   cash,
                   shares,
                   freq=freq,
                   **kwargs)
Esempio n. 20
0
    def combine(self,
                other: tp.MaybeTupleList[tp.Union[tp.ArrayLike,
                                                  "BaseAccessor"]],
                *args,
                allow_multiple: bool = True,
                combine_func: tp.Optional[tp.Callable] = None,
                keep_pd: bool = False,
                to_2d: bool = False,
                concat: bool = False,
                numba_loop: bool = False,
                use_ray: bool = False,
                broadcast: bool = True,
                broadcast_kwargs: tp.KwargsLike = None,
                keys: tp.Optional[tp.IndexLike] = None,
                wrap_kwargs: tp.KwargsLike = None,
                **kwargs) -> tp.SeriesFrame:
        """Combine with `other` using `combine_func`.

        Args:
            other (array_like): Object to combine this array with.
            *args: Variable arguments passed to `combine_func`.
            allow_multiple (bool): Whether a tuple/list will be considered as multiple objects in `other`.
            combine_func (callable): Function to combine two arrays.

                Can be Numba-compiled.
            keep_pd (bool): Whether to keep inputs as pandas objects, otherwise convert to NumPy arrays.
            to_2d (bool): Whether to reshape inputs to 2-dim arrays, otherwise keep as-is.
            concat (bool): Whether to concatenate the results along the column axis.
                Otherwise, pairwise combine into a Series/DataFrame of the same shape.

                If True, see `vectorbt.base.combine_fns.combine_and_concat`.
                If False, see `vectorbt.base.combine_fns.combine_multiple`.
            numba_loop (bool): Whether to loop using Numba.

                Set to True when iterating large number of times over small input,
                but note that Numba doesn't support variable keyword arguments.
            use_ray (bool): Whether to use Ray to execute `combine_func` in parallel.

                Only works with `numba_loop` set to False and `concat` is set to True.
                See `vectorbt.base.combine_fns.ray_apply` for related keyword arguments.
            broadcast (bool): Whether to broadcast all inputs.
            broadcast_kwargs (dict): Keyword arguments passed to `vectorbt.base.reshape_fns.broadcast`.
            keys (index_like): Outermost column level.
            wrap_kwargs (dict): Keyword arguments passed to `vectorbt.base.array_wrapper.ArrayWrapper.wrap`.
            **kwargs: Keyword arguments passed to `combine_func`.

        !!! note
            If `combine_func` is Numba-compiled, will broadcast using `WRITEABLE` and `C_CONTIGUOUS`
            flags, which can lead to an expensive computation overhead if passed objects are large and
            have different shape/memory order. You also must ensure that all objects have the same data type.

            Also remember to bring each in `*args` to a Numba-compatible format.

        ## Example

        ```python-repl
        >>> import vectorbt as vbt
        >>> import pandas as pd

        >>> sr = pd.Series([1, 2], index=['x', 'y'])
        >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

        >>> sr.vbt.combine(df, combine_func=lambda x, y: x + y)
           a  b
        x  4  5
        y  7  8

        >>> sr.vbt.combine([df, df*2], combine_func=lambda x, y: x + y)
            a   b
        x  10  13
        y  17  20

        >>> sr.vbt.combine([df, df*2], combine_func=lambda x, y: x + y, concat=True, keys=['c', 'd'])
              c       d
           a  b   a   b
        x  4  5   7   9
        y  7  8  12  14
        ```

        Use Ray for small inputs and large processing times:

        ```python-repl
        >>> def combine_func(a, b):
        ...     time.sleep(1)
        ...     return a + b

        >>> sr = pd.Series([1, 2, 3])

        >>> %timeit sr.vbt.combine([1, 1, 1], combine_func=combine_func)
        3.01 s ± 2.98 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)

        >>> %timeit sr.vbt.combine([1, 1, 1], combine_func=combine_func, concat=True, use_ray=True)
        1.02 s ± 2.32 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)
        ```
        """
        # Normalize `other` into a tuple of objects to combine with
        if not allow_multiple or not isinstance(other, (tuple, list)):
            others = (other, )
        else:
            others = other
        # Unwrap accessors so downstream code operates on the underlying objects
        others = tuple(
            map(lambda x: x.obj if isinstance(x, BaseAccessor) else x, others))
        checks.assert_not_none(combine_func)
        # Broadcast arguments
        if broadcast:
            if broadcast_kwargs is None:
                broadcast_kwargs = {}
            if checks.is_numba_func(combine_func):
                # Numba requires writeable arrays
                # Plus all of our arrays must be in the same order
                broadcast_kwargs = merge_dicts(
                    dict(require_kwargs=dict(requirements=['W', 'C'])),
                    broadcast_kwargs)
            new_obj, *new_others = reshape_fns.broadcast(
                self.obj, *others, **broadcast_kwargs)
        else:
            new_obj, new_others = self.obj, others
        # Ensure new_obj is pandas so its metadata can be reused for wrapping the result
        if not checks.is_pandas(new_obj):
            new_obj = ArrayWrapper.from_shape(new_obj.shape).wrap(new_obj)
        # Optionally cast to 2d array
        if to_2d:
            inputs = tuple(
                map(lambda x: reshape_fns.to_2d(x, raw=not keep_pd),
                    (new_obj, *new_others)))
        else:
            if not keep_pd:
                # Convert to NumPy arrays but keep the original dimensionality
                inputs = tuple(
                    map(lambda x: np.asarray(x), (new_obj, *new_others)))
            else:
                inputs = new_obj, *new_others
        # Fast path: exactly one other object -> a single pairwise combination
        if len(inputs) == 2:
            result = combine_func(inputs[0], inputs[1], *args, **kwargs)
            return ArrayWrapper.from_obj(new_obj).wrap(
                result, **merge_dicts({}, wrap_kwargs))
        if concat:
            # Concat the results horizontally
            if checks.is_numba_func(combine_func) and numba_loop:
                if use_ray:
                    raise ValueError("Ray cannot be used within Numba")
                # Numba path requires identical metadata across all inputs
                for i in range(1, len(inputs)):
                    checks.assert_meta_equal(inputs[i - 1], inputs[i])
                result = combine_fns.combine_and_concat_nb(
                    inputs[0], inputs[1:], combine_func, *args, **kwargs)
            else:
                if use_ray:
                    result = combine_fns.combine_and_concat_ray(
                        inputs[0], inputs[1:], combine_func, *args, **kwargs)
                else:
                    result = combine_fns.combine_and_concat(
                        inputs[0], inputs[1:], combine_func, *args, **kwargs)
            # Build the new column hierarchy: `keys` (or a default integer level
            # named 'combine_idx') becomes the outermost level
            columns = ArrayWrapper.from_obj(new_obj).columns
            if keys is not None:
                new_columns = index_fns.combine_indexes([keys, columns])
            else:
                top_columns = pd.Index(np.arange(len(new_others)),
                                       name='combine_idx')
                new_columns = index_fns.combine_indexes([top_columns, columns])
            return ArrayWrapper.from_obj(new_obj).wrap(
                result, **merge_dicts(dict(columns=new_columns), wrap_kwargs))
        else:
            # Combine arguments pairwise into one object
            if use_ray:
                raise ValueError("Ray cannot be used with concat=False")
            if checks.is_numba_func(combine_func) and numba_loop:
                # Numba path requires identical dtypes across all inputs
                for i in range(1, len(inputs)):
                    checks.assert_dtype_equal(inputs[i - 1], inputs[i])
                result = combine_fns.combine_multiple_nb(
                    inputs, combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_multiple(inputs, combine_func,
                                                      *args, **kwargs)
            return ArrayWrapper.from_obj(new_obj).wrap(
                result, **merge_dicts({}, wrap_kwargs))
Esempio n. 21
0
    def from_orders(cls,
                    main_price,
                    order_size,
                    size_type=SizeType.Shares,
                    order_price=None,
                    init_capital=None,
                    fees=None,
                    fixed_fees=None,
                    slippage=None,
                    broadcast_kwargs=None,
                    freq=None,
                    **kwargs):
        """Build portfolio from orders.

        Starting with initial capital `init_capital`, at each time step, orders the number
        of shares specified in `order_size` for `order_price`.

        For more details, see `vectorbt.portfolio.nb.simulate_from_orders_nb`.

        Args:
            main_price (pandas_like): Main price of the asset, such as close. Will broadcast.
            order_size (float or array_like): The amount of shares to order. Will broadcast.

                If the size is positive, this is the number of shares to buy.
                If the size is negative, this is the number of shares to sell.
                To buy/sell everything, set the size to `np.inf`.
            size_type (int or array_like): See `vectorbt.portfolio.enums.SizeType`.
            order_price (array_like): Order price. Defaults to `main_price`. Will broadcast.
            init_capital (float or array_like): The initial capital. Will broadcast.

                Allowed is either a single value or value per column.
            fees (float or array_like): Fees in percentage of the order value. Will broadcast.
            fixed_fees (float or array_like): Fixed amount of fees to pay per order. Will broadcast.
            slippage (float or array_like): Slippage in percentage of price. Will broadcast.
            broadcast_kwargs (dict): Keyword arguments passed to `vectorbt.base.reshape_fns.broadcast`.
                Defaults to an empty dict.
            freq (any): Index frequency in case `main_price.index` is not datetime-like.
            **kwargs: Keyword arguments passed to the `__init__` method.

        For defaults, see `vectorbt.defaults.portfolio`.

        All time series will be broadcasted together using `vectorbt.base.reshape_fns.broadcast`.
        At the end, they will have the same metadata.

        Example:
            Portfolio from various order sequences:
            ```python-repl
            >>> portfolio = vbt.Portfolio.from_orders(price, orders,
            ...     init_capital=100, fees=0.0025, fixed_fees=1., slippage=0.001)

            >>> portfolio.orders.records
                col  idx        size  price      fees  side
            0     0    0   98.654463  1.001  1.246883     0
            1     1    0    1.000000  1.001  1.002502     0
            2     1    1    1.000000  2.002  1.005005     0
            3     1    2    1.000000  3.003  1.007507     0
            4     1    3    1.000000  2.002  1.005005     0
            5     1    4    4.000000  0.999  1.009990     1
            6     2    0   98.654463  1.001  1.246883     0
            7     2    1   98.654463  1.998  1.492779     1
            8     2    2   64.646521  3.003  1.485334     0
            9     2    3   64.646521  1.998  1.322909     1
            10    2    4  126.398131  1.001  1.316311     0
            >>> portfolio.equity
                                 a          b           c
            2020-01-01   98.654463  98.996498   98.654463
            2020-01-02  197.308925  98.989493  195.618838
            2020-01-03  295.963388  99.978985  193.939564
            2020-01-04  197.308925  95.971980  127.840840
            2020-01-05   98.654463  90.957990  126.398131
            ```
        """
        # Get defaults
        # Use None as sentinel instead of a mutable {} default, which would be
        # shared across all calls of this method
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        if order_price is None:
            order_price = main_price
        if init_capital is None:
            init_capital = defaults.portfolio['init_capital']
        if fees is None:
            fees = defaults.portfolio['fees']
        if fixed_fees is None:
            fixed_fees = defaults.portfolio['fixed_fees']
        if slippage is None:
            slippage = defaults.portfolio['slippage']

        # Perform checks
        checks.assert_type(main_price, (pd.Series, pd.DataFrame))

        # Broadcast inputs
        # Only main_price is broadcasted, others can remain unchanged thanks to flexible indexing
        keep_raw = (False, True, True, True, True, True, True, True)
        main_price, order_size, size_type, order_price, fees, fixed_fees, slippage, init_capital = \
            reshape_fns.broadcast(
                main_price, order_size, size_type, order_price, fees, fixed_fees, slippage, init_capital,
                **broadcast_kwargs, writeable=True, keep_raw=keep_raw)
        # Simulation always works on a 2-dim shape; a Series counts as one column
        target_shape = (main_price.shape[0],
                        main_price.shape[1] if main_price.ndim > 1 else 1)

        # Perform calculation
        order_records, cash, shares = nb.simulate_from_orders_nb(
            target_shape,
            init_capital,
            order_size,
            size_type,
            order_price,
            fees,
            fixed_fees,
            slippage,
            is_2d=main_price.ndim == 2)

        # Bring to the same meta
        cash = main_price.vbt.wrap(cash)
        shares = main_price.vbt.wrap(shares)
        orders = Orders(order_records, main_price, freq=freq)
        if checks.is_series(main_price):
            # Single column: reduce initial capital to a scalar
            init_capital = init_capital.item(0)
        else:
            # One initial capital value per column
            init_capital = np.broadcast_to(init_capital, (target_shape[1], ))
            init_capital = main_price.vbt.wrap_reduced(init_capital)

        return cls(main_price,
                   init_capital,
                   orders,
                   cash,
                   shares,
                   freq=freq,
                   **kwargs)
Esempio n. 22
0
    def from_order_func(cls,
                        main_price,
                        order_func_nb,
                        *args,
                        init_capital=None,
                        row_wise=False,
                        row_prep_func_nb=None,
                        broadcast_kwargs=None,
                        freq=None,
                        **kwargs):
        """Build portfolio from a custom order function.

        Starting with initial capital `init_capital`, iterates over shape `main_price.shape`, and for
        each data point, generates an order using `order_func_nb`. This way, you can specify order
        size, price and transaction costs dynamically (for example, based on the current balance).

        if `row_wise` is `True`, see `vectorbt.portfolio.nb.simulate_row_wise_nb`.
        Otherwise, see `vectorbt.portfolio.nb.simulate_nb`.

        Args:
            main_price (pandas_like): Main price of the asset, such as close. Will broadcast.
            order_func_nb (function): Function that returns an order.

                See `vectorbt.portfolio.enums.Order`.
            *args: Arguments passed to `order_func_nb`.
            init_capital (float or array_like): The initial capital. Will broadcast.

                Allowed is either a single value or value per column.
            row_wise (bool): If `True`, iterates over rows, otherwise over columns.

                Set to `True` if columns depend upon each other.
            row_prep_func_nb (function): Function to call before iterating over the next row.

                Can be used to do preprocessing, such as to calculate past returns.
            broadcast_kwargs (dict): Keyword arguments passed to `vectorbt.base.reshape_fns.broadcast`.
                Defaults to an empty dict.
            freq (any): Index frequency in case `main_price.index` is not datetime-like.
            **kwargs: Keyword arguments passed to the `__init__` method.

        For defaults, see `vectorbt.defaults.portfolio`.

        All time series will be broadcasted together using `vectorbt.base.reshape_fns.broadcast`.
        At the end, they will have the same metadata.

        !!! note
            `order_func_nb` must be Numba-compiled.

        Example:
            Placing a buy order each day:
            ```python-repl
            >>> from vectorbt.portfolio import Order, SizeType

            >>> @njit
            ... def order_func_nb(oc, price):
            ...     return Order(10, SizeType.Shares, price[oc.i],
            ...         fees=0.01, fixed_fees=1., slippage=0.01)

            >>> portfolio = vbt.Portfolio.from_order_func(
            ...     price, order_func_nb, price.values, init_capital=100)

            >>> portfolio.orders.records
               col  idx  size  price   fees  side
            0    0    0  10.0   1.01  1.101     0
            1    0    1  10.0   2.02  1.202     0
            2    0    2  10.0   3.03  1.303     0
            3    0    3  10.0   2.02  1.202     0
            4    0    4  10.0   1.01  1.101     0
            >>> portfolio.equity
            2020-01-01     98.799
            2020-01-02    107.397
            2020-01-03    125.794
            2020-01-04     94.392
            2020-01-05     53.191
            Name: a, dtype: float64
            ```
        """
        # Get defaults
        # Use None as sentinel instead of a mutable {} default, which would be
        # shared across all calls of this method
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        if init_capital is None:
            init_capital = defaults.portfolio['init_capital']

        # Perform checks
        checks.assert_type(main_price, (pd.Series, pd.DataFrame))
        checks.assert_numba_func(order_func_nb)

        # Broadcast inputs
        # Only main_price is broadcasted, others can remain unchanged thanks to flexible indexing
        keep_raw = (False, True)
        main_price, init_capital = reshape_fns.broadcast(main_price,
                                                         init_capital,
                                                         **broadcast_kwargs,
                                                         writeable=True,
                                                         keep_raw=keep_raw)
        # Simulation always works on a 2-dim shape; a Series counts as one column
        target_shape = (main_price.shape[0],
                        main_price.shape[1] if main_price.ndim > 1 else 1)

        # Perform calculation
        if row_wise:
            if row_prep_func_nb is None:
                row_prep_func_nb = nb.none_row_prep_func_nb
            order_records, cash, shares = nb.simulate_row_wise_nb(
                target_shape, init_capital, row_prep_func_nb, order_func_nb,
                *args)
        else:
            # row_prep_func_nb only makes sense when iterating over rows
            if row_prep_func_nb is not None:
                raise ValueError(
                    "Function row_prep_func_nb can be only called when row_wise=True"
                )
            order_records, cash, shares = nb.simulate_nb(
                target_shape, init_capital, order_func_nb, *args)

        # Bring to the same meta
        cash = main_price.vbt.wrap(cash)
        shares = main_price.vbt.wrap(shares)
        orders = Orders(order_records, main_price, freq=freq)
        if checks.is_series(main_price):
            # Single column: reduce initial capital to a scalar
            init_capital = init_capital.item(0)
        else:
            # One initial capital value per column
            init_capital = np.broadcast_to(init_capital, (target_shape[1], ))
            init_capital = main_price.vbt.wrap_reduced(init_capital)

        return cls(main_price,
                   init_capital,
                   orders,
                   cash,
                   shares,
                   freq=freq,
                   **kwargs)
Esempio n. 23
0
    def from_combs(cls, ts, windows, r, ewm=False, param_product=False, names=None, **kwargs):
        """Create multiple `MA` combinations according to `itertools.combinations`.

        All combinations share a single cache, so each underlying moving average
        is computed only once even if it appears in several combinations.

        Args:
            ts (pandas_like): Time series (such as price).
            windows (array_like of int): Size of the moving window.
            r (int): The number of `MA` instances to combine.
            ewm (bool or array_like of bool): If `True`, uses exponential moving average,
                otherwise uses simple moving average.
            param_product (bool): If `True`, builds a Cartesian product out of all parameters.
            names (list of str): A list of names for each `MA` instance.
                Defaults to `['ma1', ..., 'ma<r>']`.
            **kwargs: Keyword arguments passed to `vectorbt.indicators.factory.from_params_pipeline.`

        Returns:
            tuple of MA

        Example:
            ```python-repl
            >>> fast_ma, slow_ma = vbt.MA.from_combs(price['Close'],
            ...     [10, 20, 30], 2, ewm=[False, False, True], names=['fast', 'slow'])
            ```

            The returned instances can then be compared against each other, e.g. via
            `MA.ma_above`/`MA.ma_below`, which build a combined column hierarchy
            (`fast_window`, `fast_ewm`, `slow_window`, `slow_ewm`) usable for indexing:

            ```py
            entry_signals = fast_ma.ma_above(slow_ma, crossed=True)
            exit_signals = fast_ma.ma_below(slow_ma, crossed=True)
            entry_signals[(10, False, 20, False)]
            ```
        """
        if names is None:
            # Default names: ma1, ma2, ..., ma<r>
            names = ['ma' + str(k + 1) for k in range(r)]

        # Align parameters: either build a Cartesian product or broadcast pairwise
        params = [windows, ewm]
        if param_product:
            windows, ewm = create_param_product(params)
        else:
            windows, ewm = reshape_fns.broadcast(*params, writeable=True)

        # Compute each moving average once and share the cache across combinations
        cache_dict = cls.from_params(ts, windows, ewm=ewm, return_cache=True, **kwargs)

        # Each position in the combination becomes one MA instance covering
        # the parameters that appear at that position across all combinations
        combs = itertools.combinations(zip(windows, ewm), r)
        instances = []
        for pos, pos_params in enumerate(zip(*combs)):
            pos_windows, pos_ewm = zip(*pos_params)
            instances.append(cls.from_params(
                ts, pos_windows, ewm=pos_ewm, cache=cache_dict, name=names[pos], **kwargs))
        return tuple(instances)
Example no. 24
0
    def combine_with_multiple(self, others, *args, combine_func=None, to_2d=False,
                              concat=False, broadcast_kwargs=None, keys=None, **kwargs):
        """Combine with `others` using `combine_func`.

        All arguments will be broadcast using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        If `concat` is True, concatenate the results along columns,
        see `vectorbt.base.combine_fns.combine_and_concat`.
        Otherwise, pairwise combine into a Series/DataFrame of the same shape,
        see `vectorbt.base.combine_fns.combine_multiple`.

        Arguments `*args` and `**kwargs` will be directly passed to `combine_func`.
        If `to_2d` is True, 2-dimensional NumPy arrays will be passed, otherwise as is.
        Use `keys` as the outermost level.

        !!! note
            If `combine_func` is Numba-compiled, will broadcast using `WRITEABLE` and `C_CONTIGUOUS`
            flags, which can lead to an expensive computation overhead if passed objects are large and
            have different shape/memory order. You also must ensure that all objects have the same data type.

            Also remember to bring each in `*args` to a Numba-compatible format.

        ## Example

        ```python-repl
        >>> import vectorbt as vbt
        >>> import pandas as pd

        >>> sr = pd.Series([1, 2], index=['x', 'y'])
        >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

        >>> sr.vbt.combine_with_multiple([df, df*2],
        ...     combine_func=lambda x, y: x + y)
            a   b
        x  10  13
        y  17  20

        >>> sr.vbt.combine_with_multiple([df, df*2],
        ...     combine_func=lambda x, y: x + y, concat=True, keys=['c', 'd'])
              c       d
           a  b   a   b
        x  4  5   7   9
        y  7  8  12  14
        ```
        """
        # Fix: the default used to be a mutable `{}` shared across all calls;
        # use the None sentinel and create a fresh dict per call instead
        # (consistent with `combine_with`).
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        # Unwrap accessors down to the underlying pandas objects
        others = tuple(map(lambda x: x._obj if isinstance(x, Base_Accessor) else x, others))
        checks.assert_not_none(combine_func)
        checks.assert_type(others, Iterable)
        # Broadcast arguments
        if checks.is_numba_func(combine_func):
            # Numba requires writeable arrays
            # Plus all of our arrays must be in the same order
            broadcast_kwargs = merge_dicts(dict(require_kwargs=dict(requirements=['W', 'C'])), broadcast_kwargs)
        new_obj, *new_others = reshape_fns.broadcast(self._obj, *others, **broadcast_kwargs)
        # Optionally cast to 2d array
        if to_2d:
            bc_arrays = tuple(map(lambda x: reshape_fns.to_2d(x, raw=True), (new_obj, *new_others)))
        else:
            bc_arrays = tuple(map(lambda x: np.asarray(x), (new_obj, *new_others)))
        if concat:
            # Concat the results horizontally
            if checks.is_numba_func(combine_func):
                for i in range(1, len(bc_arrays)):
                    checks.assert_meta_equal(bc_arrays[i - 1], bc_arrays[i])
                result = combine_fns.combine_and_concat_nb(bc_arrays[0], bc_arrays[1:], combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_and_concat(bc_arrays[0], bc_arrays[1:], combine_func, *args, **kwargs)
            columns = new_obj.vbt.wrapper.columns
            if keys is not None:
                new_columns = index_fns.combine_indexes(keys, columns)
            else:
                # Default outermost level: the positional index of each combination
                top_columns = pd.Index(np.arange(len(new_others)), name='combine_idx')
                new_columns = index_fns.combine_indexes(top_columns, columns)
            return new_obj.vbt.wrapper.wrap(result, columns=new_columns)
        else:
            # Combine arguments pairwise into one object
            if checks.is_numba_func(combine_func):
                for i in range(1, len(bc_arrays)):
                    checks.assert_dtype_equal(bc_arrays[i - 1], bc_arrays[i])
                result = combine_fns.combine_multiple_nb(bc_arrays, combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_multiple(bc_arrays, combine_func, *args, **kwargs)
            return new_obj.vbt.wrapper.wrap(result)
Example no. 25
0
    def combine_with(self,
                     other,
                     *args,
                     combine_func=None,
                     keep_pd=False,
                     to_2d=False,
                     broadcast=True,
                     broadcast_kwargs=None,
                     wrap_kwargs=None,
                     **kwargs):
        """Apply `combine_func` to this object and `other`, producing a
        Series/DataFrame of the same (broadcast) shape.

        Args:
            other (array_like): Object to be combined with this array.
            *args: Variable arguments passed to `combine_func`.
            combine_func (callable): Function to combine two arrays.

                Can be Numba-compiled.
            keep_pd (bool): Whether to keep inputs as pandas objects, otherwise convert to NumPy arrays.
            to_2d (bool): Whether to reshape inputs to 2-dim arrays, otherwise keep as-is.
            broadcast (bool): Whether to broadcast all inputs.
            broadcast_kwargs (dict): Keyword arguments passed to `vectorbt.base.reshape_fns.broadcast`.
            wrap_kwargs (dict): Keyword arguments passed to `vectorbt.base.array_wrapper.ArrayWrapper.wrap`.
            **kwargs: Keyword arguments passed to `combine_func`.

        !!! note
            The resulted array must have the same shape as broadcast input arrays.

        ## Example

        ```python-repl
        >>> import vectorbt as vbt
        >>> import pandas as pd

        >>> sr = pd.Series([1, 2], index=['x', 'y'])
        >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])
        >>> sr.vbt.combine_with(df, combine_func=lambda x, y: x + y)
           a  b
        x  4  5
        y  7  8
        ```
        """
        # Unwrap the accessor down to the underlying pandas object
        if isinstance(other, BaseAccessor):
            other = other._obj
        checks.assert_not_none(combine_func)
        if not broadcast:
            obj_bc, other_bc = self._obj, other
        else:
            bc_kwargs = {} if broadcast_kwargs is None else broadcast_kwargs
            if checks.is_numba_func(combine_func):
                # Numba-compiled functions require writable arrays
                bc_kwargs = merge_dicts(
                    dict(require_kwargs=dict(requirements='W')), bc_kwargs)
            obj_bc, other_bc = reshape_fns.broadcast(self._obj, other, **bc_kwargs)
        # Decide in which form the two inputs reach combine_func
        if to_2d:
            left, right = (reshape_fns.to_2d(x, raw=not keep_pd)
                           for x in (obj_bc, other_bc))
        elif keep_pd:
            left, right = obj_bc, other_bc
        else:
            left, right = np.asarray(obj_bc), np.asarray(other_bc)
        out = combine_func(left, right, *args, **kwargs)
        return obj_bc.vbt.wrapper.wrap(out, **merge_dicts({}, wrap_kwargs))
Example no. 26
0
    def generate_ohlc_stop_exits(self, open, high=None, low=None, close=None, is_open_safe=True,
                                out_dict=None, sl_stop=0., ts_stop=0., tp_stop=0., entry_wait=1,
                                exit_wait=1, first=True, iteratively=False, broadcast_kwargs=None, wrap_kwargs=None):
        """Generate exits based on when the price hits (trailing) stop loss or take profit.

        If any of `high`, `low` or `close` is None, it will be set to `open`.

        Use `out_dict` as a dict to pass `hit_price` and `stop_type` arrays. You can also
        set `out_dict` to {} to produce these arrays automatically and still have access to them.

        For arguments, see `vectorbt.signals.nb.ohlc_stop_choice_nb`.
        If `iteratively` is True, see `vectorbt.signals.nb.generate_ohlc_stop_ex_iter_nb`.
        Otherwise, see `vectorbt.signals.nb.generate_ohlc_stop_ex_nb`.

        All array-like arguments including stops and `out_dict` will broadcast using
        `vectorbt.base.reshape_fns.broadcast` with `broadcast_kwargs`.

        !!! note
            `open` isn't necessarily open price, but can be any entry price (even previous close).
            Stop price is calculated based solely upon `open`.

        ## Example

        ```python-repl
        >>> from vectorbt.signals.enums import StopType

        >>> price = pd.DataFrame({
        ...     'open': [10, 11, 12, 11, 10],
        ...     'high': [11, 12, 13, 12, 11],
        ...     'low': [9, 10, 11, 10, 9],
        ...     'close': [10, 11, 12, 11, 10]
        ... })
        >>> out_dict = {}
        >>> exits = sig.vbt.signals.generate_ohlc_stop_exits(
        ...     price['open'], price['high'], price['low'], price['close'],
        ...     out_dict=out_dict, sl_stop=0.2, ts_stop=0.2, tp_stop=0.2)
        >>> out_dict['hit_price'][~exits] = np.nan
        >>> out_dict['stop_type'][~exits] = -1

        >>> exits
                        a      b      c
        2020-01-01  False  False  False
        2020-01-02   True   True  False
        2020-01-03  False  False  False
        2020-01-04  False  False  False
        2020-01-05  False  False   True

        >>> out_dict['hit_price']
                       a     b    c
        2020-01-01   NaN   NaN  NaN
        2020-01-02  12.0  12.0  NaN
        2020-01-03   NaN   NaN  NaN
        2020-01-04   NaN   NaN  NaN
        2020-01-05   NaN   NaN  9.6

        >>> out_dict['stop_type'].applymap(
        ...     lambda x: StopType._fields[x] if x in StopType else '')
                             a           b         c
        2020-01-01
        2020-01-02  TakeProfit  TakeProfit
        2020-01-03
        2020-01-04
        2020-01-05                          StopLoss
        ```
        """
        # NOTE: ``open`` intentionally shadows the builtin; it is part of the
        # public signature and cannot be renamed without breaking callers.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        entries = self._obj
        # Missing price columns default to the entry price
        if high is None:
            high = open
        if low is None:
            low = open
        if close is None:
            close = open
        if out_dict is None:
            out_dict = {}
        hit_price_out = out_dict.get('hit_price', None)
        stop_type_out = out_dict.get('stop_type', None)
        # Collect user-provided output arrays in a fixed order (hit_price first,
        # then stop_type) so they take part in broadcasting below; the same
        # order is relied upon when consuming out_args afterwards.
        out_args = ()
        if hit_price_out is not None:
            out_args += (hit_price_out,)
        if stop_type_out is not None:
            out_args += (stop_type_out,)

        # keep_raw flags match the positional arguments of broadcast():
        # entries (False -> wrapped) + 7 price/stop inputs (True -> raw NumPy)
        # + one False per user output (kept wrapped so it can be written back)
        keep_raw = (False, True, True, True, True, True, True, True) + (False,) * len(out_args)
        broadcast_kwargs = merge_dicts(dict(require_kwargs=dict(requirements='W')), broadcast_kwargs)
        entries, open, high, low, close, sl_stop, ts_stop, tp_stop, *out_args = reshape_fns.broadcast(
            entries, open, high, low, close, sl_stop, ts_stop, tp_stop, *out_args,
            **broadcast_kwargs, keep_raw=keep_raw)
        if hit_price_out is None:
            # User didn't supply one -> allocate the output array ourselves
            hit_price_out = np.empty_like(entries, dtype=np.float_)
        else:
            # Broadcast version of the user-supplied array is first in out_args
            # (same order as appended above); consume it
            hit_price_out = out_args[0]
            if checks.is_pandas(hit_price_out):
                hit_price_out = hit_price_out.vbt.to_2d_array()
            out_args = out_args[1:]
        if stop_type_out is None:
            stop_type_out = np.empty_like(entries, dtype=np.int_)
        else:
            stop_type_out = out_args[0]
            if checks.is_pandas(stop_type_out):
                stop_type_out = stop_type_out.vbt.to_2d_array()

        # Perform generation
        if iteratively:
            # Iterative mode also regenerates entries -> returns (new_entries, exits)
            new_entries, exits = nb.generate_ohlc_stop_ex_iter_nb(
                entries.vbt.to_2d_array(), open, high, low, close, hit_price_out,
                stop_type_out, sl_stop, ts_stop, tp_stop, is_open_safe, entry_wait,
                exit_wait, first, entries.ndim == 2)
            # Expose the filled output arrays back through out_dict
            out_dict['hit_price'] = entries.vbt.wrapper.wrap(hit_price_out, **merge_dicts({}, wrap_kwargs))
            out_dict['stop_type'] = entries.vbt.wrapper.wrap(stop_type_out, **merge_dicts({}, wrap_kwargs))
            return entries.vbt.wrapper.wrap(new_entries, **merge_dicts({}, wrap_kwargs)), \
                   entries.vbt.wrapper.wrap(exits, **merge_dicts({}, wrap_kwargs))
        else:
            exits = nb.generate_ohlc_stop_ex_nb(
                entries.vbt.to_2d_array(), open, high, low, close, hit_price_out,
                stop_type_out, sl_stop, ts_stop, tp_stop, is_open_safe, exit_wait,
                first, entries.ndim == 2)
            out_dict['hit_price'] = entries.vbt.wrapper.wrap(hit_price_out, **merge_dicts({}, wrap_kwargs))
            out_dict['stop_type'] = entries.vbt.wrapper.wrap(stop_type_out, **merge_dicts({}, wrap_kwargs))
            return entries.vbt.wrapper.wrap(exits, **merge_dicts({}, wrap_kwargs))
Example no. 27
0
def from_params_pipeline(ts_list,
                         param_list,
                         level_names,
                         num_outputs,
                         custom_func,
                         *args,
                         pass_lists=False,
                         pass_2d=True,
                         param_product=False,
                         broadcast_kwargs=None,
                         return_raw=False,
                         **kwargs):
    """A pipeline for calculating an indicator, used by `IndicatorFactory`.

    Args:
        ts_list (list of array_like): A list of time series objects. At least one must be a pandas object.
        param_list (list of array_like): A list of parameters. Each element is either an array-like object
            or a single value of any type.
        level_names (list of str): A list of column level names corresponding to each parameter.
        num_outputs (int): The number of output arrays.
        custom_func (function): A custom calculation function. See `IndicatorFactory.from_custom_func`.
        *args: Arguments passed to the `custom_func`.
        pass_lists (bool): If `True`, arguments are passed to the `custom_func` as lists.
        pass_2d (bool): If `True`, time series arrays will be passed as two-dimensional, otherwise as is.
        param_product (bool): If `True`, builds a Cartesian product out of all parameters.
        broadcast_kwargs (dict): Keyword arguments passed to the `vectorbt.base.reshape_fns.broadcast`
            on time series objects.
        return_raw (bool): If `True`, returns the raw output without post-processing.
        **kwargs: Keyword arguments passed to the `custom_func`.

            Some common arguments include `return_cache` to return cache and `cache` to pass cache.
            Those are only applicable to `custom_func` that supports it (`custom_func` created using
            `IndicatorFactory.from_apply_func` are supported by default).

    Returns:
        A list of transformed inputs (`pandas_like`), a list of generated outputs (`pandas_like`),
        a list of parameter arrays (`np.ndarray`), a list of parameter mappers (`pd.Series`),
        a list of other generated outputs that are outside of `num_outputs`.

    Explanation:

        Does the following:

        * Takes one or multiple time series objects in `ts_list` and broadcasts them. For example:

        ```python-repl
        >>> sr = pd.Series([1, 2], index=['x', 'y'])
        >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])
        >>> ts_list = [sr, df]

        >>> ts_list = vbt.base.reshape_fns.broadcast(*ts_list)
        >>> print(ts_list[0])
        a  b
        x  1  1
        y  2  2
        >>> print(ts_list[1])
        a  b
        x  3  4
        y  5  6
        ```

        * Takes one or multiple parameters in `param_list`, converts them to NumPy arrays and
            broadcasts them. For example:

        ```python-repl
        >>> p1, p2, p3 = 1, [2, 3, 4], [False]
        >>> param_list = [p1, p2, p3]

        >>> param_list = vbt.base.reshape_fns.broadcast(*param_list)
        >>> print(param_list[0])
        array([1, 1, 1])
        >>> print(param_list[1])
        array([2, 3, 4])
        >>> print(param_list[2])
        array([False, False, False])
        ```

        * Performs calculation using `custom_func` to build output arrays (`output_list`) and
            other objects (`other_list`, optionally). For example:

        ```python-repl
        >>> def custom_func(ts1, ts2, p1, p2, p3, *args, **kwargs):
        ...     return np.hstack((
        ...         ts1 + ts2 + p1[0] * p2[0],
        ...         ts1 + ts2 + p1[1] * p2[1],
        ...         ts1 + ts2 + p1[2] * p2[2],
        ...     ))

        >>> output = custom_func(*ts_list, *param_list)
        >>> print(output)
        array([[ 6,  7,  7,  8,  8,  9],
               [ 9, 10, 10, 11, 11, 12]])
        ```

        * Creates new column hierarchy based on parameters and level names. For example:

        ```python-repl
        >>> p1_columns = pd.Index(param_list[0], name='p1')
        >>> p2_columns = pd.Index(param_list[1], name='p2')
        >>> p3_columns = pd.Index(param_list[2], name='p3')
        >>> p_columns = vbt.base.index_fns.stack_indexes(p1_columns, p2_columns, p3_columns)
        >>> new_columns = vbt.base.index_fns.combine_indexes(p_columns, ts_list[0].columns)

        >>> output_df = pd.DataFrame(output, columns=new_columns)
        >>> print(output_df)
        p1                                         1
        p2             2             3             4
        p3  False  False  False  False  False  False
                a      b      a      b      a      b
        0       6      7      7      8      8      9
        1       9     10     10     11     11     12
        ```

        * Broadcasts objects in `ts_list` to match the shape of objects in `output_list` through tiling.
            This is done to be able to compare them and generate signals, since you cannot compare NumPy
            arrays that have totally different shapes, such as (2, 2) and (2, 6). For example:

        ```python-repl
        >>> new_ts_list = [
        ...     ts_list[0].vbt.tile(len(param_list[0]), keys=p_columns),
        ...     ts_list[1].vbt.tile(len(param_list[0]), keys=p_columns)
        ... ]
        >>> print(new_ts_list[0])
        p1                                         1
        p2             2             3             4
        p3  False  False  False  False  False  False
                a      b      a      b      a      b
        0       1      1      1      1      1      1
        1       2      2      2      2      2      2
        ```

        * Builds parameter mappers that will link parameters from `param_list` to columns in
            `ts_list` and `output_list`. This is done to enable column indexing using parameter values.
    """
    # Fix: the default used to be a mutable `{}` shared across all calls;
    # use the None sentinel and create a fresh dict per call instead.
    if broadcast_kwargs is None:
        broadcast_kwargs = {}
    if len(ts_list) > 1:
        # Broadcast time series
        ts_list = reshape_fns.broadcast(*ts_list,
                                        **broadcast_kwargs,
                                        writeable=True)
    # Check time series objects
    checks.assert_type(ts_list[0], (pd.Series, pd.DataFrame))
    # Convert params to 1-dim arrays
    param_list = list(map(reshape_fns.to_1d, param_list))
    if len(param_list) > 1:
        # Check level names
        checks.assert_type(level_names, (list, tuple))
        checks.assert_same_len(param_list, level_names)
        for ts in ts_list:
            # Every time series object should be free of the specified level names in its columns
            for level_name in level_names:
                if level_name is not None:
                    if checks.is_frame(ts):
                        checks.assert_level_not_exists(ts.columns, level_name)
        if param_product:
            # Make Cartesian product out of all params
            param_list = create_param_product(param_list)
        else:
            # Broadcast such that each array has the same length
            param_list = reshape_fns.broadcast(*param_list, writeable=True)
    # Perform main calculation
    if pass_2d:
        array_list = tuple(
            map(lambda x: reshape_fns.to_2d(np.asarray(x)), ts_list))
    else:
        array_list = tuple(map(lambda x: np.asarray(x), ts_list))
    if pass_lists:
        output_list = custom_func(array_list, param_list, *args, **kwargs)
    else:
        output_list = custom_func(*array_list, *param_list, *args, **kwargs)
    if return_raw or kwargs.get('return_cache', False):
        return output_list  # return raw cache outputs
    if not isinstance(output_list, (tuple, list, List)):
        output_list = [output_list]
    else:
        output_list = list(output_list)
    # Other outputs should be returned without post-processing (for example cache_dict)
    if len(output_list) > num_outputs:
        other_list = output_list[num_outputs:]
    else:
        other_list = []
    # Process only the num_outputs outputs
    output_list = output_list[:num_outputs]
    if len(param_list) > 0:
        # Build new column levels on top of time series levels
        new_columns = build_column_hierarchy(param_list, level_names,
                                             ts_list[0].vbt.columns)
        # Wrap into new pandas objects both time series and output objects
        new_ts_list = list(
            map(lambda x: broadcast_ts(x, param_list[0].shape[0], new_columns),
                ts_list))
        # Build mappers to easily map between parameters and columns
        mapper_list = [
            build_mapper(x, ts_list[0], new_columns, level_names[i])
            for i, x in enumerate(param_list)
        ]
    else:
        # Some indicators don't have any params
        new_columns = ts_list[0].vbt.columns
        new_ts_list = list(ts_list)
        mapper_list = []
    output_list = list(
        map(lambda x: wrap_output(x, ts_list[0], new_columns), output_list))
    if len(mapper_list) > 1:
        # Tuple object is a mapper that accepts tuples of parameters
        tuple_mapper = build_tuple_mapper(mapper_list, new_columns,
                                          tuple(level_names))
        mapper_list.append(tuple_mapper)
    return new_ts_list, output_list, param_list, mapper_list, other_list