Example #1
    def generate_exits(self, exit_choice_func_nb, *args, wait=1, wrap_kwargs=None):
        """See `vectorbt.signals.nb.generate_ex_nb`.

        ## Example

        Fill all space after signals in `sig`:
        ```python-repl
        >>> @njit
        ... def exit_choice_func_nb(from_i, to_i, col, temp_range):
        ...     return temp_range[from_i:to_i]

        >>> temp_range = np.arange(sig.shape[0])  # reuse memory
        >>> sig.vbt.signals.generate_exits(exit_choice_func_nb, temp_range)
                        a      b      c
        2020-01-01  False  False  False
        2020-01-02   True   True  False
        2020-01-03   True  False  False
        2020-01-04   True   True   True
        2020-01-05   True  False   True
        ```
        """
        checks.assert_numba_func(exit_choice_func_nb)

        exits = nb.generate_ex_nb(self.to_2d_array(), wait, exit_choice_func_nb, *args)
        return self.wrapper.wrap(exits, **merge_dicts({}, wrap_kwargs))
Example #2
    def groupby_apply(self, by, apply_func_nb, *args, on_matrix=False, **kwargs):
        """See `vectorbt.timeseries.nb.groupby_apply_nb` and 
        `vectorbt.timeseries.nb.groupby_apply_matrix_nb` for `on_matrix=True`.

        For `by`, see [pandas.DataFrame.groupby](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.groupby.html).

        Example:
            ```python-repl
            >>> mean_nb = njit(lambda col, i, a: np.nanmean(a))
            >>> print(df.vbt.timeseries.groupby_apply([1, 1, 2, 2, 3], 
            ...     mean_nb))
                 a    b    c
            1  1.5  4.5  1.5
            2  3.5  2.5  2.5
            3  5.0  1.0  1.0

            >>> mean_matrix_nb = njit(lambda i, a: np.nanmean(a))
            >>> print(df.vbt.timeseries.groupby_apply([1, 1, 2, 2, 3], 
            ...     mean_matrix_nb, on_matrix=True))
                      a         b         c
            1  2.500000  2.500000  2.500000
            2  2.833333  2.833333  2.833333
            3  2.333333  2.333333  2.333333
            ```"""
        checks.assert_numba_func(apply_func_nb)

        regrouped = self._obj.groupby(by, axis=0, **kwargs)
        groups = Dict()
        for i, (k, v) in enumerate(regrouped.indices.items()):
            groups[i] = np.asarray(v)
        if on_matrix:
            result = nb.groupby_apply_matrix_nb(self.to_2d_array(), groups, apply_func_nb, *args)
        else:
            result = nb.groupby_apply_nb(self.to_2d_array(), groups, apply_func_nb, *args)
        return self.wrap_array(result, index=list(regrouped.indices.keys()))
Example #3
    def map_reduce_between(self,
                           other=None,
                           map_func_nb=None,
                           map_args=None,
                           reduce_func_nb=None,
                           reduce_args=None,
                           broadcast_kwargs=None):
        """See `vectorbt.signals.nb.map_reduce_between_nb`.

        If `other` is specified, see `vectorbt.signals.nb.map_reduce_between_two_nb`.
        Both will be broadcast using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        Note that `map_args` and `reduce_args` won't be broadcast.

        ## Example

        Get average distance between signals in `sig`:
        ```python-repl
        >>> distance_map_nb = njit(lambda from_i, to_i, col: to_i - from_i)
        >>> mean_reduce_nb = njit(lambda col, a: np.nanmean(a))

        >>> sig.vbt.signals.map_reduce_between(
        ...     map_func_nb=distance_map_nb,
        ...     reduce_func_nb=mean_reduce_nb)
        a    NaN
        b    2.0
        c    1.0
        dtype: float64
        ```
        """
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        checks.assert_not_none(map_func_nb)
        checks.assert_not_none(reduce_func_nb)
        checks.assert_numba_func(map_func_nb)
        checks.assert_numba_func(reduce_func_nb)
        if map_args is None:
            map_args = ()
        if reduce_args is None:
            reduce_args = ()

        if other is None:
            # One input array
            result = nb.map_reduce_between_nb(self.to_2d_array(), map_func_nb,
                                              map_args, reduce_func_nb,
                                              reduce_args)
            if isinstance(self._obj, pd.Series):
                return result[0]
            return pd.Series(result, index=self.wrapper.columns)
        else:
            # Two input arrays
            obj, other = reshape_fns.broadcast(self._obj, other,
                                               **broadcast_kwargs)
            checks.assert_dtype(other, np.bool_)
            result = nb.map_reduce_between_two_nb(obj.vbt.to_2d_array(),
                                                  other.vbt.to_2d_array(),
                                                  map_func_nb, map_args,
                                                  reduce_func_nb, reduce_args)
            return obj.vbt.wrapper.wrap_reduced(result)
Example #4
    def rolling_apply(self, window, apply_func_nb, *args, on_matrix=False):
        """See `vectorbt.generic.nb.rolling_apply_nb` and
        `vectorbt.generic.nb.rolling_apply_matrix_nb` for `on_matrix=True`.

        Example:
            ```python-repl
            >>> mean_nb = njit(lambda col, i, a: np.nanmean(a))
            >>> print(df.vbt.rolling_apply(3, mean_nb))
                          a    b         c
            2020-01-01  1.0  5.0  1.000000
            2020-01-02  1.5  4.5  1.500000
            2020-01-03  2.0  4.0  2.000000
            2020-01-04  3.0  3.0  2.333333
            2020-01-05  4.0  2.0  2.000000
            >>> mean_matrix_nb = njit(lambda i, a: np.nanmean(a))
            >>> print(df.vbt.rolling_apply(3, mean_matrix_nb, on_matrix=True))
                               a         b         c
            2020-01-01  2.333333  2.333333  2.333333
            2020-01-02  2.500000  2.500000  2.500000
            2020-01-03  2.666667  2.666667  2.666667
            2020-01-04  2.777778  2.777778  2.777778
            2020-01-05  2.666667  2.666667  2.666667
            ```"""
        checks.assert_numba_func(apply_func_nb)

        if on_matrix:
            result = nb.rolling_apply_matrix_nb(self.to_2d_array(), window,
                                                apply_func_nb, *args)
        else:
            result = nb.rolling_apply_nb(self.to_2d_array(), window,
                                         apply_func_nb, *args)
        return self.wrap(result)
Example #5
    def generate_both(cls, shape, entry_choice_func_nb, exit_choice_func_nb,
                      entry_args, exit_args, **kwargs):
        """See `vectorbt.signals.nb.generate_enex_nb`.

        `**kwargs` will be passed to pandas constructor.

        Example:
            Generate entry and exit signals one after another:
            ```python-repl
            >>> @njit
            ... def entry_choice_func_nb(col, from_i, to_i, wait1):
            ...     next_pos = col + from_i + wait1
            ...     if next_pos < to_i:
            ...          return np.array([next_pos])
            ...     return np.empty(0, dtype=np.int_)
            >>> @njit
            ... def exit_choice_func_nb(col, from_i, to_i, wait2):
            ...     next_pos = col + from_i + wait2
            ...     if next_pos < to_i:
            ...          return np.array([next_pos])
            ...     return np.empty(0, dtype=np.int_)

            >>> en, ex = pd.DataFrame.vbt.signals.generate_both(
            ...     (5, 3), entry_choice_func_nb, exit_choice_func_nb, (0,), (1,),
            ...     index=sig.index, columns=sig.columns)
            >>> en
                            a      b      c
            2020-01-01   True  False  False
            2020-01-02  False   True  False
            2020-01-03  False  False   True
            2020-01-04   True  False  False
            2020-01-05  False  False  False
            >>> ex
                            a      b      c
            2020-01-01  False  False  False
            2020-01-02  False  False  False
            2020-01-03   True  False  False
            2020-01-04  False  False  False
            2020-01-05  False   True  False
            ```"""
        checks.assert_numba_func(entry_choice_func_nb)
        checks.assert_numba_func(exit_choice_func_nb)

        if not isinstance(shape, tuple):
            shape = (shape, 1)
        elif isinstance(shape, tuple) and len(shape) == 1:
            shape = (shape[0], 1)

        result1, result2 = nb.generate_enex_nb(
            shape,
            entry_choice_func_nb,
            exit_choice_func_nb,
            entry_args,
            exit_args
        )
        if cls.is_series():
            if shape[1] > 1:
                raise ValueError("Use DataFrame accessor")
            return pd.Series(result1[:, 0], **kwargs), pd.Series(result2[:, 0], **kwargs)
        return pd.DataFrame(result1, **kwargs), pd.DataFrame(result2, **kwargs)
Example #6
    def expanding_apply(self, apply_func_nb, *args, on_matrix=False):
        """See `vectorbt.timeseries.nb.expanding_apply_nb` and 
        `vectorbt.timeseries.nb.expanding_apply_matrix_nb` for `on_matrix=True`.

        Example:
            ```python-repl
            >>> mean_nb = njit(lambda col, i, a: np.nanmean(a))
            >>> print(df.vbt.timeseries.expanding_apply(mean_nb))
                          a    b    c
            2018-01-01  1.0  5.0  1.0
            2018-01-02  1.5  4.5  1.5
            2018-01-03  2.0  4.0  2.0
            2018-01-04  2.5  3.5  2.0
            2018-01-05  3.0  3.0  1.8

            >>> mean_matrix_nb = njit(lambda i, a: np.nanmean(a))
            >>> print(df.vbt.timeseries.expanding_apply( 
            ...     mean_matrix_nb, on_matrix=True))
                               a         b         c
            2018-01-01  2.333333  2.333333  2.333333
            2018-01-02  2.500000  2.500000  2.500000
            2018-01-03  2.666667  2.666667  2.666667
            2018-01-04  2.666667  2.666667  2.666667
            2018-01-05  2.600000  2.600000  2.600000
            ```"""
        checks.assert_numba_func(apply_func_nb)

        if on_matrix:
            result = nb.expanding_apply_matrix_nb(self.to_2d_array(),
                                                  apply_func_nb, *args)
        else:
            result = nb.expanding_apply_nb(self.to_2d_array(), apply_func_nb,
                                           *args)
        return self.wrap(result)
Example #7
File: accessors.py Project: jzay/vectorbt
    def map_reduce_partitions(self, map_func_nb=None, map_args=None,
                              reduce_func_nb=None, reduce_args=None):
        """See `vectorbt.signals.nb.map_reduce_partitions_nb`.

        ## Example

        Get average length of each partition in `sig`:
        ```python-repl
        >>> distance_map_nb = njit(lambda from_i, to_i, col: to_i - from_i)
        >>> mean_reduce_nb = njit(lambda col, a: np.nanmean(a))

        >>> sig.vbt.signals.map_reduce_partitions(
        ...     map_func_nb=distance_map_nb,
        ...     reduce_func_nb=mean_reduce_nb)
        a    1.0
        b    1.0
        c    3.0
        dtype: float64
        ```
        """
        checks.assert_not_none(map_func_nb)
        checks.assert_not_none(reduce_func_nb)
        checks.assert_numba_func(map_func_nb)
        checks.assert_numba_func(reduce_func_nb)
        if map_args is None:
            map_args = ()
        if reduce_args is None:
            reduce_args = ()

        result = nb.map_reduce_partitions_nb(
            self.to_2d_array(),
            map_func_nb, map_args,
            reduce_func_nb, reduce_args
        )
        return self.wrapper.wrap_reduced(result)
Example #8
    def reduce(self,
               reduce_func_nb,
               *args,
               idx_arr=None,
               to_array=False,
               to_idx=False,
               idx_labeled=True,
               default_val=np.nan,
               group_by=None,
               **kwargs):
        """Reduce mapped array by column.

        If `to_array` is False and `to_idx` is False, see `vectorbt.records.nb.reduce_mapped_nb`.
        If `to_array` is False and `to_idx` is True, see `vectorbt.records.nb.reduce_mapped_to_idx_nb`.
        If `to_array` is True and `to_idx` is False, see `vectorbt.records.nb.reduce_mapped_to_array_nb`.
        If `to_array` is True and `to_idx` is True, see `vectorbt.records.nb.reduce_mapped_to_idx_array_nb`.

        If `to_idx` is True, must pass `idx_arr`. Set `idx_labeled` to False to return raw positions instead
        of labels. Use `default_val` to set the default value. Set `group_by` to False to disable grouping.

        `**kwargs` will be passed to `vectorbt.base.array_wrapper.ArrayWrapper.wrap_reduced`."""
        # Perform checks
        checks.assert_numba_func(reduce_func_nb)
        if idx_arr is None:
            if self.idx_arr is None:
                if to_idx:
                    raise ValueError("Must pass idx_arr")
            idx_arr = self.idx_arr

        # Perform main computation
        col_map = self.col_mapper.get_col_map(group_by=group_by)
        if not to_array:
            if not to_idx:
                out = nb.reduce_mapped_nb(self.values, col_map, default_val,
                                          reduce_func_nb, *args)
            else:
                out = nb.reduce_mapped_to_idx_nb(self.values, col_map, idx_arr,
                                                 default_val, reduce_func_nb,
                                                 *args)
        else:
            if not to_idx:
                out = nb.reduce_mapped_to_array_nb(self.values, col_map,
                                                   default_val, reduce_func_nb,
                                                   *args)
            else:
                out = nb.reduce_mapped_to_idx_array_nb(self.values, col_map,
                                                       idx_arr, default_val,
                                                       reduce_func_nb, *args)

        # Perform post-processing
        if to_idx:
            nan_mask = np.isnan(out)
            if idx_labeled:
                out = out.astype(np.object_)
                out[~nan_mask] = self.wrapper.index[out[~nan_mask].astype(
                    np.int_)]
            else:
                out[nan_mask] = -1
                out = out.astype(np.int_)
        return self.wrapper.wrap_reduced(out, group_by=group_by, **kwargs)
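
A minimal usage sketch of the reduction modes, assuming `mapped` is an existing mapped array (e.g. returned by a records `map` call) and that the reducer follows the `(col, a)` convention used by the other reducers in this section:

```python-repl
>>> import numpy as np
>>> from numba import njit

>>> mean_reduce_nb = njit(lambda col, a: np.nanmean(a))
>>> mapped.reduce(mean_reduce_nb)              # one scalar per column

>>> min_max_nb = njit(lambda col, a: np.array([np.nanmin(a), np.nanmax(a)]))
>>> mapped.reduce(min_max_nb, to_array=True)   # one array per column

>>> argmax_nb = njit(lambda col, a: np.nanargmax(a))
>>> mapped.reduce(argmax_nb, to_idx=True)      # index label of the maximum (requires idx_arr)
```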
Example #9
    def generate(cls, shape, choice_func_nb, *args, **kwargs):
        """See `vectorbt.signals.nb.generate_nb`.

        `**kwargs` will be passed to pandas constructor.

        Example:
            Generate random signals manually:
            ```python-repl
            >>> @njit
            ... def choice_func_nb(col, from_i, to_i):
            ...     return col + from_i

            >>> pd.DataFrame.vbt.signals.generate((5, 3),
            ...     choice_func_nb, index=sig.index, columns=sig.columns)
                            a      b      c
            2020-01-01   True  False  False
            2020-01-02  False   True  False
            2020-01-03  False  False   True
            2020-01-04  False  False  False
            2020-01-05  False  False  False
            ```"""
        checks.assert_numba_func(choice_func_nb)

        if not isinstance(shape, tuple):
            shape = (shape, 1)
        elif isinstance(shape, tuple) and len(shape) == 1:
            shape = (shape[0], 1)

        result = nb.generate_nb(shape, choice_func_nb, *args)

        if cls.is_series():
            if shape[1] > 1:
                raise ValueError("Use DataFrame accessor")
            return pd.Series(result[:, 0], **kwargs)
        return pd.DataFrame(result, **kwargs)
Example #10
    def apply_mapper(self, map_func_nb, *args):
        """Apply `map_func_nb` on each position using `vectorbt.portfolio.nb.map_positions_nb` 
        and filter the results with `pos_filters`.
        
        This way, all time series created on top of positions will be automatically filtered."""
        checks.assert_numba_func(map_func_nb)

        # Apply map
        result = nb.map_positions_nb(self.portfolio.shares.vbt.to_2d_array(),
                                     self.pos_status, map_func_nb, *args)
        result = self.wrap_array(result)

        # Apply passed filters
        for pos_filter in self.pos_filters:
            if isfunction(pos_filter):
                filter_func_nb = pos_filter
                args = ()
            else:
                filter_func_nb = pos_filter[0]
                if len(pos_filter) > 1:
                    args = pos_filter[1:]
                else:
                    args = ()
            checks.assert_numba_func(filter_func_nb)
            result = result.vbt.timeseries.filter(filter_func_nb, *args)

        return result
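
The filtering loop above accepts either bare Numba functions or `(func, *args)` tuples in `pos_filters`; each filter is applied through `filter`, whose functions take `(col, i, a, *args)` (see the `filter` example later in this section). A hypothetical sketch of both forms, where `mapper` and `some_map_func_nb` are placeholders:

```python-repl
>>> from numba import njit

>>> positive_nb = njit(lambda col, i, a: a > 0)                  # bare filter, no extra args
>>> above_nb = njit(lambda col, i, a, threshold: a > threshold)  # filter with one extra arg

>>> # hypothetical: `mapper` was constructed with pos_filters=[positive_nb, (above_nb, 2)]
>>> mapper.apply_mapper(some_map_func_nb)  # some_map_func_nb: signature per map_positions_nb
```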
Example #11
    def generate(cls, shape, choice_func_nb, *args, **kwargs):
        """See `vectorbt.signals.nb.generate_nb`.

        `**kwargs` will be passed to pandas constructor.

        Example:
            Generate random signals manually:

            ```python-repl
            >>> @njit
            ... def choice_func_nb(col, from_i, to_i):
            ...     return np.random.choice(np.arange(from_i, to_i+1), replace=False)

            >>> print(pd.DataFrame.vbt.signals.generate((5, 3), 
            ...     choice_func_nb, index=index, columns=columns))
                            a      b      c
            2018-01-01  False  False  False
            2018-01-02  False  False  False
            2018-01-03  False   True  False
            2018-01-04   True  False  False
            2018-01-05  False  False   True
            ```"""
        checks.assert_numba_func(choice_func_nb)

        if not isinstance(shape, tuple):
            shape = (shape, 1)
        elif isinstance(shape, tuple) and len(shape) == 1:
            shape = (shape[0], 1)

        result = nb.generate_nb(shape, choice_func_nb, *args)

        if cls.is_series():
            return pd.Series(result[:, 0], **kwargs)
        return pd.DataFrame(result, **kwargs)
Example #12
File: accessors.py Project: yamen/vectorbt
    def groupby_apply(self, by, apply_func_nb, *args, on_matrix=False, **kwargs):
        """See `vectorbt.generic.nb.groupby_apply_nb` and
        `vectorbt.generic.nb.groupby_apply_matrix_nb` for `on_matrix=True`.

        For `by`, see `pd.DataFrame.groupby`.

        Example:
            ```python-repl
            >>> mean_nb = njit(lambda col, i, a: np.nanmean(a))
            >>> df.vbt.groupby_apply([1, 1, 2, 2, 3], mean_nb)
                 a    b    c
            1  1.5  4.5  1.5
            2  3.5  2.5  2.5
            3  5.0  1.0  1.0
            >>> mean_matrix_nb = njit(lambda i, a: np.nanmean(a))
            >>> df.vbt.groupby_apply([1, 1, 2, 2, 3], mean_matrix_nb, on_matrix=True)
                      a         b         c
            1  2.500000  2.500000  2.500000
            2  2.833333  2.833333  2.833333
            3  2.333333  2.333333  2.333333
            ```"""
        checks.assert_numba_func(apply_func_nb)

        regrouped = self._obj.groupby(by, axis=0, **kwargs)
        groups = Dict()
        for i, (k, v) in enumerate(regrouped.indices.items()):
            groups[i] = np.asarray(v)
        if on_matrix:
            result = nb.groupby_apply_matrix_nb(self.to_2d_array(), groups, apply_func_nb, *args)
        else:
            result = nb.groupby_apply_nb(self.to_2d_array(), groups, apply_func_nb, *args)
        return self.wrap_reduced(result, index=list(regrouped.indices.keys()))
Example #13
File: base.py Project: vlam020/vectorbt
    def map(self,
            map_func_nb: tp.RecordMapFunc,
            *args,
            idx_field: tp.Optional[str] = None,
            value_map: tp.Optional[tp.ValueMapLike] = None,
            group_by: tp.GroupByLike = None,
            **kwargs) -> MappedArray:
        """Map each record to a scalar value. Returns mapped array.

        See `vectorbt.records.nb.map_records_nb`."""
        checks.assert_numba_func(map_func_nb)
        mapped_arr = nb.map_records_nb(self.values, map_func_nb, *args)
        if idx_field is None:
            idx_field = self.idx_field
        if idx_field is not None:
            idx_arr = self.values[idx_field]
        else:
            idx_arr = None
        return MappedArray(self.wrapper,
                           mapped_arr,
                           self.values['col'],
                           id_arr=self.values['id'],
                           idx_arr=idx_arr,
                           value_map=value_map,
                           **kwargs).regroup(group_by)
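
A minimal usage sketch, assuming `records` is an existing `Records` instance whose dtype contains a hypothetical field `'some_field'`, and that the mapping function receives one record at a time (see `vectorbt.records.nb.map_records_nb`):

```python-repl
>>> import numpy as np
>>> from numba import njit

>>> double_nb = njit(lambda record: record['some_field'] * 2)
>>> mapped = records.map(double_nb)                    # MappedArray with one value per record
>>> mapped.reduce(njit(lambda col, a: np.nanmean(a)))  # e.g. average per column
```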
Example #14
    def map(self,
            map_func_nb,
            *args,
            idx_field=None,
            value_map=None,
            group_by=None,
            **kwargs):
        """Map each record to a scalar value. Returns mapped array.

        See `vectorbt.records.nb.map_records_nb`."""
        checks.assert_numba_func(map_func_nb)
        mapped_arr = nb.map_records_nb(self.values, map_func_nb, *args)
        if idx_field is None:
            idx_field = self.idx_field
        if idx_field is not None:
            idx_arr = self.values[idx_field]
        else:
            idx_arr = None
        return MappedArray(self.wrapper,
                           mapped_arr,
                           self.values['col'],
                           id_arr=self.values['id'],
                           idx_arr=idx_arr,
                           value_map=value_map,
                           **kwargs).regroup(group_by)
Example #15
    def reduce(self,
               reduce_func_nb: tp.ReduceFunc,
               *args,
               idx_arr: tp.Optional[tp.Array1d] = None,
               returns_array: bool = False,
               returns_idx: bool = False,
               to_index: bool = True,
               fill_value: tp.Scalar = np.nan,
               group_by: tp.GroupByLike = None,
               wrap_kwargs: tp.KwargsLike = None) -> tp.MaybeSeriesFrame:
        """Reduce mapped array by column/group.

        If `returns_array` is False and `returns_idx` is False, see `vectorbt.records.nb.reduce_mapped_nb`.
        If `returns_array` is False and `returns_idx` is True, see `vectorbt.records.nb.reduce_mapped_to_idx_nb`.
        If `returns_array` is True and `returns_idx` is False, see `vectorbt.records.nb.reduce_mapped_to_array_nb`.
        If `returns_array` is True and `returns_idx` is True, see `vectorbt.records.nb.reduce_mapped_to_idx_array_nb`.

        If `returns_idx` is True, must pass `idx_arr`. Set `to_index` to False to return raw positions instead
        of labels. Use `fill_value` to set the default value. Set `group_by` to False to disable grouping.
        """
        # Perform checks
        checks.assert_numba_func(reduce_func_nb)
        if idx_arr is None:
            if self.idx_arr is None:
                if returns_idx:
                    raise ValueError("Must pass idx_arr")
            idx_arr = self.idx_arr

        # Perform main computation
        col_map = self.col_mapper.get_col_map(group_by=group_by)
        if not returns_array:
            if not returns_idx:
                out = nb.reduce_mapped_nb(self.values, col_map, fill_value,
                                          reduce_func_nb, *args)
            else:
                out = nb.reduce_mapped_to_idx_nb(self.values, col_map, idx_arr,
                                                 fill_value, reduce_func_nb,
                                                 *args)
        else:
            if not returns_idx:
                out = nb.reduce_mapped_to_array_nb(self.values, col_map,
                                                   fill_value, reduce_func_nb,
                                                   *args)
            else:
                out = nb.reduce_mapped_to_idx_array_nb(self.values, col_map,
                                                       idx_arr, fill_value,
                                                       reduce_func_nb, *args)

        # Perform post-processing
        wrap_kwargs = merge_dicts(
            dict(name_or_index='reduce' if not returns_array else None,
                 to_index=returns_idx and to_index,
                 fillna=-1 if returns_idx else None,
                 dtype=np.int_ if returns_idx else None), wrap_kwargs)
        return self.wrapper.wrap_reduced(out, group_by=group_by, **wrap_kwargs)
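
A minimal sketch of the index-returning path, assuming `mapped` is an existing mapped array with `idx_arr` available; the reducer returns a position into the column's mapped values, which is translated into an index label because `to_index` defaults to True:

```python-repl
>>> import numpy as np
>>> from numba import njit

>>> argmax_nb = njit(lambda col, a: np.nanargmax(a))
>>> mapped.reduce(argmax_nb, returns_idx=True)                  # index label of the maximum per column
>>> mapped.reduce(argmax_nb, returns_idx=True, to_index=False)  # raw row position instead
```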
Example #16
    def test_assert_numba_func(self):
        def test_func(x):
            return x

        @njit
        def test_func_nb(x):
            return x

        checks.assert_numba_func(test_func_nb)
        with pytest.raises(Exception) as e_info:
            checks.assert_numba_func(test_func)
Example #17
File: base.py Project: jingmouren/vectorbt
    def map(self,
            map_func_nb: tp.RecordMapFunc, *args,
            dtype: tp.Optional[tp.DTypeLike] = None,
            **kwargs) -> MappedArray:
        """Map each record to a scalar value. Returns mapped array.

        See `vectorbt.records.nb.map_records_nb`.

        `**kwargs` are passed to `Records.map_array`."""
        checks.assert_numba_func(map_func_nb)
        mapped_arr = nb.map_records_nb(self.values, map_func_nb, *args)
        mapped_arr = np.asarray(mapped_arr, dtype=dtype)
        return self.map_array(mapped_arr, **kwargs)
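
A minimal sketch, assuming `records` is an existing `Records` instance with a hypothetical field `'size'`; `dtype` only casts the mapped values before they are handed to `Records.map_array`:

```python-repl
>>> import numpy as np
>>> from numba import njit

>>> size_map_nb = njit(lambda record: record['size'])
>>> records.map(size_map_nb, dtype=np.float32)  # mapped values cast to float32 before wrapping
```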
Example #18
File: records.py Project: vicliu6/vectorbt
    def reduce_records(self, reduce_func_nb, *args):
        """Perform a reducing operation over the records of each column.
        
        See `vectorbt.portfolio.nb.reduce_records_nb`."""
        checks.assert_numba_func(reduce_func_nb)

        return self.wrapper.wrap_reduced(
            nb.reduce_records_nb(
                self._records,
                len(self.wrapper.columns),
                self.col_field,
                reduce_func_nb,
                *args))
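
A hypothetical sketch; the exact reducer signature is defined by `vectorbt.portfolio.nb.reduce_records_nb` and is assumed here to receive the records of a single column, with `'pnl'` as a hypothetical record field:

```python-repl
>>> from numba import njit

>>> @njit
... def total_pnl_nb(col_records):
...     total = 0.
...     for r in range(col_records.shape[0]):
...         total += col_records[r]['pnl']  # 'pnl' is a hypothetical field
...     return total

>>> records.reduce_records(total_pnl_nb)  # one reduced value per column
```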
Example #19
    def generate_iteratively(cls, shape, choice_func1_nb, choice_func2_nb,
                             *args, **kwargs):
        """See `vectorbt.signals.nb.generate_iteratively_nb`.

        `**kwargs` will be passed to pandas constructor.

        Example:
            Generate entry and exit signals one after another:

            ```python-repl
            >>> @njit
            ... def choice_func1_nb(col, from_i, to_i):
            ...     return np.array([from_i])
            >>> @njit
            ... def choice_func2_nb(col, from_i, to_i):
            ...     return np.array([from_i])

            >>> entries, exits = pd.DataFrame.vbt.signals.generate_iteratively(
            ...     (5, 3), choice_func1_nb, choice_func2_nb, 
            ...     index=sig.index, columns=sig.columns)
            >>> print(entries)
                            a      b      c
            2020-01-01   True   True   True
            2020-01-02  False  False  False
            2020-01-03   True   True   True
            2020-01-04  False  False  False
            2020-01-05   True   True   True
            >>> print(exits)
                            a      b      c
            2020-01-01  False  False  False
            2020-01-02   True   True   True
            2020-01-03  False  False  False
            2020-01-04   True   True   True
            2020-01-05  False  False  False
            ```"""
        checks.assert_numba_func(choice_func1_nb)
        checks.assert_numba_func(choice_func2_nb)

        if not isinstance(shape, tuple):
            shape = (shape, 1)
        elif isinstance(shape, tuple) and len(shape) == 1:
            shape = (shape[0], 1)

        result1, result2 = nb.generate_iteratively_nb(shape, choice_func1_nb,
                                                      choice_func2_nb, *args)
        if cls.is_series():
            if shape[1] > 1:
                raise ValueError("Use DataFrame accessor")
            return pd.Series(result1[:, 0],
                             **kwargs), pd.Series(result2[:, 0], **kwargs)
        return pd.DataFrame(result1, **kwargs), pd.DataFrame(result2, **kwargs)
Example #20
    def map_reduce_between(self,
                           *args,
                           other=None,
                           map_func_nb=None,
                           reduce_func_nb=None,
                           broadcast_kwargs={}):
        """See `vectorbt.signals.nb.map_reduce_between_nb`. 

        If `other` is specified, see `vectorbt.signals.nb.map_reduce_between_two_nb`.

        Arguments will be broadcast using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        Example:
            Get maximum distance between signals in `signals`:

            ```python-repl
            >>> distance_map_nb = njit(lambda col, prev_i, next_i: next_i - prev_i)
            >>> max_reduce_nb = njit(lambda col, a: np.nanmax(a))

            >>> print(signals.vbt.signals.map_reduce_between(
            ...     map_func_nb=distance_map_nb, reduce_func_nb=max_reduce_nb))
            a    3.0
            b    3.0
            c    NaN
            dtype: float64
            ```"""
        checks.assert_not_none(map_func_nb)
        checks.assert_not_none(reduce_func_nb)
        checks.assert_numba_func(map_func_nb)
        checks.assert_numba_func(reduce_func_nb)

        if other is None:
            # One input array
            result = nb.map_reduce_between_nb(self.to_2d_array(), map_func_nb,
                                              reduce_func_nb, *args)
            if isinstance(self._obj, pd.Series):
                return result[0]
            return pd.Series(result, index=self.columns)
        else:
            # Two input arrays
            obj, other = reshape_fns.broadcast(self._obj, other,
                                               **broadcast_kwargs)
            other.vbt.signals.validate()
            result = nb.map_reduce_between_two_nb(self.to_2d_array(),
                                                  other.vbt.to_2d_array(),
                                                  map_func_nb, reduce_func_nb,
                                                  *args)
            if isinstance(obj, pd.Series):
                return result[0]
            return pd.Series(result, index=obj.vbt.columns)
Example #21
File: records.py Project: vicliu6/vectorbt
    def map_records_to_matrix(self, map_func_nb, *args):
        """Map each record to a value that is then stored in a matrix.
        
        See `vectorbt.portfolio.nb.map_records_to_matrix_nb`."""
        checks.assert_numba_func(map_func_nb)

        return self.wrapper.wrap(
            nb.map_records_to_matrix_nb(
                self._records,
                (len(self.wrapper.index), len(self.wrapper.columns)),
                self.col_field,
                self.row_field,
                map_func_nb,
                *args))
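
A hypothetical sketch; the mapping signature is defined by `vectorbt.portfolio.nb.map_records_to_matrix_nb` and is assumed here to receive a single record, with `'size'` as a hypothetical field:

```python-repl
>>> from numba import njit

>>> size_map_nb = njit(lambda record: record['size'])
>>> records.map_records_to_matrix(size_map_nb)  # index x columns matrix of mapped values
```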
Example #22
    def test_assert_numba_func(self):
        def test_func(x):
            return x

        @njit
        def test_func_nb(x):
            return x

        checks.assert_numba_func(test_func_nb)
        try:
            checks.assert_numba_func(test_func)
        except Exception:
            pass
        else:
            raise AssertionError("assert_numba_func should have raised")
Example #23
    def resample_apply(self,
                       freq,
                       apply_func_nb,
                       *args,
                       on_matrix=False,
                       **kwargs):
        """See `vectorbt.timeseries.nb.groupby_apply_nb` and 
        `vectorbt.timeseries.nb.groupby_apply_matrix_nb` for `on_matrix=True`.

        For `freq`, see `pandas.DataFrame.resample`.

        Example:
            ```python-repl
            >>> mean_nb = njit(lambda col, i, a: np.nanmean(a))
            >>> print(df.vbt.timeseries.resample_apply('2d', mean_nb))
                          a    b    c
            2018-01-01  1.5  4.5  1.5
            2018-01-03  3.5  2.5  2.5
            2018-01-05  5.0  1.0  1.0

            >>> mean_matrix_nb = njit(lambda i, a: np.nanmean(a))
            >>> print(df.vbt.timeseries.resample_apply('2d', 
            ...     mean_matrix_nb, on_matrix=True))
                               a         b         c
            2018-01-01  2.500000  2.500000  2.500000
            2018-01-03  2.833333  2.833333  2.833333
            2018-01-05  2.333333  2.333333  2.333333
            ```"""
        checks.assert_numba_func(apply_func_nb)

        resampled = self._obj.resample(freq, axis=0, **kwargs)
        groups = Dict()
        for i, (k, v) in enumerate(resampled.indices.items()):
            groups[i] = np.asarray(v)
        if on_matrix:
            result = nb.groupby_apply_matrix_nb(self.to_2d_array(), groups,
                                                apply_func_nb, *args)
        else:
            result = nb.groupby_apply_nb(self.to_2d_array(), groups,
                                         apply_func_nb, *args)
        result_obj = self.wrap_array(result,
                                     index=list(resampled.indices.keys()))
        resampled_arr = np.full(
            (resampled.ngroups, self.to_2d_array().shape[1]), np.nan)
        resampled_obj = self.wrap_array(resampled_arr,
                                        index=pd.Index(list(
                                            resampled.groups.keys()),
                                                       freq=freq))
        resampled_obj.loc[result_obj.index] = result_obj.values
        return resampled_obj
Example #24
    def map_reduce_between(self,
                           *args,
                           other=None,
                           map_func_nb=None,
                           reduce_func_nb=None,
                           broadcast_kwargs={}):
        """See `vectorbt.signals.nb.map_reduce_between_nb`. 

        If `other` is specified, see `vectorbt.signals.nb.map_reduce_between_two_nb`.

        Arguments will be broadcast using `vectorbt.base.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        Example:
            Get average distance between signals in `sig`:

            ```python-repl
            >>> distance_map_nb = njit(lambda col, from_i, to_i: to_i - from_i)
            >>> mean_reduce_nb = njit(lambda col, a: np.nanmean(a))

            >>> print(sig.vbt.signals.map_reduce_between(
            ...     map_func_nb=distance_map_nb, reduce_func_nb=mean_reduce_nb))
            a    NaN
            b    2.0
            c    1.0
            dtype: float64
            ```"""
        checks.assert_not_none(map_func_nb)
        checks.assert_not_none(reduce_func_nb)
        checks.assert_numba_func(map_func_nb)
        checks.assert_numba_func(reduce_func_nb)

        if other is None:
            # One input array
            result = nb.map_reduce_between_nb(self.to_2d_array(), map_func_nb,
                                              reduce_func_nb, *args)
            if isinstance(self._obj, pd.Series):
                return result[0]
            return pd.Series(result, index=self.columns)
        else:
            # Two input arrays
            obj, other = reshape_fns.broadcast(self._obj, other,
                                               **broadcast_kwargs)
            checks.assert_dtype(other, np.bool_)
            result = nb.map_reduce_between_two_nb(self.to_2d_array(),
                                                  other.vbt.to_2d_array(),
                                                  map_func_nb, reduce_func_nb,
                                                  *args)
            return self.wrap_reduced(result)
Example #25
File: base.py Project: khprash/vectorbt
    def map(self, map_func_nb, *args, idx_arr=None):
        """Map each record to a scalar value. Returns `MappedArray`.

        See `vectorbt.records.nb.map_records_nb`."""
        checks.assert_numba_func(map_func_nb)

        mapped_arr = nb.map_records_nb(self.records_arr, map_func_nb, *args)
        if idx_arr is None:
            if self.idx_field is not None:
                idx_arr = self.records_arr[self.idx_field]
            else:
                idx_arr = None
        return MappedArray(mapped_arr,
                           self.records_arr['col'],
                           self.wrapper,
                           idx_arr=idx_arr)
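
A minimal sketch, assuming `records` is an existing records instance with a hypothetical field `'value'`; by default the index array is taken from `idx_field`, but it can be supplied explicitly:

```python-repl
>>> from numba import njit

>>> value_map_nb = njit(lambda record: record['value'])
>>> records.map(value_map_nb)                          # idx_arr taken from idx_field
>>> records.map(value_map_nb, idx_arr=custom_idx_arr)  # custom_idx_arr: one row index per record
```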
Example #26
    def reduce_to_array(self, reduce_func_nb, *args, **kwargs):
        """See `vectorbt.generic.nb.reduce_to_array_nb`.

        `**kwargs` will be passed to `vectorbt.base.array_wrapper.ArrayWrapper.wrap_reduced`.

        Example:
            ```python-repl
            >>> min_max_nb = njit(lambda col, a: np.array([np.nanmin(a), np.nanmax(a)]))
            >>> df.vbt.reduce_to_array(min_max_nb, index=['min', 'max'])
                   a    b    c
            min  1.0  1.0  1.0
            max  5.0  5.0  3.0
            ```"""
        checks.assert_numba_func(reduce_func_nb)

        out = nb.reduce_to_array_nb(self.to_2d_array(), reduce_func_nb, *args)
        return self.wrap_reduced(out, **kwargs)
Example #27
    def reduce_to_array(self, reduce_func_nb, *args, **kwargs):
        """See `vectorbt.timeseries.nb.reduce_to_array_nb`.

        `**kwargs` will be passed to `TimeSeries_Accessor.wrap_reduced_array`.

        Example:
            ```python-repl
            >>> min_max_nb = njit(lambda col, a: np.array([np.nanmin(a), np.nanmax(a)]))
            >>> print(df.vbt.timeseries.reduce_to_array(min_max_nb, index=['min', 'max']))
                   a    b    c
            min  1.0  1.0  1.0
            max  5.0  5.0  3.0
            ```"""
        checks.assert_numba_func(reduce_func_nb)

        result = nb.reduce_to_array_nb(self.to_2d_array(), reduce_func_nb, *args)
        return self.wrap_reduced_array(result, **kwargs)
Example #28
    def reduce(self, reduce_func_nb, *args, **kwargs):
        """See `vectorbt.timeseries.nb.reduce_nb`.

        `**kwargs` will be passed to `TimeSeries_Accessor.wrap_reduced`.

        Example:
            ```python-repl
            >>> mean_nb = njit(lambda col, a: np.nanmean(a))
            >>> print(df.vbt.timeseries.reduce(mean_nb))
            a    3.0
            b    3.0
            c    1.8
            dtype: float64
            ```"""
        checks.assert_numba_func(reduce_func_nb)

        result = nb.reduce_nb(self.to_2d_array(), reduce_func_nb, *args)
        return self.wrap_reduced(result, **kwargs)
Example #29
    def filter(self, filter_func_nb, *args):
        """See `vectorbt.timeseries.nb.filter_nb`.

        Example:
            ```python-repl
            >>> greater_nb = njit(lambda col, i, a: a > 2)
            >>> print(df.vbt.timeseries.filter(greater_nb))
                          a    b    c
            2018-01-01  NaN  5.0  NaN
            2018-01-02  NaN  4.0  NaN
            2018-01-03  3.0  3.0  3.0
            2018-01-04  4.0  NaN  NaN
            2018-01-05  5.0  NaN  NaN
            ```"""
        checks.assert_numba_func(filter_func_nb)

        result = nb.filter_nb(self.to_2d_array(), filter_func_nb, *args)
        return self.wrap(result)
Example #30
    def applymap(self, apply_func_nb, *args):
        """See `vectorbt.timeseries.nb.applymap_nb`.

        Example:
            ```python-repl
            >>> multiply_nb = njit(lambda col, i, a: a ** 2)
            >>> print(df.vbt.timeseries.applymap(multiply_nb))
                           a     b    c
            2018-01-01   1.0  25.0  1.0
            2018-01-02   4.0  16.0  4.0
            2018-01-03   9.0   9.0  9.0
            2018-01-04  16.0   4.0  4.0
            2018-01-05  25.0   1.0  1.0
            ```"""
        checks.assert_numba_func(apply_func_nb)

        result = nb.applymap_nb(self.to_2d_array(), apply_func_nb, *args)
        return self.wrap(result)