Example #1
0
File: nb.py  Project: khprash/vectorbt
def generate_take_profit_exits_nb(entries, ts, stops, first):
    """For each `True` in `entries`, find the first value in `ts` that is above the stop.

    For arguments, see `generate_stop_loss_nb`."""
    # Run `take_profit_apply_nb` once per stop configuration and
    # concatenate the resulting exit arrays along columns.
    n_configs = len(stops)
    return combine_fns.apply_and_concat_one_nb(
        n_configs, take_profit_apply_nb, entries, ts, stops, first)
Example #2
0
File: nb.py  Project: yamen/vectorbt
def generate_sl_ex_nb(entries, ts, stops, trailing=False, first=True):
    """Generate (trailing) stop loss exits using `generate_ex_nb`.

    For each signal in `entries`, find the first value in `ts` that is below the (trailing) stop.

    Args:
        entries (array_like): 2-dim boolean array of entry signals.
        ts (array_like): 2-dim time series array such as price.
        stops (array_like): 3-dim array of stop values.

            !!! note
                `stops` must be a 3D array - an array out of 2-dim arrays each of `ts` shape.
                Each of these arrays will correspond to a different stop configuration.
        trailing (bool): If `True`, uses trailing stop, otherwise constant stop.
        first (bool): If `True`, selects the first signal, otherwise returns the whole sequence.

    Example:
        ```python-repl
        >>> import numpy as np
        >>> from vectorbt.signals.nb import generate_sl_ex_nb
        >>> from vectorbt.base.reshape_fns import broadcast_to_array_of

        >>> entries = np.asarray([False, True, False, False, False])[:, None]
        >>> ts = np.asarray([1, 2, 3, 2, 1])[:, None]
        >>> stops = broadcast_to_array_of([0.1, 0.5], ts)

        >>> generate_sl_ex_nb(entries, ts, stops, trailing=True, first=True)
        [[False False]
         [False False]
         [False False]
         [ True False]
         [False  True]]
        ```"""
    # One invocation of `sl_apply_nb` per stop configuration; results are
    # concatenated column-wise into a single 2-dim output.
    n_configs = len(stops)
    return combine_fns.apply_and_concat_one_nb(
        n_configs,
        sl_apply_nb,
        entries,
        ts,
        stops,
        trailing,
        first
    )
Example #3
0
    def apply_and_concat(self,
                         ntimes,
                         *args,
                         apply_func=None,
                         to_2d=False,
                         keys=None,
                         wrap_kwargs=None,
                         **kwargs):
        """Apply `apply_func` `ntimes` times and concatenate the results along columns.
        See `vectorbt.base.combine_fns.apply_and_concat_one`.

        Arguments `*args` and `**kwargs` will be directly passed to `apply_func`.
        If `to_2d` is True, 2-dimensional NumPy arrays will be passed, otherwise as is.
        Use `keys` as the outermost level.

        !!! note
            The resulted arrays to be concatenated must have the same shape as broadcast input arrays.

        ## Example

        ```python-repl
        >>> import vectorbt as vbt
        >>> import pandas as pd

        >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])
        >>> df.vbt.apply_and_concat(3, [1, 2, 3],
        ...     apply_func=lambda i, a, b: a * b[i], keys=['c', 'd', 'e'])
              c       d       e
           a  b   a   b   a   b
        x  3  4   6   8   9  12
        y  5  6  10  12  15  18
        ```
        """
        checks.assert_not_none(apply_func)
        # Optionally reshape the wrapped object to a raw 2-dim array
        raw_obj = reshape_fns.to_2d(self._obj, raw=True) if to_2d else np.asarray(self._obj)
        # Dispatch to the Numba or plain Python concatenation loop
        if checks.is_numba_func(apply_func):
            concat = combine_fns.apply_and_concat_one_nb
        else:
            concat = combine_fns.apply_and_concat_one
        out = concat(ntimes, apply_func, raw_obj, *args, **kwargs)
        # Build column hierarchy: user keys, or a default 'apply_idx' range,
        # become the outermost level above the original columns
        if keys is not None:
            outer_level = keys
        else:
            outer_level = pd.Index(np.arange(ntimes), name='apply_idx')
        new_columns = index_fns.combine_indexes(outer_level, self.wrapper.columns)
        return self.wrapper.wrap(
            out,
            group_by=False,
            **merge_dicts(dict(columns=new_columns), wrap_kwargs))
Example #4
0
File: factory.py  Project: wcy/vectorbt
        def custom_func(input_list, in_output_list, param_list, *args, input_shape=None, flex_2d=None,
                        entry_args=None, exit_args=None, cache_args=None, entry_kwargs=None,
                        exit_kwargs=None, cache_kwargs=None, return_cache=False, use_cache=None, **_kwargs):
            """Resolve and distribute arguments, optionally build or reuse a cache, and run
            the Numba apply function once per parameter combination, concatenating the
            resulting signal arrays along columns.

            NOTE(review): this closure reads many names from the enclosing factory scope
            (`exit_only`, `iteratively`, `*_input_names`, `*_in_output_names`,
            `*_param_names`, `entry_settings`/`exit_settings`/`cache_settings`,
            `cache_func`, `apply_func_nb`, `to_typed_list`) — their exact semantics
            cannot be confirmed from this chunk alone.
            """
            # Get arguments
            if len(input_list) == 0:
                if input_shape is None:
                    raise ValueError("Pass input_shape if no input time series passed")
            else:
                # Infer the target shape from the first input time series
                input_shape = input_list[0].shape

            # Default the per-function positional-argument tuples
            if entry_args is None:
                entry_args = ()
            if exit_args is None:
                exit_args = ()
            if cache_args is None:
                cache_args = ()
            # In exit-only mode, bare *args are treated as the exit function's args
            if exit_only:
                if len(exit_args) > 0:
                    raise ValueError("Use *args instead of exit_args when exit_only=True")
                exit_args = args
            else:
                if len(args) > 0:
                    raise ValueError("*args can be only used when exit_only=True")

            # Default the per-function keyword-argument dicts
            if entry_kwargs is None:
                entry_kwargs = {}
            if exit_kwargs is None:
                exit_kwargs = {}
            if cache_kwargs is None:
                cache_kwargs = {}
            # In exit-only mode, bare **kwargs are treated as the exit function's kwargs
            if exit_only:
                if len(exit_kwargs) > 0:
                    raise ValueError("Use **kwargs instead of exit_kwargs when exit_only=True")
                exit_kwargs = _kwargs
            else:
                if len(_kwargs) > 0:
                    raise ValueError("**kwargs can be only used when exit_only=True")

            # Shared keyword defaults merged into entry, exit and cache kwargs
            kwargs_defaults = dict(
                input_shape=input_shape,
                wait=1,
                first=True,
                flex_2d=flex_2d,
            )
            entry_kwargs = merge_dicts(kwargs_defaults, entry_kwargs)
            exit_kwargs = merge_dicts(kwargs_defaults, exit_kwargs)
            cache_kwargs = merge_dicts(kwargs_defaults, cache_kwargs)
            entry_wait = entry_kwargs['wait']
            exit_wait = exit_kwargs['wait']

            # Distribute arguments across functions
            # Inputs: pick out, by name, the inputs each function declared it needs
            entry_input_tuple = ()
            exit_input_tuple = ()
            cache_input_tuple = ()
            for input_name in entry_input_names:
                entry_input_tuple += (input_list[input_names.index(input_name)],)
            for input_name in exit_input_names:
                exit_input_tuple += (input_list[input_names.index(input_name)],)
            for input_name in cache_input_names:
                cache_input_tuple += (input_list[input_names.index(input_name)],)

            # In-place outputs: same name-based selection
            entry_in_output_list = []
            exit_in_output_list = []
            cache_in_output_list = []
            for in_output_name in entry_in_output_names:
                entry_in_output_list.append(in_output_list[in_output_names.index(in_output_name)])
            for in_output_name in exit_in_output_names:
                exit_in_output_list.append(in_output_list[in_output_names.index(in_output_name)])
            for in_output_name in cache_in_output_names:
                cache_in_output_list.append(in_output_list[in_output_names.index(in_output_name)])

            # Parameters: same name-based selection
            entry_param_list = []
            exit_param_list = []
            cache_param_list = []
            for param_name in entry_param_names:
                entry_param_list.append(param_list[param_names.index(param_name)])
            for param_name in exit_param_names:
                exit_param_list.append(param_list[param_names.index(param_name)])
            for param_name in cache_param_names:
                cache_param_list.append(param_list[param_names.index(param_name)])

            # Number of parameter combinations; zip turns per-parameter lists into
            # one tuple per combination
            n_params = len(param_list[0]) if len(param_list) > 0 else 1
            entry_in_output_tuples = list(zip(*entry_in_output_list))
            exit_in_output_tuples = list(zip(*exit_in_output_list))
            entry_param_tuples = list(zip(*entry_param_list))
            exit_param_tuples = list(zip(*exit_param_list))

            def _build_more_args(func_settings, func_kwargs):
                """Build the extra positional args requested via 'pass_kwargs' in the
                settings dict; keys starting with 'temp_idx_arr' default to an empty
                per-row integer buffer unless overridden in `func_kwargs`."""
                pass_kwargs = func_settings.get('pass_kwargs', [])
                more_args = ()
                for key in pass_kwargs:
                    value = None
                    if isinstance(key, tuple):
                        # (key, default) pair
                        key, value = key
                    else:
                        if key.startswith('temp_idx_arr'):
                            value = np.empty((input_shape[0],), dtype=np.int_)
                    value = func_kwargs.get(key, value)
                    more_args += (value,)
                return more_args

            entry_more_args = _build_more_args(entry_settings, entry_kwargs)
            exit_more_args = _build_more_args(exit_settings, exit_kwargs)
            cache_more_args = _build_more_args(cache_settings, cache_kwargs)

            # Caching
            cache = use_cache
            if cache is None and cache_func is not None:
                _cache_in_output_list = cache_in_output_list
                _cache_param_list = cache_param_list
                if checks.is_numba_func(cache_func):
                    # Numba needs typed lists rather than reflected Python lists
                    if len(_cache_in_output_list) > 0:
                        _cache_in_output_list = [to_typed_list(in_outputs) for in_outputs in _cache_in_output_list]
                    if len(_cache_param_list) > 0:
                        _cache_param_list = [to_typed_list(params) for params in _cache_param_list]

                cache = cache_func(
                    *cache_input_tuple,
                    *_cache_in_output_list,
                    *_cache_param_list,
                    *cache_args,
                    *cache_more_args
                )
            # Allow the caller to just build the cache and reuse it across calls
            if return_cache:
                return cache
            if cache is None:
                cache = ()
            # Normalize cache to a tuple so it can be concatenated into the arg lists
            if not isinstance(cache, (tuple, list, List)):
                cache = (cache,)

            entry_cache = ()
            exit_cache = ()
            if entry_settings.get('pass_cache', False):
                entry_cache = cache
            if exit_settings.get('pass_cache', False):
                exit_cache = cache

            # Apply and concatenate
            if exit_only and not iteratively:
                # Single-output path: one exit array per parameter combination
                if len(exit_in_output_names) > 0:
                    _exit_in_output_tuples = (to_typed_list(exit_in_output_tuples),)
                else:
                    _exit_in_output_tuples = ()
                if len(exit_param_names) > 0:
                    _exit_param_tuples = (to_typed_list(exit_param_tuples),)
                else:
                    _exit_param_tuples = ()

                return combine_fns.apply_and_concat_one_nb(
                    n_params,
                    apply_func_nb,
                    input_list[0],
                    exit_wait,
                    exit_input_tuple,
                    *_exit_in_output_tuples,
                    *_exit_param_tuples,
                    exit_args + exit_more_args + exit_cache
                )

            else:
                # Two-output path: an entry array and an exit array per combination
                if len(entry_in_output_names) > 0:
                    _entry_in_output_tuples = (to_typed_list(entry_in_output_tuples),)
                else:
                    _entry_in_output_tuples = ()
                if len(entry_param_names) > 0:
                    _entry_param_tuples = (to_typed_list(entry_param_tuples),)
                else:
                    _entry_param_tuples = ()
                if len(exit_in_output_names) > 0:
                    _exit_in_output_tuples = (to_typed_list(exit_in_output_tuples),)
                else:
                    _exit_in_output_tuples = ()
                if len(exit_param_names) > 0:
                    _exit_param_tuples = (to_typed_list(exit_param_tuples),)
                else:
                    _exit_param_tuples = ()

                return combine_fns.apply_and_concat_multiple_nb(
                    n_params,
                    apply_func_nb,
                    input_shape,
                    entry_wait,
                    exit_wait,
                    entry_input_tuple,
                    exit_input_tuple,
                    *_entry_in_output_tuples,
                    *_exit_in_output_tuples,
                    *_entry_param_tuples,
                    *_exit_param_tuples,
                    entry_args + entry_more_args + entry_cache,
                    exit_args + exit_more_args + exit_cache
                )
Example #5
0
    def apply_and_concat(self,
                         ntimes: int,
                         *args,
                         apply_func: tp.Optional[tp.Callable] = None,
                         keep_pd: bool = False,
                         to_2d: bool = False,
                         numba_loop: bool = False,
                         use_ray: bool = False,
                         keys: tp.Optional[tp.IndexLike] = None,
                         wrap_kwargs: tp.KwargsLike = None,
                         **kwargs) -> tp.Frame:
        """Apply `apply_func` `ntimes` times and concatenate the results along columns.
        See `vectorbt.base.combine_fns.apply_and_concat_one`.

        Args:
            ntimes (int): Number of times to call `apply_func`.
            *args: Variable arguments passed to `apply_func`.
            apply_func (callable): Apply function.

                Can be Numba-compiled.
            keep_pd (bool): Whether to keep inputs as pandas objects, otherwise convert to NumPy arrays.
            to_2d (bool): Whether to reshape inputs to 2-dim arrays, otherwise keep as-is.
            numba_loop (bool): Whether to loop using Numba.

                Set to True when iterating large number of times over small input,
                but note that Numba doesn't support variable keyword arguments.
            use_ray (bool): Whether to use Ray to execute `combine_func` in parallel.

                Only works with `numba_loop` set to False and `concat` is set to True.
                See `vectorbt.base.combine_fns.ray_apply` for related keyword arguments.
            keys (index_like): Outermost column level.
            wrap_kwargs (dict): Keyword arguments passed to `vectorbt.base.array_wrapper.ArrayWrapper.wrap`.
            **kwargs: Keyword arguments passed to `combine_func`.

        !!! note
            The resulted arrays to be concatenated must have the same shape as broadcast input arrays.

        ## Example

        ```python-repl
        >>> import vectorbt as vbt
        >>> import pandas as pd

        >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])
        >>> df.vbt.apply_and_concat(3, [1, 2, 3],
        ...     apply_func=lambda i, a, b: a * b[i], keys=['c', 'd', 'e'])
              c       d       e
           a  b   a   b   a   b
        x  3  4   6   8   9  12
        y  5  6  10  12  15  18
        ```

        Use Ray for small inputs and large processing times:

        ```python-repl
        >>> def apply_func(i, a):
        ...     time.sleep(1)
        ...     return a

        >>> sr = pd.Series([1, 2, 3])

        >>> %timeit sr.vbt.apply_and_concat(3, apply_func=apply_func)
        3.01 s ± 2.15 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)

        >>> %timeit sr.vbt.apply_and_concat(3, apply_func=apply_func, use_ray=True)
        1.01 s ± 2.31 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)
        ```
        """
        checks.assert_not_none(apply_func)
        # Prepare the input: optionally 2-dim, optionally raw NumPy
        if to_2d:
            obj = reshape_fns.to_2d(self.obj, raw=not keep_pd)
        elif keep_pd:
            obj = self.obj
        else:
            obj = np.asarray(self.obj)
        # Pick the execution engine: Numba loop, Ray, or the plain Python loop
        if checks.is_numba_func(apply_func) and numba_loop:
            if use_ray:
                raise ValueError("Ray cannot be used within Numba")
            result = combine_fns.apply_and_concat_one_nb(
                ntimes, apply_func, obj, *args, **kwargs)
        elif use_ray:
            result = combine_fns.apply_and_concat_one_ray(
                ntimes, apply_func, obj, *args, **kwargs)
        else:
            result = combine_fns.apply_and_concat_one(
                ntimes, apply_func, obj, *args, **kwargs)
        # Build column hierarchy: `keys` (or a default 'apply_idx' range)
        # becomes the outermost level above the original columns
        if keys is None:
            keys = pd.Index(np.arange(ntimes), name='apply_idx')
        new_columns = index_fns.combine_indexes([keys, self.wrapper.columns])
        return self.wrapper.wrap(
            result,
            group_by=False,
            **merge_dicts(dict(columns=new_columns), wrap_kwargs))
Example #6
0
def generate_tp_ex_nb(entries, ts, stops, first=True):
    """Generate take profit exits using `generate_ex_nb`.

    For arguments, see `generate_sl_ex_nb`."""
    # One run of `tp_apply_nb` per stop configuration, concatenated column-wise.
    n_configs = len(stops)
    return combine_fns.apply_and_concat_one_nb(
        n_configs, tp_apply_nb, entries, ts, stops, first)