Example #1
0
    def __init__(self,
                 main_price,
                 init_capital,
                 orders,
                 cash,
                 shares,
                 freq=None,
                 year_freq=None,
                 levy_alpha=None,
                 risk_free=None,
                 required_return=None,
                 cutoff=None,
                 factor_returns=None,
                 incl_unrealized_stats=False):
        """Build the portfolio state from price, capital, order, cash and
        share data, validating that all time series share the same metadata
        as `main_price`.

        Any statistics parameter left as `None` falls back to
        `defaults.portfolio`. Raises `ValueError` when neither the index
        nor the explicit arguments yield a usable `freq`/`year_freq`.
        """
        # Perform checks: per-column data must line up with main_price
        checks.assert_type(main_price, (pd.Series, pd.DataFrame))
        if checks.is_frame(main_price):
            checks.assert_type(init_capital, pd.Series)
            checks.assert_same(main_price.columns, init_capital.index)
        else:
            checks.assert_ndim(init_capital, 0)
        checks.assert_same_meta(main_price, cash)
        checks.assert_same_meta(main_price, shares)

        # Store passed arguments
        self._main_price = main_price
        self._init_capital = init_capital
        self._orders = orders
        self._cash = cash
        self._shares = shares
        self._incl_unrealized_stats = incl_unrealized_stats

        freq = main_price.vbt(freq=freq).freq
        if freq is None:
            raise ValueError(
                "Couldn't parse the frequency of index. You must set `freq`.")
        self._freq = freq

        year_freq = main_price.vbt.returns(year_freq=year_freq).year_freq
        # Bug fix: this condition previously re-checked `freq`, so a
        # missing `year_freq` slipped through silently.
        if year_freq is None:
            raise ValueError("You must set `year_freq`.")
        self._year_freq = year_freq

        # Parameters: fall back to global portfolio defaults when not given
        self._levy_alpha = defaults.portfolio[
            'levy_alpha'] if levy_alpha is None else levy_alpha
        self._risk_free = defaults.portfolio[
            'risk_free'] if risk_free is None else risk_free
        self._required_return = defaults.portfolio[
            'required_return'] if required_return is None else required_return
        self._cutoff = defaults.portfolio[
            'cutoff'] if cutoff is None else cutoff
        self._factor_returns = defaults.portfolio[
            'factor_returns'] if factor_returns is None else factor_returns

        # Supercharge
        PandasIndexer.__init__(self, _indexing_func)
        self.wrapper = ArrayWrapper.from_obj(main_price, freq=freq)
Example #2
0
def perform_init_checks(ts_list, output_list, param_list, mapper_list, name):
    """Perform checks on objects created by running or slicing an indicator."""
    reference = ts_list[0]
    checks.assert_type(reference, (pd.Series, pd.DataFrame))
    # Every input and output time series must match the reference metadata
    for series in ts_list + output_list:
        checks.assert_same_meta(reference, series)
    # All parameter arrays must agree in shape with the first one
    first_params = param_list[0]
    for params in param_list:
        checks.assert_same_shape(first_params, params)
    # Mappers are Series indexed like the columns of the reference
    reference_columns = reshape_fns.to_2d(reference).iloc[0, :]
    for mapper in mapper_list:
        checks.assert_type(mapper, pd.Series)
        checks.assert_same_index(reference_columns, mapper)
    checks.assert_type(name, str)
Example #3
0
    def __init__(self, ts, cash, shares, investment, slippage, commission):
        """Validate and store the backtest inputs.

        `ts` must be a pandas Series/DataFrame and pass the vbt time series
        validation; `cash` and `shares` must share its metadata.
        """
        checks.assert_type(ts, (pd.Series, pd.DataFrame))
        ts.vbt.timeseries.validate()

        # Cash and share series must line up with the price series
        for companion in (cash, shares):
            checks.assert_same_meta(ts, companion)

        self.ts = ts
        self.cash = cash
        self.shares = shares
        self.investment = investment
        self.slippage = slippage
        self.commission = commission
Example #4
0
    def __init__(self, price, cash, shares, init_capital, paid_fees,
                 paid_slippage):
        """Store simulation results after checking that the cash and share
        series share the same metadata as `price`."""
        checks.assert_type(price, (pd.Series, pd.DataFrame))
        for companion in (cash, shares):
            checks.assert_same_meta(price, companion)

        self.price = price
        self.cash = cash
        self.shares = shares
        self.init_capital = init_capital
        self.paid_fees = paid_fees
        self.paid_slippage = paid_slippage

        # Wrap this object itself so it exposes the array-wrapper interface
        ArrayWrapper.__init__(self, self)
Example #5
0
    def __init__(self,
                 price,
                 init_capital,
                 order_records,
                 cash,
                 shares,
                 data_freq=None,
                 year_freq=None,
                 risk_free=None,
                 required_return=None,
                 cutoff=None,
                 factor_returns=None):
        """Validate inputs, then store the portfolio state and the
        parameters used for return statistics."""

        def pick(value, key):
            # Fall back to the global portfolio defaults when not supplied
            return defaults.portfolio[key] if value is None else value

        # Perform checks
        checks.assert_type(price, (pd.Series, pd.DataFrame))
        checks.assert_type(order_records, np.ndarray)
        checks.assert_same_shape(order_records, OrderRecord, axis=(1, 0))
        for companion in (cash, shares):
            checks.assert_same_meta(price, companion)

        # Main parameters
        self._price = price
        self._init_capital = init_capital
        self._order_records = order_records
        self._cash = cash
        self._shares = shares

        # Other parameters
        if data_freq is None:
            data_freq = price.vbt.timeseries.timedelta
        else:
            data_freq = pd.to_timedelta(data_freq)
        self._data_freq = data_freq
        year_freq = pd.to_timedelta(pick(year_freq, 'year_freq'))
        self._year_freq = year_freq
        self._ann_factor = year_freq / data_freq
        self._risk_free = pick(risk_free, 'risk_free')
        self._required_return = pick(required_return, 'required_return')
        self._cutoff = pick(cutoff, 'cutoff')
        if factor_returns is not None:
            factor_returns = reshape_fns.broadcast_to(factor_returns, price)
        self._factor_returns = factor_returns

        # Supercharge
        self.wrapper = TSRArrayWrapper.from_obj(price)
Example #6
0
def perform_init_checks(ts_list, output_list, param_list, mapper_list, name):
    """Perform checks on objects created by running or slicing an indicator."""
    for ts in ts_list:
        checks.assert_type(ts, (pd.Series, pd.DataFrame))
        ts.vbt.timeseries.validate()
    # Hoist the concatenation out of the loop: the original rebuilt
    # `ts_list + output_list` twice on every iteration.
    all_ts = ts_list + output_list
    # Compare each adjacent pair so all series share the same metadata
    for prev_ts, next_ts in zip(all_ts, all_ts[1:]):
        checks.assert_same_meta(prev_ts, next_ts)
    for prev_params, next_params in zip(param_list, param_list[1:]):
        checks.assert_same_shape(prev_params, next_params)
    for mapper in mapper_list:
        checks.assert_type(mapper, pd.Series)
        checks.assert_same_index(
            reshape_fns.to_2d(ts_list[0]).iloc[0, :], mapper)
    checks.assert_type(name, str)
Example #7
0
    def __init__(self,
                 price,
                 cash,
                 shares,
                 init_capital,
                 fees_paid,
                 slippage_paid,
                 data_freq=None,
                 year_freq=None,
                 risk_free=None,
                 required_return=None,
                 cutoff=None,
                 factor_returns=None):
        """Validate the simulated time series and store them together with
        the parameters used for return statistics."""

        def pick(value, key):
            # Fall back to the global portfolio defaults when not supplied
            return defaults.portfolio[key] if value is None else value

        checks.assert_type(price, (pd.Series, pd.DataFrame))
        # Every companion series must share metadata with the price series
        for companion in (cash, shares, fees_paid, slippage_paid):
            checks.assert_same_meta(price, companion)

        # Time series
        self._price = price
        self._cash = cash
        self._shares = shares
        self._fees_paid = fees_paid
        self._slippage_paid = slippage_paid

        # User-defined parameters
        self._init_capital = init_capital
        if data_freq is None:
            data_freq = price.vbt.timeseries.timedelta
        else:
            data_freq = pd.to_timedelta(data_freq)
        self._data_freq = data_freq
        year_freq = pd.to_timedelta(pick(year_freq, 'year_freq'))
        self._year_freq = year_freq
        self._ann_factor = year_freq / data_freq
        self._risk_free = pick(risk_free, 'risk_free')
        self._required_return = pick(required_return, 'required_return')
        self._cutoff = pick(cutoff, 'cutoff')
        if factor_returns is not None:
            factor_returns = reshape_fns.broadcast_to(factor_returns, price)
        self._factor_returns = factor_returns

        ArrayWrapper.__init__(self, self.price)
Example #8
0
    def combine_with_multiple(self,
                              others,
                              *args,
                              combine_func=None,
                              pass_2d=False,
                              concat=False,
                              broadcast_kwargs=None,
                              as_columns=None,
                              **kwargs):
        """Combine with `others` using `combine_func`.

        All arguments will be broadcasted using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        If `concat` is `True`, concatenate the results along columns, 
        see `vectorbt.utils.combine_fns.combine_and_concat`.
        Otherwise, pairwise combine into a Series/DataFrame of the same shape, 
        see `vectorbt.utils.combine_fns.combine_multiple`.

        Arguments `*args` and `**kwargs` will be directly passed to `combine_func`. 
        If `pass_2d` is `True`, 2-dimensional NumPy arrays will be passed, otherwise as is.
        Use `as_columns` as a top-level column level.

        !!! note
            If `combine_func` is Numba-compiled, will broadcast using `writeable=True` and
            copy using `order='C'` flags, which can lead to an expensive computation overhead if
            passed objects are large and have different shape/memory order. You also must ensure 
            that all objects have the same data type.

            Also remember to bring each in `*args` to a Numba-compatible format.

        Example:
            ```python-repl
            >>> import pandas as pd
            >>> sr = pd.Series([1, 2], index=['x', 'y'])
            >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

            >>> print(sr.vbt.combine_with_multiple([df, df*2], 
            ...     combine_func=lambda x, y: x + y))
                a   b
            x  10  13
            y  17  20

            >>> print(sr.vbt.combine_with_multiple([df, df*2], 
            ...     combine_func=lambda x, y: x + y, concat=True, as_columns=['c', 'd']))
                  c       d    
               a  b   a   b
            x  4  5   7   9
            y  7  8  12  14
            ```"""
        # Fix: avoid a mutable default argument (`{}`) being shared
        # across calls; use None as the sentinel instead.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        others = tuple(
            map(lambda x: x._obj
                if isinstance(x, Base_Accessor) else x, others))
        checks.assert_not_none(combine_func)
        checks.assert_type(others, Iterable)
        # Broadcast arguments
        if checks.is_numba_func(combine_func):
            # Numba requires writable arrays
            broadcast_kwargs = {**dict(writeable=True), **broadcast_kwargs}
            # Plus all of our arrays must be in the same order
            broadcast_kwargs['copy_kwargs'] = {
                **dict(order='C'),
                **broadcast_kwargs.get('copy_kwargs', {})
            }
        new_obj, *new_others = reshape_fns.broadcast(self._obj, *others,
                                                     **broadcast_kwargs)
        # Optionally cast to 2d array
        if pass_2d:
            bc_arrays = tuple(
                map(lambda x: reshape_fns.to_2d(np.asarray(x)),
                    (new_obj, *new_others)))
        else:
            bc_arrays = tuple(
                map(lambda x: np.asarray(x), (new_obj, *new_others)))
        if concat:
            # Concat the results horizontally
            if checks.is_numba_func(combine_func):
                for i in range(1, len(bc_arrays)):
                    checks.assert_same_meta(bc_arrays[i - 1], bc_arrays[i])
                result = combine_fns.combine_and_concat_nb(
                    bc_arrays[0], bc_arrays[1:], combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_and_concat(
                    bc_arrays[0], bc_arrays[1:], combine_func, *args, **kwargs)
            columns = new_obj.vbt.columns
            if as_columns is not None:
                new_columns = index_fns.combine_indexes(as_columns, columns)
            else:
                new_columns = index_fns.tile_index(columns, len(others))
            return new_obj.vbt.wrap_array(result, columns=new_columns)
        else:
            # Combine arguments pairwise into one object
            if checks.is_numba_func(combine_func):
                for i in range(1, len(bc_arrays)):
                    checks.assert_same_dtype(bc_arrays[i - 1], bc_arrays[i])
                result = combine_fns.combine_multiple_nb(
                    bc_arrays, combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_multiple(bc_arrays, combine_func,
                                                      *args, **kwargs)
            return new_obj.vbt.wrap_array(result)
Example #9
0
    def combine_with_multiple(self,
                              others,
                              *args,
                              combine_func=None,
                              concat=False,
                              broadcast_kwargs=None,
                              as_columns=None,
                              **kwargs):
        """Broadcast with other objects to the same shape and combine them all pairwise.

        The returned shape is the same as broadcasted shape if concat is False.
        The returned shape is concatenation of broadcasted shapes if concat is True."""
        # Fix: avoid a mutable default argument (`{}`) being shared
        # across calls; use None as the sentinel instead.
        if broadcast_kwargs is None:
            broadcast_kwargs = {}
        others = tuple(
            map(lambda x: x._obj
                if isinstance(x, Base_Accessor) else x, others))
        checks.assert_not_none(combine_func)
        checks.assert_type(others, Iterable)
        # Broadcast arguments
        if checks.is_numba_func(combine_func):
            # Numba requires writable arrays
            broadcast_kwargs = {**dict(writeable=True), **broadcast_kwargs}
            # Plus all of our arrays must be in the same order
            broadcast_kwargs['copy_kwargs'] = {
                **dict(order='C'),
                **broadcast_kwargs.get('copy_kwargs', {})
            }
        new_obj, *new_others = reshape_fns.broadcast(self._obj, *others,
                                                     **broadcast_kwargs)
        broadcasted = tuple(map(np.asarray, (new_obj, *new_others)))
        if concat:
            # Concat the results horizontally
            if checks.is_numba_func(combine_func):
                for i in range(1, len(broadcasted)):
                    # NOTE: all inputs must have the same dtype
                    checks.assert_same_meta(broadcasted[i - 1], broadcasted[i])
                result = combine_fns.combine_and_concat_nb(
                    broadcasted[0], broadcasted[1:], combine_func, *args,
                    **kwargs)
            else:
                result = combine_fns.combine_and_concat(
                    broadcasted[0], broadcasted[1:], combine_func, *args,
                    **kwargs)
            if as_columns is not None:
                new_columns = index_fns.combine(
                    as_columns,
                    reshape_fns.to_2d(new_obj).columns)
            else:
                new_columns = index_fns.tile(
                    reshape_fns.to_2d(new_obj).columns, len(others))
            return new_obj.vbt.wrap_array(result, columns=new_columns)
        else:
            # Combine arguments pairwise into one object
            if checks.is_numba_func(combine_func):
                for i in range(1, len(broadcasted)):
                    # NOTE: all inputs must have the same dtype
                    checks.assert_same_dtype(broadcasted[i - 1],
                                             broadcasted[i])
                result = combine_fns.combine_multiple_nb(
                    broadcasted, combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_multiple(broadcasted,
                                                      combine_func, *args,
                                                      **kwargs)
            return new_obj.vbt.wrap_array(result)
Example #10
0
    def test_assert_same_meta(self):
        """Matching metadata must pass; mismatched type/shape/index/columns
        must raise.

        Bug fix: the previous `try: check(); raise Exception / except: pass`
        pattern swallowed its own deliberately-raised `Exception` in the bare
        `except`, so the test could never fail even if `assert_same_meta`
        stopped raising. Use try/except/else so a missing exception fails.
        """
        index = ['x', 'y', 'z']
        columns = ['a', 'b', 'c']
        checks.assert_same_meta(np.array([1, 2, 3]), np.array([1, 2, 3]))
        checks.assert_same_meta(pd.Series([1, 2, 3], index=index), pd.Series([1, 2, 3], index=index))
        checks.assert_same_meta(pd.DataFrame([[1, 2, 3]], columns=columns), pd.DataFrame([[1, 2, 3]], columns=columns))

        def assert_raises(a, b):
            # Fail loudly when the check does NOT raise
            try:
                checks.assert_same_meta(a, b)
            except Exception:
                pass
            else:
                raise AssertionError("assert_same_meta did not raise")

        assert_raises(pd.Series([1, 2]), pd.DataFrame([1, 2]))
        assert_raises(pd.DataFrame([1, 2]), pd.DataFrame([1, 2, 3]))
        assert_raises(pd.DataFrame([1, 2, 3]), pd.DataFrame([1, 2, 3], index=index))
        assert_raises(pd.DataFrame([[1, 2, 3]]), pd.DataFrame([[1, 2, 3]], columns=columns))
Example #11
0
    def test_assert_same_meta(self):
        """Matching metadata must pass; mismatched type/shape/index/columns
        must raise."""
        index = ['x', 'y', 'z']
        columns = ['a', 'b', 'c']
        # Matching metadata: must not raise
        checks.assert_same_meta(np.array([1, 2, 3]), np.array([1, 2, 3]))
        checks.assert_same_meta(pd.Series([1, 2, 3], index=index), pd.Series([1, 2, 3], index=index))
        checks.assert_same_meta(pd.DataFrame([[1, 2, 3]], columns=columns), pd.DataFrame([[1, 2, 3]], columns=columns))
        # Mismatched pairs: each must raise
        mismatched_pairs = [
            (pd.Series([1, 2]), pd.DataFrame([1, 2])),
            (pd.DataFrame([1, 2]), pd.DataFrame([1, 2, 3])),
            (pd.DataFrame([1, 2, 3]), pd.DataFrame([1, 2, 3], index=index)),
            (pd.DataFrame([[1, 2, 3]]), pd.DataFrame([[1, 2, 3]], columns=columns)),
        ]
        for left, right in mismatched_pairs:
            with pytest.raises(Exception) as e_info:
                checks.assert_same_meta(left, right)