Example 1
    def broadcast(self_or_cls, *others, **kwargs):
        """Broadcast with `others` using `vectorbt.utils.reshape_fns.broadcast`."""
        others = tuple(
            map(lambda x: x._obj
                if isinstance(x, Base_Accessor) else x, others))
        if isinstance(self_or_cls, type):
            return reshape_fns.broadcast(*others, **kwargs)
        return reshape_fns.broadcast(self_or_cls._obj, *others, **kwargs)
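This accessor method carries no usage example. A minimal sketch, assuming `import vectorbt` registers the `.vbt` accessor on pandas objects and that class-level access works the same way as the `pd.DataFrame.vbt.concat` call shown later in this section:

```python
import pandas as pd
import vectorbt as vbt  # assumed to register the .vbt accessor on import

sr = pd.Series([1, 2], index=['x', 'y'])
df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

# Instance call: the accessed object is broadcast together with `others`
obj1, obj2 = sr.vbt.broadcast(df)

# Class call: only the passed objects are broadcast
obj1, obj2 = pd.DataFrame.vbt.broadcast(sr, df)
```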
Example 2
    def concat(self_or_cls, *others, as_columns=None, broadcast_kwargs={}):
        """Concatenate with `others` along columns.

        All arguments will be broadcasted using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`. Use `as_columns` as a top-level column level.

        Example:
            ```python-repl
            >>> import pandas as pd
            >>> sr = pd.Series([1, 2], index=['x', 'y'])
            >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

            >>> print(sr.vbt.concat(df, as_columns=['c', 'd']))
                  c     d
               a  b  a  b
            x  1  1  3  4
            y  2  2  5  6
            ```"""
        others = tuple(
            map(lambda x: x._obj
                if isinstance(x, Base_Accessor) else x, others))
        if isinstance(self_or_cls, type):
            objs = others
        else:
            objs = (self_or_cls._obj, ) + others
        broadcasted = reshape_fns.broadcast(*objs, **broadcast_kwargs)
        broadcasted = tuple(map(reshape_fns.to_2d, broadcasted))
        if checks.is_pandas(broadcasted[0]):
            concated = pd.concat(broadcasted, axis=1)
            if as_columns is not None:
                concated.columns = index_fns.combine_indexes(
                    as_columns, broadcasted[0].columns)
        else:
            concated = np.hstack(broadcasted)
        return concated
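The `self_or_cls` branch above also supports a class-level call, where only the passed objects are broadcast and concatenated; the same pattern appears later in this section as `pd.DataFrame.vbt.concat`. A brief sketch, assuming the accessor is registered:

```python
import pandas as pd
import vectorbt as vbt  # assumed to register the .vbt accessor on import

sr = pd.Series([1, 2], index=['x', 'y'])
df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

# Class-level call: no accessed object, only `sr` and `df` are concatenated
out = pd.DataFrame.vbt.concat(sr, df, as_columns=['c', 'd'])
```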
Example 3
    def rank(self,
             reset_by=None,
             after_false=False,
             allow_gaps=False,
             broadcast_kwargs={}):
        """See `vectorbt.signals.nb.rank_nb`.

        Example:
            Rank `False` values in `signals`:

            ```python-repl
            >>> not_signals = ~signals
            >>> print(not_signals)
                            a      b      c
            2018-01-01  False   True   True
            2018-01-02   True  False   True
            2018-01-03   True   True  False
            2018-01-04  False   True   True
            2018-01-05   True  False   True
            >>> print(not_signals.vbt.signals.rank())
                        a  b  c
            2018-01-01  0  1  1
            2018-01-02  1  0  2
            2018-01-03  2  1  0
            2018-01-04  0  2  1
            2018-01-05  1  0  2
            >>> print(not_signals.vbt.signals.rank(after_false=True))
                        a  b  c
            2018-01-01  0  0  0
            2018-01-02  1  0  0
            2018-01-03  2  1  0
            2018-01-04  0  2  1
            2018-01-05  1  0  2
            >>> print(not_signals.vbt.signals.rank(allow_gaps=True))
                        a  b  c
            2018-01-01  0  1  1
            2018-01-02  1  0  2
            2018-01-03  2  2  0
            2018-01-04  0  3  3
            2018-01-05  3  0  4
            >>> print(not_signals.vbt.signals.rank(reset_by=signals, allow_gaps=True))
                        a  b  c
            2018-01-01  0  1  1
            2018-01-02  1  0  2
            2018-01-03  2  1  0
            2018-01-04  0  2  1
            2018-01-05  1  0  2
            ```"""
        if reset_by is not None:
            obj, reset_by = reshape_fns.broadcast(self._obj, reset_by,
                                                  **broadcast_kwargs)
            reset_by = reset_by.vbt.to_2d_array()
        else:
            obj = self._obj
        ranked = nb.rank_nb(obj.vbt.to_2d_array(),
                            reset_by=reset_by,
                            after_false=after_false,
                            allow_gaps=allow_gaps)
        return obj.vbt.wrap_array(ranked)
Example 4
    def from_signals(cls,
                     ts,
                     entries,
                     exits,
                     volume=np.inf,
                     accumulate=False,
                     investment=None,
                     slippage=None,
                     commission=None,
                     broadcast_kwargs={}):
        """Build portfolio based on entry and exit signals and the corresponding volume.

        Set volume to the number of shares to buy/sell.
        Set volume to np.inf to buy/sell everything.
        Set accumulate to False to avoid producing new orders if already in the market."""
        if investment is None:
            investment = defaults.portfolio['investment']
        if slippage is None:
            slippage = defaults.portfolio['slippage']
        if commission is None:
            commission = defaults.portfolio['commission']

        checks.assert_type(ts, (pd.Series, pd.DataFrame))
        checks.assert_type(entries, (pd.Series, pd.DataFrame))
        checks.assert_type(exits, (pd.Series, pd.DataFrame))

        ts.vbt.timeseries.validate()
        entries.vbt.signals.validate()
        exits.vbt.signals.validate()

        ts, entries, exits = reshape_fns.broadcast(ts,
                                                   entries,
                                                   exits,
                                                   **broadcast_kwargs,
                                                   writeable=True)

        volume = reshape_fns.broadcast_to(volume,
                                          ts,
                                          writeable=True,
                                          copy_kwargs={'dtype': np.float64})

        investment = float(investment)
        slippage = float(slippage)
        commission = float(commission)

        cash, shares = nb.portfolio_from_signals_np(ts.vbt.to_2d_array(),
                                                    investment, slippage,
                                                    commission,
                                                    entries.vbt.to_2d_array(),
                                                    exits.vbt.to_2d_array(),
                                                    volume.vbt.to_2d_array(),
                                                    accumulate)

        cash = ts.vbt.wrap_array(cash)
        shares = ts.vbt.wrap_array(shares)

        return cls(ts, cash, shares, investment, slippage, commission)
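This variant's docstring carries no usage example. A hedged sketch of a call matching the signature shown here (later examples in this section use a revised signature); the toy inputs are assumptions, and `validate()` may impose requirements not shown:

```python
import numpy as np
import pandas as pd
import vectorbt as vbt

index = pd.date_range('2018-01-01', periods=5)
ts = pd.Series([1., 2., 3., 2., 1.], index=index)
entries = pd.Series([True, False, False, False, False], index=index)
exits = pd.Series([False, False, False, False, True], index=index)

# Buy everything on the entry signal, sell everything on the exit signal;
# investment, slippage and commission fall back to defaults.portfolio
portfolio = vbt.Portfolio.from_signals(ts, entries, exits, volume=np.inf)
```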
Example 5
    def map_reduce_between(self,
                           *args,
                           other=None,
                           map_func_nb=None,
                           reduce_func_nb=None,
                           broadcast_kwargs={}):
        """See `vectorbt.signals.nb.map_reduce_between_nb`. 

        If `other` specified, see `vectorbt.signals.nb.map_reduce_between_two_nb`.

        Arguments will be broadcasted using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        Example:
            Get maximum distance between signals in `signals`:

            ```python-repl
            >>> distance_map_nb = njit(lambda col, prev_i, next_i: next_i - prev_i)
            >>> max_reduce_nb = njit(lambda col, a: np.nanmax(a))

            >>> print(signals.vbt.signals.map_reduce_between(
            ...     map_func_nb=distance_map_nb, reduce_func_nb=max_reduce_nb))
            a    3.0
            b    3.0
            c    NaN
            dtype: float64
            ```"""
        checks.assert_not_none(map_func_nb)
        checks.assert_not_none(reduce_func_nb)
        checks.assert_numba_func(map_func_nb)
        checks.assert_numba_func(reduce_func_nb)

        if other is None:
            # One input array
            result = nb.map_reduce_between_nb(self.to_2d_array(), map_func_nb,
                                              reduce_func_nb, *args)
            if isinstance(self._obj, pd.Series):
                return result[0]
            return pd.Series(result, index=self.columns)
        else:
            # Two input arrays
            obj, other = reshape_fns.broadcast(self._obj, other,
                                               **broadcast_kwargs)
            other.vbt.signals.validate()
            result = nb.map_reduce_between_two_nb(self.to_2d_array(),
                                                  other.vbt.to_2d_array(),
                                                  map_func_nb, reduce_func_nb,
                                                  *args)
            if isinstance(obj, pd.Series):
                return result[0]
            return pd.Series(result, index=obj.vbt.columns)
Example 6
    def generate_stop_loss(self,
                           ts,
                           stops,
                           trailing=False,
                           relative=True,
                           as_columns=None,
                           broadcast_kwargs={}):
        """See `vectorbt.signals.nb.generate_stop_loss_nb`.

        Arguments will be broadcasted using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`. Argument `stops` can be either a single number, an array of 
        numbers, or a 3D array, where each matrix corresponds to a single configuration. 
        Use `as_columns` as a top-level column level.

        Example:
            For each entry in `signals`, set stop loss for 10% and 20% below the price `ts`:

            ```python-repl
            >>> print(signals.vbt.signals.generate_stop_loss(ts, [0.1, 0.2]))
            stop_loss                   0.1                  0.2
                            a      b      c      a      b      c
            2018-01-01  False  False  False  False  False  False
            2018-01-02  False   True  False  False  False  False
            2018-01-03  False  False  False  False  False  False
            2018-01-04  False   True   True  False   True   True
            2018-01-05  False  False  False  False  False  False
            ```"""
        entries = self._obj
        checks.assert_type(ts, (pd.Series, pd.DataFrame))

        entries, ts = reshape_fns.broadcast(entries,
                                            ts,
                                            **broadcast_kwargs,
                                            writeable=True)
        stops = reshape_fns.broadcast_to_array_of(stops,
                                                  entries.vbt.to_2d_array())
        exits = nb.generate_stop_loss_nb(entries.vbt.to_2d_array(),
                                         ts.vbt.to_2d_array(), stops, trailing,
                                         relative)

        # Build column hierarchy
        if as_columns is not None:
            param_columns = as_columns
        else:
            name = 'trail_stop' if trailing else 'stop_loss'
            param_columns = index_fns.index_from_values(stops, name=name)
        columns = index_fns.combine_indexes(param_columns, entries.vbt.columns)
        return entries.vbt.wrap_array(exits, columns=columns)
Example 7
    def concat(self_or_cls, *others, as_columns=None, broadcast_kwargs={}):
        """Concatenate with `others` along columns (see Example 2 for a documented variant)."""
        others = tuple(
            map(lambda x: x._obj
                if isinstance(x, Base_Accessor) else x, others))
        if isinstance(self_or_cls, type):
            objs = others
        else:
            objs = (self_or_cls._obj, ) + others
        broadcasted = reshape_fns.broadcast(*objs, **broadcast_kwargs)
        broadcasted = tuple(map(reshape_fns.to_2d, broadcasted))
        if checks.is_pandas(broadcasted[0]):
            concated = pd.concat(broadcasted, axis=1)
            if as_columns is not None:
                concated.columns = index_fns.combine(as_columns,
                                                     broadcasted[0].columns)
        else:
            concated = np.hstack(broadcasted)
        return concated
Example 8
    def from_orders(cls,
                    ts,
                    orders,
                    is_target=False,
                    investment=None,
                    slippage=None,
                    commission=None,
                    broadcast_kwargs={}):
        """Build portfolio based on orders.

        Set an orders element to positive/negative number - a number of shares to buy/sell.
        Set is_target to True to specify the target amount of shares to hold."""
        if investment is None:
            investment = defaults.portfolio['investment']
        if slippage is None:
            slippage = defaults.portfolio['slippage']
        if commission is None:
            commission = defaults.portfolio['commission']

        checks.assert_type(ts, (pd.Series, pd.DataFrame))
        checks.assert_type(orders, (pd.Series, pd.DataFrame))

        ts.vbt.timeseries.validate()
        orders.vbt.timeseries.validate()

        ts, orders = reshape_fns.broadcast(ts,
                                           orders,
                                           **broadcast_kwargs,
                                           writeable=True)

        investment = float(investment)
        slippage = float(slippage)
        commission = float(commission)

        cash, shares = nb.portfolio_from_orders_np(ts.vbt.to_2d_array(),
                                                   investment, slippage,
                                                   commission,
                                                   orders.vbt.to_2d_array(),
                                                   is_target)

        cash = ts.vbt.wrap_array(cash)
        shares = ts.vbt.wrap_array(shares)

        return cls(ts, cash, shares, investment, slippage, commission)
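As with the signal-based variant, the docstring has no usage example. A hedged sketch of a call against this signature; the inputs are illustrative assumptions:

```python
import pandas as pd
import vectorbt as vbt

index = pd.date_range('2018-01-01', periods=5)
ts = pd.Series([1., 2., 3., 2., 1.], index=index)
# Positive values buy shares, negative values sell shares
orders = pd.Series([10., 0., -10., 10., -10.], index=index)

portfolio = vbt.Portfolio.from_orders(ts, orders, is_target=False)
```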
Example 9
    def combine_with(self,
                     other,
                     *args,
                     combine_func=None,
                     pass_2d=False,
                     broadcast_kwargs={},
                     **kwargs):
        """Combine both using `combine_func` into a Series/DataFrame of the same shape.

        All arguments will be broadcasted using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        Arguments `*args` and `**kwargs` will be directly passed to `combine_func`.
        If `pass_2d` is `True`, both objects will be passed to `combine_func` as 2-dimensional NumPy arrays; otherwise, they are passed as is.

        Example:
            ```python-repl
            >>> import pandas as pd
            >>> sr = pd.Series([1, 2], index=['x', 'y'])
            >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

            >>> print(sr.vbt.combine_with(df, combine_func=lambda x, y: x + y))
               a  b
            x  4  5
            y  7  8
            ```"""
        if isinstance(other, Base_Accessor):
            other = other._obj
        checks.assert_not_none(combine_func)
        if checks.is_numba_func(combine_func):
            # Numba requires writable arrays
            broadcast_kwargs = {**dict(writeable=True), **broadcast_kwargs}
        new_obj, new_other = reshape_fns.broadcast(self._obj, other,
                                                   **broadcast_kwargs)
        # Optionally cast to 2d array
        if pass_2d:
            new_obj_arr = reshape_fns.to_2d(np.asarray(new_obj))
            new_other_arr = reshape_fns.to_2d(np.asarray(new_other))
        else:
            new_obj_arr = np.asarray(new_obj)
            new_other_arr = np.asarray(new_other)
        result = combine_func(new_obj_arr, new_other_arr, *args, **kwargs)
        return new_obj.vbt.wrap_array(result)
Example 10
    def combine_with(self,
                     other,
                     *args,
                     combine_func=None,
                     broadcast_kwargs={},
                     **kwargs):
        """Broadcast with other and combine.

        The returned shape is the same as broadcasted shape."""
        if isinstance(other, Base_Accessor):
            other = other._obj
        checks.assert_not_none(combine_func)
        if checks.is_numba_func(combine_func):
            # Numba requires writable arrays
            broadcast_kwargs = {**dict(writeable=True), **broadcast_kwargs}
        new_obj, new_other = reshape_fns.broadcast(self._obj, other,
                                                   **broadcast_kwargs)
        return new_obj.vbt.wrap_array(
            combine_func(np.asarray(new_obj), np.asarray(new_other), *args,
                         **kwargs))
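Because the branch above upgrades `broadcast_kwargs` with `writeable=True` for Numba-compiled functions, here is a sketch of that path with a Numba-compiled `combine_func`; it assumes `numba` is installed and the `.vbt` accessor is registered:

```python
import pandas as pd
from numba import njit

sr = pd.Series([1, 2], index=['x', 'y'])
df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

@njit
def add_nb(x, y):
    # Both inputs arrive as NumPy arrays broadcast to the same shape
    return x + y

out = sr.vbt.combine_with(df, combine_func=add_nb)
```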
Example 11
    def from_orders(cls,
                    price,
                    orders,
                    is_target=False,
                    init_capital=None,
                    fees=None,
                    slippage=None,
                    broadcast_kwargs={}):
        """Build portfolio from orders.

        Starting with initial capital `init_capital`, at each time step, orders the number 
        of shares specified in `orders`. 

        Args:
            price (pandas_like): Price of the asset.
            orders (int, float or array_like): The amount of shares to order. 

                If the amount is positive, this is the number of shares to buy. 
                If the amount is negative, this is the number of shares to sell.
                To buy/sell everything, set the amount to `numpy.inf`.
            is_target (bool): If `True`, will order the difference between current and target amount.
            init_capital (int or float): The initial capital.
            fees (float or array_like): Trading fees in percentage of the value involved.
            slippage (float or array_like): Slippage in percentage of `price`.

        All array-like arguments will be broadcasted together using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`. At the end, each time series object will have the same metadata.

        Example:
            Portfolio value of various order sequences:
            ```python-repl
            >>> orders = pd.DataFrame({
            ...     'a': [np.inf, 0, 0, 0, 0],
            ...     'b': [1, 1, 1, 1, -np.inf],
            ...     'c': [np.inf, -np.inf, np.inf, -np.inf, np.inf]
            ... }, index=index)

            >>> portfolio = vbt.Portfolio.from_orders(price, orders, 
            ...     init_capital=100, fees=0.0025)

            >>> print(portfolio.cash)
                          a        b           c
            2018-01-01  0.0  98.9975    0.000000
            2018-01-02  0.0  96.9925  199.002494
            2018-01-03  0.0  93.9850    0.000000
            2018-01-04  0.0  91.9800  132.006642
            2018-01-05  0.0  95.9700    0.000000
            >>> print(portfolio.shares)
                                a    b           c
            2018-01-01  99.750623  1.0   99.750623
            2018-01-02  99.750623  2.0    0.000000
            2018-01-03  99.750623  3.0   66.168743
            2018-01-04  99.750623  4.0    0.000000
            2018-01-05  99.750623  0.0  131.677448
            >>> print(portfolio.equity)
                                 a         b           c
            2018-01-01   99.750623   99.9975   99.750623
            2018-01-02  199.501247  100.9925  199.002494
            2018-01-03  299.251870  102.9850  198.506228
            2018-01-04  199.501247   99.9800  132.006642
            2018-01-05   99.750623   95.9700  131.677448
            >>> print(portfolio.total_costs)
            a    0.249377
            b    0.030000
            c    1.904433
            dtype: float64
            ```
        """
        # Get defaults
        if init_capital is None:
            init_capital = defaults.portfolio['init_capital']
        init_capital = float(init_capital)
        if fees is None:
            fees = defaults.portfolio['fees']
        if slippage is None:
            slippage = defaults.portfolio['slippage']

        # Perform checks
        checks.assert_type(price, (pd.Series, pd.DataFrame))
        checks.assert_type(orders, (pd.Series, pd.DataFrame))

        # Broadcast inputs
        price, orders = reshape_fns.broadcast(price,
                                              orders,
                                              **broadcast_kwargs,
                                              writeable=True)
        fees = reshape_fns.broadcast_to(fees,
                                        price,
                                        to_pd=False,
                                        writeable=True)
        slippage = reshape_fns.broadcast_to(slippage,
                                            price,
                                            to_pd=False,
                                            writeable=True)

        # Perform calculation
        cash, shares, paid_fees, paid_slippage = nb.portfolio_nb(
            reshape_fns.to_2d(price, raw=True), init_capital,
            reshape_fns.to_2d(fees, raw=True),
            reshape_fns.to_2d(slippage, raw=True), nb.amount_order_func_nb,
            reshape_fns.to_2d(orders, raw=True), is_target)

        # Bring to the same meta
        cash = price.vbt.wrap_array(cash)
        shares = price.vbt.wrap_array(shares)
        paid_fees = price.vbt.wrap_array(paid_fees)
        paid_slippage = price.vbt.wrap_array(paid_slippage)

        return cls(price, cash, shares, init_capital, paid_fees, paid_slippage)
Example 12
    def from_signals(cls,
                     price,
                     entries,
                     exits,
                     amount=np.inf,
                     init_capital=None,
                     fees=None,
                     slippage=None,
                     broadcast_kwargs={}):
        """Build portfolio from entry and exit signals.

        Starting with initial capital `init_capital`, for each `True` in `entries`/`exits`, 
        orders the number of shares specified in `amount`. 

        Args:
            price (pandas_like): Price of the asset.
            entries (pandas_like): Boolean array of entry signals.
            exits (pandas_like): Boolean array of exit signals.
            amount (int, float or array_like): The amount of shares to order. 

                To buy/sell everything, set the amount to `numpy.inf`.
            init_capital (int or float): The initial capital.
            fees (float or array_like): Trading fees in percentage of the value involved.
            slippage (float or array_like): Slippage in percentage of `price`.

        All array-like arguments will be broadcasted together using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`. At the end, each time series object will have the same metadata.

        !!! note
            There is no mechanism implemented to prevent order accumulation, meaning multiple entry/exit 
            signals one after another may increase/decrease your position in the market. That's why we will
            later calculate P/L of positions instead of trades.

            To select at most one exit signal, use `vectorbt.signals.accessors.Signals_Accessor.first`. 

        Example:
            Portfolio value of various signal sequences:
            ```python-repl
            >>> entries = pd.DataFrame({
            ...     'a': [True, False, False, False, False],
            ...     'b': [True, True, True, True, True],
            ...     'c': [True, False, True, False, True]
            ... }, index=index)
            >>> exits = pd.DataFrame({
            ...     'a': [False, False, False, False, False],
            ...     'b': [False, False, False, False, False],
            ...     'c': [False, True, False, True, False]
            ... }, index=index)

            >>> portfolio = vbt.Portfolio.from_signals(price, entries, 
            ...     exits, amount=10, init_capital=100, fees=0.0025)

            >>> print(portfolio.cash)
                             a       b        c
            2018-01-01  89.975  89.975   89.975
            2018-01-02  89.975  69.925  109.925
            2018-01-03  89.975  39.850   79.850
            2018-01-04  89.975  19.800   99.800
            2018-01-05  89.975   9.775   89.775
            >>> print(portfolio.shares)
                           a     b     c
            2018-01-01  10.0  10.0  10.0
            2018-01-02  10.0  20.0   0.0
            2018-01-03  10.0  30.0  10.0
            2018-01-04  10.0  40.0   0.0
            2018-01-05  10.0  50.0  10.0
            >>> print(portfolio.equity)
                              a        b        c
            2018-01-01   99.975   99.975   99.975
            2018-01-02  109.975  109.925  109.925
            2018-01-03  119.975  129.850  109.850
            2018-01-04  109.975   99.800   99.800
            2018-01-05   99.975   59.775   99.775
            >>> print(portfolio.total_costs)
            a    0.025
            b    0.225
            c    0.225
            dtype: float64
            ```
        """
        # Get defaults
        if init_capital is None:
            init_capital = defaults.portfolio['init_capital']
        init_capital = float(init_capital)
        if fees is None:
            fees = defaults.portfolio['fees']
        if slippage is None:
            slippage = defaults.portfolio['slippage']

        # Perform checks
        checks.assert_type(price, (pd.Series, pd.DataFrame))
        checks.assert_type(entries, (pd.Series, pd.DataFrame))
        checks.assert_type(exits, (pd.Series, pd.DataFrame))
        entries.vbt.signals.validate()
        exits.vbt.signals.validate()

        # Broadcast inputs
        price, entries, exits, amount, fees, slippage = reshape_fns.broadcast(
            price,
            entries,
            exits,
            amount,
            fees,
            slippage,
            **broadcast_kwargs,
            writeable=True)

        # Perform calculation
        cash, shares, paid_fees, paid_slippage = nb.portfolio_nb(
            reshape_fns.to_2d(price, raw=True), init_capital,
            reshape_fns.to_2d(fees, raw=True),
            reshape_fns.to_2d(slippage, raw=True), nb.signals_order_func_nb,
            reshape_fns.to_2d(entries, raw=True),
            reshape_fns.to_2d(exits, raw=True),
            reshape_fns.to_2d(amount, raw=True))

        # Bring to the same meta
        cash = price.vbt.wrap_array(cash)
        shares = price.vbt.wrap_array(shares)
        paid_fees = price.vbt.wrap_array(paid_fees)
        paid_slippage = price.vbt.wrap_array(paid_slippage)

        return cls(price, cash, shares, init_capital, paid_fees, paid_slippage)
Example 13
def from_params_pipeline(ts_list,
                         param_list,
                         level_names,
                         num_outputs,
                         custom_func,
                         *args,
                         pass_lists=False,
                         param_product=False,
                         broadcast_kwargs={},
                         return_raw=False,
                         **kwargs):
    """A pipeline for calculating an indicator, used by `IndicatorFactory`.

    Does the following:

    * Takes one or multiple time series objects in `ts_list` and broadcasts them. For example:

    ```python-repl
    >>> sr = pd.Series([1, 2], index=['x', 'y'])
    >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])
    >>> ts_list = [sr, df]

    >>> ts_list = vbt.utils.reshape_fns.broadcast(*ts_list)
    >>> print(ts_list[0])
       a  b
    x  1  1
    y  2  2
    >>> print(ts_list[1])
       a  b
    x  3  4
    y  5  6
    ```

    * Takes one or multiple parameters in `param_list`, converts them to NumPy arrays and 
        broadcasts them. For example:

    ```python-repl
    >>> p1, p2, p3 = 1, [2, 3, 4], [False]
    >>> param_list = [p1, p2, p3]

    >>> param_list = vbt.utils.reshape_fns.broadcast(*param_list)
    >>> param_list[0]
    array([1, 1, 1])
    >>> param_list[1]
    array([2, 3, 4])
    >>> param_list[2]
    array([False, False, False])
    ```

    * Performs calculation using `custom_func` to build output arrays (`output_list`) and 
        other objects (`other_list`, optional). For example:

    ```python-repl
    >>> def custom_func(ts1, ts2, p1, p2, p3, *args, **kwargs):
    ...     return pd.DataFrame.vbt.concat(
    ...         (ts1.values + ts2.values) + p1[0] * p2[0],
    ...         (ts1.values + ts2.values) + p1[1] * p2[1],
    ...         (ts1.values + ts2.values) + p1[2] * p2[2]
    ...     )

    >>> output = custom_func(*ts_list, *param_list)
    >>> output
    array([[ 6,  7,  7,  8,  8,  9],
           [ 9, 10, 10, 11, 11, 12]])
    ```

    * Creates new column hierarchy based on parameters and level names. For example:

    ```python-repl
    >>> p1_columns = pd.Index(param_list[0], name='p1')
    >>> p2_columns = pd.Index(param_list[1], name='p2')
    >>> p3_columns = pd.Index(param_list[2], name='p3')
    >>> p_columns = vbt.utils.index_fns.stack(p1_columns, p2_columns, p3_columns)
    >>> new_columns = vbt.utils.index_fns.combine(p_columns, ts_list[0].columns)

    >>> output_df = pd.DataFrame(output, columns=new_columns)
    >>> print(output_df)
    p1      1      1      1      1      1      1                        
    p2      2      2      3      3      4      4    
    p3  False  False  False  False  False  False    
            a      b      a      b      a      b
    0       6      7      7      8      8      9
    1       9     10     10     11     11     12
    ```

    * Broadcasts objects in `ts_list` to match the shape of objects in `output_list` through tiling.
        This is done to be able to compare them and generate signals, since you cannot compare NumPy 
        arrays that have totally different shapes, such as (2, 2) and (2, 6). For example:

    ```python-repl
    >>> new_ts_list = [
    ...     ts_list[0].vbt.tile(len(param_list[0]), as_columns=p_columns),
    ...     ts_list[1].vbt.tile(len(param_list[0]), as_columns=p_columns)
    ... ]
    >>> print(new_ts_list[0])
    p1      1      1      1      1      1      1                        
    p2      2      2      3      3      4      4    
    p3  False  False  False  False  False  False     
            a      b      a      b      a      b
    0       1      1      1      1      1      1
    1       2      2      2      2      2      2
    ```

    * Builds parameter mappers that will link parameters from `param_list` to columns in 
        `ts_list` and `output_list`. This is done to enable column indexing using parameter values.

    Args:
        ts_list (list of array_like): A list of time series objects. At least one must be a pandas object.
        param_list (list of array_like): A list of parameters. Each element is either an array-like object
            or a single value of any type.
        level_names (list of str): A list of column level names corresponding to each parameter.
        num_outputs (int): The number of output arrays.
        custom_func (function): A custom calculation function. See `IndicatorFactory.from_custom_func`.
        *args: Arguments passed to the `custom_func`.
        pass_lists (bool): If True, arguments are passed to the `custom_func` as lists. Defaults to False.
        param_product (bool): If True, builds a Cartesian product out of all parameters. Defaults to False.
        broadcast_kwargs (dict, optional): Keyword arguments passed to the `vectorbt.utils.reshape_fns.broadcast` 
            on time series objects.
        return_raw (bool): If True, returns the raw output without post-processing. Defaults to False.
        **kwargs: Keyword arguments passed to the `custom_func`.

            Some common arguments include `return_cache` to return cache and `cache` to pass cache. 
            Those are only applicable to `custom_func` that supports it (`custom_func` created using
            `IndicatorFactory.from_apply_func` are supported by default).
    Returns:
        A list of transformed inputs (`pandas_like`), a list of generated outputs (`pandas_like`), 
        a list of parameter arrays (`numpy.ndarray`), a list of parameter mappers (`pandas.Series`),
        and a list of any other generated outputs beyond `num_outputs`.
    """
    # Check time series objects
    checks.assert_type(ts_list[0], (pd.Series, pd.DataFrame))
    for i in range(1, len(ts_list)):
        ts_list[i].vbt.timeseries.validate()
    if len(ts_list) > 1:
        # Broadcast time series
        ts_list = reshape_fns.broadcast(*ts_list,
                                        **broadcast_kwargs,
                                        writeable=True)
    # Check level names
    checks.assert_type(level_names, (list, tuple))
    checks.assert_same_len(param_list, level_names)
    for ts in ts_list:
        # Every time series object should be free of the specified level names in its columns
        for level_name in level_names:
            checks.assert_level_not_exists(ts, level_name)
    # Convert params to 1-dim arrays
    param_list = list(map(reshape_fns.to_1d, param_list))
    if len(param_list) > 1:
        if param_product:
            # Make Cartesian product out of all params
            param_list = list(map(reshape_fns.to_1d, param_list))
            param_list = list(zip(*list(itertools.product(*param_list))))
            param_list = list(map(np.asarray, param_list))
        else:
            # Broadcast such that each array has the same length
            param_list = reshape_fns.broadcast(*param_list, writeable=True)
    # Perform main calculation
    if pass_lists:
        output_list = custom_func(ts_list, param_list, *args, **kwargs)
    else:
        output_list = custom_func(*ts_list, *param_list, *args, **kwargs)
    if return_raw or kwargs.get('return_cache', False):
        return output_list  # return raw cache outputs
    if not isinstance(output_list, (tuple, list, List)):
        output_list = [output_list]
    else:
        output_list = list(output_list)
    # Other outputs should be returned without post-processing (for example cache_dict)
    if len(output_list) > num_outputs:
        other_list = output_list[num_outputs:]
    else:
        other_list = []
    # Process only the num_outputs outputs
    output_list = output_list[:num_outputs]
    if len(param_list) > 0:
        # Build new column levels on top of time series levels
        new_columns = build_column_hierarchy(
            param_list, level_names,
            reshape_fns.to_2d(ts_list[0]).columns)
        # Wrap into new pandas objects both time series and output objects
        new_ts_list = list(
            map(lambda x: broadcast_ts(x, param_list[0].shape[0], new_columns),
                ts_list))
        # Build mappers to easily map between parameters and columns
        mapper_list = [
            build_mapper(x, ts_list[0], new_columns, level_names[i])
            for i, x in enumerate(param_list)
        ]
    else:
        # Some indicators don't have any params
        new_columns = reshape_fns.to_2d(ts_list[0]).columns
        new_ts_list = list(ts_list)
        mapper_list = []
    output_list = list(
        map(lambda x: wrap_output(x, ts_list[0], new_columns), output_list))
    if len(mapper_list) > 1:
        # Tuple object is a mapper that accepts tuples of parameters
        tuple_mapper = build_tuple_mapper(mapper_list, new_columns,
                                          tuple(level_names))
        mapper_list.append(tuple_mapper)
    return new_ts_list, output_list, param_list, mapper_list, other_list
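The docstring above walks through the pipeline's internal steps but never shows the call itself. A hedged end-to-end sketch against the signature, assuming the function is importable from `vectorbt.indicators.factory` (the module path referenced later in this section); `custom_func` and the toy inputs are illustrative, not from the library:

```python
import numpy as np
import pandas as pd
from vectorbt.indicators.factory import from_params_pipeline

df = pd.DataFrame([[1, 2], [3, 4]], index=['x', 'y'], columns=['a', 'b'])

def custom_func(ts, windows):
    # One output block per parameter value, concatenated along columns
    return np.hstack([ts.values * w for w in windows])

new_ts_list, output_list, param_list, mapper_list, other_list = from_params_pipeline(
    [df],                 # ts_list: at least one pandas object
    [np.array([2, 3])],   # param_list: a single parameter with two values
    ['window'],           # level_names: one column level per parameter
    1,                    # num_outputs
    custom_func)
```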
Example 14
    def combine_with_multiple(self,
                              others,
                              *args,
                              combine_func=None,
                              pass_2d=False,
                              concat=False,
                              broadcast_kwargs={},
                              as_columns=None,
                              **kwargs):
        """Combine with `others` using `combine_func`.

        All arguments will be broadcasted using `vectorbt.utils.reshape_fns.broadcast`
        with `broadcast_kwargs`.

        If `concat` is `True`, concatenate the results along columns, 
        see `vectorbt.utils.combine_fns.combine_and_concat`.
        Otherwise, pairwise combine into a Series/DataFrame of the same shape, 
        see `vectorbt.utils.combine_fns.combine_multiple`.

        Arguments `*args` and `**kwargs` will be directly passed to `combine_func`. 
        If `pass_2d` is `True`, all objects will be passed to `combine_func` as 2-dimensional NumPy arrays; otherwise, they are passed as is.
        Use `as_columns` as a top-level column level.

        !!! note
            If `combine_func` is Numba-compiled, will broadcast using `writeable=True` and
            copy using `order='C'` flags, which can lead to an expensive computation overhead if
            passed objects are large and have different shape/memory order. You also must ensure 
            that all objects have the same data type.

            Also remember to bring each argument in `*args` to a Numba-compatible format.

        Example:
            ```python-repl
            >>> import pandas as pd
            >>> sr = pd.Series([1, 2], index=['x', 'y'])
            >>> df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

            >>> print(sr.vbt.combine_with_multiple([df, df*2], 
            ...     combine_func=lambda x, y: x + y))
                a   b
            x  10  13
            y  17  20

            >>> print(sr.vbt.combine_with_multiple([df, df*2], 
            ...     combine_func=lambda x, y: x + y, concat=True, as_columns=['c', 'd']))
                  c       d    
               a  b   a   b
            x  4  5   7   9
            y  7  8  12  14
            ```"""
        others = tuple(
            map(lambda x: x._obj
                if isinstance(x, Base_Accessor) else x, others))
        checks.assert_not_none(combine_func)
        checks.assert_type(others, Iterable)
        # Broadcast arguments
        if checks.is_numba_func(combine_func):
            # Numba requires writable arrays
            broadcast_kwargs = {**dict(writeable=True), **broadcast_kwargs}
            # Plus all of our arrays must be in the same order
            broadcast_kwargs['copy_kwargs'] = {
                **dict(order='C'),
                **broadcast_kwargs.get('copy_kwargs', {})
            }
        new_obj, *new_others = reshape_fns.broadcast(self._obj, *others,
                                                     **broadcast_kwargs)
        # Optionally cast to 2d array
        if pass_2d:
            bc_arrays = tuple(
                map(lambda x: reshape_fns.to_2d(np.asarray(x)),
                    (new_obj, *new_others)))
        else:
            bc_arrays = tuple(
                map(lambda x: np.asarray(x), (new_obj, *new_others)))
        if concat:
            # Concat the results horizontally
            if checks.is_numba_func(combine_func):
                for i in range(1, len(bc_arrays)):
                    checks.assert_same_meta(bc_arrays[i - 1], bc_arrays[i])
                result = combine_fns.combine_and_concat_nb(
                    bc_arrays[0], bc_arrays[1:], combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_and_concat(
                    bc_arrays[0], bc_arrays[1:], combine_func, *args, **kwargs)
            columns = new_obj.vbt.columns
            if as_columns is not None:
                new_columns = index_fns.combine_indexes(as_columns, columns)
            else:
                new_columns = index_fns.tile_index(columns, len(others))
            return new_obj.vbt.wrap_array(result, columns=new_columns)
        else:
            # Combine arguments pairwise into one object
            if checks.is_numba_func(combine_func):
                for i in range(1, len(bc_arrays)):
                    checks.assert_same_dtype(bc_arrays[i - 1], bc_arrays[i])
                result = combine_fns.combine_multiple_nb(
                    bc_arrays, combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_multiple(bc_arrays, combine_func,
                                                      *args, **kwargs)
            return new_obj.vbt.wrap_array(result)
Example 15
    def from_signals(cls,
                     price,
                     entries,
                     exits,
                     size=np.inf,
                     entry_price=None,
                     exit_price=None,
                     init_capital=None,
                     fees=None,
                     fixed_fees=None,
                     slippage=None,
                     accumulate=False,
                     broadcast_kwargs={},
                     **kwargs):
        """Build portfolio from entry and exit signals.

        At each entry signal in `entries`, buys `size` of shares for `entry_price` to enter
        a position. At each exit signal in `exits`, sells everything for `exit_price` 
        to exit the position. Accumulation of orders is disabled by default.

        Args:
            price (pandas_like): Main price of the asset, such as close.
            entries (array_like): Boolean array of entry signals.
            exits (array_like): Boolean array of exit signals.
            size (int, float or array_like): The amount of shares to order. 

                To buy/sell everything, set the size to `numpy.inf`.
            entry_price (array_like): Entry price. Defaults to `price`.
            exit_price (array_like): Exit price. Defaults to `price`.
            init_capital (int or float): The initial capital.
            fees (float or array_like): Fees in percentage of the order value.
            fixed_fees (float or array_like): Fixed amount of fees to pay per order.
            slippage (float or array_like): Slippage in percentage of price.
            accumulate (bool): If `True`, entering the market when already in the market
                is allowed and will increase the position.
            **kwargs: Keyword arguments passed to the `__init__` method.

        For defaults, see `vectorbt.defaults.portfolio`.

        All array-like arguments will be broadcasted together using `vectorbt.utils.reshape_fns.broadcast` 
        with `broadcast_kwargs`. At the end, all array objects will have the same metadata.

        Example:
            Portfolio from various signal sequences:
            ```python-repl
            >>> entries = pd.DataFrame({
            ...     'a': [True, False, False, False, False],
            ...     'b': [True, False, True, False, True],
            ...     'c': [True, True, True, True, True]
            ... }, index=index)
            >>> exits = pd.DataFrame({
            ...     'a': [False, False, False, False, False],
            ...     'b': [False, True, False, True, False],
            ...     'c': [True, True, True, True, True]
            ... }, index=index)
            >>> portfolio = vbt.Portfolio.from_signals(
            ...     price, entries, exits, size=10,
            ...     init_capital=100, fees=0.0025, fixed_fees=1., slippage=0.001)

            >>> print(portfolio.order_records)
               Column  Index  Size  Price      Fees  Side
            0     0.0    0.0  10.0  1.001  1.025025   0.0
            1     1.0    0.0  10.0  1.001  1.025025   0.0
            2     1.0    1.0  10.0  1.998  1.049950   1.0
            3     1.0    2.0  10.0  3.003  1.075075   0.0
            4     1.0    3.0  10.0  1.998  1.049950   1.0
            5     1.0    4.0  10.0  1.001  1.025025   0.0
            6     2.0    0.0  10.0  1.001  1.025025   0.0
            >>> print(portfolio.shares)
                           a     b     c
            2018-01-01  10.0  10.0  10.0
            2018-01-02  10.0   0.0  10.0
            2018-01-03  10.0  10.0  10.0
            2018-01-04  10.0   0.0  10.0
            2018-01-05  10.0  10.0  10.0
            >>> print(portfolio.cash)
                                a           b          c
            2018-01-01  88.964975   88.964975  88.964975
            2018-01-02  88.964975  107.895025  88.964975
            2018-01-03  88.964975   76.789950  88.964975
            2018-01-04  88.964975   95.720000  88.964975
            2018-01-05  88.964975   84.684975  88.964975
            ```
        """
        # Get defaults
        if entry_price is None:
            entry_price = price
        if exit_price is None:
            exit_price = price
        if init_capital is None:
            init_capital = defaults.portfolio['init_capital']
        if fees is None:
            fees = defaults.portfolio['fees']
        if fixed_fees is None:
            fixed_fees = defaults.portfolio['fixed_fees']
        if slippage is None:
            slippage = defaults.portfolio['slippage']

        # Perform checks
        checks.assert_type(price, (pd.Series, pd.DataFrame))
        checks.assert_dtype(entries, np.bool_)
        checks.assert_dtype(exits, np.bool_)

        # Broadcast inputs
        price, entries, exits, size, entry_price, exit_price, fees, fixed_fees, slippage = \
            reshape_fns.broadcast(price, entries, exits, size, entry_price, exit_price, fees,
                                  fixed_fees, slippage, **broadcast_kwargs, writeable=True)

        # Perform calculation
        order_records, cash, shares = nb.simulate_from_signals_nb(
            reshape_fns.to_2d(price, raw=True).shape, init_capital,
            reshape_fns.to_2d(entries, raw=True),
            reshape_fns.to_2d(exits, raw=True),
            reshape_fns.to_2d(size, raw=True),
            reshape_fns.to_2d(entry_price, raw=True),
            reshape_fns.to_2d(exit_price, raw=True),
            reshape_fns.to_2d(fees, raw=True),
            reshape_fns.to_2d(fixed_fees, raw=True),
            reshape_fns.to_2d(slippage, raw=True), accumulate)

        # Bring to the same meta
        cash = price.vbt.wrap(cash)
        shares = price.vbt.wrap(shares)

        return cls(price, init_capital, order_records, cash, shares, **kwargs)
Example 16
    def combine_with_multiple(self,
                              others,
                              *args,
                              combine_func=None,
                              concat=False,
                              broadcast_kwargs={},
                              as_columns=None,
                              **kwargs):
        """Broadcast with other objects to the same shape and combine them all pairwise.

        If `concat` is `False`, the returned shape is the same as the broadcast shape.
        If `concat` is `True`, the returned shape is the concatenation of the broadcast shapes along columns."""
        others = tuple(
            map(lambda x: x._obj
                if isinstance(x, Base_Accessor) else x, others))
        checks.assert_not_none(combine_func)
        checks.assert_type(others, Iterable)
        # Broadcast arguments
        if checks.is_numba_func(combine_func):
            # Numba requires writable arrays
            broadcast_kwargs = {**dict(writeable=True), **broadcast_kwargs}
            # Plus all of our arrays must be in the same order
            broadcast_kwargs['copy_kwargs'] = {
                **dict(order='C'),
                **broadcast_kwargs.get('copy_kwargs', {})
            }
        new_obj, *new_others = reshape_fns.broadcast(self._obj, *others,
                                                     **broadcast_kwargs)
        broadcasted = tuple(map(np.asarray, (new_obj, *new_others)))
        if concat:
            # Concat the results horizontally
            if checks.is_numba_func(combine_func):
                for i in range(1, len(broadcasted)):
                    # NOTE: all inputs must have the same dtype
                    checks.assert_same_meta(broadcasted[i - 1], broadcasted[i])
                result = combine_fns.combine_and_concat_nb(
                    broadcasted[0], broadcasted[1:], combine_func, *args,
                    **kwargs)
            else:
                result = combine_fns.combine_and_concat(
                    broadcasted[0], broadcasted[1:], combine_func, *args,
                    **kwargs)
            if as_columns is not None:
                new_columns = index_fns.combine(
                    as_columns,
                    reshape_fns.to_2d(new_obj).columns)
            else:
                new_columns = index_fns.tile(
                    reshape_fns.to_2d(new_obj).columns, len(others))
            return new_obj.vbt.wrap_array(result, columns=new_columns)
        else:
            # Combine arguments pairwise into one object
            if checks.is_numba_func(combine_func):
                for i in range(1, len(broadcasted)):
                    # NOTE: all inputs must have the same dtype
                    checks.assert_same_dtype(broadcasted[i - 1],
                                             broadcasted[i])
                result = combine_fns.combine_multiple_nb(
                    broadcasted, combine_func, *args, **kwargs)
            else:
                result = combine_fns.combine_multiple(broadcasted,
                                                      combine_func, *args,
                                                      **kwargs)
            return new_obj.vbt.wrap_array(result)
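A sketch of the Numba path with `concat=True`, which the branches above special-case (all broadcast inputs must share the same dtype and memory layout); it assumes `numba` is installed and the `.vbt` accessor is registered:

```python
import pandas as pd
from numba import njit

sr = pd.Series([1, 2], index=['x', 'y'])
df = pd.DataFrame([[3, 4], [5, 6]], index=['x', 'y'], columns=['a', 'b'])

@njit
def add_nb(x, y):
    return x + y

# sr+df and sr+df*2 are computed separately and concatenated along columns
out = sr.vbt.combine_with_multiple(
    [df, df * 2], combine_func=add_nb, concat=True, as_columns=['c', 'd'])
```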
Example 17
    def from_combinations(cls, ts, windows, r, ewm=False, names=None, **kwargs):
        """Create multiple `MA` combinations according to `itertools.combinations`.

        Args:
            ts (pandas_like): Time series (such as price).
            windows (array_like of int): Size of the moving window.
            r (int): The number of `MA` instances to combine.
            ewm (bool or array_like of bool): If `True`, uses exponential moving average, otherwise 
                uses simple moving average.
            names (list of str): A list of names for each `MA` instance.
            **kwargs: Keyword arguments passed to `vectorbt.indicators.factory.from_params_pipeline`.
        Returns:
            tuple of MA
        Example:
            ```python-repl
            >>> fast_ma, slow_ma = vbt.MA.from_combinations(price['Close'], 
            ...     [10, 20, 30], 2, ewm=[False, False, True], names=['fast', 'slow'])

            >>> print(fast_ma.ma)
            fast_window                    10          20
            fast_ewm         False      False       False
            Date                                         
            2019-02-28         NaN        NaN         NaN
            2019-03-01         NaN        NaN         NaN
            2019-03-02         NaN        NaN         NaN
            ...                ...        ...         ...
            2019-08-29   10155.972  10155.972  10447.3480
            2019-08-30   10039.466  10039.466  10359.5555
            2019-08-31    9988.727   9988.727  10264.9095

            [185 rows x 3 columns]

            >>> print(slow_ma.ma)
            slow_window          20                          30
            slow_ewm          False          True          True
            Date                                               
            2019-02-28          NaN           NaN           NaN
            2019-03-01          NaN           NaN           NaN
            2019-03-02          NaN           NaN           NaN
            ...                 ...           ...           ...
            2019-08-29   10447.3480  10423.585970  10423.585970
            2019-08-30   10359.5555  10370.333077  10370.333077
            2019-08-31   10264.9095  10322.612024  10322.612024

            [185 rows x 3 columns]

            ```

            The naive way, without caching, is as follows:
            ```py
            window_combs = itertools.combinations([10, 20, 30], 2)
            ewm_combs = itertools.combinations([False, False, True], 2)
            fast_windows, slow_windows = np.asarray(list(window_combs)).transpose()
            fast_ewms, slow_ewms = np.asarray(list(ewm_combs)).transpose()

            fast_ma = vbt.MA.from_params(price['Close'],
                fast_windows, fast_ewms, name='fast')
            slow_ma = vbt.MA.from_params(price['Close'],
                slow_windows, slow_ewms, name='slow')
            ```

            Having this, you can now compare these `MA` instances:
            ```python-repl
            >>> entry_signals = fast_ma.ma_above(slow_ma, crossed=True)
            >>> exit_signals = fast_ma.ma_below(slow_ma, crossed=True)

            >>> print(entry_signals)
            fast_window            10     20
            fast_ewm     False  False  False
            slow_window     20            30
            slow_ewm     False   True   True
            Date                            
            2019-02-28   False  False  False
            2019-03-01   False  False  False
            2019-03-02   False  False  False
            ...            ...    ...    ...
            2019-08-29   False  False  False
            2019-08-30   False  False  False
            2019-08-31   False  False  False

            [185 rows x 3 columns]
            ```

            Notice how `MA.ma_above` method created a new column hierarchy for you. You can now use
            it for indexing as follows:

            ```py
            fig = price['Close'].vbt.timeseries.plot(name='Price')
            fig = entry_signals[(10, False, 20, False)]\\
                .vbt.signals.plot_markers(price['Close'], signal_type='entry', fig=fig)
            fig = exit_signals[(10, False, 20, False)]\\
                .vbt.signals.plot_markers(price['Close'], signal_type='exit', fig=fig)

            fig.show()
            ```
            ![](/vectorbt/docs/img/MA_from_combinations.png)
        """

        if names is None:
            names = ['ma' + str(i+1) for i in range(r)]
        windows, ewm = reshape_fns.broadcast(windows, ewm, writeable=True)
        cache_dict = cls.from_params(ts, windows, ewm=ewm, return_cache=True, **kwargs)
        param_lists = zip(*itertools.combinations(zip(windows, ewm), r))
        mas = []
        for i, param_list in enumerate(param_lists):
            i_windows, i_ewm = zip(*param_list)
            mas.append(cls.from_params(ts, i_windows, ewm=i_ewm, cache=cache_dict, name=names[i], **kwargs))
        return tuple(mas)
Example 18
    def from_orders(cls,
                    price,
                    order_size,
                    order_price=None,
                    init_capital=None,
                    fees=None,
                    fixed_fees=None,
                    slippage=None,
                    is_target=False,
                    broadcast_kwargs={},
                    **kwargs):
        """Build portfolio from orders.

        Starting with initial capital `init_capital`, at each time step, orders the number 
        of shares specified in `order_size` for `order_price`. 

        Args:
            price (pandas_like): Main price of the asset, such as close.
            order_size (int, float or array_like): The amount of shares to order. 

                If the size is positive, this is the number of shares to buy. 
                If the size is negative, this is the number of shares to sell.
                To buy/sell everything, set the size to `numpy.inf`.
            order_price (array_like): Order price. Defaults to `price`.
            init_capital (int or float): The initial capital.
            fees (float or array_like): Fees in percentage of the order value.
            fixed_fees (float or array_like): Fixed amount of fees to pay per order.
            slippage (float or array_like): Slippage in percentage of `order_price`.
            is_target (bool): If `True`, will order the difference between current and target size.
            **kwargs: Keyword arguments passed to the `__init__` method.

        For defaults, see `vectorbt.defaults.portfolio`.

        All array-like arguments will be broadcasted together using `vectorbt.utils.reshape_fns.broadcast` 
        with `broadcast_kwargs`. At the end, all array objects will have the same metadata.

        Example:
            Portfolio from various order sequences:
            ```python-repl
            >>> orders = pd.DataFrame({
            ...     'a': [np.inf, 0, 0, 0, 0],
            ...     'b': [1, 1, 1, 1, -np.inf],
            ...     'c': [np.inf, -np.inf, np.inf, -np.inf, np.inf]
            ... }, index=index)
            >>> portfolio = vbt.Portfolio.from_orders(price, orders, 
            ...     init_capital=100, fees=0.0025, fixed_fees=1., slippage=0.001)

            >>> print(portfolio.order_records)
                Column  Index        Size  Price      Fees  Side
            0      0.0    0.0   98.654463  1.001  1.246883   0.0
            1      1.0    0.0    1.000000  1.001  1.002502   0.0
            2      1.0    1.0    1.000000  2.002  1.005005   0.0
            3      1.0    2.0    1.000000  3.003  1.007507   0.0
            4      1.0    3.0    1.000000  2.002  1.005005   0.0
            5      1.0    4.0    4.000000  0.999  1.009990   1.0
            6      2.0    0.0   98.654463  1.001  1.246883   0.0
            7      2.0    1.0   98.654463  1.998  1.492779   1.0
            8      2.0    2.0   64.646521  3.003  1.485334   0.0
            9      2.0    3.0   64.646521  1.998  1.322909   1.0
            10     2.0    4.0  126.398131  1.001  1.316311   0.0
            >>> print(portfolio.shares)
                                a    b           c
            2018-01-01  98.654463  1.0   98.654463
            2018-01-02  98.654463  2.0    0.000000
            2018-01-03  98.654463  3.0   64.646521
            2018-01-04  98.654463  4.0    0.000000
            2018-01-05  98.654463  0.0  126.398131
            >>> print(portfolio.cash)
                          a          b             c
            2018-01-01  0.0  97.996498  0.000000e+00
            2018-01-02  0.0  94.989493  1.956188e+02
            2018-01-03  0.0  90.978985  2.842171e-14
            2018-01-04  0.0  87.971980  1.278408e+02
            2018-01-05  0.0  90.957990  0.000000e+00
            ```
        """
        # Get defaults
        if order_price is None:
            order_price = price
        if init_capital is None:
            init_capital = defaults.portfolio['init_capital']
        init_capital = float(init_capital)
        if fees is None:
            fees = defaults.portfolio['fees']
        if fixed_fees is None:
            fixed_fees = defaults.portfolio['fixed_fees']
        if slippage is None:
            slippage = defaults.portfolio['slippage']

        # Perform checks
        checks.assert_type(price, (pd.Series, pd.DataFrame))

        # Broadcast inputs
        price, order_size, order_price, fees, fixed_fees, slippage = \
            reshape_fns.broadcast(price, order_size, order_price, fees, fixed_fees,
                                  slippage, **broadcast_kwargs, writeable=True)

        # Perform calculation
        order_records, cash, shares = nb.simulate_from_orders_nb(
            reshape_fns.to_2d(price, raw=True).shape, init_capital,
            reshape_fns.to_2d(order_size, raw=True),
            reshape_fns.to_2d(order_price, raw=True),
            reshape_fns.to_2d(fees, raw=True),
            reshape_fns.to_2d(fixed_fees, raw=True),
            reshape_fns.to_2d(slippage, raw=True), is_target)

        # Bring to the same meta
        cash = price.vbt.wrap(cash)
        shares = price.vbt.wrap(shares)

        return cls(price, init_capital, order_records, cash, shares, **kwargs)