def ta_volume_as_time(df: _t.PatchedPandas, volume="Volume"):
    # re-index the data by cumulative traded volume instead of time
    if df.ndim > 1:
        res = df.copy()
        res.index = df[volume].cumsum()
        return res
    else:
        return _pd.Series(df.index, index=df.cumsum())
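# Usage sketch (hypothetical OHLCV frame `prices` with a "Volume" column; names are illustrative, not from the source):
#   vol_clock = ta_volume_as_time(prices)          # bars indexed by cumulative volume ("volume clock")
#   vol_clock = ta_volume_as_time(prices, "vol")   # custom volume column name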
def ta_z_norm(df: Typing.PatchedPandas, period=200, ddof=1, demean=True, lag=0):
    if has_indexed_columns(df):
        return pd.DataFrame(
            {col: ta_z_norm(df[col], period, ddof, demean, lag) for col in df.columns},
            index=df.index
        )

    # (value - mean) / std, divided by 4 so that +/- 4 sigma maps to roughly +/- 1
    s = df.rolling(period).std(ddof=ddof)
    a = (df - df.rolling(period).mean().shift(lag)) if demean else df
    return (a / s / 4).rename(df.name)
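# Usage sketch (hypothetical close-price Series `close`; parameter values are illustrative only):
#   z = ta_z_norm(close, period=100)     # rolling z-score scaled to roughly [-1, 1]
#   z = ta_z_norm(close, demean=False)   # only divide by the rolling standard deviation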
def ta_ncdf_compress(df: Typing.PatchedPandas, period=200, lower_percentile=25, upper_percentile=75) -> Typing.PatchedPandas:
    if has_indexed_columns(df):
        return pd.DataFrame(
            {col: ta_ncdf_compress(df[col], period, lower_percentile, upper_percentile) for col in df.columns},
            index=df.index
        )

    # compress the series into (-0.5, 0.5) via the normal CDF of a percentile-scaled rolling deviation
    f50 = df.rolling(period).mean().rename("f50")
    fup = df.rolling(period).apply(lambda x: np.percentile(x, upper_percentile)).rename("fup")
    flo = df.rolling(period).apply(lambda x: np.percentile(x, lower_percentile)).rename("flo")
    return pd.Series(norm.cdf(0.5 * (df - f50) / (fup - flo)) - 0.5, index=df.index, name=df.name)
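# Usage sketch (hypothetical Series `close`; assumes `norm` is scipy.stats.norm as used in the function above):
#   compressed = ta_ncdf_compress(close, period=100)   # values bounded in (-0.5, 0.5)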
def ta_ewma_covariance(df: Typing.PatchedPandas, convert_to='returns', alpha=0.97):
    data = df.copy()

    if convert_to == 'returns':
        data = df.pct_change()
    elif convert_to == 'log-returns':
        data = _np.log(df) - _np.log(df.shift(1))

    # flatten a possible MultiIndex into plain tuple column labels
    data.columns = data.columns.to_list()

    # note: the decay parameter is passed as the EWM center of mass
    return data.ewm(com=alpha).cov()
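# Usage sketch (hypothetical multi-column price frame `prices`):
#   cov = ta_ewma_covariance(prices)                  # EWMA covariance of simple returns
#   cov = ta_ewma_covariance(prices, 'log-returns')   # EWMA covariance of log returns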
def ta_backtest(signal: Typing.PatchedDataFrame,
                prices: Typing.PatchedPandas,
                action: Callable[[pd.Series], Tuple[int, float]],
                slippage: Callable[[float], float] = lambda _: 0):
    if has_indexed_columns(signal):
        assert len(signal.columns) == len(prices.columns), "Signal and Prices need the same shape!"
        res = pd.DataFrame({}, index=signal.index, columns=pd.MultiIndex.from_product([[], []]))

        # back test each signal/price column pair and stack the results under a MultiIndex
        for i in range(len(signal.columns)):
            df = ta_backtest(signal[signal.columns[i]], prices[prices.columns[i]], action, slippage)
            top_level_name = ",".join(prices.columns[i]) if isinstance(prices.columns[i], tuple) else prices.columns[i]
            df.columns = pd.MultiIndex.from_product([[top_level_name], df.columns.to_list()])
            res = res.join(df)

        return res

    assert isinstance(prices, pd.Series), "prices need to be a series!"
    trades = StreamingTransactionLog()

    def trade_log_action(row):
        direction_amount = action(row)
        if isinstance(direction_amount, tuple):
            trades.perform_action(*direction_amount)
        else:
            trades.rebalance(float(direction_amount))

    # replay the signal row by row into the transaction log, then evaluate it against the prices
    signal.to_frame().apply(trade_log_action, axis=1, raw=True)
    return trades.evaluate(prices.rename("price"), slippage)
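# Usage sketch (hypothetical `weights` signal Series and `prices` Series; the callbacks are assumptions and only
# need to satisfy the contract above: `action` returns either a target weight or an (action, amount) tuple,
# `slippage` maps a price to a cost):
#   result = ta_backtest(weights, prices,
#                        action=lambda row: float(row[0]),
#                        slippage=lambda price: price * 0.001)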
def ta_week(po: _t.PatchedPandas):
    if not isinstance(po.index, _pd.DatetimeIndex):
        df = po.copy()
        df.index = _pd.to_datetime(df.index)
    else:
        df = po

    # ISO calendar week scaled to roughly [0, 1]
    week = df.index.isocalendar().week.astype(float) / 52.0
    return _pd.Series(week.values, index=po.index, name="week")
def ta_sinusoidial_week_day(po: _t.PatchedPandas):
    if not isinstance(po.index, _pd.DatetimeIndex):
        df = po.copy()
        df.index = _pd.to_datetime(df.index)
    else:
        df = po

    # encode the day of week (0=Monday .. 6=Sunday) on a sine wave
    dow = _np.sin(2 * _np.pi * (df.index.dayofweek / 6.0))
    return _pd.Series(dow, index=po.index, name="dow")
def ta_sinusoidal_week(po: _t.PatchedPandas):
    if not isinstance(po.index, _pd.DatetimeIndex):
        df = po.copy()
        df.index = _pd.to_datetime(df.index)
    else:
        df = po

    # encode the ISO calendar week (1..52) on a sine wave
    return _np.sin(2 * _np.pi * (df.index.isocalendar().week / 52.0)).rename("week")
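# Usage sketch (hypothetical DatetimeIndex'ed frame `prices`; combines the calendar helpers above into
# cyclical seasonality features):
#   features = _pd.concat([ta_week(prices), ta_sinusoidial_week_day(prices), ta_sinusoidal_week(prices)], axis=1)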
def ta_inverse_gasf(df: Typing.PatchedPandas):
    if has_indexed_columns(df):
        # todo implement recursion
        pass

    # decode each GASF matrix back to the original (scaled) series via its diagonal
    def gaf_decode(x):
        values = x._.values
        values = values.reshape(values.shape[1:])
        return _np.sqrt(((_np.diag(values) + 1) / 2)).tolist()

    return df.to_frame().apply(gaf_decode, axis=1, result_type='expand')
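# Note (derivation, not from the source): a Gramian Angular Summation Field stores G[i, j] = cos(phi_i + phi_j)
# with x_i = cos(phi_i) for values rescaled to [0, 1]. On the diagonal G[i, i] = cos(2 * phi_i) = 2 * x_i**2 - 1,
# hence x_i = sqrt((G[i, i] + 1) / 2), which is what gaf_decode computes from np.diag(values).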
def ta_mean_returns(df: Typing.PatchedPandas, period=20) -> _PANDAS:
    return _wcs(f"mean_return_{period}", df.pct_change().rolling(period).mean())
def ta_performance(df: Typing.PatchedPandas):
    # cumulative growth of one unit invested
    delta = df.pct_change() + 1
    return delta.cumprod()
def ta_log_returns(df: Typing.PatchedPandas, period=1):
    current = df
    lagged = df.shift(period)

    return _wcs("log_return", np.log(current) - np.log(lagged))
def ta_returns(df: Typing.PatchedPandas, period=1):
    return _wcs("return", df.pct_change(periods=period))
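# Usage sketch (hypothetical close-price Series `close`):
#   r      = ta_returns(close)            # simple 1-period returns
#   log_r  = ta_log_returns(close, 5)     # 5-period log returns
#   mu     = ta_mean_returns(close, 20)   # 20-period rolling mean of returns
#   equity = ta_performance(close)        # cumulative performance of 1 unit invested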