def _calc_indicator(self, OHLCV_input):
    """Calculate the Average Directional Movement Index (ADX) via the
    TA-Lib wrapper.

    Args:
        OHLCV_input (pandas.DataFrame): frame holding 'high', 'low' and
            'close' columns (Open/Volume are accepted but unused here).

    Returns:
        pandas.DataFrame: a single column named 'ADX<timeperiod>'.
    """
    def column_1d(name):
        # Selecting a column may yield a 2-D ndarray (e.g. duplicate
        # column labels produce a sub-frame); keep only the first
        # sub-column in that case.  A 1-D array raises IndexError on the
        # 2-D slice and is returned as-is.
        values = OHLCV_input[name].values
        try:
            return values[:, 0]
        except IndexError:
            return values

    high = column_1d('high')
    low = column_1d('low')
    close = column_1d('close')

    output = DataFrame(ADX(high, low, close, self.__timeperiod))
    output.columns = ['ADX%d' % self.__timeperiod]
    return output
def calculate_indicators(df):
    """Return the most recent MACD, ADX and DI+/DI- readings for *df*.

    Args:
        df: frame with 'price_close', 'price_high' and 'price_low' columns.

    Returns:
        dict: latest value of each indicator, keyed by '<name>_current'.
    """
    logging.info('Calculating indicators...')

    close = df['price_close']
    high = df['price_high']
    low = df['price_low']

    # MACD with the conventional 12/26/9 parameterisation; the histogram
    # is computed but not reported.
    macd_line, signal_line, _hist = MACD(close, 12, 26, 9)

    period = 14
    trend_strength = ADX(high, low, close, period)
    plus_di = PLUS_DI(high, low, close, period)
    minus_di = MINUS_DI(high, low, close, period)

    # Only the latest reading of each series matters to the caller.
    return {
        'macd_current': macd_line[-1],
        'macd_signal_current': signal_line[-1],
        'adx_current': trend_strength[-1],
        'di_plus_current': plus_di[-1],
        'di_minus_current': minus_di[-1],
    }
def data(self):
    """Refresh cached candle data for every instrument on M15 and H1.

    For each instrument: fetch new candles from the broker API since the
    stored watermark, append them to the cached DataFrame while keeping
    the window length constant, recompute the EMA/SMA/ADX indicator
    columns, and advance the 'since<resolution>' watermark.
    """
    for resolution in ['M15', 'H1']:
        logging.info('data resolution=%s', resolution)
        for instrument in self.instruments:
            from_time = getattr(instrument, 'since' + resolution)
            # On incremental fetches skip the first candle: it is the one
            # the watermark points at and was already stored.
            include_first = None
            if from_time is not None:
                include_first = False
            # Candles
            response = API.instrument.candles(instrument.symbol,
                                              granularity=resolution,
                                              fromTime=from_time,
                                              includeFirst=include_first)
            candles = response.body['candles']
            records = [{
                'time': pd.to_datetime(c.time),
                'open': c.mid.o,
                'high': c.mid.h,
                'low': c.mid.l,
                'close': c.mid.c,
                'volume': c.volume
            } for c in candles]
            if len(records) == 0:
                continue
            data = pd.DataFrame.from_records(records, index='time')
            key = 'data' + resolution
            if getattr(instrument, key) is None:
                setattr(instrument, key, data)
            else:
                # DataFrame.append was removed in pandas 2.0; pd.concat is
                # the supported equivalent.  Dropping the first
                # len(records) rows keeps the rolling window at a
                # constant length.
                setattr(
                    instrument, key,
                    pd.concat([getattr(instrument, key),
                               data])[len(records):])
            close = getattr(instrument, key)['close']
            setattr(
                instrument, key,
                getattr(instrument, key).assign(
                    ema6=EMA(close, timeperiod=6),
                    ema18=EMA(close, timeperiod=18),
                    ema50=EMA(close, timeperiod=50),
                    sma200=SMA(close, timeperiod=200),
                    adx=ADX(getattr(instrument, key)['high'],
                            getattr(instrument, key)['low'],
                            close,
                            timeperiod=14)))
            setattr(instrument, 'since' + resolution, candles[-1].time)
def calcADX(data):
    """Compute the 14-period ADX from a candles payload.

    Args:
        data (dict): response holding data['candles'], where each candle
            exposes mid prices under ['mid']['h'/'l'/'c'].

    Returns:
        pandas.Series: ADX values aligned with the candle sequence.
    """
    highs, lows, closes = [], [], []
    for candle in data['candles']:
        mid = candle['mid']
        highs.append(mid['h'])
        lows.append(mid['l'])
        closes.append(mid['c'])
    # Broker payloads commonly deliver prices as strings; coerce to float
    # so TA-Lib always receives numeric input (a no-op for numeric data).
    high_series = pd.Series(np.asarray(highs, dtype=float))
    low_series = pd.Series(np.asarray(lows, dtype=float))
    close_series = pd.Series(np.asarray(closes, dtype=float))
    return ADX(high_series, low_series, close_series, timeperiod=14)
def handle_data(context, data):
    """Daily rebalance driven by trend strength (ADX) and direction (DI).

    For each symbol: enter a 20% position when DI+ leads DI- and
    ADX >= 25 (strong up-trend); flatten when the trend fades
    (ADX <= 20) or DI+ falls to/below DI-.
    """
    context.hold = {stock: False for stock in context.universe}

    # Request history for the stock
    history = data.history(context.universe, ["high", "low", "close"],
                           context.index_average_window, "1d")

    close_frame = history['close']
    # Iterate every returned symbol instead of the original hard-coded
    # range(5); identical behavior for a 5-stock universe, correct for
    # any other size.  NOTE: the 0.2 target weight still assumes ~5
    # positions.
    for i, symbol in enumerate(close_frame.columns):
        high = np.array(history['high'].iloc[:, i])
        low = np.array(history['low'].iloc[:, i])
        close = np.array(close_frame.iloc[:, i])

        plus = PLUS_DI(high, low, close, timeperiod=14)
        minus = MINUS_DI(high, low, close, timeperiod=14)
        adx = ADX(high, low, close, timeperiod=14)

        if plus[-1] > minus[-1]:
            if adx[-1] >= 25:
                order_target_percent(symbol, 0.2)
                context.hold[symbol] = True
            if adx[-1] <= 20:
                order_target_percent(symbol, 0.0)
        if plus[-1] <= minus[-1]:
            order_target_percent(symbol, 0.0)
def process_col(data, col="", *argv):
    """Compute the indicator named *col* and attach it to *data* in place.

    Args:
        data (pandas.DataFrame): OHLC(V) frame, modified in place.
        col (str): indicator key, e.g. "adx", "macd", "ema".
        *argv: indicator parameters; joined into the column-name suffix
            and forwarded to the indicator function.

    Returns:
        None.  If the target column already exists the call is a no-op.
    """
    params = '_'.join(str(x) for x in argv)
    # Skip recomputation when the column is already present.
    # NOTE(review): this guard does not match the column names produced
    # by the "zero" and "ha" branches -- confirm intent.
    if (col + "_" + params in data.columns):
        return
    # At most one branch can match, so the original independent `if`s are
    # an elif chain (same behavior).  The duplicated "linearreg" branch
    # and the stray LINEARREG_ANGLE import under "atr_stoploss" were
    # removed.
    if (col == "zero"):
        data['zero'] = np.full(len(data), 0)
    elif (col == "atr_risk"):
        from talib import ATR
        data["atr_risk_" + params] = ATR(data['high'].values,
                                         data['low'].values,
                                         data['close'].values,
                                         timeperiod=argv[0])
    elif (col == "macd"):
        from talib import MACD
        data['macd_' + params], data['macd_signal_' + params], data[
            'macd_hist_' + params] = MACD(data['close'],
                                          fastperiod=argv[0],
                                          slowperiod=argv[1],
                                          signalperiod=argv[2])
    elif (col == "rsi"):
        from talib import RSI
        data['rsi_' + params] = RSI(data['close'].values, timeperiod=argv[0])
    elif (col == "adx"):
        from talib import ADX
        data['adx_' + params] = ADX(data['high'].values, data['low'].values,
                                    data['close'].values, timeperiod=argv[0])
    elif (col == "kijunsen"):
        data['kijunsen_' + params] = KIJUNSEN(data['high'], data['low'],
                                              timeperiod=argv[0])
    elif (col == "ema"):
        from talib import EMA
        data['ema_' + params] = EMA(data[argv[0]], timeperiod=argv[1])
    elif (col == "sma"):
        from talib import SMA
        data['sma_' + params] = SMA(data[argv[0]], timeperiod=argv[1])
    elif (col == "hma"):
        data['hma_' + params] = HMA(data[argv[0]], timeperiod=argv[1])
    elif (col == "linearreg"):
        from talib import LINEARREG_ANGLE
        data['linearreg_' + params] = LINEARREG_ANGLE(data[argv[0]],
                                                      timeperiod=argv[1])
    elif (col == "atr_stoploss"):
        # ATR_STOPLOSS is a project helper; only the first returned series
        # is kept.
        data['atr_stoploss_' + params] = ATR_STOPLOSS(
            close=data.close.values,
            high=data.high.values,
            low=data.low.values,
            times=argv[0],
            stop_early_times=argv[1],
            early_stop_profit=argv[2],
            period=argv[3],
            repaint=True)[0]
    elif (col == "atr"):
        from talib import ATR
        data['atr_' + params] = ATR(data['high'].values, data['low'].values,
                                    data['close'].values, timeperiod=argv[0])
    elif (col == "ssl"):
        data["ssl_up_" + params], data["ssl_down_" + params] = SSL(
            data['high'].values,
            data['low'].values,
            data['close'].values,
            timeperiod=argv[0])
    elif (col == "ha"):
        data["ha_open"], data["ha_high"], data["ha_low"], data[
            "ha_close"] = HEIKIN_ASHI(data['open'].values,
                                      data['high'].values,
                                      data['low'].values,
                                      data['close'].values)
    elif (col == "rvi"):
        data["rvi_" + params], data["rvi_signal_" + params] = RVI(
            data['high'].values,
            data['low'].values,
            data['close'].values,
            data['open'].values,
            timeperiod=argv[0])
    elif (col == "waddah"):
        data["waddah_bull_" + params], data["waddah_bear_" + params], data[
            "waddah_explo_" + params], data[
                "waddah_dead_" + params] = WADDAH_ATTAR_EXPLOSION(
                    data['close'].values,
                    data['high'].values,
                    data['low'].values,
                    sensitive=argv[0],
                    fast_period=argv[1],
                    slow_period=argv[2],
                    channel_period=argv[3],
                    channel_mult=argv[4],
                    dead_zone=argv[5])
    elif (col == "ash"):
        # NOTE(review): "timerperiod" looks like a typo for "timeperiod"
        # but is kept -- it must match the ASH helper's actual signature.
        data["ASH_bull_" + params], data["ASH_bear_" + params] = ASH(
            data['close'].values, timerperiod=argv[0], smooth=argv[1])
def pullback_strategy_scan(ticker="WMT"):
    """Scan *ticker* for an oversold pullback setup and label it.

    Pulls daily and 1-minute bars from Alpha Vantage, then flags the
    ticker as OVERSOLD when all of the following hold:
      * latest 10-period ADX > 30,
      * the helper's two-period RSI <= 15,
      * today's low is at least 4% below the previous day's close,
      * today's close ranks in the bottom 25% of the day's 1-min closes.
    Otherwise the ticker is labelled STABLE.

    Returns:
        str: "(<date>) <ticker>: OVERSOLD" or "(<date>) <ticker>: STABLE".

    NOTE(review): relies on Alpha Vantage returning bars newest-first and
    on insertion-ordered dicts -- confirm.  API keys are hard-coded;
    move them to configuration.
    """
    url_prices = 'https://www.alphavantage.co/query?' + urllib.parse.urlencode(
        {
            'interval': 'daily',
            'outputsize': 'compact',
            'function': 'TIME_SERIES_DAILY',
            'symbol': ticker,
            'apikey': 'YSPOO5FANVL57LQ2'
        })
    pre_json_prices = urllib.request.urlopen(url_prices,
                                             context=ctx).read().decode()
    loaded_json_prices = json.loads(
        pre_json_prices)['Time Series (Daily)'].values()
    # Daily closes, assumed newest-first (see NOTE above).
    prices = list(float(price['4. close']) for price in loaded_json_prices)

    # 1. Calculate ADX -- TA-Lib needs oldest-first input, hence the
    # reversals below.
    prices_close = list(reversed(prices))
    prices_high = list(
        reversed(list(float(price['2. high']) for price in loaded_json_prices)))
    prices_low = list(
        reversed(list(float(price['3. low']) for price in loaded_json_prices)))
    latest_ADX = ADX(np.asarray(prices_high),
                     np.asarray(prices_low),
                     np.asarray(prices_close),
                     timeperiod=10).tolist()
    # Flip back to newest-first so index 0 is the latest ADX reading.
    latest_ADX.reverse()

    # 2. Confirm stock's lowest price is at least W% below the previous day's close
    url_prices_intraday = 'https://www.alphavantage.co/query?' + urllib.parse.urlencode(
        {
            'outputsize': 'full',
            'interval': '1min',
            'function': 'TIME_SERIES_INTRADAY',
            'symbol': ticker,
            'apikey': 'N69PE58L8L68YV07'
        })
    pre_json_prices_intraday = urllib.request.urlopen(
        url_prices_intraday, context=ctx).read().decode()
    # Keep only the most recent 390 bars (390 minutes = one US session).
    loaded_json_prices_intraday = list(
        json.loads(pre_json_prices_intraday)
        ['Time Series (1min)'].values())[:390]

    # lowest_price_today
    prices_intraday = [
        float(price['4. close']) for price in loaded_json_prices_intraday
    ]
    lowest_price = min(prices_intraday)

    # close_day_before: index 1 because index 0 is today's (latest) bar
    second_to_last_day_price = prices[1]

    # 3. Check if today's close is in bottom 25% of day's range
    latest_price = prices[0]
    percent_rank = 0
    # NOTE(review): the loop starts at 1 (skips the first intraday bar)
    # and the divisor is a fixed 390 even when fewer bars were returned
    # -- confirm both are intentional.
    for index in range(1, len(prices_intraday)):
        if (latest_price > prices_intraday[index]):
            percent_rank += 1
    percent_rank = percent_rank / 390 * 100

    # 4. ConnorsRSI calculation (delegated to the two_period_rsi helper)
    official_rsi = two_period_rsi(ticker, prices)

    # All four filters must agree; latest_ADX.pop(0) is the newest ADX.
    if (latest_ADX.pop(0) > 30 and official_rsi <= 15
            and second_to_last_day_price * 0.96 >= lowest_price
            and percent_rank <= 25):
        return ('(' + str(datetime.now(tz))[:10] + ') ' + ticker +
                ': OVERSOLD')
    else:
        return ('(' + str(datetime.now(tz))[:10] + ') ' + ticker +
                ': STABLE')
def ADI(df, frequency):
    """Return *df* with a 'ta_adx' column holding the ADX of period
    *frequency*, computed from the high/low/close price columns."""
    raw = ADX(df['high_price'], df['low_price'], df['close_price'],
              timeperiod=frequency)
    return df.join(pd.Series(raw, name='ta_adx'))
def priceTechnicalIndicatorOHLCV(open_price, high_price, low_price,
                                 close_price, volume):
    """Build a feature frame of lagged technical indicators from OHLCV
    series.

    For each lag in {7, 14, 21, 28} the frame gains: the AD-line n-day
    difference, ADX, CCI, RSI, Stochastic %K/%D, Williams %R and the
    volatility ratio.
    """
    features = pd.DataFrame([])
    # TODO: only the most common parameter set is used for the indicators
    lags = [7, 14, 21, 28]

    # accumulation/distribution line, shared across all lags
    ad_line = AD(high_price, low_price, close_price, volume)

    for lag in lags:
        # n-day difference of the AD line
        ad_diff = ad_line.diff(lag)
        ad_diff.name = 'AD_DIFF_%dD' % lag
        features = pd.concat([features, ad_diff], axis=1)

        # Average Directional Movement Index
        adx_col = ADX(high_price, low_price, close_price, lag)
        adx_col.name = 'ADX_%dD' % lag
        features = features.join(adx_col)

        # Commodity Channel Index
        cci_col = CCI(high_price, low_price, close_price, lag)
        cci_col.name = 'CCI_%dD' % lag
        features = features.join(cci_col)

        # Relative Strength Index
        rsi_col = RSI(close_price, lag)
        rsi_col.name = 'RSI_%dD' % lag
        features = features.join(rsi_col)

        # Stochastic oscillator (%K / %D)
        stoch_k, stoch_d = STOCH(high_price, low_price, close_price,
                                 fastk_period=lag, slowk_period=3,
                                 slowd_period=3)
        stoch_k.name = 'STOCH_K_%dD' % lag
        stoch_d.name = 'STOCH_D_%dD' % lag
        features = features.join(stoch_k)
        features = features.join(stoch_d)

        # Williams' %R (column name keeps the original 'WILLER' spelling)
        willr_col = WILLR(high_price, low_price, close_price, lag)
        willr_col.name = 'WILLER_%dD' % lag
        features = features.join(willr_col)

        # volatility ratio
        vr_col = VR(high_price, low_price, close_price, lag)
        vr_col.name = 'VR_%dD' % lag
        features = features.join(vr_col)

    return features
def adx_rank(context, data, symbols, top_rank=5, bot_rank=5, di_window=14):
    '''
    rank symbols using ADX and store top_rank, bot_rank

    :param context: the context for the trading system
    :param data: data for the trading system
    :param symbols: the universe of symbols to trade
    :param top_rank: the number of trending instruments to store
    :param bot_rank: the number of oscillating instruments to store
    :param di_window: the base DI window period (ADX window is * 2)
    :type context: `zipline.algorithm.TradingAlgorithm`
    :type data: `zipline.data.data_portal.DataPortal`
    :type symbols: `list` of ...
    :type top_rank: `int`
    :type bot_rank: `int`
    :type di_window: `int`

    expects the following in `context`:
        i
        fillna
        log

    NOTE(review): the top_rank/bot_rank parameters are shadowed by
    context.sbot['top_rank']/['bot_rank'] when slicing below -- confirm
    which source should win before consolidating.
    '''
    rank = dict()
    c = context.sbot

    # ensure we have enough history
    if context.i < di_window * 2:
        return

    for s in symbols:
        # renamed from `input`, which shadowed the builtin
        bars = _get_data(
            data,
            symbol=s,
            window=di_window * 2,
            freq='1d',
            fields=['high', 'low', 'close'],
            fillna=c['fillna'],
            fillna_limit=c['fillna_limit'],
        )
        if bars is None:
            continue
        try:
            # NOTE(review): ADX runs with its default timeperiod (14)
            # regardless of di_window -- confirm whether
            # timeperiod=di_window was intended.
            adx = ADX(
                np.array(bars['high']),
                np.array(bars['low']),
                np.array(bars['close']),
            )
            if np.isnan(adx[-1]):
                c['log'].warn('adx for %s is NaN' % s)
            else:
                rank[s] = adx[-1]
        except Exception as e:
            # TA-Lib raises a plain Exception for all-NaN input; only
            # that case is tolerated, anything else propagates.
            if 'inputs are all NaN' in str(e):
                c['log'].warn('NaN inputs for %s' % s)
            else:  # pragma: no cover
                raise

    # highest ADX first: head = strongest trenders, tail = oscillators
    c['rank'] = sorted(rank.items(), key=lambda t: t[1], reverse=True)
    c['top'] = c['rank'][:c['top_rank']]
    c['bot'] = c['rank'][-c['bot_rank']:]
    v = (c['top_rank'], c['top'], c['bot_rank'], c['bot'])
    c['log'].info('ranked top %s %s and bot %s %s' % v)
def fetch(self,
          symbol: str,
          interval: Interval = Interval.DAY,
          window: Window = Window.YEAR,
          indicators: Indicators = [],
          verbose=False) -> pd.DataFrame:
    """
    Fetch symbol stock OHLCV from Yahoo Finance API

    Args:
        symbol (str): Symbol to fetch
        interval (Interval, optional): Interval (hour, day, week, ...) of data. Defaults to Interval.DAY.
        window (Window, optional): Length (day, week, month, year) of interval. Defaults to Window.YEAR.
        indicators (Indicators, optional): Array of indicators to include in the result. Defaults to empty array.
        verbose (bool, optional): Print progress information. Defaults to False.

    Returns:
        pd.DataFrame: OHLCV pandas DataFrame with interval on window length and indicators if specified

    NOTE(review): `indicators=[]` is a mutable default argument; harmless
    here only because the list is never mutated -- prefer `None` plus a
    local default.  On a non-200 response or any exception the method
    implicitly returns None; callers must handle that.
    """
    try:
        if verbose:
            print(
                f"Fetching OHLCV {symbol} stock data on {interval.name} interval and {window.name} window"
            )
        # Generic url to fetch
        url = f"https://query1.finance.yahoo.com/v8/finance/chart/{symbol}?region=FR&lang=fr-FR&includePrePost=false&interval={interval.value}&range={window.value}&corsDomain=fr.finance.yahoo.com&.tsrc=finance"
        req = requests.get(url)
        # Testing request status code
        if req.status_code == 200:
            # Extracting data as json
            data = req.json()
            # Creating new DataFrame
            df = pd.DataFrame()
            # Extract date from object: intraday intervals keep the time
            # component, daily and coarser keep only the date.
            # NOTE(review): datetime.utcfromtimestamp is deprecated since
            # Python 3.12 -- migrate to fromtimestamp(dt, tz=UTC).
            if interval in [
                    Interval.MINUTE, Interval.TWO_MINUTE,
                    Interval.FIVE_MINUTE, Interval.FIFTEEN_MINUTE,
                    Interval.THIRTY_MINUTE, Interval.HOUR
            ]:
                dateFromUnix = [
                    datetime.utcfromtimestamp(dt).strftime(
                        "%Y-%m-%d %H:%M:%S")
                    for dt in data["chart"]["result"][0]["timestamp"]
                ]
            else:
                dateFromUnix = [
                    datetime.utcfromtimestamp(dt).strftime("%Y-%m-%d")
                    for dt in data["chart"]["result"][0]["timestamp"]
                ]
            # Date & OHLCV to DataFrame
            df["date"] = pd.to_datetime(dateFromUnix)
            df["open"] = data["chart"]["result"][0]["indicators"]["quote"][
                0]["open"]
            df["high"] = data["chart"]["result"][0]["indicators"]["quote"][
                0]["high"]
            df["low"] = data["chart"]["result"][0]["indicators"]["quote"][
                0]["low"]
            df["close"] = data["chart"]["result"][0]["indicators"][
                "quote"][0]["close"]
            df["volume"] = data["chart"]["result"][0]["indicators"][
                "quote"][0]["volume"]
            # Drop NaN on close col
            df.dropna(subset=["close"], inplace=True)
            # Divide volume column by a 1 000
            df["volume"] = df["volume"].div(1000)
            # Set date column as index
            df.set_index("date", inplace=True)
            # Append each requested indicator as extra column(s).
            for indicator in indicators:
                # ADX
                if indicator == Indicators.ADX:
                    df[indicator.value] = ADX(df["high"], df["low"],
                                              df["close"])
                # BBANDS (upper/middle/lower bands plus a bandwidth
                # percentage and its 5-period moving average)
                elif indicator == Indicators.BBANDS:
                    df[Indicators.BBANDS.value[0]], df[
                        Indicators.BBANDS.value[1]], df[
                            Indicators.BBANDS.value[2]] = BBANDS(
                                df["close"])
                    df["p_band"] = 100 - \
                        (df["l_band"] / df["u_band"] * 100)
                    df["p_band_ma_5"] = MA(df["p_band"], timeperiod=5)
                # EMA -- one column per period encoded in the enum value
                elif indicator == Indicators.EMA:
                    for ema in Indicators.EMA.value:
                        df[ema] = EMA(df["close"],
                                      timeperiod=int(ema.split("_")[1]))
                # MA -- one column per period encoded in the enum value
                elif indicator == Indicators.MA:
                    for ma in Indicators.MA.value:
                        df[ma] = MA(df["close"],
                                    timeperiod=int(ma.split("_")[1]))
                # OBV (scaled down by 1000 like the volume column)
                elif indicator == Indicators.OBV:
                    df[indicator.value] = OBV(df["close"], df["volume"])
                    df[indicator.value] = df[indicator.value].div(1000)
                # PSAR
                elif indicator == Indicators.PSAR:
                    df[indicator.value] = SAR(df["high"], df["low"])
                # PERCENT CHANGE over the period encoded in the enum value
                elif indicator == Indicators.P_CHANGE:
                    for p_change in Indicators.P_CHANGE.value:
                        df[p_change] = df["close"].pct_change(
                            int(p_change.split(" ")[1])) * 100
                # RSI
                elif indicator == Indicators.RSI:
                    df[indicator.value] = RSI(df["close"])
            return df.round(decimals=2)
    except Exception as e:
        # NOTE(review): broad catch-and-print swallows every failure
        # (network, JSON shape, TA-Lib) and returns None -- consider
        # narrowing and logging instead.
        print(e)