def reshape_iex_types(ticker,types,jdTmp,saveDB,pgDB,rmode,debugTF=False,hdrTF=True,output=None,sep='|',indexTF=False,tsTF=False,clientM=None,period='quarterly'):
    dbscm = 'public'
    dbidx = False
    typy, da = None, {}  # defaults in case no type yields data
    for typx in map(str.strip, types.split(',')):
        try:
            (typy, da) = reshape_iex_typx(ticker, typx, jdTmp, tsTF=tsTF, debugTF=debugTF, period=period)
        except Exception as e:
            sys.stderr.write("**ERROR: {} of {}/{} @ reshape_iex_typx()\n".format(str(e), ticker, typx))
            continue
        if len(da) < 1:
            sys.stderr.write("**No data for {}/{}\n".format(ticker, typx))
            continue
        chart2price(typy, da, saveDB, pgDB, rmode, debugTF=debugTF)
        table1 = "iex_{}_temp".format(typy)
        if debugTF is True:
            sys.stderr.write("debug @ reshape_iex_types():\n{}\n".format(da.tail(2).to_csv(sep='|', index=False)))
        if saveDB is True:
            da.to_sql(table1, pgDB, schema=dbscm, index=dbidx, if_exists=rmode)
            if typx in ['quote', 'peers', 'earnings', 'financials', 'stats']:
                tbhist = "iex_{}_hist".format(typy)
                if typx == 'financials':
                    zpk = {'ticker', 'pbdate', 'freq'}
                elif typx == 'quote':
                    #zpk = {'ticker','latestUpdate'}
                    zpk = {'ticker'}
                else:
                    zpk = {'ticker', 'pbdate'}
                write2mdb(da, clientM=clientM, tablename=tbhist, zpk=zpk)
        else:
            sys.stderr.write("{}\n".format(da.shape))
            df_tofile(sys.stdout, da, output=output, sep=sep, indexTF=indexTF, hdrTF=hdrTF)
    return (da, typy)

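# Illustrative sketch only (assumption): write2mdb() is imported from _alan_str and is not defined
# in this module. The helper below approximates the upsert contract inferred from its call sites
# here -- it returns (result, clientM, err_msg), dedups on the 'zpk' key set, treats zpk={'*'} as
# replace-all, and appends without dedup when insertOnly=True. The real implementation may differ.
def _write2mdb_sketch(jobj, clientM=None, dbname='ara', tablename='temp', zpk=None,
                      insertOnly=False, hostname='localhost'):
    import pandas as pd
    from pymongo import MongoClient
    zpk = zpk or {'_id'}
    if clientM is None:
        clientM = MongoClient(hostname)
    coll = clientM[dbname][tablename]
    # accept a DataFrame, a dict, or a list of dicts
    records = jobj.to_dict(orient='records') if isinstance(jobj, pd.DataFrame) else jobj
    if isinstance(records, dict):
        records = [records]
    err_msg = ''
    try:
        if insertOnly:
            coll.insert_many(records)
        elif '*' in zpk:
            coll.delete_many({})
            coll.insert_many(records)
        else:
            for rec in records:
                flt = {k: rec[k] for k in zpk if k in rec}
                coll.update_one(flt, {'$set': rec}, upsert=True)
    except Exception as e:
        err_msg = str(e)
    return records, clientM, err_msg
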
def spy_component(hostname='localhost', dbname='ara', tablename='spy_component', debugTF=False, **kwargs):
    ''' Update DB table: spy_component
    Get the SPY holdings list on a daily basis
    ref table: spy_list
    ref site: https://us.spdrs.com/site-content/xls/SPY_All_Holdings.xls?fund=SPY&docname=All+Holdings&onyx_code1=&onyx_code2=
    '''
    url = 'https://us.spdrs.com/site-content/xls/SPY_All_Holdings.xls?fund=SPY&docname=All+Holdings&onyx_code1=&onyx_code2='
    if debugTF:
        sys.stderr.write('URL:{url}\n'.format(url=url))
    try:
        df = pd.read_excel(url, index_col=False, header=3)
        if len(df) < 500:
            return {}
        df.dropna(subset=['Sector'], inplace=True)
        df = df.drop(df[df['Identifier'] == 'CASH_USD'].index)
        df = df.reset_index(drop=True)
        df['ticker'] = [s.replace('.', '-') for s in df['Identifier']]
        df.loc[df['ticker'] == 'CCL-U', 'ticker'] = 'CCL'
    except Exception as e:
        sys.stderr.write('**ERROR: spy_component:{}\n'.format(str(e)))
        return {}
    mobj, _, _ = write2mdb(df, clientM=None, dbname=dbname, tablename=tablename, zpk={'*'})
    return df

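# Illustrative usage sketch (assumption): spy_component() needs outbound HTTPS access to the SPDR
# holdings spreadsheet and a reachable MongoDB for write2mdb(); the column selection below is
# only an example of the returned frame.
def _demo_spy_component():
    df = spy_component(dbname='ara', tablename='spy_component', debugTF=True)
    if len(df) > 0:
        # one row per holding, with 'Identifier' normalized into 'ticker' (e.g. 'BRK.B' -> 'BRK-B')
        print(df[['ticker', 'Sector']].head())
    return df
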
def find_stgy(minute_hist, pgDB=None, debugTF=False):
    if 'pbdate' not in minute_hist and 'epochs' in minute_hist:
        minute_hist.loc[:, 'pbdate'] = minute_hist['epochs'].values
    else:
        minute_hist.loc[:, 'epochs'] = [int(x.strftime("%s000")) for x in minute_hist.index]
        minute_hist.loc[:, 'pbdate'] = minute_hist['epochs'].values
    try:
        mxdate = minute_hist.query('signal_buysell_macd!=0')['pbdate'].sort_values(ascending=False).iloc[0]
        minute_latest_macd = minute_hist.query("pbdate>={}".format(mxdate))
    except Exception as e:
        minute_latest_macd = {}
    if debugTF and len(minute_latest_macd) > 0:
        sys.stderr.write("===minute_latest_macd:\n{}\n".format(minute_latest_macd))
    from _alan_pattern import calc_ohlc_pattern, add_MACD_pattern
    minute_ptn = calc_ohlc_pattern(minute_hist)
    minute_ptn = add_MACD_pattern(minute_hist, minute_ptn)
    #-- START CALC PNL
    # Input: minute_hist, minute_ptn
    # Output: minute_pnl
    from _alan_pnl import calc_ohlc_pnl
    minute_pnl = calc_ohlc_pnl(minute_hist, minute_ptn, prd=251, xfl=-1, xcap=1)
    #-- SAVE TO DB
    # mongoDB: ara::minute_pattern, minute_pnl
    from _alan_str import write2mdb
    clientM = None
    mobj_ptn, clientM, msg = write2mdb(minute_ptn, clientM, tablename='minute_pattern', zpk={'ticker', 'name'})
    sys.stderr.write("{} {}\n".format(msg, clientM))
    #mobj_pnl, clientM, msg = write2mdb(minute_pnl, clientM, tablename='minute_pnl', zpk={'ticker','name','pbdate'})
    if pgDB is not None:
        minute_pnl.to_sql('minute_pnl', pgDB, schema='public', index=False, if_exists='replace')
        minute_hist.to_sql('minute_hist', pgDB, schema='public', index=False, if_exists='replace')
    return minute_pnl

def run_eps_nasdaq(dbM, url=None, xdate=None, saveDB=True, tablename=None, wmode='replace', dbname='ara'):
    """ find EPS report via calendar date YYYY-MM-DD
    """
    dv = get_sector_list(xdate, url=url)
    if len(dv) < 1:
        return None
    dg = pd.DataFrame(dv)
    dfyc = adjust_nasdaq(dg)
    if any([saveDB is False, tablename is None]):
        sys.stderr.write(dfyc.to_csv(sep="|"))
    else:
        zpk = {'ticker', 'pbdate'}
        mobj, dbM, err_msg = write2mdb(dfyc, dbM, dbname=dbname, tablename=tablename, zpk=zpk)
        #dfyc.to_sql(tablename, pgDB, index=False, schema='public', if_exists=wmode)
        sys.stderr.write("{}\n...\n{}\n saved to {}:{}\n".format(dfyc.head(1), dfyc.tail(1), dbM, tablename))
    return dfyc

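# Illustrative usage sketch (assumption): pull one day's NASDAQ EPS calendar and upsert it into
# MongoDB. Passing dbM=None lets write2mdb() open its own client; the calendar date and the
# 'earnings_nasdaq' table name are only examples.
def _demo_run_eps_nasdaq():
    dfyc = run_eps_nasdaq(None, xdate='2020-02-27', saveDB=True, tablename='earnings_nasdaq')
    return dfyc
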
def write_lsi2nlg_info(jobj, zpk=['username', 'category', 'tmplname', 'lang'], dbname='ara', tablename='lsi2nlg'):
    mobj, clientM, err_msg = write2mdb(jobj, dbname=dbname, tablename=tablename, zpk=zpk)
    return mobj

def create_headline(dd, ts='', jobj=None, **opts):
    dd['topLst1'] = ast.literal_eval(dd['topLst1']) if hasattr(dd['topLst1'], "__len__") else {}
    dd['topLst2'] = ast.literal_eval(dd['topLst2']) if hasattr(dd['topLst2'], "__len__") else {}
    if jobj is None:
        dd['hiloRecord'] = get_hiloRecord(dd['Index1'], dd['pbdate'])
    else:
        dd['hiloRecord'] = jobj['YTD'] if jobj['YTD'] else {}
    hiloName = dd['hiloRecord']['name'] if 'name' in dd['hiloRecord'] else ''
    sys.stderr.write("{}|{}|".format(dd['pbdate'], hiloName))
    dd.update(pcall=popenCall, ticker2label=ticker2label, iex_peers=iex_peers, jj_fmt=jj_fmt)
    ret = jj_fmt(ts, dd, **opts)
    opts.update(mp3YN=True)
    dbname = 'ara'
    tablename = 'mkt_briefing_details'
    mp3ret = jj_fmt(ts, dd, **opts)
    block = 'HEADLLINE'
    attr = 'EOD'
    cdt = next_date()
    datax = dict(block=block, attr=attr, comment=ret, mp3comment=mp3ret, pbdt=cdt, data={})
    for k, v in dd.items():
        if not hasattr(v, '__call__'):
            datax['data'].update({k: v})
    write2mdb(datax, dbname=dbname, tablename=tablename)
    return ret

def intraday_briefing(args=[], region='US', lang='cn', dirname='templates/', outdir="US/mp3_hourly/", dbname='ara', start=None, mp3YN=False, archiveTest=False, saveDB=True, **optx):
    tablename = 'mkt_briefing'  # default mongoDB table
    from headline_calc import headline_calc
    if 'cdt' not in optx:
        cdt = datetime.datetime.now()
    else:
        cdt = optx['cdt']
    if isinstance(cdt, str):
        cdt = pd.Timestamp(cdt)
    hm = int(cdt.strftime("%H00"))
    end_hm = np.clip(hm, 900, 1600)
    end_hm = get_cutoff_hm(hm=int(hm), region=region)
    dd = headline_calc(eqtLst=None, np=3)
    ts = "{% include 'intraday_briefing.j2' %}"
    ret = jj_fmt(ts, dd=dd, dirname=dirname, outdir=outdir, end_hm=end_hm, **optx)
    title = 'intraday_briefing'
    category = 'IntraDay'
    dd = dict(comment=ret, pbdt=cdt, title=title, hhmm=hm, category=category)
    if saveDB:
        clientM = None
        mobj, clientM, _ = write2mdb(dd, clientM, dbname=dbname, tablename=tablename, zpk=['hhmm', 'category'])
        #tablename = tablename + '_hist'
        #mobj, clientM, _ = write2mdb(dd, clientM, dbname=dbname, tablename=tablename, zpk=['pbdt', 'category'])
    return ret

def func2mdb(tkLst, tablename='iex_spark_hist', dbname='ara', funcN='iex_minute_chart', zpk={'ticker', 'hhmm'}, **optx):
    '''
    Run 'funcN'() and save the result to mongoDB.
    Per the signature above, defaults to funcN='iex_minute_chart' with
    tablename='iex_spark_hist', dbname='ara', zpk={'ticker','hhmm'}
    also see: yh_spark_hist as a supplement for data
    '''
    if funcN in globals():
        funcArg = globals()[funcN]
    else:
        return {}
    df = funcArg(tkLst, **optx)
    if len(df) < 1:
        return {}
    # SAVE TO MDB
    clientM = None
    sys.stderr.write("===Write to:{}:{}:{}\n".format('MDB', dbname, tablename))
    mobj, clientM, err_msg = write2mdb(df, clientM, dbname=dbname, tablename=tablename, zpk=zpk, insertOnly=True)
    #tablename = tablename.replace('_hist', '_temp')
    #zpk = {'ticker'}
    #sys.stderr.write("===Write to {}:{}:{}\n".format(clientM, dbname, tablename))
    #mobj, clientM, err_msg = write2mdb(df, clientM, dbname=dbname, tablename=tablename, zpk=zpk)
    sys.stderr.write("=== finish {} ===\n".format(clientM))
    return mobj

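# Illustrative usage sketch (assumption): dispatch iex_minute_chart() through func2mdb() so the
# minute bars land in ara::iex_spark_hist; requires IEX access plus a reachable MongoDB, and the
# ticker list is only an example.
def _demo_func2mdb():
    mobj = func2mdb(['AAPL', 'MSFT'], tablename='iex_spark_hist', dbname='ara',
                    funcN='iex_minute_chart', zpk={'ticker', 'hhmm'}, range='1d')
    return mobj
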
def bb_predefined(scrIds='most_actives', dbname='ara', saveDB=True, mappingTF=True, mmGap=30, addiFilter=1, debugTF=False, **optx):
    '''
    Return a ticker list based on the combo of the yahoo 'scrIds' list and BB internal tracking tickers
    where
      mappingTF: apply the list only to tickers in the [mapping_ticker_cik] table
      addiFilter: adds an additional query
        1 for 'abs(changePercent)>0.5&price>7.99'
        2 for 'abs(changePercent)>5&price>7.99'
        3 for 'abs(changePercent)>2.5&price>4.99'
        OR a string such as 'abs(changePercent)>2.5&price>4.99'
        defaults to None
    also see: from yh_chart import yh_quote_comparison; help(yh_quote_comparison)
    Note, info is pulled from the DB first if the last update was within 'mmGap' minutes.
    If live data are not available, existing DB info will be used.
    '''
    #from yh_predefined import yh_predefined_query
    tablename = "yh_{}".format(scrIds)
    cdt = s2dt()
    try:
        df, _, _ = find_mdb(dbname=dbname, tablename=tablename, dfTF=True)
        if len(df) > 0 and 'pbdt' in df:
            t1 = df['pbdt'].iloc[0]
            mmPassed = pd.Timedelta(cdt - t1).total_seconds() / 60.0
            if mmPassed > mmGap:  # check if DB info is within the last 'mmGap' (30-minute) window
                df = []
        else:
            df = []
        if len(df) > 0:
            if debugTF:
                sys.stderr.write("===Use {} from MDB:{}\n".format(scrIds, tablename))
            return df
        df = yh_predefined_query(scrIds, dfTF=True)
        if len(df) < 1:  # use whatever is in the DB if live pulling failed
            df, _, _ = find_mdb(dbname=dbname, tablename=tablename, dfTF=True)
            return df
        df['pbdt'] = cdt
        if mappingTF:
            secInfo = sqlQuery('select * from mapping_ticker_cik where act_code=1')
            df = df.loc[df['ticker'].isin(secInfo['ticker'])]
        if addiFilter:
            if addiFilter == 1:
                addiFilter = 'abs(changePercent)>0.5&price>7.99'
            elif addiFilter == 2:
                addiFilter = 'abs(changePercent)>5&price>7.99'
            elif addiFilter == 3:
                addiFilter = 'abs(changePercent)>2.5&price>4.99'
            if debugTF:
                sys.stderr.write("==addiFilter:{}\n".format(addiFilter))
            df = df.query(addiFilter)
        #df['pbdt'] = cdt
        df = df.merge(secInfo[['ticker', 'company_cn', 'sector', 'sector_cn']], on='ticker')
        df.reset_index(drop=True, inplace=True)
        if debugTF:
            sys.stderr.write("{}\n".format(df))
        if saveDB:
            clientM = None
            mobj, clientM, _ = write2mdb(df, clientM, dbname=dbname, tablename=tablename, zpk={'*'})
            sys.stderr.write("Data saved to {}::{}\n".format(clientM, tablename))
    except Exception as e:
        sys.stderr.write("**ERROR: bb_predefined(): {}\n".format(str(e)))
        df = pd.DataFrame()
    return df.iloc[:6]

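# Illustrative usage sketch (assumption): pull the yahoo 'day_gainers' screener, keep only tickers
# tracked in mapping_ticker_cik, apply the default addiFilter, and skip the MongoDB cache write;
# requires both PostgreSQL (sqlQuery) and MongoDB connectivity.
def _demo_bb_predefined():
    df = bb_predefined(scrIds='day_gainers', addiFilter=1, mmGap=30, saveDB=False, debugTF=True)
    return df  # at most 6 rows, merged with company_cn / sector columns
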
dd = []
tablename = 'iex_news_eps'
dbname = 'ara'
#import dateutil.parser
for x in finalvs:
    if x == 'GOOGL':
        x = 'GOOG'
    try:
        #da = eps_news_grabber(x)
        da = {}
    except Exception as e:
        sys.stderr.write("**ERROR 1:{}, {}".format(x, str(e)))
        continue
    try:
        if 'ticker' not in da or 'eps' not in da:
            continue
        da.update(pbdate=pbdate)
        mobj, _, _ = write2mdb([da], clientM, dbname=dbname, tablename=tablename, zpk={'ticker', 'pbdate'})
        dd.append(da)
    except Exception as e:
        sys.stderr.write("**ERROR 2:{}, {}".format(x, str(e)))
        continue
#if dd:
#    df = pd.DataFrame(dd)
#    mobj,_,_ = write2mdb(df,clientM=None,dbname=dbname,tablename=tablename,zpk={'ticker','pbdate'})
#    print(df)

def headline_hist(ndays=2, saveDB=False, end=None, outTF=False, **optx):
    eqtLst = get_eqtLst()
    pgDB = conn2pgdb(dbname='ara')
    # get indices chgPct: idxRtn
    idxLst = ['^GSPC', '^DJI', '^IXIC', '^SOX']
    idxRtnHist = get_stock_hist(idxLst, ndays=ndays, diffTF=True, pgDB=pgDB, end=end)
    if len(idxRtnHist) < 1:
        return []
    # get equity chgPct
    eqtRtnHist = get_stock_hist(eqtLst, ndays=ndays, pgDB=pgDB, end=end)
    if len(eqtRtnHist) < 1:
        return []
    # get publication dates of the first index
    pbLst = idxRtnHist.loc[idxRtnHist['ticker'] == idxLst[0], 'pbdate'].sort_values(ascending=False)[:]
    # redirect stdout
    old_stdout = sys.stdout
    mystdout = StringIO()
    sys.stdout = mystdout
    fwrite = sys.stdout.write
    fwrite("pbdate|Index1|chgPct1|chgLevel1|UpDn1|close1")
    fwrite("|Index2|chgPct2|chgLevel2|UpDn2|close2")
    fwrite("|Index3|chgPct3|chgLevel3|UpDn3|close3")
    fwrite("|Index4|chgPct4|chgLevel4|UpDn4|close4")
    print("|topDate1|topUD1|topLst1|topDate2|topUD2|topLst2|conflict")
    for pbdate in pbLst:
        idxRtn, idxUp, idxDn = find_top_changes(pbdate=pbdate, dx=idxRtnHist)
        eqtRtn, eqtUp, eqtDn = find_top_changes(pbdate=pbdate, dx=eqtRtnHist)
        #print(idxRtn, "\n", eqtUp, "\n", eqtDn)
        if len(idxRtn) < 1:
            sys.stderr.write("skip date: {}\n".format(pbdate))
            continue
        sp500Ret = idxRtn.loc[idxRtn['ticker'] == '^GSPC'].iloc[0].to_dict()
        dowRet = idxRtn.loc[idxRtn['ticker'] == '^DJI'].iloc[0].to_dict()
        nasdaqRet = idxRtn.loc[idxRtn['ticker'] == '^IXIC'].iloc[0].to_dict()
        phlRet = idxRtn.loc[idxRtn['ticker'] == '^SOX'].iloc[0].to_dict()
        if np.isnan(sp500Ret['chgPct']):
            sys.stderr.write("skip date: {}\n".format(pbdate))
            continue
        conflict = 0 if dowRet['sign'] == sp500Ret['sign'] else 1
        eqtLx1 = eqtUp if sp500Ret['sign'] == 1 else eqtDn
        if len(eqtLx1) < 1:
            topLst1 = []
            eqtLead1 = {}
        else:
            dtmp = eqtLx1[['ticker', 'chgPct']].head(3).to_dict(orient='records')
            topLst1 = [{x['ticker']: round(x['chgPct'], 2)} for x in dtmp]
            #topLst1 = ["{!r}:{:.2f}".format(*x) for x in (eqtLx1[['ticker','chgPct']].head(3).values)]
            eqtLead1 = eqtLx1.iloc[0].to_dict()
            eqtLead1['topLst1'] = topLst1
        eqtLx2 = eqtUp if dowRet['sign'] == 1 else eqtDn
        if len(eqtLx2) < 1:
            topLst2 = []
            eqtLead2 = {}
        else:
            dtmp = eqtLx2[['ticker', 'chgPct']].head(3).to_dict(orient='records')
            topLst2 = [{x['ticker']: round(x['chgPct'], 2)} for x in dtmp]
            #topLst2 = ["{!r}:{:.2f}".format(*x) for x in (eqtLx2[['ticker','chgPct']].head(3).values)]
            eqtLead2 = eqtLx2.iloc[0].to_dict()
            eqtLead2['topLst2'] = topLst2
        fwrite("{pbdate}|{ticker}|{chgPct:.2f}|{chgLevel:.0f}|{UpDn}|{close:5g}".format(**sp500Ret))
        fwrite("|{ticker}|{chgPct:.2f}|{chgLevel:.0f}|{UpDn}|{close:5g}".format(**dowRet))
        fwrite("|{ticker}|{chgPct:.2f}|{chgLevel:.0f}|{UpDn}|{close:5g}".format(**nasdaqRet))
        fwrite("|{ticker}|{chgPct:.2f}|{chgLevel:.0f}|{UpDn}|{close:5g}".format(**phlRet))
        if len(eqtLx1) > 0:
            fwrite("|{pbdate}|{UpDn}|{topLst1}".format(**eqtLead1))
        if len(eqtLx2) > 0:
            fwrite("|{pbdate}|{UpDn}|{topLst2}".format(**eqtLead2))
        fwrite("|{conflict}\n".format(conflict=conflict))
    # redirect the stdout to string and convert it to dataframe
    xstr = mystdout.getvalue()
    sys.stdout = old_stdout
    fwrite = sys.stdout.write
    if outTF:
        fwrite(xstr)
    df = pd.read_csv(StringIO(xstr), sep='|')
    from _alan_calc import save2pgdb
    from _alan_str import write2mdb
    if saveDB == True:
        tablename = "headline_hist"
        sys.stderr.write("Save to {}\n".format(tablename))
        #save2pgdb(df, db=pgDB, tablename=tablename)
        clientM = None
        zpk = {"ticker", "pbdate"}
        mobj, clientM, msg = write2mdb(df, clientM, tablename=tablename, zpk=zpk)
    if 'topDict' in optx and optx['topDict'] == True:
        if 'topLst1' in df:
            df['topLst1'] = [ast.literal_eval(x) if hasattr(x, "__len__") else {} for x in df['topLst1']]
        if 'topLst2' in df:
            df['topLst2'] = [ast.literal_eval(x) if hasattr(x, "__len__") else {} for x in df['topLst2']]
        df['chg1'] = df['chgLevel1']
        df['chg2'] = df['chgLevel2']
        df['allUpDn'] = 0
        for j in range(df.shape[0]):
            if all([x > 0 for x in df[['chgPct1', 'chgPct2', 'chgPct3']].iloc[j]]):
                allUpDn = 1
            elif all([x < 0 for x in df[['chgPct1', 'chgPct2', 'chgPct3']].iloc[j]]):
                allUpDn = -1
            else:
                allUpDn = 0
            df.loc[df.index[j], 'allUpDn'] = allUpDn
    return df

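# Illustrative usage sketch (assumption): build the last 5 sessions of the headline history table
# with parsed top-mover lists, without writing to MongoDB; requires the pgDB price history used by
# get_stock_hist().
def _demo_headline_hist():
    df = headline_hist(ndays=5, saveDB=False, outTF=False, topDict=True)
    return df  # one row per pbdate with Index1..Index4 changes plus topLst1/topLst2 dicts
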
def daily_briefing(start=None, region='US', dirname='templates/', outdir="US/mp3_hourly/", dbname='ara', saveDB=True, **optx):
    from headline_writer import generate_headline
    if 'cdt' not in optx:
        cdt = datetime.datetime.now()
    else:
        cdt = optx['cdt']
    if isinstance(cdt, str):
        cdt = pd.Timestamp(cdt)
    if start is None:
        start = cdt.strftime('%Y%m%d')
    sys.stderr.write(" --cdt:{}, start:{}\n".format(cdt, start))
    opts = {
        'lang': 'cn', 'dirname': 'templates', 'end': None, 'nlookback': 1,
        'args': [], 'sep': '|', 'debugTF': False, 'hostname': 'localhost',
        'tablename': None, 'days': 730, 'saveDB': True, 'extraJS': None,
        'j2ts': '{% include "daily_briefing_cn.j2" %}', 'onTheFly': True,
        'output': None, 'narg': 0, 'filename': None, 'extraQS': None,
        'dbname': 'ara', 'mp3YN': False
    }
    optx.pop('args', None)
    hm = int(cdt.strftime("%H00"))  # NOTE: sensitive to crontab timing issues
    hm = get_cutoff_hm(hm=int(hm), region=region)
    if hm < 1600:
        start = sqlQuery(
            "select pbdate from prc_hist where name='AAPL' ORDER BY pbdate DESC limit 1"
        ).iloc[0].values[0]
        start = int(start)
        category = 'SoD'
    else:
        category = 'EoD'
    if not os.path.exists(outdir):
        outdir = './'
    #ret = generate_headline(opts, start=start, outdir=outdir, category=category, rpt_time=cdt, **optx)
    ret = jj_fmt(opts['j2ts'], dirname=dirname, start=start, outdir=outdir, rpt_time=cdt, category=category)
    tablename = 'mkt_briefing'  # default mongoDB table
    title = '{}_briefing'.format(category)
    dd = dict(comment=ret, pbdt=cdt, title=title, hhmm=hm, category=category, rpt_time=cdt)
    if saveDB:
        clientM = None
        mobj, clientM, _ = write2mdb(dd, clientM, dbname=dbname, tablename=tablename, zpk=['hhmm', 'category'])
        tablename = tablename + '_hist'
        mobj, clientM, _ = write2mdb(dd, clientM, dbname=dbname, tablename=tablename, zpk=['pbdt', 'category'])
    return ret

def iex_minute_chart(tkLst=[], filter='', types='chart', range='1d', chartLast=-999, nchunk=100, dfTF=True, debugTF=False, tablename='iex_spark_hist', dbname=None, zpk={'ticker', 'epochs'}, **optx):
    '''
    Pull minute OHLC pricing data from IEX, using marketVolume as volume.
    Note, since market data has a 15-minute delay, the latest 15 marketVolume values come back as 0.
    '''
    from _alan_str import write2mdb
    if not filter:
        filter = 'date,minute,open,high,low,close,changeOverTime,marketVolume'
    jdTmp = iex_batchTypes(tkLst, filter=filter, types=types, range=range, chartLast=chartLast, nchunk=nchunk, **optx)
    if len(jdTmp) < 1:
        return {}
    colX = ["ticker", "open", "high", "low", "close", "volume", "change",
            "changePercent", "epochs", "hhmm", "pbdt", "pbdate"]
    dLst = []
    df = pd.DataFrame()
    clientM = None
    for ticker in tkLst:
        try:
            if ticker not in jdTmp:
                continue
            elif types not in jdTmp[ticker]:
                continue
            elif len(jdTmp[ticker][types]) < 1:
                continue
            jdX = jdTmp[ticker][types]
            dx = json_normalize(jdX)
            dx['ticker'] = ticker
            if '-' in dx['date'].values[0]:
                dx['pbdate'] = [x.replace('-', '') for x in dx['date']]
            else:
                dx['pbdate'] = dx['date']
            if 'minute' in dx:
                dformat = '%Y%m%d%H:%M'
                pbdt = [datetime.datetime.strptime(x + y, dformat) for x, y in zip(dx['pbdate'], dx['minute'])]
                dx['hhmm'] = [x.strftime('%H%M') for x in pbdt]
                dx['epochs'] = [int(x.strftime('%s000')) for x in pbdt]
            dx['pbdate'] = dx['pbdate'].astype(int)
            dx = dx.dropna()
            if len(dx) < 1:
                continue
            #if dx.shape[0] > 1:
            #    dx['changePercent'] = dx['close'].pct_change()
            #    dx['change'] = dx['close'].diff()
            if "marketVolume" in dx:
                dx.rename(columns={"marketVolume": "volume"}, inplace=True)
            if dfTF is False:
                dLst.extend(dx.to_dict(orient='records'))
                if tablename is not None and dbname is not None:
                    sys.stderr.write("===Write to:{}:{}:{}\n".format(ticker, dbname, tablename))
                    mobj, clientM, err_msg = write2mdb(jobj=dx, clientM=clientM, dbname=dbname, tablename=tablename, zpk=zpk, insertOnly=True)
                continue
            colX = [x for x in colX if x in dx.columns]
            dm = dx[colX]
            df = pd.concat([df, dm])
            if debugTF:
                sys.stderr.write("{}\n".format(df.tail(1)))
        except Exception as e:
            sys.stderr.write("**ERROR: {}:{}\n".format(ticker, str(e)))
            continue
    if dfTF:
        df.reset_index(drop=True, inplace=True)
        return df
    else:
        return dLst

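# Illustrative usage sketch (assumption): pull one day of minute bars for a couple of tickers as a
# DataFrame only; no MongoDB write happens because dbname defaults to None.
def _demo_iex_minute_chart():
    df = iex_minute_chart(['AAPL', 'AMZN'], range='1d', debugTF=True)
    return df  # columns include ticker, open/high/low/close, volume, epochs, hhmm, pbdate
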
def record_hilo_tst(opts={}, **optx):
    ## ASSIGN local variables
    args = []
    if len(opts) < 1:
        opts, args = parse_opt(sys.argv)
    opts.update(optx)
    ticker = args[0] if len(args) > 0 and len(args[0]) > 0 else '^GSPC'
    ticker = getKeyVal(opts, 'ticker', ticker)
    debugTF = getKeyVal(opts, 'debugTF', False)
    start, end, days = getKeyVal(opts, ['start', 'end', 'days'], [None, None, 3600])
    tablename = getKeyVal(opts, 'tablename', None)
    funcName = getKeyVal(opts, 'funcName', 'recordHiLo2')
    nlookback = getKeyVal(opts, 'nlookback', 1)
    src = getKeyVal(opts, 'src', 'yh')
    searchDB = getKeyVal(opts, 'searchDB', True)
    colLst = getKeyVal(opts, 'colLst', ['close', 'pchg'])
    colx = getKeyVal(opts, 'colx', 'pchg')
    mthLst = getKeyVal(opts, 'mthLst', [])
    if debugTF == True:
        sys.stderr.write("OPTS:{}\n".format(opts))
    ## ARRANGE additional local variables
    nlookback = -int(nlookback)
    if funcName in globals():
        funcArg = globals()[funcName]
    else:
        funcArg = recordHiLo
    clientM = None
    ## GET DATA
    df = psd(ticker, start=start, end=end, days=days, src=src, searchDB=searchDB, debugTF=debugTF)
    if debugTF == True:
        sys.stderr.write("DF:\n{}\n".format(df.tail()))
    if colx == 'pchg' and colx not in df and 'close' in df:
        df['pchg'] = np.round(df['close'].pct_change(), 4)
    ## LOOPING funcArg for backtest
    jobj = {}
    for xd in df.index[nlookback:]:
        try:
            dx = df.loc[df.index <= xd]
            pbdate = int(xd.strftime('%Y%m%d'))
            jobj = funcArg(dx, endDT=xd, ticker=ticker, colx=colx, colLst=colLst, mthLst=mthLst)
            if debugTF == True:
                sys.stderr.write("{}\n".format(dx.iloc[-1]))
            #if any([jobj['MTD'], jobj['YTD']]):
            if jobj['YTD']:
                sys.stderr.write("{}\n".format(jobj['YTD']))
                if tablename is not None:
                    zpk = {"ticker", "pbdate"}
                    mobj, clientM, msg = write2mdb(jobj, clientM, tablename=tablename, zpk=zpk)
        except Exception as e:
            continue
    return jobj

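# Illustrative usage sketch (assumption): backtest the high/low record function over the last
# 20 sessions of '^GSPC' without saving to MongoDB, using options understood by record_hilo_tst().
def _demo_record_hilo_tst():
    jobj = record_hilo_tst({'ticker': '^GSPC', 'nlookback': 20, 'tablename': None, 'debugTF': True})
    return jobj  # latest YTD/MTD record dict from the last backtest step
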