def qntresult(symbol, resdf):
    ## Result-announcement effect for the last 8 result dates of *symbol*:
    ## the percentage log-return between the mean close of the 21 sessions
    ## before ('be') and after ('fe') each date.  Written as a JSON table
    ## whose header row carries the dates themselves.
    header = ['SYMBOL']
    dates = [dp.parse(r[PRES['TIMESTAMP']]).strftime('%Y-%m-%d')
             for r in resdf if r[PRES['SYMBOL']] == symbol][-8:]
    eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV),
                           PQS, 'SERIES', REGEQSERIES)
    datarow = [symbol]
    for day in dates:
        pre = p4fns.smean([float(r[PQS['CLOSE']])
                           for r in p4fns.blockdf(eqsdb, PQS, day, 21, 'be')])
        post = p4fns.smean([float(r[PQS['CLOSE']])
                            for r in p4fns.blockdf(eqsdb, PQS, day, 21, 'fe')])
        datarow.append(round(math.log(post / pre) * 100, 2))
        header.append(day)
    p4fns.write_json(JSONRESDIR + symbol + JSON, [header, datarow], TECHCOLTYP)
def qntdaily(symbol):
    ## Publish the last 252 equity-series sessions of *symbol* as the
    ## JSON daily-chart feed, keeping the CSV header row in front.
    header = p4fns.readhdr_csv(NSEEQSDBDIR + symbol + CSV)
    recent = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV),
                            PQS, 'SERIES', REGEQSERIES)[-252:]
    outdf = p4fns.rearrange(header + recent, PQS, JSONCOL)
    p4fns.write_json(JSONDLYDIR + symbol + JSON, outdf, EQCOLTYP)
def qntpair(symbol, period, deltaP, deltaN, rwindow, mwindow, pairlist):
    ## Pair-trading statistics of *symbol* against each candidate in
    ## *pairlist*: the normal percentile of the current regression residual
    ## and a Durbin-Watson statistic of the residual series.  Always writes
    ## the CSV/JSON output, even when history is too short for any rows.
    header = ['PAIR', 'NORM', 'DWSTAT']
    span = period + rwindow + mwindow - 1
    rows = []
    basedb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV),
                            PQS, 'SERIES', REGEQSERIES)[-span:]
    logclose = [math.log(float(r[PQS['CLOSE']])) for r in basedb]
    logvwap = [math.log(float(r[PQS['VWAP']])) for r in basedb]
    npts = len(logclose)
    ## Require enough points for the regression and stats windows plus margin.
    if (npts >= rwindow + mwindow + 40):
        for pair in pairlist:
            refdb = p4fns.read_csv(NSEEQSDBDIR + pair + CSV)[-span:]
            logvwap_ref = [math.log(float(r[PQS['VWAP']])) for r in refdb]
            fitted = p4fns.rolling_regress(logvwap[-npts:],
                                           logvwap_ref[-npts:], rwindow)
            nfit = len(fitted)
            ## Residual in percent of the fitted value.
            resid = [round((c / f - 1) * 100, 2)
                     for c, f in zip(logclose[-nfit:], fitted[-nfit:])]
            mu = p4fns.rolling_smean(resid, mwindow)
            sg = p4fns.rolling_sstdd(resid, mwindow)
            nstat = len(sg)
            resid = resid[-nstat:]
            norm = int(p4fns.cumnormdist((resid[-1] - mu[-1]) / sg[-1]) * 100)
            ## Durbin-Watson: sum of squared first differences over sum of squares.
            num = sum(math.pow(resid[i] - resid[i - 1], 2)
                      for i in range(1, nstat))
            den = sum(math.pow(e, 2) for e in resid)
            rows.append([pair, norm, round(num / den, 2)])
    p4fns.write_csv(NSEPAIRDIR + symbol + CSV, [header] + rows, 'w')
    p4fns.write_json(JSONPAIRDIR + symbol + JSON, [header] + rows, [])
def qntperf(symbol, name):
    ## Append one row of trailing performance and volatility figures for
    ## *symbol* to the NSEPerf roll-up CSV.
    ##
    ## Fix: the original spelled out 14 near-identical expressions (one per
    ## window per measure); this is table-driven over the window list, with
    ## an identical output row.  '-' marks windows longer than the history.
    windows = [5, 21, 63, 126, 252, 504, 1008]  # 1w,1m,3m,6m,1y,2y,4y sessions
    eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV),
                           PQS, 'SERIES', REGEQSERIES)
    ## Price Values
    ## ============================================== ##
    price = [float(row[PQS['CLOSE']]) for row in eqsdb]
    n = len(price)
    ## Percentage log-return of the close over each look-back window.
    perfs = [round(math.log(price[-1] / price[-w]) * 100, 2) if n > w else '-'
             for w in windows]
    ## Volatility Values
    ## ============================================== ##
    gain = [float(row[PQS['GAIN']]) for row in eqsdb]
    ## Sample stddev of daily gains over the window, annualised by sqrt(252).
    stdds = [round(p4fns.sstdd(gain[-w:]) * math.sqrt(252), 2) if n > w else '-'
             for w in windows]
    perftable = [[symbol, name] + perfs + stdds]
    p4fns.write_csv(NSETECHDIR + 'NSEPerf' + CSV, perftable, 'a')
def qnttech(symbol, name):
    ## Append one row of technicals for *symbol* to the NSETech roll-up CSV:
    ## price-band position (0-100 within each window's hi-lo range) and a
    ## Bollinger-style normal percentile around the EMA of VWAP.
    ##
    ## Fix: the original divided by (max - min) with no guard, so a window
    ## whose prices are completely flat raised ZeroDivisionError; such
    ## windows now report '-' like insufficient history does.  The four
    ## copy-pasted band expressions are also folded into loops.
    eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV),
                           PQS, 'SERIES', REGEQSERIES)
    ## Price Bands
    ## ============================================== ##
    price = [float(row[PQS['CLOSE']]) for row in eqsdb]
    vwap = [float(row[PQS['VWAP']]) for row in eqsdb]
    dsize = len(price)
    period = [21, 63, 126, 252]  # 1m, 3m, 6m, 1y sessions
    pb = ['-'] * 4
    for i in range(0, 4):
        if dsize > period[i]:
            window = price[-period[i]:]
            lo, hi = min(window), max(window)
            if hi > lo:  # guard: flat window would divide by zero
                pb[i] = int((price[-1] - lo) / (hi - lo) * 100)
    ## Bollinger Bands
    ## ============================================== ##
    bb = ['-'] * 4
    for i in range(0, 4):
        if (dsize > period[i] + 1):
            mu = p4fns.rolling_emean(vwap[-(period[i] + 1):], period[i])[-1]
            sg = p4fns.rolling_sstdd(vwap[-(period[i] + 1):], period[i])[-1]
            bb[i] = int(p4fns.cumnormdist((price[-1] - mu) / sg) * 100)
    techtable = [[symbol, name] + pb + bb]
    p4fns.write_csv(NSETECHDIR + 'NSETech' + CSV, techtable, 'a')
#!/usr/bin/env python
from p4defs import *
import p4fns
import math
import os.path as path

## Working universes: CNX500 constituents plus the two benchmark indices.
cnx500 = p4fns.read_csv(NSEEQDIR + 'CNX500.csv')
cnxlist = [row[PXL['SYMBOL']] for row in cnx500]
ixclist = ['NIFTY', 'BANKNIFTY']

## General: Prices, Turnover, Volatility
## ============================================== ##
## (disabled) per-symbol general-data build, kept for reference:
#for symbol in cnxlist:
#    genldata = [['TIMESTAMP','OPEN','HIGH','LOW','CLOSE','TURNOVER','VOLATILITY']]
#    eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR+symbol+CSV), PQS, 'SERIES', REGEQSERIES)
#
#    if path.isfile(NSEDVDIR+'TECH/'+symbol+CSV):
#        dvsdb = p4fns.read_csv(NSEDVDIR+'TECH/'+symbol+CSV)
#        ivdict = {row[0]:float(row[1]) for row in dvsdb}
#    else:
#        ivdict = {}
#
#    timestamp = [row[PQS['TIMESTAMP']] for row in eqsdb]
#    avgiv = []
#    for i in range(0, len(timestamp)):
#        if timestamp[i] in ivdict:
#            avgiv.append(ivdict[timestamp[i]])
#        else:
#            avgiv.append(0)
#    gain = [float(row[PQS['GAIN']]) for row in eqsdb]
#    cum_gain = p4fns.rolling_sum(gain, 21)
def qntgenl(symbol, name, sector, industry, mktcap, mcpercent):
    ## Build the per-symbol "general information" row (price, volatility,
    ## volume) and publish it to the per-symbol CSV/JSON files plus the
    ## NSEGenl roll-up CSV.
    techtitle = ['SYMBOL', 'PRICE', 'GAIN', 'NAME', 'SECTOR', 'INDUSTRY',
                 'MKT_CAP', 'MC_PERCENT', 'VOLATILITY', 'MAX_VTY', 'MIN_VTY',
                 'VOLUME', 'MAX_VOL', 'MIN_VOL']
    eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV),
                           PQS, 'SERIES', REGEQSERIES)

    ## Current Values
    ## ============================================== ##
    curprice = eqsdb[-1][PQS['CLOSE']]
    change = round(float(eqsdb[-1][PQS['GAIN']]), 2)
    srow = [symbol, curprice, change, name, sector, industry, mktcap,
            mcpercent]

    ## Volatility
    ## ============================================== ##
    ## Prefer today's option-implied volatility (OPTSTK quotes expiring in
    ## 1-49 days) when enough value traded; otherwise fall back to the
    ## realised volatility of rolling 21-day gains.
    avgiv = 0
    if path.isfile(NSEDVSDBDIR + symbol + CSV):
        dvsdb = p4fns.filterdf(
            p4fns.filterdf(
                p4fns.filterdf(p4fns.read_csv(NSEDVSDBDIR + symbol + CSV),
                               PDS, 'INSTRUMENT', ['OPTSTK']),
                PDS, 'TIMESTAMP', [today]),
            PDS, 'T2E', [str(x) for x in range(1, 50)])
        ivlist = [float(row[PDS['IV']]) for row in dvsdb]
        wtlist = [float(row[PDS['VAL_INLAKH']]) for row in dvsdb]
        if sum(wtlist) >= 100:
            avgiv = round(p4fns.wmean(ivlist, wtlist), 2)
    gain = [float(row[PQS['GAIN']]) for row in eqsdb[-756:]]
    cum_gain = p4fns.rolling_sum(gain, 21)
    rol_stdd = p4fns.rolling_sstdd(cum_gain, 21)
    if (avgiv == 0):
        volatility = round(p4fns.sstdd(cum_gain) * math.sqrt(12), 2)
    else:
        volatility = avgiv
    max_stdd = max([volatility, round(max(rol_stdd) * math.sqrt(12), 2)])
    min_stdd = min([volatility, round(min(rol_stdd) * math.sqrt(12), 2)])
    srow.extend([volatility, max_stdd, min_stdd])

    ## Volume
    ## ============================================== ##
    ## Turnover in crores; 3-day EMA smooths the daily series.
    turnover = [round(float(row[PQS['TURNOVER']]) / 10000000, 2)
                for row in eqsdb[-252:]]
    volume = p4fns.rolling_emean(turnover, 3)
    srow.extend([turnover[-1], max(volume), min(volume)])

    ## Create JSON File
    ## ============================================== ##
    techtable = [srow]
    p4fns.write_csv(NSEGENLDIR + symbol + CSV, [techtitle] + techtable, 'w')
    p4fns.write_json(JSONGENLDIR + symbol + JSON, [techtitle] + techtable,
                     TECHCOLTYP)
    genltable = [[symbol, name, sector, industry, curprice, change, mktcap,
                  turnover[-1], volatility]]
    p4fns.write_csv(NSETECHDIR + 'NSEGenl' + CSV, genltable, 'a')
## ============================================================================================= ## ## Date for which database is to be built ## ## ============================================================================================= ## tgtdate = dp.parse(str(sys.argv[1]), dayfirst=True) timestamp = tgtdate.strftime('%Y-%m-%d') rawixfile = NSEIXRAWDIR + timestamp + CSV ## ============================================================================================= ## ## NSE Index DB ## ## ============================================================================================= ## ## Read the Catalog Files ## ============================================================================================= ## ixc0df = p4fns.read_csv(NSEIXCatalog) ixc0dict = {row[PXC['NAME']]: row[PXC['SYMBOL']] for row in ixc0df} ixclist = list(ixc0dict.keys()) ## Append the days values to the Temporal Index DB ## ============================================================================================= ## rawixdf = p4fns.read_csv(rawixfile) ixdf = [] for rrow in rawixdf: trow = [''] * len(PXT) name = rrow[PXR['NAME']] if name in ixclist: trow[PXT['TIMESTAMP']] = timestamp trow[PXT['SYMBOL']] = ixc0dict[rrow[PXR['NAME']]] trow[PXT['OPEN']] = rrow[PXR['OPEN']] if ( rrow[PXR['OPEN']] != '-') else ''
#!/usr/bin/env python
from p4defs import *
import p4fns
import dateutil.parser as dp
import math
import os.path as path

## Column typing for the technicals JSON feeds (S=string, F=float, I=int).
TECHCOLTYP = {
    'SYMBOL': 'S',
    'PRICE': 'F',
    'GAIN': 'F',
    'MKT_CAP': 'I',
    'MC_PERCENT': 'I'
}
## Latest session date, taken from the last row of the NIFTY index series.
today = p4fns.read_csv(NSEIXSDBDIR + 'NIFTY' + CSV)[-1][PXS['TIMESTAMP']]


## ********************************************************************************************* ##
## Daily Closing Data
## ********************************************************************************************* ##
def qntdaily(symbol):
    ## Publish the last 252 equity-series sessions of *symbol* as the
    ## JSON daily-chart feed, keeping the CSV header row in front.
    header = p4fns.readhdr_csv(NSEEQSDBDIR + symbol + CSV)
    recent = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV),
                            PQS, 'SERIES', REGEQSERIES)[-252:]
    outdf = p4fns.rearrange(header + recent, PQS, JSONCOL)
    p4fns.write_json(JSONDLYDIR + symbol + JSON, outdf, EQCOLTYP)


## ********************************************************************************************* ##
## General Information
#!/usr/bin/env python
from p4defs import *
import m4fns
import p4fns

## CNX500 working universe for the chart batch jobs below.
cnx500 = p4fns.read_csv(NSEEQDIR + 'CNX500.csv')
cnxlist = [row[PXL['SYMBOL']] for row in cnx500]

## ============================================================================================= ##
## Daily Candlestick and Volume chart
## ============================================================================================= ##
#donelist = [row[0] for row in p4fns.read_csv(IMGDLYLOG)]
#todolist = [sym for sym in cnxlist if sym not in donelist]
#periods = {'3M':63,'6M':126,'1Y':252,'2Y':504,'4Y':1008}
#for symbol in todolist:
#    m4fns.pltcandle(symbol, periods)
#    p4fns.write_csv(IMGDLYLOG, [[symbol]], 'a')

## ============================================================================================= ##
## Volume and Volatility Bar
## ============================================================================================= ##
#donelist = [row[0] for row in p4fns.read_csv(IMGVOLLOG)]
#todolist = [sym for sym in cnxlist if sym not in donelist]
#for symbol in todolist:
#    m4fns.pltvol(symbol)
#    p4fns.write_csv(IMGVOLLOG, [[symbol]], 'a')

## ============================================================================================= ##
## Bollinger Bands
## ============================================================================================= ##
#donelist = [row[0] for row in p4fns.read_csv(IMGBOBLOG)]
def pltcrosregres(symbol, period, deltaP, deltaN, rwindow, mwindow):
    ## Plot the rolling cross-regression residual of *symbol* against each
    ## reference series (currently just NIFTY): residual line, its rolling
    ## mean, and +deltaP / -deltaN sigma bands.  Saves the figure to disk;
    ## draws nothing when the history is too short.
    ##
    ## Fix: the original called plt.title(symbol, ...) and then immediately
    ## plt.title('StatArb ...', ...) on the same axes with loc='left'; the
    ## first call was dead (overwritten) and has been removed.
    maxper = period + rwindow + mwindow - 1
    datadb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV),
                            PQS, 'SERIES', REGEQSERIES)[-maxper:]
    ptimestamp = [
        date2num(datetime.strptime(row[PQS['TIMESTAMP']], '%Y-%m-%d'))
        for row in datadb
    ]
    pclose = [math.log(float(row[PQS['CLOSE']])) for row in datadb]
    pvwap = [math.log(float(row[PQS['VWAP']])) for row in datadb]
    dsize = len(ptimestamp)
    ## Need enough points for the regression and stats windows plus margin.
    if (dsize >= rwindow + mwindow + 40):
        # pairlist = [row[0] for row in p4fns.read_csv(NSEPAIRDIR+symbol+CSV)]+['NIFTY']
        pairlist = ['NIFTY']
        for pair in pairlist:
            ## Index series carry CLOSE; equity series use VWAP.
            if pair in ixlist:
                reffdb = p4fns.read_csv(NSEIXSDBDIR + pair + CSV)[-maxper:]
                pvwapR = [math.log(float(row[PXS['CLOSE']])) for row in reffdb]
            else:
                reffdb = p4fns.read_csv(NSEEQSDBDIR + pair + CSV)[-maxper:]
                pvwapR = [math.log(float(row[PQS['VWAP']])) for row in reffdb]
            regr = p4fns.rolling_regress(pvwap[-dsize:], pvwapR[-dsize:],
                                         rwindow)
            rlen = len(regr)
            ## Residual in percent of the fitted value.
            error = [
                round((a / b - 1) * 100, 2)
                for a, b in zip(pclose[-rlen:], regr[-rlen:])
            ]
            stimestamp = ptimestamp[-rlen:]
            mu = p4fns.rolling_smean(error, mwindow)
            sg = p4fns.rolling_sstdd(error, mwindow)
            mlen = len(sg)
            error = error[-mlen:]
            stimestamp = stimestamp[-mlen:]
            mu = mu[-mlen:]
            sg = sg[-mlen:]
            upl = [mu[i] + sg[i] * deltaP for i in range(mlen)]
            lwl = [mu[i] - sg[i] * deltaN for i in range(mlen)]
            majorl = MonthLocator()
            xformat = DateFormatter('%b')
            fig = plt.figure(figsize=(6, 3))
            gs = gridspec.GridSpec(1, 1)
            ax1 = plt.subplot(gs[0])
            plt.title('StatArb [' + symbol + ' vs ' + pair + ']',
                      loc='left',
                      color=textc,
                      weight='bold',
                      size='small')
            ax1.xaxis.set_major_locator(majorl)
            ax1.xaxis.set_major_formatter(xformat)
            ax1.yaxis.tick_right()
            ax1.grid(b=True, which='major', color=gridc, linestyle=':')
            ax1.patch.set_facecolor(backc)
            ax1.spines['bottom'].set_color(labelc)
            ax1.spines['top'].set_color(backc)
            ax1.spines['right'].set_color(labelc)
            ax1.spines['left'].set_color(backc)
            ax1.tick_params(axis='x', colors=labelc)
            ax1.tick_params(axis='y', colors=labelc)
            for label in (ax1.get_xticklabels() + ax1.get_yticklabels()):
                label.set_fontsize(6)
            ax1.plot(stimestamp, error, color='deepskyblue', linewidth=1.5)
            ax1.xaxis_date()
            ax1.autoscale_view()
            ax1.set_aspect('auto')
            plt.setp(ax1.get_xticklabels(),
                     horizontalalignment='center',
                     fontsize=8)
            ax2 = plt.subplot(gs[0])
            ax2.plot(stimestamp, mu, color='royalblue', linewidth=1.5)
            ax3 = plt.subplot(gs[0])
            ax3.plot(stimestamp, upl, color='yellowgreen')
            ax4 = plt.subplot(gs[0])
            ax4.plot(stimestamp, lwl, color='orangered')
            plt.figtext(0.94, 0.94,
                        '$\copyright$ piby4.com ' + today,
                        color=sitec,
                        size='xx-small',
                        ha='right')
            gs.tight_layout(fig)
            ## NOTE(review): hard-coded debug output path left as-is; the
            ## intended destination is the commented IMGCRRDIR line below.
            # plt.savefig(IMGCRRDIR+symbol+'_'+pair+'.png', facecolor=(backc))
            plt.savefig('aaa.png', facecolor=(backc))
            plt.close(fig)
#!/usr/bin/env python from p4defs import * import p4fns import math import os.path as path cnxlist = [row[PCAT['SYMBOL']] for row in p4fns.read_csv(NSEEQCatalog)] ixclist = ['NIFTY', 'BANKNIFTY'] count = 0 for symbol in cnxlist + ixclist: count += 1 print count if path.isfile(NSEDVSDBDIR + symbol + CSV): dvxdata = [['TIMESTAMP', 'AVGIV']] eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV), PQS, 'SERIES', REGEQSERIES) ## Volatility ## ============================================== ## dvsdb = p4fns.read_csv(NSEDVSDBDIR + symbol + CSV) for row in eqsdb: timestamp = row[PQS['TIMESTAMP']] dvtdb = p4fns.filterdf(p4fns.filterdf(p4fns.filterdf(dvsdb,\ PDS, 'INSTRUMENT', ['OPTSTK']),\ PDS, 'TIMESTAMP', [timestamp]),\ PDS, 'T2E', [str(x) for x in range(1,50)]) ivlist = [float(row[PDS['IV']]) for row in dvtdb] wtlist = [float(row[PDS['VAL_INLAKH']]) for row in dvtdb] if sum(wtlist) >= 100:
#!/usr/bin/env python from p4defs import * import p4fns import q4fns import json cnx500 = p4fns.read_csv(NSEEQDIR+'CNX500.csv') cnxlist = [row[PXL['SYMBOL']] for row in cnx500] ixclist = ['NIFTY','BANKNIFTY'] catalog = p4fns.read_csv(NSEEQCatalog) eqnamdict = {} eqsecdict = {} eqinddict = {} eqmkcdict = {} eqmkclist = [] for row in catalog: eqnamdict[row[PCAT['SYMBOL']]] = row[PCAT['NAME']] eqsecdict[row[PCAT['SYMBOL']]] = row[PCAT['SECTOR']] eqinddict[row[PCAT['SYMBOL']]] = row[PCAT['INDUSTRY']] mktcap = float(row[PCAT['MKT_CAP']]) if row[PCAT['MKT_CAP']] else 0 eqmkcdict[row[PCAT['SYMBOL']]] = mktcap eqmkclist.append(mktcap) donelist = [row[0] for row in p4fns.read_csv(NSEQNTLOG)] ## ********************************************************************************************* ## ## Catalog ## ********************************************************************************************* ## if 'CATALOG' not in donelist: eqcatdf = p4fns.read_csv(NSEEQCatalog) catdf = [] for item in ixclist:
## ============================================================================================= ## ## Date for which database is to be built ## ## ============================================================================================= ## tgtdate = dp.parse(str(sys.argv[1]), dayfirst=True) timestamp = tgtdate.strftime('%Y-%m-%d') rawdvfile = NSEDVRAWDIR + timestamp + CSV ## ============================================================================================= ## ## NSE Temporal Deriv DB ## ============================================================================================= ## ## Append the days values to the Temporal DERIV DB ## ============================================================================================= ## rawdvdf = p4fns.read_csv(rawdvfile) dvdf = [] for rrow in rawdvdf: if (rrow[PDR['CONTRACTS']] != '0'): trow = [''] * len(PDT) trow[PDT['TIMESTAMP']] = timestamp trow[PDT['INSTRUMENT']] = rrow[PDR['INSTRUMENT']] trow[PDT['SYMBOL']] = rrow[PDR['SYMBOL']] trow[PDT['EXPIRY_DT']] = dp.parse(rrow[PDR['EXPIRY_DT']], dayfirst=True).strftime('%Y-%m-%d') trow[PDT['STRIKE_PR']] = rrow[PDR['STRIKE_PR']] trow[PDT['OPTION_TYP']] = rrow[PDR['OPTION_TYP']] trow[PDT['OPEN']] = rrow[PDR['OPEN']] trow[PDT['HIGH']] = rrow[PDR['HIGH']] trow[PDT['LOW']] = rrow[PDR['LOW']] trow[PDT['CLOSE']] = rrow[PDR['CLOSE']]
## ============================================================================================= ## ## Date for which database is to be built ## ## ============================================================================================= ## tgtdate = dp.parse(str(sys.argv[1]), dayfirst=True) timestamp = tgtdate.strftime('%Y-%m-%d') raweqfile = NSEEQRAWDIR + timestamp + CSV ## ============================================================================================= ## ## NSE Temporal Equity DB ## ============================================================================================= ## ## Append the days values to the Temporal EQUITY DB ## ============================================================================================= ## raweqdf = p4fns.read_csv(raweqfile) eqdf = [] for rrow in raweqdf: if rrow[PQR['SERIES']] in VALEQSERIES: trow = [''] * len(PQT) trow[PQT['TIMESTAMP']] = timestamp trow[PQT['ISIN']] = rrow[PQR['ISIN']] trow[PQT['SYMBOL']] = rrow[PQR['SYMBOL']] trow[PQT['SERIES']] = rrow[PQR['SERIES']] trow[PQT['OPEN_U']] = rrow[PQR['OPEN_U']] trow[PQT['HIGH_U']] = rrow[PQR['HIGH_U']] trow[PQT['LOW_U']] = rrow[PQR['LOW_U']] trow[PQT['CLOSE_U']] = rrow[PQR['CLOSE_U']] trow[PQT['LAST_U']] = rrow[PQR['LAST_U']] trow[PQT['PREV_U']] = rrow[PQR['PREV_U']] trow[PQT['VOLUME']] = rrow[PQR['VOLUME']]
#!/usr/bin/env python # -*- coding: utf-8 -*- from p4defs import * import p4fns import sys mode = str(sys.argv[1]) csvdb = str(sys.argv[2]) ixc0df = p4fns.read_csv(NSEIXCatalog) ixclist = [row[PXC['SYMBOL']] for row in ixc0df] eqc0df = p4fns.read_csv(NSEEQCatalog) eqclist = [row[PCAT['SYMBOL']] for row in eqc0df] if (mode == 'WRITE'): if (csvdb == SQLEQCatalog): inputdf = p4fns.readall_csv(NSEEQCatalog) p4fns.write_sql(NSEULDB, csvdb, inputdf, MCAT, EQCATCOL) elif (csvdb == SQLIXCatalog): inputdf = p4fns.readall_csv(NSEIXCatalog) p4fns.write_sql(NSEULDB, csvdb, inputdf, MXC, IXCATCOL) elif (csvdb in ixclist): inputdf = p4fns.readall_csv(NSEIXSDBDIR + csvdb + CSV) p4fns.write_sql(NSEULDB, csvdb, inputdf, MXS, SYIXCOL) elif (csvdb in eqclist): inputdf = p4fns.readall_csv(NSEEQSDBDIR + csvdb + CSV) p4fns.write_sql(NSEULDB, csvdb, inputdf, MQS, SYEQCOL) elif (csvdb == SQLNSEBONUS): inputdf = p4fns.readall_csv(NSEBONUS)
import sys import math import dateutil.parser as dp import pandas as pd mode = 'T' symbol1 = str(sys.argv[1]) symbol2 = str(sys.argv[2]) mwindow = int(sys.argv[3]) deltaP = float(sys.argv[4]) deltaN = float(sys.argv[5]) years = int(sys.argv[6]) dur = int(sys.argv[7]) * 252 ixlist = ['NIFTY', 'BANKNIFTY'] cnx500 = [row[2] for row in p4fns.read_csv(NSEEQDIR + 'CNX500.csv')] cnx100 = [row[2] for row in p4fns.read_csv(NSEEQDIR + 'CNX100.csv')] cnx50 = [row[2] for row in p4fns.read_csv(NSEEQDIR + 'CNX50.csv')] cnxlist = cnx500 days = 252 * years + mwindow result = [] # ============================================================================================= ## # Bollinger Band # ============================================================================================= ## if symbol1 in cnxlist: datadb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol1 + CSV), PQS, 'SERIES', REGEQSERIES)[-days:] price1 = [math.log(float(row[PQS['CLOSE']])) for row in datadb] vwap1 = [math.log(float(row[PQS['VWAP']])) for row in datadb] instrm1 = 'EQ'
#!/usr/bin/env python # Usage ./nsecadb.py from p4defs import * import p4fns from datetime import datetime import dateutil.parser as dp import re ## ============================================================================================= ## ## General ## ============================================================================================= ## eqschdf = p4fns.read_csv(NSESCHCatalog) eqschdict = {row[PSCH['OLDSYMBOL']]: row[PSCH['SYMBOL']] for row in eqschdf} ## ============================================================================================= ## ## Parsing NSE Corporate Actions and Updating the Corporate Actions Catalogs ## ============================================================================================= ## def cadates(row): try: rcdate = (datetime.strptime(row[PRCA['RECDATE']], '%d-%b-%Y')).strftime('%Y-%m-%d') except: try: rcdate = (datetime.strptime(row[PRCA['BCSTART']], '%d-%b-%Y')).strftime('%Y-%m-%d') except: rcdate = '' try: bcdate = (datetime.strptime(row[PRCA['BCEND']],