Example #1
def qntperf(symbol, name):
    perftable = []
    eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV), PQS,
                           'SERIES', REGEQSERIES)

    ## Price Values
    ## ============================================== ##
    price = [float(row[PQS['CLOSE']]) for row in eqsdb]
    perf1w = round(math.log(price[-1] / price[-5]) *
                   100, 2) if len(price) > 5 else '-'
    perf1m = round(math.log(price[-1] / price[-21]) *
                   100, 2) if len(price) > 21 else '-'
    perf3m = round(math.log(price[-1] / price[-63]) *
                   100, 2) if len(price) > 63 else '-'
    perf6m = round(math.log(price[-1] / price[-126]) *
                   100, 2) if len(price) > 126 else '-'
    perf1y = round(math.log(price[-1] / price[-252]) *
                   100, 2) if len(price) > 252 else '-'
    perf2y = round(math.log(price[-1] / price[-504]) *
                   100, 2) if len(price) > 504 else '-'
    perf4y = round(math.log(price[-1] / price[-1008]) *
                   100, 2) if len(price) > 1008 else '-'

    ## Volatility Values
    ## ============================================== ##
    gain = [float(row[PQS['GAIN']]) for row in eqsdb]
    stdd1w = round(p4fns.sstdd(gain[-5:]) *
                   math.sqrt(252), 2) if len(price) > 5 else '-'
    stdd1m = round(p4fns.sstdd(gain[-21:]) *
                   math.sqrt(252), 2) if len(price) > 21 else '-'
    stdd3m = round(p4fns.sstdd(gain[-63:]) *
                   math.sqrt(252), 2) if len(price) > 63 else '-'
    stdd6m = round(p4fns.sstdd(gain[-126:]) *
                   math.sqrt(252), 2) if len(price) > 126 else '-'
    stdd1y = round(p4fns.sstdd(gain[-252:]) *
                   math.sqrt(252), 2) if len(price) > 252 else '-'
    stdd2y = round(p4fns.sstdd(gain[-504:]) *
                   math.sqrt(252), 2) if len(price) > 504 else '-'
    stdd4y = round(p4fns.sstdd(gain[-1008:]) *
                   math.sqrt(252), 2) if len(price) > 1008 else '-'
    perftable.append([
        symbol, name, perf1w, perf1m, perf3m, perf6m, perf1y, perf2y, perf4y,
        stdd1w, stdd1m, stdd3m, stdd6m, stdd1y, stdd2y, stdd4y
    ])
    p4fns.write_csv(NSETECHDIR + 'NSEPerf' + CSV, perftable, 'a')
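The seven perf/stdd blocks above repeat one formula pair per lookback window (5, 21, 63, 126, 252, 504 and 1008 sessions). A small helper expressing the same calculation once is sketched below; the name horizon_stats is illustrative and not part of the original module, and it assumes p4fns.sstdd is importable just as in the later examples.

import math

import p4fns


def horizon_stats(price, gain, n):
    """Log-return (%) over the last n sessions and annualised sample standard
    deviation of the matching gains, or ('-', '-') when history is too short,
    mirroring the blocks in qntperf above."""
    if len(price) <= n:
        return '-', '-'
    perf = round(math.log(price[-1] / price[-n]) * 100, 2)
    stdd = round(p4fns.sstdd(gain[-n:]) * math.sqrt(252), 2)
    return perf, stdd

# Usage: perf1m, stdd1m = horizon_stats(price, gain, 21)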
Example #2
def qntpair(symbol, period, deltaP, deltaN, rwindow, mwindow, pairlist):
    title = ['PAIR', 'NORM', 'DWSTAT']
    maxper = period + rwindow + mwindow - 1
    table = []

    datadb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV), PQS,
                            'SERIES', REGEQSERIES)[-maxper:]
    pclose = [math.log(float(row[PQS['CLOSE']])) for row in datadb]
    pvwap = [math.log(float(row[PQS['VWAP']])) for row in datadb]
    dsize = len(pclose)
    if (dsize >= rwindow + mwindow + 40):
        for pair in pairlist:
            reffdb = p4fns.read_csv(NSEEQSDBDIR + pair + CSV)[-maxper:]
            pvwapR = [math.log(float(row[PQS['VWAP']])) for row in reffdb]

            regr = p4fns.rolling_regress(pvwap[-dsize:], pvwapR[-dsize:],
                                         rwindow)
            rlen = len(regr)
            error = [
                round((a / b - 1) * 100, 2)
                for a, b in zip(pclose[-rlen:], regr[-rlen:])
            ]
            mu = p4fns.rolling_smean(error, mwindow)
            sg = p4fns.rolling_sstdd(error, mwindow)
            mlen = len(sg)
            error = error[-mlen:]
            normdist = int(
                p4fns.cumnormdist((error[-1] - mu[-1]) / sg[-1]) * 100)
            et_t1 = sum([
                math.pow((error[i] - error[i - 1]), 2) for i in range(1, mlen)
            ])
            et_sq = sum([math.pow(error[i], 2) for i in range(0, mlen)])
            dwstat = round(et_t1 / et_sq, 2)
            table.append([pair, normdist, dwstat])

        p4fns.write_csv(NSEPAIRDIR + symbol + CSV, [title] + table, 'w')
        p4fns.write_json(JSONPAIRDIR + symbol + JSON, [title] + table, [])
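The DWSTAT column written above is the Durbin-Watson statistic of the pair-regression residuals. As a standalone reference, with no p4fns dependency, the same quantity is:

def durbin_watson(resid):
    """Sum of squared first differences of the residuals divided by their sum
    of squares; values near 2 indicate little serial correlation, values near
    0 strong positive serial correlation."""
    num = sum((resid[i] - resid[i - 1]) ** 2 for i in range(1, len(resid)))
    den = sum(e ** 2 for e in resid)
    return round(num / den, 2)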
Example #3
def qnttech(symbol, name):
    techtable = []
    eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV), PQS,
                           'SERIES', REGEQSERIES)

    ## Price Bands
    ## ============================================== ##
    price = [float(row[PQS['CLOSE']]) for row in eqsdb]
    vwap = [float(row[PQS['VWAP']]) for row in eqsdb]
    pb1m = int((price[-1] - min(price[-21:])) /
               (max(price[-21:]) - min(price[-21:])) *
               100) if len(price) > 21 else '-'
    pb3m = int((price[-1] - min(price[-63:])) /
               (max(price[-63:]) - min(price[-63:])) *
               100) if len(price) > 63 else '-'
    pb6m = int((price[-1] - min(price[-126:])) /
               (max(price[-126:]) - min(price[-126:])) *
               100) if len(price) > 126 else '-'
    pb1y = int((price[-1] - min(price[-252:])) /
               (max(price[-252:]) - min(price[-252:])) *
               100) if len(price) > 252 else '-'

    ## Bollinger Bands
    ## ============================================== ##
    dsize = len(price)
    period = [21, 63, 126, 252]
    bb = ['-'] * 4
    for i in range(0, 4):
        if (dsize > period[i] + 1):
            mu = p4fns.rolling_emean(vwap[-(period[i] + 1):], period[i])[-1]
            sg = p4fns.rolling_sstdd(vwap[-(period[i] + 1):], period[i])[-1]
            bb[i] = int(p4fns.cumnormdist((price[-1] - mu) / sg) * 100)

    techtable.append(
        [symbol, name, pb1m, pb3m, pb6m, pb1y, bb[0], bb[1], bb[2], bb[3]])
    p4fns.write_csv(NSETECHDIR + 'NSETech' + CSV, techtable, 'a')
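p4fns.cumnormdist is used above to turn the (price - mean)/stddev z-score into a 0-100 band position. Its source is not shown in these examples; a drop-in stand-in based on the error function, assuming the helper implements the standard normal CDF, would be:

import math


def cumnormdist(z):
    # Standard normal CDF via the error function
    return 0.5 * (1.0 + math.erf(z / math.sqrt(2.0)))

# A close one standard deviation above the rolling mean scores
# int(cumnormdist(1.0) * 100) == 84.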
Example #4
if (mode == 'T') or (mode == 'C') or (mode == 'D'):
    print profit2

# Ownership statistics
# ==============================================================
if (mode == 'T') or (mode == 'C') or (mode == 'D'):
    print 'Hold1 ' + str(round(float(own.count(1)) / len(own) * 100))
    print 'Hold2 ' + str(round(float(own.count(-1)) / len(own) * 100))

result.append([
    symbol1, symbol2, startdate, enddate, dur, mwindow, trades, profit,
    profit1, profit2
])

if (mode == 'W'):
    p4fns.write_csv('RESULTS/temp.csv', result, 'a')
elif (mode == 'C'):
    # Plot
    # ==============================================================
    df = pd.DataFrame()
    df['TIME'] = [dp.parse(row) for row in timeser]
    df['ERR'] = errorP
    df['OWN'] = own
    df[symbol1] = close1
    df[symbol2] = close2
    df['MU'] = muP
    df['PL'] = [muP[i] + deltaP * sgP[i] for i in range(0, tlen)]
    df['NL'] = [muP[i] - deltaN * sgP[i] for i in range(0, tlen)]
    df = df.set_index(['TIME'])
    p8fns.plot3axis(df, ['ERR', 'MU', 'PL', 'NL'], 'OWN', symbol1, symbol2)
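The Hold1/Hold2 prints in the ownership block count how often each position state occurs in own. A collections.Counter version of the same summary is sketched below; the state values 1, -1 and 0 are taken from this example and Example #13.

from collections import Counter


def holding_stats(own):
    """Percentage of observations spent in each position state."""
    counts = Counter(own)
    return {state: round(counts[state] * 100.0 / len(own))
            for state in (1, -1, 0)}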
Example #5
            round((a - b), 2) for a, b in zip(close[-rlen:], predict[-rlen:])
        ]
        sg = p4fns.rolling_sstdd(error, mwindow)
        mu = mu[-dsize:]
        sg = sg[-dsize:]
        eqdata = eqsdb[-dsize:]
        table = []
        for i in range(0, dsize):
            srow = []
            srow.append(eqdata[i][PQS['TIMESTAMP']])
            srow.append(eqdata[i][PQS['CLOSE']])
            srow.append(mu[i])
            srow.append(sg[i])
            table.append(srow)

    p4fns.write_csv(NSEDIR + 'TECHNICAL/CRR/' + symbol + '_NIFTY' + CSV,
                    [title] + table, 'w')

# Correlation between pairs of stocks
# =================================================== ##
#for symbol in cnxlist:
#
#    eqsdb          = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR+symbol+CSV), PQS, 'SERIES', REGEQSERIES)
#    symclose       = [float(row[PQS['CLOSE']]) for row in eqsdb]
#    eqsize         = len(eqsdb)
#    title          = ['SYMBOL', 'CORR126']
#    table          = []
#
#    for pair in [x for x in cnxlist if x != symbol]:
#        pairsdb    = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR+pair+CSV), PQS, 'SERIES', REGEQSERIES)
#        pairclose  = [float(row[PQS['CLOSE']]) for row in pairsdb]
#        pairsize   = len(pairsdb)
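The commented-out correlation block is cut off before the statistic itself is computed, but its CORR126 title points at a 126-session Pearson correlation of the two close series; one way that step could be filled in (an assumption, not recovered code) is:

import math


def pearson_corr(xs, ys):
    """Sample Pearson correlation of two equal-length price lists."""
    n = len(xs)
    mx, my = sum(xs) / n, sum(ys) / n
    cov = sum((x - mx) * (y - my) for x, y in zip(xs, ys))
    vx = sum((x - mx) ** 2 for x in xs)
    vy = sum((y - my) ** 2 for y in ys)
    return cov / math.sqrt(vx * vy)

# corr126 = round(pearson_corr(symclose[-126:], pairclose[-126:]), 2)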
Example #6
def qntgenl(symbol, name, sector, industry, mktcap, mcpercent):
    techtitle = ['SYMBOL', 'PRICE', 'GAIN', 'NAME', 'SECTOR', 'INDUSTRY',
                 'MKT_CAP', 'MC_PERCENT', 'VOLATILITY', 'MAX_VTY', 'MIN_VTY',
                 'VOLUME', 'MAX_VOL', 'MIN_VOL']
    techtable = []
    srow = []
    srow.append(symbol)
    eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV), PQS,
                           'SERIES', REGEQSERIES)

    ## Current Values
    ## ============================================== ##
    curprice = eqsdb[-1][PQS['CLOSE']]
    srow.append(curprice)
    change = round(float(eqsdb[-1][PQS['GAIN']]), 2)
    srow.append(change)
    srow.append(name)
    srow.append(sector)
    srow.append(industry)
    srow.append(mktcap)
    srow.append(mcpercent)

    ## Volatility
    ## ============================================== ##
    if path.isfile(NSEDVSDBDIR + symbol + CSV):
        dvsdb = p4fns.filterdf(
            p4fns.filterdf(
                p4fns.filterdf(p4fns.read_csv(NSEDVSDBDIR + symbol + CSV),
                               PDS, 'INSTRUMENT', ['OPTSTK']),
                PDS, 'TIMESTAMP', [today]),
            PDS, 'T2E', [str(x) for x in range(1, 50)])
        ivlist = [float(row[PDS['IV']]) for row in dvsdb]
        wtlist = [float(row[PDS['VAL_INLAKH']]) for row in dvsdb]
        if sum(wtlist) >= 100:
            avgiv = round(p4fns.wmean(ivlist, wtlist), 2)
        else:
            avgiv = 0
    else:
        avgiv = 0
    eqdata = eqsdb[-756:]
    gain = [float(row[PQS['GAIN']]) for row in eqdata]
    cum_gain = p4fns.rolling_sum(gain, 21)
    rol_stdd = p4fns.rolling_sstdd(cum_gain, 21)
    if (avgiv == 0):
        stdd1m = round(p4fns.sstdd(cum_gain) * math.sqrt(12), 2)
        volatility = stdd1m
    else:
        volatility = avgiv
    max_stdd = max([volatility, round(max(rol_stdd) * math.sqrt(12), 2)])
    min_stdd = min([volatility, round(min(rol_stdd) * math.sqrt(12), 2)])
    srow.append(volatility)
    srow.append(max_stdd)
    srow.append(min_stdd)

    ## Volume
    ## ============================================== ##
    eqdata = eqsdb[-252:]
    turnover = [
        round(float(row[PQS['TURNOVER']]) / 10000000, 2) for row in eqdata
    ]
    volume = p4fns.rolling_emean(turnover, 3)
    max_vol = max(volume)
    min_vol = min(volume)
    srow.append(turnover[-1])
    srow.append(max_vol)
    srow.append(min_vol)

    ## Create JSON File
    ## ============================================== ##
    techtable.append(srow)
    p4fns.write_csv(NSEGENLDIR + symbol + CSV, [techtitle] + techtable, 'w')
    p4fns.write_json(JSONGENLDIR + symbol + JSON, [techtitle] + techtable,
                     TECHCOLTYP)
    genltable = []
    grow = []
    grow.append(symbol)
    grow.append(name)
    grow.append(sector)
    grow.append(industry)
    grow.append(curprice)
    grow.append(change)
    grow.append(mktcap)
    grow.append(turnover[-1])
    grow.append(volatility)
    genltable.append(grow)
    p4fns.write_csv(NSETECHDIR + 'NSEGenl' + CSV, genltable, 'a')
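avgiv above is a turnover-weighted mean of the stock-option implied volatilities (OPTSTK rows with T2E between 1 and 49), gated on a minimum total traded value (sum(wtlist) >= 100). p4fns.wmean itself is not shown in these examples; a weighted mean of that shape would look like the sketch below, an assumption about the helper rather than its actual source.

def wmean(values, weights):
    """Weighted arithmetic mean; here the values are IVs and the weights the
    corresponding VAL_INLAKH turnover figures."""
    total = sum(weights)
    if total == 0:
        return 0.0
    return sum(v * w for v, w in zip(values, weights)) / total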
Example #7
count = 0

for symbol in cnxlist + ixclist:
    count += 1
    print count
    if path.isfile(NSEDVSDBDIR + symbol + CSV):
        dvxdata = [['TIMESTAMP', 'AVGIV']]
        eqsdb = p4fns.filterdf(p4fns.read_csv(NSEEQSDBDIR + symbol + CSV), PQS,
                               'SERIES', REGEQSERIES)

        ## Volatility
        ## ============================================== ##
        dvsdb = p4fns.read_csv(NSEDVSDBDIR + symbol + CSV)

        for row in eqsdb:
            timestamp = row[PQS['TIMESTAMP']]
            dvtdb = p4fns.filterdf(
                p4fns.filterdf(
                    p4fns.filterdf(dvsdb, PDS, 'INSTRUMENT', ['OPTSTK']),
                    PDS, 'TIMESTAMP', [timestamp]),
                PDS, 'T2E', [str(x) for x in range(1, 50)])
            ivlist = [float(drow[PDS['IV']]) for drow in dvtdb]
            wtlist = [float(drow[PDS['VAL_INLAKH']]) for drow in dvtdb]
            if sum(wtlist) >= 100:
                avgiv = round(p4fns.wmean(ivlist, wtlist), 2)
            else:
                avgiv = 0

            dvxdata.append([timestamp, avgiv])

        p4fns.write_csv(NSEDVDIR + 'TECH/' + symbol + CSV, dvxdata, 'w')
Example #8
    catdf = []
    for item in ixclist:
        catdf.append(item + ' [' + item + ']')
    fname = NSEJSONDIR + 'NSEIXCatalog' + JSON
    with open(fname, 'w') as fjson:
        json.dump(catdf, fjson)

    catdf = []
    for row in eqcatdf:
        if row[PCAT['SYMBOL']] in cnxlist:
            catdf.append(row[2] + ' [' + row[1] + ']')
    fname = NSEJSONDIR + 'NSECatalog' + JSON
    with open(fname, 'w') as fjson:
        json.dump(catdf, fjson)

    p4fns.write_csv(NSEQNTLOG, [['CATALOG']], 'a')

## ********************************************************************************************* ##
## Daily Closing Data
## ********************************************************************************************* ##
#if 'DAILY' not in donelist:
#    for symbol in cnxlist:
#        q4fns.qntdaily(symbol)
#    p4fns.write_csv(NSEQNTLOG, [['DAILY']], 'a')

## ********************************************************************************************* ##
## General Information
## ********************************************************************************************* ##
if 'GENL' not in donelist:
    techhover = ['Symbol', 'Company Name', 'Sector', 'Industry',
                 'Closing Price', '% Change', 'Market Capitalization (Rs Cr)',
                 'Volume (Rs Cr)', 'Volatility']
Example #9
#!/usr/bin/env python
from p4defs import *
import p4fns

for logfile in (NSEQNTLOG, IMGDLYLOG, IMGVOLLOG, IMGBOBLOG, IMGAURLOG,
                IMGCRRLOG):
    clearlog = p4fns.readhdr_csv(logfile)
    p4fns.write_csv(logfile, clearlog, 'w')
Example #10
        trow[PDT['EXPIRY_DT']] = dp.parse(rrow[PDR['EXPIRY_DT']],
                                          dayfirst=True).strftime('%Y-%m-%d')
        trow[PDT['STRIKE_PR']] = rrow[PDR['STRIKE_PR']]
        trow[PDT['OPTION_TYP']] = rrow[PDR['OPTION_TYP']]
        trow[PDT['OPEN']] = rrow[PDR['OPEN']]
        trow[PDT['HIGH']] = rrow[PDR['HIGH']]
        trow[PDT['LOW']] = rrow[PDR['LOW']]
        trow[PDT['CLOSE']] = rrow[PDR['CLOSE']]
        trow[PDT['SETTLE_PR']] = rrow[PDR['SETTLE_PR']]
        trow[PDT['CONTRACTS']] = rrow[PDR['CONTRACTS']]
        trow[PDT['VAL_INLAKH']] = rrow[PDR['VAL_INLAKH']]
        trow[PDT['OPEN_INT']] = rrow[PDR['OPEN_INT']]
        trow[PDT['CHG_IN_OI']] = rrow[PDR['CHG_IN_OI']]
        dvdf.append(trow)

p4fns.write_csv(NSEDVTDBFILE, dvdf, 'a')

## ============================================================================================= ##
## NSE Nominal Deriv DB
## ============================================================================================= ##
ixc0df = p4fns.read_csv(NSEIXCatalog)
ixclist = [row[PXC['SYMBOL']] for row in ixc0df]
eqc0df = p4fns.read_csv(NSEEQCatalog)
eqclist = [row[PCAT['SYMBOL']] for row in eqc0df]

## Append the day's values to the Nominal DERIV DB
## ============================================================================================= ##
dvdfsdb = []
dvlist = []
for trow in dvdf:
    srow = [''] * len(PDS)
Example #11
        trow[PQT['SERIES']] = rrow[PQR['SERIES']]
        trow[PQT['OPEN_U']] = rrow[PQR['OPEN_U']]
        trow[PQT['HIGH_U']] = rrow[PQR['HIGH_U']]
        trow[PQT['LOW_U']] = rrow[PQR['LOW_U']]
        trow[PQT['CLOSE_U']] = rrow[PQR['CLOSE_U']]
        trow[PQT['LAST_U']] = rrow[PQR['LAST_U']]
        trow[PQT['PREV_U']] = rrow[PQR['PREV_U']]
        trow[PQT['VOLUME']] = rrow[PQR['VOLUME']]
        trow[PQT['TURNOVER']] = rrow[PQR['TURNOVER']]
        trow[PQT['CONTRACTS']] = rrow[PQR['CONTRACTS']]
        eqdf.append(trow)
eqdlyis = {row[PQT['SYMBOL']]: row[PQT['ISIN']] for row in eqdf}
eqdlysy = {row[PQT['ISIN']]: row[PQT['SYMBOL']] for row in eqdf}
eqdlycp = {row[PQT['SYMBOL']]: row[PQT['CLOSE_U']] for row in eqdf}

p4fns.write_csv(NSEEQTDBFILE, eqdf, 'a')

## Read the Catalog Files
## ============================================================================================= ##
eqc0df = p4fns.read_csv(NSEEQCatalog)
eqi0df = p4fns.read_csv(NSEISINCatalog)

## Update ISINCatalog with ISIN Changes
## ============================================================================================= ##
eqi1df = []
for row in eqc0df:
    if row[PCAT['SYMBOL']] in set(eqdlyis.keys()):
        if row[PCAT['ISIN']] != eqdlyis[row[PCAT['SYMBOL']]]:
            eqi1df.append(
                [row[PCAT['ISIN']], eqdlyis[row[PCAT['SYMBOL']]], timestamp])
eqisdict = {row[PISN['OLDISIN']]: row[PISN['ISIN']] for row in eqi1df}
Example #12
            rrow[PXR['CLOSE']] != '-') else ''
        trow[PXT['PREV']] = "%.2f" % (float(rrow[PXR['CLOSE']]) -
                                      float(rrow[PXR['CHANGE']]))
        trow[PXT['GAIN']] = "%.4f" % (math.log(
            float(rrow[PXR['CLOSE']]) / float(trow[PXT['PREV']])) * 100)
        trow[PXT['VOLUME']] = rrow[PXR['VOLUME']] if (
            rrow[PXR['VOLUME']] != '-') else ''
        trow[PXT['TURNOVER']] = rrow[PXR['TURNOVER']] if (
            rrow[PXR['TURNOVER']] != '-') else ''
        trow[PXT['PE']] = rrow[PXR['PE']] if (rrow[PXR['PE']] != '-') else ''
        trow[PXT['PB']] = rrow[PXR['PB']] if (rrow[PXR['PB']] != '-') else ''
        trow[PXT['DIVYIELD']] = rrow[PXR['DIVYIELD']] if (
            rrow[PXR['DIVYIELD']] != '-') else ''
        ixdf.append(trow)

p4fns.write_csv(NSEIXTDBFILE, ixdf, 'a')

## Append the day's values to the Nominal Index DB
## ============================================================================================= ##
for name in ixclist:
    symbol = ixc0dict[name]
    sdbdf = []
    sdbcsv = NSEIXSDBDIR + symbol + CSV
    newdb = False
    if not path.isfile(sdbcsv):
        sdbdf.append(SYIXCOL)
        newdb = True
    for trow in ixdf:
        if (trow[PXT['SYMBOL']] == symbol):
            srow = [''] * len(PXS)
            srow[PXS['TIMESTAMP']] = trow[PXT['TIMESTAMP']]
Example #13
    print profit0

# Ownership statistics
# ==============================================================
if (mode == 'T') or (mode == 'C') or (mode == 'D'):
    print 'Hold0 ' + str(round(float(own.count(0)) / len(own) * 100))
    print 'Hold1 ' + str(round(float(own.count(1)) / len(own) * 100))
    print 'Hold2 ' + str(round(float(own.count(-1)) / len(own) * 100))

result.append([
    symbolX, symbolF, symbol1, symbol2, symbol0, startdate, enddate, dur,
    rwindow, mwindow, trades, profit, profit1, profit2, profit0
])

if (mode == 'W'):
    p4fns.write_csv('temp.csv', result, 'a')
elif (mode == 'C') or (mode == 'D'):
    # Plot
    # ==============================================================
    df = pd.DataFrame()
    df['TIME'] = [dp.parse(row) for row in timeser]
    df['ERR'] = errorP
    df['OWN'] = own
    df[symbol1] = close1
    df[symbol2] = close2
    df['MU'] = muP
    df['PL'] = [muP[i] + deltaP * sgP[i] for i in range(0, tlen)]
    df['NL'] = [muP[i] - deltaN * sgP[i] for i in range(0, tlen)]
    df = df.set_index(['TIME'])
    p8fns.plot3axis(df, ['ERR', 'MU', 'PL', 'NL'], 'OWN', symbol1, symbol2)
Example #14
                if bonus and origl:
                    ratio = "%.4f" % ((bonus + origl) / origl)
                    bonus0df.append([
                        row[PRCA['SYMBOL']], row[PRCA['FACEVALUE']], ratio,
                        exdate, rcdate, bcdate, 'F'
                    ])
    return bonus0df


bonus0df = p4fns.read_csv(NSEBONUS)
bonus0df = p4fns.symchange(bonus0df, PBON['SYMBOL'], eqschdict)
bonusrdf = p4fns.read_csv(NSERBONUS)
bonusrdf = p4fns.symchange(bonusrdf, PRCA['SYMBOL'], eqschdict)
bonus1df = p4fns.readhdr_csv(NSEBONUS)
bonus1df.extend(genbonus(bonus0df, bonusrdf))
p4fns.write_csv(NSEBONUS, bonus1df, 'w')

#p4fns.write_sql(NSEULDB, SQLNSEBONUS, bonus1df, MBON, BONUSCOL)


## SPLIT
## ============================================================================================= ##
def parsesplit(purpose):
    extract = re.search(
        '([Ff]ace|[Ss]plit|[Ff]rom)[^0-9]+([0-9]+)[^0-9]+([Tt]o|\-)+[^0-9]+([0-9]+)',
        purpose)
    if extract:
        split = float(extract.group(4))
        origl = float(extract.group(2))
    else:
        split = 0
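The fragment is cut off after the else branch, but the regular expression in parsesplit is complete; a quick illustration of what it captures, applied to a made-up purpose string of the usual 'face value split from X to Y' form:

import re

SPLIT_RE = r'([Ff]ace|[Ss]plit|[Ff]rom)[^0-9]+([0-9]+)[^0-9]+([Tt]o|\-)+[^0-9]+([0-9]+)'

extract = re.search(SPLIT_RE, 'Face Value Split From Rs 10 To Rs 2')
if extract:
    origl = float(extract.group(2))  # 10.0, the old face value
    split = float(extract.group(4))  # 2.0, the new face value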