Code example #1
def convertFreqAll_old(freq=ds.freqHelper.minute15):
    from strategy import asset

    futRolTKs = ['DOL', 'IND', 'ES', 'TY']
    #treasury is in fact US/Central tz.
    #both are roughly 24-hour markets, but I am keeping the most liquid session.
    tzs = [
        'America/Sao_Paulo', 'America/Sao_Paulo', 'US/Eastern', 'US/Eastern'
    ]
    #['9:30','16:15'] is the ET session for the big S&P contract
    trading_hours = [['9:00', '18:00'], ['9:00', '18:00'],
                     ['7:30', '16:15'], ['7:30', '16:15']]

    for i, tk in enumerate(futRolTKs):
        md = mds.read_metadata(tk + 'c1', ds.freqHelper.second, mds.assetTS)
        df0 = feeder.get([tk + 'c1', tk + 'c2'],
                         freq=ds.freqHelper.second,
                         date_range=[dt(2000, 1, 1),
                                     dt(2215, 1, 1)],
                         tz=tzs[i])
        a = asset(feederCollection(df0))
        df1 = a._mdf.copy()
        df2 = convertFreq(df1, freq, trading_hours[i])
        df2.md.name = tk + 'r1'
        df2.md.subtype = 'fut_rol'
        df2.md.freq = freq
        mds.write(df2, mds.assetTS)
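convertFreq is internal to this project, so the sketch below is only an approximation of what it does here: filter second bars to the liquid session and aggregate them into 15-minute OHLC bars, in plain pandas with hypothetical data.

import numpy as np
import pandas as pd
from datetime import datetime as dt

# hypothetical second-level closes for one Sao Paulo session
idx = pd.date_range(dt(2020, 1, 2, 8, 0), dt(2020, 1, 2, 19, 0),
                    freq='s', tz='America/Sao_Paulo')
px = pd.DataFrame({'close': 100 + np.random.randn(len(idx)).cumsum()}, index=idx)

# keep only the liquid session, then build 15-minute OHLC bars
session = px.between_time('9:00', '18:00')
bars = session['close'].resample('15min').ohlc().dropna()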
Code example #2
File: reuters2blp.py  Project: royopa/test
def renameDIs():
    from mDataStore.globalMongo import mds
    lst = mds.find(library=mds.assetTS2)

    for l in lst:
        if l[2].name.startswith('OD'):
            # rename OD* to DI1*: write under the new name, then delete the old symbol
            df1 = mds.read(l[2].name, l[2].freq, library=mds.assetTS2)
            df1.md.name = df1.md.name.replace('OD', 'DI1')
            mds.write(df1, library=mds.assetTS2)
            mds.delete(l[2].name, l[2].freq, library=mds.assetTS2)
Code example #3
def fixTSAll():
    from arctic.date import DateRange
    bkLib = mds.arctic.get_library('assetTS_bk')
    names = bkLib.list_symbols()
    names = [nm.replace('_1Second', '') for nm in names if '1Second' in nm]
    assetBase = mds.findAsset(freq=ds.freqHelper.second, library=mds.assetTS)
    date_range0 = [dt(2000, 1, 1, 0, 0, 0), dt.now()]
    dts = pd.date_range(date_range0[0],
                        date_range0[-1],
                        freq='1YS',
                        normalize=False)
    dts = dts.append(pd.DatetimeIndex([date_range0[-1]]))
    dts = dts.tz_localize('GMT')

    for i, dt0 in enumerate(dts[:-1]):
        date_range = [dt0.to_pydatetime(), dts[i + 1].to_pydatetime()]
        for j, name in enumerate(names):
            print('fix {}. {}/{}'.format(name, j, len(names)))
            # if name != 'ESc1':
            #     continue
            try:
                # df1 = mds.read(name,ds.freqHelper.second,mds.assetTS,date_range=date_range)
                sym_id = name + '_' + ds.freqHelper.second
                df = ds.mDataFrame(
                    bkLib.read(sym_id,
                               date_range=DateRange(date_range[0],
                                                    date_range[1])))
                meta1 = bkLib.read_metadata(sym_id)
                df.md = locate(meta1['cls'])(meta1)

                # Hack: converting twice. The tz adjustment stored in the base is
                # wrong; the base needs to be rebuilt.
                tzG = pytz.timezone('GMT')
                df.index = df.index.tz_convert(tzG)
                df.index = df.index.tz_localize(
                    None, ambiguous='infer', errors='coerce').tz_localize(
                        pytz.timezone('America/Sao_Paulo'),
                        ambiguous='infer',
                        errors='coerce')
                df = df.loc[df.index.notnull()]
                df.index = df.index.tz_convert(tzG)

            except Exception:
                print('unable to read {} - dt {}->{}'.format(
                    name, date_range[0], date_range[1]))
                continue
            mds.write(df, mds.testTS)
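The errors='coerce' argument to tz_localize was removed in later pandas releases. Under a modern pandas (an assumption; this repo may pin an older version), the same strip-and-relocalize repair would read:

idx = df.index.tz_convert('GMT').tz_localize(None)
# NaT out times that are ambiguous or nonexistent in the Sao Paulo calendar
idx = idx.tz_localize('America/Sao_Paulo', ambiguous='NaT', nonexistent='NaT')
df = df.loc[idx.notnull()]
df.index = idx[idx.notnull()].tz_convert('GMT')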
Code example #4
File: reuters2blp.py  Project: royopa/test
def cp2onlineVS():
    from mDataStore.globalMongo import mds

    ms = mds.find(library=mds.assetVS)

    for i, m in enumerate(ms):
        nm = m[2].name
        print('Copying {} - {}/{}'.format(nm, i, len(ms)))
        if m[2].freq == '1BDay':
            df = mds.read(nm, '1BDay', mds.assetVS,
                          date_range=[dt(1990, 1, 1), dt(2035, 1, 1)])
            try:
                mds.delete(nm, '1BDay', library=mds.onlineVS)
            except Exception:
                pass  # nothing to delete on the first copy

            mds.write(df, library=mds.onlineVS, check_metadata=False)
Code example #5
    def updateCDI(self):
        df = mds.read('CDI', '1BDay', library=mds.assetVS)
        dt0 = dt.now().replace(hour=0, minute=0, second=0, microsecond=0)

        cdi0 = self.blp.getRefData(['BZDIOVRA Index'],
                                   ['px_last']).values[0] / 100

        cdi0 = (1 + cdi0)**(1 / 252) - 1

        j = df.index.get_loc(dt0, method='ffill')

        # If today's row already exists, yesterday's CDI sits one row back;
        # otherwise the ffill hit is the last published value.
        if df.index[j] == dt0:
            cdi_tm1 = df.close[j - 1]
        else:
            cdi_tm1 = df.close[j]

        df.loc[dt0] = cdi_tm1 * (1 + cdi0)

        mds.delete(df.md.name, df.md.freq, library=mds.onlineVS)
        mds.write(df, library=mds.onlineVS, check_metadata=False)
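The rate arithmetic above follows Brazil's 252-business-day convention: BZDIOVRA is an annualized overnight rate, so one day of accrual is (1 + r)**(1/252) - 1. A standalone check with a hypothetical quote:

annual = 0.1365                          # hypothetical CDI quote, 13.65% p.a.
daily = (1 + annual) ** (1 / 252) - 1    # one business day of accrual
index_today = 100.0 * (1 + daily)        # roll an index level forward one day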
Code example #6
File: reuters2blp.py  Project: royopa/test
def cp2blp(librarySource=mds.assetTS, libraryDest=mds.assetTS2):
    # from mDataStore.bloomberg import get_srs_meta
    # if srsMeta is None:
    #     srsMeta = get_srs_meta()

    nms, nmsBlp = cnvAllReutersTickers(library=librarySource)

    #k=[i for i,nm in enumerate(nms) if nm =='ESc1'][0]

    for i, nm in enumerate(nms):
        print('Copying {} - {}/{}'.format(nm, i, len(nms)))
        # if not nmsBlp[i].startswith('DI1'):
        #     continue
        df1 = mds.read(nm, ds.freqHelper.minute, librarySource,
                       date_range=[dt(1990, 1, 1), dt(2030, 1, 1)])
        df1.md.name = nmsBlp[i]
        try:
            mds.delete(df1.md.name, df1.md.freq, library=libraryDest)
        except Exception:
            pass  # nothing to delete on the first copy
        mds.write(df1, library=libraryDest)
Code example #7
File: economatica.py  Project: royopa/test
def insertAllPX():
    from mDataStore.globalMongo import mds
    from mDataStore.mongo import mDataFrame, metadataAsset
    files = ['PX_NOT_ADJ.txt', 'PX_ADJ.txt']
    suffixName = ['_E_nadj', '_E']
    subtype = ['equity_nadj', 'equity']
    dtFormat = '%d/%m/%Y'

    origCols = [
        'Data', 'Q Negs', 'Q Títs', 'Volume$', 'Fechamento', 'Abertura',
        'Mínimo', 'Máximo', 'Médio', 'code'
    ]
    newCols = [
        'dt', 'neg', 'volume', 'volume_fin', 'close', 'open', 'low', 'high',
        'vwap', 'code'
    ]
    # need to confirm 'Médio' is vwap
    mapCols = {k: newCols[i] for i, k in enumerate(origCols)}

    for i, f in enumerate(files):
        df = readEconCSVPX(f)
        #df.rename(mapCols[i],inplace=True)
        df.columns = newCols
        df.dt = pd.to_datetime(df.dt, format=dtFormat)
        df.set_index('dt', inplace=True)
        ucode = unique(df.code)
        for n, code in enumerate(ucode):
            print('n: {}/{}'.format(n, len(ucode)))
            df1 = df.loc[df.code == code].copy()  # copy so deleting 'code' doesn't touch a view
            del df1['code']
            df1 = mDataFrame(df1)
            df1.md = metadataAsset(code + suffixName[i],
                                   'equity',
                                   stDT=df1.index[0])
            df1.md.subtype = subtype[i]
            df1.md.source = 'economatica'

            mds.write(df1,
                      check_metadata=False,
                      prune_previous_version=True,
                      library=mds.assetVS)
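Assigning df.columns = newCols relies on the file keeping its column order. A label-based rename using the mapCols dict built above would survive reordered or partial files (a sketch, not what the repo does):

df = df.rename(columns=mapCols)  # maps the original Economatica headers to the short names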
Code example #8
def insert2DB(serial, df, library1, mds_or_addr, overwrite, prune_previous_version, dti, dtm1, dtm3, keep_metadata=False):

    # stDT=None,endDT=None, currency='BRL', fut_like=False,maturity=None
    # from mDataStore.globalMongo import mds

    from mDataStore.mongo import mongoDS,metadataAsset,mDataFrame

    if serial:
        mds = mds_or_addr
    else:
        from dask.distributed import get_worker
        worker = get_worker()
        if not hasattr(worker, 'mds'):
            worker.mds = mongoDS(mds_or_addr)
        mds = worker.mds

    # removal of success flag (unnecessary) - sxan-20190926

    if df.shape[0] == 0:
        warn(df.md.name + ': No data')
        mds.mongoCli.db_dws.mgmt.metadata.remove(dict(name=df.md.name,library=library1))
        return True

    # library1=df['library']
    # library1=df.md.pop('library')
    if overwrite:
        # drop columns that are entirely NaN before a full overwrite
        for s in df.columns:
            if df[s].dtype in [np.float32, np.float64] and np.all(np.isnan(df[s])):
                del df[s]

        mds.write(df, library=library1, check_metadata=False, prune_previous_version=prune_previous_version)
        return True

    else:
        return mds.appendIfCheck(df, date_range=[dti, dtm1], library=library1, check_metadata=False,
                                     prune_previous_version=prune_previous_version, replaceIntersection=True,
                                     lastChkDate=dtm3,keep_metadata=keep_metadata
                                 )
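The get_worker() branch above is a common dask pattern: build one expensive connection per worker process and reuse it across tasks instead of reconnecting every time. A generic sketch of that pattern (worker_cached and its factory argument are hypothetical names):

from dask.distributed import get_worker

def worker_cached(attr, factory):
    """Build factory() once per dask worker and reuse it across tasks."""
    worker = get_worker()
    if not hasattr(worker, attr):
        setattr(worker, attr, factory())
    return getattr(worker, attr)

# usage inside a task:
# mds = worker_cached('mds', lambda: mongoDS(mds_or_addr))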
Code example #9
def bloombergUpdateFutureLastDates(meta):

    # assert srs_meta is not None

    from mDataStore.mongo import metadataFundamental
    from mDataStore.globalMongo import mds
    from mDataStore.mongo import mDataFrame

    # tb = srs_meta['intra_table']
    # tb = tb[tb.subtype == 'fut_rol']

    meta1 = [m for m in meta if m['subtype'] == 'fut_rol' and 'feeder_id' in m]

    for m in tqdm(meta1):

        df1 = blp.getRefData([m['feeder_id']], ['FUT_CHAIN_LAST_TRADE_DATES'],
                             {"INCLUDE_EXPIRED_CONTRACTS": "Y"})
        if df1.shape[0] == 0:
            continue
        # the bulk field comes back as a nested array; its first row is the header
        aa = np.array(df1.iloc[0, 0])
        df_ = mDataFrame(aa[1:], columns=aa[0])
        df_.md = metadataFundamental(m['name'].lower() + '_dates', type='futureLastTradeDate')

        mds.write(df_, library=mds.fundamentalVS, check_metadata=False, prune_previous_version=True)
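The aa[1:], columns=aa[0] reshape promotes the first row of Bloomberg's nested bulk-field result to a header row. The same pattern with hypothetical data:

import numpy as np
import pandas as pd

raw = np.array([['ticker', 'last_trade'],        # header row
                ['ESH20 Index', '2020-03-20'],
                ['ESM20 Index', '2020-06-19']])
df = pd.DataFrame(raw[1:], columns=raw[0])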
Code example #10
def dfs2mongo(dfs):
    indTyp = array([d.index.values[0][-1].__class__.__name__ for d in dfs])
    uTyp = unique(indTyp)
    k = 0
    for typ in uTyp:
        I = indTyp == typ
        dfA = pd.concat(array(dfs)[I], axis=0)

        uCat = dfA.index.unique('category')
        uName = dfA.index.unique('name')
        uFreq = dfA.index.unique('freq')
        uStat = dfA.index.unique('mstat')
        for cat in uCat:
            for name in uName:
                for freq in uFreq:
                    # for mstat in uStat:
                    try:
                        df = dfA[cat, name, freq]
                    except KeyError:
                        continue  # this (category, name, freq) combination doesn't exist
                    print('writing {}/{}'.format(k, dfA.index.shape[0]))
                    df = df[~df.index.duplicated(keep='first')]
                    df = df.unstack(level='mstat')
                    nm1 = ('BCB_exp_' + cat + '_' + name + '_' + freq).encode(
                        'ascii', errors='ignore').decode("utf-8")
                    df.md = ds.metadataFundamental(nm1,
                                                   type='expectation',
                                                   source='bcb',
                                                   stDT=df.index[0][0],
                                                   category=cat,
                                                   subname=name,
                                                   expfreq=freq)
                    mds.write(df,
                              library=mds.testVS,
                              check_metadata=False,
                              prune_previous_version=True)
                    k = k + 1
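The nested loops plus try/except skip combinations that don't exist in the index; a groupby over the MultiIndex levels expresses the same iteration directly (a sketch, assuming the level names used above):

for (cat, name, freq), df in dfA.groupby(level=['category', 'name', 'freq']):
    df = df[~df.index.duplicated(keep='first')].unstack(level='mstat')
    # ...build metadata and mds.write as above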
Code example #11
def convertOne(i,
               assetBase,
               date_range,
               freq=ds.freqHelper.minute,
               freqBase=ds.freqHelper.second,
               firstRun=False):
    from mDataStore.globalMongo import mds
    name = assetBase.name[i]

    #################### REMOVE THIS ##########################
    # if firstRun:
    #     try:
    #         mds.delete(name, freq, mds.assetTS)
    #     except:
    #         pass
    ##########################################################

    print('starting {} . {}/{}'.format(name, i, len(assetBase.name)))
    try:
        df1 = mds.read(name, freq, mds.assetTS, date_range=date_range)
        print('Already done!')
        return
    except Exception:
        pass  # not converted yet; fall through and convert

    try:
        df = mds.read(name,
                      freqBase,
                      mds.assetTS,
                      date_range=date_range,
                      tz='GMT')
    except Exception:
        print('unable to read {} - dt {}->{}'.format(name, date_range[0],
                                                     date_range[1]))
        return

    df1 = convertFreq(df, freq)
    mds.write(df1, mds.assetTS)
Code example #12
File: economatica.py  Project: royopa/test
def insertALLFundamental():
    '''
    For the column correspondence, see campos_economatica in the economatica folder (surf).
    :return:
    '''
    from .globalMongo import mds
    from .mongo import mDataFrame, metadataFundamental
    files = ['BAL.txt', 'INDF.txt', 'INDM.txt', 'SI.txt']
    dtFormat = [None, None, '%d/%m/%Y', '%d/%m/%Y']
    suffixName = ['_BAL', '_INDF', '_INDM', '_SI']
    subtype = ['bal', 'indf', 'indm', 'si']
    cols = []
    newCols = []

    cols.append([
        'Data', 'Qtd Ações|Outstanding|da empresa',
        'Lucro Liquido| Em moeda orig| no exercício| consolid:sim*',
        'EBIT| Em moeda orig| no exercício| consolid:sim*',
        'DeprecAmorExaus| Em moeda orig| no exercício| consolid:sim*',
        'AumLiqCap| Em moeda orig| no exercício| consolid:sim*',
        'ReInFi| Em moeda orig| no exercício| consolid:sim*',
        'Receita| Em moeda orig| no exercício| consolid:sim*',
        'Ativo Tot| Em moeda orig| consolid:sim*',
        'Aum Cap| Em moeda orig| no exercício| consolid:sim*',
        'CaixaEEqCx| Em moeda orig| consolid:sim*',
        'AtvCir| Em moeda orig| consolid:sim*',
        'PasCir| Em moeda orig| consolid:sim*',
        'Patrim Liq| Em moeda orig| consolid:sim*',
        'TotEmFiLP| Em moeda orig| consolid:sim*',
        'DbntLP| Em moeda orig| consolid:sim*',
        'FinLP| Em moeda orig| consolid:sim*',
        'Imobil| Em moeda orig| consolid:sim*',
        'Pas+PL| Em moeda orig| consolid:sim*',
        'AmDesAgi| Em moeda orig| no exercício| consolid:sim*',
        'CxOper| Em moeda orig| no exercício| consolid:sim*',
        'DeprAmor| Em moeda orig| no exercício| consolid:sim*',
        'DasOpe| Em moeda orig| no exercício| consolid:sim*',
        'DbntCP| Em moeda orig| consolid:sim*',
        'DesAdm| Em moeda orig| no exercício| consolid:sim*',
        'DesVen| Em moeda orig| no exercício| consolid:sim*',
        'DivPag| Em moeda orig| no exercício| consolid:sim*',
        'LAIR| Em moeda orig| no exercício| consolid:sim*',
        'Lucro Bruto| Em moeda orig| no exercício| consolid:sim*',
        'TotEmFiCP| Em moeda orig| consolid:sim*',
        'RecBru| Em moeda orig| no exercício| consolid:sim*',
        'ResFin(Ant)| Em moeda orig| no exercício| consolid:sim*',
        'FinCP| Em moeda orig| consolid:sim*',
        'FinObtLiq| Em moeda orig| no exercício| consolid:sim*',
        'IRDife| Em moeda orig| no exercício| consolid:sim*',
        'CPV| Em moeda orig| no exercício| consolid:sim*',
        'LuOpCo| Em moeda orig| no exercício| consolid:sim*',
        'Out Des Adm| Em moeda orig| no exercício| consolid:sim*',
        'PrAcMi| Em moeda orig| no exercício| consolid:sim*',
        'ImpRen| Em moeda orig| no exercício| consolid:sim*',
        'Qtd Ações Méd|Outstanding|da empresa|em 1 ano',
        'AuAcTe| Em moeda orig| no exercício| consolid:sim*',
        'Integ Cap| Em moeda orig| no exercício| consolid:sim*',
        'FinDeb| Em moeda orig| no exercício| consolid:sim*',
        'Redu Cap| Em moeda orig| no exercício| consolid:sim*',
        'DpInCP| Em moeda orig| consolid:sim*',
        'DeInFi| Em moeda orig| no exercício| consolid:sim*', 'code'
    ])

    cols.append([
        'Data',
        'LPA| Em moeda orig| de 12 meses| consolid:sim*| ajust p/ prov',
        'VPA| Em moeda orig| consolid:sim*| ajust p/ prov',
        'Vendas/Acao| Em moeda orig| de 12 meses| consolid:sim*| ajust p/ prov',
        'EBITDA/Acao| Em moeda orig| de 12 meses| consolid:sim*| ajust p/ prov',
        'EBITDA| Em moeda orig| de 12 meses| consolid:sim*',
        'MrgBru| de 12 meses| consolid:sim*',
        'Mrg EBIT| de 12 meses| consolid:sim*',
        'RenPat(med)| Em moeda orig| de 12 meses| consolid:sim*',
        'ROIC (IC medio)%| de 12 meses| consolid:sim*',
        'Capex| Em moeda orig| de 12 meses| consolid:sim*',
        'AlaFin| de 12 meses| consolid:sim*',
        'Invest Cap $| Em moeda orig| consolid:sim*',
        'Depr e Amor| Em moeda orig| de 12 meses| consolid:sim*',
        'AlaOpe| de 12 meses| consolid:sim*', 'code'
    ])
    cols.append([
        'Data', 'P/L|Em moeda orig|de 12 meses|consolid:sim*',
        'Valor Mercado|da empresa|Em moeda orig',
        'Div Yld (fim)|1 anos|Em moeda orig',
        'EV/EBITDA emp|Em moeda orig|de 12 meses|consolid:sim*', 'code'
    ])

    cols.append([
        'Data', 'Qtd\ntítulos', 'Cotação\nmédia', 'Valor$', 'Qtd\ntítulos.1',
        'Qtd\ncontratos', 'Valor$.1', 'Tx mín', 'Tx méd', 'Tx máx', 'Tx mín.1',
        'Tx méd.1', 'Tx máx.1', 'code'
    ])

    newCols.append([
        'dt',
        'numShares',
        'netIncome',
        'ebit',
        'deprecAmortExhaus',
        'netCapitalIncrease',
        'finIncome',
        'revenues',
        'totalAssets',
        'capitalIncrease',
        'cashEquiv',
        'assetST',
        'liabST',
        'netEquity',
        'totLiabLT',
        'debtLT',
        'liabLT',
        'fixedAssets',
        'totalLiab',
        'AmDesAgi',
        'cfo',
        'deprecAmort',
        'DasOpe',
        'debST',
        'admExp',
        'salesExp',
        'divPaid',
        'ebt',
        'grossProfit',
        'totLiabST',
        'revenuesGross',
        'finNetIncome',
        'liabST',  # NOTE: duplicates the earlier 'liabST' (PasCir); FinCP likely needs its own name
        'FinObtLiq',
        'taxesPayable',
        'cogs',
        'LuOpCo',
        'OutDesAdm',
        'PrAcMi',
        'taxesPaid',
        'numSharesAvg',
        'AuAcTe',
        'IntegCap',
        'FinDeb',
        'ReduCap',
        'DpInCP',
        'DeInFi',
        'code',
    ])
    newCols.append([
        'dt', 'eps', 'bps', 'revps', 'ebitdaps', 'ebitda', 'grossMargin',
        'ebitMargin', 'ROE', 'ROIC', 'capex', 'finLeverage', 'investedCap',
        'deprecAmort', 'operLeverage', 'code'
    ])
    newCols.append(
        ['dt', 'pe12m', 'mktcap', 'div_yld12m', 'ev2ebitda12m', 'code'])
    newCols.append([
        'dt', 'volume', 'vwap', 'volume_fin', 'volume2', 'neg', 'volume_fin2',
        'low_bid', 'avg_bid', 'high_bid', 'low_ask', 'avg_ask', 'high_ask',
        'code'
    ])
    # need to confirm 'Médio' is vwap

    # mapCols=[]
    # for i,v in enumerate(newCols):
    #     mapCols.append({k:newCols[i][j] for j,k in enumerate(cols[i])})

    #    for i,f in enumerate(files):
    i = 3
    f = files[i]
    if i == 3:
        df = readEconCSVPX(f)
    else:
        df = readEconCSV(f)
    df.columns = newCols[i]
    #df=df.rename(mapCols)
    df.dt = df.dt.str.replace('T', 'Q')
    df.dt = pd.to_datetime(df.dt, format=dtFormat[i])

    df.set_index('dt', inplace=True)
    ucode = unique(df.code)
    for n, code in enumerate(ucode):
        print('n: {}/{}'.format(n, len(ucode)))
        df1 = df.loc[df.code == code].copy()  # copy so deleting 'code' doesn't touch a view
        del df1['code']
        df1 = mDataFrame(df1)
        df1.md = metadataFundamental(code + suffixName[i],
                                     'equity',
                                     stDT=df1.index[0])
        df1.md.source = 'economatica'
        df1.md.subtype = subtype[i]
        mds.write(df1,
                  mds.fundamentalVS,
                  check_metadata=False,
                  prune_previous_version=True)
Code example #13
File: DI_implCopom.py  Project: royopa/test
    out.txFit.append(bs(du1))
    out.duFit.append(du1)
    out.errFit = errFit
    out.spl.append(bs)
    out.duCpom[t, I] = du

    print('t:{}'.format(t))

#uu.save_obj(out,'copomFit')
#out=Dict(uu.load_obj('copomFit'))

###### save COPOM fit
df1 = ds.mDataFrame(
    pd.DataFrame(out.cpomFit.T, index=out.tx.index, columns=out.cpomAll.index))
df1.md = ds.metadataFundamental('copomFit', 'analysis', df1.index[0])
mds.write(df1, check_metadata=False, prune_previous_version=True)
df1.to_excel(globalM.dataRoot + '/cpomHist.xlsx')

###### save Fix Curve
dfVertFix.md = ds.metadataFundamental('ratesCurveBZ', 'analysis',
                                      dfVertFix.index[0])
dfVertFix = dfVertFix[~all(isnan(dfVertFix), 1)]  # drop rows that are entirely NaN
mds.write(dfVertFix, check_metadata=False, prune_previous_version=True)
#uu.save_obj(out.spl,'rateCurveBZspline')
aspl = array(out.spl)
dfBS = ds.mDataFrame(aspl, df1.index[-aspl.size:], ['bSpline'])
mds.obj.save('ratesCurveBZ', dfBS, 'analysis')

#out.pop('spl')
#scipy.io.savemat('W:\\Multi-Asset Portfolio Solutions\\Databases\\copom_decisions_fit.mat',out)
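The bs callables collected in out.spl appear to be splines fit to the rate curve. A minimal scipy sketch of fitting and evaluating one such spline (durations and rates are hypothetical):

import numpy as np
from scipy.interpolate import make_interp_spline

du = np.array([21, 63, 126, 252, 504, 756])                      # business-day durations
tx = np.array([0.1365, 0.1340, 0.1310, 0.1280, 0.1270, 0.1275])  # zero rates

bs = make_interp_spline(du, tx, k=3)  # cubic B-spline through the curve
txFit = bs(np.arange(21, 757))        # evaluate on a dense duration grid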
Code example #14
    def doBars(self):  #for intraday bars
        from mDataStore.mongo import mDataFrame, metadataAsset
        from mDataStore import mongo

        if self.onlineTable is None:
            return

        t1 = time.time()
        # print('doBars1')
        if hasattr(self, 't0_bars') and (t1 - self.t0_bars < 1):
            return

        self.t0_bars = t1
        dt_today = dt.today().replace(hour=0,
                                      minute=0,
                                      second=0,
                                      microsecond=0)
        dt_today_loc = pd.Timestamp(dt_today).tz_localize('America/Sao_Paulo')
        dt_max = dt.today().replace(year=dt_today.year + 1,
                                    hour=0,
                                    minute=0,
                                    second=0,
                                    microsecond=0)
        if 'lastBlpIndex' not in self.onlineTable:
            self.onlineTable['lastBlpIndex'] = dt_today_loc
        for i in range(self.onlineTable.shape[0]):
            a = Dict(self.onlineTable.iloc[i].to_dict())
            if (not isinstance(a.INTRA_SHORT_NAME, str)) or (not isinstance(
                    a.freq, str)):
                continue

            nfreq, sfreq = mongo.splitFreq(a.freq)  #assume freq is minutes

            if (t1 - self.t0_intra[i] > a.updateInterval):
                # st1 = dt.now() - datetime.timedelta(seconds=a.updateInterval*5) #np.maximum(a.startDate,dt_today)
                # try:
                #     df_ = mds.read(a.security,a.freq,library=mds.mktBars,date_range=[st1,dt_max])
                #     df1 = self.blp.getIntradayHistoricData(a.security, nfreq, st1, dt_max, event=a.event,
                #                                            **a.kwargs)
                # except Exception as e: #first query of the day - get all times
                #     df1 = self.blp.getIntradayHistoricData(a.security, nfreq, dt_today, dt_max, event=a.event,
                #                                            **a.kwargs)
                # df1 = self.blp.getIntradayHistoricData(a.security, nfreq, dt_today, dt_max, event=a.event,**a.kwargs)

                self.t0_intra[i] = t1
                try:
                    md = mds.read_metadata(
                        a.INTRA_SHORT_NAME, '1Minute',
                        mds.assetTS2)  #a.security.split(' ')[0]
                    md.freq = a.freq
                except:
                    md = metadataAsset(a.INTRA_SHORT_NAME,
                                       'equity',
                                       freq=a.freq,
                                       feeder_id=a.FEEDER_ID)
                mds.blp = self.blp
                #dt_today
                df1 = self.blp.getIntradayHistoricDataBA(
                    a.FEEDER_ID,
                    nfreq,
                    self.onlineTable.lastBlpIndex[i],
                    dt_max,
                    md,
                    event=a.event,
                    mds=mds)
                if df1.shape[0] == 0:
                    continue

                self.onlineTable.lastBlpIndex.values[i] = df1.index[-1]
                #                                            **a.kwargs)
                df1 = df1.rename(columns={'numEvents': 'trades'})
                if df1.index.tzinfo is None:
                    df1 = df1.tz_localize('GMT')
                print('doBars2 - ' + a.FEEDER_ID)
                try:
                    mds.append(df1,
                               library=mds.onlineTS,
                               replaceIntersection=True,
                               check_metadata=False)
                except Exception as e:
                    warn('Unable to append {}'.format(df1.md.name))
                    uu.printException(e)
                # if len(a.addrs) :
                #     self.putData({'messageType':'barsUpdate','data':a},a.addrs)

                if (t1 - self.t0_daily[i] > a.dailyUpdateInterval):
                    # for each series in intradayQueries, check if the daily series is in onlineVS up to yesterday
                    # If not, simply copy the series from assetVS to onlineVS. If it is not up-to-date, warn
                    #
                    self.t0_daily[i] = t1

                    dt_today1 = dt_today + datetime.timedelta(1)
                    dt0 = dt(1900, 1, 1)

                    if (df1.shape[0] == 0) or df1.index[-1] < dt_today_loc:
                        warn(
                            'No prices for {}/{} today ({}) in bars - (intraday/onlineTS)'
                            .format(a.INTRA_SHORT_NAME, nfreq, dt_today))
                        continue

                    try:
                        dfd = mds.read(a.daily_shortname,
                                       '1BDay',
                                       library=mds.assetVS,
                                       date_range=[dt0, dt_today1])
                    except Exception as e:
                        print(
                            'Unable to read {}/{} from assetVS in bars - daily'
                            .format(a.security, nfreq))
                        uu.printException(e)
                        continue
                    # df1 = df1.loc[df1.index<dt_today_loc]
                    c1 = dfd.columns.intersection(df1.columns)
                    c2 = dfd.columns.difference(df1.columns)
                    dfi1 = df1[c1].iloc[-1]
                    lastUpdate = dfi1.name
                    dfi1.name = dfi1.name.normalize().tz_localize(None)

                    for c in c2:
                        dfi1[c] = array(nan)

                    # if md.subtype == 'fut_rol':
                    if 'underlying' in dfd:
                        if not 'underlying' in df1:
                            warn(
                                'Ignoring {}/{} for Daily. Underlying not present in bloomberg results'
                                .format(a.INTRA_SHORT_NAME, nfreq))
                            continue
                        dfi1['underlying'] = dfi1['underlying'].split(' ')[0]
                        if dfd.underlying[-1] != dfi1['underlying']:
                            continue

                        # check it is the correct future; if not, skip

                    dfd_ = pd.DataFrame(dfi1).T

                    for c in dfd_.columns:
                        # if dfd[c].dtype in [float32,float64,int32,int64]:
                        dfd_[c] = dfd_[c].astype(dfd[c].dtype)

                    if dfd.md.subtype == 'di_fut':
                        dfd_['yield_close'] = dfd_['close']
                        dfd_['close'] = NaN

                    df2 = pd.concat((dfd, dfd_))
                    df2.md = dfd.md
                    df2.md.lastUpdate = lastUpdate
                    # if (not 't0_daily' in a): #first update in the day
                    try:
                        mds.delete(df2.md.name,
                                   df2.md.freq,
                                   library=mds.onlineVS
                                   )  #make sure not accumulating versions
                    except Exception as e:
                        pass
                    try:
                        mds.write(df2,
                                  library=mds.onlineVS,
                                  check_metadata=False,
                                  prune_previous_version=True)
                    except Exception as e:
                        print(
                            'Unable to write {}/{} to onlineVS in bars - daily'
                            .format(a.security, nfreq))
                        uu.printException(e)
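The dfd_ block above appends the latest intraday bar to the daily history while coercing each column back to the daily frame's dtype. The core of that pattern, using the names from the code (dfd is the daily history, dfi1 the latest bar as a Series):

patch = dfi1.to_frame().T                        # one-row frame from the Series
patch = patch.astype(dfd.dtypes[patch.columns])  # match the daily dtypes column by column
df2 = pd.concat((dfd, patch))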
Code example #15
from py_init import *
from mDataStore.globalMongo import mds

df1 = pd.DataFrame(np.random.normal(size=300),
                   index=pd.date_range(dt(2010, 1, 1),
                                       dt(2012, 1, 1),
                                       periods=300),
                   columns=['close'])

md1 = ds.metadataAsset(name='teste', type='index', subtype='index')

df1.md = md1

mds.write(df1, library='testVS')
dff = mds.read('teste', library='testVS')
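A quick round-trip check (assuming, as the examples above do, that read returns the frame with its md metadata attached):

assert dff.shape == df1.shape
assert dff.md.name == 'teste'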