Пример #1
0
def cnv2minute1(tk, library=mds.assetTS):
    """Convert a stored 1-second series for ``tk`` into a 1-minute series.

    Reads ``<tk>_1Second`` in 6-month chunks (to bound memory), replaces
    zero prices with NaN, resamples via ``convertFreq`` and appends the
    result back to ``library`` at 1-minute frequency.

    Parameters
    ----------
    tk : str
        Series/ticker name (stored under ``<tk>_1Second``).
    library : optional
        Source/target library handle (defaults to ``mds.assetTS``).
    """
    dti = library.min_date(tk + '_1Second')
    dtf = library.max_date(tk + '_1Second')

    # Chunk boundaries every 6 months, normalized to midnight.
    dts = pd.date_range(dti, dtf, freq='6MS').normalize()
    lst1 = [l.to_pydatetime() for l in pd.to_datetime(dts)]
    dts = [dti] + lst1 + [dtf]

    # Best-effort removal of any previously converted minute data.
    try:
        mds.delete(tk, ds.freqHelper.minute, library=library)
    except Exception as e:
        print('UNABLE TO DELETE:')
        uu.printException(e)

    for t, dt0 in enumerate(dts[:-1]):
        dt1 = dts[t + 1]
        if dt0 >= dt1:
            continue
        try:
            df1 = mds.read(tk, ds.freqHelper.second, library=library, date_range=[dt0, dt1])
        except Exception as e:
            print('Error reading {}'.format(tk))
            uu.printException(e)
            continue  # BUGFIX: df1 was unbound (or stale) below when the read failed

        # Zero prices are bad ticks in these fields -> mark as missing.
        for fld in ['close', 'yield_close', 'ask', 'yield_ask', 'bid', 'yield_bid']:
            if fld in df1:
                # .loc avoids chained assignment, which may silently not write
                df1.loc[df1[fld] == 0, fld] = np.NaN

        df2 = convertFreq(df1, ds.freqHelper.minute)
        mds.append(df2, library=library, check_metadata=False)
Пример #2
0
    def __init__(self, series, transformation='mom', frequency='monthly'):
        """Load a series and any previously stored model state.

        Parameters
        ----------
        series : str
            Series name in the ``econVS`` library.
        transformation : str
            Transformation tag ('mom', ...) used in the stored object name.
        frequency : str
            'monthly' or 'quarterly'; anything else raises ValueError.

        Raises
        ------
        ValueError
            If ``frequency`` is not one of the supported values.
        """
        self.series = series
        self.transformation = transformation
        self.warnings = []
        if frequency == 'monthly':
            self.freq = f.monthBegin
        elif frequency == 'quarterly':
            self.freq = f.quarterBegin
        else:
            # BUGFIX: self.freq was silently left unset for unknown values,
            # producing a confusing AttributeError later; fail fast instead.
            raise ValueError(
                "frequency must be 'monthly' or 'quarterly', got {!r}".format(frequency))

        self.name = series + '_' + transformation + '_' + frequency
        self.old_obj = mds.obj.load(self.name, path='economia')
        self.db = mds.read(name=series, freq=self.freq, library=mds.econVS)
        self.summary = {}
        self.accuracy = pd.DataFrame()
        self.country = self.db.md['country']
        self.proj_ponta = None

        #### TODO think about the cases where:
        #### 1. monthly variables are transformed to quarterly frequency
        #### 2. variables are natively quarterly
        if self.old_obj == (None, None):
            # No stored state: start a fresh model.
            self.update = False
            self.tested_lags_y = None
            self.tested_lags_x = None
            self.models_par = []
            self.models_npar = []
            self.prediction = {}
            self.avg_y = None
            self.y = {}
            self.x = {}
            self.selected_dict = {}
            self.n_start = None
            self.n_end = None
            self.config = {}
            self.mcs = []
            self.models = {}
            self.mongo_list = []
            self.colsorder = None

        else:
            # Resume from the previously stored object.
            self.update = True
            obj = self.old_obj[1]['obj']
            self.models_par = obj.models_par
            self.models_npar = obj.models_npar
            self.prediction = obj.prediction
            self.avg_y = obj.avg_y
            self.y = obj.y
            self.x = obj.x
            self.selected_dict = obj.selected_dict
            self.n_start = obj.n_start
            self.n_end = obj.n_end
            self.config = obj.config
            self.mcs = obj.mcs
            self.models = obj.models
            self.mongo_list = obj.mongo_list
            self.colsorder = self.x['select'].columns.to_list()
Пример #3
0
def renameDIs():
    """Rename every 'OD*' series in assetTS2 to the 'DI1*' convention.

    Each matching series is read, rewritten under the new name, and the
    old entry is deleted afterwards.
    """
    from mDataStore.globalMongo import mds

    for entry in mds.find(library=mds.assetTS2):
        meta = entry[2]
        if not meta.name.startswith('OD'):
            continue
        frame = mds.read(meta.name, meta.freq, library=mds.assetTS2)
        frame.md.name = frame.md.name.replace('OD', 'DI1')
        mds.write(frame, library=mds.assetTS2)
        mds.delete(meta.name, meta.freq, library=mds.assetTS2)
Пример #4
0
def update(tt):
    """Bokeh periodic callback: refresh the candlestick plot with new bars.

    Re-reads the full minute series, overwrites the already-plotted part of
    ``inc_source`` and streams any rows that arrived since the last call.
    Relies on module-level globals: ``df1``, ``TK``, ``TEST``, ``T``,
    ``inc_source``, ``fig``, ``INCREASING_COLOR``, ``DECREASING_COLOR``.
    """
    global df1
    dt0 = df1.index[-1]
    xaxis_dt_format = '%d %b %Y, %H:%M:%S'
    print(' ### READ ####')
    df1_ = mds.read(TK, ds.freqHelper.minute, date_range=[dt(2010,1,1), dt(2035, 1, 1)],
                   library=mds.mktBars).tz_convert('America/Sao_Paulo')

    if TEST:
        # Test mode: reveal one more bar per tick and jitter highs so
        # successive frames differ visibly.
        df1_ = df1_.iloc[:-T + tt]
        if tt%2==0:
            df1_.high=df1_.high*1.0001
        else:
            df1_.high=df1_.high/1.0001

    # Plot against a positional index; keep timestamps in 'Date'.
    df1_['Date']=df1_.index
    df1_['idx'] = np.arange(df1_.shape[0])
    df1_=df1_.set_index('idx')

    # t = number of bars already present in the plot's data source.
    t=len(inc_source.data['high1'])
    # BUGFIX: removed a dead duplicate assignment of 'high1' that appeared
    # both before and after the other columns.
    inc_source.data['high1']=df1_.high.iloc[:t].values.tolist()
    inc_source.data['low1'] = df1_.low.iloc[:t].values.tolist()
    inc_source.data['close1'] = df1_.close.iloc[:t].values.tolist()
    inc_source.data['open1'] = df1_.open.iloc[:t].values.tolist()
    inc = df1_.close.iloc[:t] > df1_.open.iloc[:t]
    inc_source.data['color'] = np.where(inc,INCREASING_COLOR,DECREASING_COLOR).tolist()

    if df1_.index[-1] != df1.index[-1]:
        # Stream each not-yet-plotted row individually.
        for t1 in range(t,df1_.shape[0]):
            # BUGFIX: color each streamed bar from its own row (t1); the
            # original used iloc[-1], coloring every bar like the last one.
            inc = df1_.close.iloc[t1] > df1_.open.iloc[t1]
            color=INCREASING_COLOR if inc else DECREASING_COLOR
            new_data = dict(
                x1= [df1_.index[t1]],
                Date1=[df1_.Date.values[t1]],
                open1=[df1_.open.iloc[t1]],
                high1=[df1_.high.iloc[t1]],
                low1=[df1_.low.iloc[t1]],
                close1=[df1_.close.iloc[t1]],
                color=[color],
            )

            inc_source.stream(new_data)

            dt1=pd.Index(df1_.Date).tz_localize(None).values

            # Relabel the positional x-axis with real timestamps.
            fig.xaxis.major_label_overrides = {
                i: date.strftime(xaxis_dt_format) for i, date in enumerate(pd.to_datetime(dt1))
            }

    df1=df1_
    print(' ### DONE ####')
Пример #5
0
def correctGenFut(mds, code, freq):
    '''
    Correct data for Gen Future from reuters. It sometimes use the second future instead of the first.
    just deleting this data.

    Scans the stored series ``<code>_<freq>`` in 3-month chunks, orders the
    underlying contract codes chronologically, and deletes date ranges where
    the underlying jumps backwards (i.e. a later contract appeared before an
    earlier one finished).

    Parameters: ``mds`` data-store handle; ``code`` future code; ``freq``
    bar frequency string.
    '''

    symbol = code + '_' + freq
    dt0 = mds.assetTS.min_date(symbol)
    dt1 = mds.assetTS.max_date(symbol)

    # Chunk the full span every 3 months; the final end date is appended
    # so the last partial chunk is covered too.
    dts = pd.date_range(dt0, dt1, freq='3MS').tolist()
    dts.append(dt1)
    print('analyzing {}'.format(code))

    #last_underlying = []
    for t, dt0_ in enumerate(dts[:-1]):
        dt1_ = dts[t + 1]
        # print('analyzing {} : {} - {}'.format(code,dt0_, dt1_))
        df = mds.read(code, freq=freq, date_range=[dt0_, dt1_])

        df1 = df.copy()
        #order unames according to time (so, invert year and month.
        #underlying will be replaced by integers reflecting order
        unames_ =  pd.unique(df.underlying)
        unames=unames_.copy()
        for i in range(len(unames)):
            # Last timestamp at which this underlying appears in the chunk.
            dt1=df.index[df.underlying==unames_[i]][-1]
            # Contract codes look like "<letters><digits>" (e.g. month code
            # + single-digit year) -- assumes Reuters-style naming; verify.
            match = re.match(r"([a-z]+)([0-9]+)", unames_[i], re.I)
            assert(match)
            items = match.groups()
            # Decade flag: 1 when the single-digit year must belong to the
            # next decade relative to the data's own timestamps.
            dig=1*(dt1.year - 2000-int(items[1][-1])>2)
            # Rebuild the key as (prefix, decade, year digit, month letter)
            # so lexicographic sort == chronological sort.
            unames[i]=items[0][:-1]+dig.__str__()+ items[1][-1]+items[0][-1]
        idx=np.argsort(unames)

        # Replace underlying codes by their chronological rank (integers).
        map1 = {k:v for v,k in enumerate(unames_[idx])}
        df1.underlying = df1.underlying.replace(map1)

        # df1['cnt']=cntrep(df1.underlying.values)
        # df1.underlying[df1['cnt']<1500]
        # True where the series steps back below the running maximum, i.e.
        # an earlier contract reappears after a later one was already seen.
        I = df1.underlying.values[1:] < df1.underlying.cummax().values[:-1]

        # I = df1.underlying.values[:-1] > df1.underlying.cummin().values[:0:-1]
        # Edges of each bad run; J indexes the transition points.
        I1=I[1:]!=I[:-1]
        J = np.where(I1)[0]
        dt_rep = df1.index[J + 1]
        # Append one second past the chunk end so the last open range closes.
        dt_rep=dt_rep.append(df1.index[-1:].shift(1,'S'))
        # dt_rep = df1.index[J]
        if len(dt_rep)>0:
            for j in range(len(dt_rep)-1):
                # Only delete ranges that start inside a "backwards" run.
                if I[J[j]+1]:
                    dt_rg = DateRange(dt_rep[j], dt_rep[j+1])
                    print('correcting: {} - dates: {} - {}'.format(symbol,dt_rg.start,dt_rg.end))
                    a=1
                    mongoDS.TS_deleteDateRange(mds.assetTS, symbol, dt_rg, df)
Пример #6
0
def convertOne(i,
               assetBase,
               date_range,
               freq=ds.freqHelper.minute,
               freqBase=ds.freqHelper.second,
               firstRun=False):
    """Convert one asset's bars from ``freqBase`` to ``freq`` in assetTS.

    Skips the asset when the target-frequency series already exists for
    ``date_range``; otherwise reads the base-frequency data (GMT),
    resamples it with ``convertFreq`` and writes the result.

    Parameters
    ----------
    i : int
        Row position in ``assetBase``.
    assetBase : table-like with a ``name`` column
        Universe of assets to convert.
    date_range : [start, end]
        Date window to read and convert.
    freq, freqBase : str
        Target and source bar frequencies.
    firstRun : bool
        Unused; kept for backward compatibility with existing callers.
    """
    from mDataStore.globalMongo import mds
    name = assetBase.name[i]

    print('starting {} . {}/{}'.format(name, i, len(assetBase.name)))
    # If the converted series is already present, there is nothing to do.
    try:
        df1 = mds.read(name, freq, mds.assetTS, date_range=date_range)
        print('Already Done!')
        return
    except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
        pass

    try:
        df = mds.read(name,
                      freqBase,
                      mds.assetTS,
                      date_range=date_range,
                      tz='GMT')
    except Exception:
        print('unable to read {} - dt {}->{}'.format(name, date_range[0],
                                                     date_range[1]))
        return

    df1 = convertFreq(df, freq)
    mds.write(df1, mds.assetTS)  # return value was unused; assignment dropped
Пример #7
0
def reutersSplitGenTickerIntoContractsRoutine(
        genTickers=['DIJc{}'.format(i + 1) for i in range(38)],
        dt0=dt(2000, 1, 1),
        freq='6MS',
        library=mds.assetTS):
    """Split Reuters generic-future series into per-contract series.

    Reads all ``genTickers`` in ``freq``-sized chunks from ``dt0`` to now,
    renames price columns to their yield equivalents, normalizes one-digit
    contract year codes to two digits, deletes any previous per-contract
    data and hands each chunk to ``updateAllContracts``.

    NOTE(review): the mutable default ``genTickers`` list is never mutated
    here, so it is safe, but a tuple/None default would be more robust.
    """

    # Chunk edges up to "now"; the open right edge is appended explicitly.
    dts = pd.date_range(dt0, dt.now(), freq=freq, closed='left')
    dts = dts.append(pd.DatetimeIndex([dt.now()]))

    # uCode0 accumulates contract codes already wiped in earlier chunks so
    # each contract is deleted only once across the whole run.
    uCode0 = []
    for t, dt1 in enumerate(dts[1:]):
        dt0_ = dts[t]

        dfs = mds.read(genTickers, f.second, library, date_range=[dt0_, dt1])
        df1 = pd.concat(dfs, 0)
        if df1.shape[0] == 0:
            continue
        print('dt: {}'.format(dt0_))
        # DI quotes are yields, so close/ask/bid become yield_* columns.
        # NOTE(review): 'ask' maps to 'yield_bid' and vice versa -- looks
        # intentional for yield quoting (price/yield inversion); confirm.
        df1 = df1.rename(columns={
            'close': 'yield_close',
            'ask': 'yield_bid',
            'bid': 'yield_ask'
        })
        uCode = pd.unique(df1.underlying)

        #rename for 2 digits
        for i, code1 in enumerate(uCode):
            match = re.match(r"([a-z]+)([0-9]+)", code1, re.I)
            items = match.groups()
            if len(items[1]) == 1:  #1 digit
                try:
                    mds.delete(code1, f.second, library)
                except:
                    pass
                # Decade flag inferred from the chunk date vs. year digit.
                dig = 1 * (dt0_.year - 2000 - int(items[1][-1]) > 2)
                newCode = items[0][:-1] + items[0][-1] + dig.__str__(
                ) + items[1][-1]
                df1.underlying.loc[df1.underlying == uCode[i]] = newCode
                uCode[i] = newCode
                #year1 = 2000 + int(match[2])

        #delete previous data
        for nm1 in uCode:
            if nm1 not in uCode0:
                try:
                    mds.delete(nm1, f.second, mds.assetTS)
                except:
                    pass
            uCode0.append(nm1)
        updateAllContracts(df1)
Пример #8
0
def cp2onlineVS():
    """Copy every 1-business-day series from assetVS to onlineVS.

    Any existing copy in onlineVS is deleted first so only one version of
    each series is kept; metadata checks are skipped on write.
    """
    from mDataStore.globalMongo import mds

    ms = mds.find(library=mds.assetVS)

    for i, m in enumerate(ms):
        nm = m[2].name
        print('Copying {} - {}/{}'.format(nm,i,len(ms)))
        if m[2].freq == '1BDay':
            df = mds.read(nm,'1BDay',mds.assetVS,date_range=[dt(1990,1,1),dt(2035,1,1)])
            # Best effort: the series may not exist in onlineVS yet.
            try:
                mds.delete(nm,'1BDay',library=mds.onlineVS)
            except Exception:  # narrowed from bare except
                pass

            mds.write(df,library=mds.onlineVS,check_metadata=False)
Пример #9
0
    def __init__(self, y_all):
        """Build the weighted combination of the series in ``y_all``.

        The first series is regressed (without intercept) on the remaining
        ones using first differences; the fitted coefficients, normalized
        to sum to one, are stored in ``self.weights``.
        """
        self.y_all = y_all

        # One column per series; the first column is the regression target.
        columns = [
            mds.read(name=series, freq=f.monthBegin, library=mds.econVS).iloc[:, 0]
            for series in y_all
        ]
        YX = pd.concat(columns, axis=1)
        YX.columns = y_all

        # Work on first differences, dropping the leading NaN rows.
        YX = YX.diff(periods=1).dropna()

        fitted = LinearRegression(fit_intercept=False).fit(YX.iloc[:, 1:],
                                                           YX.iloc[:, 0])
        raw = fitted.coef_
        self.weights = raw / sum(raw)

        self.alpha = {}
        self.beta = {}
        self.gamma = {}
Пример #10
0
    def getIntradayHistoricDataBA(self, feeder_id, interval, startDate, endDate, md, event='TRADE', mds=None, **kwargs):
        """Fetch intraday TRADE bars and merge matching BID/ASK closes.

        Returns an ``mDataFrame`` of trade bars (GMT-localized) with extra
        ``bid``/``ask`` columns aligned by timestamp, a ``vwap`` column
        (value/volume) and, for futures subtypes, an ``underlying`` column
        mapped from the stored ``<name>_dates`` roll table.

        NOTE(review): the ``event`` parameter is accepted but the first
        query below hardcodes event='TRADE' -- confirm whether ``event``
        should be forwarded instead.
        """

        if mds is None:
            from mDataStore.globalMongo import mds
        from mDataStore.mongo import mDataFrame

        df = mDataFrame(self.getIntradayHistoricData(feeder_id,interval , startDate, endDate,event='TRADE', **kwargs))
        df_bid = self.getIntradayHistoricData(feeder_id,interval , startDate, endDate,event='BID', **kwargs)
        df_ask = self.getIntradayHistoricData(feeder_id,interval , startDate, endDate,event='ASK', **kwargs)
        #desc1 = mds.blp.getRefData(feeder_id, desc)

        df.md=md

        df.rename(columns={'numEvents': 'trades'}, inplace=True)

        # Bloomberg intraday timestamps come back naive; pin them to GMT.
        df.index=df.index.tz_localize('GMT')
        df_bid.index = df_bid.index.tz_localize('GMT')
        df_ask.index = df_ask.index.tz_localize('GMT')
        # overwrite=True

        if md.subtype in ['fut_nrol','fut_rol']:
            # Roll calendar: column 1 holds roll dates, column 0 the
            # contract active until that date (bfill maps bar -> contract).
            dts = mds.read(df.md.name.lower() + '_dates', library=mds.fundamentalVS)
            dts1=pd.Index(pd.to_datetime(dts.iloc[:,1]))
            dts1=dts1.tz_localize(df.index.tzinfo.zone)

            I = dts1.get_indexer(df.index,method='bfill')
            underlying=dts.iloc[I,0]
            df['underlying']=underlying.values

        # Align bid closes onto trade-bar timestamps (-1 = no match).
        df['bid'] =nan
        J = df_bid.index.get_indexer(df.index)
        df.loc[J>=0,'bid']=df_bid.close.iloc[J[J>=0]].values

        # Same alignment for ask closes.
        df['ask'] =nan
        J = df_ask.index.get_indexer(df.index)
        df.loc[J>=0,'ask']=df_ask.close.iloc[J[J>=0]].values

        # df['vwap'] =
        # NOTE(review): 'trades' renamed to 'gmt_off' -- looks like column
        # reuse rather than an offset computation; confirm intent.
        df=df.rename(columns={'trades':'gmt_off'})

        df['vwap']= df['value']/df['volume']

        del df['value']
        return df
Пример #11
0
def cp2blp(librarySource=mds.assetTS, libraryDest=mds.assetTS2):
    """Copy minute-frequency series from ``librarySource`` to ``libraryDest``,
    renaming each Reuters ticker to its Bloomberg equivalent."""
    nms, nmsBlp = cnvAllReutersTickers(library=librarySource)

    full_range = [dt(1990, 1, 1), dt(2030, 1, 1)]
    for i, reutersName in enumerate(nms):
        print('Copying {} - {}/{}'.format(reutersName, i, len(nms)))
        frame = mds.read(reutersName, ds.freqHelper.minute, librarySource,
                         date_range=full_range)
        frame.md.name = nmsBlp[i]
        # Drop any stale copy under the Bloomberg name before writing.
        try:
            mds.delete(frame.md.name, frame.md.freq, library=libraryDest)
        except:
            pass
        mds.write(frame, library=libraryDest)
Пример #12
0
    def updateCDI(self):
        """Append today's CDI accrual to the daily CDI series in onlineVS.

        Fetches the current CDI annual rate from Bloomberg (BZDIOVRA),
        converts it to a daily compounding factor, applies it to the last
        stored close and rewrites the series in onlineVS.
        """
        df = mds.read('CDI', '1BDay', library=mds.assetVS)
        dt0 = dt.now().replace(hour=0, minute=0, second=0, microsecond=0)

        cdi0 = self.blp.getRefData(['BZDIOVRA Index'],
                                   ['px_last']).values[0] / 100

        # Annual rate -> daily factor (252 business days per year).
        cdi0 = (1 + cdi0)**(1 / 252) - 1

        j = df.index.get_loc(dt0, method='ffill')

        # If today is already present, compound from the previous close.
        if df.index[j] == dt0:
            cdi_tm1 = df.close[j - 1]
        else:
            cdi_tm1 = df.close[j]

        df.loc[dt0] = cdi_tm1 * (1 + cdi0)

        # BUGFIX: an unguarded delete raised (and aborted the update) when
        # the series was absent from onlineVS; make it best-effort, matching
        # the sibling copy routines.
        try:
            mds.delete(df.md.name, df.md.freq, library=mds.onlineVS)
        except Exception:
            pass
        mds.write(df, library=mds.onlineVS, check_metadata=False)
Пример #13
0
def saveFuture2pickle(mds, code, freq, path):
    """Dump a stored future's history to pickle files in 6-month chunks.

    Parameters
    ----------
    mds : data-store handle
        Must expose ``assetTS.min_date``/``max_date`` and ``read``.
    code : str
        Future code (series name).
    freq : str
        Bar frequency; the stored symbol is ``<code>_<freq>``.
    path : str
        Directory passed to ``uu.save_obj``.

    Returns
    -------
    str or None
        Name of the last chunk saved, or None when no chunk had data
        (the original raised NameError in that case).
    """
    symbol = code + '_' + freq
    dt0 = mds.assetTS.min_date(symbol)
    dt1 = mds.assetTS.max_date(symbol)

    # Chunk edges every 6 months, plus the exact end date.
    dts = pd.date_range(dt0, dt1, freq='6MS').tolist()
    dts.append(dt1)
    print('analyzing {}'.format(code))

    name = None  # BUGFIX: was unbound when every chunk came back empty
    for t, dt0_ in enumerate(dts[:-1]):
        dt1_ = dts[t + 1]
        df = mds.read(code, freq=freq, date_range=[dt0_, dt1_])
        if df.shape[0] > 0:
            name = symbol + '_{:%Y-%m-%d}_{:%Y-%m-%d}'.format(dt0_, dt1_)
            uu.save_obj(df, name, path=path)

    return name
Пример #14
0
    def clean(self, transf='mom', min_y_sample=.8, lags_y=1, lags_X=(0, 4)):
        """Run the sl_clean pipeline for every series in ``self.y_all``.

        Lag handling is disabled only when both ``lags_y`` and ``lags_X``
        are zero; the resulting flag is stored as ``self.with_lags`` and
        each fitted cleaner in ``self.alpha[series]``.
        """
        with_lags = not (lags_y == 0 and lags_X == (0, 0))

        for series in self.y_all:
            target_db = mds.read(name=series, freq=f.monthBegin, library=mds.econVS)
            cleaner = sl_clean(target=target_db)
            cleaner.transform_y(transf)
            cleaner.get_db(check_real=True,
                           check_seas=True,
                           min_y_sample=min_y_sample,
                           mongo_list=None)
            cleaner.make_lags(lags_y=lags_y, lags_X=lags_X)
            cleaner.check_rel_date(with_lags=with_lags,
                                   new_output=False,
                                   update_mongo_list=True)
            self.alpha[series] = cleaner

        self.with_lags = with_lags
Пример #15
0
def updateDB():
    """Refresh the Excel 'db' sheet from mongo and Bloomberg.

    Reads the two request tables on the 'db_input' sheet ('db_inp' for
    mongo series, 'blp_hist_inp' for Bloomberg histories), clears the
    output sheet and writes each result table side by side, two blank
    columns apart, with the series name placed in the table's top-left cell.
    """
    # Renamed from `input`, which shadowed the builtin.
    inp = xw.Book.caller().sheets['db_input']
    out = xw.Book.caller().sheets['db']

    tb_blp = inp.range('blp_hist_inp[#ALL]').options(pd.DataFrame,
                                                     expand='table').value
    tb_db = inp.range('db_inp[#ALL]').options(pd.DataFrame,
                                              expand='table').value

    out.clear_contents()

    k = 1
    for i in range(tb_db.shape[0]):
        at = tb_db.index[i]
        freq = tb_db.freq[i]
        library = tb_db.library[i]
        dti = tb_db.dt_start[i]
        dtf = tb_db.dt_end[i]
        dfO = mds.read(at, freq, library=library, date_range=[dti, dtf])

        # Write the table, then overwrite its corner cell with a title.
        out.cells(1, k).value = pd.DataFrame(dfO)
        out.cells(1, k).value = at + '_' + freq
        k += dfO.shape[1] + 2

    for i in range(tb_blp.shape[0]):
        at = tb_blp.index[i]
        dti = tb_blp.dt_start[i]
        dtf = tb_blp.dt_end[i]
        # Remaining columns hold the requested Bloomberg fields (NaNs drop).
        fields = tb_blp.iloc[i, 2:]
        fields = fields[~fields.isna()].values.tolist()

        dfs = blp1.getHistoricData([at], fields, dti, dtf)
        dfs = dfs[0]
        out.cells(1, k).value = pd.DataFrame(dfs)
        out.cells(1, k).value = at

        k += dfs.shape[1] + 2
Пример #16
0
# Build a CDI proxy from DI business-day counts/rates and the stored CDI
# index. Relies on globals defined earlier in the (unseen) script:
# dbdi_du, dbdi_tx, cpom, plus star-imported numpy helpers (insert, diff,
# log, exp) -- presumably from a `from numpy import *`; confirm upstream.
dbdi_du.index = uu.x2dti_date(dbdi_du.index)
dbdi_tx.index = uu.x2dti_date(dbdi_tx.index)

#fer=uu.m2pdate(fer[:,0])
#ferDT=fer.date
#dbdi=Dict(dbdi)
#DATA=Dict(DATA)
out = Dict()

out.cpom = cpom  #pd.DataFrame(data=cpom[:,1],index=uu.x2pdate(cpom[:,0]),columns=['rate'])
out.cpomAll = out.cpom.copy()
out.du = dbdi_du
out.tx = dbdi_tx / 100

#out.cdi=pd.DataFrame(data=DATA.INDEX.CDI[:,1],index=uu.x2pdate(DATA.INDEX.CDI[:,0]),columns=['rate'])
out.cdi = mds.read('CDI')
# Index level -> daily log return (first value padded with 0) -> annualized rate.
out.cdi.iloc[:, 0] = insert(diff(log(out.cdi.values), axis=0), 0, 0)
out.cdi.iloc[:, 0] = exp(out.cdi)**252 - 1

# Align policy-rate and CDI series onto the DI business-day grid.
out.cpom = out.cpom.reindex(out.du.index, method='ffill') / 100
out.cpom = out.cpom.fillna(method='ffill')
out.cdi = out.cdi.reindex(out.du.index, method='ffill')

# 30-observation mean spread of CDI over the policy rate.
delt = (out.cdi - out.cpom).rolling(30).mean()

# Proxy: yesterday's policy rate plus the rolling spread.
out.cdiProxy = out.cpom.shift(1) + delt
#out.cpom=out.cpom[(~pd.isna(out.cpom)).values]

# t=out.tx.shape[0]-1
# dt=out.tx.index[t]
# t0=out.cpom.index.searchsorted(dt)
Пример #17
0
from py_init import *
from mDataStore.globalMongo import mds

# Smoke test: write a random series to the testVS library and read it back.
# BUGFIX: the original called np.random.normal(300), which returns a single
# scalar drawn from N(300, 1) and broadcasts a constant column over the
# whole index; size=300 produces the intended 300 random samples.
df1 = pd.DataFrame(np.random.normal(size=300),
                   index=pd.date_range(dt(2010, 1, 1),
                                       dt(2012, 1, 1),
                                       periods=300),
                   columns=['close'])

md1 = ds.metadataAsset(name='teste', type='index', subtype='index')

df1.md = md1

mds.write(df1, library='testVS')
dff = mds.read('teste', library='testVS')
Пример #18
0
    [ds.freqHelper.minute15, ds.freqHelper.minute15, ds.freqHelper.minute15],
    'library': [mds.mktBars, mds.mktBars, mds.mktBars]
},
             path='plot_data',
             saveStrategyAsAsset=False)
# TK='ES1 index'
N = 0
# Load plotting configuration N (tickers, frequencies, libraries)
# previously stored under 'plot_data'.
obj1, dobj = mds.obj.load('plot_cfg{}'.format(N), 'plot_data')

TK = obj1['TK']
freq = obj1['freq']
library = obj1['library']

# One DataFrame per configured ticker, shifted to local (Sao Paulo) time.
df = [
    mds.read(TK1,
             freq[i],
             date_range=[dt(2010, 1, 1), dt(2020, 1, 1)],
             library=library[i]).tz_convert('America/Sao_Paulo')
    for i, TK1 in enumerate(TK)
]

# for i,freq1 in enumerate(freq):
#     if freq1 != ds.freqHelper.minute:
#         df[i]=convertFreq(df[i],freq1)

# df2 = mds.read('BZ1 index',ds.freqHelper.minute,date_range=[dt(2010,1,1),dt(2020,1,1)],library=mds.mktBars).tz_convert('America/Sao_Paulo')
# In test mode drop the last T bars so live updates can be simulated later.
# NOTE(review): TEST is defined elsewhere in this script; confirm upstream.
if TEST:
    T = 1000
    for i in range(len(df)):
        df[i] = df[i].iloc[:-T]

fig = []
Пример #19
0
    def doBars(self):  #for intraday bars
        """Poll Bloomberg for intraday bars and mirror them into mongo.

        For each row of ``self.onlineTable`` whose update interval elapsed,
        fetch new bars since ``lastBlpIndex``, append them to the
        ``onlineTS`` library and, on a slower cadence, roll today's last
        bar into the daily series kept in ``onlineVS``.

        Throttled to at most one full pass per second via ``self.t0_bars``;
        per-row throttles use ``self.t0_intra`` / ``self.t0_daily``.
        """
        from mDataStore.mongo import mDataFrame, metadataAsset
        from mDataStore import mongo

        if self.onlineTable is None:
            return

        t1 = time.time()
        # print('doBars1')
        # Global throttle: skip this pass if called again within one second.
        if hasattr(self, 't0_bars') and (t1 - self.t0_bars < 1):
            return

        self.t0_bars = t1
        dt_today = dt.today().replace(hour=0,
                                      minute=0,
                                      second=0,
                                      microsecond=0)
        dt_today_loc = pd.Timestamp(dt_today).tz_localize('America/Sao_Paulo')
        # Far-future end date so the query captures everything until now.
        dt_max = dt.today().replace(year=dt_today.year + 1,
                                    hour=0,
                                    minute=0,
                                    second=0,
                                    microsecond=0)
        if not 'lastBlpIndex' in self.onlineTable:
            self.onlineTable['lastBlpIndex'] = dt_today_loc
        for i in range(self.onlineTable.shape[0]):
            a = Dict(self.onlineTable.iloc[i].to_dict())
            # Rows without a proper short name / frequency are placeholders.
            if (not isinstance(a.INTRA_SHORT_NAME, str)) or (not isinstance(
                    a.freq, str)):
                continue

            nfreq, sfreq = mongo.splitFreq(a.freq)  #assume freq is minutes

            # Per-row throttle for the intraday fetch.
            if (t1 - self.t0_intra[i] > a.updateInterval):
                # st1 = dt.now() - datetime.timedelta(seconds=a.updateInterval*5) #np.maximum(a.startDate,dt_today)
                # try:
                #     df_ = mds.read(a.security,a.freq,library=mds.mktBars,date_range=[st1,dt_max])
                #     df1 = self.blp.getIntradayHistoricData(a.security, nfreq, st1, dt_max, event=a.event,
                #                                            **a.kwargs)
                # except Exception as e: #first query of the day - get all times
                #     df1 = self.blp.getIntradayHistoricData(a.security, nfreq, dt_today, dt_max, event=a.event,
                #                                            **a.kwargs)
                # df1 = self.blp.getIntradayHistoricData(a.security, nfreq, dt_today, dt_max, event=a.event,**a.kwargs)

                self.t0_intra[i] = t1
                # Reuse stored metadata when available; otherwise build a
                # minimal metadata record from the table row.
                try:
                    md = mds.read_metadata(
                        a.INTRA_SHORT_NAME, '1Minute',
                        mds.assetTS2)  #a.security.split(' ')[0]
                    md.freq = a.freq
                except:
                    md = metadataAsset(a.INTRA_SHORT_NAME,
                                       'equity',
                                       freq=a.freq,
                                       feeder_id=a.FEEDER_ID)
                mds.blp = self.blp
                #dt_today
                df1 = self.blp.getIntradayHistoricDataBA(
                    a.FEEDER_ID,
                    nfreq,
                    self.onlineTable.lastBlpIndex[i],
                    dt_max,
                    md,
                    event=a.event,
                    mds=mds)
                if df1.shape[0] == 0:
                    continue

                # Remember the last fetched timestamp for the next poll.
                self.onlineTable.lastBlpIndex.values[i] = df1.index[-1]
                #                                            **a.kwargs)
                df1 = df1.rename(columns={'numEvents': 'trades'})
                if df1.index.tzinfo is None:
                    df1 = df1.tz_localize('GMT')
                print('doBars2 - ' + a.FEEDER_ID)
                try:
                    mds.append(df1,
                               library=mds.onlineTS,
                               replaceIntersection=True,
                               check_metadata=False)
                except Exception as e:
                    warn('Unable to append {}'.format(df1.md.name))
                    uu.printException(e)
                # if len(a.addrs) :
                #     self.putData({'messageType':'barsUpdate','data':a},a.addrs)

                if (t1 - self.t0_daily[i] > a.dailyUpdateInterval):
                    # for each series in intradayQueries, check if the daily series is in onlineVS up to yesterday
                    # If not, simply copy the series from assetVS to onlineVS. If it is not up-to-date, warn
                    #
                    self.t0_daily[i] = t1

                    dt_today1 = dt_today + datetime.timedelta(1)
                    dt0 = dt(1900, 1, 1)

                    if (df1.shape[0] == 0) or df1.index[-1] < dt_today_loc:
                        warn(
                            'No prices for {}/{} today ({}) in bars - (intraday/onlineTS)'
                            .format(a.INTRA_SHORT_NAME, nfreq, dt_today))
                        continue

                    try:
                        dfd = mds.read(a.daily_shortname,
                                       '1BDay',
                                       library=mds.assetVS,
                                       date_range=[dt0, dt_today1])
                    except Exception as e:
                        print(
                            'Unable to read {}/{} from assetVS in bars - daily'
                            .format(a.security, nfreq))
                        uu.printException(e)
                        continue
                    # df1 = df1.loc[df1.index<dt_today_loc]
                    # Build a one-row daily frame from the last intraday bar.
                    c1 = dfd.columns.intersection(df1.columns)
                    c2 = dfd.columns.difference(df1.columns)
                    dfi1 = df1[c1].iloc[-1]
                    lastUpdate = dfi1.name
                    dfi1.name = dfi1.name.normalize().tz_localize(None)

                    # Columns the intraday feed lacks are filled with NaN.
                    for c in c2:
                        dfi1[c] = array(nan)

                    # if md.subtype == 'fut_rol':
                    if 'underlying' in dfd:
                        if not 'underlying' in df1:
                            warn(
                                'Ignoring {}/{} for Daily. Underlying not present in bloomberg results'
                                .format(a.INTRA_SHORT_NAME, nfreq))
                            continue
                        dfi1['underlying'] = dfi1['underlying'].split(' ')[0]
                        # Only roll into the daily series while both refer
                        # to the same underlying contract.
                        if dfd.underlying[-1] != dfi1['underlying']:
                            continue

                        #check if it is the corerct future, if not continue

                    dfd_ = pd.DataFrame(dfi1).T

                    # Match the stored dtypes so the concat doesn't upcast.
                    for c in dfd_.columns:
                        # if dfd[c].dtype in [float32,float64,int32,int64]:
                        dfd_[c] = dfd_[c].astype(dfd[c].dtype)

                    # DI futures store the quote as a yield, not a price.
                    if dfd.md.subtype == 'di_fut':
                        dfd_['yield_close'] = dfd_['close']
                        dfd_['close'] = NaN

                    df2 = pd.concat((dfd, dfd_))
                    df2.md = dfd.md
                    df2.md.lastUpdate = lastUpdate
                    # if (not 't0_daily' in a): #first uptade in the day
                    try:
                        mds.delete(df2.md.name,
                                   df2.md.freq,
                                   library=mds.onlineVS
                                   )  #make sure not accumulating versions
                    except Exception as e:
                        pass
                    try:
                        mds.write(df2,
                                  library=mds.onlineVS,
                                  check_metadata=False,
                                  prune_previous_version=True)
                    except Exception as e:
                        print(
                            'Unable to read {}/{} from assetVS in bars - daily'
                            .format(a.security, nfreq))
                        uu.printException(e)
Пример #20
0
from py_init import *
from mDataStore.globalMongo import mds
from bokeh.plotting import curdoc, figure
from bokeh.layouts import row, column, gridplot

# Ticker to plot; TEST=True trims the series to simulate live updates later.
TK='ES1 index'
TEST=False
# Full minute history, shifted to local (Sao Paulo) time.
df1 = mds.read(TK,ds.freqHelper.minute,date_range=[dt(2010,1,1),dt(2020,1,1)],library=mds.mktBars).tz_convert('America/Sao_Paulo')
# df2 = mds.read('BZ1 index',ds.freqHelper.minute,date_range=[dt(2010,1,1),dt(2020,1,1)],library=mds.mktBars).tz_convert('America/Sao_Paulo')
if TEST:
    T=1000
    df1=df1.iloc[:-T]

fig,inc_source=uu.candlestick_plot(df1,df1.md.name)

# Plot against a positional index so gaps (nights/weekends) don't show;
# keep the original timestamps in a 'Date' column for axis labels.
df1['Date'] = df1.index
df1['idx'] = np.arange(df1.shape[0])
df1 = df1.set_index('idx')

# Candle colors: up bars vs down bars.
INCREASING_COLOR = '#17BECF'
DECREASING_COLOR = '#7F7F7F'
from bokeh.driving import count

@count()
def update(tt):
    global df1
    dt0=df1.index[-1]
    xaxis_dt_format = '%d %b %Y, %H:%M:%S'
    print(' ### READ ####')
    df1_ = mds.read(TK, ds.freqHelper.minute, date_range=[dt(2010,1,1), dt(2035, 1, 1)],
Пример #21
0
#eco = sl_estimate(y=Y_eco, X=X_eco)
#tic = time.clock()
#eco.calculate(al=True)
#toc = time.clock()
#time_eco = toc - tic
#print('Eco model (10 variables) elapsed time:'+str(toc-tic)+'seconds')
#eco.bench(bench=y_bench)
#eco.plotP(md='avg_mcs',bench=y_bench)
#eco.stats()





# Load the target series (industrial production, 'pim') from econVS.
db_pim = mds.read(name='pim_1 Industria geral', freq=f.monthBegin, library=mds.econVS)


# Cleaning pipeline: year-over-year transform, assemble the regressor
# database, build lags and align release dates.
alpha = sl_clean(target=db_pim)
alpha.transform_y(transform='yoy')
alpha.get_db()
alpha.make_lags()
alpha.check_rel_date()

Y  = alpha.y_data
X  = alpha.X_df_l
#Xl = alpha2.X_df_l

# Keep the top 80% of regressors ranked by mutual information with Y.
beta = sl_select(y = Y, X = X)
#beta_eco.select_prop(proportion = 0.8, method = 'f_regression') #mutual_info_regression
beta.select_prop(proportion = 0.8, method = 'mutual_info_regression')      #mutual_info_regression
Пример #22
0
        'PRE_2A', 'ES1', 'TY1'
    ]
    freq = ['1BDay']
    library = ['assetVS']

    # get current date (d0)
    dt0 = dt.now().replace(hour=0, minute=0, second=0, microsecond=0)

    # get the workday before (d-1)
    dtm1 = pd.to_datetime(uu.workday(dt0, -1)[0])

    # set date interval: expected last timestamp per library
    expDT = [dtm1, dt0]

    print('ativo \t\t\t\tdata \t\tdata_exp \tstatus')
    print('------------------------------------------------------')

    # For every asset/frequency/library combination, compare the series'
    # last timestamp with the expected date and report its status.
    for a in al:
        for f in freq:
            for i, l in enumerate(library):
                expDT_ = expDT[i]
                try:
                    dff = mds.read(a, freq=f, library=l)
                    print('{:_<18} \t{:%d-%m-%Y} \t{:%d-%m-%Y} \t{}'.format(
                        a, dff.index[-1], expDT_,
                        'ok' if dff.index[-1] >= expDT_ else 'ERROR'))
                except NoDataFoundException:
                    # BUGFIX: the old message formatted dff.index[-1], but
                    # dff is unbound (or stale from a previous asset) when
                    # the read fails -> NameError / wrong date printed.
                    print('{:_<20} \t{} \t{:%d-%m-%Y} \t{}'.format(
                        a, '----------', expDT_, 'ERROR! Serie not found.'))
    print('------------------------------------------------------')