コード例 #1
0
ファイル: reuters2blp.py プロジェクト: royopa/test
def cnv2minute1(tk, library=mds.assetTS):
    """Rebuild the 1-minute series for ticker *tk* from its 1-second data.

    The 1-second history is read in ~6-month slices (to bound memory), zero
    prices/yields are masked to NaN, and each slice is converted to 1-minute
    bars and appended to *library*.

    Parameters
    ----------
    tk : str
        Ticker whose ``<tk>_1Second`` symbol exists in *library*.
    library :
        Target data-store library (project type; defaults to ``mds.assetTS``).
    """
    dti = library.min_date(tk + '_1Second')
    dtf = library.max_date(tk + '_1Second')

    # Slice boundaries every 6 months (month-start), normalized to midnight.
    dts = pd.date_range(dti, dtf, freq='6MS').normalize()
    lst1 = [ts.to_pydatetime() for ts in pd.to_datetime(dts)]
    dts = [dti] + lst1 + [dtf]

    # Best-effort removal of any pre-existing 1-minute series.
    try:
        mds.delete(tk, ds.freqHelper.minute, library=library)
    except Exception as e:
        print('UNABLE TO DELETE:')
        uu.printException(e)

    for t, dt0 in enumerate(dts[:-1]):
        dt1 = dts[t + 1]
        if dt0 >= dt1:
            continue
        try:
            df1 = mds.read(tk, ds.freqHelper.second, library=library, date_range=[dt0, dt1])
        except Exception as e:
            print('Error reading {}'.format(tk))
            uu.printException(e)
            # BUG FIX: without this `continue` a failed read either raised
            # NameError (df1 unbound) or silently re-processed the previous
            # slice's data.
            continue
        # Zero prices/yields are placeholders; mask them to NaN so they do
        # not pollute the converted bars.
        for fld in ['close', 'yield_close', 'ask', 'yield_ask', 'bid', 'yield_bid']:
            if fld in df1:
                # .loc avoids chained-assignment (SettingWithCopy) pitfalls;
                # np.nan replaces np.NaN, which was removed in NumPy 2.0.
                df1.loc[df1[fld] == 0, fld] = np.nan

        df2 = convertFreq(df1, ds.freqHelper.minute)
        mds.append(df2, library=library, check_metadata=False)
コード例 #2
0
ファイル: reuters2blp.py プロジェクト: royopa/test
def loadFuturesFrompickle(mds: mongoDS, library='assetTS'):
    """Upload futures DataFrames previously pickled to disk into *library*.

    Reads the pickled list of series names, then loads each pickled
    DataFrame and appends it to the given data-store library.
    """
    # BUG FIX: path1 was referenced one line before it was assigned
    # (NameError on every call).
    path1 = globalM.dataRoot + '/futPickle'
    # BUG FIX: the original called uu.save_obj here, which cannot return the
    # stored name list; load_obj retrieves it. NOTE(review): confirm the
    # pickle 'saveFutures2pickle' holds the list of names.
    names = uu.load_obj('saveFutures2pickle', path=path1)

    for nm in names:
        # BUG FIX: the loop body used the undefined variable `name`
        # instead of the loop variable `nm`.
        print('uploading {}'.format(nm))
        df = uu.load_obj(nm, path=path1)
        mds.append(df, library=library)
コード例 #3
0
def importFromReutersCSV(inputPath,
                         file1,
                         outPath,
                         columns=None,
                         chunksize=4 * 2**20,
                         library=None):
    """Import a gzipped Reuters/Datascope CSV file into the tick store.

    The file is read in chunks; each chunk is renamed to canonical column
    names, split per RIC code, wrapped as an mDataFrame with asset metadata,
    GMT-localized and appended to *library*. On success the source file is
    moved to *outPath*.

    Parameters
    ----------
    inputPath, outPath : str
        Source directory and "done" directory (path prefixes).
    file1 : str
        Gzipped CSV file name (appended to the path prefixes).
    columns : list, optional
        Explicit output column names; when None a standard rename map is
        applied instead.
    chunksize : int
        Rows per pandas chunk.
    library :
        Target data-store library (project type).
    """
    with gzip.open(inputPath + file1) as f_:
        for i, srs in enumerate(pd.read_csv(f_, chunksize=chunksize)):
            print('{} - chunk {}'.format(file1, i))
            # Timestamps arrive as ISO-8601 with a trailing 'Z'.
            dt1 = pd.to_datetime(srs['Date-Time'],
                                 format='%Y-%m-%dT%H:%M:%S.%fZ')
            srs['Date-Time'] = dt1
            # NOTE(review): only type1[0] is used below to pick the bar
            # frequency — assumes each chunk holds a single record Type.
            type1 = pd.unique(srs.Type)
            del srs['Domain']
            del srs['Type']

            # Idiom fix: was `not columns is None`.
            if columns is not None:
                srs.columns = columns
            else:
                srs = srs.rename(
                    columns={
                        '#RIC': 'code',
                        'Date-Time': 'dt',
                        'GMT Offset': 'gmt_off',
                        'Close Bid': 'bid',
                        'Close Ask': 'ask',
                        'Last': 'close',
                        'Volume': 'volume',
                        'Alias Underlying RIC': 'underlying'
                    })

            srs.gmt_off = srs.gmt_off.astype(int)

            # One mDataFrame per RIC code, indexed by timestamp; the first
            # column (the code itself) is dropped via .iloc[:, 1:].
            codes = pd.unique(srs.code)
            out = Dict()
            fr = {'Intraday 5Sec': f.second5, 'Intraday 1Sec': f.second}
            for code in codes:
                out[code] = mDataFrame(
                    srs[srs.code == code].iloc[:, 1:].set_index('dt'))
                out[code].md = metadataAsset(code, 'equity', freq=fr[type1[0]])

            for nm in out:
                out[nm].index = out[nm].index.tz_localize(pytz.timezone('GMT'))
                mds.append(out[nm],
                           check_metadata=False,
                           keep_metadata=True,
                           library=library)
    # Mark the file as processed by moving it to the done directory.
    file1_done = outPath + file1
    os.rename(inputPath + file1, file1_done)
コード例 #4
0
def updateAllContracts(df1):
    """Split *df1* by futures contract code and append each one to mds.assetTS.

    For every unique value of ``df1.underlying`` the matching rows are
    extracted, cast to float, given asset metadata (maturity decoded from
    the code's month letter and two-digit year) and appended at 1-second
    frequency.
    """
    uCode = pd.unique(df1.underlying)

    # Standard futures month letters: F=Jan, G=Feb, ..., Z=Dec.
    monthCodes = array(
        ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z'])
    for i, code1 in enumerate(uCode):
        df_ = mDataFrame(df1.loc[df1.underlying == code1].copy())
        del df_['underlying']
        df_ = df_.astype(float64)
        # Contract code = letters + digits; presumably the last letter of the
        # alphabetic part is the month code and the digits the 2-digit year
        # (e.g. ...F25 -> Jan 2025) — TODO(review) confirm code format.
        match = re.match(r"([a-z]+)([0-9]+)", code1, re.I)
        assert (match)
        month1 = where(monthCodes == match[1][-1])[0][0] + 1
        year1 = 2000 + int(match[2])
        mat1 = dt(year1, month1, 1)
        # Localize naive indices to GMT before comparing/appending.
        if not df_.index.tz:
            df_.index = df_.index.tz_localize(
                pytz.timezone('GMT'))  # Brazil/East

        # Preserve the earliest known start date: existing metadata vs the
        # first timestamp of the new data.
        md1 = mds.read_metadata(code1, f.second, mds.assetTS)
        if md1:
            tz = pytz.timezone('GMT')
            stDT = minimum(df_.index[0].to_pydatetime(), tz.localize(md1.stDT))
        else:
            stDT = df_.index[0].to_pydatetime()
        df_.md = metadataAsset(code1,
                               'future',
                               stDT,
                               fut_like=True,
                               maturity=mat1,
                               freq=f.second)

        df_.md.subtype = 'di_fut_intraday'

        #        print('appending {}.  {}/{}'.format(code1,i,len(uCode)))
        #        print('appending {}.  {}/{}'.format(code1,i,len(uCode)))

        # De-duplicate the index before appending (duplicate timestamps are
        # dropped by the uu helper).
        if not df_.index.is_unique:
            df_ = uu.drop_duplicates_index(df_)

        # try:
        #     mds.delete(code1,f.second,mds.assetTS)
        # except:
        #     pass
        mds.append(df_, mds.assetTS, check_metadata=False)
コード例 #5
0
    def doBars(self):  #for intraday bars
        """Poll Bloomberg for intraday bars and mirror them into the stores.

        For each row of ``self.onlineTable`` whose update interval has
        elapsed, fetch new 1-minute-style bars since ``lastBlpIndex`` and
        append them to ``mds.onlineTS``; on the (less frequent) daily
        interval, also roll today's last bar into the daily series copied
        from ``mds.assetVS`` and rewrite it in ``mds.onlineVS``.

        Rate-limited: returns immediately if called again within 1 second.
        """
        from mDataStore.mongo import mDataFrame, metadataAsset
        from mDataStore import mongo

        if self.onlineTable is None:
            return

        t1 = time.time()
        # print('doBars1')
        # Throttle: at most one pass per second.
        if hasattr(self, 't0_bars') and (t1 - self.t0_bars < 1):
            return

        self.t0_bars = t1
        # Midnight today (naive) and its Sao Paulo-localized counterpart.
        dt_today = dt.today().replace(hour=0,
                                      minute=0,
                                      second=0,
                                      microsecond=0)
        dt_today_loc = pd.Timestamp(dt_today).tz_localize('America/Sao_Paulo')
        # Far-future upper bound for the Bloomberg query (one year ahead).
        dt_max = dt.today().replace(year=dt_today.year + 1,
                                    hour=0,
                                    minute=0,
                                    second=0,
                                    microsecond=0)
        if not 'lastBlpIndex' in self.onlineTable:
            self.onlineTable['lastBlpIndex'] = dt_today_loc
        for i in range(self.onlineTable.shape[0]):
            a = Dict(self.onlineTable.iloc[i].to_dict())
            # Skip rows without a usable short name / frequency.
            if (not isinstance(a.INTRA_SHORT_NAME, str)) or (not isinstance(
                    a.freq, str)):
                continue

            nfreq, sfreq = mongo.splitFreq(a.freq)  #assume freq is minutes

            if (t1 - self.t0_intra[i] > a.updateInterval):
                # st1 = dt.now() - datetime.timedelta(seconds=a.updateInterval*5) #np.maximum(a.startDate,dt_today)
                # try:
                #     df_ = mds.read(a.security,a.freq,library=mds.mktBars,date_range=[st1,dt_max])
                #     df1 = self.blp.getIntradayHistoricData(a.security, nfreq, st1, dt_max, event=a.event,
                #                                            **a.kwargs)
                # except Exception as e: #first query of the day - get all times
                #     df1 = self.blp.getIntradayHistoricData(a.security, nfreq, dt_today, dt_max, event=a.event,
                #                                            **a.kwargs)
                # df1 = self.blp.getIntradayHistoricData(a.security, nfreq, dt_today, dt_max, event=a.event,**a.kwargs)

                self.t0_intra[i] = t1
                # Reuse stored metadata when available; otherwise build a
                # fresh equity metadataAsset for this feeder.
                try:
                    md = mds.read_metadata(
                        a.INTRA_SHORT_NAME, '1Minute',
                        mds.assetTS2)  #a.security.split(' ')[0]
                    md.freq = a.freq
                except:
                    md = metadataAsset(a.INTRA_SHORT_NAME,
                                       'equity',
                                       freq=a.freq,
                                       feeder_id=a.FEEDER_ID)
                mds.blp = self.blp
                #dt_today
                # Fetch bars since the last seen index up to dt_max.
                df1 = self.blp.getIntradayHistoricDataBA(
                    a.FEEDER_ID,
                    nfreq,
                    self.onlineTable.lastBlpIndex[i],
                    dt_max,
                    md,
                    event=a.event,
                    mds=mds)
                if df1.shape[0] == 0:
                    continue

                # Remember where to resume the next poll.
                self.onlineTable.lastBlpIndex.values[i] = df1.index[-1]
                #                                            **a.kwargs)
                df1 = df1.rename(columns={'numEvents': 'trades'})
                if df1.index.tzinfo is None:
                    df1 = df1.tz_localize('GMT')
                print('doBars2 - ' + a.FEEDER_ID)
                try:
                    mds.append(df1,
                               library=mds.onlineTS,
                               replaceIntersection=True,
                               check_metadata=False)
                except Exception as e:
                    warn('Unable to append {}'.format(df1.md.name))
                    uu.printException(e)
                # if len(a.addrs) :
                #     self.putData({'messageType':'barsUpdate','data':a},a.addrs)

                # NOTE(review): this daily branch is nested inside the
                # intraday branch, so it only runs on passes where an
                # intraday update also ran — confirm that is intended.
                if (t1 - self.t0_daily[i] > a.dailyUpdateInterval):
                    # for each series in intradayQueries, check if the daily series is in onlineVS up to yesterday
                    # If not, simply copy the series from assetVS to onlineVS. If it is not up-to-date, warn
                    #
                    self.t0_daily[i] = t1

                    dt_today1 = dt_today + datetime.timedelta(1)
                    dt0 = dt(1900, 1, 1)

                    if (df1.shape[0] == 0) or df1.index[-1] < dt_today_loc:
                        warn(
                            'No prices for {}/{} today ({}) in bars - (intraday/onlineTS)'
                            .format(a.INTRA_SHORT_NAME, nfreq, dt_today))
                        continue

                    # Load the full daily history from the versioned store.
                    try:
                        dfd = mds.read(a.daily_shortname,
                                       '1BDay',
                                       library=mds.assetVS,
                                       date_range=[dt0, dt_today1])
                    except Exception as e:
                        print(
                            'Unable to read {}/{} from assetVS in bars - daily'
                            .format(a.security, nfreq))
                        uu.printException(e)
                        continue
                    # df1 = df1.loc[df1.index<dt_today_loc]
                    # Build today's daily row from the last intraday bar:
                    # shared columns come from the bar, missing ones are NaN.
                    c1 = dfd.columns.intersection(df1.columns)
                    c2 = dfd.columns.difference(df1.columns)
                    dfi1 = df1[c1].iloc[-1]
                    lastUpdate = dfi1.name
                    # Row label becomes a naive midnight date to match dfd.
                    dfi1.name = dfi1.name.normalize().tz_localize(None)

                    for c in c2:
                        dfi1[c] = array(nan)

                    # if md.subtype == 'fut_rol':
                    if 'underlying' in dfd:
                        if not 'underlying' in df1:
                            warn(
                                'Ignoring {}/{} for Daily. Underlying not present in bloomberg results'
                                .format(a.INTRA_SHORT_NAME, nfreq))
                            continue
                        # Keep only the ticker root; skip if the intraday
                        # bar belongs to a different contract than the
                        # daily series' latest row.
                        dfi1['underlying'] = dfi1['underlying'].split(' ')[0]
                        if dfd.underlying[-1] != dfi1['underlying']:
                            continue

                        #check if it is the corerct future, if not continue

                    dfd_ = pd.DataFrame(dfi1).T

                    # Match dtypes column-by-column so concat keeps dfd's types.
                    for c in dfd_.columns:
                        # if dfd[c].dtype in [float32,float64,int32,int64]:
                        dfd_[c] = dfd_[c].astype(dfd[c].dtype)

                    # DI futures store the close as a yield.
                    if dfd.md.subtype == 'di_fut':
                        dfd_['yield_close'] = dfd_['close']
                        dfd_['close'] = NaN

                    df2 = pd.concat((dfd, dfd_))
                    df2.md = dfd.md
                    df2.md.lastUpdate = lastUpdate
                    # if (not 't0_daily' in a): #first uptade in the day
                    try:
                        mds.delete(df2.md.name,
                                   df2.md.freq,
                                   library=mds.onlineVS
                                   )  #make sure not accumulating versions
                    except Exception as e:
                        pass
                    try:
                        mds.write(df2,
                                  library=mds.onlineVS,
                                  check_metadata=False,
                                  prune_previous_version=True)
                    except Exception as e:
                        print(
                            'Unable to read {}/{} from assetVS in bars - daily'
                            .format(a.security, nfreq))
                        uu.printException(e)
コード例 #6
0
def bloombergUpdateIntraday(meta, dti, bOnlyUpdateMetadata=False, srsMeta=None, overwrite=False,
                            dtm1=None, onlineTS='onlineTS', keep_metadata=False):
    """Download 1-minute intraday history from Bloomberg and append it.

    For every metadata entry that defines ``feeder_id_intraday``, fetch bars
    from *dti* to now via ``blp.getIntradayHistoricDataBA`` and append them
    to the ``assetTS2`` library, retrying once with
    ``replaceIntersection=True`` on failure.

    Parameters
    ----------
    meta : iterable of dict
        Asset metadata records; only those with ``feeder_id_intraday`` are used.
    dti : datetime
        Start of the requested range (may be advanced to the stored end date).
    dtm1 : datetime, optional
        Upper bound for the append date_range; defaults to yesterday 00:00.
    overwrite, keep_metadata : bool
        Passed through to ``mds.append``.

    Returns
    -------
    list
        Names of the series that could not be appended.
    """
    from mDataStore import mDataFrame, metadataFundamental, metadataOption, metadataAsset, metadataIndicator, \
        metadataStrategy

    meta1 = [m for m in meta if 'feeder_id_intraday' in m]
    N = len(meta1)
    if dtm1 is None:
        # Default upper bound: start of yesterday (naive timestamp).
        # (Removed an unused `pytz.timezone('GMT')` local from the original.)
        dtm1 = (dt.now() - datetime.timedelta(1)).replace(hour=0, minute=0, second=0, microsecond=0)

    lst_failures = []
    for k in tqdm(range(N)):
        interval = 1  # bar size in minutes

        # Parse per-asset Bloomberg options from the metadata string.
        # SECURITY NOTE: eval() executes arbitrary code — acceptable only
        # because this metadata is internal/trusted; never feed it external
        # input.
        if not 'options_intraday' in meta1[k] or not isinstance(meta1[k]['options_intraday'], str) or meta1[k]['options_intraday'] == '':
            options = {}
        else:
            options = eval('dict(' + meta1[k]['options_intraday'] + ')')

        md = copy(meta1[k])
        md['freq'] = '1Minute'
        md = metadataAsset(**md)
        library1 = 'assetTS2'
        # Drop stale range markers so the store recomputes them on append.
        if 'stDT' in md:
            md.pop('stDT')
        if 'endDT' in md:
            md.pop('endDT')

        # If the stored series already ends after the requested start, fetch
        # only from the stored end date onward.
        # NOTE(review): dti is reassigned here and the new value persists for
        # subsequent iterations of this loop — confirm that is intended.
        mt_ant = mds.read_metadata(md['name'], '1Minute', library1)
        if mt_ant and 'endDT' in mt_ant and dti > mt_ant['endDT']:
            dti = pd.Timestamp(mt_ant['endDT']).normalize().to_pydatetime()

        df = blp.getIntradayHistoricDataBA(md.feeder_id_intraday, interval, dti, dt.now(), md, **options)
        if df.shape[0] == 0:
            continue

        # First try an append honoring *overwrite*; on failure retry forcing
        # replaceIntersection=True before marking this name as failed.
        try:
            mds.append(df, date_range=[dti, dtm1], library=library1, check_metadata=False,
                       keep_metadata=keep_metadata, replaceIntersection=overwrite)
            successi = True
        except Exception:
            # FIX: bare `except:` also swallowed KeyboardInterrupt/SystemExit.
            try:
                mds.append(df, date_range=[dti, dtm1], library=library1, check_metadata=False,
                           replaceIntersection=True, keep_metadata=keep_metadata)
                # BUG FIX: the original never set successi on a successful
                # retry, so it raised NameError (or reused the previous
                # iteration's value) and could report a false failure.
                successi = True
            except Exception:
                successi = False

        if not successi:
            lst_failures.append(df.md.name)

    return lst_failures