def subscribe(tickers, flds=None, identity=None, options=None, **kwargs):
    """
    Subscribe to Bloomberg realtime market data.

    Args:
        tickers: single ticker or list of tickers
        flds: fields to subscribe, default: Last_Price, Bid, Ask
        identity: Bloomberg identity
        options: extra subscription options forwarded to SubscriptionList.add

    NOTE(review): this generator yields exactly once inside try/finally and
    is consumed via ``with subscribe(...)`` elsewhere in this file —
    presumably it is wrapped by ``contextlib.contextmanager`` at the
    definition site; confirm upstream.
    """
    logger = logs.get_logger(subscribe, **kwargs)

    # Normalize scalar arguments into lists
    if isinstance(tickers, str):
        tickers = [tickers]
    if flds is None:
        flds = ['Last_Price', 'Bid', 'Ask']
    elif isinstance(flds, str):
        flds = [flds]

    subs = conn.blpapi.SubscriptionList()
    for ticker in tickers:
        topic = f'//blp/mktdata/{ticker}'
        cid = conn.blpapi.CorrelationId(ticker)
        logger.debug(f'Subscribing {cid} => {topic}')
        subs.add(topic, flds, correlationId=cid, options=options)

    try:
        conn.bbg_session(**kwargs).subscribe(subs, identity)
        yield
    finally:
        # Always tear the subscription down, even if the consumer bails out
        conn.bbg_session(**kwargs).unsubscribe(subs)
def rec_events(func, **kwargs):
    """
    Receive and process events from Bloomberg.

    Args:
        func: generator function processing one message,
              invoked as ``func(msg=msg, **kwargs)``
        **kwargs: arguments for input function; ``timeout`` (ms, default 500)
                  is consumed here for the session poll

    Yields:
        Elements of Bloomberg responses
    """
    timeout_counts = 0
    responses = [blpapi.Event.PARTIAL_RESPONSE, blpapi.Event.RESPONSE]
    timeout = kwargs.pop('timeout', 500)
    while True:
        ev = conn.bbg_session(**kwargs).nextEvent(timeout=timeout)
        if ev.eventType() in responses:
            for msg in ev:
                for r in func(msg=msg, **kwargs):
                    yield r
            # A final RESPONSE event means the request is complete
            if ev.eventType() == blpapi.Event.RESPONSE:
                break
        elif ev.eventType() == blpapi.Event.TIMEOUT:
            # Bail out after ~20 consecutive idle polls so a dead session
            # cannot hang the caller forever
            timeout_counts += 1
            if timeout_counts > 20:
                break
        else:
            # Fix: the termination check must inspect each *message* —
            # Event objects carry no messageType(), so the original
            # getattr(ev, 'messageType', lambda: None)() always returned
            # None and session shutdown was never detected. Also end the
            # generator (return), not just the message loop, once the
            # session is terminated.
            for msg in ev:
                if msg.messageType() == SESSION_TERMINATED:
                    return
def init_request(request: blpapi.request.Request, tickers, flds, **kwargs):
    """
    Populate a Bloomberg request with securities, fields and overrides.

    Args:
        request: Bloomberg request to initiate and append
        tickers: single ticker or list of tickers
        flds: single field or list of fields
        **kwargs: overrides; recognized keys include ``adjust``
                  (``'all'`` or substrings such as normal / abn / dvd /
                  split), ``start_date`` and ``end_date``
    """
    # Drain any stale events left on the session before building the request
    while conn.bbg_session(**kwargs).tryNextEvent():
        pass

    for sec in ([tickers] if isinstance(tickers, str) else tickers):
        request.append('securities', sec)
    for fld in ([flds] if isinstance(flds, str) else flds):
        request.append('fields', fld)

    # Translate the shorthand `adjust` flag into Bloomberg adjustment elements
    adjust = kwargs.pop('adjust', None)
    if isinstance(adjust, str) and adjust:
        if adjust == 'all':
            kwargs['CshAdjNormal'] = kwargs['CshAdjAbnormal'] = kwargs['CapChg'] = True
        else:
            kwargs['CshAdjNormal'] = 'normal' in adjust or 'dvd' in adjust
            kwargs['CshAdjAbnormal'] = 'abn' in adjust or 'dvd' in adjust
            kwargs['CapChg'] = 'split' in adjust

    for date_kw, elem_name in (('start_date', 'startDate'), ('end_date', 'endDate')):
        if date_kw in kwargs:
            request.set(elem_name, kwargs.pop(date_kw))

    for elem_name, elem_val in overrides.proc_elms(**kwargs):
        request.set(elem_name, elem_val)

    ovrds = request.getElement('overrides')
    for ovrd_fld, ovrd_val in overrides.proc_ovrds(**kwargs):
        ovrd = ovrds.appendElement()
        ovrd.setElement('fieldId', ovrd_fld)
        ovrd.setElement('value', ovrd_val)
async def live(tickers, flds=None, info=None, max_cnt=0, options=None, **kwargs):
    """
    Subscribe to and stream realtime data feeds from Bloomberg.

    Args:
        tickers: single ticker or list of tickers
        flds: fields to subscribe, default: LAST_PRICE, BID, ASK
        info: list of keys of interests (ticker will be included);
              defaults to const.LIVE_INFO
        max_cnt: max number of data points to receive (0 = no limit)
        options: extra subscription options

    Yields:
        dict: Bloomberg market data

    Examples:
        >>> # async for _ in live('SPY US Equity', info=const.LIVE_INFO): pass
    """
    from collections.abc import Iterable

    logger = logs.get_logger(live, **kwargs)
    evt_typs = conn.event_types()

    # Normalize fields to an upper-cased list
    if flds is None:
        s_flds = ['LAST_PRICE', 'BID', 'ASK']
    else:
        if isinstance(flds, str):
            flds = [flds]
        s_flds = [fld.upper() for fld in flds]

    # Normalize keys of interest; None falls back to the default set
    if isinstance(info, str):
        info = [info]
    if isinstance(info, Iterable):
        info = [key.upper() for key in info]
    if info is None:
        info = const.LIVE_INFO

    sess = conn.bbg_session(**kwargs)
    # Drain stale events before subscribing
    while sess.tryNextEvent():
        pass

    with subscribe(tickers=tickers, flds=s_flds, options=options, **kwargs):
        cnt = 0
        # Fix: the original `while True and cnt <= max_cnt` was off by one
        # (it entered another iteration when cnt == max_cnt, yielding extra
        # data points past the limit); cap strictly at max_cnt
        while (not max_cnt) or (cnt < max_cnt):
            try:
                ev = sess.tryNextEvent()
                if ev is None:
                    continue
                if evt_typs[ev.eventType()] != 'SUBSCRIPTION_DATA':
                    continue
                for msg, fld in product(ev, s_flds):
                    if not msg.hasElement(fld):
                        continue
                    if msg.getElement(fld).isNull():
                        continue
                    yield {
                        **{
                            'TICKER': msg.correlationIds()[0].value(),
                            'FIELD': fld,
                        },
                        **{
                            str(elem.name()): process.elem_value(elem)
                            for elem in msg.asElement().elements()
                            if (True if not info else str(elem.name()) in info)
                        },
                    }
                    if max_cnt:
                        cnt += 1
                        # Also enforce the cap mid-event: one event may
                        # carry several qualifying messages/fields
                        if cnt >= max_cnt:
                            break
            except ValueError as e:
                logger.debug(e)
            except KeyboardInterrupt:
                break
def bdtick(ticker, dt, session='allday', time_range=None, types=None, **kwargs) -> pd.DataFrame:
    """
    Bloomberg tick data

    Args:
        ticker: ticker name
        dt: date to download
        session: [allday, day, am, pm, pre, post]
        time_range: tuple of start and end time, interpreted in the
                    exchange timezone and converted to UTC; if given,
                    `session` is ignored (`dt` still supplies the date)
        types: str or list, one or combinations of [
            TRADE, AT_TRADE, BID, ASK, MID_PRICE,
            BID_BEST, ASK_BEST, BEST_BID, BEST_ASK,
        ]

    Returns:
        pd.DataFrame
    """
    logger = logs.get_logger(bdtick, **kwargs)

    if types is None:
        types = ['TRADE']
    exch = const.exch_info(ticker=ticker, **kwargs)
    if exch.empty:
        raise LookupError(f'Cannot find exchange info for {ticker}')

    if isinstance(time_range, (tuple, list)) and (len(time_range) == 2):
        cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d')
        # Fix: dropped the redundant intermediate
        # .tz_convert(process.DEFAULT_TZ) — the trailing .tz_convert('UTC')
        # made it dead work (same instants, same final timezone)
        time_rng = (
            pd.DatetimeIndex([
                f'{cur_dt} {time_range[0]}',
                f'{cur_dt} {time_range[1]}',
            ])
            .tz_localize(exch.tz)
            .tz_convert('UTC')
        )
    else:
        time_rng = process.time_range(dt=dt, ticker=ticker, session=session, **kwargs)

    # Drain stale events before sending a new request
    while conn.bbg_session(**kwargs).tryNextEvent():
        pass
    request = process.create_request(
        service='//blp/refdata',
        request='IntradayTickRequest',
        settings=[
            ('security', ticker),
            ('startDateTime', time_rng[0]),
            ('endDateTime', time_rng[1]),
            ('includeConditionCodes', True),
            ('includeExchangeCodes', True),
            ('includeNonPlottableEvents', True),
            ('includeBrokerCodes', True),
            ('includeRpsCodes', True),
            ('includeTradeTime', True),
            ('includeActionCodes', True),
            ('includeIndicatorCodes', True),
        ],
        append={'eventTypes': types},
        **kwargs,
    )
    logger.debug(f'Sending request to Bloomberg ...\n{request}')
    conn.send_request(request=request)

    res = pd.DataFrame(
        process.rec_events(func=process.process_bar, typ='t', **kwargs))
    if kwargs.get('raw', False):
        return res
    if res.empty or ('time' not in res):
        return pd.DataFrame()

    # Re-index on trade time in the exchange timezone and normalize columns
    return (
        res
        .set_index('time')
        .rename_axis(index=None)
        .tz_localize('UTC')
        .tz_convert(exch.tz)
        .pipe(pipeline.add_ticker, ticker=ticker)
        .rename(columns={
            'size': 'volume',
            'type': 'typ',
            'conditionCodes': 'cond',
            'exchangeCode': 'exch',
            'tradeTime': 'trd_time',
        })
    )
def bdib(ticker: str, dt, session='allday', typ='TRADE', **kwargs) -> pd.DataFrame:
    """
    Bloomberg intraday bar data

    Args:
        ticker: ticker name
        dt: date to download
        session: [allday, day, am, pm, pre, post]
        typ: [TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK]
        **kwargs:
            ref: reference ticker or exchange
                 used as supplement if exchange info is not defined for `ticker`
            batch: whether is batch process to download data
            log: level of logs
            cache / reload: control reading from and refreshing the local
                            parquet cache
            interval: bar interval in minutes, default 1

    Returns:
        pd.DataFrame
    """
    from xbbg.core import trials

    logger = logs.get_logger(bdib, **kwargs)

    ex_info = const.exch_info(ticker=ticker, **kwargs)
    if ex_info.empty:
        raise KeyError(f'Cannot find exchange info for {ticker}')

    ss_rng = process.time_range(dt=dt, ticker=ticker, session=session, tz=ex_info.tz, **kwargs)
    data_file = storage.bar_file(ticker=ticker, dt=dt, typ=typ)
    # Serve from the local cache when present (unless reload is forced)
    if files.exists(data_file) and kwargs.get('cache', True) and (not kwargs.get('reload', False)):
        res = (
            pd.read_parquet(data_file)
            .pipe(pipeline.add_ticker, ticker=ticker)
            .loc[ss_rng[0]:ss_rng[1]]
        )
        if not res.empty:
            logger.debug(f'Loading Bloomberg intraday data from: {data_file}')
            return res

    if not process.check_current(dt=dt, logger=logger, **kwargs):
        return pd.DataFrame()

    cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d')
    q_tckr = ticker
    if ex_info.get('is_fut', False):
        # NOTE(review): this compares an int (len) to ex_info['tickers'][0],
        # which looks like it should be len(ex_info['tickers'][0]) — left
        # as-is pending confirmation against the exchange config schema
        is_sprd = ex_info.get('has_sprd', False) and (len(ticker[:-1]) != ex_info['tickers'][0])
        if not is_sprd:
            q_tckr = fut_ticker(gen_ticker=ticker, dt=dt, freq=ex_info['freq'])
            if q_tckr == '':
                logger.error(f'cannot find futures ticker for {ticker} ...')
                return pd.DataFrame()

    info_log = f'{q_tckr} / {cur_dt} / {typ}'

    # Give up after repeated empty downloads for the same (ticker, dt, typ)
    trial_kw = dict(ticker=ticker, dt=dt, typ=typ, func='bdib')
    num_trials = trials.num_trials(**trial_kw)
    if num_trials >= 2:
        if kwargs.get('batch', False):
            return pd.DataFrame()
        logger.info(f'{num_trials} trials with no data {info_log}')
        return pd.DataFrame()

    while conn.bbg_session(**kwargs).tryNextEvent():
        pass
    # Download the full trading day, cache it, then slice to the session
    time_rng = process.time_range(dt=dt, ticker=ticker, session='allday', **kwargs)
    request = process.create_request(
        service='//blp/refdata',
        request='IntradayBarRequest',
        settings=[
            # Fix: request the *resolved* ticker — q_tckr was computed via
            # fut_ticker() for generic futures but the original request
            # still sent the generic `ticker`, leaving the resolution dead
            ('security', q_tckr),
            ('eventType', typ),
            ('interval', kwargs.get('interval', 1)),
            ('startDateTime', time_rng[0]),
            ('endDateTime', time_rng[1]),
        ],
        **kwargs,
    )
    logger.debug(f'Sending request to Bloomberg ...\n{request}')
    conn.send_request(request=request, **kwargs)

    res = pd.DataFrame(process.rec_events(func=process.process_bar, **kwargs))
    if res.empty or ('time' not in res):
        logger.warning(f'No data for {info_log} ...')
        trials.update_trials(cnt=num_trials + 1, **trial_kw)
        return pd.DataFrame()

    data = (
        res
        .set_index('time')
        .rename_axis(index=None)
        .rename(columns={'numEvents': 'num_trds'})
        .tz_localize('UTC')
        .tz_convert(ex_info.tz)
        .pipe(pipeline.add_ticker, ticker=ticker)
    )
    if kwargs.get('cache', True):
        storage.save_intraday(data=data[ticker], ticker=ticker, dt=dt, typ=typ, **kwargs)

    return data.loc[ss_rng[0]:ss_rng[1]]
def live(tickers, flds='Last_Price', max_cnt=None, json=False, **kwargs) -> dict:
    """
    Subscribe to and stream realtime data feeds from Bloomberg.

    Args:
        tickers: list of tickers
        flds: fields to subscribe
        max_cnt: max number of data points to receive (None = no limit)
        json: if data is required to convert to json

    Yields:
        dict: Bloomberg market data
    """
    logger = logs.get_logger(live, **kwargs)

    def get_value(element):
        """
        Get scalar value from a Bloomberg element; Name objects (and, when
        json=True, timestamps / dates / times) are stringified.

        Args:
            element: Bloomberg element

        Returns:
            Python scalar, or None when the element is null
        """
        conv = [conn.blpapi.name.Name]
        if json:
            conv += [pd.Timestamp, datetime.time, datetime.date]
        if element.isNull():
            return None
        value = element.getValue()
        if isinstance(value, np.bool_):
            return bool(value)
        if isinstance(value, tuple(conv)):
            return str(value)
        return value

    def _keep(value) -> bool:
        """True for meaningful values: not None / NaT / NaN, not blank str."""
        if value is None or value is pd.NaT:
            return False
        if isinstance(value, float) and np.isnan(value):
            return False
        if isinstance(value, str):
            return bool(value.strip())
        return True

    if isinstance(flds, str):
        flds = [flds]
    s_flds = [fld.upper() for fld in flds]
    with subscribe(tickers=tickers, flds=s_flds, **kwargs):
        cnt = 0
        while True if max_cnt is None else cnt < max_cnt:
            try:
                ev = conn.bbg_session(**kwargs).nextEvent(500)
                if conn.event_types()[ev.eventType()] != 'SUBSCRIPTION_DATA':
                    continue
                for msg in ev:
                    for fld in s_flds:
                        if not msg.hasElement(fld):
                            continue
                        if msg.getElement(fld).isNull():
                            continue
                        ticker = msg.correlationIds()[0].value()
                        values = {
                            **{'TICKER': ticker},
                            **{
                                str(elem.name()): get_value(elem)
                                for elem in msg.asElement().elements()
                            },
                        }
                        # Fix: the original filter used
                        # `value not in [np.nan, pd.NaT, None] or (...)`,
                        # where `in` relies on `==` — NaN != NaN so NaN data
                        # leaked through, and the first operand was already
                        # True for empty strings so they were never dropped
                        yield {
                            key: value
                            for key, value in values.items() if _keep(value)
                        }
                        cnt += 1
            except ValueError as e:
                logger.debug(e)
            except KeyboardInterrupt:
                break
def bdtick(ticker, dt, session='allday', types=None, **kwargs) -> pd.DataFrame:
    """
    Bloomberg tick data

    Args:
        ticker: ticker name
        dt: date to download
        session: [allday, day, am, pm, pre, post]
        types: str or list, one or combinations of [
            TRADE, AT_TRADE, BID, ASK, MID_PRICE,
            BID_BEST, ASK_BEST, BEST_BID, BEST_ASK,
        ]

    Returns:
        pd.DataFrame

    Raises:
        LookupError: if no exchange info can be found for `ticker`
    """
    logger = logs.get_logger(bdtick, **kwargs)

    if types is None:
        types = ['TRADE']
    if isinstance(types, str):
        types = [types]

    exch = const.exch_info(ticker=ticker, **kwargs)
    # Consistency / robustness: fail with a clear LookupError (matching the
    # sibling bdtick implementation) instead of crashing later with an
    # AttributeError on `exch.tz` when the exchange is unknown
    if exch.empty:
        raise LookupError(f'Cannot find exchange info for {ticker}')
    time_rng = process.time_range(dt=dt, ticker=ticker, session=session, tz=exch.tz, **kwargs)

    service = conn.bbg_service(service='//blp/refdata', **kwargs)
    request = service.createRequest('IntradayTickRequest')

    # Drain stale events before sending a new request
    while conn.bbg_session(**kwargs).tryNextEvent():
        pass

    request.set('security', ticker)
    for typ in types:
        request.append('eventTypes', typ)
    request.set('startDateTime', time_rng[0])
    request.set('endDateTime', time_rng[1])
    # Request all auxiliary tick attributes
    for flag in (
        'includeConditionCodes', 'includeExchangeCodes',
        'includeNonPlottableEvents', 'includeBrokerCodes',
        'includeRpsCodes', 'includeTradeTime',
        'includeActionCodes', 'includeIndicatorCodes',
    ):
        request.set(flag, True)

    logger.debug(f'Sending request to Bloomberg ...\n{request}')
    conn.send_request(request=request)

    res = pd.DataFrame(
        process.rec_events(func=process.process_bar, typ='t', **kwargs))
    if kwargs.get('raw', False):
        return res
    if res.empty or ('time' not in res):
        return pd.DataFrame()

    # Re-index on trade time in the exchange timezone and normalize columns
    return (
        res
        .set_index('time')
        .rename_axis(index=None)
        .tz_localize('UTC')
        .tz_convert(exch.tz)
        .pipe(pipeline.add_ticker, ticker=ticker)
        .rename(columns={
            'size': 'volume',
            'type': 'typ',
            'conditionCodes': 'cond',
            'exchangeCode': 'exch',
            'tradeTime': 'trd_time',
        })
    )