def bdib(ticker, dt, typ='TRADE', batch=False, log=logs.LOG_LEVEL) -> pd.DataFrame:
    """
    Download intraday data and save to cache

    Args:
        ticker: ticker name
        dt: date to download
        typ: [TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK]
        batch: whether this is a batch download process
        log: level of logs

    Returns:
        pd.DataFrame
    """
    from xbbg.core import missing

    logger = logs.get_logger(bdib, level=log)

    t_1 = pd.Timestamp('today').date() - pd.Timedelta('1D')
    whole_day = pd.Timestamp(dt).date() < t_1
    if (not whole_day) and batch:
        logger.warning(f'querying date {t_1} is too close, ignoring download ...')
        return pd.DataFrame()

    cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d')
    asset = ticker.split()[-1]
    info_log = f'{ticker} / {cur_dt} / {typ}'

    if asset in ['Equity', 'Curncy', 'Index', 'Comdty']:
        exch = const.exch_info(ticker=ticker)
        if exch.empty:
            return pd.DataFrame()
    else:
        logger.error(f'unknown asset type: {asset}')
        return pd.DataFrame()

    time_fmt = '%Y-%m-%dT%H:%M:%S'
    time_idx = pd.DatetimeIndex([
        f'{cur_dt} {exch.allday[0]}', f'{cur_dt} {exch.allday[-1]}'
    ]).tz_localize(exch.tz).tz_convert(DEFAULT_TZ).tz_convert('UTC')
    if time_idx[0] > time_idx[1]:
        time_idx -= pd.TimedeltaIndex(['1D', '0D'])

    q_tckr = ticker
    if exch.get('is_fut', False):
        if 'freq' not in exch:
            logger.error(f'[freq] missing in info for {info_log} ...')

        is_sprd = exch.get('has_sprd', False) and (len(ticker[:-1]) != exch['tickers'][0])
        if not is_sprd:
            q_tckr = fut_ticker(gen_ticker=ticker, dt=dt, freq=exch['freq'])
            if q_tckr == '':
                logger.error(f'cannot find futures ticker for {ticker} ...')
                return pd.DataFrame()

    info_log = f'{q_tckr} / {cur_dt} / {typ}'
    miss_kw = dict(ticker=ticker, dt=dt, typ=typ, func='bdib')
    cur_miss = missing.current_missing(**miss_kw)
    if cur_miss >= 2:
        if batch:
            return pd.DataFrame()
        logger.info(f'{cur_miss} trials with no data {info_log}')
        return pd.DataFrame()

    logger.info(f'loading data from Bloomberg: {info_log} ...')
    con, _ = create_connection()
    data = con.bdib(
        ticker=q_tckr, event_type=typ, interval=1,
        start_datetime=time_idx[0].strftime(time_fmt),
        end_datetime=time_idx[1].strftime(time_fmt),
    )

    if not isinstance(data, pd.DataFrame):
        raise ValueError(f'unknown output format: {type(data)}')

    if data.empty:
        logger.warning(f'no data for {info_log} ...')
        missing.update_missing(**miss_kw)
        return pd.DataFrame()

    data = data.tz_localize('UTC').tz_convert(exch.tz)
    storage.save_intraday(data=data, ticker=ticker, dt=dt, typ=typ)

    return pd.DataFrame() if batch else assist.format_intraday(data=data, ticker=ticker)
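
# Illustrative usage sketch (added for documentation; not part of the original
# module). It shows how `bdib` above would typically be called, assuming the
# module-level names it references (`logs`, `const`, `storage`, `assist`,
# `create_connection`, `fut_ticker`, `DEFAULT_TZ`) are importable and a
# Bloomberg session is reachable. The helper name, ticker, and date below are
# placeholders chosen for illustration, not values from the source.
def _example_bdib_usage() -> pd.DataFrame:
    # Pull 1-minute TRADE bars for a single (placeholder) ticker and date.
    # On success, `bdib` also persists the bars locally via
    # storage.save_intraday before returning the formatted frame.
    return bdib(ticker='SPY US Equity', dt='2024-01-05', typ='TRADE')
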
def wrapper(*args, **kwargs):

    scope = utils.func_scope(func=func)
    param = inspect.signature(func).parameters
    port = kwargs.pop('port', _PORT_)
    timeout = kwargs.pop('timeout', _TIMEOUT_)
    restart = kwargs.pop('restart', False)
    all_kw = {
        k: args[n] if n < len(args) else v.default
        for n, (k, v) in enumerate(param.items()) if k != 'kwargs'
    }
    all_kw.update(kwargs)
    log_level = kwargs.get('log', logs.LOG_LEVEL)

    for to_list in ['tickers', 'flds']:
        conv = all_kw.get(to_list, None)
        if hasattr(conv, 'tolist'):
            all_kw[to_list] = getattr(conv, 'tolist')()
        if isinstance(conv, str):
            all_kw[to_list] = [conv]

    cached_data = []
    if scope in ['xbbg.blp.bdp', 'xbbg.blp.bds']:
        to_qry = cached.bdp_bds_cache(func=func.__name__, **all_kw)
        cached_data += to_qry.cached_data

        if not (to_qry.tickers and to_qry.flds):
            if not cached_data:
                return pd.DataFrame()
            res = pd.concat(cached_data, sort=False).reset_index(drop=True)
            if not all_kw.get('raw', False):
                res = assist.format_output(
                    data=res, source=func.__name__,
                    col_maps=all_kw.get('col_maps', dict()),
                )
            return res

        all_kw['tickers'] = to_qry.tickers
        all_kw['flds'] = to_qry.flds

    if scope in ['xbbg.blp.bdib']:
        data_file = storage.hist_file(
            ticker=all_kw['ticker'], dt=all_kw['dt'], typ=all_kw['typ'],
        )
        if files.exists(data_file):
            logger = logs.get_logger(func, level=log_level)
            if all_kw.get('batch', False):
                return
            logger.debug(f'reading from {data_file} ...')
            return assist.format_intraday(data=pd.read_parquet(data_file), **all_kw)

    _, new = create_connection(port=port, timeout=timeout, restart=restart)
    res = func(**{k: v for k, v in all_kw.items() if k not in ['raw', 'col_maps']})
    if new:
        delete_connection()

    if scope.startswith('xbbg.blp.') and isinstance(res, list):
        final = cached_data + res
        if not final:
            return pd.DataFrame()
        res = pd.DataFrame(pd.concat(final, sort=False))

    if (scope in ['xbbg.blp.bdp', 'xbbg.blp.bds']) \
            and (not all_kw.get('raw', False)):
        res = assist.format_output(
            data=res.reset_index(drop=True), source=func.__name__,
            col_maps=all_kw.get('col_maps', dict()),
        )

    return res
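
# Context sketch (assumption, for illustration only): `wrapper` closes over
# `func`, `_PORT_`, and `_TIMEOUT_`, so it is presumably the inner function of
# a decorator applied to the query functions above (cache lookup before the
# call, connection setup/teardown around it). A minimal sketch of that
# enclosing pattern is shown below; the decorator name `with_bloomberg` is an
# assumption here, not confirmed by this excerpt.
#
#     import functools
#
#     def with_bloomberg(func):
#         # Preserve the wrapped function's name and docstring so that
#         # utils.func_scope(func=func) and the loggers stay meaningful.
#         @functools.wraps(func)
#         def wrapper(*args, **kwargs):
#             ...  # body as defined above
#         return wrapper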