def bdh(tickers, flds=None, start_date=None, end_date='today', adjust=None, **kwargs) -> pd.DataFrame:
    """
    Bloomberg historical data

    Args:
        tickers: ticker(s)
        flds: field(s) - defaults to ['Last_Price'] when omitted
        start_date: start date - defaults to 8 weeks before end date
        end_date: end date - default today
        adjust: `all`, `dvd`, `normal`, `abn` (=abnormal), `split`, `-` or None
                exact match of above words will adjust for corresponding events
                Case 0: `-` no adjustment for dividend or split
                Case 1: `dvd` or `normal|abn` will adjust for all dividends except splits
                Case 2: `adjust` will adjust for splits and ignore all dividends
                Case 3: `all` == `dvd|split` == adjust for all
                Case 4: None == Bloomberg default OR use kwargs
        **kwargs: overrides

    Returns:
        pd.DataFrame
    """
    logger = logs.get_logger(bdh, **kwargs)

    # Fill in defaults: last price, 8-week lookback ending at end_date
    if flds is None:
        flds = ['Last_Price']
    end_fmt = utils.fmt_dt(end_date, fmt='%Y%m%d')
    if start_date is None:
        start_date = pd.Timestamp(end_fmt) - pd.Timedelta(weeks=8)
    start_fmt = utils.fmt_dt(start_date, fmt='%Y%m%d')

    request = process.create_request(
        service='//blp/refdata',
        request='HistoricalDataRequest',
        **kwargs,
    )
    process.init_request(
        request=request, tickers=tickers, flds=flds,
        start_date=start_fmt, end_date=end_fmt, adjust=adjust, **kwargs,
    )
    logger.debug(f'Sending request to Bloomberg ...\n{request}')
    conn.send_request(request=request, **kwargs)

    res = pd.DataFrame(process.rec_events(process.process_hist, **kwargs))
    if kwargs.get('raw', False):
        return res
    if res.empty or any(fld not in res for fld in ['ticker', 'date']):
        return pd.DataFrame()

    # Pivot to (date x [ticker, field]) and restore the caller's ordering
    data = res.set_index(['ticker', 'date']).unstack(level=0)
    data = data.rename_axis(index=None, columns=[None, None])
    data = data.swaplevel(0, 1, axis=1)
    data = data.reindex(columns=utils.flatten(tickers), level=0)
    return data.reindex(columns=utils.flatten(flds), level=1)
def bds(tickers, flds, **kwargs) -> pd.DataFrame:
    """
    Bloomberg block data

    Args:
        tickers: ticker(s)
        flds: field
        **kwargs: other overrides for query

    Returns:
        pd.DataFrame: block data
    """
    logger = logs.get_logger(bds, **kwargs)

    # Multiple tickers: query each one individually and stack the results
    if not isinstance(tickers, str):
        parts = [bds(tickers=ticker, flds=flds, **kwargs) for ticker in tickers]
        if not parts:
            # pd.concat raises ValueError on an empty list - return empty frame instead
            return pd.DataFrame()
        return pd.DataFrame(pd.concat(parts, sort=False))

    # Serve from local cache when available - no Bloomberg session needed
    data_file = storage.ref_file(
        ticker=tickers, fld=flds, has_date=True, ext='pkl', **kwargs)
    if files.exists(data_file):
        logger.debug(f'Loading Bloomberg data from: {data_file}')
        return pd.DataFrame(pd.read_pickle(data_file))

    # Create the request only once we know a live query is required
    # (original built it unconditionally, opening a session even on cache hits)
    service = conn.bbg_service(service='//blp/refdata', **kwargs)
    request = service.createRequest('ReferenceDataRequest')
    process.init_request(request=request, tickers=tickers, flds=flds, **kwargs)
    logger.debug(f'Sending request to Bloomberg ...\n{request}')
    conn.send_request(request=request, **kwargs)

    res = pd.DataFrame(process.rec_events(func=process.process_ref, **kwargs))
    if kwargs.get('raw', False):
        return res
    if res.empty or any(fld not in res for fld in ['ticker', 'field']):
        return pd.DataFrame()

    data = (
        res
        .set_index(['ticker', 'field'])
        .droplevel(axis=0, level=1)
        .rename_axis(index=None)
        .pipe(pipeline.standard_cols, col_maps=kwargs.get('col_maps', None))
    )
    if data_file:
        logger.debug(f'Saving Bloomberg data to: {data_file}')
        files.create_folder(data_file, is_file=True)
        data.to_pickle(data_file)
    return data
def _bds_(
    ticker: str,
    fld: str,
    logger: logs.logging.Logger,
    use_port: bool = False,
    **kwargs,
) -> pd.DataFrame:
    """ Get data of BDS of single ticker """
    # BDS data is dated by default unless the caller says otherwise
    kwargs.setdefault('has_date', True)

    # Serve from local cache when a saved pickle exists
    data_file = storage.ref_file(ticker=ticker, fld=fld, ext='pkl', **kwargs)
    if files.exists(data_file):
        logger.debug(f'Loading Bloomberg data from: {data_file}')
        return pd.DataFrame(pd.read_pickle(data_file))

    req_type = 'PortfolioDataRequest' if use_port else 'ReferenceDataRequest'
    request = process.create_request(
        service='//blp/refdata',
        request=req_type,
        **kwargs,
    )
    process.init_request(request=request, tickers=ticker, flds=fld, **kwargs)
    logger.debug(f'Sending request to Bloomberg ...\n{request}')
    conn.send_request(request=request, **kwargs)

    res = pd.DataFrame(process.rec_events(func=process.process_ref, **kwargs))
    if kwargs.get('raw', False):
        return res
    if res.empty or not {'ticker', 'field'}.issubset(res.columns):
        return pd.DataFrame()

    data = (
        res
        .set_index(['ticker', 'field'])
        .droplevel(axis=0, level=1)
        .rename_axis(index=None)
        .pipe(pipeline.standard_cols, col_maps=kwargs.get('col_maps', None))
    )
    if data_file:
        logger.debug(f'Saving Bloomberg data to: {data_file}')
        files.create_folder(data_file, is_file=True)
        data.to_pickle(data_file)
    return data
def bdp(tickers, flds, **kwargs) -> pd.DataFrame:
    """
    Bloomberg reference data

    Args:
        tickers: tickers
        flds: fields to query
        **kwargs: Bloomberg overrides

    Returns:
        pd.DataFrame
    """
    logger = logs.get_logger(bdp, **kwargs)

    # Normalize scalar inputs to lists
    tickers = [tickers] if isinstance(tickers, str) else tickers
    flds = [flds] if isinstance(flds, str) else flds

    request = process.create_request(
        service='//blp/refdata',
        request='ReferenceDataRequest',
        **kwargs,
    )
    process.init_request(request=request, tickers=tickers, flds=flds, **kwargs)
    logger.debug(f'Sending request to Bloomberg ...\n{request}')
    conn.send_request(request=request, **kwargs)

    res = pd.DataFrame(process.rec_events(func=process.process_ref, **kwargs))
    if kwargs.get('raw', False):
        return res
    if res.empty or any(fld not in res for fld in ['ticker', 'field']):
        return pd.DataFrame()

    # Pivot to (ticker x field), keeping fields in response order
    data = (
        res
        .set_index(['ticker', 'field'])
        .unstack(level=1)
        .rename_axis(index=None, columns=[None, None])
        .droplevel(axis=1, level=0)
    )
    return data.loc[:, res.field.unique()].pipe(
        pipeline.standard_cols, col_maps=kwargs.get('col_maps', None))