Example #1
def data_from_bloomberg(equity, field, start, end, pselect, nontrading, fia=False):
	start = datetime.strptime(start, '%m/%d/%Y').strftime('%Y%m%d')
	end = datetime.strptime(end, '%m/%d/%Y').strftime('%Y%m%d')
	con = pdblp.BCon(debug=False, port=8194, timeout=5000)
	if not con.start():
		print("ERROR: ****Connection could not be established with the data source****")
	
	bbg_df = con.bdh(equity, field, start, end, elms=[("periodicityAdjustment", 'CALENDAR'),
													  ("periodicitySelection", pselect),
													  ('nonTradingDayFillMethod', 'PREVIOUS_VALUE'),
													  ('nonTradingDayFillOption', nontrading)])
	con.stop()
	if bbg_df.empty:
		print("ERROR: ****Couldn't fetch data. Dataframe is empty****")
	
	elif not fia:
		bbg_df = bbg_df.resample('M', closed='right').last()
		bbg_df.ffill(inplace=True)
		bbg_df.to_csv(src + 'data_from_bbg.csv')
		read_bbg_file = pd.read_csv(src + 'data_from_bbg.csv', header=[0])
		read_bbg_file.iloc[0:1] = np.nan
		read_bbg_file.dropna(axis=0, inplace=True)
		read_bbg_file.set_index('ticker', inplace=True)
		read_bbg_file.index.name = 'Date'
		ticker_columns = [c.split(' ')[0] for c in read_bbg_file.columns.tolist()]
		read_bbg_file.columns = ticker_columns
		read_bbg_file.index = pd.to_datetime(read_bbg_file.index)
		read_bbg_file[ticker_columns] = read_bbg_file[ticker_columns].apply(pd.to_numeric, errors='coerce', axis=1)
		return read_bbg_file
	else:
		bbg_df.to_csv(src + 'fia_index_data_from_bbg.csv')
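# Hedged usage sketch of the helper above (illustrative ticker/field; assumes the
# module-level imports, a valid `src` output path, and a running terminal):
# monthly_px = data_from_bloomberg('SPY US Equity', 'PX_LAST', '01/31/2010',
#                                  '12/31/2020', pselect='MONTHLY',
#                                  nontrading='ALL_CALENDAR_DAYS')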
Example #2
 def __init__(self, user_settings):
     HistoricalMarketData.__init__(self, user_settings)
     self.user_settings = user_settings
     self.bbg_object = pdblp.BCon(debug=True,
                                  port=self.user_settings.bloomberg_port)
     self.bbg_object.start()
     self.bdh = self.cacher.cache(self.bbg_object.bdh, ignore=['self'])
     pass
Example #3
def bbloadML(ticker, start_date, end_date):

    con = pdblp.BCon(debug=False, port=8194, timeout=5000)
    con.start()
    a = con.bdh(ticker, ['PX_LAST', 'FUT_AGGTE_VOL'], start_date, end_date)
    a.columns = ['close', 'volume']

    return a
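# Hedged usage sketch (illustrative ticker; requires a live Bloomberg session):
# es = bbloadML('ES1 Index', '20200101', '20201231')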
Example #4
def test_non_empty_session_queue(port, host):
    sopts = blpapi.SessionOptions()
    sopts.setServerHost(host)
    sopts.setServerPort(port)
    session = blpapi.Session(sopts)
    session.start()
    with pytest.raises(ValueError):
        pdblp.BCon(session=session)
Example #5
def bdh(ticker, flds, start, end):
    con = pdblp.BCon(debug=False, port=8194, timeout=5000)
    con.start()
    result = con.bdh([ticker], flds, start, end).reset_index()
    result.columns = ['asofdate', ticker]
    result.set_index('asofdate', inplace=True)
    result.index = pd.to_datetime(result.index)
    result[ticker] = result[ticker].astype(float)
    return result
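# Hedged usage sketch (illustrative arguments; requires a live Bloomberg session):
# spx = bdh('SPX Index', 'PX_LAST', '20200101', '20201231')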
Example #6
def test_connection_error(port):
    con = pdblp.BCon(port=port+1)
    with pytest.raises(ConnectionError):
        con.start()
    def test_bdh_one_ticker_one_field_pivoted(self):
        df = self.con.bdh('SPY US Equity', 'PX_LAST', '20150629', '20150630')
        midx = pd.MultiIndex(levels=[["SPY US Equity"], ["PX_LAST"]],
                             labels=[[0], [0]], names=["ticker", "field"])
        df_expect = pd.DataFrame(
            index=pd.date_range("2015-06-29", "2015-06-30"),
            columns=midx,
            data=[205.42, 205.85]
        )
        df_expect.index.names = ["date"]
        assert_frame_equal(df, df_expect)
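# Note: pandas 0.24+ renamed MultiIndex's `labels` argument to `codes`, so on a
# current install the expected frame would use codes=[[0], [0]] instead.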
Example #7
def bbload(ticker, start_date, end_date):

    name = ticker[:3]

    con = pdblp.BCon(debug=False, port=8194, timeout=5000)
    con.start()
    a = con.bdh(ticker, ['PX_LAST'], start_date, end_date)
    a.columns = ['close']

    # save in global
    global F
    F['ticker'] = ticker  #keep the ticker
    F['name'] = name  #give it a short name without spaces

    return a
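# Hedged usage sketch (assumes a module-level dict such as F = {} already exists):
# px = bbload('SPX Index', '20200101', '20201231')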
Example #8
def bbload(ticker, start_date, end_date):
    
    global BBCACHE
    global BBDATACACHEPATH
        
    name = ticker[:3]
    
    CSVcachefilename = BBDATACACHEPATH + ticker + '.' + start_date + end_date + '.csv'

    if ticker in BBCACHE:
        a = BBCACHE[ticker]
        print('USING CACHED')

    else:
   
        # try to load CSV first, it is easier than BB and good for those without BB
        try: 
            a = pd.read_csv(CSVcachefilename, index_col = "date" )
            print('Loaded from CSV ' + CSVcachefilename)
        
        # If that fails, load from BB
        except Exception:
            con = pdblp.BCon(debug=False, port=8194, timeout=5000)
            con.start()
            a = con.bdh(ticker, ['PX_LAST'],  start_date, end_date )
            a.columns=['close']

            #save as csv 
            #a.to_csv(CSVcachefilename)
            #print('Loaded from BB and Saved to '+CSVcachefilename)

        #cache
        BBCACHE[ticker] = a

    # save in global
    global F
    F['ticker'] = ticker  # keep the ticker
    F['name'] = name  # give it a short name without spaces

    return a
Example #9
def create_connection(port=_PORT_, timeout=_TIMEOUT_, restart=False):
    """
    Create Bloomberg connection

    Returns:
        (Bloomberg connection, if connection is new)
    """
    if _CON_SYM_ in globals():
        if not isinstance(globals()[_CON_SYM_], pdblp.BCon):
            del globals()[_CON_SYM_]

    if (_CON_SYM_ in globals()) and (not restart):
        con = globals()[_CON_SYM_]
        if getattr(con, '_session').start(): con.start()
        return con, False

    else:
        con = pdblp.BCon(port=port, timeout=timeout)
        globals()[_CON_SYM_] = con
        con.start()
        return con, True
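# Hedged usage sketch: reuses the cached module-level connection when one exists.
# con, is_new = create_connection()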
Example #10
def main():
    # password
    with open(CRED_PATH) as f:
        auth = tuple(json.load(f))

    # blp
    blp = pdblp.BCon(timeout=10000)
    blp.start()

    tickers = []
    tickers += CORR
    for prod in [XCCY, IRS, FXS]:
        type_, ccys, periods = prod["type"], prod["ccys"], prod["periods"]
        tickers += [
            f"{ccy}{type_}{period} Curncy"
            for ccy, period in itertools.product(ccys, periods)
        ]

    def _get_data(ticker):
        print(ticker)
        try:
            df = blp.bdh(ticker,
                         "PX_LAST",
                         start_date=START_DATE,
                         end_date="20990101")
        except (ValueError, RuntimeError):
            print("retry " + ticker)
            time.sleep(2)
            df = blp.bdh(ticker,
                         "PX_LAST",
                         start_date=START_DATE,
                         end_date="20990101")
        df.columns = list(df.columns.get_level_values(0))
        return df

    dfs = {ticker: _get_data(ticker) for ticker in tickers}

    for ticker, df in dfs.items():
        _upload_df(df, ticker, auth)
Example #11
 def __init__(self, use_debug: bool=True):
     self.con = pdblp.BCon(debug=use_debug, port=8194, timeout=10000)
     self.con.start()
     self.default_start_date = '19500101'
Example #12
def collect_data_from_ticker(ticker):

    con = pdblp.BCon(timeout=50000)
    con.start()

    print('Collecting data of',
          con.ref(ticker, 'ID_BB_ULTIMATE_PARENT_CO_NAME')['value'][0],
          'for bond :',
          con.ref(ticker, 'SECURITY_NAME')['value'][0], '\n')

    issue_date = con.ref(ticker, 'ISSUE_DT')['value'][0].strftime("%Y%m%d")
    intermediate_dates = get_date_range(issue_date, TODAY_DATE)
    df = pd.DataFrame(intermediate_dates, columns=['date'])

    # D/E ratio, Curr_Assets/Curr_Liabilities, EPS, ROA, ROE, EBITDA/REVENUE
    financial_features = [
        'TOT_DEBT_TO_TOT_EQY', 'CUR_RATIO', 'IS_EPS', 'RETURN_ON_ASSET',
        'RETURN_COM_EQY', 'EBITDA_TO_REVENUE'
    ]

    # DAYS TO MATURITY, DAYS TO NEXT COUPON, CALLABILITY, SENIORITY, COUPON TYPE
    bond_features = [
        'ISSUE_DT', 'MATURITY', 'CALLABLE', 'NORMALIZED_PAYMENT_RANK',
        'CPN_TYP', 'CPN_FREQ'
    ]

    # Composite ratings + changes
    print('**** Collecting bloomberg composite ratings ****')
    for i, day in enumerate(intermediate_dates):

        row = con.ref(ticker,
                      'BB_COMPOSITE',
                      ovrds=[('RATING_AS_OF_DATE_OVERRIDE', day)])
        current_rating = row['value'][0]
        df.loc[i, 'BB_COMPOSITE'] = current_rating

        if (i > 0):
            previous_rating = df['BB_COMPOSITE'][i - 1]
            comparison = compare_ratings_fitch(current_rating, previous_rating)
            if (comparison == 1):
                print('Upgrade from bloomberg composite on', day, 'from',
                      previous_rating, 'to', current_rating)
            if (comparison == -1):
                print('Downgrade from bloomberg composite on', day, 'from',
                      previous_rating, 'to', current_rating)

            df.loc[i, 'RTG_COMPOSITE_CHANGE'] = comparison

    # S&P ratings + changes
    print('**** Collecting S&P ratings ****')
    for i, day in enumerate(intermediate_dates):

        row = con.ref(ticker,
                      'RTG_SP',
                      ovrds=[('RATING_AS_OF_DATE_OVERRIDE', day)])
        current_rating = row['value'][0]
        df.loc[i, 'RTG_SP'] = current_rating

        if (i > 0):
            previous_rating = df['RTG_SP'][i - 1]
            comparison = compare_ratings_moody(current_rating, previous_rating)
            if (comparison == 1):
                print('Upgrade from S&P on', day, 'from', previous_rating,
                      'to', current_rating)
            if (comparison == -1):
                print('Downgrade from S&P on', day, 'from', previous_rating,
                      'to', current_rating)

            df.loc[i, 'RTG_SP_CHANGE'] = comparison

    # Fitch ratings + changes
    print('**** Collecting fitch ratings ****')
    for i, day in enumerate(intermediate_dates):

        row = con.ref(ticker,
                      'RTG_FITCH',
                      ovrds=[('RATING_AS_OF_DATE_OVERRIDE', day)])
        current_rating = row['value'][0]
        df.loc[i, 'RTG_FITCH'] = current_rating

        if (i > 0):
            previous_rating = df['RTG_FITCH'][i - 1]
            comparison = compare_ratings_fitch(current_rating, previous_rating)

            if (comparison == 1):
                print('Upgrade from Fitch on', day, 'from', previous_rating,
                      'to', current_rating)
            if (comparison == -1):
                print('Downgrade from Fitch on', day, 'from', previous_rating,
                      'to', current_rating)

            df.loc[i, 'RTG_FITCH_CHANGE'] = comparison

    # Moodys ratings + changes
    print('**** Collecting moodys ratings ****')
    for i, day in enumerate(intermediate_dates):

        row = con.ref(ticker,
                      'RTG_MOODY',
                      ovrds=[('RATING_AS_OF_DATE_OVERRIDE', day)])
        current_rating = row['value'][0]
        df.loc[i, 'RTG_MOODY'] = current_rating

        if (i > 0):
            previous_rating = df['RTG_MOODY'][i - 1]
            comparison = compare_ratings_moody(current_rating, previous_rating)

            if (comparison == 1):
                print('Upgrade from Moody on', day, 'from', previous_rating,
                      'to', current_rating)
            if (comparison == -1):
                print('Downgrade from Moody on', day, 'from', previous_rating,
                      'to', current_rating)

            df.loc[i, 'RTG_MOODY_CHANGE'] = comparison

    #Adding financial features to the dataset
    for feature in financial_features:
        print('**** Collecting', feature, '****')
        for i, day in enumerate(intermediate_dates):

            row = con.ref(ticker,
                          feature,
                          ovrds=[('FUNDAMENTAL_DATABASE_DATE', day)])
            value = row['value'][0]
            df.loc[i, feature] = value

    #Adding bond features to the dataset
    for feature in bond_features:
        print('**** Collecting', feature, '****')
        value = con.ref(ticker, feature)['value'][0]
        df[feature] = value

    df['date'] = pd.to_datetime(df['date'], format='%Y%m%d')
    df.set_index(df['date'], inplace=True)
    df = df.drop(columns=['date'])
    df.to_csv('no_price' + ticker + '.csv')

    #Adding the price of the security at the end of the day
    print('**** Collecting historical prices **** ')
    price_df = con.bdh(ticker, 'PX_LAST', issue_date, TODAY_DATE)
    price_df.columns = ['PX_LAST']
    price_df = price_df.reset_index()
    price_df['date'] = pd.to_datetime(price_df['date'])
    price_df.set_index(price_df['date'], inplace=True)
    price_df = price_df.drop(columns=['date'])
    price_df.to_csv('price' + ticker + '.csv')

    final_df = df.merge(price_df, on='date', how='inner')

    final_df.to_csv(ticker + '.csv')
    print('CSV file for ticker ', ticker, 'created')
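# The four rating loops above differ only in the Bloomberg field, the change column,
# and the comparison function; a hedged consolidation sketch of the same pattern:
def collect_ratings(con, ticker, df, dates, field, change_col, compare):
    for i, day in enumerate(dates):
        row = con.ref(ticker, field, ovrds=[('RATING_AS_OF_DATE_OVERRIDE', day)])
        df.loc[i, field] = row['value'][0]
        if i > 0:
            df.loc[i, change_col] = compare(df[field][i], df[field][i - 1])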
Example #13
def test_multi_start(port, host, timeout):
    con = pdblp.BCon(host=host, port=port, timeout=timeout)
    con.start()
    con.start()
Example #14
 def _connect(self):
     self.conn = pdblp.BCon(debug=True, port=8194, timeout=10000)
     self.conn.debug = False
Example #15
import pandas as pd
import pdblp
from typing import List
from .helpers import blpstring, write_datasets_to_file, load_dataset
from .cleaning import fill_na
from .constants import holidays, sectors, sector_valuation_fields, start_date, end_date, macroeconomic_indices,\
                      sector_etfs, sentiment_fields, ROOT_DIR, CLEAN_DATA_FOLDER

macroeconomic_data: List[pd.DataFrame] = []
sentiment_data: List[pd.DataFrame] = []
valuation_data: List[pd.DataFrame] = []

try:
    con = pdblp.BCon(timeout=30000)
    con.start()

    spx: pd.DataFrame = con.bdh(['SPX Index'], ['PX_LAST'],
                                blpstring(start_date), blpstring(end_date))

    valuation_data = load_dataset(sectors, sector_valuation_fields, start_date,
                                  end_date, con)
    macroeconomic_data = load_dataset(macroeconomic_indices, ['PX_LAST'],
                                      start_date, end_date, con)
    sentiment_data = load_dataset(sector_etfs, sentiment_fields, start_date,
                                  end_date, con)

    if len(sentiment_data) == 0 or len(valuation_data) == 0 or len(
            macroeconomic_data) == 0:
        raise ValueError(
            "Unable to load some of the data, check Bloomberg API and internet connection"
        )
except ConnectionError as err:
    # (assumed handler; the snippet's original except clause is truncated)
    print(f"Could not connect to the Bloomberg API: {err}")
Example #16
import pandas as pd
import math
import seaborn as snn
import numpy as np
import matplotlib
import requests
import sqlite3
import os
import datetime
import sqlalchemy as sql
import pdblp as bbg
from xbbg import blp

# %% assumption and settings
#create bloomberg connection
con = bbg.BCon(debug=False, port=8194, timeout=5000)
con.start()
today = datetime.datetime.now()
s_date = '2011-01-01'
e_date = today
#read ticker list
names = ['Tickers']
tix = pd.read_csv("tickers.csv", names=names).values.tolist()
# create a global df for all prices; the index is weekly (Friday) dates
global_index = pd.date_range(start=s_date, end=e_date, freq='W-FRI')
global_df = pd.DataFrame(index=global_index)

# %% bloomberg data requests
#create loop here for tickers in tix
for tt in tix:
    test_temp = blp.bdh(tickers=tt, flds='PX_LAST',
                        start_date=s_date, end_date=e_date)  # (call truncated in source; remaining arguments assumed)
Example #17
def RunBDH(Dates):
    import pandas as pd

    from datetime import datetime, timedelta

    import pdblp

    con = pdblp.BCon(debug=True, port=8194, timeout=5000)

    con.start()

    # Start and end dates
    DateStart = datetime(Dates[0], Dates[1], Dates[2])

    DateEnd = datetime(Dates[3], Dates[4], Dates[5])

    # bloomberg import function

    def BBG_import(Index, Start, End, Price):

        Import = con.bdh(Index, Price, Start.strftime('%Y%m%d'),
                         End.strftime('%Y%m%d'))  # bb import

        return Import.xs(Price, axis=1, level=1)  #  keep price

    # indices import (bloomberg)

    SX5E_Index = BBG_import('SX5E Index', DateStart, DateEnd,
                            'PX_LAST')  # SX5E Import : Eurostoxx 50

    SX5T_Index = BBG_import('SX5T Index', DateStart, DateEnd,
                            'PX_LAST')  # SX5T Import : Eurostoxx 50 incl. div

    LEGATREH_Index = BBG_import(
        'LEGATREH Index', DateStart, DateEnd, 'PX_LAST'
    )  # LEGATREH Import : Bloomberg Barclays Global-Aggregate Total Return Index

    HFRXEHE_Index = BBG_import(
        'HFRXEHE Index', DateStart, DateEnd,
        'PX_LAST')  # HFRXEHE Import : HFRX Equity Hedge EUR

    ERIXITEU_Index = BBG_import('ERIXITEU Index', DateStart, DateEnd,
                                'PX_LAST')  # ERIXITEU Import : Main Itraxx

    ITRXTX5I_Index = BBG_import('ITRXTX5I Index', DateStart, DateEnd,
                                'PX_LAST')  # ITRXTX5I Import : Xover

    RX1_Comdty = BBG_import('RX1 Comdty', DateStart, DateEnd,
                            'PX_LAST')  # RX1 Import : Bund 10 years

    # creation of third friday list

    # we need to know the date of each month third friday to know option tickers

    thirdfriday = pd.DataFrame(columns=["Date", "Prix"])

    a = -1

    datelist = pd.date_range(start=DateStart, end=DateEnd).tolist()

    for d in datelist:

        a = a + 1

        if d.weekday() == 4 and 15 <= d.day <= 21:

            if (d in SX5E_Index.index):

                thirdfriday = thirdfriday.append(pd.DataFrame({
                    "Date": [d],
                    "Prix": [SX5E_Index['SX5E Index'][d]]
                }),
                                                 ignore_index=True)

            else:

                thirdfriday = thirdfriday.append(pd.DataFrame({
                    "Date": [d - timedelta(days=1)],
                    "Prix": [SX5E_Index['SX5E Index'][d - timedelta(days=1)]]
                }),
                                                 ignore_index=True)

    # creation of call ticker list (option ticker according to the third fridays lists)

    pd.options.display.float_format = '{:,.0f}'.format

    thirdfriday['Strike'] = 50 * round(
        thirdfriday['Prix'].shift(1) * 1.01 / 50)

    thirdfriday['Strike'][0] = 50 * round(
        SX5E_Index['SX5E Index'][0] * 1.01 / 50)

    thirdfriday['Call'] = "SX5E " + thirdfriday['Date'].apply(
        lambda x: x.strftime('%m/%d/%y')
    ) + " C" + thirdfriday['Strike'].astype(int).map(str) + " Index"

    thirdfriday.to_excel('Dataframes/output_THIRDFRIDAY.xlsx',
                         index=True)  # excel export

    # indices export (excel)

    Indices_prices = pd.concat([
        SX5E_Index, SX5T_Index, LEGATREH_Index, HFRXEHE_Index, ERIXITEU_Index,
        ITRXTX5I_Index, RX1_Comdty
    ],
                               axis=1)

    Indices_prices.to_excel('Dataframes/output_INDICES.xlsx',
                            sheet_name='Sheet_name_1',
                            index=True)  # excel export

    # option prices import (bloomberg) + export (excel)

    sorted_data_PXLAST = BBG_import(list(thirdfriday['Call']), DateStart,
                                    DateEnd,
                                    'PX_LAST')  # SX5E Call prices import

    sorted_data_PXLAST = sorted_data_PXLAST.reindex(
        columns=thirdfriday['Call'])  # sort data

    sorted_data_PXLAST.to_excel('Dataframes/output_OPTPRICE.xlsx',
                                index=True)  # excel export

    sorted_data_MID = BBG_import(list(thirdfriday['Call']), DateStart, DateEnd,
                                 'DELTA_MID')  # SX5E Call delta import

    sorted_data_MID = sorted_data_MID.reindex(
        columns=thirdfriday['Call'])  # sort data

    sorted_data_MID.to_excel('Dataframes/output_DELTAMID.xlsx',
                             sheet_name='Sheet_name_1',
                             index=True)  # excel export

    # futures prices import (bloomberg) + export (excel)

    Futures_Price = BBG_import('VG1 Index', DateStart, DateEnd,
                               'PX_LAST')  # SX5E Futures prices import

    Futures_Price.to_excel('Dataframes/output_FUTPRICE.xlsx',
                           sheet_name='Sheet_name_1',
                           index=True)  # excel export
Example #18
import numpy as np
from numpy import nan as Nan
import pandas as pd
import math
import scipy.optimize as sp
from scipy.interpolate import interp1d
from xbbg import blp
import blpapi as bbg
import pdblp
con = pdblp.BCon(debug=False, timeout=100000).start()
from datetime import date as dt
import datetime
import os

path_curva_spot = 'Path where the resulting zero-coupon curve data is written'


def path_dia(path_inicial, fecha):
    path_dia = path_inicial + fecha + '/'
    return path_dia


def path_final(path_con_fecha, economia):
    path_final = path_con_fecha + economia + '/'
    return path_final


def path_final_final(path_final, metodo):
    path = path_final + metodo + '/'
    return path
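# Hedged usage sketch of the path helpers (illustrative date/economy/method values):
# out_dir = path_final_final(path_final(path_dia(path_curva_spot, '2020-01-31'), 'MX'), 'nss')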
Example #19
def con(host, port, timeout):
    return pdblp.BCon(host=host, port=port, timeout=timeout).start()
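# Note: pdblp's BCon.start() returns the connection object itself, which is what
# makes this construct-and-start chaining work.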
Example #20
"""

import pandas as pd
import numpy as np
import pdblp as bbg
from utils import feriados
import scipy.optimize as solver
import matplotlib.pyplot as plt
from tqdm import trange
from scipy.stats import norm

fer = feriados()

# ============================================================
# Start the BBG connection to pull the equities data:
con = bbg.BCon(debug=False, timeout=8000)
con.start()
# ============================================================

# 4Y data window:
today = np.datetime64('today').astype('datetime64[D]')
yest = np.busday_offset(today, -1, holidays=fer)
begin = np.busday_offset(today, -1008, holidays=fer)
today_trat = today.astype(str).replace('-','')
yest_trat = yest.astype(str).replace('-','')
begin_trat = begin.astype(str).replace('-','')

cdi = con.bdh('BZDIOVRA Index', 'PX_LAST', yest_trat, yest_trat).values[0][0]/100.0

tickers = ['VALE3 BZ Equity', 'PETR4 BZ Equity', 'GGBR4 BZ Equity', 'CASH3 BZ Equity', 'PRIO3 BZ Equity']
tickers.sort()
Example #21
def get_vix_futures_prices(start_date, end_date=None):
    # avoid a def-time default: datetime.datetime.today() in the signature would be
    # frozen at import time
    if end_date is None:
        end_date = datetime.datetime.today()
    if not isinstance(start_date, datetime.date):
        start_date = parse(start_date)
    if not isinstance(end_date, datetime.date):
        end_date = parse(end_date)
        
    contract_months = ['f','g','h','j','k','m','n','q','u','v','x','z',]
    contract_expiry = range(1,13)
    contract_key = dict(zip(contract_expiry, contract_months))
    tickers = []
    for expiry in get_last_trading_days(start_date, end_date + datetime.timedelta(260), -1):
        if expiry > datetime.datetime.today() + datetime.timedelta(260):
            break
        elif expiry.year >= datetime.datetime.today().year:
            year = '{0:1d}'.format(expiry.year%2010)
        else:
            year = '{0:02d}'.format(expiry.year%1000)

        tickers.append('ux' + contract_key[expiry.month] + year)
        
    bb_tickers = [ticker + ' index' for ticker in tickers]
    start_date, end_date = start_date.strftime('%Y%m%d'), end_date.strftime('%Y%m%d')

    con = pdblp.BCon(debug=False)
    con.start()
    df_tmp1 = con.bdh(bb_tickers, 'px_last', start_date=start_date, end_date=end_date)
    con.stop()
    df_tmp1 = df_tmp1.xs('px_last', axis=1, level=1)
    df_tmp1.columns = [column.replace(' index','') for column in df_tmp1.columns]
    vix_contracts = df_tmp1[tickers].dropna(axis=1, how='all')
    vix_contracts.index = pd.to_datetime(vix_contracts.index)
    vix_contracts.columns = [get_contract_expiry(column) for column in vix_contracts.columns]
    vix_contracts.columns = pd.to_datetime(vix_contracts.columns)
    vix_contracts.index = pd.to_datetime(vix_contracts.index)
    
    vix_rolling_contracts = pd.DataFrame()
    ux1_date = vix_contracts.columns[0]
    ux2_date = vix_contracts.columns[1]
    ux3_date = vix_contracts.columns[2]
    ux4_date = vix_contracts.columns[3]
    ux5_date = vix_contracts.columns[4]
    ux6_date = vix_contracts.columns[5]
    ux7_date = vix_contracts.columns[6]
    for date in vix_contracts.index:
        if ux1_date <= date:
            for i, column in enumerate(vix_contracts.columns):
                if column > date:
                    ux1_date = ux2_date
                    if i < len(vix_contracts.columns) - 1:
                        ux2_date = vix_contracts.columns[i + 1]
                        ux3_date = vix_contracts.columns[i + 2]
                        ux4_date = vix_contracts.columns[i + 3]
                        ux5_date = vix_contracts.columns[i + 4]
                        ux6_date = vix_contracts.columns[i + 5]
                        ux7_date = vix_contracts.columns[i + 6]
                    break
        vix_rolling_contracts.loc[date, 'ux1'] = vix_contracts.loc[date, ux1_date]
        vix_rolling_contracts.loc[date, 'ux2'] = vix_contracts.loc[date, ux2_date]
        vix_rolling_contracts.loc[date, 'ux3'] = vix_contracts.loc[date, ux3_date]
        vix_rolling_contracts.loc[date, 'ux4'] = vix_contracts.loc[date, ux4_date]
        vix_rolling_contracts.loc[date, 'ux5'] = vix_contracts.loc[date, ux5_date]
        vix_rolling_contracts.loc[date, 'ux6'] = vix_contracts.loc[date, ux6_date]
        vix_rolling_contracts.loc[date, 'ux7'] = vix_contracts.loc[date, ux7_date]
    vix_rolling_contracts.index = pd.to_datetime(vix_rolling_contracts.index)
    return vix_rolling_contracts
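# Hedged usage sketch (requires a live terminal plus the helper functions above):
# vix_curve = get_vix_futures_prices('2015-01-01')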
Example #22
if len(End_Time_input) == 8:
    End_Time = End_Time_input
Fields = 'PX_LAST'


def rearrange_column(column_list, df):
    new_df = pd.DataFrame(index=[i.strftime('%m/%d/%Y') for i in df.index],
                          columns=column_list)
    for i in column_list:
        new_df[i] = df[i]
    return new_df


# Start Connection
con = pdblp.BCon(debug=False, port=8194)
con.start()
Index_Data = con.bdh(Index_assets, Fields, Asset_Start_Time, End_Time)
Index_Data = rearrange_column(Index_assets, Index_Data)
root = os.getcwd()
Index_Data.to_csv(root + '/Index_Price_' + Asset_Start_Time + '_' + End_Time +
                  '.csv')
print(Index_Data.head())

# Get Benchmark Data
benchmark_assets = ['NDDUWI Index', 'LEGATRUU Index']
Benchmark_Data = con.bdh(benchmark_assets, Fields, Asset_Start_Time, End_Time)
Benchmark_Data = rearrange_column(benchmark_assets, Benchmark_Data)
Benchmark_Data.to_csv(root + '/Benchmark_Price_' + Asset_Start_Time + '_' +
                      End_Time + '.csv')
print(Benchmark_Data.head())
Example #23
def refresh_pickle():
    try: cur_dir = os.path.dirname(__file__)
    except NameError: cur_dir = ''  # __file__ is undefined in interactive sessions
    data = pd.read_pickle(os.path.join(cur_dir, 'historical_data1.pickle'))
    last_date = data.index[-5].strftime('%Y%m%d')

    INDICATORS_WITH_LAG = ['tradhigh index']
    EQUITY_TICKERS   = ['xiv', 'vxx', 'tvix', 'spy', 'iwm', 'mdy', 'hyg']
    INDUSTRY_TICKERS = ['xle', 'xlb', 'xlf', 'iyz', 'xlv', 'xlk', 'xlp', 'xlu', 'xly', 'xli'] 
    INDEX_TICKERS    = ['spvixstr', 'sx5e', 'vix', 'pcrteqty', 'vvix', 'vxv', 'vxth', 'rxm', 'rvx', 'vxn',
                        'tyvix', 'jpmvxyg7', 'jpmvxyem', 'tradhigh', 'tradlows', 'cvxftncn', 'vxxiv', 'cesiusd', 'cesig10', 
                        'vcac', 'v2x', 'vhsi', 'vaex', 'vimex', 'tradcads', 'vnky', 'vftse', 'vkospi']
    MACRO_TICKERS    = ['nfp tch', 'injcjc', 'usmmmnch', 'injcsp', 'ip chng', 'consexp', 'dgnoxtch', 'pitlchng', 'lei chng', 'napmpmi', 'napmnmi', 'conssent', 'concconf']
    VIX_FUTURES      = ['ux1', 'ux2', 'ux3', 'ux4', 'ux5', 'ux6', 'ux7']
    VOL_FIELDS       = ['30DAY_IMPVOL_97.5%MNY_DF', '30DAY_IMPVOL_102.5%MNY_DF', '30DAY_IMPVOL_95.0%MNY_DF', '30DAY_IMPVOL_105.0%MNY_DF','3MTH_IMPVOL_95.0%MNY_DF', '3MTH_IMPVOL_105.0%MNY_DF']
    OTHER            = ['weight']
    equity_tks       = [tk + ' equity' for tk in EQUITY_TICKERS]
    industry_tks     = [tk + ' equity' for tk in INDUSTRY_TICKERS]
    index_tks        = [tk + ' index' for tk in INDEX_TICKERS]
    macro_tks        = [tk + ' index' for tk in MACRO_TICKERS]

    con = pdblp.BCon(debug=False)
    con.start()
    macro_data = con.bdh(macro_tks, 'ACTUAL_RELEASE', start_date=last_date, end_date=datetime.datetime.today().strftime('%Y%m%d'), ovrds=[('RELEASE_DATE_OVERRIDE','1')])
    index_data = con.bdh(index_tks, 'px_last', start_date=last_date, end_date=datetime.datetime.today().strftime('%Y%m%d'))
    equity_data = con.bdh(equity_tks, 'px_last', start_date=last_date, end_date=datetime.datetime.today().strftime('%Y%m%d'))
    industry_data = con.bdh(industry_tks, 'px_last', start_date=last_date, end_date=datetime.datetime.today().strftime('%Y%m%d'))
    vol_data = con.bdh('spy equity', VOL_FIELDS, start_date=last_date, end_date=datetime.datetime.today().strftime('%Y%m%d'))
    if (datetime.datetime.now().time() < datetime.time(9,30)) and (datetime.datetime.now().weekday() < 5):
        try:
            equity_data_pre_open = con.bdh(equity_tks, 'PX_LAST_ALL_SESSIONS', start_date=datetime.datetime.today().strftime('%Y%m%d'), end_date=datetime.datetime.today().strftime('%Y%m%d'))
            industry_data_pre_open = con.bdh(industry_tks, 'PX_LAST_ALL_SESSIONS', start_date=datetime.datetime.today().strftime('%Y%m%d'), end_date=datetime.datetime.today().strftime('%Y%m%d'))
            equity_data_pre_open = equity_data_pre_open.xs('PX_LAST_ALL_SESSIONS', axis=1, level=1)
            industry_data_pre_open = industry_data_pre_open.xs('PX_LAST_ALL_SESSIONS', axis=1, level=1)
        except Exception:
            equity_data_pre_open = None
            industry_data_pre_open = None
            pass
    else:
        equity_data_pre_open = None
        industry_data_pre_open = None

    con.stop()
    macro_df = macro_data.xs('ACTUAL_RELEASE', axis=1, level=1)
    index_df = index_data.xs('px_last', axis=1, level=1)
    equity_df = equity_data.xs('px_last', axis=1, level=1)
    industry_df = industry_data.xs('px_last', axis=1, level=1)
    vol_df = vol_data.xs('spy equity', axis=1, level=0)
    vol_df.columns = ['SPY_' + col for col in vol_df.columns]
    new_vix_futures_data = get_vix_futures_prices(start_date=last_date)
    if equity_data_pre_open is not None:
        equity_df = equity_df.append(equity_data_pre_open)
    if industry_data_pre_open is not None:
        industry_df = industry_df.append(industry_data_pre_open)
    new_data = equity_df.join(industry_df, how='outer').join(index_df, how='outer').join(macro_df, how='outer').join(vol_df, how='outer').join(new_vix_futures_data, how='outer')
    new_data.index = pd.to_datetime(new_data.index)
    new_data_new = new_data[~new_data.index.isin(data.index)].dropna(how='all')
    new_data_refresh = new_data[new_data.index.isin(data.index)].dropna(how='all')
    new_data_new['weight'] = new_data_new.index.map(get_contract_weight)
    new_data_new['contract_days_length'] = new_data_new.index.map(lambda x: get_contract_days_length(x, how='total'))
    new_data_new['contract_days_left'] = new_data_new.index.map(lambda x: get_contract_days_length(x, how='remaining'))
    data = data.append(new_data_new)
    data.update(new_data_refresh, join='left')
    data.sort_index(inplace=True)
    data.to_pickle(os.path.join(cur_dir, 'historical_data1.pickle'))
Example #24
 def __init__(self, Tickers, cBatches = 50):
     self.Tickers = Tickers
     self.cBatches = cBatches
     
     self.con = pdblp.BCon(debug=False, port=8194, timeout=5000)
     self.con.start()
Example #25
class Bloomberg:
    con = pdblp.BCon(debug=True, port=8194, timeout=5000)
    con.start()
Example #26
def get_bloomberg_data(stock_list, bbg_functions, start_date, end_date):
    con = pdblp.BCon(port=8194, debug=True)
    con.start()
    df = pd.DataFrame(con.bdh(stock_list, bbg_functions, start_date, end_date, longdata=True))
    return df
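# Note: with longdata=True pdblp returns a long-format frame with
# date/ticker/field/value columns rather than the pivoted MultiIndex layout.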
Example #27
 def __init__(self):
     self.con = pdblp.BCon(debug=False, port=8194)
     pass
Example #28
from datetime import datetime, timedelta
import math
import pandas as pd
import pandas_datareader.data as pdr
from pandas.tseries.offsets import BDay
import pdblp

con = pdblp.BCon(debug=True, port=8194, timeout=10000)
con.start()
con.debug = False


def yahoo_adj_close(stocks_list, start, debug=True):
    """
    Recover the adjusted closing price of a list of stocks
    :param stocks_list: List. List of stocks you want to recover information
    :param start: Integer. Number of previous years you want to recover data
    :param debug: Bool. If True, shows the progress of the data recovery
    :return: DataFrame with the adjusted closing prices
    """
    today = datetime.now().strftime("%Y-%m-%d")  # getting today's date
    start = (datetime.now() - timedelta(start * 365)).strftime("%Y-%m-%d")
    prices = pd.DataFrame()
    for stk in stocks_list:
        try:
            prices[stk] = pdr.DataReader(stk + '.SA',
                                         data_source='yahoo',
                                         start=start,
                                         end=today)['Adj Close']
        except Exception:
            if debug:
                print(f'Could not fetch data for {stk}')  # (assumed; the original body is truncated here)
    return prices  # (assumed per the docstring, which promises the prices frame)
Example #29
# -*- coding: utf-8 -*-
"""
Created on Mon Dec  2 15:05:23 2019

@author: Administrator
"""

import pdblp
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns

con = pdblp.BCon(debug=False, port=8194, timeout=5000)
con.start()

#indices = ['NYA Index', 'SPX Index', 'CCMP Index' ,'CDAX Index' ,'ASX Index', 'TPX Index', 'SHCOMP Index' , 'SZCOMP Index', 'XUTUM Index', 'MEXBOL Index',  'IBOV Index', 'IMOEX Index' , 'JALSH Index']

#price_earnings_ratio = con.bdh(indices, ['PE RATIO'],'19991231', '20191210')
from datetime import date


start = '20040101'
today = date.today().strftime('%Y%m%d')
firstday = '19991230'


pe_ratio_tickers =['NYA Index', 'SPX Index', 'CCMP Index','NDX Index','CDAX Index' ,'DAX Index', 
            'ASX Index','UKX Index', 'TPX Index','NKY Index', 'SHCOMP Index' , 
           'SZCOMP Index','XUTUM Index','XU100 Index',  'MEXBOL Index', 
           'IBOV Index', 'IMOEX Index' , 'JALSH Index']
Example #30
#Connecting to the Bloomberg terminal and downloading daily and intraday data

import pdblp
import talib  # the SMA/MACD calls below use TA-Lib, not the `ta` package
import numpy
import os

#Connect to Bloomberg
os.getcwd()
con = pdblp.BCon(debug=True, port=8194, timeout=5000)
con.start()

#Download daily SPY data
df0 = con.bdh(['SPY Equity'], 'PX_LAST', '20190101', '20191231')  # start date must precede end date
df0.head()

#Download daily GBPJPY data. The same format can be used for intraday data
df = con.bdib('GBPJPY BGN Curncy', '2019-12-02T00:00:00', '2019-12-10T13:30:00',
              'TRADE', interval=60)  # (bar interval in minutes; value assumed)
df = df.drop(columns=['volume', 'numEvents'])
df.head()
df.tail()

#Calculate Simple Moving Average (SMA) of different periods
df['SMA_50'] = talib.SMA(df['close'],timeperiod=50)
df['SMA_100'] = talib.SMA(df['close'],timeperiod=100)
df['SMA_200'] = talib.SMA(df['close'],timeperiod=200)
df.tail()

#Calculate MACD and MACD Signal
macd, macdsignal, macdhist = talib.MACD(df['close'], fastperiod=12, slowperiod=26, signalperiod=9)
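# Attach the MACD outputs to the frame (hedged continuation; column names assumed):
df['MACD'] = macd
df['MACD_signal'] = macdsignal
df['MACD_hist'] = macdhist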