Example #1
    def get_economic_data_history(self, start_date, finish_date, country_group, data_type,
                                  source='fred', cache_algo='internet_load_return'):

        #vendor_country_codes = self.fred_country_codes[country_group]
        #vendor_pretty_country = self.fred_nice_country_codes[country_group]

        if isinstance(country_group, list):
            pretty_country_names = country_group
        else:
            # get all the country names in the country_group
            pretty_country_names = list(self._econ_country_groups[
                self._econ_country_groups["Country Group"] == country_group]['Country'])

        # construct the pretty tickers
        pretty_tickers = [x + '-' + data_type for x in pretty_country_names]

        # get vendor tickers
        vendor_tickers = []

        for pretty_ticker in pretty_tickers:
            vendor_ticker = list(self._all_econ_tickers[
                                         self._all_econ_tickers["Full Code"] == pretty_ticker][source].values)

            if not vendor_ticker:
                vendor_ticker = None
                self.logger.error('Could not find match for ' + pretty_ticker)
            else:
                vendor_ticker = vendor_ticker[0]

            vendor_tickers.append(vendor_ticker)

        vendor_fields = ['close']

        if source == 'bloomberg': vendor_fields = ['PX_LAST']

        md_request = MarketDataRequest(
                start_date = start_date,                            # start date
                finish_date = finish_date,                          # finish date
                category = 'economic',
                freq = 'daily',                                     # daily data
                data_source = source,                               # data source (eg. fred or bloomberg)
                cut = 'LOC',
                tickers = pretty_tickers,
                fields = ['close'],                                 # which fields to download
                vendor_tickers = vendor_tickers,
                vendor_fields = vendor_fields,                      # which vendor fields to download
                cache_algo = cache_algo)                            # how to return data

        return self.market_data_generator.fetch_market_data(md_request)
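A minimal usage sketch (not from the original source): it assumes the method above sits on a data-loader class instance, hypothetically called econ_data_loader, and that a 'G10' country group and 'GDP' data type exist in its configuration tables.

df_gdp = econ_data_loader.get_economic_data_history(
    start_date='01 Jan 2000',
    finish_date='01 Jan 2020',
    country_group='G10',
    data_type='GDP',
    source='fred')

print(df_gdp.tail())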
Example #2
def test_binance():

    market = Market(market_data_generator=MarketDataGenerator())
    md_request = MarketDataRequest(
        start_date='18 Feb 2017',
        finish_date='20 Feb 2018',
        cut='LOC',
        freq='daily',
        data_source='binance',
        category='crypto',
        fields=['close', 'volume', 'quote-asset-volume'],
        tickers=['WTCXBT'])

    df = market.fetch_market(md_request)
    assert not df.empty
Example #3
def construct_backtest(ticker, vendor_ticker, sma_period, data_source, start_date, quandl_api_key):
    backtest = Backtest()
    br = BacktestRequest()

    # Set all the parameters for the backtest
    br.start_date = start_date
    br.finish_date = datetime.datetime.utcnow()
    br.spot_tc_bp = 2.5  # 2.5 bps bid/ask spread
    br.ann_factor = 252

    tech_params = TechParams()
    tech_params.sma_period = sma_period
    indicator = 'SMA'

    md_request = MarketDataRequest(
        start_date=start_date,
        finish_date=datetime.date.today(),
        freq='daily',
        data_source=data_source,
        tickers=ticker,
        fields=['close'],
        vendor_tickers=vendor_ticker,
        quandl_api_key=quandl_api_key)

    market = Market(market_data_generator=MarketDataGenerator())

    # Download the market data (the asset we are trading is also
    # being used to generate the signal)
    asset_df = market.fetch_market(md_request)
    spot_df = asset_df

    # Use technical indicator to create signals
    # (we could obviously create whatever function we wanted for generating the signal dataframe)
    # However, finmarketpy has some technical indicators built in (and some signals too)
    tech_ind = TechIndicator()
    tech_ind.create_tech_ind(spot_df, indicator, tech_params)
    signal_df = tech_ind.get_signal()

    # Calculate the trading P&L (using the same data for the asset and the signal)
    backtest.calculate_trading_PnL(br, asset_df, signal_df, None, False)

    # Get the returns and signals for the portfolio
    port = backtest.portfolio_cum()
    port.columns = [indicator + ' = ' + str(tech_params.sma_period) + ' ' + str(backtest.portfolio_pnl_desc()[0])]
    signals = backtest.portfolio_signal()
    # returns = backtest.pnl()

    return port, signals
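A hedged usage sketch: the ticker, Quandl code and API key below are illustrative placeholders, not values from the original script.

port, signals = construct_backtest(
    ticker=['EURUSD'],
    vendor_ticker=['FRED/DEXUSEU'],
    sma_period=20,
    data_source='quandl',
    start_date='01 Jan 2010',
    quandl_api_key='YOUR_QUANDL_API_KEY')

print(port.tail())
print(signals.tail())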
Example #4
    def getFxData(self,
                  startDate='14 Jun 2016',
                  endDate='15 Jun 2016',
                  tickers=['EURUSD'],
                  fields=['close'],
                  frequency='tick'):
        md_request = MarketDataRequest(start_date=startDate,
                                       finish_date=endDate,
                                       category=self.category,
                                       fields=fields,
                                       freq=frequency,
                                       data_source=self.datasource,
                                       tickers=tickers)

        market = Market(market_data_generator=MarketDataGenerator())
        return market.fetch_market(md_request)
Example #5
def plot_animated_vol_market():
    market = Market(market_data_generator=MarketDataGenerator())

    cross = ['EURUSD']
    start_date = '01 Mar 2017'
    finish_date = '21 Apr 2017'
    sampling = 'no'

    md_request = MarketDataRequest(start_date=start_date,
                                   finish_date=finish_date,
                                   data_source='bloomberg',
                                   cut='NYC',
                                   category='fx-implied-vol',
                                   tickers=cross,
                                   cache_algo='cache_algo_return')

    df = market.fetch_market(md_request)
    if sampling != 'no': df = df.resample(sampling).mean()
    fxvf = FXVolFactory()
    df_vs = []

    # Grab the vol surface for each date and create a dataframe for each date (could have used a panel)
    for i in range(0, len(df.index)):
        df_vs.append(fxvf.extract_vol_surface_for_date(df, cross[0], i))

    # Do static plot for first day using Plotly
    style = Style(title="FX vol surface of " + cross[0],
                  source="chartpy",
                  color='Blues')

    Chart(df=df_vs[0], chart_type='surface', style=style).plot(engine='plotly')

    # Now do animation (TODO: need to fix animation in chartpy for matplotlib)
    style = Style(title="FX vol surface of " + cross[0],
                  source="chartpy",
                  color='Blues',
                  animate_figure=True,
                  animate_titles=df.index,
                  animate_frame_ms=500,
                  normalize_colormap=False)

    # Chart object is initialised with the dataframe and our chart style
    Chart(df=df_vs, chart_type='surface',
          style=style).plot(engine='matplotlib')
Example #6
def generate_market_data_for_tests(start_date, finish_date):

    md_request = MarketDataRequest(start_date=start_date,
                                   finish_date=finish_date,
                                   cut='NYC',
                                   category='fx',
                                   fields=['bid'],
                                   freq='tick',
                                   data_source='dukascopy',
                                   tickers=['EURUSD'])

    market = Market(market_data_generator=MarketDataGenerator())
    try:
        df = market.fetch_market(md_request)
        return df
    except Exception:
        return None
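A hedged sketch of how the helper above might be used in a test, assuming Dukascopy is reachable from the test environment (the helper returns None when the download fails).

def test_tick_data_not_empty():
    df = generate_market_data_for_tests('14 Jun 2016', '15 Jun 2016')

    if df is not None:
        assert not df.empty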
Example #7
def test_bbg_download():

    market = Market(market_data_generator=MarketDataGenerator())

    freq = ["daily", "intraday"]

    for fr in freq:
        md_request = MarketDataRequest(
            start_date="week",  # start date
            data_source="bloomberg",  # use Bloomberg as data source
            freq=fr,
            tickers=["S&P 500", "EURUSD"],  # ticker (findatapy)
            fields=["close"],  # which fields to download
            vendor_tickers=["SPX Index", "EURUSD Curncy"],  # ticker (Yahoo)
            vendor_fields=["PX_LAST"])  # which Bloomberg fields to download)

        df = market.fetch_market(md_request)

        assert df is not None
Example #8
def load_minute_data(ticker,
                     start_date='01 Jan 2019',
                     finish_date='30 Jun 2019'):
    # Load tick data from DukasCopy (if it doesn't exist on disk) and then save it to disk as 1-minute data
    # This is in the UTC timezone
    # By default the path is the working directory, but we can change that
    raw_data_path = ''

    # Imports of various findatapy libraries for market data downloads
    from findatapy.market import Market, MarketDataRequest, MarketDataGenerator

    import os
    import pandas as pd

    # First we can do it by defining all the vendor fields, tickers etc. so we bypass the configuration file
    md_request = MarketDataRequest(start_date=start_date,
                                   finish_date=finish_date,
                                   fields=['bid', 'ask'],
                                   vendor_fields=['bid', 'ask'],
                                   freq='tick',
                                   data_source='dukascopy',
                                   tickers=ticker,
                                   vendor_tickers=ticker,
                                   category='fx')

    market = Market(market_data_generator=MarketDataGenerator())

    compression_type = 'gzip'  # you can change this to 'snappy' if you want!

    # Only download file if not on disk (slow to download)
    if not (os.path.exists(raw_data_path + ticker + '_1min.gzip')):
        df_tick = market.fetch_market(md_request)

        df_tick['mid'] = (df_tick[ticker + '.bid'] +
                          df_tick[ticker + '.ask']) / 2.0
        df_minute = pd.DataFrame(
            df_tick['mid'].resample("1min").first()).dropna()
        df_minute.to_parquet(raw_data_path + ticker + '_1min.gzip',
                             compression=compression_type,
                             engine='fastparquet')
    else:
        # Edit the line below if you want to pick only one of the yearly Parquet files
        # If you load the whole thing, you might run out of memory!
        df_minute = pd.read_parquet(raw_data_path + ticker + '_1min.gzip',
                                    engine='fastparquet')

    return df_minute
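A hedged usage sketch, assuming pandas is imported as pd at module level and that either Dukascopy access or a cached Parquet file is available.

df_minute = load_minute_data('EURUSD',
                             start_date='01 Jan 2019',
                             finish_date='31 Mar 2019')

print(df_minute.head())
print(df_minute.resample('1D').last().tail())  # quick daily view of the 1-minute mid prices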
Example #9
    def construct_strategy_benchmark(self):

        ###### FILL IN WITH YOUR OWN BENCHMARK

        tsr_indices = MarketDataRequest(
            start_date=self.br.start_date,  # start date
            finish_date=self.br.finish_date,  # finish date
            freq='daily',  # daily data
            data_source='quandl',  # use Quandl as data source
            tickers=["EURUSD"],  # tickers to download
            vendor_tickers=['FRED/DEXUSEU'],
            fields=['close'],  # which fields to download
            vendor_fields=['close'],
            cache_algo='cache_algo_return')  # how to return data

        df = self.market.fetch_market(tsr_indices)

        df.columns = [x.split(".")[0] for x in df.columns]

        return df
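A hedged calling sketch, assuming the method above is defined on a TradingModel-style class whose instance is hypothetically called strategy.

benchmark_df = strategy.construct_strategy_benchmark()
print(benchmark_df.tail())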
Example #10
def load_une_data():
    md_request = MarketDataRequest(
        start_date='01 Jan 2001',  # Start date
        finish_date='12 Aug 2019',  # Finish date
        tickers=us_states,  # What we want the tickers to look like once downloaded
        vendor_tickers=us_states_fred,  # The ticker used by the vendor
        fields=['close'],  # What fields we want (usually close; we can also define vendor fields)
        data_source='alfred',  # What is the data source?
        # vendor_fields=['actual-release', 'first-revision', 'close'],
        fred_api_key=FRED_API_KEY
    )  # Most data sources will require us to specify an API key/password

    market = Market(market_data_generator=MarketDataGenerator())

    df_une = market.fetch_market(md_request)
    df_une.columns = [x.replace('.close', '') for x in df_une.columns]

    return df_une
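A hedged sketch of the module-level inputs the function above relies on; the two states and their FRED unemployment-rate codes are illustrative, not the full list from the original script.

FRED_API_KEY = 'YOUR_FRED_API_KEY'
us_states = ['New York', 'Texas']   # how we want the tickers to appear
us_states_fred = ['NYUR', 'TXUR']   # corresponding ALFRED/FRED series codes

df_une = load_une_data()
print(df_une.tail())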
Example #11
def get_mid_price(raw_data_path, ticker='EURUSD'):
    # First we can do it by defining all the vendor fields, tickers etc. so we bypass the configuration file
    # We use findatapy
    md_request = MarketDataRequest(
        start_date='01 Jan 2007', finish_date='30 Jun 2019',
        fields=['bid', 'ask'], vendor_fields=['bid', 'ask'],
        freq='tick', data_source='dukascopy',
        tickers=[ticker], vendor_tickers=[ticker], category='fx')

    market = Market(market_data_generator=MarketDataGenerator())

    compression_type = 'gzip'  # you can change this to 'snappy' if you want!

    # Only download file if not on disk (slow to download),
    # then write to disk as parquet and CSV
    # Note: writing to CSV takes a long time, so we have commented it here!
    if not (os.path.exists(os.path.join(raw_data_path, ticker + '.gzip'))):
        df_tick = market.fetch_market(md_request)

        df_tick.to_parquet(os.path.join(raw_data_path, ticker + '.gzip'), compression=compression_type,
                           engine='fastparquet')

        start_year = df_tick.index[0].year
        finish_year = df_tick.index[-1].year

        for i in range(start_year, finish_year + 1):
            df_year = df_tick[df_tick.index.year == i]
            df_year.to_parquet(os.path.join(raw_data_path, ticker + '_' + str(i) + '.gzip'),
                               compression=compression_type, engine='fastparquet')
    else:
        # Edit the line below if you want to pick only one of the yearly Parquet files
        # If you load the whole thing, you might run out of memory!
        df_tick = pd.read_parquet(os.path.join(raw_data_path, ticker + '_2019.gzip'),
                                  engine='fastparquet')

    # calculate mid-price
    df_tick['mid'] = (df_tick[ticker + '.ask'] + df_tick[ticker + '.bid']) / 2.0

    # get 1 minute data
    return pd.DataFrame(df_tick['mid'].resample("1min").first()).dropna()
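A hedged usage sketch (assumes the raw data path exists and that either Dukascopy access or a cached yearly Parquet file is available).

df_mid_1min = get_mid_price('raw_data/', ticker='EURUSD')
print(df_mid_1min.tail())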
Example #12
def load_data():

    # Download the historical spot data once and store in memory, we'll process later
    market = Market(market_data_generator=MarketDataGenerator())

    market_data_request = MarketDataRequest(
        start_date='01 Jan 2000',  # Start date
        freq='daily',  # Daily data
        data_source='quandl',  # Use Quandl as data source
        tickers=tickers,  # Ticker (Cuemacro)
        fields=['close'],  # Which fields to download
        vendor_tickers=vendor_tickers,  # Ticker (Quandl)
        vendor_fields=['close'],  # Which Quandl fields to download
        cache_algo='cache_algo_return')  # How to return data

    # You need to type your Quandl API key below (or modify the DataCred file)
    # market_data_request.quandl_api_key = None

    df = market.fetch_market(market_data_request)
    df = df.fillna(method='ffill')

    df_ret = df / df.shift(1)

    return df, df_ret
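A hedged sketch of the module-level tickers the function above expects; the Quandl code shown is illustrative.

tickers = ['EURUSD']
vendor_tickers = ['FRED/DEXUSEU']

df, df_ret = load_data()
print(df_ret.tail())  # df_ret holds price relatives (today's price divided by yesterday's)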
Example #13
def plot_animated_vol_market():
    market = Market(market_data_generator=MarketDataGenerator())

    cross = ['GBPUSD']
    start_date = '01 Jun 2016'
    finish_date = '01 Aug 2016'
    sampling = 'no'

    md_request = MarketDataRequest(start_date=start_date,
                                   finish_date=finish_date,
                                   data_source='bloomberg',
                                   cut='LDN',
                                   category='fx-implied-vol',
                                   tickers=cross,
                                   cache_algo='internet_load_return')

    df = market.fetch_market(md_request)
    if sampling != 'no': df = df.resample(sampling).mean()
    fxvf = FXVolFactory()
    df_vs = []

    # grab the vol surface for each date and create a dataframe for each date (could have used a panel)
    for i in range(0, len(df.index)):
        df_vs.append(fxvf.extract_vol_surface_for_date(df, cross[0], i))

    style = Style(title="FX vol surface of " + cross[0],
                  source="chartpy",
                  color='Blues',
                  animate_figure=True,
                  animate_titles=df.index,
                  animate_frame_ms=500,
                  normalize_colormap=False)

    # Chart object is initialised with the dataframe and our chart style
    Chart(df=df_vs, chart_type='surface',
          style=style).plot(engine='matplotlib')
Example #14
def load_tickers():
    logger = LoggerManager.getLogger(__name__)

    market = Market(market_data_generator=MarketDataGenerator())

    DataConstants.market_thread_technique = 'thread'

    # load S&P 500 ticker via wikipedia
    snp = pd.read_html(
        'https://en.wikipedia.org/wiki/List_of_S%26P_500_companies')
    tickers = snp[0]['Symbol'].to_list()

    # download equities data from Yahoo
    md_request = MarketDataRequest(
        start_date=START_DATE,
        data_source='yahoo',  # use Yahoo as data source
        tickers=tickers,  # ticker (findatapy)
        fields=['close', 'open', 'high', 'low',
                'volume'],  # which fields to download
        vendor_tickers=tickers,  # ticker (Yahoo)
        vendor_fields=['Close', 'Open', 'High', 'Low',
                       'Volume'])  # which Yahoo fields to download

    logger.info("Loading data with threading")

    df = market.fetch_market(md_request)

    logger.info("Loading data with multiprocessing")

    DataConstants.market_thread_technique = 'multiprocessing'

    df = market.fetch_market(md_request)

    logger.info("Loaded data with multiprocessing")

    df.to_csv("temp_downloads/snp.csv")
Example #15


if __name__ == '__main__':
    ###### The line below is CRUCIAL when running on Windows, otherwise multiprocessing doesn't work! (not necessary on Linux)
    from findatapy.util import SwimPool; SwimPool()

    from findatapy.market import Market, MarketDataRequest, MarketDataGenerator

    market = Market(market_data_generator=MarketDataGenerator())

    # Get the first release for GDP and also print the release date of that
    md_request = MarketDataRequest(
        start_date="01 Jun 2000",                                                      # start date (download data over past decade)
        data_source='alfred',                                                          # use ALFRED/FRED as data source
        tickers=['US GDP'],                                                            # ticker
        fields=['actual-release', 'release-date-time-full'],                           # which fields to download
        vendor_tickers=['GDP'],                                                        # ticker (FRED)
        vendor_fields=['actual-release', 'release-date-time-full'])                    # which FRED fields to download

    df = market.fetch_market(md_request)

    print(df)

    # Compare the first actual release of US GDP with the latest revised ('close') value
    md_request = MarketDataRequest(
        start_date="01 Jun 2000",                                                      # start date (download data over past decade)
        data_source='alfred',                                                          # use ALFRED/FRED as data source
        tickers=['US GDP'],                                                            # ticker
        fields=['actual-release', 'close'],                                            # which fields to download
        vendor_tickers=['GDP'],                                                        # ticker (FRED)
Example #16
    # run_example = 5 - download second FX data from Bloomberg
    # run_example = 6 - download free tick data from FXCM example (compare with DukasCopy)

    run_example = 6

    if run_example == 1 or run_example == 0:

        ####### DukasCopy examples
        # let's download data for 14 Jun 2016 for EUR/USD - the raw data has bid/ask; if we specify close,
        # it is calculated as the bid/ask average

        # first we can do it by defining all the vendor fields, tickers etc. so we bypass the configuration file
        md_request = MarketDataRequest(start_date='14 Jun 2016',
                                       finish_date='15 Jun 2016',
                                       fields=['bid'],
                                       vendor_fields=['bid'],
                                       freq='tick',
                                       data_source='dukascopy',
                                       tickers=['EURUSD'],
                                       vendor_tickers=['EURUSD'])

        df = market.fetch_market(md_request)
        print(df.tail(n=10))

        # now let's do it using the category keyword, which goes into our config files (only works for predefined tickers!)
        # simplifies our calling procedure a lot!
        md_request = MarketDataRequest(start_date='14 Jun 2016',
                                       finish_date='15 Jun 2016',
                                       category='fx',
                                       fields=['close'],
                                       freq='tick',
                                       data_source='dukascopy',
Example #17
run_example = 2

from finmarketpy.curve.rates.fxforwardspricer import FXForwardsPricer

###### Value forwards for AUDUSD for odd days
if run_example == 1 or run_example == 0:

    cross = 'AUDUSD'
    fx_forwards_tenors = ['1W', '2W', '3W', '1M']

    fx_forwards_to_print = ['1W', '2W']

    # Get AUDUSD data for spot, forwards + depos
    md_request = MarketDataRequest(start_date='01 Jan 2020', finish_date='01 Feb 2020',
                                   data_source='bloomberg', cut='NYC', category='fx-forwards-market',
                                   tickers=cross,
                                   cache_algo='cache_algo_return', fx_forwards_tenor=fx_forwards_tenors,
                                   base_depos_currencies=[cross[0:3], cross[3:6]])

    market_df = market.fetch_market(md_request=md_request)

    fx_forwards_price = FXForwardsPricer()

    delivery_dates = Calendar().get_delivery_date_from_horizon_date(market_df.index, "8D", cal=cross)
    interpolated_forwards_df = fx_forwards_price.price_instrument(cross, market_df.index, delivery_dates,
        market_df=market_df, fx_forwards_tenor_for_interpolation=['1W', '2W'])

    interpolated_forwards_df[cross + ".delivery"] = delivery_dates.values

    print(interpolated_forwards_df)
Example #18
# run_example = 2 - get GBPUSD vol surface for a date and plot interpolated vol surface and implied PDF
# run_example = 3 - do an animation of GBPUSD implied vol surface over this period
# run_example = 4 - get implied vol for a particular strike, interpolating the surface
# run_example = 5 - get USDJPY vol surface around US presidential election and plot

run_example = 3

###### Fetch market data for pricing GBPUSD FX options over Brexit vote (ie. FX spot, FX forwards, FX deposits and FX vol quotes)
###### Show how to plot ATM 1M implied_vol vol time series
if run_example == 1 or run_example == 0:

    # Download all the market data for GBPUSD needed for pricing options (vol surface)
    md_request = MarketDataRequest(start_date='01 May 2016',
                                   finish_date='01 Aug 2016',
                                   data_source='bloomberg',
                                   cut='LDN',
                                   category='fx-vol-market',
                                   tickers=['GBPUSD'],
                                   cache_algo='cache_algo_return')

    df = market.fetch_market(md_request)

    style = Style()

    style.title = 'GBPUSD 1M Implied Vol'
    style.scale_factor = 3
    style.source = 'Bloomberg'

    chart.plot(df['GBPUSDV1M.close'], style=style)

###### Fetch market data for pricing GBPUSD FX options over Brexit vote (ie. FX spot, FX forwards, FX deposits and FX vol quotes)
Example #19
# run_example = 5 - pricing of EURUSD options
# run_example = 6 - another USDJPY option

run_example = 6

###### Fetch market data for pricing GBPUSD FX options over Brexit vote (ie. FX spot, FX forwards, FX deposits and FX vol quotes)
###### Construct volatility surface using the FinancePy library underneath, using polynomial interpolation, and
###### then price some options over these dates eg. atm, 25d-call etc.
if run_example == 1 or run_example == 0:

    horizon_date = '23 Jun 2016'
    cross = 'GBPUSD'

    # Download all the market data for GBPUSD needed for pricing options (vol surface)
    md_request = MarketDataRequest(start_date=horizon_date, finish_date=horizon_date,
                                   data_source='bloomberg', cut='NYC', category='fx-vol-market',
                                   tickers=cross, base_depos_currencies=[cross[0:3], cross[3:6]],
                                   cache_algo='cache_algo_return')

    df = market.fetch_market(md_request)

    fx_vol_surface = FXVolSurface(market_df=df, asset=cross)

    fx_op = FXOptionsPricer(fx_vol_surface=fx_vol_surface)

    # Price several different options

    print("atm 1M european call")
    print(fx_op.price_instrument(cross, pd.Timestamp(horizon_date), 'atm', contract_type='european-call', tenor='1M').to_string())

    print("25d 1W european put")
    print(fx_op.price_instrument(cross, pd.Timestamp(horizon_date), '25d-otm', contract_type='european-put', tenor='1W').to_string())
Example #20
if __name__ == '__main__':
    ###### The line below is CRUCIAL when running on Windows, otherwise multiprocessing doesn't work! (not necessary on Linux)
    from findatapy.util import SwimPool
    SwimPool()

    from findatapy.market import Market, MarketDataRequest, MarketDataGenerator

    market = Market(market_data_generator=MarketDataGenerator())

    # in the config file, we can use keywords 'open', 'high', 'low', 'close' and 'volume' for Yahoo and Google finance data

    # download equities data from Alpha Vantage
    md_request = MarketDataRequest(
        start_date="decade",  # start date
        data_source='alphavantage',  # use Alpha Vantage as data source
        tickers=['Apple', 'Microsoft', 'Citigroup'],  # ticker (findatapy)
        fields=['close'],  # which fields to download
        vendor_tickers=['aapl', 'msft', 'c'],  # ticker (Alpha Vantage)
        vendor_fields=['Close'])  # which Alpha Vantage fields to download

    df = market.fetch_market(md_request)

    print(df.tail(n=10))

    # NOTE: Yahoo API no longer works

    # download equities data from Yahoo
    md_request = MarketDataRequest(
        start_date="decade",  # start date
        data_source='yahoo',  # use Yahoo as data source
        tickers=['Apple', 'Citigroup'],  # ticker (findatapy)
Example #21
chart = Chart(engine='plotly')
market = Market(market_data_generator=MarketDataGenerator())

# Choose run_example = 0 for everything
# run_example = 1 - calculating difference between realized and implied volatility over Brexit for GBPUSD
# run_example = 2 - calculating realized volatility using different minute frequencies over Brexit for GBPUSD
# run_example = 3 - calculating implied volatility addon associated with days
# run_example = 4 - compare recent implied vs realized volatility for EURUSD

run_example = 0

###### Looking at realized and implied volatility over GBPUSD in the overnight (ON) tenor
if run_example == 1 or run_example == 0:
    # Download all the market data for GBPUSD needed for pricing options (vol surface)
    md_request = MarketDataRequest(start_date='01 Jun 2016', finish_date='02 Jul 2016',
                                   data_source='bloomberg', cut='10AM', category='fx-vol-market',
                                   tickers=['GBPUSD'],
                                   cache_algo='cache_algo_return')

    market_df = market.fetch_market(md_request)

    # Download FX tick data for GBPUSD over Brexit vote and then convert into 1 minute data (open/high/low/close)
    # which are necessary for calculating realised volatility
    md_request = MarketDataRequest(start_date='01 Jun 2016', finish_date='02 Jul 2016',
                                   data_source='dukascopy', freq='tick', category='fx', fields=['bid', 'ask'],
                                   tickers=['GBPUSD'],
                                   cache_algo='cache_algo_return')

    from findatapy.timeseries import Calculations
    calc = Calculations()

    tick_data = market.fetch_market(md_request)
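A hedged sketch of the 1-minute resampling step described in the comment above, using plain pandas on the tick data (column names follow findatapy's ticker.field convention).

    tick_mid = (tick_data['GBPUSD.bid'] + tick_data['GBPUSD.ask']) / 2.0
    ohlc_1min = tick_mid.resample('1min').ohlc().dropna()  # open/high/low/close bars for realised vol
    print(ohlc_1min.head())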
Example #22
market = Market(market_data_generator=MarketDataGenerator())

# choose run_example = 0 for everything
# run_example = 1 - download BoE data from quandl

run_example = 0

###### fetch data from Quandl for BoE rate (using the BoE database on Quandl)
if run_example == 1 or run_example == 0:
    # Monthly average of UK resident monetary financial institutions' (excl. Central Bank) sterling
    # Weighted average interest rate, other loans, new advances, on a fixed rate to private non-financial corporations (in percent)
    # not seasonally adjusted
    md_request = MarketDataRequest(
        start_date="01 Jan 2000",  # start date
        data_source='quandl',  # use Quandl as data source
        tickers=['Weighted interest rate'],
        fields=['close'],  # which fields to download
        vendor_tickers=['BOE/CFMBJ84'],  # ticker (Quandl)
        vendor_fields=['close'],  # which Quandl fields to download
        cache_algo='internet_load_return')  # how to return data

    df = market.fetch_market(md_request)

    style = Style()

    style.title = 'BoE weighted interest rate'
    style.scale_factor = 3
    style.file_output = "boe-rate.png"
    style.source = 'Quandl/BoE'

    chart.plot(df, style=style)
Example #23
###### Fetch market data for pricing AUDUSD options in 2007 (ie. FX spot, FX forwards, FX deposits and FX vol quotes)
###### Construct volatility surface using FinancePy library underneath, using polynomial interpolation
###### Enters a long 1M call, and MTM every day, and at expiry rolls into another long 1M call
if run_example == 1 or run_example == 0:

    # Warning: make sure you choose dates where there is a full vol surface! If points are missing, interpolation
    # will fail
    start_date = '01 Jan 2007'
    finish_date = '31 Dec 2008'  # Use a smaller window for quicker execution

    cross = 'AUDUSD'
    fx_options_trading_tenor = '1M'

    # Download all the market data for AUDUSD needed for pricing options (FX vol surface + spot + FX forwards + depos)
    md_request = MarketDataRequest(start_date=start_date, finish_date=finish_date,
                                   data_source='bloomberg', cut='BGN', category='fx-vol-market',
                                   tickers=cross, fx_vol_tenor=['1W', '1M', '3M'],
                                   cache_algo='cache_algo_return', base_depos_currencies=[cross[0:3], cross[3:6]])

    df = market.fetch_market(md_request)
    df = df.fillna(method='ffill')

    # Remove New Year's Day and Christmas
    df = Filter().filter_time_series_by_holidays(df, cal='FX')

    # We want to roll long 1M ATM call at expiry
    # We'll mark to market the price through the month by interpolating between 1W and 1M (and using whole vol curve
    # at each tenor)
    fx_options_curve = FXOptionsCurve(fx_options_trading_tenor=fx_options_trading_tenor,
        roll_days_before=0,
        roll_event='expiry-date',
        roll_months=1,
Example #24
    run_example = 0

    if run_example == 1 or run_example == 0:

        ####### DukasCopy examples
        # let's download data for 14 Jun 2016 for EUR/USD - the raw data has bid/ask; if we specify close,
        # it is calculated as the bid/ask average

        from findatapy.market import Market, MarketDataRequest, MarketDataGenerator

        market = Market(market_data_generator=MarketDataGenerator())

        # first we can do it by defining all the vendor fields, tickers etc. so we bypass the configuration file
        md_request = MarketDataRequest(start_date='14 Jun 2016', finish_date='15 Jun 2016',
                                       fields=['bid'], vendor_fields=['bid'],
                                       freq='tick', data_source='dukascopy',
                                       tickers=['EURUSD'], vendor_tickers=['EURUSD'])

        df = market.fetch_market(md_request)
        print(df.tail(n=10))

    if run_example == 2 or run_example == 0:
        ####### Dukascopy S&P500 example
        from findatapy.market import Market, MarketDataRequest, MarketDataGenerator

        market = Market(market_data_generator=MarketDataGenerator())

        md_request = MarketDataRequest(start_date='14 Jun 2016', finish_date='15 Jun 2016',
                                       fields=['bid', 'ask'], vendor_fields=['bid', 'ask'],
                                       freq='tick', data_source='dukascopy',
                                       tickers=['S&P500'], vendor_tickers=['USA500IDXUSD'])
Example #25
        "XRX",
        "XLNX",
        "XL",
        "XYL",
        "YHOO",
        "YUM",
        "ZBH",
        "ZION",
        "ZTS",
    ]

    # download equities data from Yahoo
    md_request = MarketDataRequest(
        start_date="decade",  # start date
        data_source="yahoo",  # use Bloomberg as data source
        tickers=tickers,  # ticker (findatapy)
        fields=["close"],  # which fields to download
        vendor_tickers=tickers,  # ticker (Yahoo)
        vendor_fields=["Close"])  # which Bloomberg fields to download)

    logger.info("Loading data with threading")

    df = market.fetch_market(md_request)

    logger.info("Loading data with multiprocessing")

    DataConstants.market_thread_technique = "multiprocessing"

    df = market.fetch_market(md_request)

    logger.info("Loaded data with multiprocessing")
Example #26
    # run_example = 3 - save to disk for Quandl as Parquet (defined tickers)
    # and read back with MarketDataRequest
    # run_example = 4 - save to disk for Dukascopy as Parquet (defined tickers
    # for EURUSD) and read back with MarketDataRequest

    # NOTE: you need to make sure you have the correct data licences before
    # storing data on disk (and to consider whether other users can access it)

    run_example = 4

    if run_example == 1 or run_example == 0:
        md_request = MarketDataRequest(
            start_date="01 Jan 2002", finish_date="31 Jan 2016",
            tickers="S&P500",
            fields="close",
            data_source="../tests/S&P500.csv",
            freq="daily",
        )

        market = Market(market_data_generator=MarketDataGenerator())

        df = market.fetch_market(md_request=md_request)

        print(df)

    if run_example == 2 or run_example == 0:
        # load tick data from DukasCopy and then resample to 15s buckets
        md_request = MarketDataRequest(
            start_date="01 Jun 2016", finish_date="31 Jul 2016",
            tickers="EURUSD",
Example #27
    # Note we use XBT instead of BTC.  Same for XET (ETH) and XLC (LTC).
    #
    # Note: there will generally be a limit on how many data points you can pull in one call, so you might
    # have to choose smaller time periods

    run_example = 0

    if run_example == 1 or run_example == 0:
        ### Download data from bitcoincharts ###
        # fields contains ['close','volume']
        # return tick data

        md_request = MarketDataRequest(start_date='11 Nov 2015',
                                       finish_date='02 Feb 2018',
                                       cut='LOC',
                                       freq='tick',
                                       data_source='bitcoincharts',
                                       category='crypto',
                                       fields=['close', 'volume'],
                                       tickers=['XBTUSD_itbit'])

        df = market.fetch_market(md_request)
        print(df.head(5))
        print(df.tail(5))

    if run_example == 2 or run_example == 0:
        ### Download data from poloniex ###
        # freq: daily or intraday (intraday returns 5-minute data)
        # fields contains ['close','high','low','open','quote-volume','volume','weighted-average']

        md_request = MarketDataRequest(
            start_date='18 Feb 2017',
Example #28
    SwimPool()

    from findatapy.market import Market, MarketDataRequest, MarketDataGenerator
    from findatapy.timeseries import DataQuality

    market = Market(market_data_generator=MarketDataGenerator())
    dq = DataQuality()

    # in the config file, we can use keywords 'open', 'high', 'low', 'close'
    # and 'volume' for Yahoo and Google finance data

    # download equities data from Yahoo
    md_request = MarketDataRequest(
        start_date="decade",  # start date
        data_source='yahoo',  # use Yahoo as data source
        tickers=['Apple', 'Citigroup'],  # ticker (findatapy)
        fields=['close'],  # which fields to download
        vendor_tickers=['aapl', 'c'],  # ticker (Yahoo)
        vendor_fields=['Close'])  # which Yahoo fields to download

    df = market.fetch_market(md_request)

    # create a duplicated DataFrame (DataFrame.append has been removed in recent pandas, so use pd.concat)
    import pandas as pd
    df = pd.concat([df, df])
    count, dups = dq.count_repeated_dates(df)

    print("Number of duplicated elements")
    print(count)

    print("Duplicated dates")
    print(dups)
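A hedged follow-up sketch: dropping the duplicated timestamps found above with plain pandas rather than a findatapy helper.

    df_clean = df[~df.index.duplicated(keep='first')]

    print("Shape before and after dropping duplicated timestamps")
    print(df.shape, df_clean.shape)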
Example #29
    def plot_chart(self,
                   tickers=None,
                   tickers_rhs=None,
                   start_date=None,
                   finish_date=None,
                   chart_file=None,
                   chart_type='line',
                   title='',
                   fields={'close': 'PX_LAST'},
                   freq='daily',
                   source='Web',
                   brand_label='Cuemacro',
                   display_brand_label=True,
                   reindex=False,
                   additive_index=False,
                   yoy=False,
                   plotly_plot_mode='offline_png',
                   quandl_api_key=dataconstants.quandl_api_key,
                   fred_api_key=dataconstants.fred_api_key,
                   alpha_vantage_api_key=dataconstants.alpha_vantage_api_key,
                   df=None):

        if start_date is None:
            start_date = datetime.datetime.utcnow().date() - timedelta(days=60)
        if finish_date is None:
            finish_date = datetime.datetime.utcnow()

        if isinstance(tickers, str):
            tickers = {tickers: tickers}
        elif isinstance(tickers, list):
            tickers_dict = {}

            for t in tickers:
                tickers_dict[t] = t

            tickers = tickers_dict

        if tickers_rhs is not None:
            if isinstance(tickers_rhs, str):
                tickers_rhs = {tickers_rhs: tickers_rhs}
            elif isinstance(tickers_rhs, list):
                tickers_rhs_dict = {}

                for t in tickers_rhs:
                    tickers_rhs_dict[t] = t

                tickers_rhs = tickers_rhs_dict

            tickers.update(tickers_rhs)
        else:
            tickers_rhs = {}

        if df is None:
            md_request = MarketDataRequest(
                start_date=start_date,
                finish_date=finish_date,
                freq=freq,
                data_source=self._data_source,
                tickers=list(tickers.keys()),
                vendor_tickers=list(tickers.values()),
                fields=list(fields.keys()),
                vendor_fields=list(fields.values()),
                quandl_api_key=quandl_api_key,
                fred_api_key=fred_api_key,
                alpha_vantage_api_key=alpha_vantage_api_key)

            df = self._market.fetch_market(md_request=md_request)

        df = df.fillna(method='ffill')
        df.columns = [x.split('.')[0] for x in df.columns]

        style = Style(title=title,
                      chart_type=chart_type,
                      html_file_output=chart_file,
                      scale_factor=-1,
                      height=400,
                      width=600,
                      file_output=datetime.date.today().strftime("%Y%m%d") +
                      " " + title + ".png",
                      plotly_plot_mode=plotly_plot_mode,
                      source=source,
                      brand_label=brand_label,
                      display_brand_label=display_brand_label)

        if reindex:
            df = Calculations().create_mult_index_from_prices(df)

            style.y_title = 'Reindexed from 100'

        if additive_index:
            df = (df - df.shift(1)).cumsum()

            style.y_title = 'Additive changes from 0'

        if yoy:
            if freq == 'daily':
                obs_in_year = 252
            elif freq == 'intraday':
                obs_in_year = 1440

            df_rets = Calculations().calculate_returns(df)
            df = Calculations().average_by_annualised_year(
                df_rets, obs_in_year=obs_in_year) * 100

            style.y_title = 'Annualized % YoY'

        if list(tickers_rhs.keys()) != []:
            style.y_axis_2_series = list(tickers_rhs.keys())
            style.y_axis_2_showgrid = False
            style.y_axis_showgrid = False

        return self._chart.plot(df, style=style), df
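A hedged usage sketch, assuming an instance of the enclosing chart-helper class (hypothetically called quick_chart) configured with a Bloomberg-style data source.

chart_obj, df_plotted = quick_chart.plot_chart(
    tickers={'S&P 500': 'SPX Index'},
    start_date='01 Jan 2021',
    finish_date='01 Jun 2021',
    title='S&P 500',
    fields={'close': 'PX_LAST'},
    chart_file='sp500.html')

print(df_plotted.tail())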
Example #30
    from datetime import timedelta

    ###### Get intraday data for USD/JPY from the past few months from Bloomberg, NFP date/times from Bloomberg
    ###### then plot intraday price action around NFP for USD/JPY

    start_date = datetime.date.today() - timedelta(days=180)
    finish_date = datetime.datetime.utcnow()

    market = Market(market_data_generator=MarketDataGenerator())

    # Fetch NFP times from Bloomberg
    md_request = MarketDataRequest(
        start_date=start_date,  # start date
        finish_date=finish_date,  # finish date
        category="events",
        freq='daily',  # daily data
        data_source='bloomberg',  # use Bloomberg as data source
        tickers=['NFP'],
        fields=['release-date-time-full'],  # which fields to download
        vendor_tickers=['NFP TCH Index'],  # ticker (Bloomberg)
        cache_algo='internet_load_return')  # how to return data

    df_event_times = market.fetch_market(md_request)
    df_event_times = pandas.DataFrame(
        index=df_event_times['NFP.release-date-time-full'])

    # Need same timezone for event times as market data (otherwise can cause problems with Pandas)
    df_event_times = df_event_times.tz_localize('utc')

    # Fetch USD/JPY spot
    md_request = MarketDataRequest(
        start_date=start_date,  # start date