def get_economic_data_history(self,
                                  start_date,
                                  finish_date,
                                  country_group,
                                  data_type,
                                  source='fred',
                                  cache_algo="internet_load_return"):
        """Download a daily economic data history for a group of countries.

        country_group may be a list of country names, or the name of a
        group defined in self._econ_country_groups. Each country is paired
        with data_type to form a pretty ticker, which is then mapped to the
        vendor's own ticker via self._all_econ_tickers. Tickers with no
        vendor mapping are logged and passed through as None.
        """

        if isinstance(country_group, list):
            pretty_country_names = country_group
        else:
            # expand the group name into its member countries
            in_group = self._econ_country_groups["Country Group"] == country_group
            pretty_country_names = list(
                self._econ_country_groups[in_group]['Country'])

        # pretty tickers are "<country>-<data_type>"
        pretty_tickers = ['-'.join([name, data_type])
                          for name in pretty_country_names]

        # resolve each pretty ticker to the vendor's ticker
        vendor_tickers = []

        for pretty_ticker in pretty_tickers:
            matches = list(
                self._all_econ_tickers[
                    self._all_econ_tickers["Full Code"] == pretty_ticker]
                [source].values)

            if matches:
                vendor_tickers.append(matches[0])
            else:
                self.logger.error('Could not find match for ' + pretty_ticker)
                vendor_tickers.append(None)

        # field naming differs by vendor
        vendor_fields = ['PX_LAST'] if source == 'bloomberg' else ['close']

        time_series_request = TimeSeriesRequest(
            start_date=start_date,
            finish_date=finish_date,
            category='economic',
            freq='daily',                   # daily data
            data_source=source,
            cut='LOC',
            tickers=pretty_tickers,
            fields=['close'],               # pretty field name
            vendor_tickers=vendor_tickers,
            vendor_fields=vendor_fields,    # vendor-specific field name
            cache_algo=cache_algo)          # how to return data

        return self.time_series_factory.harvest_time_series(
            time_series_request)
Example #2
0
    def fill_assets(self):
        """Load the asset price data for the FX basket backtest.

        Builds the basket definition (each cross as its own single-asset
        basket plus an aggregate 'Thalesians FX CTA' basket containing
        every cross), downloads daily closes from Quandl, and falls back
        to a local CSV when the web request fails.

        Returns
        -------
        tuple
            (asset_df, spot_df, spot_df2, basket_dict)
        """

        # for FX basket
        full_bkt = [
            'EURUSD', 'USDJPY', 'GBPUSD', 'AUDUSD', 'USDCAD', 'NZDUSD',
            'USDCHF', 'USDNOK', 'USDSEK'
        ]

        # each cross forms its own single-asset basket...
        basket_dict = {ticker: [ticker] for ticker in full_bkt}

        # ...plus one aggregate basket of every cross
        basket_dict['Thalesians FX CTA'] = full_bkt

        br = self.fill_backtest_request()

        self.logger.info("Loading asset data...")

        # Quandl codes, index-aligned with full_bkt
        vendor_tickers = [
            'FRED/DEXUSEU', 'FRED/DEXJPUS', 'FRED/DEXUSUK', 'FRED/DEXUSAL',
            'FRED/DEXCAUS', 'FRED/DEXUSNZ', 'FRED/DEXSZUS', 'FRED/DEXNOUS',
            'FRED/DEXSDUS'
        ]

        time_series_request = TimeSeriesRequest(
            start_date=br.start_date,  # start date
            finish_date=br.finish_date,  # finish date
            freq='daily',  # daily data
            data_source='quandl',  # use Quandl as data source
            tickers=full_bkt,  # ticker (Thalesians)
            fields=['close'],  # which fields to download
            vendor_tickers=vendor_tickers,  # ticker (Quandl)
            vendor_fields=['close'],  # which Quandl fields to download
            cache_algo='internet_load_return')  # how to return data

        asset_df = self.tsfactory.harvest_time_series(time_series_request)

        # if web connection fails read from CSV
        if asset_df is None:
            import pandas

            # parse_dates handles ISO '%Y-%m-%d' dates natively; the old
            # date_parser=lambda x: pandas.datetime.strptime(...) broke on
            # modern pandas (pandas.datetime was removed in pandas 2.0)
            asset_df = pandas.read_csv(
                "d:/fxcta.csv",
                index_col=0,
                parse_dates=['Date'])

        # signalling variables
        spot_df = asset_df
        spot_df2 = None

        return asset_df, spot_df, spot_df2, basket_dict
    def construct_strategy_benchmark(self):
        """Fetch the strategy benchmark series (EURUSD daily closes from
        Quandl's FRED dataset) and strip the field suffix from the column
        names."""

        tsr_indices = TimeSeriesRequest(
            start_date='01 Jan 1980',                   # start of history
            finish_date=datetime.datetime.utcnow(),     # up to now
            freq='daily',                               # daily data
            data_source='quandl',                       # use Quandl as data source
            tickers=["EURUSD"],                         # tickers to download
            vendor_tickers=['FRED/DEXUSEU'],
            fields=['close'],                           # which fields to download
            vendor_fields=['close'],
            cache_algo='cache_algo_return')             # how to return data

        benchmark_df = self.tsfactory.harvest_time_series(tsr_indices)

        # strip the field suffix, e.g. "EURUSD.close" -> "EURUSD"
        benchmark_df.columns = [col.split(".")[0] for col in benchmark_df.columns]

        return benchmark_df
Example #4
0
# In[ ]:

from pythalesians.market.requests.timeseriesrequest import TimeSeriesRequest
from datetime import timedelta

import datetime

# pretty names and the Bloomberg tickers they map to (index-aligned lists)
tickers = ['S&P500', 'FTSE', 'Nikkei']
vendor_tickers = ['SPX Index', 'UKX Index', 'NKY Index']

# request the last ~6 months of daily closes from Bloomberg
time_series_request = TimeSeriesRequest(
        start_date = datetime.datetime.utcnow() - timedelta(days=180),  # start date
        finish_date = datetime.datetime.utcnow(),                       # finish date
        freq = 'daily',                                                 # daily data
        data_source = 'bloomberg',                                      # use Bloomberg as data source
        tickers = tickers,                                              # ticker (Thalesians)
        fields = ['close'],                                             # which fields to download
        vendor_tickers = vendor_tickers,                                # ticker (Bloomberg)
        vendor_fields = ['PX_LAST'],                                    # which Bloomberg fields to download
        cache_algo = 'internet_load_return')                            # how to return data


# We now pass this TimeSeriesRequest object to a LightTimeSeriesFactory, which will return a pandas dataframe from the data source and will also output logger text (we can edit logging.conf to make this less verbose or to change the output of the log to disk, rather than screen, however, I've included it, so we can see which classes are being called). 
# 
# By default, PyThalesians will split up the ticker download into several chunks, using multithreading to make several calls to our data source. The number of threads can be changed in the Constants.py file (we can also specify if we would prefer to use multiprocessing as opposed to multithreading, which gets around the Python GIL. In most cases, however, the overhead of doing this is unnecessary).

# In[ ]:

from pythalesians.market.loaders.lighttimeseriesfactory import LightTimeSeriesFactory

# the factory performs the actual (possibly multithreaded) download
ltsf = LightTimeSeriesFactory()
Example #5
0
        # just change "False" to "True" to run any of the below examples

        ###### download daily data from Bloomberg for FX, with different threading techniques
        if False:

            time_series_request = TimeSeriesRequest(
                start_date="01 Jan 1999",  # start date
                finish_date=datetime.date.today(),  # finish date
                freq='daily',  # daily data
                data_source='bloomberg',  # use Bloomberg as data source
                tickers=[
                    'EURUSD',  # ticker (Thalesians)
                    'GBPUSD',
                    'USDJPY',
                    'AUDUSD'
                ],
                fields=['close', 'high', 'low'],  # which fields to download
                vendor_tickers=[
                    'EURUSD BGN Curncy',  # ticker (Bloomberg)
                    'GBPUSD BGN Curncy',
                    'USDJPY BGN Curncy',
                    'AUDUSD BGN Curncy'
                ],
                vendor_fields=['PX_LAST', 'PX_HIGH',
                               'PX_LOW'],  # which Bloomberg fields to download
                cache_algo='internet_load_return')  # how to return data

            ltsf = LightTimeSeriesFactory()

            # presumably threading behaviour is configured via Constants — see library docs
            from pythalesians.util.constants import Constants
Example #6
0
import datetime
from pythalesians.market.loaders.lighttimeseriesfactory import LightTimeSeriesFactory
from pythalesians.market.requests.timeseriesrequest import TimeSeriesRequest
from pythalesians.timeseries.calcs.timeseriescalcs import TimeSeriesCalcs

from pythalesians.graphics.graphs.plotfactory import PlotFactory
from pythalesians.graphics.graphs.graphproperties import GraphProperties

if True:

    # daily closes for Apple and the SPY ETF from Google Finance
    time_series_request = TimeSeriesRequest(
        start_date="01 Jan 2013",  # start date
        finish_date=datetime.date.today(),  # finish date
        freq='daily',  # daily data
        data_source='google',  # use Google Finance as data source
        tickers=['Apple', 'S&P500 ETF'],  # ticker (Thalesians)
        fields=['close'],  # which fields to download
        vendor_tickers=['aapl', 'spy'],  # ticker (Google)
        vendor_fields=['Close'],  # which Google fields to download
        cache_algo='internet_load_return')  # how to return data

    ltsf = LightTimeSeriesFactory()
    tsc = TimeSeriesCalcs()

    # rebase both price series to a common multiplicative index for comparison
    df = tsc.create_mult_index_from_prices(
        ltsf.harvest_time_series(time_series_request))

    gp = GraphProperties()
    gp.title = "S&P500 vs Apple"

    # plot first with PyThalesians and then Plotly (via Cufflinks)
Example #7
0
    def get_fx_cross(self, start, end, cross,
                     cut = "NYC", source = "bloomberg", freq = "intraday", cache_algo='cache_algo_return', type = 'spot'):
        """Download FX cross rates, synthesising non-USD crosses from
        their two USD legs when needed.

        Parameters
        ----------
        start, end : datetime-like
            Date range to download
        cross : str or list of str
            Cross(es) to fetch, e.g. 'EURJPY'
        cut : str
            Data cut (e.g. NYC/BGN)
        source : str
            Data vendor
        freq : str
            'intraday', 'daily' or 'tick'
        cache_algo : str
            Caching behaviour for the underlying loader
        type : str
            'spot' or 'tot' (total return; daily only)

        Returns
        -------
        pandas.DataFrame
            One '<cross>.close' (or '<cross>-tot.close') column per cross,
            outer-joined and with NaN rows dropped.
        """

        # tick-style sources are delegated to the tick loader; forward the
        # caller's cache_algo (previously hard-coded to 'cache_algo_return').
        # Tick data only supports spot, hence type stays 'spot'.
        if source == "gain" or source == 'dukascopy' or freq == 'tick':
            return self.get_fx_cross_tick(start, end, cross,
                     cut = cut, source = source, cache_algo = cache_algo, type = 'spot')

        if isinstance(cross, str):
            cross = [cross]

        time_series_request = TimeSeriesRequest()
        time_series_factory = self.time_series_factory
        time_series_calcs = TimeSeriesCalcs()
        data_frame_agg = None

        if freq == 'intraday':
            time_series_request.gran_freq = "minute"                # intraday

        elif freq == 'daily':
            time_series_request.gran_freq = "daily"                 # daily

        time_series_request.freq_mult = 1                       # 1 min
        time_series_request.cut = cut                           # NYC/BGN ticker
        time_series_request.fields = 'close'                    # close field only
        time_series_request.cache_algo = cache_algo             # cache_algo_only, cache_algo_return, internet_load

        time_series_request.environment = 'backtest'
        time_series_request.start_date = start
        time_series_request.finish_date = end
        time_series_request.data_source = source

        for cr in cross:
            base = cr[0:3]
            terms = cr[3:6]

            if (type == 'spot'):
                # non-USD crosses: derive from the two USD legs
                if base != 'USD' and terms != 'USD':
                    base_USD = self.fxconv.correct_notation('USD' + base)
                    terms_USD = self.fxconv.correct_notation('USD' + terms)

                    # TODO check if the cross exists in the database

                    # download base USD cross
                    time_series_request.tickers = base_USD
                    time_series_request.category = self.fxconv.em_or_g10(base, freq)
                    base_vals = time_series_factory.harvest_time_series(time_series_request)

                    # download terms USD cross
                    time_series_request.tickers = terms_USD
                    time_series_request.category = self.fxconv.em_or_g10(terms, freq)
                    terms_vals = time_series_factory.harvest_time_series(time_series_request)

                    # invert any leg quoted USD-first so both are XXX/USD
                    if (base_USD[0:3] == 'USD'):
                        base_vals = 1 / base_vals
                    if (terms_USD[0:3] == 'USD'):
                        terms_vals = 1 / terms_vals

                    base_vals.columns = ['temp']
                    terms_vals.columns = ['temp']
                    cross_vals = base_vals.div(terms_vals, axis = 'index')
                    cross_vals.columns = [cr + '.close']

                else:
                    if base == 'USD': non_USD = terms
                    if terms == 'USD': non_USD = base

                    correct_cr = self.fxconv.correct_notation(cr)

                    time_series_request.tickers = correct_cr
                    time_series_request.category = self.fxconv.em_or_g10(non_USD, freq)
                    cross_vals = time_series_factory.harvest_time_series(time_series_request)

                    # flip if not convention
                    if(correct_cr != cr):
                        cross_vals = 1 / cross_vals

                    # relabel the columns (the original assigned
                    # columns.names, which sets the axis *name*, not the
                    # column labels, leaving the vendor label in place)
                    cross_vals.columns = [cr + '.close']

            elif type[0:3] == "tot":
                if freq == 'daily':
                    # download base USD total-return index
                    time_series_request.tickers = base + 'USD'
                    time_series_request.category = self.fxconv.em_or_g10(base, freq) + '-tot'

                    if type == "tot":
                        base_vals = time_series_factory.harvest_time_series(time_series_request)
                    else:
                        x = 0   # placeholder: other 'tot*' variants not implemented

                    # download terms USD total-return index
                    time_series_request.tickers = terms + 'USD'
                    time_series_request.category = self.fxconv.em_or_g10(terms, freq) + '-tot'

                    if type == "tot":
                        terms_vals = time_series_factory.harvest_time_series(time_series_request)
                    else:
                        x = 0   # placeholder: other 'tot*' variants not implemented

                    base_rets = time_series_calcs.calculate_returns(base_vals)
                    terms_rets = time_series_calcs.calculate_returns(terms_vals)

                    cross_rets = base_rets.sub(terms_rets.iloc[:,0],axis=0)

                    # first return of a time series will be NaN, given we don't know previous point
                    cross_rets.iloc[0] = 0

                    cross_vals = time_series_calcs.create_mult_index(cross_rets)
                    cross_vals.columns = [cr + '-tot.close']

                elif freq == 'intraday':
                    self.logger.info('Total calculated returns for intraday not implemented yet')
                    return None

            if data_frame_agg is None:
                data_frame_agg = cross_vals
            else:
                data_frame_agg = data_frame_agg.join(cross_vals, how='outer')

        # strip the nan elements
        data_frame_agg = data_frame_agg.dropna()
        return data_frame_agg
Example #8
0
    def get_fx_cross_tick(self, start, end, cross,
                     cut = "NYC", source = "gain", cache_algo='cache_algo_return', type = 'spot'):
        """Download tick bid/ask data for one or more FX crosses.

        Only type == 'spot' is supported. Returns a DataFrame with
        '<cross>.bid' / '<cross>.ask' columns, outer-joined across crosses
        and with NaN rows dropped, or None when nothing was fetched.
        """

        if isinstance(cross, str):
            cross = [cross]

        time_series_request = TimeSeriesRequest()
        time_series_factory = self.time_series_factory
        data_frame_agg = None

        time_series_request.gran_freq = "tick"                  # tick

        time_series_request.freq_mult = 1                       # 1 min
        time_series_request.cut = cut                           # NYC/BGN ticker
        time_series_request.fields = ['bid', 'ask']             # bid/ask field only
        time_series_request.cache_algo = cache_algo             # cache_algo_only, cache_algo_return, internet_load

        time_series_request.environment = 'backtest'
        time_series_request.start_date = start
        time_series_request.finish_date = end
        time_series_request.data_source = source

        time_series_request.category = 'fx'

        for cr in cross:

            # only spot is implemented; previously a non-spot request hit an
            # unbound local ('cross_vals') at the join below
            if (type == 'spot'):
                time_series_request.tickers = cr

                cross_vals = time_series_factory.harvest_time_series(time_series_request)
                cross_vals.columns = [cr + '.bid', cr + '.ask']

                if data_frame_agg is None:
                    data_frame_agg = cross_vals
                else:
                    data_frame_agg = data_frame_agg.join(cross_vals, how='outer')

        # strip the nan elements (nothing fetched => return None)
        if data_frame_agg is not None:
            data_frame_agg = data_frame_agg.dropna()

        return data_frame_agg
if True:
    from pythalesians.market.loaders.lighttimeseriesfactory import LightTimeSeriesFactory
    from pythalesians.market.requests.timeseriesrequest import TimeSeriesRequest
    from pythalesians.timeseries.techind.techindicator import TechIndicator
    from pythalesians.timeseries.techind.techparams import TechParams
    from pythalesians.graphics.graphs.plotfactory import PlotFactory

    import datetime

    # daily EURUSD/GBPUSD closes from Quandl (FRED datasets)
    time_series_request = TimeSeriesRequest(
        start_date="01 Jan 1970",  # start date
        finish_date=datetime.date.today(),  # finish date
        freq='daily',  # daily data
        data_source='quandl',  # use Quandl as data source
        tickers=[
            'EURUSD',  # ticker (Thalesians)
            'GBPUSD'
        ],
        fields=['close'],  # which fields to download
        vendor_tickers=['FRED/DEXUSEU', 'FRED/DEXUSUK'],  # ticker (Quandl)
        vendor_fields=['close'],  # which Quandl fields to download
        cache_algo='internet_load_return')  # how to return data

    ltsf = LightTimeSeriesFactory()

    daily_vals = ltsf.harvest_time_series(time_series_request)

    # technical indicator with a 20-period SMA window
    techind = TechIndicator()
    tech_params = TechParams()
    tech_params.sma_period = 20
Example #10
0
if True:
    logger = LoggerManager().getLogger(__name__)

    import datetime

    # just change "False" to "True" to run any of the below examples

    ###### download daily data from quandl for S&P500 and then plot
    if True:

        time_series_request = TimeSeriesRequest(
                start_date = "01 Jan 2014",                     # start date
                finish_date = datetime.date.today(),            # finish date
                freq = 'daily',                                 # daily data
                data_source = 'quandl',                         # use quandl as data source
                tickers = ['S&P500'],                            # ticker (Thalesians)
                fields = ['close', 'open', 'adjusted-close', 'high'],           # which fields to download
                vendor_tickers = ['YAHOO/INDEX_GSPC'],                           # ticker (quandl)
                vendor_fields = ['close', 'open', 'adjusted-close', 'high'],    # which quandl fields to download
                cache_algo = 'internet_load_return')                            # how to return data

        ltsf = LightTimeSeriesFactory()

        df = None
        df = ltsf.harvest_time_series(time_series_request)

        pf = PlotFactory()
        pf.plot_line_graph(df, adapter = 'pythalesians')

    ###### download monthly quandl data for Total US nonfarm payrolls
    if True:
        # NOTE(review): the body of this branch is missing from this excerpt
if True:
    logger = LoggerManager().getLogger(__name__)

    import datetime

    # just change "False" to "True" to run any of the below examples

    ###### download daily data from Bloomberg for USD/JPY
    ###### download BoJ intervention data from FRED
    if True:

        time_series_request = TimeSeriesRequest(
            start_date="01 Jan 1995",  # start date
            finish_date=datetime.date.today(),  # finish date
            freq='daily',  # daily data
            data_source='bloomberg',  # use Bloomberg as data source
            tickers=['USDJPY'],  # ticker (Thalesians)
            fields=['close'],  # which fields to download
            vendor_tickers=['USDJPY BGN Curncy'],  # ticker (Bloomberg)
            vendor_fields=['PX_LAST'],  # which Bloomberg fields to download
            cache_algo='internet_load_return')  # how to return data

        ltsf = LightTimeSeriesFactory()

        df = None
        df = ltsf.harvest_time_series(time_series_request)

        # drop the '.close' suffix, e.g. 'USDJPY.close' -> 'USDJPY'
        df.columns = [x.replace('.close', '') for x in df.columns.values]

        # re-point the same request at the FRED BoJ intervention series
        time_series_request.tickers = ['USDJPY purchases (bn USD)']
        time_series_request.vendor_tickers = ['JPINTDUSDJPY']
        time_series_request.data_source = 'fred'
Example #12
0
seasonality = Seasonality()
tsc = TimeSeriesCalcs()
logger = LoggerManager().getLogger(__name__)

pf = PlotFactory()

# just change "False" to "True" to run any of the below examples

###### calculate seasonal moves in SPX (using Bloomberg data)
if True:
    time_series_request = TimeSeriesRequest(
        start_date="01 Jan 1970",  # start date
        finish_date=datetime.date.today(),  # finish date
        freq='daily',  # daily data
        data_source='bloomberg',  # use Bloomberg as data source
        tickers=['S&P500'],
        fields=['close'],  # which fields to download
        vendor_tickers=['SPX Index'],  # ticker (Bloomberg)
        vendor_fields=['PX_LAST'],  # which Bloomberg fields to download
        cache_algo='internet_load_return')  # how to return data

    ltsf = LightTimeSeriesFactory()

    df = ltsf.harvest_time_series(time_series_request)

    df_ret = tsc.calculate_returns(df)

    # seasonality by business day of month, pooled across all months
    # (partition_by_month=False), then mapped onto a date-time axis
    day_of_month_seasonality = seasonality.bus_day_of_month_seasonality(
        df_ret, partition_by_month=False)
    day_of_month_seasonality = tsc.convert_month_day_to_date_time(
        day_of_month_seasonality)
Example #13
0
        #     if write_tick_disk:
        #         tick = open(out_path_list[i], "rb+").read()
        #     else:
        #         tick = tick_list[i]
        #
        #     time = time_list[i]
        #     data = lzma.decompress(tick)
        #     df_list.append(self.retrieve_df(data, symbol, time))


if __name__ == '__main__':
    from pythalesians.market.requests.timeseriesrequest import TimeSeriesRequest
    import datetime

    dc = LoaderDukasCopy()

    # Download one day of EURUSD tick bid/ask data from DukasCopy.
    # tickers and vendor_tickers must be index-aligned: the original request
    # listed two vendor tickers ('EURUSD', 'GBPUSD') for a single pretty
    # ticker, so 'GBPUSD' has been dropped here.
    time_series_request = TimeSeriesRequest(
        start_date="01 Jun 2015",  # start date
        finish_date="02 Jun 2015",  # finish date
        freq='tick',  # tick data
        data_source='dukascopy',  # use DukasCopy as data source
        tickers=['EURUSD'],  # ticker (Thalesians)
        fields=['bid', 'ask'],  # which fields to download
        vendor_tickers=['EURUSD'],  # ticker (DukasCopy)
        vendor_fields=['bid', 'ask'],  # which DukasCopy fields to download
        cache_algo='internet_load_return')  # how to return data

    dc.load_ticker(time_series_request)
Example #14
0
    import pytz

    # just change "False" to "True" to run any of the below examples

    ###### download second data from Bloomberg for EUR/USD, USD/JPY (inverted) and GBP/USD around last payroll
    if True:
        # look back 60 days from now for payroll release timestamps
        finish_date = datetime.datetime.utcnow()
        start_date = finish_date - timedelta(days=60)

        # fetch NFP times from Bloomberg
        time_series_request = TimeSeriesRequest(
                start_date = start_date,                # start date
                finish_date = finish_date,              # finish date
                category = "events",
                freq = 'daily',                         # daily data
                data_source = 'bloomberg',              # use Bloomberg as data source
                tickers = ['NFP'],
                fields = ['release-date-time-full'],                    # which fields to download
                vendor_tickers = ['NFP TCH Index'], # ticker (Bloomberg)
                vendor_fields = ['ECO_FUTURE_RELEASE_DATE_LIST'],   # which Bloomberg fields to download
                cache_algo = 'internet_load_return')                # how to return data

        ltsf = LightTimeSeriesFactory()
        ts_filter = TimeSeriesFilter()

        df_event_times = ltsf.harvest_time_series(time_series_request)

        # re-index the frame by the release timestamps, localised to UTC,
        # then restrict to the requested window
        utc_time = pytz.utc
        df_event_times = pandas.DataFrame(index = df_event_times['NFP.release-date-time-full'])
        df_event_times.index = df_event_times.index.tz_localize(utc_time)    # work in UTC time
        df_event_times = ts_filter.filter_time_series_by_date(start_date, finish_date, df_event_times)
Example #15
0
    from pythalesians.market.loaders.lighttimeseriesfactory import LightTimeSeriesFactory

    import datetime

    from pythalesians.graphics.graphs.plotfactory import PlotFactory
    from pythalesians.graphics.graphs.graphproperties import GraphProperties

    # today's session: from midnight UTC up to now
    end = datetime.datetime.utcnow()
    start_date = end.replace(hour=0, minute=0, second=0,
                             microsecond=0)  # Returns a copy

    time_series_request = TimeSeriesRequest(
        start_date=start_date,  # start date
        finish_date=datetime.datetime.utcnow(),  # finish date
        freq='intraday',  # intraday data
        data_source='bloomberg',  # use Bloomberg as data source
        tickers=['EURUSD'],  # ticker (Thalesians)
        fields=['close'],  # which fields to download
        vendor_tickers=['EURUSD BGN Curncy'],  # ticker (Bloomberg)
        vendor_fields=['close'],  # which Bloomberg fields to download
        cache_algo='internet_load_return')  # how to return data

    ltsf = LightTimeSeriesFactory()

    df = ltsf.harvest_time_series(time_series_request)
    # drop the '.close' suffix from column names
    df.columns = [x.replace('.close', '') for x in df.columns.values]

    # styling for the output chart
    gp = GraphProperties()

    gp.title = 'EURUSD stuff!'
    gp.file_output = 'EURUSD.png'
    gp.source = 'Thalesians/BBG (created with PyThalesians Python library)'
Example #16
0
    import datetime

    # just change "False" to "True" to run any of the below examples

    ###### download daily data from Bloomberg for EUR/USD, GBP/USD and AUD/USD spot, then calculate correlation
    if True:

        time_series_request = TimeSeriesRequest(
            start_date="01 Jan 2014",  # start date
            finish_date=datetime.date.today(),  # finish date
            freq='daily',  # daily data
            data_source='bloomberg',  # use Bloomberg as data source
            tickers=[
                'EURUSD',  # ticker (Thalesians)
                'GBPUSD',
                'AUDUSD'
            ],
            fields=['close'],  # which fields to download
            vendor_tickers=[
                'EURUSD BGN Curncy',  # ticker (Bloomberg)
                'GBPUSD BGN Curncy',
                'AUDUSD BGN Curncy'
            ],
            vendor_fields=['PX_LAST'],  # which Bloomberg fields to download
            cache_algo='internet_load_return')  # how to return data

        ltsf = LightTimeSeriesFactory()

        df = None
        df = ltsf.harvest_time_series(time_series_request)

        tsc = TimeSeriesCalcs()
    logger = LoggerManager().getLogger(__name__)

    import datetime

    ###### download daily data from Bloomberg for EUR/USD and GBP/USD spot and then plot
    if True:

        time_series_request = TimeSeriesRequest(
            start_date="01 Jan 2014",  # start date
            finish_date=datetime.date.today(),  # finish date
            freq='daily',  # daily data
            data_source='bloomberg',  # use Bloomberg as data source
            tickers=[
                'EURUSD',  # ticker (Thalesians)
                'GBPUSD'
            ],
            fields=['close', 'high', 'low'],  # which fields to download
            vendor_tickers=[
                'EURUSD BGN Curncy',  # ticker (Bloomberg)
                'GBPUSD BGN Curncy'
            ],
            vendor_fields=['PX_LAST', 'PX_HIGH',
                           'PX_LOW'],  # which Bloomberg fields to download
            cache_algo='internet_load_return')  # how to return data

        ltsf = LightTimeSeriesFactory()

        df = None
        df = ltsf.harvest_time_series(time_series_request)

        # plotting adapter for the downloaded series
        pf = PlotFactory()
if True:
    logger = LoggerManager().getLogger(__name__)

    import datetime

    # just change "False" to "True" to run any of the below examples

    ###### download daily data from Bloomberg for USD/JPY
    ###### download BoJ intervention data from FRED
    if True:

        time_series_request = TimeSeriesRequest(
                start_date = "01 Jan 1995",                     # start date
                finish_date = datetime.date.today(),            # finish date
                freq = 'daily',                                 # daily data
                data_source = 'bloomberg',                      # use Bloomberg as data source
                tickers = ['USDJPY'],                            # ticker (Thalesians)
                fields = ['close'],              # which fields to download
                vendor_tickers = ['USDJPY BGN Curncy'],         # ticker (Bloomberg)
                vendor_fields = ['PX_LAST'],   # which Bloomberg fields to download
                cache_algo = 'internet_load_return')                # how to return data

        ltsf = LightTimeSeriesFactory()

        df = None
        df = ltsf.harvest_time_series(time_series_request)

        # drop the '.close' suffix, e.g. 'USDJPY.close' -> 'USDJPY'
        df.columns = [x.replace('.close', '') for x in df.columns.values]

        # re-point the same request at the FRED BoJ intervention series
        time_series_request.tickers = ['USDJPY purchases (bn USD)']
        time_series_request.vendor_tickers = ['JPINTDUSDJPY']
        time_series_request.data_source = 'fred'