Code example #1
    def monthly_seasonality(self,
                            data_frame,
                            cum=True,
                            add_average=False,
                            price_index=False):

        calculations = Calculations()

        if price_index:
            data_frame = data_frame.resample(
                'BM').mean()  # resample into month end
            data_frame = calculations.calculate_returns(data_frame)

        data_frame.index = pandas.to_datetime(data_frame.index)

        monthly_seasonality = calculations.average_by_month(data_frame)

        if add_average:
            monthly_seasonality['Avg'] = monthly_seasonality.mean(axis=1)

        if cum is True:
            monthly_seasonality.loc[0] = numpy.zeros(
                len(monthly_seasonality.columns))
            monthly_seasonality = monthly_seasonality.sort_index()

            monthly_seasonality = calculations.create_mult_index(
                monthly_seasonality)

        return monthly_seasonality
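
The helper calls above (Calculations.average_by_month, create_mult_index) are not part of this listing, so the following is only a rough, self-contained sketch of what they appear to do, using a synthetic random-walk price series: average simple returns by calendar month, then compound into an index rebased to 100. The grouping and rebasing details are assumptions, not finmarketpy's implementation.

import numpy as np
import pandas as pd

# Synthetic daily prices purely for illustration
dates = pd.bdate_range("2015-01-01", "2019-12-31")
prices = pd.DataFrame(
    100 * np.exp(np.random.normal(0, 0.01, len(dates)).cumsum()),
    index=dates, columns=["EURUSD.close"])

rets = prices.pct_change().fillna(0.0)

# Average simple return for each calendar month (index 1..12)
monthly_seasonality = rets.groupby(rets.index.month).mean()

# Prepend a zero row so the cumulative index starts at 100 before January
monthly_seasonality.loc[0] = 0.0
monthly_seasonality = monthly_seasonality.sort_index()

seasonal_index = 100 * (1.0 + monthly_seasonality).cumprod()
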
Code example #2
File: seasonality.py  Project: Bruce2020/pythalesians
    def bus_day_of_month_seasonality(self, data_frame,
                                 month_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], cum = True,
                                 cal = "FX", partition_by_month = True, add_average = False, price_index = False):

        calculations = Calculations()
        filter = Filter()

        if price_index:
            data_frame = data_frame.resample('B').mean()   # resample into business days
            data_frame = calculations.calculate_returns(data_frame)

        data_frame.index = pandas.to_datetime(data_frame.index)
        data_frame = filter.filter_time_series_by_holidays(data_frame, cal)

        monthly_seasonality = calculations.average_by_month_day_by_bus_day(data_frame, cal)
        monthly_seasonality = monthly_seasonality.loc[month_list]

        if partition_by_month:
            monthly_seasonality = monthly_seasonality.unstack(level=0)

            if add_average:
               monthly_seasonality['Avg'] = monthly_seasonality.mean(axis=1)

        if cum is True:
            if partition_by_month:
                monthly_seasonality.loc[0] = numpy.zeros(len(monthly_seasonality.columns))
                # monthly_seasonality.index = monthly_seasonality.index + 1       # shifting index
                monthly_seasonality = monthly_seasonality.sort_index()

            monthly_seasonality = calculations.create_mult_index(monthly_seasonality)

        return monthly_seasonality
Code example #3
    def bus_day_of_month_seasonality(
            self,
            data_frame,
            month_list=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
            cum=True,
            cal="FX",
            partition_by_month=True,
            add_average=False,
            price_index=False,
            resample_freq='B'):

        calculations = Calculations()
        filter = Filter()

        if price_index:
            data_frame = data_frame.resample(
                resample_freq).mean()  # resample into business days
            data_frame = calculations.calculate_returns(data_frame)

        data_frame.index = pandas.to_datetime(data_frame.index)
        data_frame = filter.filter_time_series_by_holidays(data_frame, cal)

        if resample_freq == 'B':  # business days
            monthly_seasonality = calculations.average_by_month_day_by_bus_day(
                data_frame, cal)
        elif resample_freq == 'D':  # calendar days
            monthly_seasonality = calculations.average_by_month_day_by_day(
                data_frame)

        monthly_seasonality = monthly_seasonality.loc[month_list]

        if partition_by_month:
            monthly_seasonality = monthly_seasonality.unstack(level=0)

            if add_average:
                monthly_seasonality['Avg'] = monthly_seasonality.mean(axis=1)

        if cum is True:
            if partition_by_month:
                monthly_seasonality.loc[0] = numpy.zeros(
                    len(monthly_seasonality.columns))
                # monthly_seasonality.index = monthly_seasonality.index + 1       # shifting index
                monthly_seasonality = monthly_seasonality.sort_index()

            monthly_seasonality = calculations.create_mult_index(
                monthly_seasonality)

        return monthly_seasonality
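
average_by_month_day_by_bus_day is likewise not shown in this listing, so the sketch below is an assumption about the grouping it performs rather than the library's code: number the business days within each month, average returns by (month, business day of month), and pivot the months into columns, which matches the shape produced when partition_by_month=True above.

import numpy as np
import pandas as pd

# Synthetic business-day returns for illustration only
dates = pd.bdate_range("2018-01-01", "2020-12-31")
rets = pd.Series(np.random.normal(0, 0.005, len(dates)),
                 index=dates, name="GBPUSD.close")

# Business day of month: 1 for the first business day of each month, 2 for the next, ...
bus_day = rets.groupby([rets.index.year, rets.index.month]).cumcount() + 1

# Average return by (month, business day of month), then pivot months into columns
avg = rets.groupby([rets.index.month, bus_day]).mean()
bus_day_seasonality = avg.unstack(level=0)
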
Code example #4
    def calculate_vol_adjusted_index_from_prices(self, prices_df, br):
        """Adjusts an index of prices for a vol target

        Parameters
        ----------
        br : BacktestRequest
            Parameters for the backtest specifying start date, finish date, transaction costs etc.

        prices_df : pandas.DataFrame
            Asset prices to be traded

        Returns
        -------
        pandas.DataFrame containing the vol adjusted index
        """

        calculations = Calculations()

        returns_df, leverage_df = self.calculate_vol_adjusted_returns(prices_df, br, returns=False)

        return calculations.create_mult_index(returns_df)
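
calculate_vol_adjusted_returns itself is not included in this listing, so the following is a rough, self-contained sketch of the idea behind a vol-targeted index rather than finmarketpy's implementation (the 10% target, 20-day window, one-day lag and annualisation factor are illustrative assumptions): lever each day's return by target vol over trailing realised vol, then compound the levered returns into a multiplicative index rebased to 100.

import numpy as np
import pandas as pd

def vol_adjusted_index(prices, vol_target=0.10, window=20, ann_factor=252):
    """Rough sketch: scale daily returns to a constant vol target and compound."""
    rets = prices.pct_change().fillna(0.0)

    realised_vol = rets.rolling(window).std() * np.sqrt(ann_factor)
    leverage = (vol_target / realised_vol).shift(1)  # use yesterday's vol estimate

    adj_rets = (rets * leverage).fillna(0.0)

    return 100 * (1.0 + adj_rets).cumprod()  # rebased multiplicative index
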
Code example #5
    def calculate_vol_adjusted_index_from_prices(self, prices_df, br):
        """
        calculate_vol_adjusted_index_from_prices - Adjusts an index of prices for a vol target

        Parameters
        ----------
        br : BacktestRequest
            Parameters for the backtest specifying start date, finish date, transaction costs etc.

        prices_df : pandas.DataFrame
            Asset prices to be traded

        Returns
        -------
        pandas.DataFrame containing the vol adjusted index
        """

        calculations = Calculations()

        returns_df, leverage_df = self.calculate_vol_adjusted_returns(prices_df, br, returns=False)

        return calculations.create_mult_index(returns_df)
Code example #6
File: seasonality.py  Project: joequant/pythalesians
    def monthly_seasonality(self, data_frame,
                                  cum = True,
                                  add_average = False, price_index = False):

        calculations = Calculations()

        if price_index:
            data_frame = data_frame.resample('BM').mean()          # resample into month end
            data_frame = calculations.calculate_returns(data_frame)

        data_frame.index = pandas.to_datetime(data_frame.index)

        monthly_seasonality = calculations.average_by_month(data_frame)

        if add_average:
            monthly_seasonality['Avg'] = monthly_seasonality.mean(axis=1)

        if cum is True:
            monthly_seasonality.loc[0] = numpy.zeros(len(monthly_seasonality.columns))
            monthly_seasonality = monthly_seasonality.sort_index()

            monthly_seasonality = calculations.create_mult_index(monthly_seasonality)

        return monthly_seasonality
Code example #7
File: market.py  Project: prasunanand/findatapy
class FXCrossFactory(object):
    """Generates FX spot time series and FX total return time series (assuming we already have
    total return indices available from xxxUSD form) from underlying series. Can also produce cross rates from the USD
    crosses.

    """
    def __init__(self, market_data_generator=None):
        self.fxconv = FXConv()

        self.cache = {}

        self._calculations = Calculations()
        self._market_data_generator = market_data_generator

        return

    def get_fx_cross_tick(self,
                          start,
                          end,
                          cross,
                          cut="NYC",
                          data_source="dukascopy",
                          cache_algo='internet_load_return',
                          type='spot',
                          environment='backtest',
                          fields=['bid', 'ask']):

        if isinstance(cross, str):
            cross = [cross]

        market_data_request = MarketDataRequest(
            gran_freq="tick",
            freq_mult=1,
            freq='tick',
            cut=cut,
            fields=['bid', 'ask', 'bidv', 'askv'],
            cache_algo=cache_algo,
            environment=environment,
            start_date=start,
            finish_date=end,
            data_source=data_source,
            category='fx')

        market_data_generator = self._market_data_generator
        data_frame_agg = None

        for cr in cross:

            if (type == 'spot'):
                market_data_request.tickers = cr

                cross_vals = market_data_generator.fetch_market_data(
                    market_data_request)

                if cross_vals is not None:

                    # If user only wants 'close' calculate that from the bid/ask fields
                    if fields == ['close']:
                        cross_vals = cross_vals[[cr + '.bid',
                                                 cr + '.ask']].mean(axis=1).to_frame()
                        cross_vals.columns = [cr + '.close']
                    else:
                        filter = Filter()

                        filter_columns = [cr + '.' + f for f in fields]
                        cross_vals = filter.filter_time_series_by_columns(
                            filter_columns, cross_vals)

            if data_frame_agg is None:
                data_frame_agg = cross_vals
            else:
                data_frame_agg = data_frame_agg.join(cross_vals, how='outer')

        if data_frame_agg is not None:
            # Strip the nan elements
            data_frame_agg = data_frame_agg.dropna()

        return data_frame_agg

    def get_fx_cross(self,
                     start,
                     end,
                     cross,
                     cut="NYC",
                     data_source="bloomberg",
                     freq="intraday",
                     cache_algo='internet_load_return',
                     type='spot',
                     environment='backtest',
                     fields=['close']):

        if data_source == "gain" or data_source == 'dukascopy' or freq == 'tick':
            return self.get_fx_cross_tick(start,
                                          end,
                                          cross,
                                          cut=cut,
                                          data_source=data_source,
                                          cache_algo=cache_algo,
                                          type='spot',
                                          fields=fields)

        if isinstance(cross, str):
            cross = [cross]

        market_data_request_list = []
        freq_list = []
        type_list = []

        for cr in cross:
            market_data_request = MarketDataRequest(freq_mult=1,
                                                    cut=cut,
                                                    fields=['close'],
                                                    freq=freq,
                                                    cache_algo=cache_algo,
                                                    start_date=start,
                                                    finish_date=end,
                                                    data_source=data_source,
                                                    environment=environment)

            market_data_request.type = type
            market_data_request.cross = cr

            if freq == 'intraday':
                market_data_request.gran_freq = "minute"  # intraday

            elif freq == 'daily':
                market_data_request.gran_freq = "daily"  # daily

            market_data_request_list.append(market_data_request)

        data_frame_agg = []

        # Depends on the nature of operation as to whether we should use threading or multiprocessing library
        if constants.market_thread_technique == "thread":
            from multiprocessing.dummy import Pool
        else:
            # Most of the time is spent waiting for Bloomberg to return, so can use threads rather than multiprocessing
            # must use the multiprocess library otherwise can't pickle objects correctly
            # note: currently not very stable
            from multiprocess import Pool

        thread_no = constants.market_thread_no['other']

        if market_data_request_list[
                0].data_source in constants.market_thread_no:
            thread_no = constants.market_thread_no[
                market_data_request_list[0].data_source]

        # Fudge, issue with multithreading and accessing HDF5 files
        # if self._market_data_generator.__class__.__name__ == 'CachedMarketDataGenerator':
        #    thread_no = 0
        thread_no = 0

        if (thread_no > 0):
            pool = Pool(thread_no)

            # Open the market data downloads in their own threads and return the results
            df_list = pool.map_async(self._get_individual_fx_cross,
                                     market_data_request_list).get()

            data_frame_agg = self._calculations.iterative_outer_join(df_list)

            # data_frame_agg = self._calculations.pandas_outer_join(result.get())

            try:
                pool.close()
                pool.join()
            except:
                pass
        else:
            for md_request in market_data_request_list:
                data_frame_agg.append(
                    self._get_individual_fx_cross(md_request))

            data_frame_agg = self._calculations.pandas_outer_join(
                data_frame_agg)

        # Strip the nan elements
        data_frame_agg = data_frame_agg.dropna(how='all')

        # self.speed_cache.put_dataframe(key, data_frame_agg)

        return data_frame_agg

    def _get_individual_fx_cross(self, market_data_request):
        cr = market_data_request.cross
        type = market_data_request.type
        freq = market_data_request.freq

        base = cr[0:3]
        terms = cr[3:6]

        if (type == 'spot'):
            # Non-USD crosses
            if base != 'USD' and terms != 'USD':
                base_USD = self.fxconv.correct_notation('USD' + base)
                terms_USD = self.fxconv.correct_notation('USD' + terms)

                # TODO check if the cross exists in the database

                # Download base USD cross
                market_data_request.tickers = base_USD
                market_data_request.category = 'fx'

                base_vals = self._market_data_generator.fetch_market_data(
                    market_data_request)

                # Download terms USD cross
                market_data_request.tickers = terms_USD
                market_data_request.category = 'fx'

                terms_vals = self._market_data_generator.fetch_market_data(
                    market_data_request)

                # If quoted USD/base flip to get USD terms
                if (base_USD[0:3] == 'USD'):
                    base_vals = 1 / base_vals

                # If quoted USD/terms flip to get USD terms
                if (terms_USD[0:3] == 'USD'):
                    terms_vals = 1 / terms_vals

                base_vals.columns = ['temp']
                terms_vals.columns = ['temp']

                cross_vals = base_vals.div(terms_vals, axis='index')
                cross_vals.columns = [cr + '.close']

                base_vals.columns = [base_USD + '.close']
                terms_vals.columns = [terms_USD + '.close']
            else:
                # if base == 'USD': non_USD = terms
                # if terms == 'USD': non_USD = base

                correct_cr = self.fxconv.correct_notation(cr)

                market_data_request.tickers = correct_cr
                market_data_request.category = 'fx'

                cross_vals = self._market_data_generator.fetch_market_data(
                    market_data_request)

                # Special case for USDUSD!
                if base + terms == 'USDUSD':
                    if freq == 'daily':
                        cross_vals = pd.DataFrame(1,
                                                  index=cross_vals.index,
                                                  columns=cross_vals.columns)
                        filter = Filter()
                        cross_vals = filter.filter_time_series_by_holidays(
                            cross_vals, cal='WEEKDAY')
                else:
                    # Flip if not convention (eg. JPYUSD)
                    if (correct_cr != cr):
                        cross_vals = 1 / cross_vals

                # cross_vals = self._market_data_generator.harvest_time_series(market_data_request)
                cross_vals.columns = [cr + '.close']

        elif type[0:3] == "tot":
            if freq == 'daily':
                # Download base USD cross
                market_data_request.tickers = base + 'USD'
                market_data_request.category = 'fx-' + type

                if type[0:3] == "tot":
                    base_vals = self._market_data_generator.fetch_market_data(
                        market_data_request)

                # Download terms USD cross
                market_data_request.tickers = terms + 'USD'
                market_data_request.category = 'fx-' + type

                if type[0:3] == "tot":
                    terms_vals = self._market_data_generator.fetch_market_data(
                        market_data_request)

                # base_rets = self._calculations.calculate_returns(base_vals)
                # terms_rets = self._calculations.calculate_returns(terms_vals)

                # Special case for USDUSD (and where base or terms is USD)
                if base + terms == 'USDUSD':
                    base_rets = self._calculations.calculate_returns(base_vals)
                    cross_rets = pd.DataFrame(0,
                                              index=base_rets.index,
                                              columns=base_rets.columns)
                elif base + 'USD' == 'USDUSD':
                    cross_rets = -self._calculations.calculate_returns(
                        terms_vals)
                elif terms + 'USD' == 'USDUSD':
                    cross_rets = self._calculations.calculate_returns(
                        base_vals)
                else:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    terms_rets = self._calculations.calculate_returns(
                        terms_vals)

                    cross_rets = base_rets.sub(terms_rets.iloc[:, 0], axis=0)

                # First returns of a time series will be NaN, given we don't know the previous point
                cross_rets.iloc[0] = 0

                cross_vals = self._calculations.create_mult_index(cross_rets)
                cross_vals.columns = [cr + '-' + type + '.close']

            elif freq == 'intraday':
                LoggerManager().getLogger(__name__).info(
                    'Total calculated returns for intraday not implemented yet'
                )
                return None

        return cross_vals
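
For a non-USD cross, the core of _get_individual_fx_cross above is: fetch both USD legs, flip any leg quoted as USD/xxx so that each is expressed as USD per unit of currency, then divide the base leg by the terms leg. A self-contained sketch with synthetic EURUSD and USDJPY series is below (FXConv.correct_notation, which decides which legs need flipping, is not shown in this listing); dividing USD-per-EUR by USD-per-JPY gives JPY per EUR, i.e. the conventional EURJPY quote.

import numpy as np
import pandas as pd

# Synthetic USD legs for illustration: EURUSD (USD per EUR) and USDJPY (JPY per USD)
dates = pd.bdate_range("2020-01-01", "2020-12-31")
eurusd = pd.DataFrame(1.10 + np.random.normal(0, 0.002, len(dates)).cumsum(),
                      index=dates, columns=["EURUSD.close"])
usdjpy = pd.DataFrame(110.0 + np.random.normal(0, 0.2, len(dates)).cumsum(),
                      index=dates, columns=["USDJPY.close"])

# Normalise both legs to "USD per unit of currency" (flip USDJPY to USD per JPY)
usd_per_eur = eurusd.copy()
usd_per_jpy = 1.0 / usdjpy

usd_per_eur.columns = ["temp"]
usd_per_jpy.columns = ["temp"]

# EURJPY = (USD per EUR) / (USD per JPY) = JPY per EUR
eurjpy = usd_per_eur.div(usd_per_jpy, axis="index")
eurjpy.columns = ["EURJPY.close"]
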
Code example #8
File: market.py  Project: pkan0583/findatapy
class FXCrossFactory(object):
    def __init__(self, market_data_generator=None):
        self.logger = LoggerManager().getLogger(__name__)
        self.fxconv = FXConv()

        self.cache = {}

        self.calculations = Calculations()
        self.market_data_generator = market_data_generator

        return

    def flush_cache(self):
        self.cache = {}

    def get_fx_cross_tick(self,
                          start,
                          end,
                          cross,
                          cut="NYC",
                          source="dukascopy",
                          cache_algo='internet_load_return',
                          type='spot',
                          environment='backtest',
                          fields=['bid', 'ask']):

        if isinstance(cross, str):
            cross = [cross]

        market_data_request = MarketDataRequest(
            gran_freq="tick",
            freq_mult=1,
            freq='tick',
            cut=cut,
            fields=['bid', 'ask', 'bidv', 'askv'],
            cache_algo=cache_algo,
            environment=environment,
            start_date=start,
            finish_date=end,
            data_source=source,
            category='fx')

        market_data_generator = self.market_data_generator
        data_frame_agg = None

        for cr in cross:

            if (type == 'spot'):
                market_data_request.tickers = cr

                cross_vals = market_data_generator.fetch_market_data(
                    market_data_request)

                # if user only wants 'close' calculate that from the bid/ask fields
                if fields == ['close']:
                    cross_vals = cross_vals[[cr + '.bid',
                                             cr + '.ask']].mean(axis=1).to_frame()
                    cross_vals.columns = [cr + '.close']

            if data_frame_agg is None:
                data_frame_agg = cross_vals
            else:
                data_frame_agg = data_frame_agg.join(cross_vals, how='outer')

        # strip the nan elements
        data_frame_agg = data_frame_agg.dropna()
        return data_frame_agg

    def get_fx_cross(self,
                     start,
                     end,
                     cross,
                     cut="NYC",
                     source="bloomberg",
                     freq="intraday",
                     cache_algo='internet_load_return',
                     type='spot',
                     environment='backtest',
                     fields=['close']):

        if source == "gain" or source == 'dukascopy' or freq == 'tick':
            return self.get_fx_cross_tick(start,
                                          end,
                                          cross,
                                          cut=cut,
                                          source=source,
                                          cache_algo=cache_algo,
                                          type='spot',
                                          fields=fields)

        if isinstance(cross, str):
            cross = [cross]

        market_data_request_list = []
        freq_list = []
        type_list = []

        for cr in cross:
            market_data_request = MarketDataRequest(freq_mult=1,
                                                    cut=cut,
                                                    fields=['close'],
                                                    freq=freq,
                                                    cache_algo=cache_algo,
                                                    start_date=start,
                                                    finish_date=end,
                                                    data_source=source,
                                                    environment=environment)

            market_data_request.type = type
            market_data_request.cross = cr

            if freq == 'intraday':
                market_data_request.gran_freq = "minute"  # intraday

            elif freq == 'daily':
                market_data_request.gran_freq = "daily"  # daily

            market_data_request_list.append(market_data_request)

        data_frame_agg = []

        # depends on the nature of operation as to whether we should use threading or multiprocessing library
        if DataConstants().market_thread_technique == "thread":
            from multiprocessing.dummy import Pool
        else:
            # most of the time is spent waiting for Bloomberg to return, so can use threads rather than multiprocessing
            # must use the multiprocessing_on_dill library otherwise can't pickle objects correctly
            # note: currently not very stable
            from multiprocessing_on_dill import Pool

        thread_no = DataConstants().market_thread_no['other']

        if market_data_request_list[0].data_source in DataConstants(
        ).market_thread_no:
            thread_no = DataConstants().market_thread_no[
                market_data_request_list[0].data_source]

        # fudge, issue with multithreading and accessing HDF5 files
        # if self.market_data_generator.__class__.__name__ == 'CachedMarketDataGenerator':
        #    thread_no = 0

        if (thread_no > 0):
            pool = Pool(thread_no)

            # open the market data downloads in their own threads and return the results
            result = pool.map_async(self._get_individual_fx_cross,
                                    market_data_request_list)
            data_frame_agg = self.calculations.iterative_outer_join(
                result.get())

            # data_frame_agg = self.calculations.pandas_outer_join(result.get())

            # pool would have already been closed earlier
            # try:
            #    pool.close()
            #    pool.join()
            # except: pass
        else:
            for md_request in market_data_request_list:
                data_frame_agg.append(
                    self._get_individual_fx_cross(md_request))

            data_frame_agg = self.calculations.pandas_outer_join(
                data_frame_agg)

        # strip the nan elements
        data_frame_agg = data_frame_agg.dropna()

        return data_frame_agg

    def _get_individual_fx_cross(self, market_data_request):
        cr = market_data_request.cross
        type = market_data_request.type
        freq = market_data_request.freq

        base = cr[0:3]
        terms = cr[3:6]

        if (type == 'spot'):
            # non-USD crosses
            if base != 'USD' and terms != 'USD':
                base_USD = self.fxconv.correct_notation('USD' + base)
                terms_USD = self.fxconv.correct_notation('USD' + terms)

                # TODO check if the cross exists in the database

                # download base USD cross
                market_data_request.tickers = base_USD
                market_data_request.category = 'fx'

                if base_USD + '.close' in self.cache:
                    base_vals = self.cache[base_USD + '.close']
                else:
                    base_vals = self.market_data_generator.fetch_market_data(
                        market_data_request)
                    self.cache[base_USD + '.close'] = base_vals

                # download terms USD cross
                market_data_request.tickers = terms_USD
                market_data_request.category = 'fx'

                if terms_USD + '.close' in self.cache:
                    terms_vals = self.cache[terms_USD + '.close']
                else:
                    terms_vals = self.market_data_generator.fetch_market_data(
                        market_data_request)
                    self.cache[terms_USD + '.close'] = terms_vals

                # if quoted USD/base flip to get USD terms
                if (base_USD[0:3] == 'USD'):
                    if 'USD' + base + '.close' in self.cache:
                        base_vals = self.cache['USD' + base + '.close']
                    else:
                        base_vals = 1 / base_vals
                        self.cache['USD' + base + '.close'] = base_vals

                # if quoted USD/terms flip to get USD terms
                if (terms_USD[0:3] == 'USD'):
                    if 'USD' + terms + '.close' in self.cache:
                        terms_vals = self.cache['USD' + terms + '.close']
                    else:
                        terms_vals = 1 / terms_vals
                        self.cache['USD' + terms + '.close'] = terms_vals

                base_vals.columns = ['temp']
                terms_vals.columns = ['temp']

                cross_vals = base_vals.div(terms_vals, axis='index')
                cross_vals.columns = [cr + '.close']

                base_vals.columns = [base_USD + '.close']
                terms_vals.columns = [terms_USD + '.close']
            else:
                # if base == 'USD': non_USD = terms
                # if terms == 'USD': non_USD = base

                correct_cr = self.fxconv.correct_notation(cr)

                market_data_request.tickers = correct_cr
                market_data_request.category = 'fx'

                if correct_cr + '.close' in self.cache:
                    cross_vals = self.cache[correct_cr + '.close']
                else:
                    cross_vals = self.market_data_generator.fetch_market_data(
                        market_data_request)

                    # flip if not convention
                    if (correct_cr != cr):
                        if cr + '.close' in self.cache:
                            cross_vals = self.cache[cr + '.close']
                        else:
                            cross_vals = 1 / cross_vals
                            self.cache[cr + '.close'] = cross_vals

                    self.cache[correct_cr + '.close'] = cross_vals

                # cross_vals = self.market_data_generator.harvest_time_series(market_data_request)
                cross_vals.columns = [cr + '.close']

        elif type[0:3] == "tot":
            if freq == 'daily':
                # download base USD cross
                market_data_request.tickers = base + 'USD'
                market_data_request.category = 'fx-tot'

                if type == "tot":
                    base_vals = self.market_data_generator.fetch_market_data(
                        market_data_request)
                else:
                    x = 0

                # download terms USD cross
                market_data_request.tickers = terms + 'USD'
                market_data_request.category = 'fx-tot'

                if type == "tot":
                    terms_vals = self.market_data_generator.fetch_market_data(
                        market_data_request)
                else:
                    pass

                base_rets = self.calculations.calculate_returns(base_vals)
                terms_rets = self.calculations.calculate_returns(terms_vals)

                cross_rets = base_rets.sub(terms_rets.iloc[:, 0], axis=0)

                # first returns of a time series will be NaN, given we don't know the previous point
                cross_rets.iloc[0] = 0

                cross_vals = self.calculations.create_mult_index(cross_rets)
                cross_vals.columns = [cr + '-tot.close']

            elif freq == 'intraday':
                self.logger.info(
                    'Total calculated returns for intraday not implemented yet'
                )
                return None

        return cross_vals
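
This fork memoises the downloaded USD legs in a per-instance dict keyed on strings such as 'EURUSD.close' (flushable via flush_cache). A minimal sketch of that pattern is below; fetch_leg is a hypothetical stand-in for market_data_generator.fetch_market_data and is not part of findatapy's API.

class CachedLegFetcher(object):
    """Hedged sketch of the '<ticker>.close'-keyed caching used above."""

    def __init__(self, fetch_leg):
        self._fetch_leg = fetch_leg  # callable: ticker -> pandas.DataFrame
        self._cache = {}

    def get(self, ticker):
        key = ticker + '.close'

        # Download the leg only the first time it is requested
        if key not in self._cache:
            self._cache[key] = self._fetch_leg(ticker)

        return self._cache[key]

    def flush_cache(self):
        self._cache = {}
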
Code example #9
class FXSpotCurve(object):
    """Construct total return (spot) indices for FX. In future will also convert assets from local currency to foreign currency
    denomination and construct indices from forwards series.

    """
    def __init__(self,
                 market_data_generator=None,
                 depo_tenor='ON',
                 construct_via_currency='no'):
        self._market_data_generator = market_data_generator
        self._calculations = Calculations()

        self._depo_tenor = depo_tenor
        self._construct_via_currency = construct_via_currency

    def generate_key(self):
        from findatapy.market.ioengine import SpeedCache

        # Don't include any "large" objects in the key
        return SpeedCache().generate_key(
            self, ['_market_data_generator', '_calculations'])

    def fetch_continuous_time_series(self,
                                     md_request,
                                     market_data_generator,
                                     construct_via_currency=None):

        if market_data_generator is None:
            market_data_generator = self._market_data_generator

        if construct_via_currency is None:
            construct_via_currency = self._construct_via_currency

        # Eg. we construct AUDJPY via AUDJPY directly
        if construct_via_currency == 'no':
            base_depo_tickers = [
                x[0:3] + self._depo_tenor for x in md_request.tickers
            ]
            terms_depo_tickers = [
                x[3:6] + self._depo_tenor for x in md_request.tickers
            ]

            depo_tickers = list(set(base_depo_tickers + terms_depo_tickers))

            market = Market(market_data_generator=market_data_generator)

            # Deposit data for base and terms currency
            md_request_download = MarketDataRequest(md_request=md_request)

            md_request_download.tickers = depo_tickers
            md_request_download.category = 'base-depos'
            md_request_download.fields = 'close'
            md_request_download.abstract_curve = None

            depo_df = market.fetch_market(md_request_download)

            # Spot data
            md_request_download.tickers = md_request.tickers
            md_request_download.category = 'fx'

            spot_df = market.fetch_market(md_request_download)

            return self.construct_total_return_index(md_request.tickers,
                                                     self._depo_tenor, spot_df,
                                                     depo_df)
        else:
            # eg. we calculate via your domestic currency such as USD, so returns will be in your domestic currency
            # Hence AUDJPY would be calculated via AUDUSD and JPYUSD (subtracting the difference in returns)
            total_return_indices = []

            for tick in md_request.tickers:
                base = tick[0:3]
                terms = tick[3:6]

                md_request_base = MarketDataRequest(md_request=md_request)
                md_request_base.tickers = base + construct_via_currency

                md_request_terms = MarketDataRequest(md_request=md_request)
                md_request_terms.tickers = terms + construct_via_currency

                base_vals = self.fetch_continuous_time_series(
                    md_request_base,
                    market_data_generator,
                    construct_via_currency='no')
                terms_vals = self.fetch_continuous_time_series(
                    md_request_terms,
                    market_data_generator,
                    construct_via_currency='no')

                # Special case for USDUSD (and where base or terms is USD)
                if base + terms == 'USDUSD':
                    base_rets = self._calculations.calculate_returns(base_vals)
                    cross_rets = pd.DataFrame(0,
                                              index=base_rets.index,
                                              columns=base_rets.columns)
                elif base + 'USD' == 'USDUSD':
                    cross_rets = -self._calculations.calculate_returns(
                        terms_vals)
                elif terms + 'USD' == 'USDUSD':
                    cross_rets = self._calculations.calculate_returns(
                        base_vals)
                else:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    terms_rets = self._calculations.calculate_returns(
                        terms_vals)

                    cross_rets = base_rets.sub(terms_rets.iloc[:, 0], axis=0)

                # First returns of a time series will be NaN, given we don't know the previous point
                cross_rets.iloc[0] = 0

                cross_vals = self._calculations.create_mult_index(cross_rets)
                cross_vals.columns = [tick + '-tot.close']

                total_return_indices.append(cross_vals)

            return self._calculations.pandas_outer_join(total_return_indices)

    def unhedged_asset_fx(self,
                          assets_df,
                          asset_currency,
                          home_curr,
                          start_date,
                          finish_date,
                          spot_df=None):
        pass

    def hedged_asset_fx(self,
                        assets_df,
                        asset_currency,
                        home_curr,
                        start_date,
                        finish_date,
                        spot_df=None,
                        total_return_indices_df=None):
        pass

    def get_day_count_conv(self, currency):
        if currency in ['AUD', 'CAD', 'GBP', 'NZD']:
            return 365.0

        return 360.0

    def construct_total_return_index(self, cross_fx, tenor, spot_df,
                                     deposit_df):
        """Creates total return index for selected FX crosses from spot and deposit data

        Parameters
        ----------
        cross_fx : String
            Crosses to construct total return indices (can be a list)
        tenor : String
            Tenor of deposit rates to use to compute carry (typically ON for spot)
        spot_df : pd.DataFrame
            Spot data (must include crosses we select)
        deposit_df : pd.DataFrame
            Deposit data

        Returns
        -------
        pd.DataFrame
        """
        if not (isinstance(cross_fx, list)):
            cross_fx = [cross_fx]

        total_return_index_agg = []

        for cross in cross_fx:
            # Get the spot series, base deposit
            base_deposit = deposit_df[cross[0:3] + tenor + ".close"].to_frame()
            terms_deposit = deposit_df[cross[3:6] + tenor +
                                       ".close"].to_frame()

            # Eg. if we specify USDUSD
            if cross[0:3] == cross[3:6]:
                total_return_index_agg.append(
                    pd.DataFrame(100,
                                 index=base_deposit.index,
                                 columns=[cross + "-tot.close"]))
            else:
                carry = base_deposit.join(terms_deposit, how='inner')

                spot = spot_df[cross + ".close"].to_frame()

                base_daycount = self.get_day_count_conv(cross[0:3])
                terms_daycount = self.get_day_count_conv(cross[3:6])

                # Align the base & terms deposits series to spot
                spot, carry = spot.align(carry, join='left', axis=0)

                # Sometimes depo data can be patchy, ok to fill down, given not very volatile (don't do this with spot!)
                carry = carry.fillna(method='ffill') / 100.0

                # In case there are values missing at start of list (fudge for old data!)
                carry = carry.fillna(method='bfill')

                spot = spot[cross + ".close"].to_frame()
                base_deposit = carry[base_deposit.columns]
                terms_deposit = carry[terms_deposit.columns]

                # Calculate the time difference between each data point
                spot['index_col'] = spot.index
                time = spot['index_col'].diff()
                spot = spot.drop('index_col', 1)

                total_return_index = pd.DataFrame(
                    index=spot.index, columns=[cross + "-tot.close"])
                total_return_index.iloc[0] = 100

                time_diff = time.values.astype(
                    float) / 86400000000000.0  # get time difference in days

                for i in range(1, len(total_return_index.index)):

                    # TODO vectorise this formulae or use Numba
                    # Calculate total return index as product of yesterday, changes in spot and carry accrued
                    total_return_index.values[i] = total_return_index.values[i - 1] * \
                                                   (1 + (1 + base_deposit.values[i] * time_diff[i] / base_daycount) *
                                                    (spot.values[i] / spot.values[i - 1]) \
                                                    - (1 + terms_deposit.values[i] * time_diff[i] / terms_daycount))

                total_return_index_agg.append(total_return_index)

        return self._calculations.pandas_outer_join(total_return_index_agg)
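
The inner loop above is flagged with "# TODO vectorise this formulae or use Numba". Because each step only multiplies the previous index level by a per-period growth factor, the recursion can be written as a cumulative product. Below is a hedged, self-contained sketch of that vectorisation, not findatapy's code; it assumes spot and the two deposit series are already aligned on the same index and that the deposit rates have already been divided by 100, as carry is in the listing.

import numpy as np
import pandas as pd

def total_return_index_vectorised(spot, base_depo, terms_depo,
                                  base_daycount=360.0, terms_daycount=360.0):
    """Vectorised sketch of the carry recursion in the loop above.

    spot, base_depo, terms_depo : pd.Series on the same DatetimeIndex;
    deposit rates expressed as decimals (e.g. 0.02 for 2%).
    """
    # Time between observations in (fractional) days
    dt = spot.index.to_series().diff().dt.total_seconds().fillna(0.0).values / 86400.0

    spot_ratio = (spot / spot.shift(1)).values

    # Per-period growth factor, identical to the loop body in the listing
    growth = (1.0
              + (1.0 + base_depo.values * dt / base_daycount) * spot_ratio
              - (1.0 + terms_depo.values * dt / terms_daycount))
    growth[0] = 1.0  # no previous spot on the first date; index starts at 100

    return pd.Series(100.0 * np.cumprod(growth), index=spot.index, name=spot.name)
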
Code example #10
    def get_intraday_moves_over_custom_event(self, data_frame_rets, ef_time_frame, vol=False,
                                             minute_start = 5, mins = 3 * 60, min_offset = 0 , create_index = False,
                                             resample = False, freq = 'minutes'):

        filter = Filter()

        ef_time_frame = filter.filter_time_series_by_date(data_frame_rets.index[0], data_frame_rets.index[-1], ef_time_frame)
        ef_time = ef_time_frame.index

        if freq == 'minutes':
            ef_time_start = ef_time - timedelta(minutes = minute_start)
            ef_time_end = ef_time + timedelta(minutes = mins)
            ann_factor = 252 * 1440
        elif freq == 'days':
            ef_time = ef_time_frame.index.normalize()
            ef_time_start = ef_time - timedelta(days = minute_start)
            ef_time_end = ef_time + timedelta(days = mins)
            ann_factor = 252

        ords = range(-minute_start + min_offset, mins + min_offset)

        # all data needs to be equally spaced
        if resample:

            # make sure time series is properly sampled at 1 min intervals
            data_frame_rets = data_frame_rets.resample('1min').mean()
            data_frame_rets = data_frame_rets.fillna(value = 0)
            data_frame_rets = filter.remove_out_FX_out_of_hours(data_frame_rets)

        data_frame_rets['Ind'] = numpy.nan

        start_index = data_frame_rets.index.searchsorted(ef_time_start)
        finish_index = data_frame_rets.index.searchsorted(ef_time_end)

        # not all observation windows will be same length (eg. last one?)

        # fill the indices which represent minutes
        # TODO vectorise this!
        for i in range(0, len(ef_time_frame.index)):
            try:
                data_frame_rets.ix[start_index[i]:finish_index[i], 'Ind'] = ords
            except:
                data_frame_rets.ix[start_index[i]:finish_index[i], 'Ind'] = ords[0:(finish_index[i] - start_index[i])]

        # set the release dates
        data_frame_rets.ix[start_index,'Rel'] = ef_time                                         # set entry points
        data_frame_rets.ix[finish_index + 1,'Rel'] = numpy.zeros(len(start_index))              # set exit points
        data_frame_rets['Rel'] = data_frame_rets['Rel'].fillna(method = 'pad')                  # fill down signals

        data_frame_rets = data_frame_rets[pandas.notnull(data_frame_rets['Ind'])]               # get rid of other

        data_frame = data_frame_rets.pivot(index='Ind',
                                           columns='Rel', values=data_frame_rets.columns[0])

        data_frame.index.names = [None]

        if create_index:
            calculations = Calculations()
            data_frame.ix[-minute_start + min_offset,:] = numpy.nan
            data_frame = calculations.create_mult_index(data_frame)
        else:
            if vol is True:
                # annualise (if vol)
                data_frame = data_frame.rolling(center=False,window=5).std() * math.sqrt(ann_factor)
            else:
                data_frame = data_frame.cumsum()

        return data_frame
Code example #11
File: eventstudy.py  Project: dmunozc/finmarketpy
    def get_intraday_moves_over_custom_event(self, data_frame_rets, ef_time_frame, vol=False,
                                             minute_start = 5, mins = 3 * 60, min_offset = 0 , create_index = False,
                                             resample = False, freq = 'minutes', cumsum = True):

        filter = Filter()

        ef_time_frame = filter.filter_time_series_by_date(data_frame_rets.index[0], data_frame_rets.index[-1], ef_time_frame)
        ef_time = ef_time_frame.index

        if freq == 'minutes':
            ef_time_start = ef_time - timedelta(minutes = minute_start)
            ef_time_end = ef_time + timedelta(minutes = mins)
            #annualization factor
            ann_factor = 252 * 1440
        elif freq == 'days':
            ef_time = ef_time_frame.index.normalize()
            ef_time_start = ef_time - timedelta(days = minute_start)
            ef_time_end = ef_time + timedelta(days = mins)
            ann_factor = 252

        ords = range(-minute_start + min_offset, mins + min_offset)

        # all data needs to be equally spaced
        if resample:

            # make sure time series is properly sampled at 1 min intervals
            data_frame_rets = data_frame_rets.resample('1min').mean()
            data_frame_rets = data_frame_rets.fillna(value = 0)
            data_frame_rets = filter.remove_out_FX_out_of_hours(data_frame_rets)
        #clear our indicator signals
        data_frame_rets['Ind'] = numpy.nan

        start_index = data_frame_rets.index.searchsorted(ef_time_start)
        finish_index = data_frame_rets.index.searchsorted(ef_time_end)

        # not all observation windows will be same length (eg. last one?)

        # fill the indices which represent minutes
        # TODO vectorise this!
        for i in range(0, len(ef_time_frame.index)):
            try:
                data_frame_rets.ix[start_index[i]:finish_index[i], 'Ind'] = ords
            except:
                data_frame_rets.ix[start_index[i]:finish_index[i], 'Ind'] = ords[0:(finish_index[i] - start_index[i])]

        # set the release dates
        data_frame_rets.ix[start_index,'Rel'] = ef_time                                         # set entry points
        data_frame_rets.ix[finish_index + 1,'Rel'] = numpy.zeros(len(start_index))              # set exit points
        data_frame_rets['Rel'] = data_frame_rets['Rel'].fillna(method = 'pad')                  # fill down signals

        data_frame_rets = data_frame_rets[pandas.notnull(data_frame_rets['Ind'])]               # get rid of other

        data_frame = data_frame_rets.pivot(index='Ind',
                                           columns='Rel', values=data_frame_rets.columns[0])

        data_frame.index.names = [None]

        if create_index:
            calculations = Calculations()
            data_frame.ix[-minute_start + min_offset,:] = numpy.nan
            data_frame = calculations.create_mult_index(data_frame)
        else:
            if vol is True:
                # annualise (if vol)
                data_frame = data_frame.rolling(center=False,window=5).std() * math.sqrt(ann_factor)
            elif cumsum:
                data_frame = data_frame.cumsum()

        return data_frame
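
The window-labelling code above still relies on the pandas .ix indexer, which was removed in pandas 1.0. The following is a hedged sketch of the same steps using .iloc, assuming data_frame_rets, ords, start_index, finish_index and ef_time are defined exactly as in the listing; the clamp on the exit rows is an added safety assumption, not part of the original.

import numpy as np

ind_col = data_frame_rets.columns.get_loc('Ind')

# Fill the relative-period ordinals for each event window, truncating the last
# window if it runs past the end of the data (as the except branch did above)
for i in range(len(start_index)):
    n = finish_index[i] - start_index[i]
    data_frame_rets.iloc[start_index[i]:finish_index[i], ind_col] = list(ords)[:n]

# Mark entry/exit points, then fill the release timestamp down each window
data_frame_rets['Rel'] = np.nan
data_frame_rets['Rel'] = data_frame_rets['Rel'].astype(object)
rel_col = data_frame_rets.columns.get_loc('Rel')

exit_rows = np.minimum(finish_index + 1, len(data_frame_rets) - 1)  # clamp to last row

data_frame_rets.iloc[start_index, rel_col] = ef_time
data_frame_rets.iloc[exit_rows, rel_col] = 0
data_frame_rets['Rel'] = data_frame_rets['Rel'].ffill()
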
Code example #12
class FXOptionsCurve(object):
    """Constructs continuous forwards time series total return indices from underlying forwards contracts.

    """
    def __init__(
            self,
            market_data_generator=None,
            fx_vol_surface=None,
            enter_trading_dates=None,
            fx_options_trading_tenor=market_constants.fx_options_trading_tenor,
            roll_days_before=market_constants.fx_options_roll_days_before,
            roll_event=market_constants.fx_options_roll_event,
            construct_via_currency='no',
            fx_options_tenor_for_interpolation=market_constants.
        fx_options_tenor_for_interpolation,
            base_depos_tenor=data_constants.base_depos_tenor,
            roll_months=market_constants.fx_options_roll_months,
            cum_index=market_constants.fx_options_cum_index,
            strike=market_constants.fx_options_index_strike,
            contract_type=market_constants.fx_options_index_contract_type,
            premium_output=market_constants.fx_options_index_premium_output,
            position_multiplier=1,
            depo_tenor_for_option=market_constants.fx_options_depo_tenor,
            freeze_implied_vol=market_constants.fx_options_freeze_implied_vol,
            tot_label='',
            cal=None,
            output_calculation_fields=market_constants.
        output_calculation_fields):
        """Initializes FXForwardsCurve

        Parameters
        ----------
        market_data_generator : MarketDataGenerator
            Used for downloading market data

        fx_vol_surface : FXVolSurface
            We can specify the FX vol surface beforehand if we want

        fx_options_trading_tenor : str
            What is the primary options contract being used to trade (default - '1M')

        roll_days_before : int
            Number of days before the roll event to enter into a new options contract

        roll_event : str
            What constitutes a roll event? ('month-end', 'quarter-end', 'year-end', 'expiry')

        cum_index : str
            In total return index, do we compute in additive or multiplicative way ('add' or 'mult')

        construct_via_currency : str
            What currency should we construct the cross via? Eg. if we asked for AUDJPY we can construct it via
            AUDUSD & JPYUSD, as opposed to AUDJPY directly (default - 'no')

        fx_options_tenor_for_interpolation : str(list)
            Which option tenors should we use for interpolation

        base_depos_tenor : str(list)
            Which base deposits tenors do we need (this is only necessary if we want to start inferring depos)

        roll_months : int
            After how many months should we initiate a roll. Typically for trading 1M this should be 1, for 3M this should be 3
            etc.

        tot_label : str
            Postfix for the total returns field

        cal : str
            Calendar to use for expiry (if None, uses that of FX pair)

        output_calculation_fields : bool
            Also output additional data such as expiries etc. alongside the total return indices
        """

        self._market_data_generator = market_data_generator
        self._calculations = Calculations()
        self._calendar = Calendar()
        self._filter = Filter()

        self._fx_vol_surface = fx_vol_surface

        self._enter_trading_dates = enter_trading_dates
        self._fx_options_trading_tenor = fx_options_trading_tenor
        self._roll_days_before = roll_days_before
        self._roll_event = roll_event

        self._construct_via_currency = construct_via_currency
        self._fx_options_tenor_for_interpolation = fx_options_tenor_for_interpolation
        self._base_depos_tenor = base_depos_tenor

        self._roll_months = roll_months
        self._cum_index = cum_index
        self._contact_type = contract_type
        self._strike = strike
        self._premium_output = premium_output

        self._position_multiplier = position_multiplier

        self._depo_tenor_for_option = depo_tenor_for_option

        self._freeze_implied_vol = freeze_implied_vol

        self._tot_label = tot_label
        self._cal = cal

        self._output_calculation_fields = output_calculation_fields

    def generate_key(self):
        from findatapy.market.ioengine import SpeedCache

        # Don't include any "large" objects in the key
        return SpeedCache().generate_key(self, [
            '_market_data_generator', '_calculations', '_calendar', '_filter'
        ])

    def fetch_continuous_time_series(self,
                                     md_request,
                                     market_data_generator,
                                     fx_vol_surface=None,
                                     enter_trading_dates=None,
                                     fx_options_trading_tenor=None,
                                     roll_days_before=None,
                                     roll_event=None,
                                     construct_via_currency=None,
                                     fx_options_tenor_for_interpolation=None,
                                     base_depos_tenor=None,
                                     roll_months=None,
                                     cum_index=None,
                                     strike=None,
                                     contract_type=None,
                                     premium_output=None,
                                     position_multiplier=None,
                                     depo_tenor_for_option=None,
                                     freeze_implied_vol=None,
                                     tot_label=None,
                                     cal=None,
                                     output_calculation_fields=None):

        if fx_vol_surface is None: fx_vol_surface = self._fx_vol_surface
        if enter_trading_dates is None:
            enter_trading_dates = self._enter_trading_dates
        if market_data_generator is None:
            market_data_generator = self._market_data_generator
        if fx_options_trading_tenor is None:
            fx_options_trading_tenor = self._fx_options_trading_tenor
        if roll_days_before is None: roll_days_before = self._roll_days_before
        if roll_event is None: roll_event = self._roll_event
        if construct_via_currency is None:
            construct_via_currency = self._construct_via_currency
        if fx_options_tenor_for_interpolation is None:
            fx_options_tenor_for_interpolation = self._fx_options_tenor_for_interpolation
        if base_depos_tenor is None: base_depos_tenor = self._base_depos_tenor
        if roll_months is None: roll_months = self._roll_months
        if strike is None: strike = self._strike
        if contract_type is None: contract_type = self._contact_type
        if premium_output is None: premium_output = self._premium_output

        if position_multiplier is None:
            position_multiplier = self._position_multiplier

        if depo_tenor_for_option is None:
            depo_tenor_for_option = self._depo_tenor_for_option

        if freeze_implied_vol is None:
            freeze_implied_vol = self._freeze_implied_vol

        if tot_label is None: tot_label = self._tot_label
        if cal is None: cal = self._cal

        if output_calculation_fields is None:
            output_calculation_fields = self._output_calculation_fields

        # Eg. we construct EURJPY via EURJPY directly (note: would need to have sufficient options/forward data for this)
        if construct_via_currency == 'no':

            if fx_vol_surface is None:
                # Download FX spot, FX forwards points and base depos etc.
                market = Market(market_data_generator=market_data_generator)

                md_request_download = MarketDataRequest(md_request=md_request)

                fx_conv = FXConv()

                # CAREFUL: convert the tickers to correct notation, eg. USDEUR => EURUSD, because our data
                # should be fetched in correct convention
                md_request_download.tickers = [
                    fx_conv.correct_notation(x) for x in md_request.tickers
                ]
                md_request_download.category = 'fx-vol-market'
                md_request_download.fields = 'close'
                md_request_download.abstract_curve = None
                md_request_download.fx_options_tenor = fx_options_tenor_for_interpolation
                md_request_download.base_depos_tenor = base_depos_tenor
                # md_request_download.base_depos_currencies = []

                forwards_market_df = market.fetch_market(md_request_download)
            else:
                forwards_market_df = None

            # Now use the original tickers
            return self.construct_total_return_index(
                md_request.tickers,
                forwards_market_df,
                fx_vol_surface=fx_vol_surface,
                enter_trading_dates=enter_trading_dates,
                fx_options_trading_tenor=fx_options_trading_tenor,
                roll_days_before=roll_days_before,
                roll_event=roll_event,
                fx_options_tenor_for_interpolation=
                fx_options_tenor_for_interpolation,
                roll_months=roll_months,
                cum_index=cum_index,
                strike=strike,
                contract_type=contract_type,
                premium_output=premium_output,
                position_multiplier=position_multiplier,
                freeze_implied_vol=freeze_implied_vol,
                depo_tenor_for_option=depo_tenor_for_option,
                tot_label=tot_label,
                cal=cal,
                output_calculation_fields=output_calculation_fields)
        else:
            # eg. we calculate via your domestic currency such as USD, so returns will be in your domestic currency
            # Hence AUDJPY would be calculated via AUDUSD and JPYUSD (subtracting the difference in returns)
            total_return_indices = []

            for tick in md_request.tickers:
                base = tick[0:3]
                terms = tick[3:6]

                md_request_base = MarketDataRequest(md_request=md_request)
                md_request_base.tickers = base + construct_via_currency

                md_request_terms = MarketDataRequest(md_request=md_request)
                md_request_terms.tickers = terms + construct_via_currency

                # Construct the base and terms separately (ie. AUDJPY => AUDUSD & JPYUSD)
                base_vals = self.fetch_continuous_time_series(
                    md_request_base,
                    market_data_generator,
                    fx_vol_surface=fx_vol_surface,
                    enter_trading_dates=enter_trading_dates,
                    fx_options_trading_tenor=fx_options_trading_tenor,
                    roll_days_before=roll_days_before,
                    roll_event=roll_event,
                    fx_options_tenor_for_interpolation=
                    fx_options_tenor_for_interpolation,
                    base_depos_tenor=base_depos_tenor,
                    roll_months=roll_months,
                    cum_index=cum_index,
                    strike=strike,
                    contract_type=contract_type,
                    premium_output=premium_output,
                    position_multiplier=position_multiplier,
                    depo_tenor_for_option=depo_tenor_for_option,
                    freeze_implied_vol=freeze_implied_vol,
                    tot_label=tot_label,
                    cal=cal,
                    output_calculation_fields=output_calculation_fields,
                    construct_via_currency='no')

                terms_vals = self.fetch_continuous_time_series(
                    md_request_terms,
                    market_data_generator,
                    fx_vol_surface=fx_vol_surface,
                    enter_trading_dates=enter_trading_dates,
                    fx_options_trading_tenor=fx_options_trading_tenor,
                    roll_days_before=roll_days_before,
                    roll_event=roll_event,
                    fx_options_tenor_for_interpolation=
                    fx_options_tenor_for_interpolation,
                    base_depos_tenor=base_depos_tenor,
                    roll_months=roll_months,
                    cum_index=cum_index,
                    strike=strike,
                    contract_type=contract_type,
                    premium_output=premium_output,
                    position_multiplier=position_multiplier,
                    depo_tenor_for_option=depo_tenor_for_option,
                    freeze_implied_vol=freeze_implied_vol,
                    tot_label=tot_label,
                    cal=cal,
                    output_calculation_fields=output_calculation_fields,
                    construct_via_currency='no')

                # Special case for USDUSD (and where the base or terms currency equals construct_via_currency)
                if base + terms == construct_via_currency + construct_via_currency:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    cross_rets = pd.DataFrame(0,
                                              index=base_rets.index,
                                              columns=base_rets.columns)
                elif base + construct_via_currency == construct_via_currency + construct_via_currency:
                    cross_rets = -self._calculations.calculate_returns(
                        terms_vals)
                elif terms + construct_via_currency == construct_via_currency + construct_via_currency:
                    cross_rets = self._calculations.calculate_returns(
                        base_vals)
                else:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    terms_rets = self._calculations.calculate_returns(
                        terms_vals)

                    cross_rets = base_rets.sub(terms_rets.iloc[:, 0], axis=0)

                # The first return of a time series will be NaN, given we don't know the previous point
                cross_rets.iloc[0] = 0

                cross_vals = self._calculations.create_mult_index(cross_rets)
                cross_vals.columns = [tick + '-option-tot.close']

                total_return_indices.append(cross_vals)

            return self._calculations.join(total_return_indices, how='outer')

    def unhedged_asset_fx(self,
                          assets_df,
                          asset_currency,
                          home_curr,
                          start_date,
                          finish_date,
                          spot_df=None):
        pass

    def hedged_asset_fx(self,
                        assets_df,
                        asset_currency,
                        home_curr,
                        start_date,
                        finish_date,
                        spot_df=None,
                        total_return_indices_df=None):
        pass

    def get_day_count_conv(self, currency):
        if currency in market_constants.currencies_with_365_basis:
            return 365.0

        return 360.0

    def construct_total_return_index(self,
                                     cross_fx,
                                     market_df,
                                     fx_vol_surface=None,
                                     enter_trading_dates=None,
                                     fx_options_trading_tenor=None,
                                     roll_days_before=None,
                                     roll_event=None,
                                     roll_months=None,
                                     cum_index=None,
                                     strike=None,
                                     contract_type=None,
                                     premium_output=None,
                                     position_multiplier=None,
                                     fx_options_tenor_for_interpolation=None,
                                     freeze_implied_vol=None,
                                     depo_tenor_for_option=None,
                                     tot_label=None,
                                     cal=None,
                                     output_calculation_fields=None):

        if fx_vol_surface is None: fx_vol_surface = self._fx_vol_surface
        if enter_trading_dates is None:
            enter_trading_dates = self._enter_trading_dates
        if fx_options_trading_tenor is None:
            fx_options_trading_tenor = self._fx_options_trading_tenor
        if roll_days_before is None: roll_days_before = self._roll_days_before
        if roll_event is None: roll_event = self._roll_event
        if roll_months is None: roll_months = self._roll_months
        if cum_index is None: cum_index = self._cum_index
        if strike is None: strike = self._strike
        if contract_type is None: contract_type = self._contact_type
        if premium_output is None: premium_output = self._premium_output
        if position_multiplier is None:
            position_multiplier = self._position_multiplier
        if fx_options_tenor_for_interpolation is None:
            fx_options_tenor_for_interpolation = self._fx_options_tenor_for_interpolation

        if freeze_implied_vol is None:
            freeze_implied_vol = self._freeze_implied_vol

        if depo_tenor_for_option is None:
            depo_tenor_for_option = self._depo_tenor_for_option
        if tot_label is None: tot_label = self._tot_label
        if cal is None: cal = self._cal

        if output_calculation_fields is None:
            output_calculation_fields = self._output_calculation_fields

        if not (isinstance(cross_fx, list)):
            cross_fx = [cross_fx]

        total_return_index_df_agg = []

        # Remove columns where there is no data (because these vols typically aren't quoted)
        if market_df is not None:
            market_df = market_df.dropna(how='all', axis=1)

        fx_options_pricer = FXOptionsPricer(premium_output=premium_output)

        def get_roll_date(horizon_d, expiry_d, asset_hols, month_adj=0):
            if roll_event == 'month-end':
                roll_d = horizon_d + CustomBusinessMonthEnd(
                    roll_months + month_adj, holidays=asset_hols)

                # Special case: if 0 days are specified, always roll exactly on the month-end date
                if roll_days_before > 0:
                    return (roll_d - CustomBusinessDay(n=roll_days_before,
                                                       holidays=asset_hols))

            elif roll_event == 'expiry-date':
                roll_d = expiry_d

                # Special case: if 0 days are specified, always roll exactly on the expiry date
                if roll_days_before > 0:
                    return (roll_d - CustomBusinessDay(n=roll_days_before,
                                                       holidays=asset_hols))

            return roll_d

        for cross in cross_fx:

            if cal is None:
                cal = cross

            # Eg. if we specify USDUSD
            if cross[0:3] == cross[3:6]:
                total_return_index_df_agg.append(
                    pd.DataFrame(100,
                                 index=market_df.index,
                                 columns=[cross + "-option-tot.close"]))
            else:
                # Is the FX cross in the correct convention?
                old_cross = cross

                cross = FXConv().correct_notation(cross)

                # TODO: also handle specification of non-standard crosses like USDGBP
                if old_cross != cross:
                    pass

                if fx_vol_surface is None:
                    fx_vol_surface = FXVolSurface(
                        market_df=market_df,
                        asset=cross,
                        tenors=fx_options_tenor_for_interpolation,
                        depo_tenor=depo_tenor_for_option)

                    market_df = fx_vol_surface.get_all_market_data()

                horizon_date = market_df.index

                expiry_date = np.zeros(len(horizon_date), dtype=object)
                roll_date = np.zeros(len(horizon_date), dtype=object)

                new_trade = np.full(len(horizon_date), False, dtype=bool)
                exit_trade = np.full(len(horizon_date), False, dtype=bool)
                has_position = np.full(len(horizon_date), False, dtype=bool)

                asset_holidays = self._calendar.get_holidays(cal=cross)

                # If no entry dates specified, assume we just keep rolling
                if enter_trading_dates is None:
                    # Get first expiry date
                    expiry_date[
                        0] = self._calendar.get_expiry_date_from_horizon_date(
                            pd.DatetimeIndex([horizon_date[0]]),
                            fx_options_trading_tenor,
                            cal=cal,
                            asset_class='fx-vol')[0]

                    # For the first month we want it to expire within that month (for consistency), hence month_adj=0 ONLY here
                    roll_date[0] = get_roll_date(horizon_date[0],
                                                 expiry_date[0],
                                                 asset_holidays,
                                                 month_adj=0)

                    # New trade => entry at beginning AND on every roll
                    new_trade[0] = True
                    exit_trade[0] = False
                    has_position[0] = True

                    # Get all the expiry dates and roll dates
                    # At each "roll/trade" day we need to reset them for the new contract
                    for i in range(1, len(horizon_date)):
                        has_position[i] = True

                        # If the horizon date has reached yesterday's roll date, we have a new roll/trade
                        if (horizon_date[i] - roll_date[i - 1]).days >= 0:
                            new_trade[i] = True
                        else:
                            new_trade[i] = False

                        # If we're entering a new trade/contract (and exiting an old trade) we need to get new expiry and roll dates
                        if new_trade[i]:
                            exp = self._calendar.get_expiry_date_from_horizon_date(
                                pd.DatetimeIndex([horizon_date[i]]),
                                fx_options_trading_tenor,
                                cal=cal,
                                asset_class='fx-vol')[0]

                            # Make sure we don't expire on a date in the history where there isn't market data
                            # It is ok for future values to expire after market data (just not in the backtest!)
                            if exp not in market_df.index:
                                exp_index = market_df.index.searchsorted(exp)

                                if exp_index < len(market_df.index):
                                    exp_index = min(exp_index,
                                                    len(market_df.index))

                                    exp = market_df.index[exp_index]

                            expiry_date[i] = exp

                            roll_date[i] = get_roll_date(
                                horizon_date[i], expiry_date[i],
                                asset_holidays)
                            exit_trade[i] = True
                        else:
                            if horizon_date[i] <= expiry_date[i - 1]:
                                # Otherwise use previous expiry and roll dates, because we're still holding same contract
                                expiry_date[i] = expiry_date[i - 1]
                                roll_date[i] = roll_date[i - 1]
                                exit_trade[i] = False
                            else:
                                exit_trade[i] = True
                else:
                    new_trade[horizon_date.searchsorted(
                        enter_trading_dates)] = True
                    has_position[horizon_date.searchsorted(
                        enter_trading_dates)] = True

                    # Get first expiry date
                    #expiry_date[0] = \
                    #    self._calendar.get_expiry_date_from_horizon_date(pd.DatetimeIndex([horizon_date[0]]),
                    #                                                     fx_options_trading_tenor, cal=cal,
                    #                                                     asset_class='fx-vol')[0]

                    # For the first month we want it to expire within that month (for consistency), hence month_adj=0 ONLY here
                    #roll_date[0] = get_roll_date(horizon_date[0], expiry_date[0], asset_holidays, month_adj=0)

                    # New trade => entry at beginning AND on every roll
                    #new_trade[0] = True
                    #exit_trade[0] = False
                    #has_position[0] = True

                    # Get all the expiry dates and roll dates
                    # At each "roll/trade" day we need to reset them for the new contract
                    for i in range(0, len(horizon_date)):

                        # If we're entering a new trade/contract (and exiting an old trade) we need to get new expiry and roll dates
                        if new_trade[i]:
                            exp = \
                                self._calendar.get_expiry_date_from_horizon_date(pd.DatetimeIndex([horizon_date[i]]),
                                                                                 fx_options_trading_tenor, cal=cal,
                                                                                 asset_class='fx-vol')[0]

                            # Make sure we don't expire on a date in the history where there isn't market data
                            # It is ok for future values to expire after market data (just not in the backtest!)
                            if exp not in market_df.index:
                                exp_index = market_df.index.searchsorted(exp)

                                if exp_index < len(market_df.index):
                                    exp_index = min(exp_index,
                                                    len(market_df.index))

                                    exp = market_df.index[exp_index]

                            expiry_date[i] = exp

                            # roll_date[i] = get_roll_date(horizon_date[i], expiry_date[i], asset_holidays)
                            # if i > 0:
                            # Makes the assumption we aren't rolling contracts
                            exit_trade[i] = False
                        else:
                            if i > 0:
                                # Check there's valid expiry on previous day (if not then we're not in an option trade here!)
                                if expiry_date[i - 1] == 0:
                                    has_position[i] = False
                                else:
                                    if horizon_date[i] <= expiry_date[i - 1]:
                                        # Otherwise use previous expiry and roll dates, because we're still holding same contract
                                        expiry_date[i] = expiry_date[i - 1]
                                        # roll_date[i] = roll_date[i - 1]
                                        has_position[i] = True

                                    if horizon_date[i] == expiry_date[i]:
                                        exit_trade[i] = True
                                    else:
                                        exit_trade[i] = False

                # Note: may need to add a discount factor when marking the option to market

                mtm = np.zeros(len(horizon_date))
                calculated_strike = np.zeros(len(horizon_date))
                interpolated_option = np.zeros(len(horizon_date))
                implied_vol = np.zeros(len(horizon_date))
                delta = np.zeros(len(horizon_date))

                # For debugging
                df_temp = pd.DataFrame()

                df_temp['expiry-date'] = expiry_date
                df_temp['horizon-date'] = horizon_date
                df_temp['roll-date'] = roll_date
                df_temp['new-trade'] = new_trade
                df_temp['exit-trade'] = exit_trade
                df_temp['has-position'] = has_position

                if has_position[0]:
                    # Special case for the first day of history (given we have no previous positions)
                    option_values_, spot_, strike_, vol_, delta_, expiry_date_, intrinsic_values_  = \
                        fx_options_pricer.price_instrument(cross, horizon_date[0], strike, expiry_date[0],
                            contract_type=contract_type,
                            tenor=fx_options_trading_tenor,
                            fx_vol_surface=fx_vol_surface,
                            return_as_df=False)

                    interpolated_option[0] = option_values_
                    calculated_strike[0] = strike_
                    implied_vol[0] = vol_

                mtm[0] = 0

                # Now price options for rest of history
                # On rolling dates: MTM will be the previous option contract (interpolated)
                # On non-rolling dates: it will be the current option contract
                for i in range(1, len(horizon_date)):
                    if exit_trade[i]:
                        # Price option trade being exited
                        option_values_, spot_, strike_, vol_, delta_, expiry_date_, intrinsic_values_ = \
                            fx_options_pricer.price_instrument(cross, horizon_date[i], calculated_strike[i-1], expiry_date[i-1],
                            contract_type=contract_type,
                            tenor=fx_options_trading_tenor,
                            fx_vol_surface=fx_vol_surface,
                            return_as_df=False)

                        # Store as MTM
                        mtm[i] = option_values_
                        # Note: these will get overwritten if there's a new trade
                        delta[i] = 0
                        calculated_strike[i] = calculated_strike[i - 1]

                    if new_trade[i]:
                        # Price new option trade being entered
                        option_values_, spot_, strike_, vol_, delta_, expiry_date_, intrinsic_values_ = \
                            fx_options_pricer.price_instrument(cross, horizon_date[i], strike, expiry_date[i],
                            contract_type=contract_type,
                            tenor=fx_options_trading_tenor,
                            fx_vol_surface=fx_vol_surface,
                            return_as_df=False)

                        calculated_strike[i] = strike_  # option_output[cross + '-strike.close'].values
                        implied_vol[i] = vol_
                        interpolated_option[i] = option_values_
                        delta[i] = delta_

                    elif has_position[i] and not (exit_trade[i]):
                        # Price current option trade
                        # - strike/expiry the same as yesterday
                        # - other market inputs taken live, closer to expiry
                        calculated_strike[i] = calculated_strike[i - 1]

                        if freeze_implied_vol:
                            frozen_vol = implied_vol[i - 1]
                        else:
                            frozen_vol = None

                        option_values_, spot_, strike_, vol_, delta_, expiry_date_, intrinsic_values_ = \
                            fx_options_pricer.price_instrument(cross, horizon_date[i], calculated_strike[i],
                                expiry_date[i],
                                vol=frozen_vol,
                                contract_type=contract_type,
                                tenor=fx_options_trading_tenor,
                                fx_vol_surface=fx_vol_surface,
                                return_as_df=False)

                        interpolated_option[i] = option_values_
                        implied_vol[i] = vol_
                        mtm[i] = interpolated_option[i]
                        delta[i] = delta_

                # Calculate delta hedging P&L
                spot_rets = (market_df[cross + ".close"] /
                             market_df[cross + ".close"].shift(1) - 1).values

                if tot_label == '':
                    tot_rets = spot_rets
                else:
                    tot_rets = (
                        market_df[cross + "-" + tot_label + ".close"] /
                        market_df[cross + "-" + tot_label + ".close"].shift(1)
                        - 1).values

                # Remember to take the inverted sign, eg. if the call delta is +20%, we need -20% of spot to flatten the delta
                # Also adjust the sign for whether we are long or short the option
                delta_hedging_pnl = -np.roll(
                    delta, 1) * tot_rets * position_multiplier
                delta_hedging_pnl[0] = 0

                # Calculate the option P&L (given the option premium is already a percentage, we only need to subtract)
                # Again we need to invert the sign if we are short the option
                option_rets = (mtm - np.roll(interpolated_option,
                                             1)) * position_multiplier
                option_rets[0] = 0

                # Calculate option + delta hedging P&L
                option_delta_rets = delta_hedging_pnl + option_rets

                if cum_index == 'mult':
                    cum_rets = 100 * np.cumprod(1.0 + option_rets)
                    cum_delta_rets = 100 * np.cumprod(1.0 + delta_hedging_pnl)
                    cum_option_delta_rets = 100 * np.cumprod(1.0 +
                                                             option_delta_rets)

                elif cum_index == 'add':
                    cum_rets = 100 + 100 * np.cumsum(option_rets)
                    cum_delta_rets = 100 + 100 * np.cumsum(delta_hedging_pnl)
                    cum_option_delta_rets = 100 + 100 * np.cumsum(
                        option_delta_rets)

                total_return_index_df = pd.DataFrame(
                    index=horizon_date, columns=[cross + "-option-tot.close"])
                total_return_index_df[cross + "-option-tot.close"] = cum_rets

                if output_calculation_fields:
                    total_return_index_df[
                        cross +
                        '-interpolated-option.close'] = interpolated_option
                    total_return_index_df[cross + '-mtm.close'] = mtm
                    total_return_index_df[cross + ".close"] = market_df[
                        cross + ".close"].values
                    total_return_index_df[cross +
                                          '-implied-vol.close'] = implied_vol
                    total_return_index_df[cross +
                                          '-new-trade.close'] = new_trade
                    total_return_index_df[cross + '.roll-date'] = roll_date
                    total_return_index_df[cross +
                                          '-exit-trade.close'] = exit_trade
                    total_return_index_df[cross + '.expiry-date'] = expiry_date
                    total_return_index_df[
                        cross + '-calculated-strike.close'] = calculated_strike
                    total_return_index_df[cross +
                                          '-option-return.close'] = option_rets
                    total_return_index_df[cross +
                                          '-spot-return.close'] = spot_rets
                    total_return_index_df[cross +
                                          '-tot-return.close'] = tot_rets
                    total_return_index_df[cross + '-delta.close'] = delta
                    total_return_index_df[
                        cross + '-delta-pnl-return.close'] = delta_hedging_pnl
                    total_return_index_df[
                        cross + '-delta-pnl-index.close'] = cum_delta_rets
                    total_return_index_df[
                        cross +
                        '-option-delta-return.close'] = option_delta_rets
                    total_return_index_df[
                        cross +
                        '-option-delta-tot.close'] = cum_option_delta_rets

                total_return_index_df_agg.append(total_return_index_df)

        return self._calculations.join(total_return_index_df_agg, how='outer')

    def apply_tc_signals_to_total_return_index(self,
                                               cross_fx,
                                               total_return_index_orig_df,
                                               option_tc_bp,
                                               spot_tc_bp,
                                               signal_df=None,
                                               cum_index=None):

        # TODO signal not implemented yet
        if cum_index is None: cum_index = self._cum_index

        total_return_index_df_agg = []

        if not (isinstance(cross_fx, list)):
            cross_fx = [cross_fx]

        # Convert the quoted spread from basis points into a decimal cost
        # (the divide by 2 assumes half the spread is paid on each transaction)
        option_tc = option_tc_bp / (2 * 100 * 100)
        spot_tc = spot_tc_bp / (2 * 100 * 100)

        total_return_index_df = total_return_index_orig_df.copy()

        for cross in cross_fx:

            # p = abs(total_return_index_df[cross + '-roll.close'].shift(1)) * option_tc
            # q = abs(total_return_index_df[cross + '-delta.close'] - total_return_index_df[cross + '-delta.close'].shift(1)) * spot_tc

            # Additional columns to include P&L with transaction costs
            total_return_index_df[cross + '-option-return-with-tc.close'] = \
                total_return_index_df[cross + '-option-return.close'] - abs(total_return_index_df[cross + '-new-trade.close'].shift(1)) * option_tc
            total_return_index_df[cross + '-delta-pnl-return-with-tc.close'] = \
                total_return_index_df[cross + '-delta-pnl-return.close'] \
                - abs(total_return_index_df[cross + '-delta.close'] - total_return_index_df[cross + '-delta.close'].shift(1)) * spot_tc

            total_return_index_df[cross +
                                  '-option-return-with-tc.close'][0] = 0
            total_return_index_df[cross +
                                  '-delta-pnl-return-with-tc.close'][0] = 0
            total_return_index_df[cross + '-option-delta-return-with-tc.close'] = \
                total_return_index_df[cross + '-option-return-with-tc.close'] + total_return_index_df[cross + '-delta-pnl-return-with-tc.close']

            if cum_index == 'mult':
                cum_rets = 100 * np.cumprod(1.0 + total_return_index_df[
                    cross + '-option-return-with-tc.close'].values)
                cum_delta_rets = 100 * np.cumprod(1.0 + total_return_index_df[
                    cross + '-delta-pnl-return-with-tc.close'].values)
                cum_option_delta_rets = 100 * np.cumprod(
                    1.0 + total_return_index_df[
                        cross + '-option-delta-return-with-tc.close'].values)

            elif cum_index == 'add':
                cum_rets = 100 + 100 * np.cumsum(total_return_index_df[
                    cross + '-option-return-with-tc.close'].values)
                cum_delta_rets = 100 + 100 * np.cumsum(total_return_index_df[
                    cross + '-delta-pnl-return-with-tc.close'].values)
                cum_option_delta_rets = 100 + 100 * np.cumsum(
                    total_return_index_df[
                        cross + '-option-delta-return-with-tc.close'].values)

            total_return_index_df[cross +
                                  "-option-tot-with-tc.close"] = cum_rets
            total_return_index_df[
                cross + '-delta-pnl-index-with-tc.close'] = cum_delta_rets
            total_return_index_df[
                cross +
                '-option-delta-tot-with-tc.close'] = cum_option_delta_rets

            total_return_index_df_agg.append(total_return_index_df)

        return self._calculations.join(total_return_index_df_agg, how='outer')
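A minimal usage sketch of the methods above (not taken from the original file): it assumes they belong to finmarketpy's FXOptionsCurve class with findatapy's MarketDataRequest/MarketDataGenerator available; the import paths, data source, tickers and cost values are illustrative assumptions only.

# Usage sketch, assuming the import path and a configured Bloomberg data source
from findatapy.market import MarketDataRequest, MarketDataGenerator
from finmarketpy.curve.fxoptionscurve import FXOptionsCurve  # import path assumed

market_data_generator = MarketDataGenerator()

md_request = MarketDataRequest(start_date='01 Jan 2015', finish_date='01 Jan 2020',
                               data_source='bloomberg', cut='NYC',
                               tickers=['EURUSD'], fields='close')

# Constructor arguments assumed to mirror the self._ defaults referenced above
fx_options_curve = FXOptionsCurve(market_data_generator=market_data_generator)

# Continuous option total return index for EURUSD, keeping the calculation fields
eurusd_option_tot_df = fx_options_curve.fetch_continuous_time_series(
    md_request, market_data_generator, construct_via_currency='no',
    output_calculation_fields=True)

# Overlay transaction costs on those calculation fields
eurusd_option_tot_tc_df = fx_options_curve.apply_tc_signals_to_total_return_index(
    'EURUSD', eurusd_option_tot_df, option_tc_bp=10, spot_tc_bp=2)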
Code example #13
    def calculate_trading_PnL(self, br, asset_a_df, signal_df):
        """
        calculate_trading_PnL - Calculates P&L of a trading strategy and statistics to be retrieved later

        Parameters
        ----------
        br : BacktestRequest
            Parameters for the backtest specifying start date, finish date, transaction costs etc.

        asset_a_df : pandas.DataFrame
            Asset prices to be traded

        signal_df : pandas.DataFrame
            Signals for the trading strategy
        """

        calculations = Calculations()
        # signal_df.to_csv('e:/temp0.csv')
        # make sure the dates of both traded asset and signal are aligned properly
        asset_df, signal_df = asset_a_df.align(signal_df,
                                               join='left',
                                               axis='index')

        # only allow signals to change on the days when we can trade assets
        signal_df = signal_df.mask(numpy.isnan(
            asset_df.values))  # fill asset holidays with NaN signals
        signal_df = signal_df.fillna(method='ffill')  # fill these down
        asset_df = asset_df.fillna(method='ffill')  # fill down asset holidays

        returns_df = calculations.calculate_returns(asset_df)
        tc = br.spot_tc_bp

        signal_cols = signal_df.columns.values
        returns_cols = returns_df.columns.values

        pnl_cols = []

        for i in range(0, len(returns_cols)):
            pnl_cols.append(returns_cols[i] + " / " + signal_cols[i])

        # do we have a vol target for individual signals?
        if hasattr(br, 'signal_vol_adjust'):
            if br.signal_vol_adjust is True:
                risk_engine = RiskEngine()

                if not (hasattr(br, 'signal_vol_resample_type')):
                    br.signal_vol_resample_type = 'mean'

                if not (hasattr(br, 'signal_vol_resample_freq')):
                    br.signal_vol_resample_freq = None

                leverage_df = risk_engine.calculate_leverage_factor(
                    returns_df, br.signal_vol_target,
                    br.signal_vol_max_leverage, br.signal_vol_periods,
                    br.signal_vol_obs_in_year, br.signal_vol_rebalance_freq,
                    br.signal_vol_resample_freq, br.signal_vol_resample_type)

                signal_df = pandas.DataFrame(signal_df.values *
                                             leverage_df.values,
                                             index=signal_df.index,
                                             columns=signal_df.columns)

                self._individual_leverage = leverage_df  # contains leverage of individual signal (before portfolio vol target)

        _pnl = calculations.calculate_signal_returns_with_tc_matrix(signal_df,
                                                                    returns_df,
                                                                    tc=tc)
        _pnl.columns = pnl_cols

        # portfolio is average of the underlying signals: should we sum them or average them?
        if hasattr(br, 'portfolio_combination'):
            if br.portfolio_combination == 'sum':
                portfolio = pandas.DataFrame(data=_pnl.sum(axis=1),
                                             index=_pnl.index,
                                             columns=['Portfolio'])
            elif br.portfolio_combination == 'mean':
                portfolio = pandas.DataFrame(data=_pnl.mean(axis=1),
                                             index=_pnl.index,
                                             columns=['Portfolio'])
        else:
            portfolio = pandas.DataFrame(data=_pnl.mean(axis=1),
                                         index=_pnl.index,
                                         columns=['Portfolio'])

        portfolio_leverage_df = pandas.DataFrame(data=numpy.ones(
            len(_pnl.index)),
                                                 index=_pnl.index,
                                                 columns=['Portfolio'])

        # should we apply vol target on a portfolio level basis?
        if hasattr(br, 'portfolio_vol_adjust'):
            if br.portfolio_vol_adjust is True:
                risk_engine = RiskEngine()

                portfolio, portfolio_leverage_df = risk_engine.calculate_vol_adjusted_returns(
                    portfolio, br=br)

        self._portfolio = portfolio
        self._signal = signal_df  # individual signals (before portfolio leverage)
        self._portfolio_leverage = portfolio_leverage_df  # leverage on portfolio

        # multiply portfolio leverage * individual signals to get final position signals
        length_cols = len(signal_df.columns)
        leverage_matrix = numpy.repeat(
            portfolio_leverage_df.values.flatten()[numpy.newaxis, :],
            length_cols, 0)

        # final portfolio signals (including signal & portfolio leverage)
        self._portfolio_signal = pandas.DataFrame(data=numpy.multiply(
            numpy.transpose(leverage_matrix), signal_df.values),
                                                  index=signal_df.index,
                                                  columns=signal_df.columns)

        if hasattr(br, 'portfolio_combination'):
            if br.portfolio_combination == 'sum':
                pass
            elif br.portfolio_combination == 'mean':
                self._portfolio_signal = self._portfolio_signal / float(
                    length_cols)
        else:
            self._portfolio_signal = self._portfolio_signal / float(
                length_cols)

        self._pnl = _pnl  # individual signals P&L

        # TODO FIX very slow - hence only calculate on demand
        _pnl_trades = None
        # _pnl_trades = calculations.calculate_individual_trade_gains(signal_df, _pnl)
        self._pnl_trades = _pnl_trades

        self._ret_stats_pnl = RetStats()
        self._ret_stats_pnl.calculate_ret_stats(self._pnl, br.ann_factor)

        self._portfolio.columns = ['Port']
        self._ret_stats_portfolio = RetStats()
        self._ret_stats_portfolio.calculate_ret_stats(self._portfolio,
                                                      br.ann_factor)

        self._cumpnl = calculations.create_mult_index(
            self._pnl)  # individual signals cumulative P&L
        self._cumpnl.columns = pnl_cols

        self._cumportfolio = calculations.create_mult_index(
            self._portfolio)  # portfolio cumulative P&L
        self._cumportfolio.columns = ['Port']
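For intuition, the core of the signal P&L above boils down to applying yesterday's (possibly vol-scaled) signal to today's return and charging a cost proportional to the change in position. Below is a minimal pandas sketch of that convention; it is an assumption about what calculate_signal_returns_with_tc_matrix roughly does, not the library's actual implementation.

import numpy as np
import pandas as pd

dates = pd.bdate_range('2020-01-01', periods=6)
returns_df = pd.DataFrame({'EURUSD.close': [0.001, -0.002, 0.003, 0.001, -0.001, 0.002]},
                          index=dates)
signal_df = pd.DataFrame({'EURUSD.close': [1.0, 1.0, -1.0, -1.0, 0.0, 1.0]}, index=dates)

tc = 2.0 / (100.0 * 100.0)  # 2bp of notional per unit of turnover (illustrative)

lagged_signal = signal_df.shift(1).fillna(0.0)              # trade yesterday's signal on today's return
turnover = signal_df.diff().abs().fillna(signal_df.abs())   # changes in position incur costs

pnl_df = lagged_signal * returns_df.values - turnover * tc
print(pnl_df)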
Code example #14
File: eventstudy.py  Project: treksis/finmarketpy
    def get_intraday_moves_over_custom_event(self,
                                             data_frame_rets,
                                             ef_time_frame,
                                             vol=False,
                                             minute_start=5,
                                             mins=3 * 60,
                                             min_offset=0,
                                             create_index=False,
                                             resample=False,
                                             freq='minutes',
                                             cumsum=True,
                                             adj_cumsum_zero_point=False,
                                             adj_zero_point=2):
        filter = Filter()

        ef_time_frame = filter.filter_time_series_by_date(
            data_frame_rets.index[0], data_frame_rets.index[-1], ef_time_frame)
        ef_time = ef_time_frame.index

        if freq == 'minutes':
            ef_time_start = ef_time - timedelta(minutes=minute_start)
            ef_time_end = ef_time + timedelta(minutes=mins)
            ann_factor = 252 * 1440
        elif freq == 'days':
            ef_time = ef_time_frame.index.normalize()
            ef_time_start = ef_time - pandas.tseries.offsets.BusinessDay(
            ) * minute_start
            ef_time_end = ef_time + pandas.tseries.offsets.BusinessDay() * mins
            ann_factor = 252
        elif freq == 'weeks':
            ef_time = ef_time_frame.index.normalize()
            ef_time_start = ef_time - pandas.tseries.offsets.Week(
            ) * minute_start
            ef_time_end = ef_time + pandas.tseries.offsets.Week() * mins
            ann_factor = 52

        ords = list(range(-minute_start + min_offset, mins + min_offset))
        lst_ords = list(ords)

        # All data needs to be equally spaced
        if resample:
            # Make sure time series is properly sampled at 1 min intervals
            if freq == 'minutes':
                data_frame_rets = data_frame_rets.resample('1min').last()
                data_frame_rets = data_frame_rets.fillna(value=0)
                data_frame_rets = filter.remove_out_FX_out_of_hours(
                    data_frame_rets)
            elif freq == 'days':
                data_frame_rets = data_frame_rets.resample('B').last()
                data_frame_rets = data_frame_rets.fillna(value=0)
            elif freq == 'weeks':
                data_frame_rets = data_frame_rets.resample('W').last()
                data_frame_rets = data_frame_rets.fillna(value=0)

        start_index = data_frame_rets.index.searchsorted(ef_time_start)
        finish_index = data_frame_rets.index.searchsorted(ef_time_end)

        data_frame = pandas.DataFrame(index=ords, columns=ef_time_frame.index)

        for i in range(0, len(ef_time_frame.index)):

            vals = data_frame_rets.iloc[start_index[i]:finish_index[i]].values

            st = ef_time_start[i]
            en = ef_time_end[i]

            # Add extra "future" history in case we are doing an event study which goes outside our data window
            # (will just be filled with NaN)
            if len(vals) < len(lst_ords):
                extend = np.zeros((len(lst_ords) - len(vals), 1)) * np.nan

                # If start window date is before we have data
                if st < data_frame_rets.index[0]:
                    vals = np.append(extend, vals)

                # If end date window is after we have data
                else:
                    vals = np.append(vals, extend)

            data_frame[ef_time_frame.index[i]] = vals

        data_frame.index.names = [None]

        if create_index:
            calculations = Calculations()
            data_frame.iloc[-minute_start + min_offset] = numpy.nan
            data_frame = calculations.create_mult_index(data_frame)
        else:
            if vol is True:
                # Annualise (if vol)
                data_frame = data_frame.rolling(
                    center=False, window=5).std() * math.sqrt(ann_factor)
            elif cumsum:

                data_frame = data_frame.cumsum()

                # Adjust DataFrame so zero point shows zero returns
                if adj_cumsum_zero_point:
                    ind = abs(minute_start) - adj_zero_point

                    for i, c in enumerate(data_frame.columns):
                        data_frame[
                            c] = data_frame[c] - data_frame[c].values[ind]

        return data_frame
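The windowing logic above relies on searchsorted to locate each event's start and end row in an equally spaced return series, then stacks the windows on a common "periods around event" axis. Here is a stripped-down, self-contained sketch of that idea with made-up data (not taken from the original file; it also assumes full data coverage around each event, so the NaN padding branch is skipped).

import numpy as np
import pandas as pd

minutes = pd.date_range('2021-03-01 08:00', periods=600, freq='1min')
rets = pd.DataFrame({'EURUSD.close': np.random.normal(0.0, 1e-4, len(minutes))}, index=minutes)

events = pd.DatetimeIndex(['2021-03-01 09:30', '2021-03-01 13:00'])
minute_start, mins = 5, 60  # 5 minutes before each event, 60 minutes after

start_index = rets.index.searchsorted(events - pd.Timedelta(minutes=minute_start))
finish_index = rets.index.searchsorted(events + pd.Timedelta(minutes=mins))

window_df = pd.DataFrame(index=range(-minute_start, mins), columns=events, dtype=float)

for i, event_time in enumerate(events):
    # One column of minutely returns per event, re-indexed on "minutes around event"
    window_df[event_time] = rets.iloc[start_index[i]:finish_index[i], 0].values

# Cumulative move around each event (the cumsum=True branch above)
print(window_df.cumsum().tail())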
Code example #15
class FXForwardsCurve(object):
    """Constructs continuous forwards time series total return indices from underlying forwards contracts.


    """

    def __init__(self, market_data_generator=None, fx_forwards_trading_tenor=market_constants.fx_forwards_trading_tenor,
                 roll_days_before=market_constants.fx_forwards_roll_days_before,
                 roll_event=market_constants.fx_forwards_roll_event, construct_via_currency='no',
                 fx_forwards_tenor_for_interpolation=market_constants.fx_forwards_tenor_for_interpolation,
                 base_depos_tenor=data_constants.base_depos_tenor,
                 roll_months=market_constants.fx_forwards_roll_months,
                 cum_index=market_constants.fx_forwards_cum_index,
                 output_calculation_fields=market_constants.output_calculation_fields):
        """Initializes FXForwardsCurve

        Parameters
        ----------
        market_data_generator : MarketDataGenerator
            Used for downloading market data

        fx_forwards_trading_tenor : str
            What is the primary forward contract being used to trade (default - '1M')

        roll_days_before : int
            Number of days before roll event to enter into a new forwards contract

        roll_event : str
            What constitutes a roll event? ('month-end', 'quarter-end', 'year-end', 'expiry')

        construct_via_currency : str
            What currency should we construct the forward via? Eg. if we asked for AUDJPY we can construct it via
            AUDUSD & JPYUSD forwards, as opposed to AUDJPY forwards (default - 'no')

        fx_forwards_tenor_for_interpolation : str(list)
            Which forwards should we use for interpolation

        base_depos_tenor : str(list)
            Which base deposits tenors do we need (this is only necessary if we want to start inferring depos)

        roll_months : int
            After how many months should we initiate a roll. Typically for trading 1M this should be 1, for 3M this
            should be 3, etc.

        cum_index : str
            In total return index, do we compute in additive or multiplicative way ('add' or 'mult')

        output_calculation_fields : bool
            Also output additional calculation fields, such as forward expiries etc., alongside the total return indices
        """

        self._market_data_generator = market_data_generator
        self._calculations = Calculations()
        self._calendar = Calendar()
        self._filter = Filter()

        self._fx_forwards_trading_tenor = fx_forwards_trading_tenor
        self._roll_days_before = roll_days_before
        self._roll_event = roll_event

        self._construct_via_currency = construct_via_currency
        self._fx_forwards_tenor_for_interpolation = fx_forwards_tenor_for_interpolation
        self._base_depos_tenor = base_depos_tenor

        self._roll_months = roll_months
        self._cum_index = cum_index
        self._output_calculation_fields = output_calculation_fields

    def generate_key(self):
        from findatapy.market.ioengine import SpeedCache

        # Don't include any "large" objects in the key
        return SpeedCache().generate_key(self, ['_market_data_generator', '_calculations', '_calendar', '_filter'])

    def fetch_continuous_time_series(self, md_request, market_data_generator, fx_forwards_trading_tenor=None,
                                     roll_days_before=None, roll_event=None,
                                     construct_via_currency=None, fx_forwards_tenor_for_interpolation=None, base_depos_tenor=None,
                                     roll_months=None, cum_index=None, output_calculation_fields=False):

        if market_data_generator is None: market_data_generator = self._market_data_generator
        if fx_forwards_trading_tenor is None: fx_forwards_trading_tenor = self._fx_forwards_trading_tenor
        if roll_days_before is None: roll_days_before = self._roll_days_before
        if roll_event is None: roll_event = self._roll_event
        if construct_via_currency is None: construct_via_currency = self._construct_via_currency
        if fx_forwards_tenor_for_interpolation is None: fx_forwards_tenor_for_interpolation = self._fx_forwards_tenor_for_interpolation
        if base_depos_tenor is None: base_depos_tenor = self._base_depos_tenor
        if roll_months is None: roll_months = self._roll_months
        if cum_index is None: cum_index = self._cum_index
        if output_calculation_fields is None: output_calculation_fields = self._output_calculation_fields

        # Eg. we construct EURJPY via EURJPY directly (note: would need to have sufficient forward data for this)
        if construct_via_currency == 'no':
            # Download FX spot, FX forwards points and base depos etc.
            market = Market(market_data_generator=market_data_generator)

            md_request_download = MarketDataRequest(md_request=md_request)

            fx_conv = FXConv()

            # CAREFUL: convert the tickers to correct notation, eg. USDEUR => EURUSD, because our data
            # should be fetched in correct convention
            md_request_download.tickers = [fx_conv.correct_notation(x) for x in md_request.tickers]
            md_request_download.category = 'fx-forwards-market'
            md_request_download.fields = 'close'
            md_request_download.abstract_curve = None
            md_request_download.fx_forwards_tenor = fx_forwards_tenor_for_interpolation
            md_request_download.base_depos_tenor = base_depos_tenor

            forwards_market_df = market.fetch_market(md_request_download)

            # Now use the original tickers
            return self.construct_total_return_index(md_request.tickers, forwards_market_df,
                                                     fx_forwards_trading_tenor=fx_forwards_trading_tenor,
                                                     roll_days_before=roll_days_before, roll_event=roll_event,
                                                     fx_forwards_tenor_for_interpolation=fx_forwards_tenor_for_interpolation,
                                                     roll_months=roll_months,
                                                     cum_index=cum_index,
                                                     output_calculation_fields=output_calculation_fields)
        else:
            # eg. we calculate via your domestic currency such as USD, so returns will be in your domestic currency
            # Hence AUDJPY would be calculated via AUDUSD and JPYUSD (subtracting the difference in returns)
            total_return_indices = []

            for tick in md_request.tickers:
                base = tick[0:3]
                terms = tick[3:6]

                md_request_base = MarketDataRequest(md_request=md_request)
                md_request_base.tickers = base + construct_via_currency

                md_request_terms = MarketDataRequest(md_request=md_request)
                md_request_terms.tickers = terms + construct_via_currency

                # Construct the base and terms separately (ie. AUDJPY => AUDUSD & JPYUSD)
                base_vals = self.fetch_continuous_time_series(md_request_base, market_data_generator,
                                     fx_forwards_trading_tenor=fx_forwards_trading_tenor,
                                     roll_days_before=roll_days_before, roll_event=roll_event,
                                     fx_forwards_tenor_for_interpolation=fx_forwards_tenor_for_interpolation,
                                     base_depos_tenor=base_depos_tenor,
                                     roll_months=roll_months, output_calculation_fields=False,
                                     cum_index=cum_index,
                                     construct_via_currency='no')

                terms_vals = self.fetch_continuous_time_series(md_request_terms, market_data_generator,
                                     fx_forwards_trading_tenor=fx_forwards_trading_tenor,
                                     roll_days_before=roll_days_before, roll_event=roll_event,
                                     fx_forwards_tenor_for_interpolation=fx_forwards_tenor_for_interpolation,
                                     base_depos_tenor=base_depos_tenor,
                                     roll_months=roll_months,
                                     cum_index=cum_index,
                                     output_calculation_fields=False,
                                     construct_via_currency='no')

                # Special case for USDUSD (and where the base or terms currency equals construct_via_currency)
                if base + terms == construct_via_currency + construct_via_currency:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    cross_rets = pd.DataFrame(0, index=base_rets.index, columns=base_rets.columns)
                elif base + construct_via_currency == construct_via_currency + construct_via_currency:
                    cross_rets = -self._calculations.calculate_returns(terms_vals)
                elif terms + construct_via_currency == construct_via_currency + construct_via_currency:
                    cross_rets = self._calculations.calculate_returns(base_vals)
                else:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    terms_rets = self._calculations.calculate_returns(terms_vals)

                    cross_rets = base_rets.sub(terms_rets.iloc[:, 0], axis=0)

                # The first return of a time series will be NaN, given we don't know the previous point
                cross_rets.iloc[0] = 0

                cross_vals = self._calculations.create_mult_index(cross_rets)
                cross_vals.columns = [tick + '-forward-tot.close']

                total_return_indices.append(cross_vals)

            return self._calculations.pandas_outer_join(total_return_indices)

    def unhedged_asset_fx(self, assets_df, asset_currency, home_curr, start_date, finish_date, spot_df=None):
        pass

    def hedged_asset_fx(self, assets_df, asset_currency, home_curr, start_date, finish_date, spot_df=None,
                        total_return_indices_df=None):
        pass

    def get_day_count_conv(self, currency):
        if currency in market_constants.currencies_with_365_basis:
            return 365.0

        return 360.0

    def construct_total_return_index(self, cross_fx, forwards_market_df,
                                     fx_forwards_trading_tenor=None,
                                     roll_days_before=None,
                                     roll_event=None,
                                     roll_months=None,
                                     fx_forwards_tenor_for_interpolation=None,
                                     cum_index=None,
                                     output_calculation_fields=False):

        if not (isinstance(cross_fx, list)):
            cross_fx = [cross_fx]

        if fx_forwards_trading_tenor is None: fx_forwards_trading_tenor = self._fx_forwards_trading_tenor
        if roll_days_before is None: roll_days_before = self._roll_days_before
        if roll_event is None: roll_event = self._roll_event
        if roll_months is None: roll_months = self._roll_months
        if fx_forwards_tenor_for_interpolation is None: fx_forwards_tenor_for_interpolation = self._fx_forwards_tenor_for_interpolation
        if cum_index is None: cum_index = self._cum_index

        total_return_index_df_agg = []

        # Remove columns where there is no data (because these points typically aren't quoted)
        forwards_market_df = forwards_market_df.dropna(how='all', axis=1)

        fx_forwards_pricer = FXForwardsPricer()

        def get_roll_date(horizon_d, delivery_d, asset_hols, month_adj=1):
            if roll_event == 'month-end':
                roll_d = horizon_d + CustomBusinessMonthEnd(roll_months + month_adj, holidays=asset_hols)
            elif roll_event == 'delivery-date':
                roll_d = delivery_d

            return (roll_d - CustomBusinessDay(n=roll_days_before, holidays=asset_hols))

        for cross in cross_fx:

            # Eg. if we specify USDUSD
            if cross[0:3] == cross[3:6]:
                total_return_index_df_agg.append(
                    pd.DataFrame(100, index=forwards_market_df.index, columns=[cross + "-forward-tot.close"]))
            else:
                # Is the FX cross in the correct convention?
                old_cross = cross
                cross = FXConv().correct_notation(cross)

                horizon_date = forwards_market_df.index

                delivery_date = []
                roll_date = []

                new_trade = np.full(len(horizon_date), False, dtype=bool)

                asset_holidays = self._calendar.get_holidays(cal=cross)

                # Get first delivery date
                delivery_date.append(
                    self._calendar.get_delivery_date_from_horizon_date(horizon_date[0],
                                                                       fx_forwards_trading_tenor, cal=cross, asset_class='fx')[0])

                # For the first month we want it to expire within that month (for consistency), hence month_adj=0 ONLY here
                roll_date.append(get_roll_date(horizon_date[0], delivery_date[0], asset_holidays, month_adj=0))

                # New trade => entry at beginning AND on every roll
                new_trade[0] = True

                # Get all the delivery dates and roll dates
                # At each "roll/trade" day we need to reset them for the new contract
                for i in range(1, len(horizon_date)):

                    # If the horizon date has reached yesterday's roll date, we have a new roll/trade
                    if (horizon_date[i] - roll_date[i-1]).days == 0:
                        new_trade[i] = True
                    # else:
                    #    new_trade[i] = False

                    # If we're entering a new trade/contract, we need to get new delivery and roll dates
                    if new_trade[i]:
                        delivery_date.append(self._calendar.get_delivery_date_from_horizon_date(horizon_date[i],
                            fx_forwards_trading_tenor, cal=cross, asset_class='fx')[0])

                        roll_date.append(get_roll_date(horizon_date[i], delivery_date[i], asset_holidays))
                    else:
                        # Otherwise use previous delivery and roll dates, because we're still holding same contract
                        delivery_date.append(delivery_date[i-1])
                        roll_date.append(roll_date[i-1])

                interpolated_forward = fx_forwards_pricer.price_instrument(cross, horizon_date, delivery_date, market_df=forwards_market_df,
                         fx_forwards_tenor_for_interpolation=fx_forwards_tenor_for_interpolation)[cross + '-interpolated-outright-forward.close'].values

                # To record MTM prices
                mtm = np.copy(interpolated_forward)

                # Note: may need to add discount factor when marking to market forwards?
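                # A possible sketch (an assumption, not implemented here): discount the forward MTM from
                # delivery back to the horizon date using a deposit rate, eg.
                #   days_left = (delivery_date[i] - horizon_date[i]).days
                #   mtm[i] = mtm[i] / (1.0 + depo_rate[i] * days_left / 360.0)
                # where depo_rate is a hypothetical series taken from the base depos data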

                # Special case: for very first trading day
                # mtm[0] = interpolated_forward[0]

                # On roll dates, the MTM is taken from the previous (expiring) forward contract (interpolated);
                # otherwise it is the current forward contract
                for i in range(1, len(horizon_date)):
                    if new_trade[i]:
                        mtm[i] = fx_forwards_pricer.price_instrument(cross, horizon_date[i], delivery_date[i-1],
                            market_df=forwards_market_df,
                            fx_forwards_tenor_for_interpolation=fx_forwards_tenor_for_interpolation) \
                                [cross + '-interpolated-outright-forward.close'].values
                    # else:
                    #    mtm[i] = interpolated_forward[i]

                # Eg. if we asked for USDEUR, we first constructed spot/forwards for EURUSD
                # and then need to invert it
                if old_cross != cross:
                    mtm = 1.0 / mtm
                    interpolated_forward = 1.0 / interpolated_forward

                forward_rets = mtm / np.roll(interpolated_forward, 1) - 1.0
                forward_rets[0] = 0

                if cum_index == 'mult':
                    cum_rets = 100 * np.cumprod(1.0 + forward_rets)
                elif cum_index == 'add':
                    cum_rets = 100 + 100 * np.cumsum(forward_rets)
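                # The two branches above: 'mult' compounds the daily forward returns geometrically
                # (index = 100 * prod(1 + r)), while 'add' accumulates them arithmetically
                # (index = 100 + 100 * sum(r))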

                total_return_index_df = pd.DataFrame(index=horizon_date, columns=[cross + "-forward-tot.close"])
                total_return_index_df[cross + "-forward-tot.close"] = cum_rets

                if output_calculation_fields:
                    total_return_index_df[cross + '-interpolated-outright-forward.close'] = interpolated_forward
                    total_return_index_df[cross + '-mtm.close'] = mtm
                    total_return_index_df[cross + '-roll.close'] = new_trade
                    total_return_index_df[cross + '.roll-date'] = roll_date
                    total_return_index_df[cross + '.delivery-date'] = delivery_date
                    total_return_index_df[cross + '-forward-return.close'] = forward_rets

                total_return_index_df_agg.append(total_return_index_df)

        return self._calculations.pandas_outer_join(total_return_index_df_agg)
Code example #16
class FXForwardsCurve(object):
    """Constructs continuous forwards time series total return indices from underlying forwards contracts. Incomplete!

    """
    def __init__(self,
                 market_data_generator=None,
                 fx_forwards_trading_tenor='1M',
                 roll_date=0,
                 construct_via_currency='no',
                 fx_forwards_tenor=constants.fx_forwards_tenor,
                 base_depos_tenor=constants.base_depos_tenor):

        self._market_data_generator = market_data_generator
        self._calculations = Calculations()
        self._calendar = Calendar()

        self._fx_forwards_trading_tenor = fx_forwards_trading_tenor
        self._roll_date = roll_date
        self._construct_via_currency = construct_via_currency
        self._fx_forwards_tenor = fx_forwards_tenor
        self._base_depos_tenor = base_depos_tenor

    def generate_key(self):
        from findatapy.market.ioengine import SpeedCache

        # Don't include any "large" objects in the key
        return SpeedCache().generate_key(
            self, ['_market_data_generator', '_calculations', '_calendar'])

    def fetch_continuous_time_series(self,
                                     md_request,
                                     market_data_generator,
                                     fx_forwards_trading_tenor=None,
                                     roll_date=None,
                                     construct_via_currency=None,
                                     fx_forwards_tenor=None,
                                     base_depos_tenor=None):

        if market_data_generator is None:
            market_data_generator = self._market_data_generator
        if fx_forwards_trading_tenor is None:
            fx_forwards_trading_tenor = self._fx_forwards_trading_tenor
        if roll_date is None: roll_date = self._roll_date
        if construct_via_currency is None:
            construct_via_currency = self._construct_via_currency
        if fx_forwards_tenor is None:
            fx_forwards_tenor = self._fx_forwards_tenor
        if base_depos_tenor is None: base_depos_tenor = self._base_depos_tenor

        # Eg. we construct EURJPY via EURJPY directly (note: would need to have sufficient forward data for this)
        if construct_via_currency == 'no':
            # Download FX spot, FX forwards points and base depos
            market = Market(market_data_generator=market_data_generator)

            md_request_download = MarketDataRequest(md_request=md_request)

            md_request_download.category = 'fx-forwards-market'
            md_request_download.fields = 'close'
            md_request_download.abstract_curve = None
            md_request_download.fx_forwards_tenor = fx_forwards_tenor
            md_request_download.base_depos_tenor = base_depos_tenor

            forwards_market_df = market.fetch_market(md_request_download)

            return self.construct_total_return_index(
                md_request.tickers,
                fx_forwards_trading_tenor,
                roll_date,
                forwards_market_df,
                fx_forwards_tenor=fx_forwards_tenor,
                base_depos_tenor=base_depos_tenor)
        else:
            # eg. we calculate via your domestic currency such as USD, so returns will be in your domestic currency
            # Hence AUDJPY would be calculated via AUDUSD and JPYUSD (subtracting the difference in returns)
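            # Worked example (ignoring second-order cross terms, which is the simplification the
            # subtraction below relies on): if AUDUSD returns +1.0% and JPYUSD returns +0.4% on a
            # given day, the synthetic AUDJPY return is approximately +1.0% - 0.4% = +0.6%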
            total_return_indices = []

            for tick in md_request.tickers:
                base = tick[0:3]
                terms = tick[3:6]

                md_request_base = MarketDataRequest(md_request=md_request)
                md_request_base.tickers = base + construct_via_currency

                md_request_terms = MarketDataRequest(md_request=md_request)
                md_request_terms.tickers = terms + construct_via_currency

                base_vals = self.fetch_continuous_time_series(
                    md_request_base,
                    market_data_generator,
                    construct_via_currency='no')
                terms_vals = self.fetch_continuous_time_series(
                    md_request_terms,
                    market_data_generator,
                    construct_via_currency='no')

                # Special cases when constructing via USD: the cross is USDUSD itself, or the base or terms
                # currency is already USD
                if base + terms == 'USDUSD':
                    base_rets = self._calculations.calculate_returns(base_vals)
                    cross_rets = pd.DataFrame(0,
                                              index=base_rets.index,
                                              columns=base_rets.columns)
                elif base + 'USD' == 'USDUSD':
                    cross_rets = -self._calculations.calculate_returns(
                        terms_vals)
                elif terms + 'USD' == 'USDUSD':
                    cross_rets = self._calculations.calculate_returns(
                        base_vals)
                else:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    terms_rets = self._calculations.calculate_returns(
                        terms_vals)

                    cross_rets = base_rets.sub(terms_rets.iloc[:, 0], axis=0)

                # The first return of a time series will be NaN, given we don't know the previous point
                cross_rets.iloc[0] = 0

                cross_vals = self._calculations.create_mult_index(cross_rets)
                cross_vals.columns = [tick + '-tot.close']

                total_return_indices.append(cross_vals)

            return self._calculations.pandas_outer_join(total_return_indices)

    def unhedged_asset_fx(self,
                          assets_df,
                          asset_currency,
                          home_curr,
                          start_date,
                          finish_date,
                          spot_df=None):
        pass

    def hedged_asset_fx(self,
                        assets_df,
                        asset_currency,
                        home_curr,
                        start_date,
                        finish_date,
                        spot_df=None,
                        total_return_indices_df=None):
        pass

    def get_day_count_conv(self, currency):
        if currency in ['AUD', 'CAD', 'GBP', 'NZD']:
            return 365.0

        return 360.0

    def construct_total_return_index(
            self,
            cross_fx,
            fx_forwards_trading_tenor,
            roll_date,
            forwards_market_df,
            fx_forwards_tenor=constants.fx_forwards_tenor,
            base_depos_tenor=constants.base_depos_tenor):

        if not (isinstance(cross_fx, list)):
            cross_fx = [cross_fx]

        total_return_index_agg = []

        # Remove columns where there is no data (because these points typically aren't quoted)
        forwards_market_df = forwards_market_df.dropna(axis=1)

        for cross in cross_fx:

            # Eg. if we specify USDUSD
            if cross[0:3] == cross[3:6]:
                total_return_index_agg.append(
                    pd.DataFrame(100,
                                 index=forwards_market_df.index,
                                 columns=[cross + "-tot.close"]))
            else:
                spot = forwards_market_df[cross + ".close"].to_frame()

                fx_forwards_tenor_pickout = []

                for f in fx_forwards_tenor:
                    # Pick out only those forward point tenors which are actually quoted in the market data
                    if cross + f + ".close" in forwards_market_df.columns:
                        fx_forwards_tenor_pickout.append(f)

                    if f == fx_forwards_trading_tenor:
                        break

                divisor = 10000.0

                if cross[3:6] == 'JPY':
                    divisor = 100.0

                # Selecting with a list of columns already returns a DataFrame, so no to_frame() is needed
                forward_pts = forwards_market_df[[cross + x + ".close" for x in fx_forwards_tenor_pickout]] \
                              / divisor

                outright = spot + forward_pts

                # Calculate the time difference between each data point
                spot['index_col'] = spot.index
                time = spot['index_col'].diff()
                spot = spot.drop(columns=['index_col'])

                total_return_index = pd.DataFrame(
                    index=spot.index, columns=[cross + "-tot.close"])
                total_return_index.iloc[0] = 100

                time_diff = time.values.astype(
                    float) / 86400000000000.0  # get time difference in days

                # TODO incomplete forwards calculations
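                # A rough sketch of what could fill this in (an assumption only, NOT the library's
                # implementation): accrue the roll-down between the traded outright and spot, eg.
                #   carry = (outright - spot) / spot * (time_diff / days_to_delivery)
                #   index[i] = index[i-1] * (1 + spot_return[i] + carry[i])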
                total_return_index_agg.append(total_return_index)

        return self._calculations.pandas_outer_join(total_return_index_agg)
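A minimal usage sketch for the class above (assumptions: findatapy is installed and a Bloomberg-style
data source is configured; the tickers, dates and data_source below are placeholders for illustration):

from findatapy.market import MarketDataGenerator, MarketDataRequest

# Hypothetical request: daily EURUSD data over 2020 from a configured Bloomberg feed
md_request = MarketDataRequest(start_date='01 Jan 2020', finish_date='01 Jan 2021',
                               data_source='bloomberg', cut='NYC', category='fx',
                               tickers=['EURUSD'], fields=['close'])

# Roll a 1M forward and construct the continuous total return index directly (not via USD)
fx_forwards_curve = FXForwardsCurve(fx_forwards_trading_tenor='1M')
total_return_index_df = fx_forwards_curve.fetch_continuous_time_series(
    md_request, MarketDataGenerator(), construct_via_currency='no')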
Code example #17
    chart.plot(
        calculations.create_mult_index_from_prices(
            prepare_indices(cross=cross,
                            df_option_tot=df_cuemacro_option_put_tot,
                            df_option_tc=df_cuemacro_option_put_tc,
                            df_spot_tot=df_bbg_tot)))

    # P&L from put option + TC and total returns from spot
    chart.plot(
        calculations.create_mult_index_from_prices(
            prepare_indices(cross=cross,
                            df_option_tc=df_cuemacro_option_put_tc,
                            df_spot_tot=df_bbg_tot)))

    # P&L for total returns from spot and total returns from 2*put option + TC (ie. hedged portfolio)
    chart.plot(calculations.create_mult_index(df_hedged))

    # Plot delta from put option
    chart.plot(df_cuemacro_option_put_tot[cross + '-delta.close'])

###### Fetch market data for pricing EURUSD options from 2006-2020 (ie. FX spot, FX forwards, FX deposits and FX vol quotes)
###### Construct volatility surface using FinancePy library underneath, using polynomial interpolation
###### Enter a short 1W straddle, mark it to market every day, and at expiry roll into another 1W straddle
if run_example == 2 or run_example == 0:

    # Warning: make sure you choose dates where there is a full vol surface! If vol points in the tenors you are
    # looking at are missing, then interpolation will fail (likewise if eg. spot data is missing)
    start_date = '08 Mar 2007'
    finish_date = '31 Dec 2020'  # Thursday
    # start_date = '09 Mar 2007'; finish_date = '31 Dec 2014'
    # start_date = '04 Jan 2006'; finish_date = '31 Dec 2008'
Code example #18
class FXSpotCurve(object):
    """Construct total return (spot) indices for FX. In future will also convert assets from local currency to foreign currency
    denomination and construct indices from forwards series.

    """
    def __init__(self,
                 market_data_generator=None,
                 depo_tenor=market_constants.spot_depo_tenor,
                 construct_via_currency='no',
                 output_calculation_fields=market_constants.
                 output_calculation_fields):
        self._market_data_generator = market_data_generator
        self._calculations = Calculations()

        self._depo_tenor = depo_tenor
        self._construct_via_currency = construct_via_currency
        self._output_calculation_fields = output_calculation_fields

    def generate_key(self):
        from findatapy.market.ioengine import SpeedCache

        # Don't include any "large" objects in the key
        return SpeedCache().generate_key(
            self, ['_market_data_generator', '_calculations'])

    def fetch_continuous_time_series(self,
                                     md_request,
                                     market_data_generator,
                                     depo_tenor=None,
                                     construct_via_currency=None,
                                     output_calculation_fields=None):

        if market_data_generator is None:
            market_data_generator = self._market_data_generator
        if depo_tenor is None: depo_tenor = self._depo_tenor
        if construct_via_currency is None:
            construct_via_currency = self._construct_via_currency
        if output_calculation_fields is None:
            output_calculation_fields = self._output_calculation_fields

        # Eg. we construct AUDJPY via AUDJPY directly
        if construct_via_currency == 'no':
            base_depo_tickers = [
                x[0:3] + self._depo_tenor for x in md_request.tickers
            ]
            terms_depo_tickers = [
                x[3:6] + self._depo_tenor for x in md_request.tickers
            ]

            depo_tickers = list(set(base_depo_tickers + terms_depo_tickers))

            market = Market(market_data_generator=market_data_generator)

            # Deposit data for base and terms currency
            md_request_download = MarketDataRequest(md_request=md_request)

            md_request_download.tickers = depo_tickers
            md_request_download.category = 'base-depos'
            md_request_download.fields = 'close'
            md_request_download.abstract_curve = None

            depo_df = market.fetch_market(md_request_download)

            # Spot data
            md_request_download.tickers = md_request.tickers
            md_request_download.category = 'fx'

            spot_df = market.fetch_market(md_request_download)

            return self.construct_total_return_index(
                md_request.tickers,
                self._calculations.pandas_outer_join([spot_df, depo_df]),
                depo_tenor=depo_tenor,
                output_calculation_fields=output_calculation_fields)
        else:
            # eg. we calculate via your domestic currency such as USD, so returns will be in your domestic currency
            # Hence AUDJPY would be calculated via AUDUSD and JPYUSD (subtracting the difference in returns)
            total_return_indices = []

            for tick in md_request.tickers:
                base = tick[0:3]
                terms = tick[3:6]

                md_request_base = MarketDataRequest(md_request=md_request)
                md_request_base.tickers = base + construct_via_currency

                md_request_terms = MarketDataRequest(md_request=md_request)
                md_request_terms.tickers = terms + construct_via_currency

                base_vals = self.fetch_continuous_time_series(
                    md_request_base,
                    market_data_generator,
                    construct_via_currency='no')
                terms_vals = self.fetch_continuous_time_series(
                    md_request_terms,
                    market_data_generator,
                    construct_via_currency='no')

                # Special cases: the cross is the construct currency against itself, or the base or terms
                # currency already equals the construct currency
                if base + terms == construct_via_currency + construct_via_currency:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    cross_rets = pd.DataFrame(0,
                                              index=base_rets.index,
                                              columns=base_rets.columns)
                elif base + construct_via_currency == construct_via_currency + construct_via_currency:
                    cross_rets = -self._calculations.calculate_returns(
                        terms_vals)
                elif terms + construct_via_currency == construct_via_currency + construct_via_currency:
                    cross_rets = self._calculations.calculate_returns(
                        base_vals)
                else:
                    base_rets = self._calculations.calculate_returns(base_vals)
                    terms_rets = self._calculations.calculate_returns(
                        terms_vals)

                    cross_rets = base_rets.sub(terms_rets.iloc[:, 0], axis=0)

                # The first return of a time series will be NaN, given we don't know the previous point
                cross_rets.iloc[0] = 0

                cross_vals = self._calculations.create_mult_index(cross_rets)
                cross_vals.columns = [tick + '-tot.close']

                total_return_indices.append(cross_vals)

            return self._calculations.pandas_outer_join(total_return_indices)

    def unhedged_asset_fx(self,
                          assets_df,
                          asset_currency,
                          home_curr,
                          start_date,
                          finish_date,
                          spot_df=None):
        pass

    def hedged_asset_fx(self,
                        assets_df,
                        asset_currency,
                        home_curr,
                        start_date,
                        finish_date,
                        spot_df=None,
                        total_return_indices_df=None):
        pass

    def get_day_count_conv(self, currency):
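        # Money-market day count convention: currencies configured with a 365-day basis
        # (typically AUD, CAD, GBP and NZD) use ACT/365, everything else defaults to ACT/360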
        if currency in market_constants.currencies_with_365_basis:
            return 365.0

        return 360.0

    def construct_total_return_index(self,
                                     cross_fx,
                                     market_df,
                                     depo_tenor=None,
                                     output_calculation_fields=False):
        """Creates total return index for selected FX crosses from spot and deposit data

        Parameters
        ----------
        cross_fx : String
            Crosses to construct total return indices for (can be a list)
        market_df : pd.DataFrame
            Market data containing both spot and deposit series for the selected crosses
        depo_tenor : String
            Tenor of deposit rates to use to compute carry (typically ON for spot)
        output_calculation_fields : bool
            Also output intermediate calculation fields (eg. carry and spot returns)

        Returns
        -------
        pd.DataFrame
        """
        if not (isinstance(cross_fx, list)):
            cross_fx = [cross_fx]

        if depo_tenor is None: depo_tenor = self._depo_tenor

        total_return_index_df_agg = []

        for cross in cross_fx:
            # Get the spot series, base deposit
            base_deposit = market_df[cross[0:3] + depo_tenor +
                                     ".close"].to_frame()
            terms_deposit = market_df[cross[3:6] + depo_tenor +
                                      ".close"].to_frame()

            # Eg. if we specify USDUSD
            if cross[0:3] == cross[3:6]:
                total_return_index_df_agg.append(
                    pd.DataFrame(100,
                                 index=base_deposit.index,
                                 columns=[cross + "-tot.close"]))
            else:
                carry = base_deposit.join(terms_deposit, how='inner')

                spot = market_df[cross + ".close"].to_frame()

                base_daycount = self.get_day_count_conv(cross[0:3])
                terms_daycount = self.get_day_count_conv(cross[3:6])

                # Align the base & terms deposits series to spot (this should already be done by construction)
                # spot, carry = spot.align(carry, join='left', axis=0)

                # Sometimes depo data can be patchy, ok to fill down, given not very volatile (don't do this with spot!)
                carry = carry.fillna(method='ffill') / 100.0

                # In case there are values missing at start of list (fudge for old data!)
                carry = carry.fillna(method='bfill')

                spot = spot[cross + ".close"].to_frame()

                spot_vals = spot[cross + ".close"].values
                base_deposit_vals = carry[cross[0:3] + depo_tenor +
                                          ".close"].values
                terms_deposit_vals = carry[cross[3:6] + depo_tenor +
                                           ".close"].values

                # Calculate the time difference between each data point (flooring it to whole days, because carry
                # is accrued when there's a new day)
                spot['index_col'] = spot.index.floor('D')
                time = spot['index_col'].diff()
                spot = spot.drop(columns=['index_col'])

                time_diff = time.values.astype(
                    float) / 86400000000000.0  # get time difference in days
                time_diff[0] = 0.0
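                # Intuition for the accrual (the exact recursion lives in _spot_index_numba): holding
                # the cross overnight roughly earns the base deposit and pays the terms deposit, so
                #   index[i] ~= index[i-1] * (spot[i] / spot[i-1]
                #                             + (base_depo[i] / base_daycount - terms_depo[i] / terms_daycount) * time_diff[i])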

                # Use Numba to do total return index calculation given has many loops
                total_return_index_df = pd.DataFrame(
                    index=spot.index,
                    columns=[cross + "-tot.close"],
                    data=_spot_index_numba(spot_vals, time_diff,
                                           base_deposit_vals,
                                           terms_deposit_vals, base_daycount,
                                           terms_daycount))

                if output_calculation_fields:
                    total_return_index_df[cross + '-carry.close'] = carry
                    total_return_index_df[
                        cross +
                        '-tot-return.close'] = total_return_index_df / total_return_index_df.shift(
                            1) - 1.0
                    total_return_index_df[
                        cross +
                        '-spot-return.close'] = spot / spot.shift(1) - 1.0

                total_return_index_df_agg.append(total_return_index_df)

        return self._calculations.pandas_outer_join(total_return_index_df_agg)
Code example #19
    def calculate_trading_PnL(self, br, asset_a_df, signal_df, contract_value_df = None):
        """Calculates P&L of a trading strategy and statistics to be retrieved later

        Calculates the P&L for each asset/signal combination and also for the final strategy, applying appropriate
        weighting in the portfolio, depending on predefined parameters, for example:
            static weighting for each asset
            static weighting for each asset + vol weighting for each asset
            static weighting for each asset + vol weighting for each asset + vol weighting for the portfolio

        Parameters
        ----------
        br : BacktestRequest
            Parameters for the backtest specifying start date, finish date, transaction costs etc.

        asset_a_df : pandas.DataFrame
            Asset prices to be traded

        signal_df : pandas.DataFrame
            Signals for the trading strategy

        contract_value_df : pandas.DataFrame
            Daily size of contracts
        """

        calculations = Calculations()
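        # Overview of the steps below: align signals with tradable asset dates, optionally apply a
        # stop loss/take profit per trade, optionally vol target each signal, combine the signals
        # into a portfolio (sum/mean/custom weights), optionally vol target the portfolio, then
        # compute positions in notional and contract terms plus the associated return statistics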

        # make sure the dates of both traded asset and signal are aligned properly
        asset_df, signal_df = asset_a_df.align(signal_df, join='left', axis = 'index')

        if (contract_value_df is not None):
            asset_df, contract_value_df = asset_df.align(contract_value_df, join='left', axis='index')
            contract_value_df = contract_value_df.fillna(method='ffill')  # fill down asset holidays (we won't trade on these days)

        # non-trading days
        non_trading_days = numpy.isnan(asset_df.values)

        # only allow signals to change on the days when we can trade assets
        signal_df = signal_df.mask(non_trading_days)                # fill asset holidays with NaN signals
        signal_df = signal_df.fillna(method='ffill')                # fill these down

        tc = br.spot_tc_bp

        signal_cols = signal_df.columns.values
        asset_df_cols = asset_df.columns.values

        pnl_cols = []

        for i in range(0, len(asset_df_cols)):
            pnl_cols.append(asset_df_cols[i] + " / " + signal_cols[i])

        asset_df_copy = asset_df.copy()
        asset_df = asset_df.fillna(method='ffill')        # fill down asset holidays (we won't trade on these days)
        returns_df = calculations.calculate_returns(asset_df)

        # apply a stop loss/take profit to every trade if this has been specified
        # do this before we start to do vol weighting etc.
        if hasattr(br, 'take_profit') and hasattr(br, 'stop_loss'):
            returns_df = calculations.calculate_returns(asset_df)

            temp_strategy_rets_df = calculations.calculate_signal_returns(signal_df, returns_df)
            trade_rets_df = calculations.calculate_cum_rets_trades(signal_df, temp_strategy_rets_df)

            # pre_signal_df = signal_df.copy()

            signal_df = calculations.calculate_risk_stop_signals(signal_df, trade_rets_df, br.stop_loss, br.take_profit)

            # make sure we can't trade where asset price is undefined and carry over signal
            signal_df = signal_df.mask(non_trading_days)  # fill asset holidays with NaN signals
            signal_df = signal_df.fillna(method='ffill')  # fill these down (when the asset is not trading)

            # signal_df.columns = [x + '_final_signal' for x in signal_df.columns]

            # for debugging purposes
            # if False:
            #     signal_df_copy = signal_df.copy()
            #     trade_rets_df_copy = trade_rets_df.copy()
            #
            #     asset_df_copy.columns = [x + '_asset' for x in temp_strategy_rets_df.columns]
            #     temp_strategy_rets_df.columns = [x + '_strategy_rets' for x in temp_strategy_rets_df.columns]
            #     signal_df_copy.columns = [x + '_final_signal' for x in signal_df_copy.columns]
            #     trade_rets_df_copy.columns = [x + '_cum_trade' for x in trade_rets_df_copy.columns]
            #
            #     to_plot = calculations.pandas_outer_join([asset_df_copy, pre_signal_df, signal_df_copy, trade_rets_df_copy, temp_strategy_rets_df])
            #     to_plot.to_csv('test.csv')

        # do we have a vol target for individual signals?
        if hasattr(br, 'signal_vol_adjust'):
            if br.signal_vol_adjust is True:
                risk_engine = RiskEngine()

                if not(hasattr(br, 'signal_vol_resample_type')):
                    br.signal_vol_resample_type = 'mean'

                if not(hasattr(br, 'signal_vol_resample_freq')):
                    br.signal_vol_resample_freq = None

                if not(hasattr(br, 'signal_vol_period_shift')):
                    br.signal_vol_period_shift = 0

                leverage_df = risk_engine.calculate_leverage_factor(returns_df, br.signal_vol_target, br.signal_vol_max_leverage,
                                               br.signal_vol_periods, br.signal_vol_obs_in_year,
                                               br.signal_vol_rebalance_freq, br.signal_vol_resample_freq,
                                               br.signal_vol_resample_type, period_shift=br.signal_vol_period_shift)
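                # Typical vol targeting (exact details live in RiskEngine): leverage is roughly
                # signal_vol_target / realised vol measured over signal_vol_periods observations,
                # capped at signal_vol_max_leverage and rebalanced at signal_vol_rebalance_freq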

                signal_df = pandas.DataFrame(
                    signal_df.values * leverage_df.values, index = signal_df.index, columns = signal_df.columns)

                self._individual_leverage = leverage_df     # contains leverage of individual signal (before portfolio vol target)

        _pnl = calculations.calculate_signal_returns_with_tc_matrix(signal_df, returns_df, tc = tc)
        _pnl.columns = pnl_cols

        adjusted_weights_matrix = None

        # Combine the underlying signals into a portfolio: should we sum them, average them or use custom weights?
        if hasattr(br, 'portfolio_combination'):
            if br.portfolio_combination == 'sum':
                portfolio = pandas.DataFrame(data = _pnl.sum(axis = 1), index = _pnl.index, columns = ['Portfolio'])
            elif br.portfolio_combination == 'mean':
                portfolio = pandas.DataFrame(data = _pnl.mean(axis = 1), index = _pnl.index, columns = ['Portfolio'])

                adjusted_weights_matrix = self.create_portfolio_weights(br, _pnl, method='mean')
            elif isinstance(br.portfolio_combination, dict):
                # get the weights for each asset
                adjusted_weights_matrix = self.create_portfolio_weights(br, _pnl, method='weighted')

                portfolio = pandas.DataFrame(data=(_pnl.values * adjusted_weights_matrix), index=_pnl.index)
                is_all_na = pandas.isnull(portfolio).all(axis=1)
                portfolio = pandas.DataFrame(portfolio.sum(axis = 1), columns = ['Portfolio'])

                # overwrite days when every asset's P&L was NaN with NaN
                portfolio[is_all_na] = numpy.nan
        else:
            portfolio = pandas.DataFrame(data = _pnl.mean(axis = 1), index = _pnl.index, columns = ['Portfolio'])

            adjusted_weights_matrix = self.create_portfolio_weights(br, _pnl, method='mean')

        portfolio_leverage_df = pandas.DataFrame(data = numpy.ones(len(_pnl.index)), index = _pnl.index, columns = ['Portfolio'])

        # should we apply vol target on a portfolio level basis?
        if hasattr(br, 'portfolio_vol_adjust'):
            if br.portfolio_vol_adjust is True:
                risk_engine = RiskEngine()

                portfolio, portfolio_leverage_df = risk_engine.calculate_vol_adjusted_returns(portfolio, br = br)

        self._portfolio = portfolio
        self._signal = signal_df                            # individual signals (before portfolio leverage)
        self._portfolio_leverage = portfolio_leverage_df    # leverage on portfolio

        # multiply portfolio leverage * individual signals to get final position signals
        length_cols = len(signal_df.columns)
        leverage_matrix = numpy.repeat(portfolio_leverage_df.values.flatten()[numpy.newaxis,:], length_cols, 0)
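        # leverage_matrix has shape (num_signals, num_dates); it is transposed below so it lines up
        # elementwise with signal_df's (num_dates, num_signals) values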

        # final portfolio signals (including signal & portfolio leverage)
        self._portfolio_signal = pandas.DataFrame(
            data = numpy.multiply(numpy.transpose(leverage_matrix), signal_df.values),
            index = signal_df.index, columns = signal_df.columns)

        if hasattr(br, 'portfolio_combination'):
            if br.portfolio_combination == 'sum':
                pass
            elif br.portfolio_combination == 'mean' or isinstance(br.portfolio_combination, dict):
                self._portfolio_signal = pandas.DataFrame(data=(self._portfolio_signal.values * adjusted_weights_matrix),
                                             index=self._portfolio_signal.index,
                                             columns=self._portfolio_signal.columns)
        else:
            self._portfolio_signal = pandas.DataFrame(data=(self._portfolio_signal.values * adjusted_weights_matrix),
                                                      index=self._portfolio_signal.index,
                                                      columns=self._portfolio_signal.columns)

        # calculate each period of trades
        self._portfolio_trade = self._portfolio_signal - self._portfolio_signal.shift(1)

        self._portfolio_signal_notional = None
        self._portfolio_signal_trade_notional = None

        self._portfolio_signal_contracts = None
        self._portfolio_signal_trade_contracts = None

        # also create other measures of portfolio
        # portfolio & trades in terms of a predefined notional (in USD)
        # portfolio & trades in terms of contract sizes (particularly useful for futures)
        if hasattr(br, 'portfolio_notional_size'):
            # express positions in terms of the notional size specified
            self._portfolio_signal_notional = self._portfolio_signal * br.portfolio_notional_size
            self._portfolio_signal_trade_notional = self._portfolio_signal_notional - self._portfolio_signal_notional.shift(1)

            # get the positions in terms of the contract sizes
            notional_copy = self._portfolio_signal_notional.copy(deep=True)
            notional_copy_cols = [x.split('.')[0] for x in notional_copy.columns]
            notional_copy_cols = [x + '.contract-value' for x in notional_copy_cols]

            notional_copy.columns = notional_copy_cols

            contract_value_df = contract_value_df[notional_copy_cols]
            notional_df, contract_value_df = notional_copy.align(contract_value_df, join='left', axis='index')

            # Careful: make sure the orders of magnitude are the same for the notional and the contract value
            self._portfolio_signal_contracts = notional_df / contract_value_df
            self._portfolio_signal_contracts.columns = self._portfolio_signal_notional.columns
            self._portfolio_signal_trade_contracts = self._portfolio_signal_contracts - self._portfolio_signal_contracts.shift(1)

        self._pnl = _pnl                                                                    # individual signals P&L

        # TODO FIX very slow - hence only calculate on demand
        _pnl_trades = None
        # _pnl_trades = calculations.calculate_individual_trade_gains(signal_df, _pnl)
        self._pnl_trades = _pnl_trades

        self._ret_stats_pnl = RetStats()
        self._ret_stats_pnl.calculate_ret_stats(self._pnl, br.ann_factor)

        self._portfolio.columns = ['Port']
        self._ret_stats_portfolio = RetStats()
        self._ret_stats_portfolio.calculate_ret_stats(self._portfolio, br.ann_factor)

        self._cumpnl = calculations.create_mult_index(self._pnl)                             # individual signals cumulative P&L
        self._cumpnl.columns = pnl_cols

        self._cumportfolio = calculations.create_mult_index(self._portfolio)                 # portfolio cumulative P&L
        self._cumportfolio.columns = ['Port']