Example #1
    def test_orders_stop(self, name, order_data, event_data, expected):
        data = order_data
        data['asset'] = self.ASSET133
        order = Order(**data)

        if expected['transaction']:
            expected['transaction']['asset'] = self.ASSET133
        event_data['asset'] = self.ASSET133

        assets = (
            (133, pd.DataFrame(
                {
                    'open': [event_data['open']],
                    'high': [event_data['high']],
                    'low': [event_data['low']],
                    'close': [event_data['close']],
                    'volume': [event_data['volume']],
                },
                index=[pd.Timestamp('2006-01-05 14:31', tz='UTC')],
            )),
        )
        days = pd.date_range(
            start=normalize_date(self.minutes[0]),
            end=normalize_date(self.minutes[-1])
        )
        with tmp_bcolz_equity_minute_bar_reader(
                self.trading_calendar, days, assets) as reader:
            data_portal = DataPortal(
                self.env.asset_finder, self.trading_calendar,
                first_trading_day=reader.first_trading_day,
                equity_minute_reader=reader,
            )

            slippage_model = VolumeShareSlippage()

            try:
                dt = pd.Timestamp('2006-01-05 14:31', tz='UTC')
                bar_data = BarData(
                    data_portal,
                    lambda: dt,
                    self.sim_params.data_frequency,
                    self.trading_calendar,
                    NoRestrictions(),
                )

                _, txn = next(slippage_model.simulate(
                    bar_data,
                    self.ASSET133,
                    [order],
                ))
            except StopIteration:
                txn = None

            if expected['transaction'] is None:
                self.assertIsNone(txn)
            else:
                self.assertIsNotNone(txn)

                for key, value in expected['transaction'].items():
                    self.assertEqual(value, txn[key])
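
The extra `name`, `order_data`, `event_data`, and `expected` arguments indicate that this test is expanded from a table of cases. A minimal sketch of how such a table might be wired up with `nose_parameterized` (the case values below are illustrative, not taken from the source):

    from nose_parameterized import parameterized

    STOP_ORDER_CASES = [
        # (name, order_data, event_data, expected) -- hypothetical values
        ('long_stop_triggered',
         {'dt': pd.Timestamp('2006-01-05 14:30', tz='UTC'),
          'amount': 100, 'filled': 0, 'stop': 3.5},
         {'open': 4.0, 'high': 4.1, 'low': 3.9, 'close': 4.0, 'volume': 2000},
         {'transaction': {'amount': 50}}),
    ]

    @parameterized.expand(STOP_ORDER_CASES)
    def test_orders_stop(self, name, order_data, event_data, expected):
        ...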
Example #2
    def test_orders_stop(self, name, order_data, event_data, expected):
        tempdir = TempDirectory()
        try:
            data = order_data
            data['sid'] = self.ASSET133

            order = Order(**data)

            assets = {
                133: pd.DataFrame({
                    "open": [event_data["open"]],
                    "high": [event_data["high"]],
                    "low": [event_data["low"]],
                    "close": [event_data["close"]],
                    "volume": [event_data["volume"]],
                    "dt": [pd.Timestamp('2006-01-05 14:31', tz='UTC')]
                }).set_index("dt")
            }

            write_bcolz_minute_data(
                self.env,
                pd.date_range(
                    start=normalize_date(self.minutes[0]),
                    end=normalize_date(self.minutes[-1])
                ),
                tempdir.path,
                assets
            )

            equity_minute_reader = BcolzMinuteBarReader(tempdir.path)

            data_portal = DataPortal(
                self.env,
                equity_minute_reader=equity_minute_reader,
            )

            slippage_model = VolumeShareSlippage()

            try:
                dt = pd.Timestamp('2006-01-05 14:31', tz='UTC')
                bar_data = BarData(data_portal,
                                   lambda: dt,
                                   'minute')
                _, txn = next(slippage_model.simulate(
                    bar_data,
                    self.ASSET133,
                    [order],
                ))
            except StopIteration:
                txn = None

            if expected['transaction'] is None:
                self.assertIsNone(txn)
            else:
                self.assertIsNotNone(txn)

                for key, value in expected['transaction'].items():
                    self.assertEqual(value, txn[key])
        finally:
            tempdir.cleanup()
Example #3
    def __init__(self, start_session, end_session,
                 trading_calendar,
                 capital_base=DEFAULT_CAPITAL_BASE,
                 emission_rate='daily',
                 data_frequency='daily',
                 arena='backtest'):

        assert type(start_session) == pd.Timestamp
        assert type(end_session) == pd.Timestamp

        assert trading_calendar is not None, \
            "Must pass in trading calendar!"
        assert start_session <= end_session, \
            "Period start falls after period end."
        assert start_session <= trading_calendar.last_trading_session, \
            "Period start falls after the last known trading day."
        assert end_session >= trading_calendar.first_trading_session, \
            "Period end falls before the first known trading day."

        # chop off any minutes or hours on the given start and end dates,
        # as we only support session labels here (and we represent session
        # labels as midnight UTC).
        self._start_session = normalize_date(start_session)
        self._end_session = normalize_date(end_session)
        self._capital_base = capital_base

        self._emission_rate = emission_rate
        self._data_frequency = data_frequency

        # copied to algorithm's environment for runtime access
        self._arena = arena

        self._trading_calendar = trading_calendar

        if not trading_calendar.is_session(self._start_session):
            # if the start date is not a valid session in this calendar,
            # push it forward to the first valid session
            self._start_session = trading_calendar.minute_to_session_label(
                self._start_session
            )

        if not trading_calendar.is_session(self._end_session):
            # if the end date is not a valid session in this calendar,
            # pull it backward to the last valid session before the given
            # end date.
            self._end_session = trading_calendar.minute_to_session_label(
                self._end_session, direction="previous"
            )

        self._first_open = trading_calendar.open_and_close_for_session(
            self._start_session
        )[0]
        self._last_close = trading_calendar.open_and_close_for_session(
            self._end_session
        )[1]
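
A minimal construction sketch, assuming zipline's `get_calendar` helper is available; a start that falls on a non-session (here a Saturday) is pushed forward to the next valid session by the logic above:

    from zipline.utils.calendars import get_calendar

    params = SimulationParameters(
        start_session=pd.Timestamp('2006-01-07', tz='UTC'),  # Saturday
        end_session=pd.Timestamp('2006-01-13', tz='UTC'),
        trading_calendar=get_calendar('NYSE'),
    )
    # _start_session snaps to Monday 2006-01-09; _first_open/_last_close
    # come from the calendar's open and close minutes for those sessions.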
Example #4
    def setUpClass(cls):
        cls.tempdir = TempDirectory()
        cls.env = TradingEnvironment()

        cls.sim_params = SimulationParameters(
            period_start=pd.Timestamp("2006-01-05 14:31", tz="utc"),
            period_end=pd.Timestamp("2006-01-05 14:36", tz="utc"),
            capital_base=1.0e5,
            data_frequency="minute",
            emission_rate='daily',
            env=cls.env
        )

        cls.sids = [133]

        cls.minutes = pd.DatetimeIndex(
            start=pd.Timestamp("2006-01-05 14:31", tz="utc"),
            end=pd.Timestamp("2006-01-05 14:35", tz="utc"),
            freq="1min"
        )

        assets = {
            133: pd.DataFrame({
                "open": np.array([3.0, 3.0, 3.5, 4.0, 3.5]),
                "high": np.array([3.15, 3.15, 3.15, 3.15, 3.15]),
                "low": np.array([2.85, 2.85, 2.85, 2.85, 2.85]),
                "close": np.array([3.0, 3.5, 4.0, 3.5, 3.0]),
                "volume": [2000, 2000, 2000, 2000, 2000],
                "dt": cls.minutes
            }).set_index("dt")
        }

        write_bcolz_minute_data(
            cls.env,
            pd.date_range(
                start=normalize_date(cls.minutes[0]),
                end=normalize_date(cls.minutes[-1])
            ),
            cls.tempdir.path,
            assets
        )

        cls.env.write_data(equities_data={
            133: {
                "start_date": pd.Timestamp("2006-01-05", tz='utc'),
                "end_date": pd.Timestamp("2006-01-07", tz='utc')
            }
        })

        cls.ASSET133 = cls.env.asset_finder.retrieve_asset(133)

        cls.data_portal = DataPortal(
            cls.env,
            equity_minute_reader=BcolzMinuteBarReader(cls.tempdir.path),
        )
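
The matching teardown is not shown in this listing; since `setUpClass` owns the `TempDirectory`, presumably something like the following releases it:

    @classmethod
    def tearDownClass(cls):
        cls.tempdir.cleanup()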
Example #5
    def __init__(self, start_session, end_session,
                 trading_calendar,
                 capital_base=10e3,
                 emission_rate='daily',
                 data_frequency='daily',
                 arena='backtest'):

        assert type(start_session) == pd.Timestamp
        assert type(end_session) == pd.Timestamp

        assert trading_calendar is not None, \
            "Must pass in trading calendar!"
        assert start_session <= end_session, \
            "Period start falls after period end."
        assert start_session <= trading_calendar.last_trading_session, \
            "Period start falls after the last known trading day."
        assert end_session >= trading_calendar.first_trading_session, \
            "Period end falls before the first known trading day."

        # chop off any minutes or hours on the given start and end dates,
        # as we only support session labels here (and we represent session
        # labels as midnight UTC).
        self._start_session = normalize_date(start_session)
        self._end_session = normalize_date(end_session)
        self._capital_base = capital_base

        self._emission_rate = emission_rate
        self._data_frequency = data_frequency

        # copied to algorithm's environment for runtime access
        self._arena = arena

        self._trading_calendar = trading_calendar

        if not trading_calendar.is_session(self._start_session):
            # if the start date is not a valid session in this calendar,
            # push it forward to the first valid session
            self._start_session = trading_calendar.minute_to_session_label(
                self._start_session
            )

        if not trading_calendar.is_session(self._end_session):
            # if the end date is not a valid session in this calendar,
            # pull it backward to the last valid session before the given
            # end date.
            self._end_session = trading_calendar.minute_to_session_label(
                self._end_session, direction="previous"
            )

        self._first_open = trading_calendar.open_and_close_for_session(
            self._start_session
        )[0]
        self._last_close = trading_calendar.open_and_close_for_session(
            self._end_session
        )[1]
Example #6
    def __init__(self, algo, sim_params):

        # ==============
        # Simulation
        # Param Setup
        # ==============
        self.sim_params = sim_params

        # ==============
        # Algo Setup
        # ==============
        self.algo = algo
        self.algo_start = normalize_date(self.sim_params.first_open)
        self.env = algo.trading_environment

        # ==============
        # Snapshot Setup
        # ==============

        def _get_asset_close_date(sid,
                                  finder=self.env.asset_finder,
                                  default=self.sim_params.last_close
                                  + timedelta(days=1)):
            try:
                asset = finder.retrieve_asset(sid)
            except ValueError:
                # Handle sid not an int, such as from a custom source.
                # So that they don't compare equal to other sids, and we'd
                # blow up comparing strings to ints, let's give them unique
                # close dates.
                return default + timedelta(microseconds=id(sid))
            except SidsNotFound:
                return default
            # Default is used when the asset has no auto close date,
            # and is set to a time after the simulation ends, so that the
            # relevant asset isn't removed from the universe at all
            # (at least not for this reason).
            return asset.auto_close_date or default

        self._get_asset_close = _get_asset_close_date

        # The algorithm's data as of our most recent event.
        # Maintain sids in order by asset close date, so that we can more
        # efficiently remove them when their times come...
        self.current_data = BarData(SortedDict(self._get_asset_close))

        # We don't have a datetime for the current snapshot until we
        # receive a message.
        self.simulation_dt = None

        # =============
        # Logging Setup
        # =============

        # Processor function for injecting the algo_dt into
        # user prints/logs.
        def inject_algo_dt(record):
            if 'algo_dt' not in record.extra:
                record.extra['algo_dt'] = self.simulation_dt
        self.processor = Processor(inject_algo_dt)
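
`SortedDict` from `sortedcontainers` takes an optional key function, so `BarData`'s backing dict keeps sids ordered by close date; a small sketch of the idea (the dates are illustrative):

    from sortedcontainers import SortedDict

    close_dates = {2: '2006-01-15', 3: '2006-02-01', 1: '2006-03-01'}
    sd = SortedDict(close_dates.get)   # order keys by their close date
    sd.update({1: 'late', 2: 'early', 3: 'middle'})
    list(sd)  # [2, 3, 1] -- earliest-closing sid first, cheap to evict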
Example #7
    def get_fetcher_assets(self, dt):
        """
        Returns a list of assets for the current date, as defined by the
        fetcher data.

        Returns
        -------
        list: a list of Asset objects.
        """
        # return a list of assets for the current date, as defined by the
        # fetcher source
        if self._extra_source_df is None:
            return []

        day = normalize_date(dt)

        if day in self._extra_source_df.index:
            assets = self._extra_source_df.loc[day]['sid']
        else:
            return []

        if isinstance(assets, pd.Series):
            return [x for x in assets if isinstance(x, Asset)]
        else:
            return [assets] if isinstance(assets, Asset) else []
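
The `isinstance(assets, pd.Series)` branch is needed because `.loc[day]` returns a Series when several fetcher rows share a day but a scalar when exactly one row matches; a quick illustration with a hypothetical frame:

    import pandas as pd

    day = pd.Timestamp('2006-01-05', tz='UTC')
    df = pd.DataFrame({'sid': ['a', 'b']}, index=[day, day])
    df.loc[day]['sid']           # Series(['a', 'b']): duplicate index rows
    df.iloc[:1].loc[day]['sid']  # 'a': a single row collapses to a scalar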
Example #8
    def _get_fetcher_value(self, asset, field, dt):
        day = normalize_date(dt)

        try:
            return \
                self._augmented_sources_map[field][asset].loc[day, field]
        except KeyError:
            return np.NaN
Example #9
    def _call_before_trading_start(self, dt):
        dt = normalize_date(dt)
        self.simulation_dt = dt
        self.on_dt_changed(dt)

        self._cleanup_expired_assets(dt, self.current_data, self.algo.blotter)

        self.algo.before_trading_start(self.current_data)
Example #10
    def _check_is_currently_alive(self, asset, dt):
        sid = int(asset)

        if sid not in self._asset_start_dates:
            self._get_asset_start_date(asset)

        start_date = self._asset_start_dates[sid]
        if self._asset_start_dates[sid] > dt:
            raise NoTradeDataAvailableTooEarly(sid=sid,
                                               dt=normalize_date(dt),
                                               start_dt=start_date)

        end_date = self._asset_end_dates[sid]
        if self._asset_end_dates[sid] < dt:
            raise NoTradeDataAvailableTooLate(sid=sid,
                                              dt=normalize_date(dt),
                                              end_dt=end_date)
Example #11
    def _check_is_currently_alive(self, asset, dt):
        sid = int(asset)

        if sid not in self._asset_start_dates:
            self._get_asset_start_date(asset)

        start_date = self._asset_start_dates[sid]
        if self._asset_start_dates[sid] > dt:
            raise NoTradeDataAvailableTooEarly(
                sid=sid,
                dt=normalize_date(dt),
                start_dt=start_date
            )

        end_date = self._asset_end_dates[sid]
        if self._asset_end_dates[sid] < dt:
            raise NoTradeDataAvailableTooLate(
                sid=sid,
                dt=normalize_date(dt),
                end_dt=end_date
            )
Example #12
    def __init__(self, algo, sim_params, data_portal, clock, benchmark_source,
                 universe_func):

        # ==============
        # Simulation
        # Param Setup
        # ==============
        self.sim_params = sim_params
        self.env = algo.trading_environment
        self.data_portal = data_portal

        # ==============
        # Algo Setup
        # ==============
        self.algo = algo
        self.algo_start = normalize_date(self.sim_params.first_open)

        # ==============
        # Snapshot Setup
        # ==============

        # This object is the way that user algorithms interact with OHLCV data,
        # fetcher data, and some API methods like `data.can_trade`.
        self.current_data = self._create_bar_data(universe_func)

        # We don't have a datetime for the current snapshot until we
        # receive a message.
        self.simulation_dt = None
        self.previous_dt = self.algo_start

        self.clock = clock

        self.benchmark_source = benchmark_source

        # =============
        # Logging Setup
        # =============

        # Processor function for injecting the algo_dt into
        # user prints/logs.
        def inject_algo_dt(record):
            if 'algo_dt' not in record.extra:
                record.extra['algo_dt'] = self.simulation_dt

        self.processor = Processor(inject_algo_dt)
Example #13
    def __init__(self, algo, sim_params):

        # Performance Setup
        self.fast_backtest = sim_params.fast_backtest

        # ==============
        # Simulation
        # Param Setup
        # ==============
        self.sim_params = sim_params

        # ==============
        # Algo Setup
        # ==============
        self.algo = algo
        self.algo_start = normalize_date(self.sim_params.first_open)

        # ==============
        # Snapshot Setup
        # ==============

        # The algorithm's data as of our most recent event.
        # We want an object that will have empty objects as default
        # values on missing keys.
        self.current_data = BarData()

        # We don't have a datetime for the current snapshot until we
        # receive a message.
        self.simulation_dt = None

        # =============
        # Logging Setup
        # =============

        # Processor function for injecting the algo_dt into
        # user prints/logs.
        def inject_algo_dt(record):
            if 'algo_dt' not in record.extra:
                record.extra['algo_dt'] = self.simulation_dt
        self.processor = Processor(inject_algo_dt)
Example #14
    def __init__(self, algo, sim_params):

        # ==============
        # Simulation
        # Param Setup
        # ==============
        self.sim_params = sim_params

        # ==============
        # Algo Setup
        # ==============
        self.algo = algo
        self.algo_start = normalize_date(self.sim_params.first_open)
        self.env = algo.trading_environment

        # ==============
        # Snapshot Setup
        # ==============

        # The algorithm's data as of our most recent event.
        # We want an object that will have empty objects as default
        # values on missing keys.
        self.current_data = BarData()

        # We don't have a datetime for the current snapshot until we
        # receive a message.
        self.simulation_dt = None
        self.previous_dt = self.algo_start

        # =============
        # Logging Setup
        # =============

        # Processor function for injecting the algo_dt into
        # user prints/logs.
        def inject_algo_dt(record):
            if 'algo_dt' not in record.extra:
                record.extra['algo_dt'] = self.simulation_dt
        self.processor = Processor(inject_algo_dt)
Example #15
    def test_volume_share_slippage(self):
        tempdir = TempDirectory()

        try:
            assets = {
                133: pd.DataFrame({
                    "open": [3.00],
                    "high": [3.15],
                    "low": [2.85],
                    "close": [3.00],
                    "volume": [200],
                    "dt": [self.minutes[0]]
                }).set_index("dt")
            }

            write_bcolz_minute_data(
                self.env,
                pd.date_range(
                    start=normalize_date(self.minutes[0]),
                    end=normalize_date(self.minutes[-1])
                ),
                tempdir.path,
                assets
            )

            equity_minute_reader = BcolzMinuteBarReader(tempdir.path)

            data_portal = DataPortal(
                self.env,
                equity_minute_reader=equity_minute_reader,
            )

            slippage_model = VolumeShareSlippage()

            open_orders = [
                Order(
                    dt=datetime.datetime(2006, 1, 5, 14, 30, tzinfo=pytz.utc),
                    amount=100,
                    filled=0,
                    sid=self.ASSET133
                )
            ]

            bar_data = BarData(data_portal,
                               lambda: self.minutes[0],
                               'minute')

            orders_txns = list(slippage_model.simulate(
                bar_data,
                self.ASSET133,
                open_orders,
            ))

            self.assertEqual(len(orders_txns), 1)
            _, txn = orders_txns[0]

            expected_txn = {
                'price': float(3.0001875),
                'dt': datetime.datetime(
                    2006, 1, 5, 14, 31, tzinfo=pytz.utc),
                'amount': int(5),
                'sid': int(133),
                'commission': None,
                'type': DATASOURCE_TYPE.TRANSACTION,
                'order_id': open_orders[0].id
            }

            self.assertIsNotNone(txn)

            # TODO: Make expected_txn a Transaction object and ensure there
            # is a __eq__ for that class.
            self.assertEqual(expected_txn, txn.__dict__)

            open_orders = [
                Order(
                    dt=datetime.datetime(2006, 1, 5, 14, 30, tzinfo=pytz.utc),
                    amount=100,
                    filled=0,
                    sid=self.ASSET133
                )
            ]

            # Set bar_data to be a minute ahead of last trade.
            # Volume share slippage should not execute when there is no trade.
            bar_data = BarData(data_portal,
                               lambda: self.minutes[1],
                               'minute')

            orders_txns = list(slippage_model.simulate(
                bar_data,
                self.ASSET133,
                open_orders,
            ))

            self.assertEqual(len(orders_txns), 0)

        finally:
            tempdir.cleanup()
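
The `expected_txn` above follows directly from `VolumeShareSlippage`'s defaults (a 2.5% per-bar volume limit and a 0.1 price-impact constant): 2.5% of the 200-share bar caps the fill at 5 shares, and the fill price is the close plus a squared-volume-share impact:

    bar_volume = 200
    volume_limit = 0.025                       # VolumeShareSlippage default
    price_impact_constant = 0.1                # VolumeShareSlippage default

    filled = int(bar_volume * volume_limit)            # 5 shares
    volume_share = filled / float(bar_volume)          # 0.025
    price = 3.0 * (1 + price_impact_constant * volume_share ** 2)
    # 3.0 * (1 + 0.1 * 0.000625) == 3.0001875, matching expected_txn['price']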
Example #16
    def __init__(self, algo, sim_params):

        # ==============
        # Simulation
        # Param Setup
        # ==============
        self.sim_params = sim_params

        # ==============
        # Algo Setup
        # ==============
        self.algo = algo
        self.algo_start = normalize_date(self.sim_params.first_open)
        self.env = algo.trading_environment

        # ==============
        # Snapshot Setup
        # ==============

        _day = timedelta(days=1)

        def _get_removal_date(sid,
                              finder=self.env.asset_finder,
                              default=self.sim_params.last_close + _day):
            """
            Get the date of the morning on which we should remove an asset from
            data.

            If we don't have an auto_close_date, this is just the end of the
            simulation.

            If we have an auto_close_date, then we remove assets from data on
            max(asset.auto_close_date, asset.end_date + timedelta(days=1))

            We hold assets at least until auto_close_date because up until that
            date the user might still hold positions or have open orders in an
            expired asset.

            We hold assets at least until end_date + 1, because an asset
            continues trading until the **end** of its end_date.  Even if an
            asset auto-closed before the end_date (say, because Interactive
            Brokers clears futures positions prior to the actual notice or
            expiration), there may still be trades arriving that represent
            signals for other assets that are still tradeable. (Particularly in
            the futures case, trading in the final days of a contract is
            likely relevant for trading the next contract on the same future
            chain.)
            """
            try:
                asset = finder.retrieve_asset(sid)
            except ValueError:
                # Handle sid not an int, such as from a custom source.
                # So that they don't compare equal to other sids, and we'd
                # blow up comparing strings to ints, let's give them unique
                # close dates.
                return default + timedelta(microseconds=id(sid))
            except SidsNotFound:
                return default

            auto_close_date = asset.auto_close_date
            if auto_close_date is None:
                # If we don't have an auto_close_date, we never remove an asset
                # from the user's portfolio.
                return default

            end_date = asset.end_date
            if end_date is None:
                # If we have an auto_close_date but not an end_date, clear the
                # asset from data when we clear positions/orders.
                return auto_close_date

            # If we have both, make close once we're on or after the
            # auto_close_date, and strictly after the end_date.
            # See docstring above for an explanation of this logic.
            return max(auto_close_date, end_date + _day)

        self._get_removal_date = _get_removal_date

        # The algorithm's data as of our most recent event.
        # Maintain sids in order by asset close date, so that we can more
        # efficiently remove them when their times come...
        self.current_data = BarData(SortedDict(self._get_removal_date))

        # We don't have a datetime for the current snapshot until we
        # receive a message.
        self.simulation_dt = None

        # =============
        # Logging Setup
        # =============

        # Processor function for injecting the algo_dt into
        # user prints/logs.
        def inject_algo_dt(record):
            if 'algo_dt' not in record.extra:
                record.extra['algo_dt'] = self.simulation_dt
        self.processor = Processor(inject_algo_dt)
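
A worked instance of the `max()` rule described in the docstring (the dates are illustrative):

    import pandas as pd
    from datetime import timedelta

    auto_close_date = pd.Timestamp('2006-01-10', tz='UTC')
    end_date = pd.Timestamp('2006-01-12', tz='UTC')
    removal = max(auto_close_date, end_date + timedelta(days=1))
    # -> 2006-01-13: bars through end_date stay visible even though the
    #    position itself was auto-closed on 2006-01-10.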
Example #17
    def highs(self, assets, dt):
        """
        The high field's aggregation returns the largest high seen between
        the market open and the current dt.
        If there has been no data on or before the `dt`, the high is `nan`.

        Returns
        -------
        np.array with dtype=float64, in order of assets parameter.
        """
        market_open, prev_dt, dt_value, entries = self._prelude(dt, 'high')

        highs = []
        normalized_date = normalize_date(dt)

        for asset in assets:
            if not asset._is_alive(normalized_date, True):
                highs.append(np.NaN)
                continue

            if prev_dt is None:
                val = self._minute_reader.get_value(asset, dt, 'high')
                entries[asset] = (dt_value, val)
                highs.append(val)
                continue
            else:
                try:
                    last_visited_dt, last_max = entries[asset]
                    if last_visited_dt == dt_value:
                        highs.append(last_max)
                        continue
                    elif last_visited_dt == prev_dt:
                        curr_val = self._minute_reader.get_value(
                            asset, dt, 'high')
                        if pd.isnull(curr_val):
                            val = last_max
                        elif pd.isnull(last_max):
                            val = curr_val
                        else:
                            val = max(last_max, curr_val)
                        entries[asset] = (dt_value, val)
                        highs.append(val)
                        continue
                    else:
                        after_last = pd.Timestamp(
                            last_visited_dt + self._one_min, tz='UTC')
                        window = self._minute_reader.load_raw_arrays(
                            ['high'],
                            after_last,
                            dt,
                            [asset],
                        )[0].T
                        val = max(last_max, np.nanmax(window))
                        entries[asset] = (dt_value, val)
                        highs.append(val)
                        continue
                except KeyError:
                    window = self._minute_reader.load_raw_arrays(
                        ['high'],
                        market_open,
                        dt,
                        [asset],
                    )[0].T
                    val = np.nanmax(window)
                    entries[asset] = (dt_value, val)
                    highs.append(val)
                    continue
        return np.array(highs)
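
The bookkeeping here recurs in the `lows`, `opens`, and `volumes` aggregators below: `entries` caches `(last_visited_dt, running_value)` per asset, so each call reuses the cached value, extends it by one bar, or scans only the gap since the last visit. Distilled to its essentials (a sketch of the pattern, not the library's API):

    import numpy as np

    entries = {}  # asset -> (last_visited_dt, running_max)

    def running_high(asset, dt, read_window, market_open, one_min):
        try:
            last_dt, last_max = entries[asset]
        except KeyError:
            val = np.nanmax(read_window(market_open, dt))   # cold start
        else:
            if last_dt == dt:
                return last_max                             # already current
            val = max(last_max, np.nanmax(read_window(last_dt + one_min, dt)))
        entries[asset] = (dt, val)
        return val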
Example #18
    def lows(self, assets, dt):
        """
        The low field's aggregation returns the smallest low seen between
        the market open and the current dt.
        If there has been no data on or before the `dt`, the low is `nan`.

        Returns
        -------
        np.array with dtype=float64, in order of assets parameter.
        """
        market_open, prev_dt, dt_value, entries = self._prelude(dt, 'low')

        lows = []
        normalized_date = normalize_date(dt)

        for asset in assets:
            if not asset._is_alive(normalized_date, True):
                lows.append(np.NaN)
                continue

            if prev_dt is None:
                val = self._minute_reader.get_value(asset, dt, 'low')
                entries[asset] = (dt_value, val)
                lows.append(val)
                continue
            else:
                try:
                    last_visited_dt, last_min = entries[asset]
                    if last_visited_dt == dt_value:
                        lows.append(last_min)
                        continue
                    elif last_visited_dt == prev_dt:
                        curr_val = self._minute_reader.get_value(
                            asset, dt, 'low')
                        val = np.nanmin([last_min, curr_val])
                        entries[asset] = (dt_value, val)
                        lows.append(val)
                        continue
                    else:
                        after_last = pd.Timestamp(
                            last_visited_dt + self._one_min, tz='UTC')
                        window = self._minute_reader.load_raw_arrays(
                            ['low'],
                            after_last,
                            dt,
                            [asset],
                        )[0].T
                        window_min = np.nanmin(window)
                        if pd.isnull(window_min):
                            val = last_min
                        else:
                            val = min(last_min, window_min)
                        entries[asset] = (dt_value, val)
                        lows.append(val)
                        continue
                except KeyError:
                    window = self._minute_reader.load_raw_arrays(
                        ['low'],
                        market_open,
                        dt,
                        [asset],
                    )[0].T
                    val = np.nanmin(window)
                    entries[asset] = (dt_value, val)
                    lows.append(val)
                    continue
        return np.array(lows)
Example #19
    def opens(self, assets, dt):
        """
        The open field's aggregation returns the first value that occurs
        for the day. If there has been no data on or before the `dt`, the open
        is `nan`.

        Once the first non-nan open is seen, that value remains constant per
        asset for the remainder of the day.

        Returns
        -------
        np.array with dtype=float64, in order of assets parameter.
        """
        market_open, prev_dt, dt_value, entries = self._prelude(dt, 'open')

        opens = []
        normalized_date = normalize_date(dt)

        for asset in assets:
            if not asset._is_alive(normalized_date, True):
                opens.append(np.NaN)
                continue

            if prev_dt is None:
                val = self._minute_reader.get_value(asset, dt, 'open')
                entries[asset] = (dt_value, val)
                opens.append(val)
                continue
            else:
                try:
                    last_visited_dt, first_open = entries[asset]
                    if last_visited_dt == dt_value:
                        opens.append(first_open)
                        continue
                    elif not pd.isnull(first_open):
                        opens.append(first_open)
                        entries[asset] = (dt_value, first_open)
                        continue
                    else:
                        after_last = pd.Timestamp(
                            last_visited_dt + self._one_min, tz='UTC')
                        window = self._minute_reader.load_raw_arrays(
                            ['open'],
                            after_last,
                            dt,
                            [asset],
                        )[0]
                        nonnan = window[~pd.isnull(window)]
                        if len(nonnan):
                            val = nonnan[0]
                        else:
                            val = np.nan
                        entries[asset] = (dt_value, val)
                        opens.append(val)
                        continue
                except KeyError:
                    window = self._minute_reader.load_raw_arrays(
                        ['open'],
                        market_open,
                        dt,
                        [asset],
                    )[0]
                    nonnan = window[~pd.isnull(window)]
                    if len(nonnan):
                        val = nonnan[0]
                    else:
                        val = np.nan
                    entries[asset] = (dt_value, val)
                    opens.append(val)
                    continue
        return np.array(opens)
Example #20
    def _get_adjustments_in_range(self, asset, dts, field,
                                  is_perspective_after):
        """
        Get the Float64Multiply objects to pass to an AdjustedArrayWindow.

        For use by AdjustedArrayWindow in the loader, which looks back from
        the current simulation time over a window of data, the dictionary is
        structured so that:
        - the key into the dictionary for adjustments is the location of the
        day from which the window is being viewed.
        - the start of all multiply objects is always 0 (in each window all
          adjustments are overlapping)
        - the end of the multiply object is the location before the calendar
          location of the adjustment action, making all days before the event
          adjusted.

        Parameters
        ----------
        asset : Asset
            The asset for which to get adjustments.
        dts : iterable of datetime64-like
            The dts for which adjustment data is needed.
        field : str
            OHLCV field for which to get the adjustments.
        is_perspective_after : bool
            see: `USEquityHistoryLoader.history`
            If True, the index at which the Multiply object is registered to
            be popped is calculated so that it applies to the last slot in the
            sliding window when the adjustment occurs immediately after the dt
            that slot represents.

        Returns
        -------
        out : dict[loc -> Float64Multiply]
            The adjustments as a dict of loc -> Float64Multiply
        """
        sid = int(asset)
        start = normalize_date(dts[0])
        end = normalize_date(dts[-1])
        adjs = {}
        if field != 'volume':
            mergers = self._adjustments_reader.get_adjustments_for_sid(
                'mergers', sid)
            for m in mergers:
                dt = m[0]
                if start < dt <= end:
                    end_loc = dts.searchsorted(dt)
                    adj_loc = end_loc
                    if is_perspective_after:
                        # Set adjustment pop location so that it applies
                        # to last value if adjustment occurs immediately after
                        # the last slot.
                        adj_loc -= 1
                    mult = Float64Multiply(0,
                                           end_loc - 1,
                                           0,
                                           0,
                                           m[1])
                    try:
                        adjs[adj_loc].append(mult)
                    except KeyError:
                        adjs[adj_loc] = [mult]
            divs = self._adjustments_reader.get_adjustments_for_sid(
                'dividends', sid)
            for d in divs:
                dt = d[0]
                if start < dt <= end:
                    end_loc = dts.searchsorted(dt)
                    adj_loc = end_loc
                    if is_perspective_after:
                        # Set adjustment pop location so that it applies
                        # to last value if adjustment occurs immediately after
                        # the last slot.
                        adj_loc -= 1
                    mult = Float64Multiply(0,
                                           end_loc - 1,
                                           0,
                                           0,
                                           d[1])
                    try:
                        adjs[adj_loc].append(mult)
                    except KeyError:
                        adjs[adj_loc] = [mult]
        splits = self._adjustments_reader.get_adjustments_for_sid(
            'splits', sid)
        for s in splits:
            dt = s[0]
            if start < dt <= end:
                if field == 'volume':
                    ratio = 1.0 / s[1]
                else:
                    ratio = s[1]
                end_loc = dts.searchsorted(dt)
                adj_loc = end_loc
                if is_perspective_after:
                    # Set adjustment pop location so that it applies
                    # to last value if adjustment occurs immediately after
                    # the last slot.
                    adj_loc -= 1
                mult = Float64Multiply(0,
                                       end_loc - 1,
                                       0,
                                       0,
                                       ratio)
                try:
                    adjs[adj_loc].append(mult)
                except KeyError:
                    adjs[adj_loc] = [mult]
        return adjs
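
To see how the locations line up, suppose `dts` spans five sessions and an adjustment lands on the third; `searchsorted` gives the event's calendar location, and the multiplier covers every earlier row (a hypothetical walk-through):

    import pandas as pd

    dts = pd.date_range('2006-01-03', periods=5, tz='UTC')
    event_dt = dts[2]
    end_loc = dts.searchsorted(event_dt)   # -> 2
    # Float64Multiply(0, end_loc - 1, 0, 0, ratio) scales rows 0..1, i.e.
    # every day strictly before the event; with is_perspective_after the
    # multiplier is keyed at end_loc - 1 so it still applies when the event
    # falls immediately after the window's last slot.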
Example #21
    def transform(self):
        """
        Main generator work loop.
        """
        algo = self.algo
        algo.data_portal = self.data_portal
        handle_data = algo.event_manager.handle_data
        current_data = self.current_data

        data_portal = self.data_portal

        # can't cache a pointer to algo.perf_tracker because we're not
        # guaranteed that the algo doesn't swap out perf trackers during
        # its lifetime.
        # likewise, we can't cache a pointer to the blotter.

        algo.perf_tracker.position_tracker.data_portal = data_portal

        def every_bar(dt_to_use):
            # called every tick (minute or day).

            self.simulation_dt = dt_to_use
            algo.on_dt_changed(dt_to_use)

            blotter = algo.blotter
            perf_tracker = algo.perf_tracker

            # handle any transactions and commissions coming out of new
            # orders placed in the last bar
            new_transactions, new_commissions = \
                blotter.get_transactions(current_data)

            for transaction in new_transactions:
                perf_tracker.process_transaction(transaction)

                # since this order was modified, record it
                order = blotter.orders[transaction.order_id]
                perf_tracker.process_order(order)

            if new_commissions:
                for commission in new_commissions:
                    perf_tracker.process_commission(commission)

            handle_data(algo, current_data, dt_to_use)

            # grab any new orders from the blotter, then clear the list.
            # this includes cancelled orders.
            new_orders = blotter.new_orders
            blotter.new_orders = []

            # if we have any new orders, record them so that we know
            # in what perf period they were placed.
            if new_orders:
                for new_order in new_orders:
                    perf_tracker.process_order(new_order)

            self.algo.portfolio_needs_update = True
            self.algo.account_needs_update = True
            self.algo.performance_needs_update = True

        def once_a_day(midnight_dt):
            # Get the positions before updating the date so that prices are
            # fetched for trading close instead of midnight
            positions = algo.perf_tracker.position_tracker.positions
            position_assets = algo.asset_finder.retrieve_all(positions)

            # set all the timestamps
            self.simulation_dt = midnight_dt
            algo.on_dt_changed(midnight_dt)

            # we want to wait until the clock rolls over to the next day
            # before cleaning up expired assets.
            self._cleanup_expired_assets(midnight_dt, position_assets)

            perf_tracker = algo.perf_tracker

            # handle any splits that impact any positions or any open orders.
            assets_we_care_about = \
                viewkeys(perf_tracker.position_tracker.positions) | \
                viewkeys(algo.blotter.open_orders)

            if assets_we_care_about:
                splits = data_portal.get_splits(assets_we_care_about,
                                                midnight_dt)
                if splits:
                    algo.blotter.process_splits(splits)
                    perf_tracker.position_tracker.handle_splits(splits)

            # call before trading start
            algo.before_trading_start(current_data)

        def handle_benchmark(date):
            algo.perf_tracker.all_benchmark_returns[date] = \
                self.benchmark_source.get_value(date)

        with ExitStack() as stack:
            stack.enter_context(self.processor)
            stack.enter_context(ZiplineAPI(self.algo))

            if algo.data_frequency == 'minute':

                def execute_order_cancellation_policy():
                    algo.blotter.execute_cancel_policy(DAY_END)
            else:

                def execute_order_cancellation_policy():
                    pass

            for dt, action in self.clock:
                if action == BAR:
                    every_bar(dt)
                elif action == DAY_START:
                    once_a_day(dt)
                elif action == DAY_END:
                    # End of the day.
                    execute_order_cancellation_policy()
                    handle_benchmark(normalize_date(dt))

                    yield self._get_daily_message(dt, algo, algo.perf_tracker)
                elif action == MINUTE_END:
                    handle_benchmark(dt)
                    minute_msg, daily_msg = \
                        self._get_minute_message(dt, algo, algo.perf_tracker)

                    yield minute_msg

                    if daily_msg:
                        yield daily_msg

        risk_message = algo.perf_tracker.handle_simulation_end()
        yield risk_message
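
`transform` is driven entirely by the clock, which is assumed to yield `(dt, action)` pairs; a toy clock for a single minute-mode session might look like this (the constant names mirror those used above):

    session = pd.Timestamp('2006-01-05', tz='UTC')
    minutes = pd.date_range('2006-01-05 14:31', '2006-01-05 21:00',
                            freq='1min', tz='UTC')

    def toy_clock():
        yield session, DAY_START           # once_a_day() runs here
        for minute in minutes:
            yield minute, BAR              # every_bar() handles the bar
            yield minute, MINUTE_END       # emits the minute perf message
        yield minutes[-1], DAY_END         # cancellation policy + daily msg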
Example #22
    def _get_adjustments_in_range(self, asset, dts, field):
        """
        Get the Float64Multiply objects to pass to an AdjustedArrayWindow.

        For use by AdjustedArrayWindow in the loader, which looks back from
        the current simulation time over a window of data, the dictionary is
        structured so that:
        - the key into the dictionary for adjustments is the location of the
        day from which the window is being viewed.
        - the start of all multiply objects is always 0 (in each window all
          adjustments are overlapping)
        - the end of the multiply object is the location before the calendar
          location of the adjustment action, making all days before the event
          adjusted.

        Parameters
        ----------
        asset : Asset
            The asset for which to get adjustments.
        dts : iterable of datetime64-like
            The dts for which adjustment data is needed.
        field : str
            OHLCV field for which to get the adjustments.

        Returns
        -------
        out : dict[loc -> list[Float64Multiply]]
            The adjustments as a dict mapping each window location to its
            list of Float64Multiply objects.
        """
        sid = int(asset)
        start = normalize_date(dts[0])
        end = normalize_date(dts[-1])
        adjs = {}
        if field != 'volume':
            mergers = self._adjustments_reader.get_adjustments_for_sid(
                'mergers', sid)
            for m in mergers:
                dt = m[0]
                if start < dt <= end:
                    end_loc = dts.searchsorted(dt)
                    adj_loc = end_loc
                    mult = Float64Multiply(0, end_loc - 1, 0, 0, m[1])
                    try:
                        adjs[adj_loc].append(mult)
                    except KeyError:
                        adjs[adj_loc] = [mult]
            divs = self._adjustments_reader.get_adjustments_for_sid(
                'dividends', sid)
            for d in divs:
                dt = d[0]
                if start < dt <= end:
                    end_loc = dts.searchsorted(dt)
                    adj_loc = end_loc
                    mult = Float64Multiply(0, end_loc - 1, 0, 0, d[1])
                    try:
                        adjs[adj_loc].append(mult)
                    except KeyError:
                        adjs[adj_loc] = [mult]
        splits = self._adjustments_reader.get_adjustments_for_sid(
            'splits', sid)
        for s in splits:
            dt = s[0]
            if start < dt <= end:
                if field == 'volume':
                    ratio = 1.0 / s[1]
                else:
                    ratio = s[1]
                end_loc = dts.searchsorted(dt)
                adj_loc = end_loc
                mult = Float64Multiply(0, end_loc - 1, 0, 0, ratio)
                try:
                    adjs[adj_loc].append(mult)
                except KeyError:
                    adjs[adj_loc] = [mult]
        return adjs
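To see where an adjustment lands in the window, here is a small self-contained sketch of the searchsorted arithmetic (hypothetical dates, not from the source):

    import pandas as pd

    # Four hypothetical sessions; a split takes effect on the third one.
    dts = pd.DatetimeIndex(
        ['2006-01-03', '2006-01-04', '2006-01-05', '2006-01-06'], tz='UTC')
    split_dt = pd.Timestamp('2006-01-05', tz='UTC')

    end_loc = dts.searchsorted(split_dt)   # -> 2
    # Float64Multiply(0, end_loc - 1, 0, 0, ratio) therefore covers rows
    # 0..1, i.e. every bar strictly before the adjustment date.
    print(end_loc)                         # 2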
Example #29
    def volumes(self, assets, dt):
        """
        The volume field's aggregation returns the sum of all volumes
        between the market open and the `dt`.
        If there has been no data on or before the `dt`, the volume is 0.

        Returns
        -------
        np.array with dtype=int64, in the order of the assets parameter.
        """
        market_open, prev_dt, dt_value, entries = self._prelude(dt, 'volume')

        volumes = []
        normalized_date = normalize_date(dt)

        for asset in assets:
            if not asset._is_alive(normalized_date, True):
                volumes.append(0)
                continue

            if prev_dt is None:
                val = self._minute_reader.get_value(asset, dt, 'volume')
                entries[asset] = (dt_value, val)
                volumes.append(val)
                continue
            else:
                try:
                    last_visited_dt, last_total = entries[asset]
                    if last_visited_dt == dt_value:
                        volumes.append(last_total)
                        continue
                    elif last_visited_dt == prev_dt:
                        val = self._minute_reader.get_value(
                            asset, dt, 'volume')
                        val += last_total
                        entries[asset] = (dt_value, val)
                        volumes.append(val)
                        continue
                    else:
                        after_last = pd.Timestamp(
                            last_visited_dt + self._one_min, tz='UTC')
                        window = self._minute_reader.load_raw_arrays(
                            ['volume'],
                            after_last,
                            dt,
                            [asset],
                        )[0]
                        val = np.nansum(window) + last_total
                        entries[asset] = (dt_value, val)
                        volumes.append(val)
                        continue
                except KeyError:
                    window = self._minute_reader.load_raw_arrays(
                        ['volume'],
                        market_open,
                        dt,
                        [asset],
                    )[0]
                    val = np.nansum(window)
                    entries[asset] = (dt_value, val)
                    volumes.append(val)
                    continue
        return np.array(volumes)
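The per-asset cache in volumes stores a (last_visited_dt, running_total) pair so each call only has to read the bars since the previous visit. A simplified, self-contained sketch of that incremental aggregation (hypothetical names, plain Python/NumPy):

    import numpy as np

    entries = {}  # asset -> (last_index_visited, running_total)

    def incremental_volume(asset, bars, idx):
        # bars stands in for the minute reader; NaN marks a missing minute.
        try:
            last_idx, last_total = entries[asset]
            if last_idx == idx:
                total = last_total                     # same minute: cached
            else:
                # extend the previous total with only the new bars
                total = last_total + np.nansum(bars[last_idx + 1:idx + 1])
        except KeyError:
            total = np.nansum(bars[:idx + 1])          # first visit
        entries[asset] = (idx, total)
        return total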
Example #30
    def transform(self):
        """
        Main generator work loop.
        """
        algo = self.algo
        emission_rate = algo.perf_tracker.emission_rate

        def every_bar(dt_to_use, current_data=self.current_data,
                      handle_data=algo.event_manager.handle_data):
            # called every tick (minute or day).
            algo.on_dt_changed(dt_to_use)

            for capital_change in calculate_minute_capital_changes(dt_to_use):
                yield capital_change

            self.simulation_dt = dt_to_use

            blotter = algo.blotter
            perf_tracker = algo.perf_tracker

            # handle any transactions and commissions coming out of new
            # orders placed in the last bar

            new_transactions, new_commissions, closed_orders = \
                blotter.get_transactions(current_data)

            blotter.prune_orders(closed_orders)

            for transaction in new_transactions:
                perf_tracker.process_transaction(transaction)

                # since this order was modified, record it
                order = blotter.orders[transaction.order_id]
                perf_tracker.process_order(order)

            if new_commissions:
                for commission in new_commissions:
                    perf_tracker.process_commission(commission)

            handle_data(algo, current_data, dt_to_use)

            # grab any new orders from the blotter, then clear the list.
            # this includes cancelled orders.
            new_orders = blotter.new_orders
            blotter.new_orders = []

            # if we have any new orders, record them so that we know
            # in what perf period they were placed.
            if new_orders:
                for new_order in new_orders:
                    perf_tracker.process_order(new_order)

            algo.portfolio_needs_update = True
            algo.account_needs_update = True
            algo.performance_needs_update = True

        def once_a_day(midnight_dt, current_data=self.current_data,
                       data_portal=self.data_portal):

            perf_tracker = algo.perf_tracker

            # Get the positions before updating the date so that prices are
            # fetched for trading close instead of midnight
            positions = algo.perf_tracker.position_tracker.positions
            position_assets = algo.asset_finder.retrieve_all(positions)

            # set all the timestamps
            self.simulation_dt = midnight_dt
            algo.on_dt_changed(midnight_dt)

            # process any capital changes that came overnight
            for capital_change in algo.calculate_capital_changes(
                    midnight_dt, emission_rate=emission_rate,
                    is_interday=True):
                yield capital_change

            # we want to wait until the clock rolls over to the next day
            # before cleaning up expired assets.
            self._cleanup_expired_assets(midnight_dt, position_assets)

            # handle any splits that impact any positions or any open orders.
            assets_we_care_about = \
                viewkeys(perf_tracker.position_tracker.positions) | \
                viewkeys(algo.blotter.open_orders)

            for a in assets_we_care_about.copy():
                if not isinstance(a, Equity):
                    assets_we_care_about.remove(a)


            if assets_we_care_about:
                splits = data_portal.get_splits(assets_we_care_about,
                                                midnight_dt)
                if splits:
                    algo.blotter.process_splits(splits)
                    perf_tracker.position_tracker.handle_splits(splits)

        def handle_benchmark(date, benchmark_source=self.benchmark_source):
            algo.perf_tracker.all_benchmark_returns[date] = \
                benchmark_source.get_value(date)

        def on_exit():
            # Remove references to algo, data portal, et al to break cycles
            # and ensure deterministic cleanup of these objects when the
            # simulation finishes.
            self.algo = None
            self.benchmark_source = self.current_data = self.data_portal = None

        with ExitStack() as stack:
            stack.callback(on_exit)
            stack.enter_context(self.processor)
            stack.enter_context(ZiplineAPI(self.algo))

            if algo.data_frequency == 'minute':
                def execute_order_cancellation_policy():
                    algo.blotter.execute_cancel_policy(SESSION_END)

                def calculate_minute_capital_changes(dt):
                    # process any capital changes that came between the last
                    # and current minutes
                    return algo.calculate_capital_changes(
                        dt, emission_rate=emission_rate, is_interday=False)
            else:
                def execute_order_cancellation_policy():
                    pass

                def calculate_minute_capital_changes(dt):
                    return []

            for dt, action in self.clock:
                if action == BAR:
                    for capital_change_packet in every_bar(dt):
                        yield capital_change_packet
                elif action == SESSION_START:
                    for capital_change_packet in once_a_day(dt):
                        yield capital_change_packet
                elif action == SESSION_END:
                    # End of the session.
                    if emission_rate == 'daily':
                        handle_benchmark(normalize_date(dt))
                    execute_order_cancellation_policy()

                    yield self._get_daily_message(dt, algo, algo.perf_tracker)
                elif action == BEFORE_TRADING_START_BAR:
                    self.simulation_dt = dt
                    algo.on_dt_changed(dt)
                    algo.before_trading_start(self.current_data)
                elif action == MINUTE_END:
                    handle_benchmark(dt)
                    minute_msg = \
                        self._get_minute_message(dt, algo, algo.perf_tracker)

                    yield minute_msg

        risk_message = algo.perf_tracker.handle_simulation_end()
        yield risk_message
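The ExitStack block above guarantees cleanup in a fixed order even if the event loop raises: contexts unwind LIFO, so on_exit (registered first) always runs last, after the processor and ZiplineAPI contexts have exited. A minimal standard-library sketch of that ordering:

    from contextlib import ExitStack, contextmanager

    @contextmanager
    def ctx(name):
        print('enter', name)
        yield
        print('exit', name)

    with ExitStack() as stack:
        stack.callback(lambda: print('on_exit'))  # registered first, runs last
        stack.enter_context(ctx('processor'))
        stack.enter_context(ctx('api'))
    # prints: enter processor, enter api, exit api, exit processor, on_exit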
Example #31
    def _call_before_trading_start(self, dt):
        dt = normalize_date(dt)
        self.simulation_dt = dt
        self.on_dt_changed(dt)
        self.algo.before_trading_start()
Example #32
    def test_volume_share_slippage(self):
        assets = (
            (133, pd.DataFrame(
                {
                    'open': [3.00],
                    'high': [3.15],
                    'low': [2.85],
                    'close': [3.00],
                    'volume': [200],
                },
                index=[self.minutes[0]],
            )),
        )
        days = pd.date_range(
            start=normalize_date(self.minutes[0]),
            end=normalize_date(self.minutes[-1])
        )
        with tmp_bcolz_minute_bar_reader(self.env, days, assets) as reader:
            data_portal = DataPortal(
                self.env,
                first_trading_day=reader.first_trading_day,
                equity_minute_reader=reader,
            )

            slippage_model = VolumeShareSlippage()

            open_orders = [
                Order(
                    dt=datetime.datetime(2006, 1, 5, 14, 30, tzinfo=pytz.utc),
                    amount=100,
                    filled=0,
                    sid=self.ASSET133
                )
            ]

            bar_data = BarData(data_portal,
                               lambda: self.minutes[0],
                               'minute')

            orders_txns = list(slippage_model.simulate(
                bar_data,
                self.ASSET133,
                open_orders,
            ))

            self.assertEqual(len(orders_txns), 1)
            _, txn = orders_txns[0]

            expected_txn = {
                'price': float(3.0001875),
                'dt': datetime.datetime(
                    2006, 1, 5, 14, 31, tzinfo=pytz.utc),
                'amount': int(5),
                'sid': int(133),
                'commission': None,
                'type': DATASOURCE_TYPE.TRANSACTION,
                'order_id': open_orders[0].id
            }

            self.assertIsNotNone(txn)

            # TODO: Make expected_txn a Transaction object and ensure there
            # is a __eq__ for that class.
            self.assertEqual(expected_txn, txn.__dict__)

            open_orders = [
                Order(
                    dt=datetime.datetime(2006, 1, 5, 14, 30, tzinfo=pytz.utc),
                    amount=100,
                    filled=0,
                    sid=self.ASSET133
                )
            ]

            # Set bar_data to be a minute ahead of last trade.
            # Volume share slippage should not execute when there is no trade.
            bar_data = BarData(data_portal,
                               lambda: self.minutes[1],
                               'minute')

            orders_txns = list(slippage_model.simulate(
                bar_data,
                self.ASSET133,
                open_orders,
            ))

            self.assertEqual(len(orders_txns), 0)
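The expected fill of 5 shares at 3.0001875 in the test above follows from VolumeShareSlippage's defaults (volume_limit=0.025, price_impact=0.1): the fill is capped at 2.5% of the bar's volume and the price is pushed by price_impact * volume_share ** 2. A worked check of those numbers:

    # Bar: close=3.00, volume=200; open order for 100 shares.
    volume_limit, price_impact = 0.025, 0.1   # VolumeShareSlippage defaults
    close, bar_volume, order_amount = 3.00, 200, 100

    fill = min(order_amount, int(volume_limit * bar_volume))   # -> 5 shares
    volume_share = fill / bar_volume                           # -> 0.025
    impacted_price = close * (1 + price_impact * volume_share ** 2)
    print(fill, impacted_price)                                # 5 3.0001875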
Example #33
    def test_volume_share_slippage(self):
        assets = {
            133: pd.DataFrame(
                {
                    'open': [3.00],
                    'high': [3.15],
                    'low': [2.85],
                    'close': [3.00],
                    'volume': [200],
                },
                index=[self.minutes[0]],
            ),
        }
        days = pd.date_range(
            start=normalize_date(self.minutes[0]),
            end=normalize_date(self.minutes[-1])
        )
        with tmp_bcolz_minute_bar_reader(self.env, days, assets) as reader:
            data_portal = DataPortal(
                self.env,
                equity_minute_reader=reader,
            )

            slippage_model = VolumeShareSlippage()

            open_orders = [
                Order(
                    dt=datetime.datetime(2006, 1, 5, 14, 30, tzinfo=pytz.utc),
                    amount=100,
                    filled=0,
                    sid=self.ASSET133
                )
            ]

            bar_data = BarData(data_portal,
                               lambda: self.minutes[0],
                               'minute')

            orders_txns = list(slippage_model.simulate(
                bar_data,
                self.ASSET133,
                open_orders,
            ))

            self.assertEqual(len(orders_txns), 1)
            _, txn = orders_txns[0]

            expected_txn = {
                'price': float(3.0001875),
                'dt': datetime.datetime(
                    2006, 1, 5, 14, 31, tzinfo=pytz.utc),
                'amount': int(5),
                'sid': int(133),
                'commission': None,
                'type': DATASOURCE_TYPE.TRANSACTION,
                'order_id': open_orders[0].id
            }

            self.assertIsNotNone(txn)

            # TODO: Make expected_txn a Transaction object and ensure there
            # is a __eq__ for that class.
            self.assertEqual(expected_txn, txn.__dict__)

            open_orders = [
                Order(
                    dt=datetime.datetime(2006, 1, 5, 14, 30, tzinfo=pytz.utc),
                    amount=100,
                    filled=0,
                    sid=self.ASSET133
                )
            ]

            # Set bar_data to be a minute ahead of last trade.
            # Volume share slippage should not execute when there is no trade.
            bar_data = BarData(data_portal,
                               lambda: self.minutes[1],
                               'minute')

            orders_txns = list(slippage_model.simulate(
                bar_data,
                self.ASSET133,
                open_orders,
            ))

            self.assertEqual(len(orders_txns), 0)
Example #34
    def transform(self):
        """
        Main generator work loop.
        """
        algo = self.algo
        emission_rate = algo.perf_tracker.emission_rate

        def every_bar(dt_to_use, current_data=self.current_data,
                      handle_data=algo.event_manager.handle_data):
            # called every tick (minute or day).
            algo.on_dt_changed(dt_to_use)

            for capital_change in calculate_minute_capital_changes(dt_to_use):
                yield capital_change

            self.simulation_dt = dt_to_use

            blotter = algo.blotter
            perf_tracker = algo.perf_tracker

            # handle any transactions and commissions coming out of new
            # orders placed in the last bar
            new_transactions, new_commissions, closed_orders = \
                blotter.get_transactions(current_data)

            blotter.prune_orders(closed_orders)

            for transaction in new_transactions:
                perf_tracker.process_transaction(transaction)

                # since this order was modified, record it
                order = blotter.orders[transaction.order_id]
                perf_tracker.process_order(order)

            if new_commissions:
                for commission in new_commissions:
                    perf_tracker.process_commission(commission)

            handle_data(algo, current_data, dt_to_use)

            # grab any new orders from the blotter, then clear the list.
            # this includes cancelled orders.
            new_orders = blotter.new_orders
            blotter.new_orders = []

            # if we have any new orders, record them so that we know
            # in what perf period they were placed.
            if new_orders:
                for new_order in new_orders:
                    perf_tracker.process_order(new_order)

            algo.portfolio_needs_update = True
            algo.account_needs_update = True
            algo.performance_needs_update = True

        def once_a_day(midnight_dt, current_data=self.current_data,
                       data_portal=self.data_portal):

            perf_tracker = algo.perf_tracker

            # Get the positions before updating the date so that prices are
            # fetched for trading close instead of midnight
            positions = algo.perf_tracker.position_tracker.positions
            position_assets = algo.asset_finder.retrieve_all(positions)

            # set all the timestamps
            self.simulation_dt = midnight_dt
            algo.on_dt_changed(midnight_dt)

            # process any capital changes that came overnight
            for capital_change in algo.calculate_capital_changes(
                    midnight_dt, emission_rate=emission_rate,
                    is_interday=True):
                yield capital_change

            # we want to wait until the clock rolls over to the next day
            # before cleaning up expired assets.
            self._cleanup_expired_assets(midnight_dt, position_assets)

            # handle any splits that impact any positions or any open orders.
            assets_we_care_about = \
                viewkeys(perf_tracker.position_tracker.positions) | \
                viewkeys(algo.blotter.open_orders)

            if assets_we_care_about:
                splits = data_portal.get_splits(assets_we_care_about,
                                                midnight_dt)
                if splits:
                    algo.blotter.process_splits(splits)
                    perf_tracker.position_tracker.handle_splits(splits)

        def handle_benchmark(date, benchmark_source=self.benchmark_source):
            algo.perf_tracker.all_benchmark_returns[date] = \
                benchmark_source.get_value(date)

        def on_exit():
            # Remove references to algo, data portal, et al to break cycles
            # and ensure deterministic cleanup of these objects when the
            # simulation finishes.
            self.algo = None
            self.benchmark_source = self.current_data = self.data_portal = None

        with ExitStack() as stack:
            stack.callback(on_exit)
            stack.enter_context(self.processor)
            stack.enter_context(ZiplineAPI(self.algo))

            if algo.data_frequency == 'minute':
                def execute_order_cancellation_policy():
                    algo.blotter.execute_cancel_policy(SESSION_END)

                def calculate_minute_capital_changes(dt):
                    # process any capital changes that came between the last
                    # and current minutes
                    return algo.calculate_capital_changes(
                        dt, emission_rate=emission_rate, is_interday=False)
            else:
                def execute_order_cancellation_policy():
                    pass

                def calculate_minute_capital_changes(dt):
                    return []

            for dt, action in self.clock:
                if action == BAR:
                    for capital_change_packet in every_bar(dt):
                        yield capital_change_packet
                elif action == SESSION_START:
                    for capital_change_packet in once_a_day(dt):
                        yield capital_change_packet
                elif action == SESSION_END:
                    # End of the session.
                    if emission_rate == 'daily':
                        handle_benchmark(normalize_date(dt))
                    execute_order_cancellation_policy()

                    yield self._get_daily_message(dt, algo, algo.perf_tracker)
                elif action == BEFORE_TRADING_START_BAR:
                    self.simulation_dt = dt
                    algo.on_dt_changed(dt)
                    algo.before_trading_start(self.current_data)
                elif action == MINUTE_END:
                    handle_benchmark(dt)
                    minute_msg = \
                        self._get_minute_message(dt, algo, algo.perf_tracker)

                    yield minute_msg

        risk_message = algo.perf_tracker.handle_simulation_end()
        yield risk_message
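Because every_bar and once_a_day are generators in this version, any capital-change packet produced deep inside a bar is re-yielded by the clock loop and so reaches the consumer of transform. A minimal sketch of that delegation pattern:

    def every_bar(dt):
        # a bar may emit zero or more out-of-band packets
        if dt == 2:
            yield {'capital_change': dt}
        # ... normal bar processing would follow ...

    def transform(clock):
        for dt in clock:
            for packet in every_bar(dt):    # re-yield whatever the bar emits
                yield packet
        yield {'risk': 'final'}

    print(list(transform([1, 2, 3])))
    # [{'capital_change': 2}, {'risk': 'final'}]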
Example #35
    def transform(self):
        """
        Main generator work loop.
        """
        algo = self.algo

        def every_bar(dt_to_use, current_data=self.current_data,
                      handle_data=algo.event_manager.handle_data):
            # called every tick (minute or day).

            self.simulation_dt = dt_to_use
            algo.on_dt_changed(dt_to_use)

            blotter = algo.blotter
            perf_tracker = algo.perf_tracker

            # handle any transactions and commissions coming out of new
            # orders placed in the last bar
            new_transactions, new_commissions, closed_orders = \
                blotter.get_transactions(current_data)

            blotter.prune_orders(closed_orders)

            for transaction in new_transactions:
                perf_tracker.process_transaction(transaction)

                # since this order was modified, record it
                order = blotter.orders[transaction.order_id]
                perf_tracker.process_order(order)

            if new_commissions:
                for commission in new_commissions:
                    perf_tracker.process_commission(commission)

            handle_data(algo, current_data, dt_to_use)

            # grab any new orders from the blotter, then clear the list.
            # this includes cancelled orders.
            new_orders = blotter.new_orders
            blotter.new_orders = []

            # if we have any new orders, record them so that we know
            # in what perf period they were placed.
            if new_orders:
                for new_order in new_orders:
                    perf_tracker.process_order(new_order)

            self.algo.portfolio_needs_update = True
            self.algo.account_needs_update = True
            self.algo.performance_needs_update = True

        def once_a_day(midnight_dt, current_data=self.current_data,
                       data_portal=self.data_portal):
            # Get the positions before updating the date so that prices are
            # fetched for trading close instead of midnight
            positions = algo.perf_tracker.position_tracker.positions
            position_assets = algo.asset_finder.retrieve_all(positions)

            # set all the timestamps
            self.simulation_dt = midnight_dt
            algo.on_dt_changed(midnight_dt)

            # we want to wait until the clock rolls over to the next day
            # before cleaning up expired assets.
            self._cleanup_expired_assets(midnight_dt, position_assets)

            perf_tracker = algo.perf_tracker

            # handle any splits that impact any positions or any open orders.
            assets_we_care_about = \
                viewkeys(perf_tracker.position_tracker.positions) | \
                viewkeys(algo.blotter.open_orders)

            if assets_we_care_about:
                splits = data_portal.get_splits(assets_we_care_about,
                                                midnight_dt)
                if splits:
                    algo.blotter.process_splits(splits)
                    perf_tracker.position_tracker.handle_splits(splits)

            # call before trading start
            algo.before_trading_start(current_data)

        def handle_benchmark(date, benchmark_source=self.benchmark_source):
            algo.perf_tracker.all_benchmark_returns[date] = \
                benchmark_source.get_value(date)

        def on_exit():
            self.benchmark_source = self.current_data = self.data_portal = None

        with ExitStack() as stack:
            stack.callback(on_exit)
            stack.enter_context(self.processor)
            stack.enter_context(ZiplineAPI(self.algo))

            if algo.data_frequency == 'minute':
                def execute_order_cancellation_policy():
                    algo.blotter.execute_cancel_policy(DAY_END)
            else:
                def execute_order_cancellation_policy():
                    pass

            for dt, action in self.clock:
                if action == BAR:
                    every_bar(dt)
                elif action == DAY_START:
                    once_a_day(dt)
                elif action == DAY_END:
                    # End of the day.
                    if algo.perf_tracker.emission_rate == 'daily':
                        handle_benchmark(normalize_date(dt))
                    execute_order_cancellation_policy()

                    yield self._get_daily_message(dt, algo, algo.perf_tracker)
                elif action == MINUTE_END:
                    handle_benchmark(dt)
                    minute_msg = \
                        self._get_minute_message(dt, algo, algo.perf_tracker)

                    yield minute_msg

        risk_message = algo.perf_tracker.handle_simulation_end()
        yield risk_message
Example #36
    def closes(self, assets, dt):
        """
        The close field's aggregation returns the latest close at the given
        dt.
        If the close for the given dt is `nan`, the most recent non-nan
        `close` is used.
        If there has been no data on or before the `dt`, the close is `nan`.

        Returns
        -------
        np.array with dtype=float64, in the order of the assets parameter.
        """
        market_open, prev_dt, dt_value, entries = self._prelude(dt, 'close')

        closes = []
        normalized_dt = normalize_date(dt)

        for asset in assets:
            if not asset._is_alive(normalized_dt, True):
                closes.append(np.NaN)
                continue

            if prev_dt is None:
                val = self._minute_reader.get_value(asset, dt, 'close')
                entries[asset] = (dt_value, val)
                closes.append(val)
                continue
            else:
                try:
                    last_visited_dt, last_close = entries[asset]
                    if last_visited_dt == dt_value:
                        closes.append(last_close)
                        continue
                    elif last_visited_dt == prev_dt:
                        val = self._minute_reader.get_value(
                            asset, dt, 'close')
                        if pd.isnull(val):
                            val = last_close
                        entries[asset] = (dt_value, val)
                        closes.append(val)
                        continue
                    else:
                        val = self._minute_reader.get_value(
                            asset, dt, 'close')
                        if pd.isnull(val):
                            val = self.closes(
                                [asset],
                                pd.Timestamp(prev_dt, tz='UTC'))[0]
                        entries[asset] = (dt_value, val)
                        closes.append(val)
                        continue
                except KeyError:
                    val = self._minute_reader.get_value(
                        asset, dt, 'close')
                    if pd.isnull(val):
                        val = self.closes([asset],
                                          pd.Timestamp(prev_dt, tz='UTC'))[0]
                    entries[asset] = (dt_value, val)
                    closes.append(val)
                    continue
        return np.array(closes)
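closes recurses to the previous minute whenever the current value is NaN, which amounts to forward-filling the last known close. A compact sketch of the same idea over a plain array (hypothetical data):

    import numpy as np

    def last_known_close(closes, idx):
        # walk back from idx until a non-NaN close is found (else NaN)
        for i in range(idx, -1, -1):
            if not np.isnan(closes[i]):
                return closes[i]
        return np.nan

    closes = np.array([3.00, np.nan, np.nan, 3.10])
    print(last_known_close(closes, 2))   # 3.0, filled from the first bar
    print(last_known_close(closes, 3))   # 3.1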
Example #37
    def _get_adjustments_in_range(self, asset, dts, field):
        """
        Get the Float64Multiply objects to pass to an AdjustedArrayWindow.

        For use by AdjustedArrayWindow in the loader, which looks back from
        the current simulation time over a window of data, the dictionary is
        structured as follows:
        - the key into the dictionary for adjustments is the location of the
        day from which the window is being viewed.
        - the start of all multiply objects is always 0 (in each window all
          adjustments are overlapping)
        - the end of the multiply object is the location before the calendar
          location of the adjustment action, making all days before the event
          adjusted.

        Parameters
        ----------
        asset : Asset
            The asset for which to get adjustments.
        dts : iterable of datetime64-like
            The dts for which adjustment data is needed.
        field : str
            OHLCV field for which to get the adjustments.

        Returns
        -------
        out : dict[loc -> list[Float64Multiply]]
            The adjustments as a dict mapping each window location to its
            list of Float64Multiply objects.
        """
        sid = int(asset)
        start = normalize_date(dts[0])
        end = normalize_date(dts[-1])
        adjs = {}
        if field != 'volume':
            mergers = self._adjustments_reader.get_adjustments_for_sid(
                'mergers', sid)
            for m in mergers:
                dt = m[0]
                if start < dt <= end:
                    end_loc = dts.searchsorted(dt)
                    adj_loc = end_loc
                    mult = Float64Multiply(0,
                                           end_loc - 1,
                                           0,
                                           0,
                                           m[1])
                    try:
                        adjs[adj_loc].append(mult)
                    except KeyError:
                        adjs[adj_loc] = [mult]
            divs = self._adjustments_reader.get_adjustments_for_sid(
                'dividends', sid)
            for d in divs:
                dt = d[0]
                if start < dt <= end:
                    end_loc = dts.searchsorted(dt)
                    adj_loc = end_loc
                    mult = Float64Multiply(0,
                                           end_loc - 1,
                                           0,
                                           0,
                                           d[1])
                    try:
                        adjs[adj_loc].append(mult)
                    except KeyError:
                        adjs[adj_loc] = [mult]
        splits = self._adjustments_reader.get_adjustments_for_sid(
            'splits', sid)
        for s in splits:
            dt = s[0]
            if start < dt <= end:
                if field == 'volume':
                    ratio = 1.0 / s[1]
                else:
                    ratio = s[1]
                end_loc = dts.searchsorted(dt)
                adj_loc = end_loc
                mult = Float64Multiply(0,
                                       end_loc - 1,
                                       0,
                                       0,
                                       ratio)
                try:
                    adjs[adj_loc].append(mult)
                except KeyError:
                    adjs[adj_loc] = [mult]
        return adjs
Example #38
    def transform(self):
        """
        Main generator work loop.
        """
        algo = self.algo

        def every_bar(dt_to_use, current_data=self.current_data,
                      handle_data=algo.event_manager.handle_data):
            # called every tick (minute or day).

            if dt_to_use in algo.capital_changes:
                process_minute_capital_changes(dt_to_use)

            self.simulation_dt = dt_to_use
            algo.on_dt_changed(dt_to_use)

            blotter = algo.blotter
            perf_tracker = algo.perf_tracker

            # handle any transactions and commissions coming out of new
            # orders placed in the last bar
            new_transactions, new_commissions, closed_orders = \
                blotter.get_transactions(current_data)

            blotter.prune_orders(closed_orders)

            for transaction in new_transactions:
                perf_tracker.process_transaction(transaction)

                # since this order was modified, record it
                order = blotter.orders[transaction.order_id]
                perf_tracker.process_order(order)

            if new_commissions:
                for commission in new_commissions:
                    perf_tracker.process_commission(commission)

            handle_data(algo, current_data, dt_to_use)

            # grab any new orders from the blotter, then clear the list.
            # this includes cancelled orders.
            new_orders = blotter.new_orders
            blotter.new_orders = []

            # if we have any new orders, record them so that we know
            # in what perf period they were placed.
            if new_orders:
                for new_order in new_orders:
                    perf_tracker.process_order(new_order)

            algo.portfolio_needs_update = True
            algo.account_needs_update = True
            algo.performance_needs_update = True

        def once_a_day(midnight_dt, current_data=self.current_data,
                       data_portal=self.data_portal):

            perf_tracker = algo.perf_tracker

            if midnight_dt in algo.capital_changes:
                # process any capital changes that came overnight
                change = algo.capital_changes[midnight_dt]
                log.info('Processing capital change of %s at %s' %
                         (change, midnight_dt))
                perf_tracker.process_capital_changes(change, is_interday=True)

            # Get the positions before updating the date so that prices are
            # fetched for trading close instead of midnight
            positions = algo.perf_tracker.position_tracker.positions
            position_assets = algo.asset_finder.retrieve_all(positions)

            # set all the timestamps
            self.simulation_dt = midnight_dt
            algo.on_dt_changed(midnight_dt)

            # we want to wait until the clock rolls over to the next day
            # before cleaning up expired assets.
            self._cleanup_expired_assets(midnight_dt, position_assets)

            # handle any splits that impact any positions or any open orders.
            assets_we_care_about = \
                viewkeys(perf_tracker.position_tracker.positions) | \
                viewkeys(algo.blotter.open_orders)

            if assets_we_care_about:
                splits = data_portal.get_splits(assets_we_care_about,
                                                midnight_dt)
                if splits:
                    algo.blotter.process_splits(splits)
                    perf_tracker.position_tracker.handle_splits(splits)

            # call before trading start
            algo.before_trading_start(current_data)

        def handle_benchmark(date, benchmark_source=self.benchmark_source):
            algo.perf_tracker.all_benchmark_returns[date] = \
                benchmark_source.get_value(date)

        def on_exit():
            # Remove references to algo, data portal, et al to break cycles
            # and ensure deterministic cleanup of these objects when the
            # simulation finishes.
            self.algo = None
            self.benchmark_source = self.current_data = self.data_portal = None

        with ExitStack() as stack:
            stack.callback(on_exit)
            stack.enter_context(self.processor)
            stack.enter_context(ZiplineAPI(self.algo))

            if algo.data_frequency == 'minute':
                def execute_order_cancellation_policy():
                    algo.blotter.execute_cancel_policy(DAY_END)

                def process_minute_capital_changes(dt):
                    # If we are running daily emission, prices won't
                    # necessarily be synced at the end of every minute, and we
                    # need the up-to-date prices for capital change
                    # calculations. We want to sync the prices as of the
                    # last market minute, and this is okay from a data portal
                    # perspective as we have technically not "advanced" to the
                    # current dt yet.
                    algo.perf_tracker.position_tracker.sync_last_sale_prices(
                        self.algo.trading_calendar.previous_minute(dt),
                        False,
                        self.data_portal
                    )

                    # process any capital changes that came between the last
                    # and current minutes
                    change = algo.capital_changes[dt]
                    log.info('Processing capital change of %s at %s' %
                             (change, dt))
                    algo.perf_tracker.process_capital_changes(
                        change,
                        is_interday=False
                    )
            else:
                def execute_order_cancellation_policy():
                    pass

                def process_minute_capital_changes(dt):
                    pass

            for dt, action in self.clock:
                if action == BAR:
                    every_bar(dt)
                elif action == DAY_START:
                    once_a_day(dt)
                elif action == DAY_END:
                    # End of the day.
                    if algo.perf_tracker.emission_rate == 'daily':
                        handle_benchmark(normalize_date(dt))
                    execute_order_cancellation_policy()

                    yield self._get_daily_message(dt, algo, algo.perf_tracker)
                elif action == MINUTE_END:
                    handle_benchmark(dt)
                    minute_msg = \
                        self._get_minute_message(dt, algo, algo.perf_tracker)

                    yield minute_msg

        risk_message = algo.perf_tracker.handle_simulation_end()
        yield risk_message
Example #39
    def transform(self):
        """
        Main generator work loop.
        """
        algo = self.algo

        def every_bar(dt_to_use, current_data=self.current_data,
                      handle_data=algo.event_manager.handle_data):
            # called every tick (minute or day).

            if dt_to_use in algo.capital_changes:
                process_minute_capital_changes(dt_to_use)

            self.simulation_dt = dt_to_use
            algo.on_dt_changed(dt_to_use)

            blotter = algo.blotter
            perf_tracker = algo.perf_tracker

            # handle any transactions and commissions coming out of new
            # orders placed in the last bar
            new_transactions, new_commissions, closed_orders = \
                blotter.get_transactions(current_data)

            blotter.prune_orders(closed_orders)

            for transaction in new_transactions:
                perf_tracker.process_transaction(transaction)

                # since this order was modified, record it
                order = blotter.orders[transaction.order_id]
                perf_tracker.process_order(order)

            if new_commissions:
                for commission in new_commissions:
                    perf_tracker.process_commission(commission)

            handle_data(algo, current_data, dt_to_use)

            # grab any new orders from the blotter, then clear the list.
            # this includes cancelled orders.
            new_orders = blotter.new_orders
            blotter.new_orders = []

            # if we have any new orders, record them so that we know
            # in what perf period they were placed.
            if new_orders:
                for new_order in new_orders:
                    perf_tracker.process_order(new_order)

            self.algo.portfolio_needs_update = True
            self.algo.account_needs_update = True
            self.algo.performance_needs_update = True

        def once_a_day(midnight_dt, current_data=self.current_data,
                       data_portal=self.data_portal):

            perf_tracker = algo.perf_tracker

            if midnight_dt in algo.capital_changes:
                # process any capital changes that came overnight
                change = algo.capital_changes[midnight_dt]
                log.info('Processing capital change of %s at %s' %
                         (change, midnight_dt))
                perf_tracker.process_capital_changes(change, is_interday=True)

            # Get the positions before updating the date so that prices are
            # fetched for trading close instead of midnight
            positions = algo.perf_tracker.position_tracker.positions
            position_assets = algo.asset_finder.retrieve_all(positions)

            # set all the timestamps
            self.simulation_dt = midnight_dt
            algo.on_dt_changed(midnight_dt)

            # we want to wait until the clock rolls over to the next day
            # before cleaning up expired assets.
            self._cleanup_expired_assets(midnight_dt, position_assets)

            # handle any splits that impact any positions or any open orders.
            assets_we_care_about = \
                viewkeys(perf_tracker.position_tracker.positions) | \
                viewkeys(algo.blotter.open_orders)

            if assets_we_care_about:
                splits = data_portal.get_splits(assets_we_care_about,
                                                midnight_dt)
                if splits:
                    algo.blotter.process_splits(splits)
                    perf_tracker.position_tracker.handle_splits(splits)

            # call before trading start
            algo.before_trading_start(current_data)

        def handle_benchmark(date, benchmark_source=self.benchmark_source):
            algo.perf_tracker.all_benchmark_returns[date] = \
                benchmark_source.get_value(date)

        def on_exit():
            self.benchmark_source = self.current_data = self.data_portal = None

        with ExitStack() as stack:
            stack.callback(on_exit)
            stack.enter_context(self.processor)
            stack.enter_context(ZiplineAPI(self.algo))

            if algo.data_frequency == 'minute':
                def execute_order_cancellation_policy():
                    algo.blotter.execute_cancel_policy(DAY_END)

                def process_minute_capital_changes(dt):
                    # If we are running daily emission, prices won't
                    # necessarily be synced at the end of every minute, and we
                    # need the up-to-date prices for capital change
                    # calculations. We want to sync the prices as of the
                    # last market minute, and this is okay from a data portal
                    # perspective as we have technically not "advanced" to the
                    # current dt yet.
                    algo.perf_tracker.position_tracker.sync_last_sale_prices(
                        self.env.previous_market_minute(dt),
                        False,
                        self.data_portal
                    )

                    # process any capital changes that came between the last
                    # and current minutes
                    change = algo.capital_changes[dt]
                    log.info('Processing capital change of %s at %s' %
                             (change, dt))
                    algo.perf_tracker.process_capital_changes(
                        change,
                        is_interday=False
                    )
            else:
                def execute_order_cancellation_policy():
                    pass

                def process_minute_capital_changes(dt):
                    pass

            for dt, action in self.clock:
                if action == BAR:
                    every_bar(dt)
                elif action == DAY_START:
                    once_a_day(dt)
                elif action == DAY_END:
                    # End of the day.
                    if algo.perf_tracker.emission_rate == 'daily':
                        handle_benchmark(normalize_date(dt))
                    execute_order_cancellation_policy()

                    yield self._get_daily_message(dt, algo, algo.perf_tracker)
                elif action == MINUTE_END:
                    handle_benchmark(dt)
                    minute_msg = \
                        self._get_minute_message(dt, algo, algo.perf_tracker)

                    yield minute_msg

        risk_message = algo.perf_tracker.handle_simulation_end()
        yield risk_message
Example #40
    def _call_before_trading_start(self, dt):
        dt = normalize_date(dt)
        self.simulation_dt = dt
        self.on_dt_changed(dt)
        self.algo.before_trading_start()
Example #41
    def __init__(self, algo, sim_params):

        # ==============
        # Simulation
        # Param Setup
        # ==============
        self.sim_params = sim_params

        # ==============
        # Algo Setup
        # ==============
        self.algo = algo
        self.algo_start = normalize_date(self.sim_params.first_open)
        self.env = algo.trading_environment

        # ==============
        # Snapshot Setup
        # ==============

        _day = timedelta(days=1)

        def _get_removal_date(sid,
                              finder=self.env.asset_finder,
                              default=self.sim_params.last_close + _day):
            """
            Get the date of the morning on which we should remove an asset from
            data.

            If we don't have an auto_close_date, this is just the end of the
            simulation.

            If we have an auto_close_date, then we remove assets from data on
            max(asset.auto_close_date, asset.end_date + timedelta(days=1))

            We hold assets at least until auto_close_date because up until that
            date the user might still hold positions or have open orders in an
            expired asset.

            We hold assets at least until end_date + 1, because an asset
            continues trading until the **end** of its end_date.  Even if an
            asset auto-closed before the end_date (say, because Interactive
            Brokers clears futures positions prior to the actual notice or
            expiration), there may still be trades arriving that represent
            signals for other assets that are still tradeable. (Particularly in
            the futures case, trading in the final days of a contract is
            likely relevant for trading the next contract on the same futures
            chain.)
            """
            try:
                asset = finder.retrieve_asset(sid)
            except ValueError:
                # Handle sid not an int, such as from a custom source.
                # So that they don't compare equal to other sids, and we'd
                # blow up comparing strings to ints, let's give them unique
                # close dates.
                return default + timedelta(microseconds=id(sid))
            except SidsNotFound:
                return default

            auto_close_date = asset.auto_close_date
            if auto_close_date is None:
                # If we don't have an auto_close_date, we never remove an asset
                # from the user's portfolio.
                return default

            end_date = asset.end_date
            if end_date is None:
                # If we have an auto_close_date but not an end_date, clear the
                # asset from data when we clear positions/orders.
                return auto_close_date

            # If we have both, make close once we're on or after the
            # auto_close_date, and strictly after the end_date.
            # See docstring above for an explanation of this logic.
            return max(auto_close_date, end_date + _day)

        self._get_removal_date = _get_removal_date

        # The algorithm's data as of our most recent event.
        # Maintain sids in order by asset close date, so that we can more
        # efficiently remove them when their times come.
        self.current_data = BarData(SortedDict(self._get_removal_date))
        # We don't have a datetime for the current snapshot until we
        # receive a message.
        self.simulation_dt = None

        # =============
        # Logging Setup
        # =============

        # Processor function for injecting the algo_dt into
        # user prints/logs.
        def inject_algo_dt(record):
            if 'algo_dt' not in record.extra:
                record.extra['algo_dt'] = self.simulation_dt

        self.processor = Processor(inject_algo_dt)
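Stripped of the asset-finder plumbing, the removal-date rule the docstring describes reduces to a max over two dates. A standalone sketch (hypothetical helper, datetime only):

    from datetime import timedelta

    _day = timedelta(days=1)

    def removal_date(auto_close_date, end_date, default):
        if auto_close_date is None:
            # never auto-closes: keep until the simulation ends
            return default
        if end_date is None:
            # auto-closes but no end_date: drop when positions/orders clear
            return auto_close_date
        # hold through the last trading day *and* the close-out date
        return max(auto_close_date, end_date + _day)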