Example #1
    def test_force_registration(self):
        register_calendar("DMY", self.dummy_cal_type())
        first_dummy = get_calendar("DMY")

        # force-register a new instance
        register_calendar("DMY", self.dummy_cal_type(), force=True)

        second_dummy = get_calendar("DMY")

        self.assertNotEqual(first_dummy, second_dummy)
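
A note on what this test implies: without force=True, a second registration under the same name collides (see Example #3), while force=True swaps in the new instance. Below is a minimal dict-backed sketch of those registry semantics; the exception names appear in Example #3, but this implementation is illustrative, not the library's actual code:

class CalendarNameCollision(Exception):
    pass

class InvalidCalendarName(Exception):
    pass

# Hypothetical module-level registry; illustrative only.
_calendars = {}

def register_calendar(name, calendar, force=False):
    if name in _calendars and not force:
        raise CalendarNameCollision(name)
    # force=True silently replaces the previous instance, which is
    # why the test above retrieves two distinct objects.
    _calendars[name] = calendar

def deregister_calendar(name):
    _calendars.pop(name, None)

def get_calendar(name):
    try:
        return _calendars[name]
    except KeyError:
        raise InvalidCalendarName(name)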
Example #2
    def read(cls, rootdir):
        path = cls.metadata_path(rootdir)
        with open(path) as fp:
            raw_data = json.load(fp)

            try:
                version = raw_data['version']
            except KeyError:
                # The 'version' key was first written in version 1;
                # assume 0 if the key is missing.
                version = 0

            default_ohlc_ratio = raw_data['ohlc_ratio']

            if version >= 1:
                minutes_per_day = raw_data['minutes_per_day']
            else:
                # version 0 always assumed US equities.
                minutes_per_day = US_EQUITIES_MINUTES_PER_DAY

            if version >= 2:
                calendar = get_calendar(raw_data['calendar_name'])
                start_session = pd.Timestamp(raw_data['start_session'],
                                             tz='UTC')
                end_session = pd.Timestamp(raw_data['end_session'], tz='UTC')
            else:
                # No calendar info included in older versions, so
                # default to NYSE.
                calendar = get_calendar('NYSE')

                start_session = pd.Timestamp(raw_data['first_trading_day'],
                                             tz='UTC')
                end_session = calendar.minute_to_session_label(
                    pd.Timestamp(raw_data['market_closes'][-1],
                                 unit='m',
                                 tz='UTC'))

            if version >= 3:
                ohlc_ratios_per_sid = raw_data['ohlc_ratios_per_sid']
                if ohlc_ratios_per_sid is not None:
                    ohlc_ratios_per_sid = keymap(int, ohlc_ratios_per_sid)
            else:
                ohlc_ratios_per_sid = None

            return cls(
                default_ohlc_ratio,
                ohlc_ratios_per_sid,
                calendar,
                start_session,
                end_session,
                minutes_per_day,
                version=version,
            )
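
For orientation, a version-3 metadata payload that this read method would accept might look like the following; the keys mirror those accessed above, while the concrete values are illustrative assumptions:

# Illustrative version-3 metadata; keys match the reader above,
# values are made up for demonstration.
version_3_metadata = {
    'version': 3,
    'ohlc_ratio': 1000,
    'minutes_per_day': 390,
    'calendar_name': 'NYSE',
    'start_session': '2014-01-02',
    'end_session': '2014-12-31',
    # keymap(int, ...) converts the string keys to int sids.
    'ohlc_ratios_per_sid': {'1': 1000, '2': 100},
}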
Example #3
    def test_register_calendar(self):
        # Build a fake calendar
        dummy_cal = self.dummy_cal_type()

        # Try to register and retrieve the calendar
        register_calendar('DMY', dummy_cal)
        retr_cal = get_calendar('DMY')
        self.assertEqual(dummy_cal, retr_cal)

        # Try to register again, expecting a name collision
        with self.assertRaises(CalendarNameCollision):
            register_calendar('DMY', dummy_cal)

        # Deregister the calendar and ensure that it is removed
        deregister_calendar('DMY')
        with self.assertRaises(InvalidCalendarName):
            get_calendar('DMY')
Example #4
    def __init__(self, raw_price_loader, adjustments_loader):
        self.raw_price_loader = raw_price_loader
        self.adjustments_loader = adjustments_loader

        cal = self.raw_price_loader.trading_calendar or \
            get_calendar("NYSE")

        self._all_sessions = cal.all_sessions
Example #5
    def __init__(self, env, trading_calendar=None,
                 first_trading_day=None):
        if trading_calendar is None:
            trading_calendar = get_calendar("NYSE")

        super(FakeDataPortal, self).__init__(env.asset_finder,
                                             trading_calendar,
                                             first_trading_day)
Example #6
    def setUpClass(cls):
        cls.nyse_calendar = get_calendar("NYSE")

        # July 15, 2016 is a Friday, so there are 3 sessions in this range (15, 18, 19).
        cls.sessions = cls.nyse_calendar.sessions_in_range(
            pd.Timestamp("2016-07-15"), pd.Timestamp("2016-07-19"))

        trading_o_and_c = cls.nyse_calendar.schedule.loc[cls.sessions]
        cls.opens = trading_o_and_c['market_open']
        cls.closes = trading_o_and_c['market_close']
Example #7
def gen_calendars(start, stop, critical_dates):
    """
    Generate calendars to use as inputs.
    """
    all_dates = pd.date_range(start, stop, tz='utc')
    for to_drop in map(list, powerset(critical_dates)):
        # Have to yield tuples.
        yield (all_dates.drop(to_drop),)

    # Also test with the trading calendar.
    trading_days = get_calendar("NYSE").all_days
    yield (trading_days[trading_days.slice_indexer(start, stop)],)
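
The powerset helper is not shown in this snippet; presumably it is the standard itertools recipe, reproduced here for reference (an assumption, not part of the source):

from itertools import chain, combinations

def powerset(iterable):
    # Standard itertools recipe:
    # powerset([1, 2]) -> (), (1,), (2,), (1, 2)
    s = list(iterable)
    return chain.from_iterable(
        combinations(s, r) for r in range(len(s) + 1))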
Example #8
    def setUp(self):
        self.trading_day = get_calendar("NYSE").day

        self.nsids = 5
        self.ndates = 20

        self.sids = Int64Index(range(self.nsids))
        self.dates = DatetimeIndex(
            start='2014-01-02',
            freq=self.trading_day,
            periods=self.ndates,
        )

        self.mask = ones((len(self.dates), len(self.sids)), dtype=bool)
Example #9
    def sessions(self):
        if 'calendar' in self._table.attrs.attrs:
            # Backwards compatibility with old formats; will be removed.
            return DatetimeIndex(self._table.attrs['calendar'], tz='UTC')
        else:
            cal = get_calendar(self._table.attrs['calendar_name'])
            start_session_ns = self._table.attrs['start_session_ns']
            start_session = Timestamp(start_session_ns, tz='UTC')

            end_session_ns = self._table.attrs['end_session_ns']
            end_session = Timestamp(end_session_ns, tz='UTC')

            sessions = cal.sessions_in_range(start_session, end_session)

            return sessions
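
The *_session_ns attrs are evidently epoch nanoseconds: pandas interprets a bare integer passed to Timestamp as nanoseconds since the Unix epoch. A self-contained round trip demonstrating this:

from pandas import Timestamp

session = Timestamp('2016-07-15', tz='UTC')
ns = session.value  # epoch nanoseconds
assert Timestamp(ns, tz='UTC') == session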
Example #10
    def setUpClass(cls):
        # On the AfterOpen and BeforeClose tests, we want to ensure that the
        # functions are pure, and that running them with the same input will
        # provide the same output, regardless of whether the function is run 1
        # or N times. (For performance reasons, we cache some internal state
        # in AfterOpen and BeforeClose, but we don't want it to affect
        # purity). Hence, we use the same before_close and after_open across
        # subtests.
        cls.before_close = BeforeClose(hours=1, minutes=5)
        cls.after_open = AfterOpen(hours=1, minutes=5)
        cls.class_ = None  # Mark that this is the base class.

        cal = get_calendar(cls.CALENDAR_STRING)
        cls.before_close.cal = cal
        cls.after_open.cal = cal
Example #11
def run_example(example_name, environ):
    """
    Run an example module from gateway.examples.
    """
    mod = EXAMPLE_MODULES[example_name]

    register_calendar("YAHOO", get_calendar("NYSE"), force=True)

    return run_algorithm(
        initialize=getattr(mod, 'initialize', None),
        handle_data=getattr(mod, 'handle_data', None),
        before_trading_start=getattr(mod, 'before_trading_start', None),
        analyze=getattr(mod, 'analyze', None),
        bundle='test',
        environ=environ,
        # Provide a default capital base, but allow the test to override.
        **merge({'capital_base': 1e7}, mod._test_args()))
Example #12
    def setUpClass(cls):
        super(StatelessRulesTests, cls).setUpClass()

        cls.class_ = StatelessRule
        cls.cal = get_calendar(cls.CALENDAR_STRING)

        # The first day of 09/2014 (Labor Day) is closed, whereas the
        # first day of 10/2014 is open.
        cls.sept_sessions = cls.cal.sessions_in_range(
            pd.Timestamp('2014-09-01', tz='UTC'),
            pd.Timestamp('2014-09-30', tz='UTC'),
        )
        cls.oct_sessions = cls.cal.sessions_in_range(
            pd.Timestamp('2014-10-01', tz='UTC'),
            pd.Timestamp('2014-10-31', tz='UTC'),
        )

        cls.sept_week = cls.cal.minutes_for_sessions_in_range(
            pd.Timestamp("2014-09-22", tz='UTC'),
            pd.Timestamp("2014-09-26", tz='UTC'))

        cls.HALF_SESSION = None
        cls.FULL_SESSION = None
Example #13
    def __init__(
        self,
        load=None,
        bm_symbol='SPY',
        exchange_tz="US/Eastern",
        trading_calendar=None,
        asset_db_path=':memory:',
        future_chain_predicates=CHAIN_PREDICATES,
        environ=None,
    ):

        self.bm_symbol = bm_symbol
        if not load:
            load = partial(load_market_data, environ=environ)

        if not trading_calendar:
            trading_calendar = get_calendar("NYSE")

        self.benchmark_returns, self.treasury_curves = load(
            trading_calendar.day,
            trading_calendar.schedule.index,
            self.bm_symbol,
        )

        self.exchange_tz = exchange_tz

        if isinstance(asset_db_path, string_types):
            asset_db_path = 'sqlite:///' + asset_db_path
            self.engine = engine = create_engine(asset_db_path)
        else:
            self.engine = engine = asset_db_path

        if engine is not None:
            AssetDBWriter(engine).init_db()
            self.asset_finder = AssetFinder(
                engine, future_chain_predicates=future_chain_predicates)
        else:
            self.asset_finder = None
Example #14
    def test_write_attrs(self):
        result = self.bcolz_daily_bar_ctable
        expected_first_row = {
            '1': 0,
            '2': 5,  # Asset 1 has 5 trading days.
            '3': 12,  # Asset 2 has 7 trading days.
            '4': 33,  # Asset 3 has 21 trading days.
            '5': 44,  # Asset 4 has 11 trading days.
            '6': 49,  # Asset 5 has 5 trading days.
        }
        expected_last_row = {
            '1': 4,
            '2': 11,
            '3': 32,
            '4': 43,
            '5': 48,
            '6': 57,  # Asset 6 has 9 trading days.
        }
        expected_calendar_offset = {
            '1': 0,  # Starts on 6-01, 1st trading day of month.
            '2': 15,  # Starts on 6-22, 16th trading day of month.
            '3': 1,  # Starts on 6-02, 2nd trading day of month.
            '4': 0,  # Starts on 6-01, 1st trading day of month.
            '5': 9,  # Starts on 6-12, 10th trading day of month.
            '6': 10,  # Starts on 6-15, 11th trading day of month.
        }
        self.assertEqual(result.attrs['first_row'], expected_first_row)
        self.assertEqual(result.attrs['last_row'], expected_last_row)
        self.assertEqual(
            result.attrs['calendar_offset'],
            expected_calendar_offset,
        )
        cal = get_calendar(result.attrs['calendar_name'])
        first_session = Timestamp(result.attrs['start_session_ns'], tz='UTC')
        end_session = Timestamp(result.attrs['end_session_ns'], tz='UTC')
        sessions = cal.sessions_in_range(first_session, end_session)

        assert_index_equal(self.sessions, sessions)
Example #15
def create_simulation_parameters(year=2006,
                                 start=None,
                                 end=None,
                                 capital_base=float("1.0e5"),
                                 num_days=None,
                                 data_frequency='daily',
                                 emission_rate='daily',
                                 trading_calendar=None):

    if not trading_calendar:
        trading_calendar = get_calendar("NYSE")

    if start is None:
        start = pd.Timestamp("{0}-01-01".format(year), tz='UTC')
    elif type(start) == datetime:
        start = pd.Timestamp(start)

    if end is None:
        if num_days:
            start_index = trading_calendar.all_sessions.searchsorted(start)
            end = trading_calendar.all_sessions[start_index + num_days - 1]
        else:
            end = pd.Timestamp("{0}-12-31".format(year), tz='UTC')
    elif type(end) == datetime:
        end = pd.Timestamp(end)

    sim_params = SimulationParameters(
        start_session=start,
        end_session=end,
        capital_base=capital_base,
        data_frequency=data_frequency,
        emission_rate=emission_rate,
        trading_calendar=trading_calendar,
    )

    return sim_params
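
The num_days branch finds the end session by position rather than by date arithmetic. A self-contained pandas illustration of that searchsorted logic, using plain business days as a stand-in for the calendar's sessions:

import pandas as pd

# Stand-in for trading_calendar.all_sessions.
sessions = pd.date_range('2006-01-01', '2006-12-31', freq='B', tz='UTC')
start = pd.Timestamp('2006-01-03', tz='UTC')
num_days = 10

start_index = sessions.searchsorted(start)
end = sessions[start_index + num_days - 1]  # the 10th session on/after start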
Example #16
    def init_class_fixtures(cls):
        super(WithPanelBarReader, cls).init_class_fixtures()

        finder = cls.asset_finder
        trading_calendar = get_calendar('NYSE')

        items = finder.retrieve_all(finder.sids)
        major_axis = (trading_calendar.sessions_in_range
                      if cls.FREQUENCY == 'daily' else
                      trading_calendar.minutes_for_sessions_in_range)(
                          cls.START_DATE, cls.END_DATE)
        minor_axis = ['open', 'high', 'low', 'close', 'volume']

        shape = tuple(map(len, [items, major_axis, minor_axis]))
        raw_data = np.arange(shape[0] * shape[1] * shape[2]).reshape(shape)

        cls.panel = pd.Panel(
            raw_data,
            items=items,
            major_axis=major_axis,
            minor_axis=minor_axis,
        )

        cls.reader = PanelBarReader(trading_calendar, cls.panel, cls.FREQUENCY)
Example #17
class ClosesAndVolumes(WithDataPortal, GatewayTestCase):
    sids = 1, 2, 3
    START_DATE = pd.Timestamp('2014-01-01', tz='utc')
    END_DATE = pd.Timestamp('2014-02-01', tz='utc')
    dates = date_range(START_DATE,
                       END_DATE,
                       freq=get_calendar("NYSE").day,
                       tz='utc')

    @classmethod
    def make_equity_info(cls):
        cls.equity_info = ret = DataFrame.from_records([
            {
                'sid': 1,
                'symbol': 'A',
                'start_date': cls.dates[10],
                'end_date': cls.dates[13],
                'exchange': 'TEST',
            },
            {
                'sid': 2,
                'symbol': 'B',
                'start_date': cls.dates[11],
                'end_date': cls.dates[14],
                'exchange': 'TEST',
            },
            {
                'sid': 3,
                'symbol': 'C',
                'start_date': cls.dates[12],
                'end_date': cls.dates[15],
                'exchange': 'TEST',
            },
        ])
        return ret

    @classmethod
    def make_equity_daily_bar_data(cls):
        cls.closes = DataFrame(
            {sid: arange(1,
                         len(cls.dates) + 1) * sid
             for sid in cls.sids},
            index=cls.dates,
            dtype=float,
        )
        cls.volumes = cls.closes * 1000
        for sid in cls.sids:
            yield sid, DataFrame(
                {
                    'open': cls.closes[sid].values,
                    'high': cls.closes[sid].values,
                    'low': cls.closes[sid].values,
                    'close': cls.closes[sid].values,
                    'volume': cls.volumes[sid].values,
                },
                index=cls.dates,
            )

    @classmethod
    def init_class_fixtures(cls):
        super(ClosesAndVolumes, cls).init_class_fixtures()
        cls.first_asset_start = min(cls.equity_info.start_date)
        cls.last_asset_end = max(cls.equity_info.end_date)
        cls.assets = cls.asset_finder.retrieve_all(cls.sids)

        cls.trading_day = cls.trading_calendar.day

        # Add a split for 'A' on its second date.
        cls.split_asset = cls.assets[0]
        cls.split_date = cls.split_asset.start_date + cls.trading_day
        cls.split_ratio = 0.5
        cls.adjustments = DataFrame.from_records([{
            'sid':
            cls.split_asset.sid,
            'value':
            cls.split_ratio,
            'kind':
            MULTIPLY,
            'start_date':
            Timestamp('NaT'),
            'end_date':
            cls.split_date,
            'apply_date':
            cls.split_date,
        }])

    def init_instance_fixtures(self):
        super(ClosesAndVolumes, self).init_instance_fixtures()

        # View of the data on/after the split.
        self.adj_closes = adj_closes = self.closes.copy()
        adj_closes.loc[:self.split_date, self.split_asset] *= self.split_ratio
        self.adj_volumes = adj_volumes = self.volumes.copy()
        adj_volumes.loc[:self.split_date, self.split_asset] *= self.split_ratio

        self.pipeline_close_loader = DataFrameLoader(
            column=USEquityPricing.close,
            baseline=self.closes,
            adjustments=self.adjustments,
        )

        self.pipeline_volume_loader = DataFrameLoader(
            column=USEquityPricing.volume,
            baseline=self.volumes,
            adjustments=self.adjustments,
        )

    def expected_close(self, date, asset):
        if date < self.split_date:
            lookup = self.closes
        else:
            lookup = self.adj_closes
        return lookup.loc[date, asset]

    def expected_volume(self, date, asset):
        if date < self.split_date:
            lookup = self.volumes
        else:
            lookup = self.adj_volumes
        return lookup.loc[date, asset]

    def exists(self, date, asset):
        return asset.start_date <= date <= asset.end_date

    def test_attach_pipeline_after_initialize(self):
        """
        Assert that calling attach_pipeline after initialize raises correctly.
        """
        def initialize(context):
            pass

        def late_attach(context, data):
            attach_pipeline(Pipeline(), 'test')
            raise AssertionError("Shouldn't make it past attach_pipeline!")

        algo = TradingAlgorithm(
            initialize=initialize,
            handle_data=late_attach,
            data_frequency='daily',
            get_pipeline_loader=lambda column: self.pipeline_close_loader,
            start=self.first_asset_start - self.trading_day,
            end=self.last_asset_end + self.trading_day,
            env=self.env,
        )

        with self.assertRaises(AttachPipelineAfterInitialize):
            algo.run(self.data_portal)

        def barf(context, data):
            raise AssertionError("Shouldn't make it past before_trading_start")

        algo = TradingAlgorithm(
            initialize=initialize,
            before_trading_start=late_attach,
            handle_data=barf,
            data_frequency='daily',
            get_pipeline_loader=lambda column: self.pipeline_close_loader,
            start=self.first_asset_start - self.trading_day,
            end=self.last_asset_end + self.trading_day,
            env=self.env,
        )

        with self.assertRaises(AttachPipelineAfterInitialize):
            algo.run(self.data_portal)

    def test_pipeline_output_after_initialize(self):
        """
        Assert that calling pipeline_output after initialize raises correctly.
        """
        def initialize(context):
            attach_pipeline(Pipeline(), 'test')
            pipeline_output('test')
            raise AssertionError("Shouldn't make it past pipeline_output()")

        def handle_data(context, data):
            raise AssertionError("Shouldn't make it past initialize!")

        def before_trading_start(context, data):
            raise AssertionError("Shouldn't make it past initialize!")

        algo = TradingAlgorithm(
            initialize=initialize,
            handle_data=handle_data,
            before_trading_start=before_trading_start,
            data_frequency='daily',
            get_pipeline_loader=lambda column: self.pipeline_close_loader,
            start=self.first_asset_start - self.trading_day,
            end=self.last_asset_end + self.trading_day,
            env=self.env,
        )

        with self.assertRaises(PipelineOutputDuringInitialize):
            algo.run(self.data_portal)

    def test_get_output_nonexistent_pipeline(self):
        """
        Assert that calling add_pipeline after initialize raises appropriately.
        """
        def initialize(context):
            attach_pipeline(Pipeline(), 'test')

        def handle_data(context, data):
            raise AssertionError("Shouldn't make it past before_trading_start")

        def before_trading_start(context, data):
            pipeline_output('not_test')
            raise AssertionError("Shouldn't make it past pipeline_output!")

        algo = TradingAlgorithm(
            initialize=initialize,
            handle_data=handle_data,
            before_trading_start=before_trading_start,
            data_frequency='daily',
            get_pipeline_loader=lambda column: self.pipeline_close_loader,
            start=self.first_asset_start - self.trading_day,
            end=self.last_asset_end + self.trading_day,
            env=self.env,
        )

        with self.assertRaises(NoSuchPipeline):
            algo.run(self.data_portal)

    @parameterized.expand([('default', None), ('day', 1), ('week', 5),
                           ('year', 252),
                           ('all_but_one_day', 'all_but_one_day'),
                           ('custom_iter', 'custom_iter')])
    def test_assets_appear_on_correct_days(self, test_name, chunks):
        """
        Assert that assets appear at correct times during a backtest, with
        correctly-adjusted close price values.
        """

        if chunks == 'all_but_one_day':
            chunks = (self.dates.get_loc(self.last_asset_end) -
                      self.dates.get_loc(self.first_asset_start)) - 1
        elif chunks == 'custom_iter':
            chunks = []
            st = np.random.RandomState(12345)
            remaining = (self.dates.get_loc(self.last_asset_end) -
                         self.dates.get_loc(self.first_asset_start))
            while remaining > 0:
                chunk = st.randint(3)
                chunks.append(chunk)
                remaining -= chunk

        def initialize(context):
            p = attach_pipeline(Pipeline(), 'test', chunks=chunks)
            p.add(USEquityPricing.close.latest, 'close')

        def handle_data(context, data):
            results = pipeline_output('test')
            date = get_datetime().normalize()
            for asset in self.assets:
                # Assets should appear iff they exist today and yesterday.
                exists_today = self.exists(date, asset)
                existed_yesterday = self.exists(date - self.trading_day, asset)
                if exists_today and existed_yesterday:
                    latest = results.loc[asset, 'close']
                    self.assertEqual(latest, self.expected_close(date, asset))
                else:
                    self.assertNotIn(asset, results.index)

        before_trading_start = handle_data

        algo = TradingAlgorithm(
            initialize=initialize,
            handle_data=handle_data,
            before_trading_start=before_trading_start,
            data_frequency='daily',
            get_pipeline_loader=lambda column: self.pipeline_close_loader,
            start=self.first_asset_start,
            end=self.last_asset_end,
            env=self.env,
        )

        # Run for a week in the middle of our data.
        algo.run(self.data_portal)

    def test_multiple_pipelines(self):
        """
        Test that we can attach multiple pipelines and access the correct
        output based on the pipeline name.
        """
        def initialize(context):
            pipeline_close = attach_pipeline(Pipeline(), 'test_close')
            pipeline_volume = attach_pipeline(Pipeline(), 'test_volume')

            pipeline_close.add(USEquityPricing.close.latest, 'close')
            pipeline_volume.add(USEquityPricing.volume.latest, 'volume')

        def handle_data(context, data):
            closes = pipeline_output('test_close')
            volumes = pipeline_output('test_volume')
            date = get_datetime().normalize()
            for asset in self.assets:
                # Assets should appear iff they exist today and yesterday.
                exists_today = self.exists(date, asset)
                existed_yesterday = self.exists(date - self.trading_day, asset)
                if exists_today and existed_yesterday:
                    self.assertEqual(closes.loc[asset, 'close'],
                                     self.expected_close(date, asset))
                    self.assertEqual(volumes.loc[asset, 'volume'],
                                     self.expected_volume(date, asset))
                else:
                    self.assertNotIn(asset, closes.index)
                    self.assertNotIn(asset, volumes.index)

        column_to_loader = {
            USEquityPricing.close: self.pipeline_close_loader,
            USEquityPricing.volume: self.pipeline_volume_loader,
        }

        algo = TradingAlgorithm(
            initialize=initialize,
            handle_data=handle_data,
            data_frequency='daily',
            get_pipeline_loader=lambda column: column_to_loader[column],
            start=self.first_asset_start,
            end=self.last_asset_end,
            env=self.env,
        )

        algo.run(self.data_portal)

    def test_duplicate_pipeline_names(self):
        """
        Test that we raise an error when we try to attach a pipeline with a
        name that already exists for another attached pipeline.
        """
        def initialize(context):
            attach_pipeline(Pipeline(), 'test')
            attach_pipeline(Pipeline(), 'test')

        algo = TradingAlgorithm(
            initialize=initialize,
            data_frequency='daily',
            get_pipeline_loader=lambda column: self.pipeline_close_loader,
            start=self.first_asset_start,
            end=self.last_asset_end,
            env=self.env,
        )

        with self.assertRaises(DuplicatePipelineName):
            algo.run(self.data_portal)
Example #18
    def ingest(name,
               environ=os.environ,
               timestamp=None,
               assets_versions=(),
               show_progress=False):
        """Ingest data for a given bundle.

        Parameters
        ----------
        name : str
            The name of the bundle.
        environ : mapping, optional
            The environment variables. By default this is os.environ.
        timestamp : datetime, optional
            The timestamp to use for the load.
            By default this is the current time.
        assets_versions : Iterable[int], optional
            Versions of the assets db to which to downgrade.
        show_progress : bool, optional
            Tell the ingest function to display the progress where possible.
        """
        try:
            bundle = bundles[name]
        except KeyError:
            raise UnknownBundle(name)

        calendar = get_calendar(bundle.calendar_name)

        start_session = bundle.start_session
        end_session = bundle.end_session

        if start_session is None or start_session < calendar.first_session:
            start_session = calendar.first_session

        if end_session is None or end_session > calendar.last_session:
            end_session = calendar.last_session

        if timestamp is None:
            timestamp = pd.Timestamp.utcnow()
        timestamp = timestamp.tz_convert('utc').tz_localize(None)

        timestr = to_bundle_ingest_dirname(timestamp)
        cachepath = cache_path(name, environ=environ)
        pth.ensure_directory(pth.data_path([name, timestr], environ=environ))
        pth.ensure_directory(cachepath)
        with dataframe_cache(cachepath, clean_on_failure=False) as cache, \
                ExitStack() as stack:
            # we use `clean_on_failure=False` so that we don't purge the
            # cache directory if the load fails in the middle
            if bundle.create_writers:
                wd = stack.enter_context(working_dir(
                    pth.data_path([], environ=environ))
                )
                daily_bars_path = wd.ensure_dir(
                    *daily_equity_relative(
                        name, timestr, environ=environ,
                    )
                )
                daily_bar_writer = BcolzDailyBarWriter(
                    daily_bars_path,
                    calendar,
                    start_session,
                    end_session,
                )
                # Do an empty write to ensure that the daily ctables exist
                # when we create the SQLiteAdjustmentWriter below. The
                # SQLiteAdjustmentWriter needs to open the daily ctables so
                # that it can compute the adjustment ratios for the dividends.

                daily_bar_writer.write(())
                minute_bar_writer = BcolzMinuteBarWriter(
                    wd.ensure_dir(*minute_equity_relative(
                        name, timestr, environ=environ)
                    ),
                    calendar,
                    start_session,
                    end_session,
                    minutes_per_day=bundle.minutes_per_day,
                )
                assets_db_path = wd.getpath(*asset_db_relative(
                    name, timestr, environ=environ,
                ))
                asset_db_writer = AssetDBWriter(assets_db_path)

                adjustment_db_writer = stack.enter_context(
                    SQLiteAdjustmentWriter(
                        wd.getpath(*adjustment_db_relative(
                            name, timestr, environ=environ)),
                        BcolzDailyBarReader(daily_bars_path),
                        calendar.all_sessions,
                        overwrite=True,
                    )
                )
            else:
                daily_bar_writer = None
                minute_bar_writer = None
                asset_db_writer = None
                adjustment_db_writer = None
                if assets_versions:
                    raise ValueError('Need to ingest a bundle that creates '
                                     'writers in order to downgrade the assets'
                                     ' db.')
            bundle.ingest(
                environ,
                asset_db_writer,
                minute_bar_writer,
                daily_bar_writer,
                adjustment_db_writer,
                calendar,
                start_session,
                end_session,
                cache,
                show_progress,
                pth.data_path([name, timestr], environ=environ),
            )

            for version in sorted(set(assets_versions), reverse=True):
                version_path = wd.getpath(*asset_db_relative(
                    name, timestr, environ=environ, db_version=version,
                ))
                with working_file(version_path) as wf:
                    shutil.copy2(assets_db_path, wf.path)
                    downgrade(wf.path, version)
Example #19
    def init_class_fixtures(cls):
        super(TestMinuteBarDataFuturesCalendar, cls).init_class_fixtures()
        cls.trading_calendar = get_calendar('CME')
Example #20
    def test_register_calendar_type(self):
        register_calendar_type("DMY", self.dummy_cal_type)
        retr_cal = get_calendar("DMY")
        self.assertEqual(self.dummy_cal_type, type(retr_cal))
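
Note the contrast with Example #1: registering a calendar type rather than an instance presumably defers construction until the first get_calendar call, which then instantiates it; that is why this test compares types instead of instances.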
Example #21
    def trading_calendar(self):
        if 'calendar_name' in self._table.attrs.attrs:
            return get_calendar(self._table.attrs['calendar_name'])
        else:
            return None
Example #22
def load_market_data(trading_day=None, trading_days=None, bm_symbol='SPY',
                     environ=None):
    """
    Load benchmark returns and treasury yield curves for the given calendar and
    benchmark symbol.

    Benchmarks are downloaded as a Series from IEX Trading.  Treasury curves
    are US Treasury Bond rates and are downloaded from 'www.federalreserve.gov'
    by default.  For Canadian exchanges, a loader for Canadian bonds from the
    Bank of Canada is also available.

    Results downloaded from the internet are cached in
    ~/.gateway/data. Subsequent loads will attempt to read from the cached
    files before falling back to redownload.

    Parameters
    ----------
    trading_day : pandas.CustomBusinessDay, optional
        A trading_day used to determine the latest day for which we
        expect to have data.  Defaults to an NYSE trading day.
    trading_days : pd.DatetimeIndex, optional
        A calendar of trading days.  Also used for determining what cached
        dates we should expect to have cached. Defaults to the NYSE calendar.
    bm_symbol : str, optional
        Symbol for the benchmark index to load. Defaults to 'SPY', the ticker
        for the S&P 500, provided by IEX Trading.

    Returns
    -------
    (benchmark_returns, treasury_curves) : (pd.Series, pd.DataFrame)

    Notes
    -----

    Both return values are DatetimeIndexed with values dated to midnight in UTC
    of each stored date.  The columns of `treasury_curves` are:

    '1month', '3month', '6month',
    '1year','2year','3year','5year','7year','10year','20year','30year'
    """
    if trading_day is None:
        trading_day = get_calendar('NYSE').trading_day
    if trading_days is None:
        trading_days = get_calendar('NYSE').all_sessions

    first_date = trading_days[0]
    now = pd.Timestamp.utcnow()

    # we will fill missing benchmark data through latest trading date
    last_date = trading_days[trading_days.get_loc(now, method='ffill')]

    br = ensure_benchmark_data(
        bm_symbol,
        first_date,
        last_date,
        now,
        # We need the trading_day to figure out the close prior to the first
        # date so that we can compute returns for the first date.
        trading_day,
        environ,
    )
    tc = ensure_treasury_data(
        bm_symbol,
        first_date,
        last_date,
        now,
        environ,
    )

    # combine dt indices and reindex using ffill then bfill
    all_dt = br.index.union(tc.index)
    br = br.reindex(all_dt, method='ffill').fillna(method='bfill')
    tc = tc.reindex(all_dt, method='ffill').fillna(method='bfill')

    benchmark_returns = br[br.index.slice_indexer(first_date, last_date)]
    treasury_curves = tc[tc.index.slice_indexer(first_date, last_date)]
    return benchmark_returns, treasury_curves
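
The cache-merge step above unions the benchmark and treasury date indexes, forward-fills each series onto the union, and back-fills any leading gap. A small self-contained pandas example of the same operation:

import pandas as pd

br = pd.Series([0.01, 0.02],
               index=pd.to_datetime(['2016-01-04', '2016-01-06']))
tc = pd.Series([2.10, 2.15],
               index=pd.to_datetime(['2016-01-05', '2016-01-06']))

all_dt = br.index.union(tc.index)
br = br.reindex(all_dt, method='ffill').fillna(method='bfill')
tc = tc.reindex(all_dt, method='ffill').fillna(method='bfill')
# Both series now share the three-date union index with no NaNs.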
Example #23
def _run(handle_data, initialize, before_trading_start, analyze, algofile,
         algotext, defines, data_frequency, capital_base, data, bundle,
         bundle_timestamp, start, end, output, trading_calendar, print_algo,
         local_namespace, environ):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`gateway.run_algo`.
    """
    if algotext is not None:
        if local_namespace:
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign, )
            try:
                # evaluate in the same namespace so names may refer to
                # each other
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e), )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    if trading_calendar is None:
        trading_calendar = get_calendar('NYSE')

    if bundle is not None:
        bundle_data = load(
            bundle,
            environ,
            bundle_timestamp,
        )

        prefix, connstr = re.split(
            r'sqlite:///',
            str(bundle_data.asset_finder.engine.url),
            maxsplit=1,
        )
        if prefix:
            raise ValueError(
                "invalid url %r, must begin with 'sqlite:///'" %
                str(bundle_data.asset_finder.engine.url), )
        env = TradingEnvironment(asset_db_path=connstr, environ=environ)
        first_trading_day =\
            bundle_data.equity_minute_bar_reader.first_trading_day
        data = DataPortal(
            env.asset_finder,
            trading_calendar=trading_calendar,
            first_trading_day=first_trading_day,
            equity_minute_reader=bundle_data.equity_minute_bar_reader,
            equity_daily_reader=bundle_data.equity_daily_bar_reader,
            adjustment_reader=bundle_data.adjustment_reader,
        )

        pipeline_loader = USEquityPricingLoader(
            bundle_data.equity_daily_bar_reader,
            bundle_data.adjustment_reader,
        )

        def choose_loader(column):
            if column in USEquityPricing.columns:
                return pipeline_loader
            raise ValueError("No PipelineLoader registered for column %s." %
                             column)
    else:
        env = TradingEnvironment(environ=environ)
        choose_loader = None

    perf = TradingAlgorithm(
        namespace=namespace,
        env=env,
        get_pipeline_loader=choose_loader,
        trading_calendar=trading_calendar,
        sim_params=create_simulation_parameters(
            start=start,
            end=end,
            capital_base=capital_base,
            data_frequency=data_frequency,
            trading_calendar=trading_calendar,
        ),
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }).run(
            data,
            overwrite_sim_params=False,
        )

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the gateway magic not write any data
        perf.to_pickle(output)

    return perf
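
The defines loop near the top evaluates each name=value pair into one shared namespace, so later defines can reference earlier ones. A self-contained illustration of that behavior:

namespace = {}
for assign in ['n=5', 'xs=list(range(n))']:
    name, value = assign.split('=', 2)
    namespace[name] = eval(value, namespace)

assert namespace['xs'] == [0, 1, 2, 3, 4]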
Example #24
def create_test_gateway(**config):
    """
       :param config: A configuration object that is a dict with:

           - sid - an integer, which will be used as the asset ID.
           - order_count - the number of orders the test algo will place,
             defaults to 100
           - order_amount - the number of shares per order, defaults to 100
           - trade_count - the number of trades to simulate, defaults to 101
             to ensure all orders are processed.
           - algorithm - optional parameter providing an algorithm. defaults
             to :py:class:`gateway.test.algorithms.TestAlgorithm`
           - trade_source - optional parameter to specify trades, if present.
             If not present :py:class:`gateway.sources.SpecificEquityTrades`
             is the source, with daily frequency in trades.
           - slippage: optional parameter that configures the
             :py:class:`gateway.gens.tradingsimulation.TransactionSimulator`.
             Expects an object with a simulate method, such as
             :py:class:`gateway.gens.tradingsimulation.FixedSlippage`.
       """
    assert isinstance(config, dict)

    try:
        sid_list = config['sid_list']
    except KeyError:
        try:
            sid_list = [config['sid']]
        except KeyError:
            raise Exception("simfactory create_test_gateway() requires "
                            "argument 'sid_list' or 'sid'")

    concurrent_trades = config.get('concurrent_trades', False)
    order_count = config.get('order_count', 100)
    order_amount = config.get('order_amount', 100)
    trading_calendar = config.get('trading_calendar', get_calendar("NYSE"))

    # -------------------
    # Create the Algo
    # -------------------
    if 'algorithm' in config:
        test_algo = config['algorithm']
    else:
        test_algo = TestAlgorithm(sid_list[0],
                                  order_amount,
                                  order_count,
                                  sim_params=config.get(
                                      'sim_params',
                                      factory.create_simulation_parameters()),
                                  trading_calendar=trading_calendar,
                                  slippage=config.get('slippage'),
                                  identifiers=sid_list)

    # -------------------
    # Trade Source
    # -------------------
    if 'skip_data' not in config:
        if 'trade_source' in config:
            trade_source = config['trade_source']
        else:
            trade_source = factory.create_daily_trade_source(
                sid_list,
                test_algo.sim_params,
                test_algo.trading_environment,
                trading_calendar,
                concurrent=concurrent_trades,
            )

        trades_by_sid = {}
        for trade in trade_source:
            if trade.sid not in trades_by_sid:
                trades_by_sid[trade.sid] = []

            trades_by_sid[trade.sid].append(trade)

        data_portal = create_data_portal_from_trade_history(
            config['env'].asset_finder, trading_calendar, config['tempdir'],
            config['sim_params'], trades_by_sid)

        test_algo.data_portal = data_portal

    # -------------------
    # Benchmark source
    # -------------------

    test_algo.benchmark_return_source = config.get('benchmark_source', None)

    # ------------------
    # generator/simulator
    sim = test_algo.get_generator()

    return sim
Example #25
from gateway.utils.calendars import get_calendar, register_calendar
from .exchange_calendar_shsz import SHSZExchangeCalendar

register_calendar("SHSZ", SHSZExchangeCalendar(), force=True)

# Singleton: every importer of this module shares this instance.
shsz_calendar = get_calendar("SHSZ")
Example #26
    def test_ingest(self):
        calendar = get_calendar('NYSE')
        sessions = calendar.sessions_in_range(self.START_DATE, self.END_DATE)
        minutes = calendar.minutes_for_sessions_in_range(
            self.START_DATE,
            self.END_DATE,
        )

        sids = tuple(range(3))
        equities = make_simple_equity_info(
            sids,
            self.START_DATE,
            self.END_DATE,
        )

        daily_bar_data = make_bar_data(equities, sessions)
        minute_bar_data = make_bar_data(equities, minutes)
        first_split_ratio = 0.5
        second_split_ratio = 0.1
        splits = pd.DataFrame.from_records([
            {
                'effective_date': str_to_seconds('2014-01-08'),
                'ratio': first_split_ratio,
                'sid': 0,
            },
            {
                'effective_date': str_to_seconds('2014-01-09'),
                'ratio': second_split_ratio,
                'sid': 1,
            },
        ])

        @self.register(
            'bundle',
            calendar_name='NYSE',
            start_session=self.START_DATE,
            end_session=self.END_DATE,
        )
        def bundle_ingest(environ, asset_db_writer, minute_bar_writer,
                          daily_bar_writer, adjustment_writer, calendar,
                          start_session, end_session, cache, show_progress,
                          output_dir):
            assert_is(environ, self.environ)

            asset_db_writer.write(equities=equities)
            minute_bar_writer.write(minute_bar_data)
            daily_bar_writer.write(daily_bar_data)
            adjustment_writer.write(splits=splits)

            assert_is_instance(calendar, TradingCalendar)
            assert_is_instance(cache, dataframe_cache)
            assert_is_instance(show_progress, bool)

        self.ingest('bundle', environ=self.environ)
        bundle = self.load('bundle', environ=self.environ)

        assert_equal(set(bundle.asset_finder.sids), set(sids))

        columns = 'open', 'high', 'low', 'close', 'volume'

        actual = bundle.equity_minute_bar_reader.load_raw_arrays(
            columns,
            minutes[0],
            minutes[-1],
            sids,
        )

        for actual_column, colname in zip(actual, columns):
            assert_equal(
                actual_column,
                expected_bar_values_2d(minutes, equities, colname),
                msg=colname,
            )

        actual = bundle.equity_daily_bar_reader.load_raw_arrays(
            columns,
            self.START_DATE,
            self.END_DATE,
            sids,
        )
        for actual_column, colname in zip(actual, columns):
            assert_equal(
                actual_column,
                expected_bar_values_2d(sessions, equities, colname),
                msg=colname,
            )
        adjustments_for_cols = bundle.adjustment_reader.load_adjustments(
            columns,
            sessions,
            pd.Index(sids),
        )
        for column, adjustments in zip(columns, adjustments_for_cols[:-1]):
            # iterate over all the adjustments but `volume`
            assert_equal(
                adjustments,
                {
                    2: [
                        Float64Multiply(
                            first_row=0,
                            last_row=2,
                            first_col=0,
                            last_col=0,
                            value=first_split_ratio,
                        )
                    ],
                    3: [
                        Float64Multiply(
                            first_row=0,
                            last_row=3,
                            first_col=1,
                            last_col=1,
                            value=second_split_ratio,
                        )
                    ],
                },
                msg=column,
            )

        # check the volume, the value should be 1/ratio
        assert_equal(
            adjustments_for_cols[-1],
            {
                2: [
                    Float64Multiply(
                        first_row=0,
                        last_row=2,
                        first_col=0,
                        last_col=0,
                        value=1 / first_split_ratio,
                    )
                ],
                3: [
                    Float64Multiply(
                        first_row=0,
                        last_row=3,
                        first_col=1,
                        last_col=1,
                        value=1 / second_split_ratio,
                    )
                ],
            },
            msg='volume',
        )
Example #27
    def init_class_fixtures(cls):
        super(TestDateUtils, cls).init_class_fixtures()
        cls.calendar = get_calendar('NYSE')
Example #28
class CSVDIRBundleTestCase(GatewayTestCase):
    symbols = 'AAPL', 'IBM', 'KO', 'MSFT'
    asset_start = pd.Timestamp('2012-01-03', tz='utc')
    asset_end = pd.Timestamp('2014-12-31', tz='utc')
    bundle = bundles['csvdir']
    calendar = get_calendar(bundle.calendar_name)
    start_date = calendar.first_session
    end_date = calendar.last_session
    api_key = 'ayylmao'
    columns = 'open', 'high', 'low', 'close', 'volume'

    def _expected_data(self, asset_finder):
        sids = {
            symbol: asset_finder.lookup_symbol(
                symbol,
                self.asset_start,
            ).sid
            for symbol in self.symbols
        }

        def per_symbol(symbol):
            df = pd.read_csv(
                test_resource_path('csvdir_samples', 'csvdir', 'daily',
                                   symbol + '.csv.gz'),
                parse_dates=['date'],
                index_col='date',
                usecols=[
                    'open',
                    'high',
                    'low',
                    'close',
                    'volume',
                    'date',
                    'dividend',
                    'split',
                ],
                na_values=['NA'],
            )
            df['sid'] = sids[symbol]
            return df

        all_ = pd.concat(map(per_symbol, self.symbols)).set_index(
            'sid',
            append=True,
        ).unstack()

        # fancy list comprehension with statements
        @list
        @apply
        def pricing():
            for column in self.columns:
                vs = all_[column].values
                if column == 'volume':
                    vs = np.nan_to_num(vs)
                yield vs

        # The same adjustment dates apply to each of the four OHLC
        # columns; volume has its own, shorter list.
        ohlc_adjustment_dates = [
            5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699, 5701, 5702, 5722,
            5760, 5764, 5774, 5821, 5822, 5829, 5845, 5884, 5885, 5888, 5908,
            5947, 5948, 5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096, 6135,
            6136, 6139, 6157, 6160, 6198, 6199, 6207, 6223, 6263, 6271, 6277,
        ]
        adjustments = [
            ohlc_adjustment_dates,  # open
            ohlc_adjustment_dates,  # high
            ohlc_adjustment_dates,  # low
            ohlc_adjustment_dates,  # close
            [5701, 6157],           # volume
        ]

        return pricing, adjustments

    def test_bundle(self):
        environ = {'CSVDIR': test_resource_path('csvdir_samples', 'csvdir')}

        ingest('csvdir', environ=environ)
        bundle = load('csvdir', environ=environ)
        sids = 0, 1, 2, 3
        assert_equal(set(bundle.asset_finder.sids), set(sids))

        for equity in bundle.asset_finder.retrieve_all(sids):
            assert_equal(equity.start_date, self.asset_start, msg=equity)
            assert_equal(equity.end_date, self.asset_end, msg=equity)

        sessions = self.calendar.all_sessions
        actual = bundle.equity_daily_bar_reader.load_raw_arrays(
            self.columns,
            sessions[sessions.get_loc(self.asset_start, 'bfill')],
            sessions[sessions.get_loc(self.asset_end, 'ffill')],
            sids,
        )

        expected_pricing, expected_adjustments = self._expected_data(
            bundle.asset_finder, )
        assert_equal(actual, expected_pricing, array_decimal=2)

        adjustments_for_cols = bundle.adjustment_reader.load_adjustments(
            self.columns,
            sessions,
            pd.Index(sids),
        )
        assert_equal([sorted(adj.keys()) for adj in adjustments_for_cols],
                     expected_adjustments)
Example #29
    def setUpClass(cls):
        super(StatefulRulesTests, cls).setUpClass()

        cls.class_ = StatefulRule
        cls.cal = get_calendar(cls.CALENDAR_STRING)
Example #30
    def test_default_calendars(self):
        for name in concat([_default_calendar_factories,
                            _default_calendar_aliases]):
            self.assertIsNotNone(get_calendar(name),
                                 "get_calendar(%r) returned None" % name)