コード例 #1
0
    def test_load_adjustments_from_sqlite(self):
        """
        Adjustments round-tripped through SQLite match the expected ones.

        Each adjustment's row/column bounds are compared exactly; values
        are floats, so they are compared with ``assert_allclose``.
        """
        reader = SQLiteAdjustmentReader(self.db_path)
        columns = [USEquityPricing.close, USEquityPricing.volume]
        query_days = self.calendar_days_between(TEST_QUERY_START, TEST_QUERY_STOP)

        adjustments = reader.load_adjustments(columns, query_days, self.assets)

        close_adjustments = adjustments[0]
        volume_adjustments = adjustments[1]

        expected_close_adjustments, expected_volume_adjustments = self.expected_adjustments(
            TEST_QUERY_START, TEST_QUERY_STOP
        )

        def assert_adjustments_equal(actual, expected_map):
            # Shared checker: the previous version duplicated this loop
            # verbatim for the close and volume columns.
            for key, expected_list in expected_map.items():
                actual_list = actual[key]
                # Explicit length check: without it, an actual list shorter
                # than expected would pass silently (the old loop iterated
                # over ``actual`` and never noticed missing entries).
                self.assertEqual(len(actual_list), len(expected_list))
                for adj, expected in zip(actual_list, expected_list):
                    self.assertEqual(adj.first_row, expected.first_row)
                    self.assertEqual(adj.last_row, expected.last_row)
                    self.assertEqual(adj.first_col, expected.first_col)
                    self.assertEqual(adj.last_col, expected.last_col)
                    assert_allclose(adj.value, expected.value)

        assert_adjustments_equal(close_adjustments, expected_close_adjustments)
        assert_adjustments_equal(volume_adjustments, expected_volume_adjustments)
コード例 #2
0
ファイル: test_api_shim.py プロジェクト: edouardswiac/zipline
    def create_adjustments_reader(cls):
        """
        Write a one-split adjustments db into the class tempdir and return
        a SQLiteAdjustmentReader over it.
        """
        path = cls.tempdir.getpath("test_adjustments.db")
        adj_writer = SQLiteAdjustmentWriter(path, cls.env.trading_days,
                                            MockDailyBarReader())

        # A single 2:1 split on asset3, effective 2016-01-06.
        splits = pd.DataFrame([{
            'effective_date': str_to_seconds("2016-01-06"),
            'ratio': 0.5,
            'sid': cls.asset3.sid
        }])

        # Mergers and Dividends are not tested, but we need to have these
        # anyway -- empty frames coerced to the dtypes the writer expects.
        mergers = pd.DataFrame({}, columns=['effective_date', 'ratio', 'sid'])
        for col, dtype in (('effective_date', np.int64),
                           ('ratio', np.float64),
                           ('sid', np.int64)):
            mergers[col] = mergers[col].astype(dtype)

        dividends = pd.DataFrame(
            {},
            columns=['ex_date', 'record_date', 'declared_date',
                     'pay_date', 'amount', 'sid'],
        )
        for col, dtype in (('amount', np.float64), ('sid', np.int64)):
            dividends[col] = dividends[col].astype(dtype)

        adj_writer.write(splits, mergers, dividends)

        return SQLiteAdjustmentReader(path)
コード例 #3
0
ファイル: test_pipeline_algo.py プロジェクト: win2cs/zipline
 def create_adjustment_reader(cls, tempdir):
     """
     Write an AAPL 7:1 split (plus empty mergers/dividends frames) to a
     SQLite db in ``tempdir`` and return a reader over it.
     """
     dbpath = tempdir.getpath('adjustments.sqlite')
     writer = SQLiteAdjustmentWriter(dbpath, cls.env.trading_days,
                                     MockDailyBarSpotReader())

     splits = DataFrame.from_records([{
         'effective_date': str_to_seconds('2014-06-09'),
         'ratio': (1 / 7.0),
         'sid': cls.AAPL,
     }])

     # Empty mergers frame; the explicit empty arrays pin the dtypes the
     # writer expects even though the frame holds no rows.
     mergers = DataFrame(
         {
             'effective_date': array([], dtype=int),
             'ratio': array([], dtype=float),
             'sid': array([], dtype=int),
         },
         index=DatetimeIndex([], tz='UTC'),
         columns=['effective_date', 'ratio', 'sid'],
     )

     # Empty dividends frame, again with explicit per-column dtypes.
     empty_dividend_columns = {
         'sid': array([], dtype=uint32),
         'amount': array([], dtype=float64),
         'record_date': array([], dtype='datetime64[ns]'),
         'ex_date': array([], dtype='datetime64[ns]'),
         'declared_date': array([], dtype='datetime64[ns]'),
         'pay_date': array([], dtype='datetime64[ns]'),
     }
     dividends = DataFrame(empty_dividend_columns)

     writer.write(splits, mergers, dividends)
     return SQLiteAdjustmentReader(dbpath)
コード例 #4
0
    def test_load_adjustments_from_sqlite(self):
        """
        Adjustments loaded back from SQLite compare equal, wholesale, to
        the expected per-column adjustment mappings.
        """
        reader = SQLiteAdjustmentReader(self.db_path)
        columns = [USEquityPricing.close, USEquityPricing.volume]
        query_days = self.calendar_days_between(TEST_QUERY_START,
                                                TEST_QUERY_STOP)

        adjustments = reader.load_adjustments(columns,
                                              query_days,
                                              self.assets)

        # One entry per requested column, in order: close first, volume
        # second.
        close_adjustments, volume_adjustments = (adjustments[0],
                                                 adjustments[1])

        expected_close, expected_volume = self.expected_adjustments(
            TEST_QUERY_START, TEST_QUERY_STOP,
        )
        self.assertEqual(close_adjustments, expected_close)
        self.assertEqual(volume_adjustments, expected_volume)
コード例 #5
0
    def test_load_adjustments_from_sqlite(self):
        """
        Loading adjustments from SQLite reproduces the expected close and
        volume adjustment mappings exactly.
        """
        reader = SQLiteAdjustmentReader(self.db_path)
        query_days = self.calendar_days_between(TEST_QUERY_START,
                                                TEST_QUERY_STOP)

        adjustments = reader.load_adjustments(
            [USEquityPricing.close, USEquityPricing.volume],
            query_days,
            self.assets,
        )

        expected = self.expected_adjustments(TEST_QUERY_START,
                                             TEST_QUERY_STOP)
        # ``expected`` is (close, volume), matching the order of the
        # columns passed to ``load_adjustments`` above.
        self.assertEqual(adjustments[0], expected[0])
        self.assertEqual(adjustments[1], expected[1])
コード例 #6
0
    def test_load_adjustments_from_sqlite(self):
        """
        Adjustments loaded from SQLite match the expected adjustments
        field by field.

        Row/column bounds are compared exactly; adjustment values are
        floats and are compared with ``assert_allclose``.
        """
        reader = SQLiteAdjustmentReader(self.db_path)
        columns = [USEquityPricing.close, USEquityPricing.volume]
        query_days = self.calendar_days_between(
            TEST_QUERY_START,
            TEST_QUERY_STOP,
        )

        adjustments = reader.load_adjustments(
            columns,
            query_days,
            self.assets,
        )

        close_adjustments = adjustments[0]
        volume_adjustments = adjustments[1]

        expected_close_adjustments, expected_volume_adjustments = \
            self.expected_adjustments(TEST_QUERY_START, TEST_QUERY_STOP)

        def check_column(actual, expected_map):
            # Single checker for both columns; previously this loop was
            # copy-pasted for close and volume.
            for key, expected_list in expected_map.items():
                actual_list = actual[key]
                # Guard against silently-missing adjustments: the old code
                # iterated over ``actual`` only, so a short actual list
                # skipped expected entries without failing.
                self.assertEqual(len(actual_list), len(expected_list))
                for adj, expected in zip(actual_list, expected_list):
                    self.assertEqual(adj.first_row, expected.first_row)
                    self.assertEqual(adj.last_row, expected.last_row)
                    self.assertEqual(adj.first_col, expected.first_col)
                    self.assertEqual(adj.last_col, expected.last_col)
                    assert_allclose(adj.value, expected.value)

        check_column(close_adjustments, expected_close_adjustments)
        check_column(volume_adjustments, expected_volume_adjustments)
コード例 #7
0
    def from_files(cls, pricing_path, adjustments_path):
        """
        Create a loader from a bcolz equity pricing dir and a SQLite
        adjustments path.

        Parameters
        ----------
        pricing_path : str
            Path to a bcolz directory written by a BcolzDailyBarWriter.
        adjustments_path : str
            Path to an adjustments db written by a SQLiteAdjustmentWriter.

        Returns
        -------
        An instance of ``cls`` wrapping a ``BcolzDailyBarReader`` over
        ``pricing_path`` and a ``SQLiteAdjustmentReader`` over
        ``adjustments_path``.
        """
        return cls(BcolzDailyBarReader(pricing_path),
                   SQLiteAdjustmentReader(adjustments_path))
コード例 #8
0
    def create_adjustments_reader(cls):
        """
        Build a mock adjustments db containing a 2:1 split on both the
        liquid and illiquid split assets, effective 2016-01-06, and return
        a reader over it.
        """
        # Both splits share the same effective date; compute it once.
        effective = str_to_seconds("2016-01-06")
        split_records = [
            {'effective_date': effective,
             'ratio': 0.5,
             'sid': cls.SPLIT_ASSET.sid},
            {'effective_date': effective,
             'ratio': 0.5,
             'sid': cls.ILLIQUID_SPLIT_ASSET.sid},
        ]

        path = create_mock_adjustments(cls.tempdir, cls.days,
                                       splits=split_records)
        return SQLiteAdjustmentReader(path)
コード例 #9
0
ファイル: test_pipeline_algo.py プロジェクト: mirizzi/zipline
 def create_adjustment_reader(cls, tempdir):
     """
     Write an adjustments db containing one AAPL 7:1 split plus empty
     mergers/dividends frames, then return a reader over it.
     """
     dbpath = tempdir.getpath('adjustments.sqlite')
     writer = SQLiteAdjustmentWriter(dbpath)

     splits = DataFrame.from_records([{
         'effective_date': str_to_seconds('2014-06-09'),
         'ratio': (1 / 7.0),
         'sid': cls.AAPL,
     }])

     # A single empty frame (with the dtypes the writer expects) is shared
     # between mergers and dividends, as in the original aliasing.
     empty = DataFrame(
         {
             'effective_date': array([], dtype=int),
             'ratio': array([], dtype=float),
             'sid': array([], dtype=int),
         },
         index=DatetimeIndex([], tz='UTC'),
         columns=['effective_date', 'ratio', 'sid'],
     )
     mergers = dividends = empty

     writer.write(splits, mergers, dividends)
     return SQLiteAdjustmentReader(dbpath)
コード例 #10
0
 def create_adjustment_reader(cls, tempdir):
     """
     Create an adjustments db holding a single AAPL 7:1 split and return a
     SQLiteAdjustmentReader over it.
     """
     dbpath = tempdir.getpath('adjustments.sqlite')
     writer = SQLiteAdjustmentWriter(dbpath, cls.env.trading_days,
                                     MockDailyBarSpotReader())

     splits = DataFrame.from_records([{
         'effective_date': str_to_seconds('2014-06-09'),
         'ratio': (1 / 7.0),
         'sid': cls.AAPL,
     }])
     mergers = create_empty_splits_mergers_frame()

     # Empty dividends frame; explicit empty arrays pin the column dtypes
     # the writer expects.
     dividend_dtypes = [
         ('sid', uint32),
         ('amount', float64),
         ('record_date', 'datetime64[ns]'),
         ('ex_date', 'datetime64[ns]'),
         ('declared_date', 'datetime64[ns]'),
         ('pay_date', 'datetime64[ns]'),
     ]
     dividends = DataFrame(
         dict((name, array([], dtype=dt)) for name, dt in dividend_dtypes)
     )

     writer.write(splits, mergers, dividends)
     return SQLiteAdjustmentReader(dbpath)
コード例 #11
0
    def create_adjustments_reader(cls):
        """
        Write splits, mergers, and dividends for the test assets into a
        temp SQLite db and return a SQLiteAdjustmentReader over it.
        """
        path = cls.tempdir.getpath("test_adjustments.db")

        adj_writer = SQLiteAdjustmentWriter(path, cls.env.trading_days,
                                            MockDailyBarReader())

        def day(datestr):
            # All dividend dates in this fixture are midnight UTC.
            return pd.Timestamp(datestr, tz='UTC').to_datetime64()

        splits = pd.DataFrame([
            {'effective_date': str_to_seconds("2016-01-06"),
             'ratio': 0.5,
             'sid': cls.SPLIT_ASSET.sid},
            {'effective_date': str_to_seconds("2016-01-07"),
             'ratio': 0.5,
             'sid': cls.ILLIQUID_SPLIT_ASSET.sid},
        ])

        mergers = pd.DataFrame([
            {'effective_date': str_to_seconds("2016-01-06"),
             'ratio': 0.5,
             'sid': cls.MERGER_ASSET.sid},
            {'effective_date': str_to_seconds("2016-01-07"),
             'ratio': 0.6,
             'sid': cls.ILLIQUID_MERGER_ASSET.sid},
        ])

        # we're using a fake daily reader in the adjustments writer which
        # returns every daily price as 100, so dividend amounts of 2.0 and 4.0
        # correspond to 2% and 4% dividends, respectively.
        dividends = pd.DataFrame(
            [
                # only care about ex date, the other dates don't matter here
                {'ex_date': day("2016-01-06"),
                 'record_date': day("2016-01-06"),
                 'declared_date': day("2016-01-06"),
                 'pay_date': day("2016-01-06"),
                 'amount': 2.0,
                 'sid': cls.DIVIDEND_ASSET.sid},
                {'ex_date': day("2016-01-07"),
                 'record_date': day("2016-01-07"),
                 'declared_date': day("2016-01-07"),
                 'pay_date': day("2016-01-07"),
                 'amount': 4.0,
                 'sid': cls.ILLIQUID_DIVIDEND_ASSET.sid},
            ],
            columns=[
                'ex_date', 'record_date', 'declared_date', 'pay_date',
                'amount', 'sid'
            ])

        adj_writer.write(splits, mergers, dividends)

        return SQLiteAdjustmentReader(path)
コード例 #12
0
    def test_read_with_adjustments(self):
        """
        Load adjusted high/volume arrays through USEquityPricingLoader and
        check every rolling window of every possible length against
        expectations computed directly from the raw adjustment frames.
        """
        columns = [USEquityPricing.high, USEquityPricing.volume]
        query_days = self.calendar_days_between(TEST_QUERY_START,
                                                TEST_QUERY_STOP)
        # Our expected results for each day are based on values from the
        # previous day.
        shifted_query_days = self.calendar_days_between(
            TEST_QUERY_START,
            TEST_QUERY_STOP,
            shift=-1,
        )

        baseline_reader = BcolzDailyBarReader(self.bcolz_path)
        adjustment_reader = SQLiteAdjustmentReader(self.db_path)
        pricing_loader = USEquityPricingLoader(
            baseline_reader,
            adjustment_reader,
        )

        # Load AdjustedArrays for both columns over the query window for
        # six assets (sids 1-6), with no rows/assets masked out.
        highs, volumes = pricing_loader.load_adjusted_array(
            columns,
            dates=query_days,
            assets=Int64Index(arange(1, 7)),
            mask=ones((len(query_days), 6), dtype=bool),
        )

        # Unadjusted baselines taken from the shifted (previous-day) dates.
        expected_baseline_highs = self.bcolz_writer.expected_values_2d(
            shifted_query_days,
            self.assets,
            'high',
        )
        expected_baseline_volumes = self.bcolz_writer.expected_values_2d(
            shifted_query_days,
            self.assets,
            'volume',
        )

        # At each point in time, the AdjustedArrays should yield the baseline
        # with all adjustments up to that date applied.
        for windowlen in range(1, len(query_days) + 1):
            for offset, window in enumerate(highs.traverse(windowlen)):
                baseline = expected_baseline_highs[offset:offset + windowlen]
                baseline_dates = query_days[offset:offset + windowlen]
                expected_adjusted_highs = self.apply_adjustments(
                    baseline_dates,
                    self.assets,
                    baseline,
                    # Apply all adjustments.
                    concat([SPLITS, MERGERS, DIVIDENDS_EXPECTED],
                           ignore_index=True),
                )
                assert_allclose(expected_adjusted_highs, window)

            for offset, window in enumerate(volumes.traverse(windowlen)):
                baseline = expected_baseline_volumes[offset:offset + windowlen]
                baseline_dates = query_days[offset:offset + windowlen]
                # Apply only splits and invert the ratio.
                adjustments = SPLITS.copy()
                adjustments.ratio = 1 / adjustments.ratio

                expected_adjusted_volumes = self.apply_adjustments(
                    baseline_dates,
                    self.assets,
                    baseline,
                    adjustments,
                )
                # FIXME: Make AdjustedArray properly support integral types.
                assert_array_equal(
                    expected_adjusted_volumes,
                    window.astype(uint32),
                )

        # Verify that we checked up to the longest possible window.
        # NOTE: ``windowlen`` deliberately leaks out of the loop above; at
        # this point it equals len(query_days), the maximum valid window.
        with self.assertRaises(WindowLengthTooLong):
            highs.traverse(windowlen + 1)
        with self.assertRaises(WindowLengthTooLong):
            volumes.traverse(windowlen + 1)
コード例 #13
0
    def setUpClass(cls):
        """
        Build the shared fixtures for this test case: a trading
        environment with four equities, an adjustments db containing one
        stock dividend, and a data portal wired to read from it.
        """
        cls.env = TradingEnvironment()
        cls.tempdir = TempDirectory()

        cls.sim_params = factory.create_simulation_parameters()

        # Assets 1, 2, and 4 live for the whole simulation (plus one day);
        # asset 3 starts 100 trading days in and ends 100 days early.
        cls.env.write_data(
            equities_data={
                1: {
                    "start_date": cls.sim_params.trading_days[0],
                    "end_date": cls.sim_params.trading_days[-1] +
                    timedelta(days=1)
                },
                2: {
                    "start_date": cls.sim_params.trading_days[0],
                    "end_date": cls.sim_params.trading_days[-1] +
                    timedelta(days=1)
                },
                3: {
                    "start_date": cls.sim_params.trading_days[100],
                    "end_date": cls.sim_params.trading_days[-100]
                },
                4: {
                    "start_date": cls.sim_params.trading_days[0],
                    "end_date": cls.sim_params.trading_days[-1] +
                    timedelta(days=1)
                }
            })

        dbpath = os.path.join(cls.tempdir.path, "adjustments.db")

        writer = SQLiteAdjustmentWriter(dbpath, cls.env.trading_days,
                                        MockDailyBarSpotReader())
        # No splits, mergers, or cash dividends -- only the stock dividend
        # below is exercised, but the writer requires all of these frames
        # with the correct dtypes.
        splits = mergers = create_empty_splits_mergers_frame()
        dividends = pd.DataFrame({
            'sid':
            np.array([], dtype=np.uint32),
            'amount':
            np.array([], dtype=np.float64),
            'declared_date':
            np.array([], dtype='datetime64[ns]'),
            'ex_date':
            np.array([], dtype='datetime64[ns]'),
            'pay_date':
            np.array([], dtype='datetime64[ns]'),
            'record_date':
            np.array([], dtype='datetime64[ns]'),
        })
        declared_date = cls.sim_params.trading_days[45]
        ex_date = cls.sim_params.trading_days[50]
        record_date = pay_date = cls.sim_params.trading_days[55]

        # One stock dividend: holders of sid 4 receive 2 shares of sid 5
        # per share held.
        stock_dividends = pd.DataFrame({
            'sid':
            np.array([4], dtype=np.uint32),
            'payment_sid':
            np.array([5], dtype=np.uint32),
            'ratio':
            np.array([2], dtype=np.float64),
            'declared_date':
            np.array([declared_date], dtype='datetime64[ns]'),
            'ex_date':
            np.array([ex_date], dtype='datetime64[ns]'),
            'record_date':
            np.array([record_date], dtype='datetime64[ns]'),
            'pay_date':
            np.array([pay_date], dtype='datetime64[ns]'),
        })
        writer.write(splits,
                     mergers,
                     dividends,
                     stock_dividends=stock_dividends)

        # Data portal for sids 1-4 backed by the adjustments db written
        # above.
        cls.data_portal = create_data_portal(
            cls.env,
            cls.tempdir,
            cls.sim_params, [1, 2, 3, 4],
            adjustment_reader=SQLiteAdjustmentReader(dbpath))