def test_read_no_adjustments(self):
    """A NullAdjustmentReader must yield no adjustments, and the pricing
    loader built on it must return the raw baseline bars unchanged.
    """
    reader = NullAdjustmentReader()
    columns = [USEquityPricing.close, USEquityPricing.volume]
    query_days = self.calendar_days_between(TEST_QUERY_START, TEST_QUERY_STOP)
    # Our expected results for each day are based on values from the
    # previous day.
    shifted_query_days = self.calendar_days_between(
        TEST_QUERY_START,
        TEST_QUERY_STOP,
        shift=-1,
    )

    adjustments = reader.load_adjustments(columns, query_days, self.assets)
    # One (empty) adjustment mapping per requested column.
    self.assertEqual(adjustments, [{}, {}])

    baseline_reader = BcolzDailyBarReader(self.bcolz_path)
    pricing_loader = USEquityPricingLoader(baseline_reader, reader)

    results = pricing_loader.load_adjusted_array(
        columns,
        dates=query_days,
        assets=self.assets,
        mask=ones((len(query_days), len(self.assets)), dtype=bool),
    )
    closes, volumes = map(getitem(results), columns)

    expected_closes = self.bcolz_writer.expected_values_2d(
        shifted_query_days,
        self.assets,
        "close",
    )
    expected_volumes = self.bcolz_writer.expected_values_2d(
        shifted_query_days,
        self.assets,
        "volume",
    )

    # AdjustedArrays should yield the same data as the expected baseline,
    # for every possible window length over the query range.
    for windowlen in range(1, len(query_days) + 1):
        for offset, window in enumerate(closes.traverse(windowlen)):
            assert_array_equal(
                expected_closes[offset:offset + windowlen],
                window,
            )
        for offset, window in enumerate(volumes.traverse(windowlen)):
            assert_array_equal(
                expected_volumes[offset:offset + windowlen],
                window,
            )

    # Verify that we checked up to the longest possible window:
    # one past it must be rejected.
    with self.assertRaises(WindowLengthTooLong):
        closes.traverse(windowlen + 1)
    with self.assertRaises(WindowLengthTooLong):
        volumes.traverse(windowlen + 1)
def init_class_fixtures(cls):
    """Build one EquityPricingLoader per equity domain and a shared
    SimplePipelineEngine over them.
    """
    super(WithInternationalPricingPipelineEngine, cls).init_class_fixtures()
    adjustments = NullAdjustmentReader()
    # Each domain pairs its exchange's daily-bar reader with the shared
    # null-adjustment reader and the in-memory FX rate reader.
    cls.loaders = {
        domain: EquityPricingLoader(
            cls.daily_bar_readers[exchange],
            adjustments,
            cls.in_memory_fx_rate_reader,
        )
        for domain, exchange in (
            (GB_EQUITIES, "XLON"),
            (US_EQUITIES, "XNYS"),
            (CA_EQUITIES, "XTSE"),
        )
    }
    cls.engine = SimplePipelineEngine(
        get_loader=cls.get_loader,
        asset_finder=cls.asset_finder,
    )
def init_class_fixtures(cls):
    """Wire up per-domain pricing loaders and the pipeline engine."""
    super().init_class_fixtures()
    null_adjustments = NullAdjustmentReader()

    def make_loader(exchange):
        # Pair an exchange's daily bars with null adjustments and the
        # shared in-memory FX rate reader.
        return EquityPricingLoader(
            cls.daily_bar_readers[exchange],
            null_adjustments,
            cls.in_memory_fx_rate_reader,
        )

    cls.loaders = {
        GB_EQUITIES: make_loader('XLON'),
        US_EQUITIES: make_loader('XNYS'),
        CA_EQUITIES: make_loader('XTSE'),
    }
    cls.engine = SimplePipelineEngine(
        get_loader=cls.get_loader,
        asset_finder=cls.asset_finder,
    )
def init_class_fixtures(cls):
    """Build one two-argument EquityPricingLoader per domain plus the
    shared pipeline engine.
    """
    super(WithInternationalPricingPipelineEngine, cls).init_class_fixtures()
    adjustments = NullAdjustmentReader()
    # All three loaders share the same null-adjustment reader; only the
    # exchange-specific daily-bar reader differs.
    cls.loaders = {
        domain: EquityPricingLoader(
            cls.daily_bar_readers[exchange],
            adjustments,
        )
        for domain, exchange in (
            (GB_EQUITIES, 'LSE'),
            (US_EQUITIES, 'NYSE'),
            (CA_EQUITIES, 'TSX'),
        )
    }
    cls.engine = SimplePipelineEngine(
        get_loader=cls.get_loader,
        asset_finder=cls.asset_finder,
    )
def setUpClass(cls):
    """Create rotating synthetic assets, write their daily bars into a
    temporary bcolz store, and build a pricing loader over that store.
    """
    cls.first_asset_start = Timestamp('2015-04-01', tz='UTC')
    cls.env = TradingEnvironment()
    cls.trading_day = day = cls.env.trading_day
    cls.calendar = date_range('2015', '2015-08', tz='UTC', freq=day)
    # Six assets whose lifetimes overlap and rotate: a new one starts
    # every 4 sessions and each lives for 8 sessions.
    cls.asset_info = make_rotating_asset_info(
        num_assets=6,
        first_start=cls.first_asset_start,
        frequency=day,
        periods_between_starts=4,
        asset_lifetime=8,
    )
    cls.last_asset_end = cls.asset_info['end_date'].max()
    cls.all_assets = cls.asset_info.index
    cls.env.write_data(equities_df=cls.asset_info)
    cls.finder = cls.env.asset_finder

    cls.temp_dir = TempDirectory()
    cls.temp_dir.create()
    try:
        cls.writer = SyntheticDailyBarWriter(
            asset_info=cls.asset_info[['start_date', 'end_date']],
            calendar=cls.calendar,
        )
        bar_table = cls.writer.write(
            cls.temp_dir.getpath('testdata.bcolz'),
            cls.calendar,
            cls.all_assets,
        )
        cls.pipeline_loader = USEquityPricingLoader(
            BcolzDailyBarReader(bar_table),
            NullAdjustmentReader(),
        )
    except BaseException:
        # Don't leak the temp directory if any setup step fails;
        # re-raise so the failure is still reported.
        cls.temp_dir.cleanup()
        raise
def test_read_no_adjustments(self):
    """With a NullAdjustmentReader, loaded arrays must equal the raw
    baseline bars from the previous session.
    """
    reader = NullAdjustmentReader()
    columns = [USEquityPricing.close, USEquityPricing.volume]
    query_days = self.calendar_days_between(TEST_QUERY_START, TEST_QUERY_STOP)
    # Our expected results for each day are based on values from the
    # previous day.
    shifted_query_days = self.calendar_days_between(
        TEST_QUERY_START,
        TEST_QUERY_STOP,
        shift=-1,
    )

    adjustments = reader.load_adjustments(
        [column.name for column in columns],
        query_days,
        self.assets,
    )
    # One (empty) adjustment mapping per requested column.
    self.assertEqual(adjustments, [{}, {}])

    loader = USEquityPricingLoader(
        self.bcolz_equity_daily_bar_reader,
        reader,
    )
    results = loader.load_adjusted_array(
        columns,
        dates=query_days,
        assets=self.assets,
        mask=ones((len(query_days), len(self.assets)), dtype=bool),
    )
    closes, volumes = map(getitem(results), columns)

    expected_closes = expected_bar_values_2d(
        shifted_query_days,
        self.asset_info,
        'close',
    )
    expected_volumes = expected_bar_values_2d(
        shifted_query_days,
        self.asset_info,
        'volume',
    )

    # AdjustedArrays should yield the same data as the expected baseline,
    # for every possible window length over the query range.
    for windowlen in range(1, len(query_days) + 1):
        for offset, window in enumerate(closes.traverse(windowlen)):
            assert_array_equal(
                expected_closes[offset:offset + windowlen],
                window,
            )
        for offset, window in enumerate(volumes.traverse(windowlen)):
            assert_array_equal(
                expected_volumes[offset:offset + windowlen],
                window,
            )

    # Verify that we checked up to the longest possible window:
    # one past it must be rejected.
    with self.assertRaises(WindowLengthTooLong):
        closes.traverse(windowlen + 1)
    with self.assertRaises(WindowLengthTooLong):
        volumes.traverse(windowlen + 1)
def test_read_no_adjustments(self):
    """A NullAdjustmentReader must report no pricing adjustments, so the
    loader should hand back the shifted baseline bars untouched.
    """
    reader = NullAdjustmentReader()
    columns = [USEquityPricing.close, USEquityPricing.volume]
    query_days = self.calendar_days_between(
        TEST_QUERY_START,
        TEST_QUERY_STOP,
    )
    # Our expected results for each day are based on values from the
    # previous day.
    shifted_query_days = self.calendar_days_between(
        TEST_QUERY_START,
        TEST_QUERY_STOP,
        shift=-1,
    )

    adjustments = reader.load_pricing_adjustments(
        [column.name for column in columns],
        query_days,
        self.sids,
    )
    # One (empty) adjustment mapping per requested column.
    self.assertEqual(adjustments, [{}, {}])

    loader = USEquityPricingLoader(
        self.bcolz_equity_daily_bar_reader,
        reader,
    )
    mask = ones((len(query_days), len(self.sids)), dtype=bool)
    results = loader.load_adjusted_array(
        domain=US_EQUITIES,
        columns=columns,
        dates=query_days,
        sids=self.sids,
        mask=mask,
    )
    closes, volumes = map(getitem(results), columns)

    expected_closes = expected_bar_values_2d(
        shifted_query_days,
        self.sids,
        self.asset_info,
        'close',
    )
    expected_volumes = expected_bar_values_2d(
        shifted_query_days,
        self.sids,
        self.asset_info,
        'volume',
    )

    # AdjustedArrays should yield the same data as the expected baseline,
    # for every possible window length over the query range.
    for windowlen in range(1, len(query_days) + 1):
        for offset, window in enumerate(closes.traverse(windowlen)):
            assert_array_equal(
                expected_closes[offset:offset + windowlen],
                window,
            )
        for offset, window in enumerate(volumes.traverse(windowlen)):
            assert_array_equal(
                expected_volumes[offset:offset + windowlen],
                window,
            )

    # Verify that we checked up to the longest possible window:
    # one past it must be rejected.
    with self.assertRaises(WindowLengthTooLong):
        closes.traverse(windowlen + 1)
    with self.assertRaises(WindowLengthTooLong):
        volumes.traverse(windowlen + 1)