def setUpClass(cls):
    """Build class-level fixtures: three synthetic assets (AAPL, MSFT,
    BRK_A), a trading environment seeded with their metadata, and a
    pricing pipeline loader backed by bar/adjustment readers written
    into a temporary directory.

    On any failure after the temp dir is created, the dir is cleaned up
    and the exception is re-raised so the class setup fails loudly.
    """
    # Fixed sids for the three test assets.
    cls.AAPL = 1
    cls.MSFT = 2
    cls.BRK_A = 3
    cls.assets = [cls.AAPL, cls.MSFT, cls.BRK_A]
    # All three assets trade for the whole of 2014.
    asset_info = make_simple_asset_info(
        cls.assets,
        Timestamp('2014'),
        Timestamp('2015'),
        ['AAPL', 'MSFT', 'BRK_A'],
    )
    cls.env = trading.TradingEnvironment()
    cls.env.write_data(equities_df=asset_info)
    cls.tempdir = tempdir = TempDirectory()
    tempdir.create()
    try:
        # Readers are created by subclass hooks; they write into tempdir.
        cls.raw_data, cls.bar_reader = cls.create_bar_reader(tempdir)
        cls.adj_reader = cls.create_adjustment_reader(tempdir)
        cls.pipeline_loader = USEquityPricingLoader(
            cls.bar_reader, cls.adj_reader)
    except:
        # Bare except is deliberate: always clean up the temp dir, then
        # re-raise whatever interrupted setup (including SystemExit).
        cls.tempdir.cleanup()
        raise
    # Dates come from the raw AAPL frame; localize to UTC for pipelines.
    cls.dates = cls.raw_data[cls.AAPL].index.tz_localize('UTC')
    cls.AAPL_split_date = Timestamp("2014-06-09", tz='UTC')
def make_pipeline_engine(bundle=None, start=None, end=None, live=False):
    """Create a pipeline engine for the dates in (start, end).

    This mirrors the ``run_pipeline`` workflow from Quantopian's
    environment.  When *bundle* is omitted the Sharadar bundle is
    loaded; *start* defaults to the bundle's first trading day and
    *end* to today (UTC).

    NOTE(review): ``start`` and ``end`` are resolved here but not used
    further in this function — confirm whether callers rely on them.
    """
    if bundle is None:
        bundle = load_sharadar_bundle()
    if start is None:
        start = bundle.equity_daily_bar_reader.first_trading_day
    if end is None:
        end = pd.to_datetime('today', utc=True)

    loader = USEquityPricingLoader(
        bundle.equity_daily_bar_reader,
        bundle.adjustment_reader,
        SimpleFXRateReader(),
    )

    def dispatch(column):
        # Only US equity pricing columns are serviced by this engine.
        if column not in USEquityPricing.columns:
            raise ValueError("No PipelineLoader registered for column %s." % column)
        return loader

    bundle.asset_finder.is_live_trading = live
    return BundlePipelineEngine(get_loader=dispatch,
                                asset_finder=bundle.asset_finder)
def init_class_fixtures(cls):
    """Extend the inherited fixtures with a pricing loader and the
    AAPL date index / split date used by the tests in this class.
    """
    super(PipelineAlgorithmTestCase, cls).init_class_fixtures()
    # Loader reads daily bars from bcolz and applies recorded adjustments.
    cls.pipeline_loader = USEquityPricingLoader(
        cls.bcolz_equity_daily_bar_reader,
        cls.adjustment_reader,
    )
    # Dates come from the raw AAPL frame; localize to UTC for pipelines.
    cls.dates = cls.raw_data[cls.AAPL].index.tz_localize('UTC')
    cls.AAPL_split_date = Timestamp("2014-06-09", tz='UTC')
def init_class_fixtures(cls):
    """Extend the inherited fixtures with asset-universe shortcuts and
    a pricing loader over the synthetic bcolz daily bars.
    """
    super(SyntheticBcolzTestCase, cls).init_class_fixtures()
    cls.all_asset_ids = cls.asset_finder.sids
    # Latest end_date across the equity universe.
    cls.last_asset_end = cls.equity_info['end_date'].max()
    cls.pipeline_loader = USEquityPricingLoader(
        cls.bcolz_daily_bar_reader,
        cls.adjustment_reader,
    )
def setUpClass(cls):
    """Build class fixtures: a rotating universe of six synthetic
    assets over 2015, written to a temporary bcolz store, plus a
    pipeline loader over that store with no adjustments.

    If writing the bcolz data fails, the temp dir is cleaned up and the
    exception re-raised.
    """
    cls.first_asset_start = Timestamp('2015-04-01', tz='UTC')
    cls.env = TradingEnvironment()
    cls.trading_day = day = cls.env.trading_day
    cls.calendar = date_range('2015', '2015-08', tz='UTC', freq=day)
    # Assets start 4 trading days apart and each live for 8 days, so
    # the universe "rotates" over the calendar.
    cls.asset_info = make_rotating_asset_info(
        num_assets=6,
        first_start=cls.first_asset_start,
        frequency=day,
        periods_between_starts=4,
        asset_lifetime=8,
    )
    cls.last_asset_end = cls.asset_info['end_date'].max()
    cls.all_assets = cls.asset_info.index
    # Register the assets before resolving the finder.
    cls.env.write_data(equities_df=cls.asset_info)
    cls.finder = cls.env.asset_finder
    cls.temp_dir = TempDirectory()
    cls.temp_dir.create()
    try:
        cls.writer = SyntheticDailyBarWriter(
            asset_info=cls.asset_info[['start_date', 'end_date']],
            calendar=cls.calendar,
        )
        table = cls.writer.write(
            cls.temp_dir.getpath('testdata.bcolz'),
            cls.calendar,
            cls.all_assets,
        )
        # No adjustments: the null reader returns empty adjustment maps.
        cls.pipeline_loader = USEquityPricingLoader(
            BcolzDailyBarReader(table),
            NullAdjustmentReader(),
        )
    except:
        # Bare except is deliberate: always clean up the temp dir, then
        # re-raise whatever interrupted setup.
        cls.temp_dir.cleanup()
        raise
def test_read_with_adjustments(self):
    """Highs and volumes loaded through the pricing loader must equal
    the previous-day baseline with all applicable adjustments applied:
    splits/mergers/dividends for prices, splits only (inverted ratio)
    for volumes.
    """
    columns = [USEquityPricing.high, USEquityPricing.volume]
    query_days = self.calendar_days_between(TEST_QUERY_START, TEST_QUERY_STOP)
    # Our expected results for each day are based on values from the
    # previous day.
    shifted_query_days = self.calendar_days_between(
        TEST_QUERY_START,
        TEST_QUERY_STOP,
        shift=-1,
    )
    pricing_loader = USEquityPricingLoader(
        self.bcolz_equity_daily_bar_reader,
        self.adjustment_reader,
    )
    results = pricing_loader.load_adjusted_array(
        columns,
        dates=query_days,
        assets=Int64Index(arange(1, 7)),
        # All (day, asset) cells are considered valid.
        mask=ones((len(query_days), 6), dtype=bool),
    )
    highs, volumes = map(getitem(results), columns)
    expected_baseline_highs = expected_bar_values_2d(
        shifted_query_days,
        self.asset_info,
        'high',
    )
    expected_baseline_volumes = expected_bar_values_2d(
        shifted_query_days,
        self.asset_info,
        'volume',
    )
    # At each point in time, the AdjustedArrays should yield the baseline
    # with all adjustments up to that date applied.
    for windowlen in range(1, len(query_days) + 1):
        for offset, window in enumerate(highs.traverse(windowlen)):
            baseline = expected_baseline_highs[offset:offset + windowlen]
            baseline_dates = query_days[offset:offset + windowlen]
            expected_adjusted_highs = self.apply_adjustments(
                baseline_dates,
                self.assets,
                baseline,
                # Apply all adjustments.
                concat([SPLITS, MERGERS, DIVIDENDS_EXPECTED],
                       ignore_index=True),
            )
            # allclose, not equal: dividend ratios introduce float error.
            assert_allclose(expected_adjusted_highs, window)
        for offset, window in enumerate(volumes.traverse(windowlen)):
            baseline = expected_baseline_volumes[offset:offset + windowlen]
            baseline_dates = query_days[offset:offset + windowlen]
            # Apply only splits and invert the ratio: a price split down
            # implies a volume scale up.
            adjustments = SPLITS.copy()
            adjustments.ratio = 1 / adjustments.ratio
            expected_adjusted_volumes = self.apply_adjustments(
                baseline_dates,
                self.assets,
                baseline,
                adjustments,
            )
            # FIXME: Make AdjustedArray properly support integral types.
            assert_array_equal(
                expected_adjusted_volumes,
                window.astype(uint32),
            )
    # Verify that we checked up to the longest possible window.
    with self.assertRaises(WindowLengthTooLong):
        highs.traverse(windowlen + 1)
    with self.assertRaises(WindowLengthTooLong):
        volumes.traverse(windowlen + 1)
def test_read_no_adjustments(self):
    """With a NullAdjustmentReader, loaded closes and volumes must
    match the raw previous-day baseline exactly, for every window of
    every valid length.
    """
    null_reader = NullAdjustmentReader()
    columns = [USEquityPricing.close, USEquityPricing.volume]
    query_days = self.calendar_days_between(TEST_QUERY_START, TEST_QUERY_STOP)
    # Expected values come from the session before each query day.
    shifted_query_days = self.calendar_days_between(
        TEST_QUERY_START,
        TEST_QUERY_STOP,
        shift=-1,
    )

    # The null reader must report no adjustments for either column.
    self.assertEqual(
        null_reader.load_adjustments(
            [c.name for c in columns],
            query_days,
            self.assets,
        ),
        [{}, {}],
    )

    loader = USEquityPricingLoader(
        self.bcolz_equity_daily_bar_reader,
        null_reader,
    )
    results = loader.load_adjusted_array(
        columns,
        dates=query_days,
        assets=self.assets,
        # Every (day, asset) cell is considered valid.
        mask=ones((len(query_days), len(self.assets)), dtype=bool),
    )
    closes, volumes = map(getitem(results), columns)
    expected_closes = expected_bar_values_2d(
        shifted_query_days,
        self.asset_info,
        'close',
    )
    expected_volumes = expected_bar_values_2d(
        shifted_query_days,
        self.asset_info,
        'volume',
    )

    # AdjustedArrays should yield the same data as the expected baseline,
    # window by window.
    for length in range(1, len(query_days) + 1):
        for start, window in enumerate(closes.traverse(length)):
            assert_array_equal(expected_closes[start:start + length], window)
        for start, window in enumerate(volumes.traverse(length)):
            assert_array_equal(expected_volumes[start:start + length], window)

    # ``length`` now equals the longest valid window; one step beyond
    # must raise.
    with self.assertRaises(WindowLengthTooLong):
        closes.traverse(length + 1)
    with self.assertRaises(WindowLengthTooLong):
        volumes.traverse(length + 1)
def get_pipeline_loader(term):
    """Return a pricing loader for *term*.

    Every term is served by the same US-equity pricing loader built
    from the surrounding scope's raw price and adjustments readers;
    *term* itself is not inspected.
    """
    loader = USEquityPricingLoader(raw_price_loader, adjustments_loader)
    return loader