def per_symbol(symbol):
    """Load the gzipped Quandl sample CSV for *symbol* as a DataFrame.

    The frame is indexed by parsed ``Date``, has lowercase snake_case
    column names, and carries a ``sid`` column looked up from ``sids``.
    """
    column_names = {
        'Open': 'open',
        'High': 'high',
        'Low': 'low',
        'Close': 'close',
        'Volume': 'volume',
        'Date': 'date',
        'Ex-Dividend': 'ex_dividend',
        'Split Ratio': 'split_ratio',
    }
    raw = pd.read_csv(
        test_resource_path('quandl_samples', symbol + '.csv.gz'),
        parse_dates=['Date'],
        index_col='Date',
        usecols=[
            'Open',
            'High',
            'Low',
            'Close',
            'Volume',
            'Date',
            'Ex-Dividend',
            'Split Ratio',
        ],
        na_values=['NA'],
    )
    frame = raw.rename(columns=column_names)
    frame['sid'] = sids[symbol]
    return frame
def per_symbol(symbol):
    """Read the gzipped Quandl fixture for ``symbol`` into a tidy frame.

    Returns a ``Date``-indexed DataFrame with snake_case columns and a
    ``sid`` column taken from the module-level ``sids`` mapping.
    """
    renames = {
        'Open': 'open',
        'High': 'high',
        'Low': 'low',
        'Close': 'close',
        'Volume': 'volume',
        'Date': 'date',
        'Ex-Dividend': 'ex_dividend',
        'Split Ratio': 'split_ratio',
    }
    # ``usecols`` is order-insensitive, so the rename keys double as the
    # column selection.
    data = pd.read_csv(
        test_resource_path('quandl_samples', symbol + '.csv.gz'),
        parse_dates=['Date'],
        index_col='Date',
        usecols=list(renames),
        na_values=['NA'],
    ).rename(columns=renames)
    data['sid'] = sids[symbol]
    return data
def pricing_callback(request):
    """Mock-HTTP callback serving the gzipped pricing CSV for a request.

    The symbol is decoded from ``request.url`` and the matching
    ``yahoo_samples`` fixture is returned as a 200 response whose headers
    advertise gzip-encoded CSV content.
    """
    fixture = test_resource_path(
        'yahoo_samples',
        get_symbol_from_url(request.url) + '.csv.gz',
    )
    with open(fixture, 'rb') as fh:
        body = fh.read()
    headers = {
        'content-encoding': 'gzip',
        'content-type': 'text/csv',
    }
    return 200, headers, body
def init_class_fixtures(cls):
    """Prepare class-level fixtures for the examples tests.

    Registers a throwaway ``'test'`` bundle (unregistered again at class
    teardown), unpacks the example-data archive into the class tmpdir,
    opens the pickled expected-performance cache for the running pandas
    version, and refreshes the mtimes of the bundled market-data files.
    """
    super(ExamplesTests, cls).init_class_fixtures()
    # Dummy bundle; the teardown callback removes the registration.
    register('test', lambda *args: None)
    cls.add_class_callback(partial(unregister, 'test'))
    with tarfile.open(test_resource_path('example_data.tar.gz')) as tar:
        tar.extractall(cls.tmpdir.path)
    # Expected perf frames are keyed by pandas version, dots replaced
    # with dashes in the directory name.
    version_dir = pd.__version__.replace('.', '-')
    cls.expected_perf = dataframe_cache(
        cls.tmpdir.getpath('example_data/expected_perf/%s' % version_dir),
        serialization='pickle',
    )
    for data in ('SPY_benchmark.csv', 'treasury_curves.csv'):
        update_modified_time(
            cls.tmpdir.getpath('example_data/root/data/' + data),
        )
def init_class_fixtures(cls):
    """Set up the shared example-data fixtures for this test class.

    A placeholder ``'test'`` bundle is registered (and scheduled for
    unregistration), the example archive is extracted into the tmpdir,
    the expected-perf cache for the current pandas version is opened,
    and the bundled market-data CSVs get fresh modification times.
    """
    super(ExamplesTests, cls).init_class_fixtures()
    register('test', lambda *args: None)
    cls.add_class_callback(partial(unregister, 'test'))
    archive = test_resource_path('example_data.tar.gz')
    with tarfile.open(archive) as tar:
        tar.extractall(cls.tmpdir.path)
    perf_path = cls.tmpdir.getpath(
        'example_data/expected_perf/%s' % pd.__version__.replace('.', '-'),
    )
    cls.expected_perf = dataframe_cache(perf_path, serialization='pickle')
    market_data = ('SPY_benchmark.csv', 'treasury_curves.csv')
    for name in market_data:
        target = cls.tmpdir.getpath('example_data/root/data/' + name)
        update_modified_time(target)
def zipfile_path(symbol):
    """Return the resource path of the gzipped sample CSV for *symbol*."""
    filename = symbol + '.csv.gz'
    return test_resource_path('quandl_samples', filename)
def adjustments_callback(request):
    """Mock-HTTP callback serving the compressed adjustments fixture.

    Decodes the symbol from ``request.url`` and answers with a bare 200
    response whose body is the decompressed fixture contents.
    """
    symbol = get_symbol_from_url(request.url)
    path = test_resource_path('yahoo_samples', symbol + '.adjustments.gz')
    return 200, {}, read_compressed(path)
def test_bundle(self):
    """End-to-end test of the ``quandl`` bundle.

    Ingests the sample data through a patched ``read_csv`` (so no network
    access occurs), then verifies the assets, daily pricing, and
    adjustments read back from the resulting bundle.
    """
    # Map every URL the ingester will request to a local fixture:
    # one wiki-pricing URL per symbol plus two paginated metadata URLs.
    url_map = merge(
        {
            format_wiki_url(
                self.api_key,
                symbol,
                self.start_date,
                self.end_date,
            ): test_resource_path('quandl_samples', symbol + '.csv.gz')
            for symbol in self.symbols
        },
        {
            format_metadata_url(self.api_key, n): test_resource_path(
                'quandl_samples',
                'metadata-%d.csv.gz' % n,
            )
            for n in (1, 2)
        },
    )
    # Ingest into a throwaway root so the test leaves no state behind.
    catalyst_root = self.enter_instance_context(tmp_dir()).path
    environ = {
        'ZIPLINE_ROOT': catalyst_root,
        'QUANDL_API_KEY': self.api_key,
    }
    # strict=True: fail if any URL outside ``url_map`` is requested.
    with patch_read_csv(url_map, strict=True):
        ingest('quandl', environ=environ)
    bundle = load('quandl', environ=environ)
    sids = 0, 1, 2, 3
    assert_equal(set(bundle.asset_finder.sids), set(sids))
    # Every ingested equity should span the expected date range.
    for equity in bundle.asset_finder.retrieve_all(sids):
        assert_equal(equity.start_date, self.asset_start, msg=equity)
        assert_equal(equity.end_date, self.asset_end, msg=equity)
    sessions = self.calendar.all_sessions
    # Snap the asset start/end to the nearest trading sessions
    # (bfill forward onto a session, ffill back onto one).
    actual = bundle.equity_daily_bar_reader.load_raw_arrays(
        self.columns,
        sessions[sessions.get_loc(self.asset_start, 'bfill')],
        sessions[sessions.get_loc(self.asset_end, 'ffill')],
        sids,
    )
    expected_pricing, expected_adjustments = self._expected_data(
        bundle.asset_finder,
    )
    # array_decimal=2: pricing is compared to two decimal places.
    assert_equal(actual, expected_pricing, array_decimal=2)
    adjustments_for_cols = bundle.adjustment_reader.load_adjustments(
        self.columns,
        sessions,
        pd.Index(sids),
    )
    # Adjustments come back one entry per requested column, in order.
    for column, adjustments, expected in zip(
        self.columns,
        adjustments_for_cols,
        expected_adjustments,
    ):
        assert_equal(
            adjustments,
            expected,
            msg=column,
        )