def setUpClass(cls):
    """Build shared minute and daily simulation fixtures for the class.

    Seeds the RNG so the generated trade sources are reproducible, then
    creates one (sim_params, trade_source) pair per emission rate and
    stores them in ``cls.sim_and_source`` keyed by frequency name.
    """
    random.seed(0)  # deterministic trade generation across runs
    cls.sids = (1, 2, 3)

    minute_sim_ps = factory.create_simulation_parameters(
        num_days=3,
        sids=cls.sids,
        data_frequency='minute',
        emission_rate='minute',
    )
    daily_sim_ps = factory.create_simulation_parameters(
        num_days=30,
        sids=cls.sids,
        data_frequency='daily',
        emission_rate='daily',
    )
    # Map frequency name -> (sim params, matching trade source) so tests
    # can parameterize over the two emission rates.
    cls.sim_and_source = {
        'minute': (minute_sim_ps, factory.create_minutely_trade_source(
            cls.sids,
            trade_count=45,
            sim_params=minute_sim_ps,
        )),
        'daily': (daily_sim_ps, factory.create_trade_source(
            cls.sids,
            trade_count=90,
            trade_time_increment=timedelta(days=1),
            sim_params=daily_sim_ps,
        )),
    }
def init_class_fixtures(cls): super(SecurityListTestCase, cls).init_class_fixtures() # this is ugly, but we need to create two different # TradingEnvironment/DataPortal pairs cls.start = pd.Timestamp(list(LEVERAGED_ETFS.keys())[0]) end = pd.Timestamp('2015-02-17', tz='utc') cls.extra_knowledge_date = pd.Timestamp('2015-01-27', tz='utc') cls.trading_day_before_first_kd = pd.Timestamp('2015-01-23', tz='utc') symbols = ['AAPL', 'GOOG', 'BZQ', 'URTY', 'JFT'] cls.env = cls.enter_class_context(tmp_trading_env( equities=pd.DataFrame.from_records([{ 'start_date': cls.start, 'end_date': end, 'symbol': symbol, 'exchange': "TEST", } for symbol in symbols]), load=cls.make_load_function(), )) cls.sim_params = factory.create_simulation_parameters( start=cls.start, num_days=4, trading_calendar=cls.trading_calendar ) cls.sim_params2 = sp2 = factory.create_simulation_parameters( start=cls.trading_day_before_first_kd, num_days=4 ) cls.env2 = cls.enter_class_context(tmp_trading_env( equities=pd.DataFrame.from_records([{ 'start_date': sp2.start_session, 'end_date': sp2.end_session, 'symbol': symbol, 'exchange': "TEST", } for symbol in symbols]), load=cls.make_load_function(), )) cls.tempdir = cls.enter_class_context(tmp_dir()) cls.tempdir2 = cls.enter_class_context(tmp_dir()) cls.data_portal = create_data_portal( asset_finder=cls.env.asset_finder, tempdir=cls.tempdir, sim_params=cls.sim_params, sids=range(0, 5), trading_calendar=cls.trading_calendar, ) cls.data_portal2 = create_data_portal( asset_finder=cls.env2.asset_finder, tempdir=cls.tempdir2, sim_params=cls.sim_params2, sids=range(0, 5), trading_calendar=cls.trading_calendar, )
def setUpClass(cls):
    """Create minute- and daily-frequency sim params plus trade sources."""
    random.seed(0)
    cls.sids = (1, 2, 3)

    params_minute = factory.create_simulation_parameters(
        num_days=3, data_frequency="minute", emission_rate="minute"
    )
    params_daily = factory.create_simulation_parameters(
        num_days=30, data_frequency="daily", emission_rate="daily"
    )

    minute_source = factory.create_minutely_trade_source(
        cls.sids, sim_params=params_minute
    )
    daily_source = factory.create_trade_source(
        cls.sids,
        trade_time_increment=timedelta(days=1),
        sim_params=params_daily,
    )

    cls.sim_and_source = {
        "minute": (params_minute, minute_source),
        "daily": (params_daily, daily_source),
    }
def test_minutely_fetcher(self):
    """Minutely fetcher signals should forward-fill across trading days."""
    self.responses.add(
        self.responses.GET,
        'https://fake.urls.com/aapl_minute_csv_data.csv',
        body=AAPL_MINUTE_CSV_DATA,
        content_type='text/csv',
    )

    sim_params = factory.create_simulation_parameters(
        start=pd.Timestamp("2006-01-03", tz='UTC'),
        end=pd.Timestamp("2006-01-10", tz='UTC'),
        emission_rate="minute",
        data_frequency="minute"
    )

    test_algo = TradingAlgorithm(
        script="""
from zipline.api import fetch_csv, record, sid

def initialize(context):
    fetch_csv('https://fake.urls.com/aapl_minute_csv_data.csv')

def handle_data(context, data):
    record(aapl_signal=data.current(sid(24), "signal"))
""", sim_params=sim_params, data_frequency="minute", env=self.env)

    # manually setting data portal and getting generator because we need
    # the minutely emission packets here. TradingAlgorithm.run() only
    # returns daily packets.
    test_algo.data_portal = FetcherDataPortal(self.env,
                                              self.trading_calendar)
    gen = test_algo.get_generator()
    perf_packets = list(gen)

    signal = [result["minute_perf"]["recorded_vars"]["aapl_signal"] for
              result in perf_packets if "minute_perf" in result]

    # six sessions x 390 minutes per session
    self.assertEqual(6 * 390, len(signal))

    # csv data is:
    # symbol,date,signal
    # aapl,1/4/06 5:31AM, 1
    # aapl,1/4/06 11:30AM, 2
    # aapl,1/5/06 5:31AM, 1
    # aapl,1/5/06 11:30AM, 3
    # aapl,1/9/06 5:31AM, 1
    # aapl,1/9/06 11:30AM, 4

    # for dates 1/3 to 1/10
    # 2 signals per day, only last signal is taken. So we expect
    # 390 bars of signal NaN on 1/3
    # 390 bars of signal 2 on 1/4
    # 390 bars of signal 3 on 1/5
    # 390 bars of signal 3 on 1/6 (forward filled)
    # 390 bars of signal 4 on 1/9
    # 390 bars of signal 4 on 1/10 (forward filled)
    np.testing.assert_array_equal([np.NaN] * 390, signal[0:390])
    np.testing.assert_array_equal([2] * 390, signal[390:780])
    np.testing.assert_array_equal([3] * 780, signal[780:1560])
    np.testing.assert_array_equal([4] * 780, signal[1560:])
def test_before_trading_start(self, test_name, num_days, freq, emission_rate):
    """before_trading_start should fire at 8:45 US/Eastern each session."""
    sim_params = factory.create_simulation_parameters(
        num_days=num_days,
        data_frequency=freq,
        emission_rate=emission_rate,
    )

    def fake_benchmark(self, dt):
        return 0.01

    with patch.object(BenchmarkSource, "get_value",
                      self.fake_minutely_benchmark):
        algo = BeforeTradingAlgorithm(sim_params=sim_params, env=self.env)
        algo.run(FakeDataPortal(self.env))

        self.assertEqual(
            len(algo.perf_tracker.sim_params.sessions),
            num_days,
        )

        expected_minutes = days_at_time(
            sim_params.sessions, time(8, 45), "US/Eastern"
        )
        self.assertTrue(
            expected_minutes.equals(
                pd.DatetimeIndex(algo.before_trading_at)
            ),
            "Expected %s but was %s." % (sim_params.sessions,
                                         algo.before_trading_at),
        )
def test_algo_without_rl_violation_after_delete(self):
    """After BZQ is deleted from the restricted list, trading it is legal."""
    sim_params = factory.create_simulation_parameters(
        start=self.extra_knowledge_date,
        num_days=4,
    )
    equities = pd.DataFrame.from_records([{
        'symbol': 'BZQ',
        'start_date': sim_params.start_session,
        'end_date': sim_params.end_session,
        'exchange': "TEST",
    }])

    with TempDirectory() as tempdir, \
            security_list_copy(), \
            tmp_trading_env(equities=equities) as trading_env:
        # add a delete statement removing bzq
        # write a new delete statement file to disk
        add_security_data([], ['BZQ'])

        portal = create_data_portal(
            trading_env.asset_finder,
            tempdir,
            sim_params,
            range(0, 5),
            trading_calendar=self.trading_calendar,
        )
        algo = RestrictedAlgoWithoutCheck(
            symbol='BZQ', sim_params=sim_params, env=trading_env
        )
        algo.run(portal)
def setUp(self):
    """Create an OHLC test panel source spanning Q1 1990."""
    setup_logger(self)
    params = factory.create_simulation_parameters(
        start=datetime(1990, 1, 1, tzinfo=pytz.utc),
        end=datetime(1990, 3, 30, tzinfo=pytz.utc),
    )
    self.source, self.panel = factory.create_test_panel_ohlc_source(params)
def test_blotter_processes_splits(self):
    """A split should rescale only the affected sid's open order."""
    params = factory.create_simulation_parameters()
    blotter = Blotter()
    blotter.set_date(params.period_start)

    # two open limit orders with very low limit prices,
    # one for sid 1 and one for sid 2
    blotter.order(1, 100, LimitOrder(10))
    blotter.order(2, 100, LimitOrder(10))

    # send in a split for sid 2 only
    split = factory.create_split(
        2, 0.33333, params.period_start + timedelta(days=1)
    )
    blotter.process_split(split)

    for sid in (1, 2):
        open_for_sid = blotter.open_orders[sid]
        self.assertIsNotNone(open_for_sid)
        self.assertEqual(1, len(open_for_sid))

    aapl_order = blotter.open_orders[1][0].to_dict()
    fls_order = blotter.open_orders[2][0].to_dict()

    # the aapl order is untouched
    self.assertEqual(100, aapl_order['amount'])
    self.assertEqual(10, aapl_order['limit'])
    self.assertEqual(1, aapl_order['sid'])

    # the fls order was rescaled to 300 shares at 3.33
    self.assertEqual(300, fls_order['amount'])
    self.assertEqual(3.33, fls_order['limit'])
    self.assertEqual(2, fls_order['sid'])
def test_lse_algorithm(self):
    """Run a two-month simulation on the LSE calendar and check day count."""
    lse = trading.TradingEnvironment(
        bm_symbol='^FTSE',
        exchange_tz='Europe/London'
    )

    with lse:
        sim_params = factory.create_simulation_parameters(
            start=datetime(2012, 5, 1, tzinfo=pytz.utc),
            end=datetime(2012, 6, 30, tzinfo=pytz.utc)
        )
        algo = TestAlgo(self, sim_params=sim_params)
        trade_source = factory.create_daily_trade_source(
            [8229], 200, sim_params
        )
        algo.set_sources([trade_source])

        gen = algo.get_generator()
        results = list(gen)
        # 42 LSE trading sessions fall in May-June 2012
        self.assertEqual(len(results), 42)
        # May 7, 2012 was an LSE holiday, confirm the 4th trading
        # day was May 8.
        self.assertEqual(results[4]['daily_perf']['period_open'],
                         datetime(2012, 5, 8, 8, 30, tzinfo=pytz.utc))
def run(self, identity, data, strategy):
    '''
    Wrapper of zipline run() method. Use the configuration set so far
    to build up the trading environment and launch the system.

    Parameters: `identity` names the engine instance, `data` is the
    market data handed to the engine, and `strategy` carries the module
    configuration (including manager cash used as capital base).
    Returns an Analyze object wrapping the daily stats and risk report.
    '''
    engine = TradingEngine(identity, self.configuration['modules'], strategy)

    #NOTE This method does not change anything
    #engine.set_sources([DataLiveSource(data_tmp)])
    #TODO A new command line parameter ? only minutely and daily
    #     (and hourly normaly) Use filter parameter of datasource ?
    #engine.set_data_frequency(self.configuration['frequency'])
    engine.is_live = self.configuration['live']

    # Running simulation with it
    #FIXME crash if trading one day that is not a trading day
    with self.context:
        # The simulation window is derived from the configured index:
        # first entry is the start, last entry is the end.
        sim_params = create_simulation_parameters(
            capital_base=strategy['manager']['cash'],
            start=self.configuration['index'][0],
            end=self.configuration['index'][-1])

        daily_stats = engine.trade(data, sim_params=sim_params)

    return Analyze(
        results=daily_stats,
        metrics=engine.risk_report,
        configuration=self.configuration)
def test_history_daily_data_1m_window(self):
    """Requesting minute history in a daily-data algo must raise."""
    algo_text = """
from zipline.api import history, add_history

def initialize(context):
    add_history(bar_count=1, frequency='1m', field='price')

def handle_data(context, data):
    prices = history(bar_count=3, frequency='1d', field='price')
""".strip()

    start = pd.Timestamp('2006-03-20', tz='UTC')
    end = pd.Timestamp('2006-03-30', tz='UTC')
    params = factory.create_simulation_parameters(start=start, end=end)

    with self.assertRaises(IncompatibleHistoryFrequency):
        algo = TradingAlgorithm(
            script=algo_text,
            data_frequency='daily',
            sim_params=params,
        )
        source = RandomWalkSource(start=start, end=end)
        algo.run(source)
def test_algo_with_rl_violation(self):
    """Ordering a restricted leveraged ETF must raise a control violation."""
    params = factory.create_simulation_parameters(
        start=list(LEVERAGED_ETFS.keys())[0], num_days=4)

    # run for BZQ, then repeat with a symbol from a different lookup date
    for ticker in ('BZQ', 'JFT'):
        events = factory.create_trade_history(
            ticker,
            [10.0, 10.0, 11.0, 11.0],
            [100, 100, 100, 300],
            timedelta(days=1),
            params,
        )
        self.source = SpecificEquityTrades(event_list=events)
        algo = RestrictedAlgoWithoutCheck(symbol=ticker, sim_params=params)
        with self.assertRaises(TradingControlViolation) as ctx:
            algo.run(self.source)
        self.check_algo_exception(algo, ctx, 0)
def test_fetcher_in_before_trading_start(self):
    """Fetcher columns must be readable from before_trading_start."""
    self.responses.add(
        self.responses.GET,
        'https://fake.urls.com/fetcher_nflx_data.csv',
        body=NFLX_DATA,
        content_type='text/csv',
    )

    sim_params = factory.create_simulation_parameters(
        start=pd.Timestamp("2013-06-13", tz='UTC'),
        end=pd.Timestamp("2013-11-15", tz='UTC'),
        data_frequency="minute"
    )

    results = self.run_algo("""
from zipline.api import fetch_csv, record, symbol

def initialize(context):
    fetch_csv('https://fake.urls.com/fetcher_nflx_data.csv',
              date_column = 'Settlement Date',
              date_format = '%m/%d/%y')
    context.stock = symbol('NFLX')

def before_trading_start(context, data):
    record(Short_Interest = data.current(context.stock, 'dtc'))
""", sim_params=sim_params, data_frequency="minute")

    values = results["Short_Interest"]
    # NaN before the first settlement date appears in the CSV, then the
    # recorded 'dtc' value stays constant until the next settlement date.
    np.testing.assert_array_equal(values[0:33], np.full(33, np.nan))
    np.testing.assert_array_almost_equal(values[33:44], [1.690317] * 11)
    np.testing.assert_array_almost_equal(values[44:55], [2.811858] * 11)
    np.testing.assert_array_almost_equal(values[55:64], [2.50233] * 9)
    np.testing.assert_array_almost_equal(values[64:75], [2.550829] * 11)
    np.testing.assert_array_almost_equal(values[75:], [2.64484] * 35)
def test_fetcher_universe_non_security_return(self):
    """A fetcher CSV with no real assets should leave fetcher_assets empty."""
    self.responses.add(
        self.responses.GET,
        'https://fake.urls.com/bad_fetcher_universe_data.csv',
        body=NON_ASSET_FETCHER_UNIVERSE_DATA,
        content_type='text/csv',
    )

    params = factory.create_simulation_parameters(
        start=pd.Timestamp("2006-01-09", tz='UTC'),
        end=pd.Timestamp("2006-01-10", tz='UTC'),
    )

    algo_code = """
from zipline.api import fetch_csv

def initialize(context):
    fetch_csv(
        'https://fake.urls.com/bad_fetcher_universe_data.csv',
        date_format='%m/%d/%Y'
    )

def handle_data(context, data):
    if len(data.fetcher_assets) > 0:
        raise Exception("Shouldn't be any assets in fetcher_assets!")
"""
    self.run_algo(algo_code, sim_params=params)
def test_fetcher_bad_data(self):
    """Unknown fetcher columns come back as NaN instead of raising."""
    self.responses.add(
        self.responses.GET,
        'https://fake.urls.com/fetcher_nflx_data.csv',
        body=NFLX_DATA,
        content_type='text/csv',
    )

    params = factory.create_simulation_parameters(
        start=pd.Timestamp("2013-06-12", tz='UTC'),
        end=pd.Timestamp("2013-06-14", tz='UTC'),
        data_frequency="minute",
    )

    algo_code = """
from zipline.api import fetch_csv, symbol
import numpy as np

def initialize(context):
    fetch_csv('https://fake.urls.com/fetcher_nflx_data.csv',
              date_column = 'Settlement Date',
              date_format = '%m/%d/%y')
    context.nflx = symbol('NFLX')
    context.aapl = symbol('AAPL')

def handle_data(context, data):
    assert np.isnan(data.current(context.nflx, 'invalid_column'))
    assert np.isnan(data.current(context.aapl, 'invalid_column'))
    assert np.isnan(data.current(context.aapl, 'dtc'))
"""
    results = self.run_algo(algo_code, sim_params=params,
                            data_frequency="minute")
    self.assertEqual(3, len(results))
def test_minutely_emissions_generate_performance_stats_for_last_day(self):
    """A one-day minutely simulation still counts as a full day."""
    params = factory.create_simulation_parameters(num_days=1)
    params.data_frequency = 'minute'
    params.emission_rate = 'minute'

    algo = NoopAlgorithm()
    algo.run(source=[], sim_params=params)

    self.assertEqual(algo.perf_tracker.day_count, 1.0)
def test_algo_with_rl_violation_cumulative(self):
    """
    Add a new restriction, run a test long after both knowledge dates,
    make sure stock from original restriction set is still disallowed.
    """
    # Start a week past the first LEVERAGED_ETFS knowledge date.
    sim_params = factory.create_simulation_parameters(
        start=list(
            LEVERAGED_ETFS.keys())[0] + timedelta(days=7),
        num_days=4)

    with security_list_copy():
        # AAPL is the newly added restriction; BZQ comes from the
        # original restricted set and must still be disallowed.
        add_security_data(['AAPL'], [])
        trade_history = factory.create_trade_history(
            'BZQ',
            [10.0, 10.0, 11.0, 11.0],
            [100, 100, 100, 300],
            timedelta(days=1),
            sim_params,
            env=self.env,
        )
        self.source = SpecificEquityTrades(event_list=trade_history,
                                           env=self.env)
        algo = RestrictedAlgoWithoutCheck(
            symbol='BZQ', sim_params=sim_params, env=self.env)
        with self.assertRaises(TradingControlViolation) as ctx:
            algo.run(self.source)

        self.check_algo_exception(algo, ctx, 0)
def setUpClass(cls):
    """Write two equities (sids 24 and 25) of daily bars to a bcolz store.

    Both sids get identical flat $50 OHLC bars over the two-day sim
    window; only volume differs between the days. The resulting reader
    backs the class-level DataPortal.
    """
    setup_logger(cls)
    cls.env = trading.TradingEnvironment()
    cls.sim_params = factory.create_simulation_parameters(
        start=pd.Timestamp("2006-01-05", tz='UTC'),
        end=pd.Timestamp("2006-01-06", tz='UTC')
    )

    # Register both sids for the sim window plus one extra trading day.
    cls.env.write_data(equities_data={
        24: {
            'start_date': cls.sim_params.trading_days[0],
            'end_date': cls.env.next_trading_day(
                cls.sim_params.trading_days[-1]
            )
        },
        25: {
            'start_date': cls.sim_params.trading_days[0],
            'end_date': cls.env.next_trading_day(
                cls.sim_params.trading_days[-1]
            )
        }
    })

    cls.tempdir = TempDirectory()

    # Flat $50 bars; volume is 100 on day one, 400 on day two.
    assets = {
        24: pd.DataFrame({
            "open": [50, 50],
            "high": [50, 50],
            "low": [50, 50],
            "close": [50, 50],
            "volume": [100, 400],
            "day": [day.value for day in cls.sim_params.trading_days]
        }),
        25: pd.DataFrame({
            "open": [50, 50],
            "high": [50, 50],
            "low": [50, 50],
            "close": [50, 50],
            "volume": [100, 400],
            "day": [day.value for day in cls.sim_params.trading_days]
        })
    }

    path = os.path.join(cls.tempdir.path, "tempdata.bcolz")

    DailyBarWriterFromDataFrames(assets).write(
        path,
        cls.sim_params.trading_days,
        assets
    )

    equity_daily_reader = BcolzDailyBarReader(path)

    cls.data_portal = DataPortal(
        cls.env,
        equity_daily_reader=equity_daily_reader,
    )
def test_factory_daily(self):
    """Daily trade source must yield events in strictly increasing time."""
    params = factory.create_simulation_parameters()
    source = factory.create_daily_trade_source([133], 200, params)

    previous = None
    for event in source:
        if previous:
            self.assertTrue(event.dt > previous.dt)
        previous = event
def setUp(self):
    """Create four days of sid-133 trades plus a test DataFrame source."""
    self.sim_params = factory.create_simulation_parameters(num_days=4)

    prices = [10.0, 10.0, 11.0, 11.0]
    volumes = [100, 100, 100, 300]
    events = factory.create_trade_history(
        133, prices, volumes, timedelta(days=1), self.sim_params
    )
    self.source = SpecificEquityTrades(event_list=events)
    self.df_source, self.df = factory.create_test_df_source(self.sim_params)
def setUp(self):
    """Create a default-window trade source for sid 133."""
    self.sim_params = factory.create_simulation_parameters()
    setup_logger(self)

    events = factory.create_trade_history(
        133,
        [10.0, 10.0, 11.0, 11.0],
        [100, 100, 100, 300],
        timedelta(days=1),
        self.sim_params,
    )
    self.source = SpecificEquityTrades(event_list=events)
def setUp(self):
    """Build a one-week DataFrame source starting 1990-01-01."""
    self.sim_params = factory.create_simulation_parameters(
        start=datetime(1990, 1, 1, tzinfo=pytz.utc),
        end=datetime(1990, 1, 8, tzinfo=pytz.utc),
    )
    setup_logger(self)
    self.source, self.df = factory.create_test_df_source(self.sim_params)
def test_minutely_emissions_generate_performance_stats_for_last_day(self):
    """One minutely-emission day should produce exactly one session."""
    params = factory.create_simulation_parameters(
        num_days=1,
        data_frequency='minute',
        emission_rate='minute',
    )

    with patch.object(BenchmarkSource, "get_value",
                      self.fake_minutely_benchmark):
        algo = NoopAlgorithm(sim_params=params, env=self.env)
        algo.run(FakeDataPortal(self.env))
        self.assertEqual(len(algo.perf_tracker.sim_params.sessions), 1)
def setUp(self):
    """Register 90 equities and a one-week simulation window."""
    self.sids = range(90)
    self.env = TradingEnvironment()
    self.env.write_data(equities_identifiers=self.sids)

    self.sim_params = factory.create_simulation_parameters(
        start=datetime(1990, 1, 1, tzinfo=pytz.utc),
        end=datetime(1990, 1, 8, tzinfo=pytz.utc),
        env=self.env,
    )
def setUpClass(cls):
    """Build a RiskReport over a mocked ten-day performance period."""
    start = datetime.datetime(year=2006, month=1, day=1, tzinfo=pytz.utc)
    end = datetime.datetime(year=2006, month=1, day=10, tzinfo=pytz.utc)
    cls.period = pd.date_range(start, end)

    params = create_simulation_parameters(start=start, end=end)
    params.capital_base = 200

    mock_perf = cls.create_mock_perf()
    cls.metrics = RiskReport(mock_perf, params)
def test_basic_history_positional_args(self):
    """
    Ensure that positional args work.

    Fix: uses ``assertEqual`` — the ``assertEquals`` alias is deprecated
    and was removed from ``unittest`` in Python 3.12.
    """
    algo_text = """
import copy
from zipline.api import history, add_history

def initialize(context):
    add_history(2, '1d', 'price')

def handle_data(context, data):
    prices = history(2, '1d', 'price')
    context.last_prices = copy.deepcopy(prices)
""".strip()

    #      March 2006
    # Su Mo Tu We Th Fr Sa
    #           1  2  3  4
    #  5  6  7  8  9 10 11
    # 12 13 14 15 16 17 18
    # 19 20 21 22 23 24 25
    # 26 27 28 29 30 31
    start = pd.Timestamp('2006-03-20', tz='UTC')
    end = pd.Timestamp('2006-03-21', tz='UTC')

    sim_params = factory.create_simulation_parameters(
        start=start, end=end)

    test_algo = TradingAlgorithm(
        script=algo_text,
        data_frequency='minute',
        sim_params=sim_params
    )

    source = RandomWalkSource(start=start, end=end)
    output = test_algo.run(source)
    self.assertIsNotNone(output)

    last_prices = test_algo.last_prices[0]
    # history window spans the last minute of the prior session through
    # the final minute of the current session
    oldest_dt = pd.Timestamp(
        '2006-03-20 4:00 PM', tz='US/Eastern').tz_convert('UTC')
    newest_dt = pd.Timestamp(
        '2006-03-21 4:00 PM', tz='US/Eastern').tz_convert('UTC')

    self.assertEqual(oldest_dt, last_prices.index[0])
    self.assertEqual(newest_dt, last_prices.index[-1])
    # RandomWalkSource is seeded deterministically, so exact values hold
    self.assertEqual(139.36946942498648, last_prices[oldest_dt])
    self.assertEqual(180.15661995395106, last_prices[newest_dt])
def setUp(self):
    """Create a concurrent minutely trade source for two sids."""
    setup_logger(self)
    test_sids = [1, 2]
    self.sim_params = factory.create_simulation_parameters(
        num_days=2, sids=test_sids
    )
    self.source = factory.create_minutely_trade_source(
        test_sids,
        trade_count=100,
        sim_params=self.sim_params,
        concurrent=True,
    )
def setUp(self):
    """Minute-bar DataFrame source with daily emission, 1990-01-03..08.

    Fix: ``pd.datetime`` was deprecated in pandas 0.25 and removed in
    pandas 2.0; the stdlib ``datetime`` constructor (already used by
    sibling fixtures in this module) is a drop-in replacement.
    """
    start = datetime(1990, 1, 3, 0, 0, 0, 0, pytz.utc)
    end = datetime(1990, 1, 8, 0, 0, 0, 0, pytz.utc)
    self.sim_params = factory.create_simulation_parameters(
        start=start,
        end=end,
    )
    # emission/frequency are mutated after construction on purpose:
    # minute bars, but daily performance emission
    self.sim_params.emission_rate = 'daily'
    self.sim_params.data_frequency = 'minute'
    setup_logger(self)
    self.source, self.df = \
        factory.create_test_df_source(bars='minute')
def setUp(self):
    """Four days of trades for a single sid (133)."""
    self.sim_params = factory.create_simulation_parameters(num_days=4)
    self.sidint = 133

    self.trade_history = factory.create_trade_history(
        self.sidint,
        [10.0, 10.0, 11.0, 11.0],
        [100, 100, 100, 300],
        timedelta(days=1),
        self.sim_params,
    )
    self.source = SpecificEquityTrades(event_list=self.trade_history)
def test_fetcher_universe(self, name, data, column_name):
    """Fetcher CSV membership must drive data.fetcher_assets per day.

    Parameterized over CSVs with and without a custom symbol column;
    ``column_name`` (when set) is spliced into the algo's fetch_csv call.
    """
    # Patching fetch_url here rather than using responses because (a) it's
    # easier given the paramaterization, and (b) there are enough tests
    # using responses that the fetch_url code is getting a good workout so
    # we don't have to use it in every test.
    with patch('zipline.sources.requests_csv.PandasRequestsCSV.fetch_url',
               new=lambda *a, **k: data):
        sim_params = factory.create_simulation_parameters(
            start=pd.Timestamp("2006-01-09", tz='UTC'),
            end=pd.Timestamp("2006-01-11", tz='UTC')
        )

        # NOTE: this is a str.format template — literal braces in the
        # algo source are doubled, and {token} is replaced below.
        algocode = """
from pandas import Timestamp
from pandas.tseries.tools import normalize_date
from zipline.api import fetch_csv, record, sid, get_datetime

def initialize(context):
    fetch_csv(
        'https://dl.dropbox.com/u/16705795/dtoc_history.csv',
        date_format='%m/%d/%Y'{token}
    )
    context.expected_sids = {{
        Timestamp('2006-01-09 00:00:00+0000', tz='UTC'):[24, 3766, 5061],
        Timestamp('2006-01-10 00:00:00+0000', tz='UTC'):[24, 3766, 5061],
        Timestamp('2006-01-11 00:00:00+0000', tz='UTC'):[24, 3766, 5061, 14848]
    }}
    context.bar_count = 0

def handle_data(context, data):
    expected = context.expected_sids[normalize_date(get_datetime())]
    actual = data.fetcher_assets
    for stk in expected:
        if stk not in actual:
            raise Exception(
                "{{stk}} is missing on dt={{dt}}".format(
                    stk=stk, dt=get_datetime()))

    record(sid_count=len(actual))
    record(bar_count=context.bar_count)
    context.bar_count += 1
"""
        replacement = ""
        if column_name:
            replacement = ",symbol_column='%s'\n" % column_name
        real_algocode = algocode.format(token=replacement)

        results = self.run_algo(real_algocode, sim_params=sim_params)

        # three sessions; the 14848 asset joins the universe on 1/11
        self.assertEqual(len(results), 3)
        self.assertEqual(3, results["sid_count"].iloc[0])
        self.assertEqual(3, results["sid_count"].iloc[1])
        self.assertEqual(4, results["sid_count"].iloc[2])
def test_algo_with_rl_violation_after_add(self):
    """Adding AAPL to the restricted list makes trading it a violation."""
    try:
        add_security_data(['AAPL'], [])

        params = factory.create_simulation_parameters(
            start=self.trading_day_before_first_kd, num_days=4
        )
        events = factory.create_trade_history(
            'AAPL',
            [10.0, 10.0, 11.0, 11.0],
            [100, 100, 100, 300],
            timedelta(days=1),
            params,
        )
        self.source = SpecificEquityTrades(event_list=events)

        algo = RestrictedAlgoWithoutCheck(sid='AAPL', sim_params=params)
        with self.assertRaises(TradingControlViolation) as ctx:
            algo.run(self.source)
        self.check_algo_exception(algo, ctx, 2)
    finally:
        # always restore the pristine security-list state on disk
        remove_security_data_directory()
def setUp(self):
    """Build trade, DataFrame, and panel sources over a 4-day window.

    Fix: the original called ``setup_logger(self)`` twice in a row; the
    redundant duplicate call has been removed.
    """
    setup_logger(self)
    self.sim_params = factory.create_simulation_parameters(num_days=4)

    trade_history = factory.create_trade_history(
        1,
        [10.0, 10.0, 11.0, 11.0],
        [100, 100, 100, 300],
        timedelta(days=1),
        self.sim_params,
    )
    self.source = SpecificEquityTrades(event_list=trade_history)
    self.df_source, self.df = \
        factory.create_test_df_source(self.sim_params)
    self.panel_source, self.panel = \
        factory.create_test_panel_source(self.sim_params)
def setUp(self):
    """251 trading days of flat $10 prices for sid 133."""
    days = 251
    self.sim_params = factory.create_simulation_parameters(num_days=days)
    setup_logger(self)

    events = factory.create_trade_history(
        133,
        [10.0] * days,
        [100] * days,
        timedelta(days=1),
        self.sim_params,
    )
    self.source = SpecificEquityTrades(event_list=events)
    self.df_source, self.df = factory.create_test_df_source(self.sim_params)

    self.zipline_test_config = {
        'sid': 0,
    }
def test_algo_with_rl_violation_after_knowledge_date(self):
    """Trading BZQ a week past its knowledge date still violates."""
    window_start = list(LEVERAGED_ETFS.keys())[0] + timedelta(days=7)
    params = factory.create_simulation_parameters(
        start=window_start, num_days=5
    )

    events = factory.create_trade_history(
        'BZQ',
        [10.0, 10.0, 11.0, 11.0],
        [100, 100, 100, 300],
        timedelta(days=1),
        params,
    )
    self.source = SpecificEquityTrades(event_list=events)

    algo = RestrictedAlgoWithoutCheck(sid='BZQ', sim_params=params)
    with self.assertRaises(TradingControlViolation) as ctx:
        algo.run(self.source)
    self.check_algo_exception(algo, ctx, 0)
def test_set_portfolio(self):
    """
    Are we protected against overwriting an algo's portfolio?
    """
    # Simulation
    # ----------
    algo = SetPortfolioAlgorithm(
        self.zipline_test_config['sid'],
        sim_params=factory.create_simulation_parameters(),
        env=self.env,
    )
    self.zipline_test_config['algorithm'] = algo

    zipline = simfactory.create_test_zipline(**self.zipline_test_config)

    with self.assertRaises(AttributeError):
        output, _ = drain_zipline(self, zipline)
def test_algo_without_rl_violation_via_check(self):
    """An algo that checks the restricted list first runs cleanly."""
    params = factory.create_simulation_parameters(
        start=list(LEVERAGED_ETFS.keys())[0],
        num_days=4,
        env=self.env,
    )
    events = factory.create_trade_history(
        'BZQ',
        [10.0, 10.0, 11.0, 11.0],
        [100, 100, 100, 300],
        timedelta(days=1),
        params,
        env=self.env,
    )
    self.source = SpecificEquityTrades(event_list=events, env=self.env)

    algo = RestrictedAlgoWithCheck(
        symbol='BZQ', sim_params=params, env=self.env
    )
    algo.run(self.source)
def test_algo_with_rl_violation_cumulative(self):
    """
    Add a new restriction, run a test long after both knowledge dates,
    make sure stock from original restriction set is still disallowed.
    """
    window_start = list(LEVERAGED_ETFS.keys())[0] + timedelta(days=7)
    params = factory.create_simulation_parameters(
        start=window_start, num_days=4
    )

    with security_list_copy():
        add_security_data(['AAPL'], [])

        algo = RestrictedAlgoWithoutCheck(
            symbol='BZQ', sim_params=params, env=self.env
        )
        with self.assertRaises(TradingControlViolation) as ctx:
            algo.run(self.data_portal)
        self.check_algo_exception(algo, ctx, 0)
def build_backtest_environment(star_date, end_date, capital_base=100000):
    """Assemble (data, env, bundle, sim_params) for a daily NYSE backtest.

    NOTE(review): the first parameter name looks like a typo for
    ``start_date`` — kept as-is for backward compatibility with any
    keyword callers; confirm before renaming.
    """
    trading_calendar = get_calendar("NYSE")

    start_ts = pd.Timestamp(pd.to_datetime(star_date)).tz_localize('US/Eastern')
    end_ts = pd.Timestamp(pd.to_datetime(end_date)).tz_localize('US/Eastern')
    sim_params = create_simulation_parameters(
        capital_base=capital_base,
        data_frequency='daily',
        trading_calendar=trading_calendar,
        start=start_ts,
        end=end_ts,
    )

    bundle = bundles.load('quandl')
    # strip the sqlite:/// scheme to recover the on-disk db path
    prefix, connstr = re.split(
        r'sqlite:///',
        str(bundle.asset_finder.engine.url),
        maxsplit=1,
    )
    env = TradingEnvironment(asset_db_path=connstr, environ=os.environ)

    data = DataPortal(
        env.asset_finder,
        trading_calendar,
        first_trading_day=bundle.equity_minute_bar_reader.first_trading_day,
        equity_minute_reader=bundle.equity_minute_bar_reader,
        equity_daily_reader=bundle.equity_daily_bar_reader,
        adjustment_reader=bundle.adjustment_reader,
    )
    return data, env, bundle, sim_params
def run_batchtransform(window_length=10):
    """Drive a ReturnPriceBatchTransform over five years of test data.

    Fix: removed stray trailing tokens (``batch_transform,
    ReturnPriceBatchTransform)``) that made the function unparseable.

    Parameters
    ----------
    window_length : int, optional
        Number of events in the transform's rolling window.
    """
    sim_params = factory.create_simulation_parameters(
        start=datetime(1990, 1, 1, tzinfo=pytz.utc),
        end=datetime(1995, 1, 8, tzinfo=pytz.utc),
    )
    source, df = factory.create_test_df_source(sim_params)

    return_price_class = ReturnPriceBatchTransform(
        refresh_period=1,
        window_length=window_length,
        clean_nans=False,
    )

    for raw_event in source:
        # the transform expects a dt column and a sid-keyed event dict
        raw_event['datetime'] = raw_event.dt
        event = {0: raw_event}
        return_price_class.handle_data(event)
def test_algo_without_rl_violation_after_delete(self):
    """Once BZQ is removed from the restricted list, trading it is legal."""
    with security_list_copy():
        # add a delete statement removing bzq
        # write a new delete statement file to disk
        add_security_data([], ['BZQ'])

        params = factory.create_simulation_parameters(
            start=self.extra_knowledge_date, num_days=3
        )
        events = factory.create_trade_history(
            'BZQ',
            [10.0, 10.0, 11.0, 11.0],
            [100, 100, 100, 300],
            timedelta(days=1),
            params,
        )
        self.source = SpecificEquityTrades(event_list=events)

        algo = RestrictedAlgoWithoutCheck(sid='BZQ', sim_params=params)
        algo.run(self.source)
def test_algo_with_rl_violation(self):
    """Trading a restricted symbol must raise, for either lookup date."""
    params = factory.create_simulation_parameters(
        start=list(LEVERAGED_ETFS.keys())[0], num_days=4)

    # run for BZQ, then repeat with a symbol from a different lookup date
    for ticker in ('BZQ', 'JFT'):
        events = factory.create_trade_history(
            ticker,
            [10.0, 10.0, 11.0, 11.0],
            [100, 100, 100, 300],
            timedelta(days=1),
            params,
        )
        self.source = SpecificEquityTrades(event_list=events)
        self.df_source, self.df = factory.create_test_df_source(params)

        algo = RestrictedAlgoWithoutCheck(sid=ticker, sim_params=params)
        with self.assertRaises(TradingControlViolation) as ctx:
            algo.run(self.source)
        self.check_algo_exception(algo, ctx, 0)
def test_fetcher_universe_non_security_return(self):
    """fetcher_assets must stay empty when the CSV has no real assets."""
    params = factory.create_simulation_parameters(
        start=pd.Timestamp("2006-01-09", tz='UTC'),
        end=pd.Timestamp("2006-01-10", tz='UTC'))

    algo_code = """
from zipline.api import fetch_csv

def initialize(context):
    fetch_csv(
        'https://fake.urls.com/bad_fetcher_universe_data.csv',
        date_format='%m/%d/%Y'
    )

def handle_data(context, data):
    if len(data.fetcher_assets) > 0:
        raise Exception("Shouldn't be any assets in fetcher_assets!")
"""
    self.run_algo(algo_code, sim_params=params)
def test_fetcher_universe_minute(self):
    """Fetcher universe membership is honored in a minutely simulation."""
    sim_params = factory.create_simulation_parameters(
        start=pd.Timestamp("2006-01-09", tz='UTC'),
        end=pd.Timestamp("2006-01-11", tz='UTC'),
        data_frequency="minute"
    )

    results = self.run_algo("""
from pandas import Timestamp
from zipline.api import fetch_csv, record, get_datetime

def initialize(context):
    fetch_csv(
        'https://fake.urls.com/fetcher_universe_data.csv',
        date_format='%m/%d/%Y'
    )
    context.expected_sids = {
        Timestamp('2006-01-09 00:00:00+0000', tz='UTC'):[24, 3766, 5061],
        Timestamp('2006-01-10 00:00:00+0000', tz='UTC'):[24, 3766, 5061],
        Timestamp('2006-01-11 00:00:00+0000', tz='UTC'):[24, 3766, 5061, 14848]
    }
    context.bar_count = 0

def handle_data(context, data):
    expected = context.expected_sids[get_datetime().replace(hour=0, minute=0)]
    actual = data.fetcher_assets
    for stk in expected:
        if stk not in actual:
            raise Exception("{stk} is missing".format(stk=stk))

    record(sid_count=len(actual))
    record(bar_count=context.bar_count)
    context.bar_count += 1
""", sim_params=sim_params, data_frequency="minute")

    # three sessions of results; the 14848 asset joins on 1/11
    self.assertEqual(3, len(results))
    self.assertEqual(3, results["sid_count"].iloc[0])
    self.assertEqual(3, results["sid_count"].iloc[1])
    self.assertEqual(4, results["sid_count"].iloc[2])
def test_blotter_processes_splits(self):
    """Splits rescale open orders for the split asset only."""
    params = factory.create_simulation_parameters(
        start=self.start, end=self.end
    )
    blotter = SimulationBlotter(params, equity_slippage=FixedSlippage())

    # set up two open limit orders with very low limit prices,
    # one for sid 1 and one for sid 2
    asset1 = self.asset_finder.retrieve_asset(1)
    asset2 = self.asset_finder.retrieve_asset(2)
    asset133 = self.asset_finder.retrieve_asset(133)

    blotter.order(asset1, 100, LimitOrder(10, asset=asset1))
    blotter.order(asset2, 100, LimitOrder(10, asset=asset2))

    # send in splits for assets 133 and 2. We have no open orders for
    # asset 133 so it should be ignored.
    blotter.process_splits([(asset133, 0.5), (asset2, 0.3333)])

    for asset in (asset1, asset2):
        open_for_asset = blotter.open_orders[asset]
        self.assertIsNotNone(open_for_asset)
        self.assertEqual(1, len(open_for_asset))

    untouched = blotter.open_orders[1][0]
    adjusted = blotter.open_orders[2][0]

    # the asset1 order is unchanged...
    self.assertEqual(100, untouched.amount)
    self.assertEqual(10, untouched.limit)
    self.assertEqual(1, untouched.asset)

    # ...while the asset2 order became 300 shares at 3.33
    self.assertEqual(300, adjusted.amount)
    self.assertEqual(3.33, adjusted.limit)
    self.assertEqual(2, adjusted.asset)