def initialize(self, *args, **kwargs):
    # Compute the exclude list before the user-defined initialize() runs, so
    # that only pre-existing framework attributes are excluded and any state
    # created by the algorithm itself gets persisted.
    self._context_persistence_excludes = \
        self._context_persistence_blacklist + \
        [e for e in self.__dict__.keys()
         if e not in self._context_persistence_whitelist]

    if os.path.isfile(self.state_filename):
        log.info("Loading state from {}".format(self.state_filename))
        load_context(self.state_filename,
                     context=self,
                     checksum=self.algo_filename)

    self.initialized = False

    with ZiplineAPI(self):
        super(self.__class__, self).initialize(*args, **kwargs)
        store_context(self.state_filename,
                      context=self,
                      checksum=self.algo_filename,
                      exclude_list=self._context_persistence_excludes)

    self.initialized = True
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         metrics_set,
         local_namespace,
         environ,
         blotter,
         benchmark_symbol,
         broker,
         state_filename):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.

    Additions useful for live trading:

    broker - wrapper used to connect to a real broker
    state_filename - path where the algorithm context is saved so that the
        algorithm can be restarted
    """
    log.info("Using bundle '%s'." % bundle)

    if trading_calendar is None:
        trading_calendar = get_calendar('XNYS')

    bundle_data = load_sharadar_bundle(bundle)
    now = pd.Timestamp.utcnow()

    if start is None:
        start = (bundle_data.equity_daily_bar_reader.first_trading_day
                 if not broker else now)

    if not trading_calendar.is_session(start.date()):
        # Roll forward to the next trading session.
        start = trading_calendar.next_open(start)

    if end is None:
        end = (bundle_data.equity_daily_bar_reader.last_available_dt
               if not broker else start)

    # date parameter validation
    if trading_calendar.session_distance(start, end) < 0:
        raise _RunAlgoError(
            'There are no trading days between %s and %s' % (
                start.date(),
                end.date(),
            ),
        )

    if broker:
        log.info("Live Trading on %s." % start.date())
    else:
        log.info("Backtest from %s to %s." % (start.date(), end.date()))

    if benchmark_symbol:
        benchmark = symbol(benchmark_symbol)
        benchmark_sid = benchmark.sid
        benchmark_returns = load_benchmark_data_bundle(
            bundle_data.equity_daily_bar_reader, benchmark)
    else:
        benchmark_sid = None
        benchmark_returns = pd.Series(
            index=pd.date_range(start, end, tz='utc'), data=0.0)

    # emission_rate is a string representing the smallest frequency at which
    # metrics should be reported. emission_rate will be either minute or
    # daily. When emission_rate is daily, end_of_bar will not be called at
    # all.
    emission_rate = 'daily'

    if algotext is not None:
        if local_namespace:
            # noinspection PyUnresolvedReferences
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # each other
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    first_trading_day = \
        bundle_data.equity_daily_bar_reader.first_trading_day

    if isinstance(metrics_set, six.string_types):
        try:
            metrics_set = metrics.load(metrics_set)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    if isinstance(blotter, six.string_types):
        try:
            blotter = load(Blotter, blotter)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    # Special defaults for live trading
    if broker:
        data_frequency = 'minute'

        # No benchmark
        benchmark_sid = None
        benchmark_returns = pd.Series(
            index=pd.date_range(start, end, tz='utc'), data=0.0)

        broker.daily_bar_reader = bundle_data.equity_daily_bar_reader

        if start.date() < now.date():
            # Run a one-off backtest from the requested start date up to the
            # last available session and persist its context, so that live
            # trading starts from a warmed-up state.
            backtest_start = start
            backtest_end = \
                bundle_data.equity_daily_bar_reader.last_available_dt

            if not os.path.exists(state_filename):
                log.info("Backtest from %s to %s."
                         % (backtest_start.date(), backtest_end.date()))
                backtest_data = DataPortal(
                    bundle_data.asset_finder,
                    trading_calendar=trading_calendar,
                    first_trading_day=first_trading_day,
                    equity_minute_reader=bundle_data.equity_minute_bar_reader,
                    equity_daily_reader=bundle_data.equity_daily_bar_reader,
                    adjustment_reader=bundle_data.adjustment_reader,
                )
                backtest = create_algo_class(
                    TradingAlgorithm, backtest_start, backtest_end, algofile,
                    algotext, analyze, before_trading_start,
                    benchmark_returns, benchmark_sid, blotter, bundle_data,
                    capital_base, backtest_data, 'daily', emission_rate,
                    handle_data, initialize, metrics_set, namespace,
                    trading_calendar)

                ctx_blacklist = ['trading_client']
                ctx_whitelist = ['perf_tracker']
                ctx_excludes = ctx_blacklist + [
                    e for e in backtest.__dict__.keys()
                    if e not in ctx_whitelist
                ]
                backtest.run()
                # TODO: better logic for the checksum
                checksum = getattr(algofile, 'name', '<algorithm>')
                store_context(state_filename,
                              context=backtest,
                              checksum=checksum,
                              exclude_list=ctx_excludes)
            else:
                log.warn("State file already exists. Not running the "
                         "backtest.")

            # Set start and end to now for live trading
            start = pd.Timestamp.utcnow()
            if not trading_calendar.is_session(start.date()):
                start = trading_calendar.next_open(start)
            end = start

    # TODO: start here to create a pre-run of the algo before live trading;
    # use store_context before switching from TradingAlgorithm to
    # LiveTradingAlgorithm.
    TradingAlgorithmClass = (partial(LiveTradingAlgorithm,
                                     broker=broker,
                                     state_filename=state_filename)
                             if broker else TradingAlgorithm)

    DataPortalClass = (partial(DataPortalLive, broker)
                       if broker else DataPortal)

    data = DataPortalClass(
        bundle_data.asset_finder,
        trading_calendar=trading_calendar,
        first_trading_day=first_trading_day,
        equity_minute_reader=bundle_data.equity_minute_bar_reader,
        equity_daily_reader=bundle_data.equity_daily_bar_reader,
        adjustment_reader=bundle_data.adjustment_reader,
    )

    algo = create_algo_class(
        TradingAlgorithmClass, start, end, algofile, algotext, analyze,
        before_trading_start, benchmark_returns, benchmark_sid, blotter,
        bundle_data, capital_base, data, data_frequency, emission_rate,
        handle_data, initialize, metrics_set, namespace, trading_calendar)

    perf = algo.run()

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
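# Hedged usage sketch, not part of the original module: one way _run might be
# invoked programmatically for a plain backtest (broker=None), placed
# alongside _run so it can reuse this module's imports (pd, os). The
# algorithm file name, bundle name, capital and dates below are illustrative
# assumptions; the real entry points are the CLI and zipline's run_algo
# wrapper.
def _example_backtest_run():
    with open('my_algo.py') as algofile:  # hypothetical algorithm file
        return _run(
            handle_data=None,             # callbacks are defined in the file
            initialize=None,
            before_trading_start=None,
            analyze=None,
            algofile=algofile,
            algotext=None,                # read from algofile inside _run
            defines=(),
            data_frequency='daily',
            capital_base=100000,
            bundle='sharadar',            # assumed bundle name
            bundle_timestamp=pd.Timestamp.utcnow(),
            start=pd.Timestamp('2019-01-02', tz='utc'),
            end=pd.Timestamp('2019-12-31', tz='utc'),
            output='-',                   # echo the perf frame to stdout
            trading_calendar=None,        # defaults to XNYS
            print_algo=False,
            metrics_set='default',
            local_namespace=False,
            environ=os.environ,
            blotter='default',
            benchmark_symbol=None,        # fall back to a zero benchmark
            broker=None,                  # no broker -> backtest mode
            state_filename=None,          # only used in live trading
        )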
def teardown(self):
    store_context(self.state_filename,
                  context=self,
                  checksum=self.algo_filename,
                  exclude_list=self._context_persistence_excludes)
def handle_data(self, data):
    super(self.__class__, self).handle_data(data)
    # Persist the context after every bar so a restart can resume from the
    # latest state.
    store_context(self.state_filename,
                  context=self,
                  checksum=self.algo_filename,
                  exclude_list=self._context_persistence_excludes)
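# Hedged sketch of what the store_context/load_context helpers used above are
# assumed to do: pickle the context's __dict__ minus the excluded attributes,
# together with a checksum of the algorithm file so stale state is not
# restored into a different algorithm. This is an illustration under those
# assumptions, not the project's actual implementation.
import pickle


def _store_context_sketch(state_filename, context, checksum, exclude_list):
    # Keep only attributes that are safe to persist.
    state = {k: v for k, v in context.__dict__.items()
             if k not in exclude_list}
    with open(state_filename, 'wb') as f:
        pickle.dump({'checksum': checksum, 'state': state}, f)


def _load_context_sketch(state_filename, context, checksum):
    with open(state_filename, 'rb') as f:
        payload = pickle.load(f)
    if payload['checksum'] != checksum:
        raise ValueError("State file was created by a different algorithm.")
    # Restore the persisted attributes onto the running context.
    context.__dict__.update(payload['state'])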