def make_pipeline_engine(symbols=None, bundle='etfs_bundle', calendar='NYSE'):
    """Register and load a data bundle and build a pipeline engine for it.

    Parameters
    ----------
    symbols : list of str, optional
        Ticker symbols to look up in the bundle; defaults to
        ``['SPY', 'TLT']``.  (A ``None`` sentinel replaces the original
        mutable-list default so the list is not shared across calls.)
    bundle : str
        Name of the data bundle to register and load.
    calendar : str
        Name of the trading calendar the engine should use.

    Returns
    -------
    (assets, engine) : tuple
        The resolved assets and the :class:`SimplePipelineEngine`.
    """
    if symbols is None:
        symbols = ['SPY', 'TLT']

    # NOTE(review): zipline's bundles.register() normally takes an ingest
    # function as its second argument — confirm this project's register()
    # really accepts a symbol list here.
    register(bundle, symbols)
    bundle_data = load(bundle)

    # Set up pipeline engine
    # Loader for pricing columns.
    pipeline_loader = USEquityPricingLoader(
        bundle_data.equity_daily_bar_reader,
        bundle_data.adjustment_reader,
    )

    def my_dispatcher(column):
        # `loaders` must be a module-level mapping supplied elsewhere.
        # BUGFIX: previously a missing mapping (or missing entry) surfaced
        # as a bare NameError/KeyError; raise the same ValueError the other
        # engine factories in this file use.
        try:
            return loaders[column]
        except (NameError, KeyError):
            raise ValueError(
                "No PipelineLoader registered for column %s." % column)

    def choose_loader(column):
        # Pricing columns use the bundled pricing loader; everything else
        # is dispatched to the custom loader mapping.
        if column in USEquityPricing.columns:
            return pipeline_loader
        return my_dispatcher(column)

    trading_calendar = get_calendar(calendar)
    engine = SimplePipelineEngine(
        get_loader=choose_loader,
        calendar=trading_calendar.all_sessions,
        asset_finder=bundle_data.asset_finder,
    )

    assets = bundle_data.asset_finder.lookup_symbols(symbols, as_of_date=None)
    return assets, engine
def make_pipeline_engine(bundle, data_dates):
    """Build a SimplePipelineEngine restricted to the dates in *data_dates*.

    Using this allows usage very similar to ``run_pipeline`` in
    Quantopian's environment.
    """
    bundle_data = load(bundle, os.environ, None)

    pricing_loader = USEquityPricingLoader(
        bundle_data.equity_daily_bar_reader,
        bundle_data.adjustment_reader,
    )

    def choose_loader(column):
        # Only the standard US-equity pricing columns are supported here.
        if column not in USEquityPricing.columns:
            raise ValueError(
                "No PipelineLoader registered for column %s." % column)
        return pricing_loader

    # Clip the bundle's full session index to the requested window.
    sessions = bundle_data.equity_daily_bar_reader.trading_calendar.all_sessions
    first, last = data_dates[0], data_dates[1]
    window = sessions[(sessions >= first) & (sessions <= last)]

    return SimplePipelineEngine(
        get_loader=choose_loader,
        calendar=window,
        asset_finder=bundle_data.asset_finder,
    )
def _pipeline_engine_and_calendar_for_bundle(bundle):
    """Create a pipeline engine for the given bundle.

    Parameters
    ----------
    bundle : str
        The name of the bundle to create a pipeline engine for.

    Returns
    -------
    engine : zipline.pipeline.engine.SimplePipelineEngine
        The pipeline engine which can run pipelines against the bundle.
    calendar : zipline.utils.calendars.TradingCalendar
        The trading calendar for the bundle.
    """
    bundle_data = bundles.load(bundle)

    pricing_loader = USEquityPricingLoader(
        bundle_data.equity_daily_bar_reader,
        bundle_data.adjustment_reader,
    )

    def choose_loader(column):
        # Only pricing columns have a registered loader in this engine.
        if column not in USEquityPricing.columns:
            raise ValueError(
                'No PipelineLoader registered for column %s.' % column
            )
        return pricing_loader

    calendar = bundle_data.equity_daily_bar_reader.trading_calendar
    engine = SimplePipelineEngine(
        choose_loader,
        calendar.all_sessions,
        bundle_data.asset_finder,
    )
    return engine, calendar
def set_bundle_data(bundle_name='alpaca_api'):
    """Load *bundle_name* and cache its data and pricing loader in the
    module-level globals ``BUNDLE_DATA`` and ``PRICING_LOADER``.

    Parameters
    ----------
    bundle_name : str
        Name of the registered zipline data bundle to load.
    """
    global BUNDLE_DATA, PRICING_LOADER
    BUNDLE_DATA = bundles.load(bundle_name)
    # .without_fx builds the pricing loader with no FX-rate reader attached.
    PRICING_LOADER = USEquityPricingLoader.without_fx(
        BUNDLE_DATA.equity_daily_bar_reader,
        BUNDLE_DATA.adjustment_reader)
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         data,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         local_namespace,
         environ,
         broker,
         state_filename,
         realtime_bar_target):
    """Run a backtest (or a live session, when *broker* is given) for the
    given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.
    """
    if algotext is not None:
        if local_namespace:
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # each other
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    # Default to the Shanghai/Shenzhen calendar; also accept a calendar name.
    if not trading_calendar:
        trading_calendar = get_calendar('SHSZ')
    elif isinstance(trading_calendar, str):
        trading_calendar = get_calendar(trading_calendar)

    if bundle is not None:
        bundle_data = load(
            bundle,
            environ,
            bundle_timestamp,
        )

        # The asset db url must be a sqlite:/// url; keep only the path part.
        prefix, connstr = re.split(
            r'sqlite:///',
            str(bundle_data.asset_finder.engine.url),
            maxsplit=1,
        )
        if prefix:
            raise ValueError(
                "invalid url %r, must begin with 'sqlite:///'" %
                str(bundle_data.asset_finder.engine.url),
            )
        env = TradingEnvironment(load=load_market_data,
                                 bm_symbol='000300',
                                 asset_db_path=connstr,
                                 environ=environ)
        first_trading_day = \
            bundle_data.equity_minute_bar_reader.first_trading_day
        # Live trading wraps the data portal so realtime bars can be served.
        DataPortalClass = (partial(DataPortalLive, broker)
                           if broker
                           else DataPortal)
        data = DataPortalClass(
            env.asset_finder,
            trading_calendar,
            first_trading_day=first_trading_day,
            equity_minute_reader=bundle_data.equity_minute_bar_reader,
            equity_daily_reader=bundle_data.equity_daily_bar_reader,
            adjustment_reader=bundle_data.adjustment_reader,
        )

        pipeline_loader = USEquityPricingLoader(
            bundle_data.equity_daily_bar_reader,
            bundle_data.adjustment_reader,
        )

        def choose_loader(column):
            if column in USEquityPricing.columns:
                return pipeline_loader
            raise ValueError(
                "No PipelineLoader registered for column %s." % column)
    else:
        env = TradingEnvironment(
            environ=environ,
            load=load_market_data,
            bm_symbol='000300',
        )
        choose_loader = None
        # BUGFIX: bundle_data was previously left unbound on this path, so
        # the unconditional `bundle_data.fundamental_reader` reference below
        # raised NameError whenever no bundle was given.
        bundle_data = None

    emission_rate = 'daily'  # TODO why daily default
    if broker:
        emission_rate = 'minute'
        # Live sessions always run "now" over a two-day window.
        start = pd.Timestamp.utcnow()
        end = start + pd.Timedelta('2 day')

    TradingAlgorithmClass = (partial(LiveTradingAlgorithm,
                                     broker=broker,
                                     state_filename=state_filename,
                                     realtime_bar_target=realtime_bar_target)
                             if broker else TradingAlgorithm)

    perf = TradingAlgorithmClass(
        namespace=namespace,
        env=env,
        get_pipeline_loader=choose_loader,
        trading_calendar=trading_calendar,
        sim_params=create_simulation_parameters(
            start=start,
            end=end,
            capital_base=capital_base,
            emission_rate=emission_rate,
            data_frequency=data_frequency,
            trading_calendar=trading_calendar,
        ),
        # Guarded so the no-bundle path no longer crashes with NameError.
        fundamental_reader=(bundle_data.fundamental_reader
                            if bundle_data is not None else None),
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }).run(
        data,
        overwrite_sim_params=False,
    )

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
def __init__(self, bundle_data):
    """Build a US-equity pricing loader from the bundle's daily bar and
    adjustment readers and keep it on the instance."""
    daily_bars = bundle_data.equity_daily_bar_reader
    adjustments = bundle_data.adjustment_reader
    self.loader = USEquityPricingLoader(daily_bars, adjustments)
from zipline.pipeline import Pipeline
from zipline.pipeline.engine import SimplePipelineEngine
from zipline.pipeline.data import USEquityPricing
from zipline.pipeline.loaders import USEquityPricingLoader
from zipline.pipeline import Fundamentals
from zipline.data.bundles.core import load
from zipline.pipeline.loaders.blaze import global_loader

# Module-level pipeline setup for the 'cnstock' bundle.
bundle = 'cnstock'
bundle_data = load(bundle)

# Loader for the standard US-equity pricing columns (daily bars + adjustments).
pipeline_loader = USEquityPricingLoader(
    bundle_data.equity_daily_bar_reader,
    bundle_data.adjustment_reader,
)

def choose_loader(column):
    """Route pricing columns to the pricing loader and Fundamentals
    columns to the blaze global_loader; reject anything else."""
    if column in USEquityPricing.columns:
        return pipeline_loader
    elif Fundamentals.has_column(column):
        return global_loader
    raise ValueError("No PipelineLoader registered for column %s." % column)

def run_pipeline(pipe, start, end):
    # NOTE(review): this function appears truncated in this chunk — the
    # engine construction and run call are not visible here.
    calendar = bundle_data.equity_daily_bar_reader.trading_calendar
    dates = calendar.all_sessions  # corrected dates
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         data,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         metrics_set,
         local_namespace,
         environ,
         bm_symbol):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.
    """
    if algotext is not None:
        if local_namespace:
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # each other
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    if trading_calendar is None:
        trading_calendar = get_calendar('SZSH')

    if trading_calendar.session_distance(start, end) < 1:
        raise _RunAlgoError(
            'There are no trading days between %s and %s' % (
                start.date(),
                end.date(),
            ),
        )

    if bundle is not None:
        bundle_data = bundles.load(
            bundle,
            environ,
            bundle_timestamp,
        )

        # The asset db url must be a sqlite:/// url; keep only the path part.
        prefix, connstr = re.split(
            r'sqlite:///',
            str(bundle_data.asset_finder.engine.url),
            maxsplit=1,
        )
        if prefix:
            raise ValueError(
                "invalid url %r, must begin with 'sqlite:///'" %
                str(bundle_data.asset_finder.engine.url),
            )
        env = TradingEnvironment(
            asset_db_path=connstr,
            trading_calendar=trading_calendar,
            # the constructor takes the timezone in string form
            exchange_tz=trading_calendar.tz.zone,
            bm_symbol=bm_symbol,
            environ=environ)
        first_trading_day = \
            bundle_data.equity_minute_bar_reader.first_trading_day
        data = DataPortal(
            env.asset_finder,
            trading_calendar=trading_calendar,
            first_trading_day=first_trading_day,
            equity_minute_reader=bundle_data.equity_minute_bar_reader,
            equity_daily_reader=bundle_data.equity_daily_bar_reader,
            adjustment_reader=bundle_data.adjustment_reader,
        )

        pipeline_loader = USEquityPricingLoader(
            bundle_data.equity_daily_bar_reader,
            bundle_data.adjustment_reader,
        )

        def choose_loader(column):
            if column in USEquityPricing.columns:
                return pipeline_loader
            # BUGFIX: use isinstance() instead of `type(column) == BoundColumn`
            # so BoundColumn subclasses are also dispatched to global_loader.
            elif isinstance(column, BoundColumn):
                return global_loader
            raise ValueError(
                "No PipelineLoader registered for column %s." % column)
    else:
        env = TradingEnvironment(environ=environ)
        choose_loader = None

    if isinstance(metrics_set, six.string_types):
        try:
            metrics_set = metrics.load(metrics_set)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    perf = TradingAlgorithm(
        namespace=namespace,
        env=env,
        get_pipeline_loader=choose_loader,
        trading_calendar=trading_calendar,
        sim_params=create_simulation_parameters(
            start=start,
            end=end,
            capital_base=capital_base,
            data_frequency=data_frequency,
            trading_calendar=trading_calendar,
        ),
        metrics_set=metrics_set,
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }).run(
        data,
        overwrite_sim_params=False,
    )

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         bundle,
         bundle_timestamp,
         custom_data_portal,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         metrics_set,
         local_namespace,
         environ,
         blotter,
         benchmark_returns):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.
    A caller-supplied ``custom_data_portal`` bypasses the bundle-backed
    ``DataPortal`` construction below.
    """
    # Fall back to the default benchmark series when none was supplied.
    if benchmark_returns is None:
        benchmark_returns, _ = load_market_data(environ=environ)

    if algotext is not None:
        if local_namespace:
            # Run inside the user's IPython namespace when requested.
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        # Evaluate '-D name=value' definitions into the algo namespace.
        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            # Syntax-highlight the algorithm source when pygments is present.
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    if trading_calendar is None:
        trading_calendar = get_calendar('XNYS')

    # date parameter validation
    if trading_calendar.session_distance(start, end) < 1:
        raise _RunAlgoError(
            'There are no trading days between %s and %s' % (
                start.date(),
                end.date(),
            ),
        )

    bundle_data = bundles.load(
        bundle,
        environ,
        bundle_timestamp,
    )

    # TODO: Fix this for the custom DataPortal case.
    first_trading_day = \
        bundle_data.equity_minute_bar_reader.first_trading_day

    if custom_data_portal is None:
        data = DataPortal(
            bundle_data.asset_finder,
            trading_calendar=trading_calendar,
            first_trading_day=first_trading_day,
            equity_minute_reader=bundle_data.equity_minute_bar_reader,
            equity_daily_reader=bundle_data.equity_daily_bar_reader,
            adjustment_reader=bundle_data.adjustment_reader,
        )
    else:
        data = custom_data_portal

    # TODO: Fix this for the custom DataPortal case.
    # Pricing loader for pipeline columns (still bundle-backed even when a
    # custom data portal is used — hence the TODO above).
    pipeline_loader = USEquityPricingLoader(
        bundle_data.equity_daily_bar_reader,
        bundle_data.adjustment_reader,
    )

    def choose_loader(column):
        # Only US-equity pricing columns are supported by this runner.
        if column in USEquityPricing.columns:
            return pipeline_loader
        raise ValueError(
            "No PipelineLoader registered for column %s." % column
        )

    # Resolve a metrics-set name into the actual metrics set.
    if isinstance(metrics_set, six.string_types):
        try:
            metrics_set = metrics.load(metrics_set)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    # Resolve a blotter name via the extension registry.
    if isinstance(blotter, six.string_types):
        try:
            blotter = load(Blotter, blotter)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    perf = TradingAlgorithm(
        namespace=namespace,
        data_portal=data,
        get_pipeline_loader=choose_loader,
        trading_calendar=trading_calendar,
        sim_params=SimulationParameters(
            start_session=start,
            end_session=end,
            trading_calendar=trading_calendar,
            capital_base=capital_base,
            data_frequency=data_frequency,
        ),
        metrics_set=metrics_set,
        blotter=blotter,
        benchmark_returns=benchmark_returns,
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }
    ).run()

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
import os
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         metrics_set,
         local_namespace,
         environ,
         blotter,
         benchmark_returns,
         broker,
         state_filename,
         realtime_bar_target,
         performance_callback,
         stop_execution_callback,
         teardown,
         execution_id):
    """
    Run a backtest for the given algorithm.
    This is shared between the cli and :func:`zipline.run_algo`.

    zipline-live additions:
    broker - wrapper to connect to a real broker
    state_filename - saving the context of the algo to be able to restart
    performance_callback - a callback to send performance results everyday
      and not only at the end of the backtest. this allows to run live, and
      monitor the performance of the algorithm
    stop_execution_callback - A callback to check if execution should be
      stopped. it is used to be able to stop live trading (also simulation
      could be stopped using this) execution. if the callback returns True,
      then algo execution will be aborted.
    teardown - algo method like handle_data() or before_trading_start() that
      is called when the algo execution stops
    execution_id - unique id to identify this execution (backtest or live
      instance)
    """
    # Fall back to the default benchmark series when none was supplied.
    if benchmark_returns is None:
        benchmark_returns, _ = load_market_data(environ=environ)

    # Live sessions emit performance every minute; backtests once per day.
    emission_rate = 'daily'
    if broker:
        emission_rate = 'minute'

    # if we run zipline as a command line tool, these will probably not be
    # initialized
    if not start:
        start = pd.Timestamp.utcnow()
    if not end:
        # in cli mode, sessions are 1 day only, and it will be re-run each
        # day by the user
        end = start + pd.Timedelta('1 day')

    if algotext is not None:
        if local_namespace:
            # Run inside the user's IPython namespace when requested.
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        # Evaluate '-D name=value' definitions into the algo namespace.
        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            # Syntax-highlight the algorithm source when pygments is present.
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    if trading_calendar is None:
        trading_calendar = get_calendar('NYSE')

    # date parameter validation
    if trading_calendar.session_distance(start, end) < 1:
        raise _RunAlgoError(
            'There are no trading days between %s and %s' % (
                start.date(),
                end.date(),
            ),
        )

    bundle_data = bundles.load(
        bundle,
        environ,
        bundle_timestamp,
    )

    first_trading_day = \
        bundle_data.equity_minute_bar_reader.first_trading_day

    # Live trading wraps the data portal so realtime bars can be served.
    DataPortalClass = (partial(DataPortalLive, broker)
                       if broker
                       else DataPortal)
    data = DataPortalClass(
        bundle_data.asset_finder,
        trading_calendar=trading_calendar,
        first_trading_day=first_trading_day,
        equity_minute_reader=bundle_data.equity_minute_bar_reader,
        equity_daily_reader=bundle_data.equity_daily_bar_reader,
        adjustment_reader=bundle_data.adjustment_reader,
    )

    # Pricing loader for pipeline columns.
    pipeline_loader = USEquityPricingLoader(
        bundle_data.equity_daily_bar_reader,
        bundle_data.adjustment_reader,
    )

    def choose_loader(column):
        # Only US-equity pricing columns are supported by this runner.
        if column in USEquityPricing.columns:
            return pipeline_loader
        raise ValueError("No PipelineLoader registered for column %s."
                         % column)

    # Resolve a metrics-set name into the actual metrics set.
    if isinstance(metrics_set, six.string_types):
        try:
            metrics_set = metrics.load(metrics_set)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    # Resolve a blotter name via the extension registry.
    if isinstance(blotter, six.string_types):
        try:
            blotter = load(Blotter, blotter)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    # Live trading swaps in the LiveTradingAlgorithm with its broker state.
    TradingAlgorithmClass = (partial(LiveTradingAlgorithm,
                                     broker=broker,
                                     state_filename=state_filename,
                                     realtime_bar_target=realtime_bar_target)
                             if broker else TradingAlgorithm)

    perf = TradingAlgorithmClass(
        namespace=namespace,
        data_portal=data,
        get_pipeline_loader=choose_loader,
        trading_calendar=trading_calendar,
        sim_params=SimulationParameters(start_session=start,
                                        end_session=end,
                                        trading_calendar=trading_calendar,
                                        capital_base=capital_base,
                                        emission_rate=emission_rate,
                                        data_frequency=data_frequency,
                                        execution_id=execution_id),
        metrics_set=metrics_set,
        blotter=blotter,
        benchmark_returns=benchmark_returns,
        performance_callback=performance_callback,
        stop_execution_callback=stop_execution_callback,
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
            'teardown': teardown,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }).run()

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         data,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         local_namespace,
         environ):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.
    """
    if algotext is not None:
        if local_namespace:
            # Run inside the user's IPython namespace when requested.
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        # Evaluate '-D name=value' definitions into the algo namespace.
        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            # Syntax-highlight the algorithm source when pygments is present.
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    if trading_calendar is None:
        trading_calendar = get_calendar('NYSE')

    if bundle is not None:
        # e.g. bundle='quantopian-quandl'
        bundle_data = load(
            bundle,
            environ,
            bundle_timestamp,
        )

        # The asset db url must be a sqlite:/// url; keep only the path part.
        prefix, connstr = re.split(
            r'sqlite:///',
            str(bundle_data.asset_finder.engine.url),
            maxsplit=1,
        )
        if prefix:
            raise ValueError(
                "invalid url %r, must begin with 'sqlite:///'" %
                str(bundle_data.asset_finder.engine.url),
            )
        # e.g. asset_db_path=
        # '~/.zipline/data/quantopian-quandl/<timestamp>/assets-6.sqlite'
        env = TradingEnvironment(asset_db_path=connstr, environ=environ)
        # e.g. Timestamp('1990-01-02 00:00:00+0000', tz='UTC')
        first_trading_day =\
            bundle_data.equity_minute_bar_reader.first_trading_day
        # DataPortal is the data container used throughout the simulation
        # (see data_portal.py).
        data = DataPortal(
            env.asset_finder,
            trading_calendar=trading_calendar,
            first_trading_day=first_trading_day,
            equity_minute_reader=bundle_data.equity_minute_bar_reader,
            equity_daily_reader=bundle_data.equity_daily_bar_reader,
            adjustment_reader=bundle_data.adjustment_reader,
        )

        # Pricing loader for pipeline columns (daily bars + adjustments).
        pipeline_loader = USEquityPricingLoader(
            bundle_data.equity_daily_bar_reader,
            bundle_data.adjustment_reader,
        )

        def choose_loader(column):
            # Only US-equity pricing columns are supported by this runner.
            if column in USEquityPricing.columns:
                return pipeline_loader
            raise ValueError(
                "No PipelineLoader registered for column %s." % column
            )
    else:
        # No bundle: caller must have supplied `data`; pipelines unavailable.
        env = TradingEnvironment(environ=environ)
        choose_loader = None

    perf = TradingAlgorithm(
        # the algo source brings no namespace of its own
        namespace=namespace,
        # TradingEnvironment bundles the calendar, benchmark data loader,
        # asset db engine, asset finder and asset writer
        env=env,
        get_pipeline_loader=choose_loader,
        trading_calendar=trading_calendar,
        # SimulationParameters object carrying all simulation parameters
        sim_params=create_simulation_parameters(
            start=start,
            end=end,
            capital_base=capital_base,
            data_frequency=data_frequency,
            trading_calendar=trading_calendar,
        ),
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
        } if algotext is None else {
            # algotext path: the lifecycle hooks above are None and are
            # defined by the script source itself
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }
    ).run(
        data,
        overwrite_sim_params=False,
    )

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
def initialize(context): dates = pd.date_range('2018-01-01', '2018-09-28') # assets = bundle_data.asset_finder.lookup_symbols(['A', 'AAL'], as_of_date=None) # assets = bundle_data.asset_finder sids = bundle_data.asset_finder.sids assets = [sid(item) for item in sids] # The values for Column A will just be a 2D array of numbers ranging from 1 -> N. column_A_frame = pd.DataFrame( data=np.arange(len(dates) * len(assets), dtype=float).reshape(len(dates), len(assets)), index=dates, columns=sids, ) # Column B will always provide True for 0 and False for 1. column_B_frame = pd.DataFrame(data={sids[0]: True, sids[1]: False}, index=dates) loaders = { MyDataSet.column_A: DataFrameLoader(MyDataSet.column_A, column_A_frame), MyDataSet.column_B: DataFrameLoader(MyDataSet.column_B, column_B_frame), } def my_dispatcher(column): return loaders[column] # Set up pipeline engine # Loader for pricing pipeline_loader = USEquityPricingLoader( bundle_data.equity_daily_bar_reader, bundle_data.adjustment_reader, ) def choose_loader(column): if column in USEquityPricing.columns: return pipeline_loader return my_dispatcher(column) engine = SimplePipelineEngine( get_loader=choose_loader, calendar=trading_calendar.all_sessions, asset_finder=bundle_data.asset_finder, ) p = Pipeline( columns={ 'price': USEquityPricing.close.latest, 'col_A': MyDataSet.column_A.latest, 'col_B': MyDataSet.column_B.latest }, screen=StaticAssets(assets) ) df = engine.run_pipeline( p, pd.Timestamp('2016-01-07', tz='utc'), pd.Timestamp('2016-01-07', tz='utc') ) df = df.sort_values(by=['price'], axis=0, ascending=False) print(df)
def _run(
    handle_data,
    initialize,
    before_trading_start,
    analyze,
    algofile,
    algotext,
    defines,
    data_frequency,
    capital_base,
    bundle,
    bundle_timestamp,
    start,
    end,
    output,
    trading_calendar,
    print_algo,
    metrics_set,
    local_namespace,
    environ,
    blotter,
    custom_loader,
    benchmark_spec,
):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.
    """
    bundle_data = bundles.load(
        bundle,
        environ,
        bundle_timestamp,
    )

    if trading_calendar is None:
        trading_calendar = get_calendar("XNYS")

    # date parameter validation
    if trading_calendar.session_distance(start, end) < 1:
        raise _RunAlgoError(
            "There are no trading days between %s and %s"
            % (
                start.date(),
                end.date(),
            ),
        )

    # Resolve the benchmark sid and/or returns series against the bundle.
    benchmark_sid, benchmark_returns = benchmark_spec.resolve(
        asset_finder=bundle_data.asset_finder,
        start_date=start,
        end_date=end,
    )

    if algotext is not None:
        if local_namespace:
            # Run inside the user's IPython namespace when requested.
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        # Evaluate '-D name=value' definitions into the algo namespace.
        for assign in defines:
            try:
                name, value = assign.split("=", 2)
            except ValueError:
                raise ValueError(
                    "invalid define %r, should be of the form name=value" % assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    "failed to execute definition for name %r: %s" % (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            "cannot pass define without `algotext`",
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            # Syntax-highlight the algorithm source when pygments is present.
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    first_trading_day = bundle_data.equity_minute_bar_reader.first_trading_day

    data = DataPortal(
        bundle_data.asset_finder,
        trading_calendar=trading_calendar,
        first_trading_day=first_trading_day,
        equity_minute_reader=bundle_data.equity_minute_bar_reader,
        equity_daily_reader=bundle_data.equity_daily_bar_reader,
        adjustment_reader=bundle_data.adjustment_reader,
        # NOTE(review): futures are served by the *equity* bar readers here —
        # confirm this bundle writes futures through the equity writers.
        future_minute_reader=bundle_data.equity_minute_bar_reader,
        future_daily_reader=bundle_data.equity_daily_bar_reader,
    )

    # Pricing loader without an FX-rate reader.
    pipeline_loader = USEquityPricingLoader.without_fx(
        bundle_data.equity_daily_bar_reader,
        bundle_data.adjustment_reader,
    )

    def choose_loader(column):
        # Pricing columns use the built-in loader; anything else must be
        # provided by the caller-supplied custom_loader registry.
        if column in USEquityPricing.columns:
            return pipeline_loader
        try:
            return custom_loader.get(column)
        except KeyError:
            raise ValueError("No PipelineLoader registered for column %s." % column)

    # Resolve a metrics-set name into the actual metrics set.
    if isinstance(metrics_set, str):
        try:
            metrics_set = metrics.load(metrics_set)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    # Resolve a blotter name via the extension registry.
    if isinstance(blotter, str):
        try:
            blotter = load(Blotter, blotter)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    try:
        perf = TradingAlgorithm(
            namespace=namespace,
            data_portal=data,
            get_pipeline_loader=choose_loader,
            trading_calendar=trading_calendar,
            sim_params=SimulationParameters(
                start_session=start,
                end_session=end,
                trading_calendar=trading_calendar,
                capital_base=capital_base,
                data_frequency=data_frequency,
            ),
            metrics_set=metrics_set,
            blotter=blotter,
            benchmark_returns=benchmark_returns,
            benchmark_sid=benchmark_sid,
            **{
                "initialize": initialize,
                "handle_data": handle_data,
                "before_trading_start": before_trading_start,
                "analyze": analyze,
            }
            if algotext is None
            else {
                "algo_filename": getattr(algofile, "name", "<algorithm>"),
                "script": algotext,
            },
        ).run()
    except NoBenchmark:
        raise _RunAlgoError(
            (
                "No ``benchmark_spec`` was provided, and"
                " ``zipline.api.set_benchmark`` was not called in"
                " ``initialize``."
            ),
            (
                "Neither '--benchmark-symbol' nor '--benchmark-sid' was"
                " provided, and ``zipline.api.set_benchmark`` was not called"
                " in ``initialize``. Did you mean to pass '--no-benchmark'?"
            ),
        )

    if output == "-":
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
from zipline.pipeline.loaders import USEquityPricingLoader from zipline.utils.calendars import get_calendar from zipline.pipeline.data import USEquityPricing from zipline.data.data_portal import DataPortal from sharadar.pipeline.engine import symbols, make_pipeline_engine, load_sharadar_bundle import pandas as pd bundle = load_sharadar_bundle() # Set the dataloader pricing_loader = USEquityPricingLoader.without_fx( bundle.equity_daily_bar_reader, bundle.adjustment_reader) # Define the function for the get_loader parameter def choose_loader(column): if column not in USEquityPricing.columns: raise Exception('Column not in USEquityPricing') return pricing_loader # Set the trading calendar trading_calendar = get_calendar('NYSE') start_date = pd.to_datetime('2020-08-26', utc=True) end_date = pd.to_datetime('2020-09-02', utc=True) bar_count = trading_calendar.session_distance(start_date, end_date) # Create a data portal data_portal = DataPortal(bundle.asset_finder, trading_calendar=trading_calendar, first_trading_day=start_date,