def get_source(error):
    """Return the error's JavaScript source highlighted for the terminal.

    The formatter's internal line counter is seeded from ``error["line"]``
    so printed line numbers start at the failing line, and any leading
    zeros produced in the numbering are rewritten via ``replace_zeros``.
    """
    js_lexer = get_lexer_by_name("javascript", stripall=True)
    term_formatter = TerminalFormatter(linenos=True)
    # Seed the (private) pygments line counter so numbering matches the error.
    term_formatter._lineno = error["line"] - 1

    highlighted = highlight(error["source"], js_lexer, term_formatter)

    def _rewrite_leading_zeros(text_line):
        return re.sub("^0+", replace_zeros, text_line)

    return each_line(_rewrite_leading_zeros, highlighted)
def syntax_highlight_file(input_filename, to_stdout=False, bg='light', colors_file=None, style=None):
    """Syntax-highlight a shell-script file for terminal display.

    Reads ``input_filename`` (or stdin when it is falsy), highlights each
    line with the Bash lexer, and writes the result either to stdout
    (``to_stdout=True``) or to a freshly created temp file whose name is
    printed on success.  Always terminates the process via ``sys.exit``.

    :param input_filename: path of the file to highlight; falsy means stdin.
    :param to_stdout: write to stdout instead of a temp file.
    :param bg: terminal background hint, ``'light'`` or ``'dark'``.
    :param colors_file: optional Python file exec'd to customize colors.
    :param style: optional pygments style name; selects Terminal256Formatter.
    """
    if to_stdout:
        outfile = sys.stdout
        out_filename = None
    else:
        basename = os.path.basename(input_filename)
        # NOTE: mktemp() is race-prone; kept because callers rely on the
        # plain returned pathname.  mkstemp() would be the safer choice.
        out_filename = mktemp('.term', basename + '_')
        try:
            outfile = open(out_filename, 'w')
        except Exception:
            print("Unexpected error in opening output file %s" % out_filename)
            sys.exit(1)

    if input_filename:
        if not os.path.exists(input_filename):
            sys.stderr.write("input file %s doesn't exist\n" % input_filename)
            sys.exit(2)
        try:
            infile = open(input_filename)
        except Exception:
            print("Unexpected error in opening input file %s" % input_filename)
            sys.exit(2)
    else:
        infile = sys.stdin

    if style:
        formatter = Terminal256Formatter(bg=bg, style=style)
    else:
        formatter = TerminalFormatter(bg=bg)
        formatter.colorscheme = TERMINAL_COLORS

    # Optionally let a user-supplied Python file mutate the color scheme.
    # SECURITY NOTE: exec() of an arbitrary file — only trusted config paths
    # should ever be passed here.
    if colors_file is not None and os.path.isfile(colors_file):
        try:
            with open(colors_file) as f:
                code = compile(f.read(), colors_file, 'exec')
                exec(code)
        except Exception:
            sys.exit(10)

    for code_line in infile.readlines():
        line = highlight(code_line, BashLexer(), formatter).strip("\r\n")
        outfile.write(line + "\n")

    # BUG FIX: the original said `outfile.close` (no call), so the output
    # file was never closed/flushed.  Don't close sys.stdout, though.
    if outfile is not sys.stdout:
        outfile.close()
    if out_filename:
        print(out_filename)
    sys.exit(0)
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         data,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         local_namespace,
         environ,
         broker,
         state_filename,
         realtime_bar_target):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.

    When ``broker`` is truthy the run is live: emission switches to
    'minute', the session is clamped to now .. now + 2 days, and the
    CnLiveTradingAlgorithm / DataPortalLive wrappers are used; otherwise a
    plain backtest runs via CnTradingAlgorithm.  Returns the performance
    object produced by ``.run()``.
    """
    if algotext is not None:
        if local_namespace:
            # Evaluate -D defines inside the caller's IPython user namespace.
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                # NOTE(review): maxsplit=2 means a value containing '=' splits
                # into three parts and raises ValueError; upstream zipline has
                # the same call, so kept as-is.
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            # Pretty-print the algorithm source when pygments is available.
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    if not trading_calendar:
        # Default to the Shanghai/Shenzhen calendar.
        trading_calendar = get_calendar('SHSZ')
    elif isinstance(trading_calendar, str):
        trading_calendar = get_calendar(trading_calendar)

    if bundle is not None:
        bundle_data = load(
            bundle,
            environ,
            bundle_timestamp,
        )

        # The bundle's asset db must live behind a sqlite:/// URL; the part
        # after the scheme is the filesystem path given to TradingEnvironment.
        prefix, connstr = re.split(
            r'sqlite:///',
            str(bundle_data.asset_finder.engine.url),
            maxsplit=1,
        )
        if prefix:
            raise ValueError(
                "invalid url %r, must begin with 'sqlite:///'" %
                str(bundle_data.asset_finder.engine.url),
            )
        env = TradingEnvironment(load=load_market_data,
                                 bm_symbol='000300',
                                 asset_db_path=connstr,
                                 environ=environ)
        first_trading_day = \
            bundle_data.equity_minute_bar_reader.first_trading_day
        # Live runs wrap the portal so bars can also come from the broker.
        DataPortalClass = (partial(DataPortalLive, broker)
                           if broker
                           else DataPortal)
        data = DataPortalClass(
            env.asset_finder,
            trading_calendar,
            first_trading_day=first_trading_day,
            equity_minute_reader=bundle_data.equity_minute_bar_reader,
            equity_daily_reader=bundle_data.equity_daily_bar_reader,
            adjustment_reader=bundle_data.adjustment_reader,
        )

        pipeline_loader = USEquityPricingLoader(
            bundle_data.equity_daily_bar_reader,
            bundle_data.adjustment_reader,
        )

        def choose_loader(column):
            # Only the built-in pricing columns have a registered loader.
            if column in USEquityPricing.columns:
                return pipeline_loader
            raise ValueError(
                "No PipelineLoader registered for column %s." % column)
    else:
        env = TradingEnvironment(
            environ=environ,
            load=load_market_data,
            bm_symbol='000300',
        )
        choose_loader = None

    emission_rate = 'daily'  # TODO why daily default
    if broker:
        emission_rate = 'minute'
        # Live trading: run from "now" for two days regardless of CLI dates.
        start = pd.Timestamp.utcnow()
        end = start + pd.Timedelta('2 day')

    TradingAlgorithmClass = (partial(CnLiveTradingAlgorithm,
                                     broker=broker,
                                     state_filename=state_filename,
                                     realtime_bar_target=realtime_bar_target)
                             if broker else CnTradingAlgorithm)

    perf = TradingAlgorithmClass(
        namespace=namespace,
        env=env,
        get_pipeline_loader=choose_loader,
        trading_calendar=trading_calendar,
        sim_params=create_simulation_parameters(
            start=start,
            end=end,
            capital_base=capital_base,
            emission_rate=emission_rate,
            data_frequency=data_frequency,
            trading_calendar=trading_calendar,
        ),
        # Direct function hooks are mutually exclusive with script text.
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }).run(
        data,
        overwrite_sim_params=False,
    )

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
def cli(
    consumer_key,
    consumer_secret,
    access_token_key,
    access_token_secret,
    delete_older_than,
    delete_everything_after,
    minimum_engagement,
    remove_favorites,
):
    """A simple program to delete all your tweets! Woohoo!

    Builds a Deleter from the Twitter credentials and retention options,
    validates the credentials (printing them as highlighted JSON), echoes a
    summary of what will be deleted, then starts the deleter.
    """
    # Imported lazily so --help stays fast and import errors only surface
    # when the command actually runs.
    from tweet_delete.deleter import Deleter
    from tweet_delete.util import td_format

    click.echo(click.style("🐦␡ starting tweet-delete".ljust(76) + "␡🐦", fg="green"))
    deleter = Deleter(
        consumer_key,
        consumer_secret,
        access_token_key,
        access_token_secret,
        delete_older_than,
        delete_everything_after,
        minimum_engagement,
        remove_favorites,
    )
    click.echo(click.style("🔑 validating credentials".ljust(77) + "🔑", fg="yellow"))
    creds = deleter.validate_creds()
    # Show the authenticated account as pretty-printed, highlighted JSON.
    click.echo(
        highlight(
            json.dumps(creds.AsDict(), sort_keys=True, indent=2),
            JsonLexer(),
            TerminalFormatter(),
        )
    )
    click.echo(
        click.style(
            "👉 tweets older than {} will be deleted".format(
                td_format(delete_older_than.total_seconds())
            ).ljust(77)
            + "👈",
            fg="yellow",
        )
    )
    if delete_everything_after is not None:
        click.echo(
            click.style(
                "👉 only tweets created after {} will be deleted".format(
                    str(delete_everything_after)
                ).ljust(77)
                + "👈",
                fg="yellow",
            )
        )
    if remove_favorites:
        click.echo(
            click.style(
                "👉 deleting favorites older than {} created after {}".format(
                    td_format(delete_older_than.total_seconds()),
                    str(delete_everything_after),
                ).ljust(77)
                + "👈",
                fg="yellow",
            )
        )
    else:
        click.echo(
            click.style("👉 favorites will NOT be deleted".ljust(77) + "👈", fg="yellow",)
        )
    click.echo(click.style("🦅 off we go".ljust(77) + "🦅", fg="green"))
    # Hand control to the deleter's main loop.
    deleter.run()
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         metrics_set,
         local_namespace,
         environ,
         blotter,
         benchmark_returns,
         broker,
         state_filename,
         realtime_bar_target,
         performance_callback,
         stop_execution_callback,
         teardown,
         execution_id):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.

    zipline-live additions:
        broker - wrapper to connect to a real broker
        state_filename - saving the context of the algo to be able to restart
        performance_callback - a callback to send performance results everyday
            and not only at the end of the backtest. this allows to run live,
            and monitor the performance of the algorithm
        stop_execution_callback - A callback to check if execution should be
            stopped. it is used to be able to stop live trading (also
            simulation could be stopped using this) execution. if the
            callback returns True, then algo execution will be aborted.
        teardown - algo method like handle_data() or before_trading_start()
            that is called when the algo execution stops
        execution_id - unique id to identify this execution (backtest or
            live instance)
    """
    if benchmark_returns is None:
        benchmark_returns, _ = load_market_data(environ=environ)

    emission_rate = 'daily'
    if broker:
        # Live trading always runs on minute bars.
        emission_rate = 'minute'

        # if we run zipline as a command line tool, these will probably not
        # be initiated
        if not start:
            start = pd.Timestamp.utcnow()
        if not end:
            # in cli mode, sessions are 1 day only. and it will be re-ran
            # each day by user
            end = start + pd.Timedelta('1 day')

    if algotext is not None:
        if local_namespace:
            # Evaluate -D defines inside the caller's IPython user namespace.
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            # Pretty-print the algorithm source when pygments is available.
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    if trading_calendar is None:
        trading_calendar = get_calendar('NYSE')

    # date parameter validation
    if trading_calendar.session_distance(start, end) < 1:
        raise _RunAlgoError(
            'There are no trading days between %s and %s' % (
                start.date(),
                end.date(),
            ),
        )

    bundle_data = bundles.load(
        bundle,
        environ,
        bundle_timestamp,
    )

    first_trading_day = \
        bundle_data.equity_minute_bar_reader.first_trading_day

    # Live runs wrap the data portal so bars can also come from the broker.
    DataPortalClass = (partial(DataPortalLive, broker)
                       if broker
                       else DataPortal)

    data = DataPortalClass(
        bundle_data.asset_finder,
        trading_calendar=trading_calendar,
        first_trading_day=first_trading_day,
        equity_minute_reader=bundle_data.equity_minute_bar_reader,
        equity_daily_reader=bundle_data.equity_daily_bar_reader,
        adjustment_reader=bundle_data.adjustment_reader,
    )

    pipeline_loader = USEquityPricingLoader(
        bundle_data.equity_daily_bar_reader,
        bundle_data.adjustment_reader,
    )

    def choose_loader(column):
        # Only the built-in US equity pricing columns are supported.
        if column in USEquityPricing.columns:
            return pipeline_loader
        raise ValueError("No PipelineLoader registered for column %s."
                         % column)

    # Both metrics_set and blotter may be passed by registry name.
    if isinstance(metrics_set, six.string_types):
        try:
            metrics_set = metrics.load(metrics_set)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    if isinstance(blotter, six.string_types):
        try:
            blotter = load(Blotter, blotter)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    TradingAlgorithmClass = (partial(LiveTradingAlgorithm,
                                     broker=broker,
                                     state_filename=state_filename,
                                     realtime_bar_target=realtime_bar_target)
                             if broker else TradingAlgorithm)

    perf = TradingAlgorithmClass(
        namespace=namespace,
        data_portal=data,
        get_pipeline_loader=choose_loader,
        trading_calendar=trading_calendar,
        sim_params=SimulationParameters(
            start_session=start,
            end_session=end,
            trading_calendar=trading_calendar,
            capital_base=capital_base,
            emission_rate=emission_rate,
            data_frequency=data_frequency,
            execution_id=execution_id
        ),
        metrics_set=metrics_set,
        blotter=blotter,
        benchmark_returns=benchmark_returns,
        performance_callback=performance_callback,
        stop_execution_callback=stop_execution_callback,
        # Direct function hooks are mutually exclusive with script text.
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
            'teardown': teardown,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }).run()

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
def handle_query(self, query, data=None, stream=False, verbose=False, query_id=None, compress=False, **kwargs):
    """Execute one user query (or CLI meta-command) and print the result.

    Recognizes exit commands, help, and the PostgreSQL-style backslash
    commands (rewritten into real ClickHouse SQL), applies user-defined
    regex substitutions (self.udf), runs the query through self.client,
    and renders data, messages, row counts and timing to the terminal.
    """
    if query.rstrip(';') == '':
        # Empty input: nothing to do.
        return
    elif query.lower() in EXIT_COMMANDS:
        raise EOFError
    elif query.lower() in (r'\?', 'help'):
        # Two-column help table: [command, description].
        rows = [
            ['', ''],
            ["clickhouse-cli's custom commands:", ''],
            ['---------------------------------', ''],
            ['USE', "Change the current database."],
            ['SET', "Set an option for the current CLI session."],
            ['QUIT', "Exit clickhouse-cli."],
            ['HELP', "Show this help message."],
            ['', ''],
            ["PostgreSQL-like custom commands:", ''],
            ['--------------------------------', ''],
            [r'\l', "Show databases."],
            [r'\c', "Change the current database."],
            [r'\d, \dt', "Show tables in the current database."],
            [r'\d+', "Show table's schema."],
            [r'\ps', "Show current queries."],
            [r'\kill', "Kill query by its ID."],
            ['', ''],
            ["Query suffixes:", ''],
            ['---------------', ''],
            [r'\g, \G', "Use the Vertical format."],
            [r'\p', "Enable the pager."],
        ]

        for row in rows:
            self.echo.success('{:<8s}'.format(row[0]), nl=False)
            self.echo.info(row[1])
        return
    elif query in (r'\d', r'\dt'):
        query = 'SHOW TABLES'
    elif query.startswith(r'\d+ '):
        query = 'DESCRIBE TABLE ' + query[4:]
    elif query == r'\l':
        query = 'SHOW DATABASES'
    elif query.startswith(r'\c '):
        query = 'USE ' + query[3:]
    elif query.startswith(r'\ps'):
        # Exclude this very query from the process list.
        query = (
            "SELECT query_id, user, address, elapsed, read_rows, memory_usage "
            "FROM system.processes WHERE query_id != '{}'"
        ).format(query_id)
    elif query.startswith(r'\kill '):
        self.client.kill_query(query[6:])
        return

    response = ''

    self.progress_reset()

    # Apply user-defined regex -> replacement rewrites before sending.
    if self.udf:
        for regex, replacement in self.udf.items():
            query = re.sub(regex, replacement, query)

    try:
        response = self.client.query(
            query,
            fmt=self.format,
            data=data,
            stream=stream,
            verbose=verbose,
            query_id=query_id,
            compress=compress,
        )
    except TimeoutError:
        self.echo.error("Error: Connection timeout.")
        return
    except ConnectionError as e:
        self.echo.error("Error: Failed to connect. (%s)" % e)
        return
    except DBException as e:
        # Server-side error: show the query, the server message, and an
        # optional stack trace, then the elapsed time.
        self.progress_reset()
        self.echo.error("\nQuery:")
        self.echo.error(query)
        self.echo.error("\n\nReceived exception from server:")
        self.echo.error(e.error)

        if self.stacktrace and e.stacktrace:
            self.echo.print("\nStack trace:")
            self.echo.print(e.stacktrace)

        self.echo.print('\nElapsed: {elapsed:.3f} sec.\n'.format(
            elapsed=e.response.elapsed.total_seconds()))
        return

    total_rows, total_bytes = self.progress_reset()

    self.echo.print()

    if stream:
        # Streaming mode: relay raw lines as they arrive.
        data = response.iter_lines() if hasattr(
            response, 'iter_lines') else response.data
        for line in data:
            print(line.decode('utf-8', 'ignore'))
    else:
        if response.data != '':
            print_func = print
            # Route through the pager when configured or forced by caller.
            if self.config.getboolean('main', 'pager') or kwargs.pop(
                    'force_pager', False):
                print_func = self.echo.pager

            should_highlight_output = (
                verbose and
                self.highlight and
                self.highlight_output and
                response.format in PRETTY_FORMATS
            )

            formatter = TerminalFormatter()

            if self.highlight and self.highlight_output and self.highlight_truecolor:
                formatter = TerminalTrueColorFormatter(
                    style=CHPygmentsStyle)

            if should_highlight_output:
                print_func(
                    pygments.highlight(response.data,
                                       CHPrettyFormatLexer(), formatter))
            else:
                print_func(response.data, end='')

        if response.message != '':
            self.echo.print(response.message)
            self.echo.print()

        self.echo.success('Ok. ', nl=False)

        if response.rows is not None:
            self.echo.print('{rows_count} row{rows_plural} in set.'.format(
                rows_count=response.rows,
                rows_plural='s' if response.rows != 1 else '',
            ), end=' ')

        if self.config.getboolean(
                'main', 'timing') and response.time_elapsed is not None:
            self.echo.print(
                'Elapsed: {elapsed:.3f} sec. Processed: {rows} rows, {bytes} ({avg_rps} rows/s, {avg_bps}/s)'
                .format(
                    elapsed=response.time_elapsed,
                    rows=numberunit_fmt(total_rows),
                    bytes=sizeof_fmt(total_bytes),
                    avg_rps=numberunit_fmt(total_rows /
                                           max(response.time_elapsed, 0.001)),
                    avg_bps=sizeof_fmt(total_bytes /
                                       max(response.time_elapsed, 0.001)),
                ),
                end='')

        self.echo.print('\n')
def pygmentize_code(code, color):
    """Return *code* highlighted as Python for the terminal.

    When ``color`` is falsy the source text is returned untouched.
    """
    if not color:
        return code
    return pygmentize(code, PythonLexer(), TerminalFormatter())
# Fragment of a Python 2 pygmentize-style main(): choose the output target
# and formatter, then resolve the lexer named by -l.  NOTE(review): this is
# function-interior code (uses `return`, `opts`, `args`, `usage` from the
# enclosing scope not visible here).
if outfn:
    # Output file given: infer the formatter from its extension if needed.
    if not fmter:
        try:
            fmter = get_formatter_for_filename(outfn, **parsed_opts)
        except (OptionError, ClassNotFound), err:
            print >>sys.stderr, 'Error:', err
            return 1
    try:
        # Binary mode: the formatter handles its own encoding.
        outfile = file(outfn, 'wb')
    except Exception, err:
        print >>sys.stderr, 'Error: cannot open outfile:', err
        return 1
else:
    # No output file: default to a terminal formatter on stdout.
    if not fmter:
        fmter = TerminalFormatter(**parsed_opts)
    outfile = sys.stdout

# select lexer
lexer = opts.pop('-l', None)
if lexer:
    try:
        lexer = get_lexer_by_name(lexer, **parsed_opts)
    except (OptionError, ClassNotFound), err:
        print >>sys.stderr, 'Error:', err
        return 1

if args:
    # At most one input filename is accepted.
    if len(args) > 1:
        print >>sys.stderr, usage
        return 2
def do_install(self, url, name, show_install_notes=True):
    """Download and install a plugin.

    If *name* is listed in the repo index at *url*, download and extract its
    zip into ``self.output_dir``; otherwise copy a locally available plugin.
    Afterwards handle the plugin's declared dependencies
    (requirements.txt via pip, requirements-nonpy.txt shown to the user,
    requirements-plugins.txt installed recursively) and show the sample
    config, if any.

    :param url: JSON index mapping plugin names to download URLs.
    :param name: plugin name to install.
    :param show_install_notes: show conf.py.sample notes after install.
    :return: 0 on success, 1 on failure.
    """
    data = self.get_json(url)
    if name in data:
        utils.makedirs(self.output_dir)
        url = data[name]
        LOGGER.info("Downloading '{0}'".format(url))
        try:
            zip_data = requests.get(url).content
        except requests.exceptions.SSLError:
            # Best-effort fallback for hosts with broken TLS.
            LOGGER.warning(
                "SSL error, using http instead of https (press ^C to abort)"
            )
            time.sleep(1)
            url = url.replace('https', 'http', 1)
            zip_data = requests.get(url).content

        zip_file = io.BytesIO()
        zip_file.write(zip_data)
        LOGGER.info('Extracting: {0} into {1}/'.format(
            name, self.output_dir))
        utils.extract_all(zip_file, self.output_dir)
        dest_path = os.path.join(self.output_dir, name)
    else:
        try:
            plugin_path = utils.get_plugin_path(name)
        except Exception:
            # Narrowed from a bare `except:` so ^C/SystemExit still work.
            LOGGER.error("Can't find plugin " + name)
            return 1

        utils.makedirs(self.output_dir)
        dest_path = os.path.join(self.output_dir, name)
        if os.path.exists(dest_path):
            LOGGER.error("{0} is already installed".format(name))
            return 1

        LOGGER.info('Copying {0} into plugins'.format(plugin_path))
        shutil.copytree(plugin_path, dest_path)

    reqpath = os.path.join(dest_path, 'requirements.txt')
    if os.path.exists(reqpath):
        LOGGER.notice('This plugin has Python dependencies.')
        LOGGER.info('Installing dependencies with pip...')
        try:
            subprocess.check_call(
                (sys.executable, '-m', 'pip', 'install', '-r', reqpath))
        except subprocess.CalledProcessError:
            LOGGER.error('Could not install the dependencies.')
            print('Contents of the requirements.txt file:\n')
            with io.open(reqpath, 'r', encoding='utf-8') as fh:
                print(utils.indent(fh.read(), 4 * ' '))
            print('You have to install those yourself or through a '
                  'package manager.')
        else:
            LOGGER.info('Dependency installation succeeded.')

    reqnpypath = os.path.join(dest_path, 'requirements-nonpy.txt')
    if os.path.exists(reqnpypath):
        LOGGER.notice('This plugin has third-party '
                      'dependencies you need to install '
                      'manually.')
        print('Contents of the requirements-nonpy.txt file:\n')
        with io.open(reqnpypath, 'r', encoding='utf-8') as fh:
            # Each line is "<name>::<url-or-note>".
            for line in fh.readlines():
                dep_name, dep_note = line.split('::')
                print(utils.indent(dep_name.strip(), 4 * ' '))
                print(utils.indent(dep_note.strip(), 8 * ' '))
                print()

        print('You have to install those yourself or through a package '
              'manager.')

    req_plug_path = os.path.join(dest_path, 'requirements-plugins.txt')
    if os.path.exists(req_plug_path):
        LOGGER.notice('This plugin requires other Nikola plugins.')
        LOGGER.info('Installing plugins...')
        plugin_failure = False
        try:
            with io.open(req_plug_path, 'r', encoding='utf-8') as inf:
                for plugname in inf.readlines():
                    # BUG FIX: accumulate failures instead of overwriting
                    # the flag on every iteration — previously a later
                    # success masked an earlier failure.
                    if self.do_install(
                            url, plugname.strip(), show_install_notes) != 0:
                        plugin_failure = True
        except Exception:
            plugin_failure = True

        if plugin_failure:
            LOGGER.error('Could not install a plugin.')
            print('Contents of the requirements-plugins.txt file:\n')
            with io.open(req_plug_path, 'r', encoding='utf-8') as fh:
                print(utils.indent(fh.read(), 4 * ' '))
            print('You have to install those yourself manually.')
        else:
            LOGGER.info('Dependency installation succeeded.')

    confpypath = os.path.join(dest_path, 'conf.py.sample')
    if os.path.exists(confpypath) and show_install_notes:
        LOGGER.notice(
            'This plugin has a sample config file. Integrate it with yours in order to make this plugin work!'
        )
        print('Contents of the conf.py.sample file:\n')
        with io.open(confpypath, 'r', encoding='utf-8') as fh:
            if self.site.colorful:
                print(
                    utils.indent(
                        pygments.highlight(fh.read(), PythonLexer(),
                                           TerminalFormatter()), 4 * ' '))
            else:
                print(utils.indent(fh.read(), 4 * ' '))
    return 0
def main():
    """Command-line entry point for the CloudStack API client.

    Parses CLI options, issues the requested API command, optionally polls
    an async job until completion, and prints the (optionally highlighted)
    JSON response.  Exits 0 on success, non-zero on error.
    """
    parser = argparse.ArgumentParser(description='Cloustack client.')
    parser.add_argument('--region', metavar='REGION',
                        help='Cloudstack region in ~/.cloudstack.ini',
                        default=os.environ.get('CLOUDSTACK_REGION',
                                               'cloudstack'))
    parser.add_argument('--post', action='store_true', default=False,
                        help='use POST instead of GET')
    parser.add_argument('--async', action='store_true', default=False,
                        help='do not wait for async result')
    parser.add_argument('--quiet', '-q', action='store_true', default=False,
                        help='do not display additional status messages')
    parser.add_argument('command', metavar="COMMAND",
                        help='Cloudstack API command to execute')

    def parse_option(x):
        # Each positional argument must look like KEY=VALUE.
        if '=' not in x:
            raise ValueError("{!r} is not a correctly formatted "
                             "option".format(x))
        return x.split('=', 1)

    parser.add_argument('arguments', metavar="OPTION=VALUE",
                        nargs='*', type=parse_option,
                        help='Cloudstack API argument')

    options = parser.parse_args()
    command = options.command

    # Repeated KEY=VALUE pairs accumulate into a set per key.
    kwargs = defaultdict(set)
    for arg in options.arguments:
        key, value = arg
        kwargs[key].add(value.strip(" \"'"))

    try:
        config = read_config(ini_group=options.region)
    except NoSectionError:
        raise SystemExit("Error: region '%s' not in config"
                         % options.region)

    if options.post:
        config['method'] = 'post'
    cs = CloudStack(**config)
    ok = True
    try:
        response = getattr(cs, command)(**kwargs)
    except CloudStackException as e:
        response = e.args[1]
        if not options.quiet:
            sys.stderr.write("Cloudstack error: HTTP response "
                             "{0}\n".format(response.status_code))
            sys.stderr.write(response.text)
        # Exit unconditionally on an API error; --quiet only suppresses
        # the stderr messages above.
        sys.exit(1)

    # BUG FIX: `async` is a reserved keyword since Python 3.7, so the
    # attribute set by the --async flag must be read via getattr().
    if 'Async' not in command and 'jobid' in response \
            and not getattr(options, 'async'):
        if not options.quiet:
            sys.stderr.write("Polling result... ^C to abort\n")
        while True:
            try:
                res = cs.queryAsyncJobResult(**response)
                if res['jobstatus'] != 0:
                    response = res
                    if res['jobresultcode'] != 0:
                        ok = False
                    break
                time.sleep(3)
            except KeyboardInterrupt:
                if not options.quiet:
                    sys.stderr.write("Result not ready yet.\n")
                break

    data = json.dumps(response, indent=2, sort_keys=True)
    if pygments and sys.stdout.isatty():
        data = pygments.highlight(data, JsonLexer(), TerminalFormatter())
    sys.stdout.write(data)
    sys.stdout.write('\n')
    sys.exit(int(not ok))
i:integer; begin s1:='hello'; writeln('"', s1, '"'); s1:=s1+' world'; writeln('"', s1, '"'); for i:=0 to 1000 do begin s2:=s2+'*'; end; change(s2); writeln(s2); end. """ print(highlight(code, DelphiLexer(), TerminalFormatter())) input() print("-----------------------") lexer = DelphiLexer() lexer.add_filter(KeywordCaseFilter(case="lower")) print(highlight(code, lexer, TerminalFormatter())) input() print("-----------------------") lexer = DelphiLexer()
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         data,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         print_algo,
         local_namespace,
         environ,
         live,
         exchange,
         algo_namespace,
         base_currency,
         live_graph):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`catalyst.run_algo`.

    Builds one exchange wrapper per comma-separated name in ``exchange``,
    then runs either ExchangeTradingAlgorithmLive (``live=True``, capital
    base fetched from real balances) or ExchangeTradingAlgorithmBacktest.
    Returns the performance object produced by ``.run()``.
    """
    if algotext is not None:
        if local_namespace:
            # Evaluate -D defines inside the caller's IPython user namespace.
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}
        if algofile is not None:
            algotext = algofile.read()

    if print_algo:
        if PYGMENTS:
            # Pretty-print the algorithm source when pygments is available.
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    mode = 'live' if live else 'backtest'
    log.info('running algo in {mode} mode'.format(mode=mode))

    exchange_name = exchange
    if exchange_name is None:
        raise ValueError('Please specify at least one exchange.')

    exchange_list = [x.strip().lower() for x in exchange.split(',')]

    exchanges = dict()
    for exchange_name in exchange_list:
        # Looking for the portfolio from the cache first
        portfolio = get_algo_object(algo_name=algo_namespace,
                                    key='portfolio_{}'.format(exchange_name),
                                    environ=environ)
        if portfolio is None:
            portfolio = ExchangePortfolio(start_date=pd.Timestamp.utcnow())

        # This corresponds to the json file containing api token info
        exchange_auth = get_exchange_auth(exchange_name)
        if live and (exchange_auth['key'] == ''
                     or exchange_auth['secret'] == ''):
            raise ExchangeAuthEmpty(exchange=exchange_name.title(),
                                    filename=os.path.join(
                                        get_exchange_folder(
                                            exchange_name, environ),
                                        'auth.json'))

        if exchange_name == 'bitfinex':
            exchanges[exchange_name] = Bitfinex(
                key=exchange_auth['key'],
                secret=exchange_auth['secret'],
                base_currency=base_currency,
                portfolio=portfolio)
        elif exchange_name == 'bittrex':
            exchanges[exchange_name] = Bittrex(
                key=exchange_auth['key'],
                secret=exchange_auth['secret'],
                base_currency=base_currency,
                portfolio=portfolio)
        elif exchange_name == 'poloniex':
            exchanges[exchange_name] = Poloniex(
                key=exchange_auth['key'],
                secret=exchange_auth['secret'],
                base_currency=base_currency,
                portfolio=portfolio)
        else:
            raise ExchangeNotFoundError(exchange_name=exchange_name)

    open_calendar = get_calendar('OPEN')

    env = TradingEnvironment(
        load=partial(load_crypto_market_data,
                     environ=environ,
                     start_dt=start,
                     end_dt=end),
        environ=environ,
        exchange_tz='UTC',
        asset_db_path=None  # We don't need an asset db, we have exchanges
    )
    env.asset_finder = AssetFinderExchange()
    choose_loader = None  # TODO: use the DataPortal for in the algorithm class for this

    if live:
        # Live runs start "now" and are given a one-year window.
        start = pd.Timestamp.utcnow()

        # TODO: fix the end data.
        end = start + timedelta(hours=8760)

        data = DataPortalExchangeLive(exchanges=exchanges,
                                      asset_finder=env.asset_finder,
                                      trading_calendar=open_calendar,
                                      first_trading_day=pd.to_datetime(
                                          'today', utc=True))

        def fetch_capital_base(exchange, attempt_index=0):
            """
            Fetch the base currency amount required to bootstrap
            the algorithm against the exchange.

            The algorithm cannot continue without this value.

            :param exchange: the targeted exchange
            :param attempt_index:
            :return capital_base: the amount of base currency available for
            trading
            """
            try:
                log.debug('retrieving capital base in {} to bootstrap '
                          'exchange {}'.format(base_currency, exchange_name))
                balances = exchange.get_balances()
            except ExchangeRequestError as e:
                # Retry with a delay for up to 20 attempts before giving up.
                if attempt_index < 20:
                    log.warn('could not retrieve balances on {}: {}'.format(
                        exchange.name, e))
                    sleep(5)
                    return fetch_capital_base(exchange, attempt_index + 1)
                else:
                    raise ExchangeRequestErrorTooManyAttempts(
                        attempts=attempt_index, error=e)

            if base_currency in balances:
                return balances[base_currency]
            else:
                raise BaseCurrencyNotFoundError(base_currency=base_currency,
                                                exchange=exchange_name)

        # Total capital base is the sum of balances across all exchanges.
        capital_base = 0
        for exchange_name in exchanges:
            exchange = exchanges[exchange_name]
            capital_base += fetch_capital_base(exchange)

        sim_params = create_simulation_parameters(start=start,
                                                  end=end,
                                                  capital_base=capital_base,
                                                  emission_rate='minute',
                                                  data_frequency='minute')

        # TODO: use the constructor instead
        sim_params._arena = 'live'

        algorithm_class = partial(ExchangeTradingAlgorithmLive,
                                  exchanges=exchanges,
                                  algo_namespace=algo_namespace,
                                  live_graph=live_graph)
    else:
        # Removed the existing Poloniex fork to keep things simple
        # We can add back the complexity if required.

        # I don't think that we should have arbitrary price data bundles
        # Instead, we should center this data around exchanges.
        # We still need to support bundles for other misc data, but we
        # can handle this later.

        data = DataPortalExchangeBacktest(exchanges=exchanges,
                                          asset_finder=None,
                                          trading_calendar=open_calendar,
                                          first_trading_day=start,
                                          last_available_session=end)

        sim_params = create_simulation_parameters(
            start=start,
            end=end,
            capital_base=capital_base,
            data_frequency=data_frequency,
            emission_rate=data_frequency,
        )

        algorithm_class = partial(ExchangeTradingAlgorithmBacktest,
                                  exchanges=exchanges)

    perf = algorithm_class(
        namespace=namespace,
        env=env,
        get_pipeline_loader=choose_loader,
        sim_params=sim_params,
        # Direct function hooks are mutually exclusive with script text.
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }).run(
        data,
        overwrite_sim_params=False,
    )

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the catalyst magic not write any data
        perf.to_pickle(output)

    return perf
def finished(self):
    """Print the final test report (Python 2 reporter).

    Chooses a pygments formatter based on terminal capabilities and
    ``self.style``; when ANSI colors are unsupported, colorize()/highlight()
    are replaced by local no-ops.  Shows captured output for passes (in
    verbose mode) and failures, highlights tracebacks/assertions/diffs,
    prints a summary line, and exits non-zero if anything failed.
    """
    from pygments.lexers import (PythonTracebackLexer, PythonLexer,
                                 DiffLexer)

    if ANSI_COLORS_SUPPORT:
        from pygments.console import colorize
        from pygments import highlight

        if self.style in ('light', 'dark'):
            from pygments.formatters import TerminalFormatter
            formatter = TerminalFormatter(bg=self.style)
            if self.colorscheme is not None:
                from pygments.token import string_to_tokentype
                # Override the formatter's colors with the user's scheme;
                # the same value is used for both light and dark variants.
                for token, value in self.colorscheme.iteritems():
                    token = string_to_tokentype(token.capitalize())
                    formatter.colorscheme[token] = (value, value)
        else:
            from pygments.formatters import Terminal256Formatter
            formatter = Terminal256Formatter(style=self.style)
    else:
        # ANSI color codes seem not to be supported, make colorize()
        # and highlight() no-ops.
        formatter = None

        def colorize(_format, text):
            return text

        def highlight(text, _lexer, _formatter):
            return text

    if self.counter:
        self.progress.finish()
    print

    width, _ = utils.get_terminal_size()

    def show(result):
        # Header: test name, optional docstring, separator, captured output.
        print colorize('bold', result.test_name)
        if result.test.__doc__:
            print inspect.getdoc(result.test)
        print colorize('faint', '─' * width)
        for line in result.stdout:
            print colorize('bold', '→'),
            print line
        for line in result.stderr:
            print colorize('red', '→'),
            print line

    if self.verbose:
        # In verbose mode also show passing tests that produced output.
        for result in self.passes:
            if result.stdout or result.stderr:
                show(result)
                print

    for result in self.failures:
        show(result)

        # result.traceback seems to be in UTF-8 on my system (eg. for
        # literal unicode strings) but I guess this depends on the source
        # file encoding. Tell Pygments to guess: try UTF-8 and then latin1.
        # Without an `encoding` argument, Pygments just uses latin1.
        print highlight(result.traceback,
                        PythonTracebackLexer(encoding='guess'),
                        formatter)

        assertion = result.assertion
        if assertion is not None:
            print highlight(assertion,
                            PythonLexer(encoding='guess'),
                            formatter)

        equality_diff = result.equality_diff
        if equality_diff is not None:
            print highlight(equality_diff,
                            DiffLexer(encoding='guess'),
                            formatter)

        result.debug()

    # Color the failure count red only when there were failures.
    if self.failures:
        failed = colorize('red', str(len(self.failures)))
    else:
        failed = len(self.failures)

    print 'Failures: %s/%s (%s assertions, %.3f seconds)' % (
        failed, self.counter, statistics.assertions, self.total_time)

    if self.failures:
        raise SystemExit(1)
def main_inner(popts, args, usage):
    """Implement the pygmentize command line.

    ``popts`` is the pre-split getopt option list, ``args`` the positional
    arguments, ``usage`` the usage text.  Dispatches the informational
    flags (-h/-V/-L/-H/-N/-S), then selects a lexer and formatter and
    highlights the input (file, stdin, or line-by-line stdin with -s).

    Returns a process exit code: 0 success, 1 runtime error, 2 usage error.
    """
    opts = {}
    O_opts = []
    P_opts = []
    F_opts = []

    # Collect the repeatable options into lists; for the others the last
    # occurrence wins.
    for opt, arg in popts:
        if opt == '-O':
            O_opts.append(arg)
        elif opt == '-P':
            P_opts.append(arg)
        elif opt == '-F':
            F_opts.append(arg)
        opts[opt] = arg

    if opts.pop('-h', None) is not None:
        print(usage)
        return 0

    if opts.pop('-V', None) is not None:
        print('Pygments version %s, (c) 2006-2014 by Georg Brandl.' %
              __version__)
        return 0

    # handle ``pygmentize -L``
    L_opt = opts.pop('-L', None)
    if L_opt is not None:
        if opts:
            print(usage, file=sys.stderr)
            return 2

        # print version
        main(['', '-V'])
        if not args:
            args = ['lexer', 'formatter', 'filter', 'style']
        for arg in args:
            _print_list(arg.rstrip('s'))
        return 0

    # handle ``pygmentize -H``
    H_opt = opts.pop('-H', None)
    if H_opt is not None:
        if opts or len(args) != 2:
            print(usage, file=sys.stderr)
            return 2

        what, name = args
        if what not in ('lexer', 'formatter', 'filter'):
            print(usage, file=sys.stderr)
            return 2

        return _print_help(what, name)

    # parse -O options
    parsed_opts = _parse_options(O_opts)
    opts.pop('-O', None)

    # parse -P options (name=value; a bare name becomes a True flag)
    for p_opt in P_opts:
        try:
            name, value = p_opt.split('=', 1)
        except ValueError:
            parsed_opts[p_opt] = True
        else:
            parsed_opts[name] = value
    opts.pop('-P', None)

    # encodings
    inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
    outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))

    # handle ``pygmentize -N``
    infn = opts.pop('-N', None)
    if infn is not None:
        lexer = find_lexer_class_for_filename(infn)
        if lexer is None:
            lexer = TextLexer

        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -S``
    S_opt = opts.pop('-S', None)
    a_opt = opts.pop('-a', None)
    if S_opt is not None:
        f_opt = opts.pop('-f', None)
        if not f_opt:
            print(usage, file=sys.stderr)
            return 2
        if opts or args:
            print(usage, file=sys.stderr)
            return 2

        try:
            parsed_opts['style'] = S_opt
            fmter = get_formatter_by_name(f_opt, **parsed_opts)
        except ClassNotFound as err:
            print(err, file=sys.stderr)
            return 1

        print(fmter.get_style_defs(a_opt or ''))
        return 0

    # if no -S is given, -a is not allowed
    if a_opt is not None:
        print(usage, file=sys.stderr)
        return 2

    # parse -F options
    F_opts = _parse_filters(F_opts)
    opts.pop('-F', None)

    # select lexer
    lexer = None

    # given by name?
    lexername = opts.pop('-l', None)
    if lexername:
        try:
            lexer = get_lexer_by_name(lexername, **parsed_opts)
        except (OptionError, ClassNotFound) as err:
            print('Error:', err, file=sys.stderr)
            return 1

    # read input code
    code = None

    if args:
        if len(args) > 1:
            print(usage, file=sys.stderr)
            return 2

        if '-s' in opts:
            print('Error: -s option not usable when input file specified',
                  file=sys.stderr)
            return 2

        infn = args[0]
        try:
            with open(infn, 'rb') as infp:
                code = infp.read()
        except Exception as err:
            print('Error: cannot read infile:', err, file=sys.stderr)
            return 1
        if not inencoding:
            code, inencoding = guess_decode(code)

        # do we have to guess the lexer?
        if not lexer:
            try:
                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
            except ClassNotFound as err:
                if '-g' in opts:
                    try:
                        lexer = guess_lexer(code, **parsed_opts)
                    except ClassNotFound:
                        lexer = TextLexer(**parsed_opts)
                else:
                    print('Error:', err, file=sys.stderr)
                    return 1
            except OptionError as err:
                print('Error:', err, file=sys.stderr)
                return 1

    elif '-s' not in opts:
        # treat stdin as full file (-s support is later)
        # read code from terminal, always in binary mode since we want to
        # decode ourselves and be tolerant with it
        if sys.version_info > (3,):
            # Python 3: we have to use .buffer to get a binary stream
            code = sys.stdin.buffer.read()
        else:
            code = sys.stdin.read()
        if not inencoding:
            code, inencoding = guess_decode_from_terminal(code, sys.stdin)
            # else the lexer will do the decoding
        if not lexer:
            try:
                lexer = guess_lexer(code, **parsed_opts)
            except ClassNotFound:
                lexer = TextLexer(**parsed_opts)

    else:
        # -s option needs a lexer with -l
        if not lexer:
            print('Error: when using -s a lexer has to be selected with -l',
                  file=sys.stderr)
            return 2

    # process filters
    for fname, fopts in F_opts:
        try:
            lexer.add_filter(fname, **fopts)
        except ClassNotFound as err:
            print('Error:', err, file=sys.stderr)
            return 1

    # select formatter
    outfn = opts.pop('-o', None)
    fmter = opts.pop('-f', None)
    if fmter:
        try:
            fmter = get_formatter_by_name(fmter, **parsed_opts)
        except (OptionError, ClassNotFound) as err:
            print('Error:', err, file=sys.stderr)
            return 1

    if outfn:
        if not fmter:
            # No explicit formatter: derive one from the output filename.
            try:
                fmter = get_formatter_for_filename(outfn, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1
        try:
            outfile = open(outfn, 'wb')
        except Exception as err:
            print('Error: cannot open outfile:', err, file=sys.stderr)
            return 1
    else:
        if not fmter:
            fmter = TerminalFormatter(**parsed_opts)
        if sys.version_info > (3,):
            # Python 3: we have to use .buffer to get a binary stream
            outfile = sys.stdout.buffer
        else:
            outfile = sys.stdout

    # determine output encoding if not explicitly selected
    if not outencoding:
        if outfn:
            # output file? use lexer encoding for now (can still be None)
            fmter.encoding = inencoding
        else:
            # else use terminal encoding
            fmter.encoding = terminal_encoding(sys.stdout)

    # provide coloring under Windows, if possible
    if not outfn and sys.platform in ('win32', 'cygwin') and \
       fmter.name in ('Terminal', 'Terminal256'):  # pragma: no cover
        # unfortunately colorama doesn't support binary streams on Py3
        if sys.version_info > (3,):
            from pygments.util import UnclosingTextIOWrapper
            outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
            fmter.encoding = None
        try:
            import colorama.initialise
        except ImportError:
            pass
        else:
            outfile = colorama.initialise.wrap_stream(
                outfile, convert=None, strip=None, autoreset=False, wrap=True)

    # When using the LaTeX formatter and the option `escapeinside` is
    # specified, we need a special lexer which collects escaped text
    # before running the chosen language lexer.
    escapeinside = parsed_opts.get('escapeinside', '')
    if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
        left = escapeinside[0]
        right = escapeinside[1]
        lexer = LatexEmbeddedLexer(left, right, lexer)

    # ... and do it!
    if '-s' not in opts:
        # process whole input as per normal...
        highlight(code, lexer, fmter, outfile)
        return 0
    else:
        # line by line processing of stdin (eg: for 'tail -f')...
        try:
            while 1:
                if sys.version_info > (3,):
                    # Python 3: we have to use .buffer to get a binary stream
                    line = sys.stdin.buffer.readline()
                else:
                    line = sys.stdin.readline()
                if not line:
                    break
                if not inencoding:
                    line = guess_decode_from_terminal(line, sys.stdin)[0]
                highlight(line, lexer, fmter, outfile)
                if hasattr(outfile, 'flush'):
                    outfile.flush()
            return 0
        except KeyboardInterrupt:  # pragma: no cover
            return 0
conf=args.config[0] file = open(conf, 'r') # read config files for keys and api endpoint for line in file: if 'apikey' in line: apikey=(line.split("=")[1].rstrip('\n')) if 'secretkey' in line: secretkey=(line.split("=")[1].rstrip('\n')) if 'url' in line: url=str(line.split("=")[1].rstrip('\n')) # create header headers = {} headers['api-key'] = apikey headers['secret-key'] = secretkey headers['content-type'] = 'application/json' command = 'FileSystems' url = url+command # get filesystems req = requests.get(url, headers = headers) filesystems = json.dumps(req.json(), indent=4) print(highlight(filesystems, JsonLexer(), TerminalFormatter())) vols=(len(req.json())) print('There are '+str(vols)+' volumes')
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         data,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         print_algo,
         local_namespace,
         environ):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.
    """
    # Build the namespace that algorithm source text will execute in.
    if algotext is not None:
        if local_namespace:
            # Reuse the interactive IPython namespace so the algorithm can
            # see the user's variables.
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}

    if algofile is not None:
        algotext = algofile.read()

    if print_algo:
        # Echo the algorithm source, syntax-highlighted when pygments is
        # available.
        if PYGMENTS:
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    if bundle is not None:
        # Resolve the data bundle and derive the asset-db path from its
        # sqlite URL.
        bundle_data = load(
            bundle,
            environ,
            bundle_timestamp,
        )

        prefix, connstr = re.split(
            r'sqlite:///',
            str(bundle_data.asset_finder.engine.url),
            maxsplit=1,
        )
        if prefix:
            raise ValueError(
                "invalid url %r, must begin with 'sqlite:///'" %
                str(bundle_data.asset_finder.engine.url),
            )
        env = TradingEnvironment(asset_db_path=connstr)
        data = DataPortal(
            env,
            equity_minute_reader=bundle_data.minute_bar_reader,
            equity_daily_reader=bundle_data.daily_bar_reader,
            adjustment_reader=bundle_data.adjustment_reader,
        )

        pipeline_loader = USEquityPricingLoader(
            bundle_data.daily_bar_reader,
            bundle_data.adjustment_reader,
        )

        def choose_loader(column):
            # Only the built-in US equity pricing columns are supported.
            if column in USEquityPricing.columns:
                return pipeline_loader
            raise ValueError("No PipelineLoader registered for column %s."
                             % column)

    # NOTE(review): `env` and `choose_loader` are only bound inside the
    # `bundle is not None` branch above — confirm callers always pass a
    # bundle, otherwise this raises NameError.
    perf = TradingAlgorithm(
        namespace=namespace,
        capital_base=capital_base,
        start=start,
        end=end,
        env=env,
        get_pipeline_loader=choose_loader,
        **{
            'initialize': initialize,
            'handle_data': handle_data,
            'before_trading_start': before_trading_start,
            'analyze': analyze,
        } if algotext is None else {
            'algo_filename': getattr(algofile, 'name', '<algorithm>'),
            'script': algotext,
        }
    ).run(
        data,
        overwrite_sim_params=False,
    )

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
options = config.get_options() log.debug('Raw options: %s', options) options.update_from_cli_arguments(args) log.debug('Options: %s', options) # Make request r = requests.request(method=request.method, url=request.url, headers=request.headers, params=request.params, data=request.body, verify=args['verify'], proxies=options.get_proxies()) # Print request if 'application/json' in r.headers['content-type'].lower(): try: from pygments import highlight from pygments.formatters import TerminalFormatter from pygments.lexers import JsonLexer print highlight(json.dumps(r.json(), indent=2), JsonLexer(), TerminalFormatter()) except: print json.dumps(r.json(), indent=2) else: print r.text if __name__ == '__main__': main()
def pprint_json(dics): json_str = json.dumps(dics, sort_keys=True, indent=2) print(highlight(to_unicode(json_str), JsonLexer(), TerminalFormatter()))
def do_install(self, name, data):
    """Install the plugin *name*.

    When *name* is listed in *data* (an index of name -> download URL) the
    plugin zip is downloaded and extracted into ``plugins``; otherwise it
    is copied from the locally bundled plugin path.  Afterwards any
    requirements files and sample config shipped with the plugin are
    surfaced to the user.

    Returns True on success, False when the plugin cannot be found or is
    already installed.
    """
    if name in data:
        utils.makedirs(self.output_dir)
        LOGGER.info('Downloading: ' + data[name])
        zip_file = BytesIO()
        zip_file.write(requests.get(data[name]).content)
        LOGGER.info('Extracting: {0} into plugins'.format(name))
        utils.extract_all(zip_file, 'plugins')
        dest_path = os.path.join('plugins', name)
    else:
        try:
            plugin_path = utils.get_plugin_path(name)
        except:
            # NOTE(review): bare except — any failure in get_plugin_path is
            # reported as "plugin not found".
            LOGGER.error("Can't find plugin " + name)
            return False

        utils.makedirs(self.output_dir)
        dest_path = os.path.join(self.output_dir, name)
        if os.path.exists(dest_path):
            LOGGER.error("{0} is already installed".format(name))
            return False

        LOGGER.info('Copying {0} into plugins'.format(plugin_path))
        shutil.copytree(plugin_path, dest_path)

    # Python dependencies: try pip, and on failure show the file contents.
    reqpath = os.path.join(dest_path, 'requirements.txt')
    if os.path.exists(reqpath):
        LOGGER.notice('This plugin has Python dependencies.')
        LOGGER.info('Installing dependencies with pip...')
        try:
            subprocess.check_call(('pip', 'install', '-r', reqpath))
        except subprocess.CalledProcessError:
            LOGGER.error('Could not install the dependencies.')
            print('Contents of the requirements.txt file:\n')
            with codecs.open(reqpath, 'rb', 'utf-8') as fh:
                print(indent(fh.read(), 4 * ' '))
            print('You have to install those yourself or through a '
                  'package manager.')
        else:
            LOGGER.info('Dependency installation succeeded.')

    # Non-Python dependencies are listed as "name :: hint" pairs and only
    # printed for the user to install manually.
    reqnpypath = os.path.join(dest_path, 'requirements-nonpy.txt')
    if os.path.exists(reqnpypath):
        LOGGER.notice('This plugin has third-party '
                      'dependencies you need to install '
                      'manually.')
        print('Contents of the requirements-nonpy.txt file:\n')
        with codecs.open(reqnpypath, 'rb', 'utf-8') as fh:
            for l in fh.readlines():
                i, j = l.split('::')
                print(indent(i.strip(), 4 * ' '))
                print(indent(j.strip(), 8 * ' '))
                print()

        print('You have to install those yourself or through a package '
              'manager.')

    # Show the sample config, syntax-highlighted when the site is colorful.
    confpypath = os.path.join(dest_path, 'conf.py.sample')
    if os.path.exists(confpypath):
        LOGGER.notice('This plugin has a sample config file.  Integrate it '
                      'with yours in order to make this plugin work!')
        print('Contents of the conf.py.sample file:\n')
        with codecs.open(confpypath, 'rb', 'utf-8') as fh:
            if self.site.colorful:
                print(indent(pygments.highlight(
                    fh.read(), PythonLexer(), TerminalFormatter()),
                    4 * ' '))
            else:
                print(indent(fh.read(), 4 * ' '))
    return True
def print_json(json_object): json_str = json.dumps(json_object, indent=2, sort_keys=True, default=str) print(highlight(json_str, JsonLexer(), TerminalFormatter()))
def show_js(self, raw): from pygments.lexers import JavascriptLexer from pygments.formatters import TerminalFormatter from pygments import highlight print highlight(raw, JavascriptLexer(), TerminalFormatter())
if outfn: if not fmter: try: fmter = get_formatter_for_filename(outfn, **parsed_opts) except (OptionError, ClassNotFound), err: print >> sys.stderr, 'Error:', err return 1 try: outfile = open(outfn, 'wb') except Exception, err: print >> sys.stderr, 'Error: cannot open outfile:', err return 1 else: if not fmter: fmter = TerminalFormatter(**parsed_opts) outfile = sys.stdout # select lexer lexer = opts.pop('-l', None) if lexer: try: lexer = get_lexer_by_name(lexer, **parsed_opts) except (OptionError, ClassNotFound), err: print >> sys.stderr, 'Error:', err return 1 if args: if len(args) > 1: print >> sys.stderr, usage return 2
Generic.Warning: ('yellow', 'yellow'), Name: ('teal', 'teal'), Number: ('white', 'white'), Text: ('lightgray', 'lightgray'), } do_highlight = not os.environ.has_key("GCC_NO_HIGHLIGHT") pipe = subprocess.Popen(sys.argv[1:], stdout=subprocess.PIPE, stderr=subprocess.PIPE) while True: line = pipe.stderr.readline() if not line: break if do_highlight: output = highlight(line, GccLexer(), TerminalFormatter(colorscheme=colours)) sys.stderr.write(output.encode("latin-1")) else: sys.stderr.write(line) pipe.wait() sys.exit(pipe.returncode)
def after_init(self) -> None: if self.options.format.lower() not in ('default', 'colored'): self.error_format = self.options.format self._lexer = PythonLexer() self._formatter = TerminalFormatter()
def _run(handle_data,
         initialize,
         before_trading_start,
         analyze,
         algofile,
         algotext,
         defines,
         data_frequency,
         capital_base,
         bundle,
         bundle_timestamp,
         start,
         end,
         output,
         trading_calendar,
         print_algo,
         metrics_set,
         local_namespace,
         environ,
         blotter,
         benchmark_spec):
    """Run a backtest for the given algorithm.

    This is shared between the cli and :func:`zipline.run_algo`.
    """
    bundle_data = bundles.load(
        bundle,
        environ,
        bundle_timestamp,
    )

    if trading_calendar is None:
        trading_calendar = get_calendar('XNYS')

    # date parameter validation
    if trading_calendar.session_distance(start, end) < 1:
        raise _RunAlgoError(
            'There are no trading days between %s and %s' % (
                start.date(),
                end.date(),
            ),
        )

    # Resolve the benchmark (sid and/or returns series) against the bundle.
    benchmark_sid, benchmark_returns = benchmark_spec.resolve(
        asset_finder=bundle_data.asset_finder,
        start_date=start,
        end_date=end,
    )

    # Build the namespace that algorithm source text will execute in.
    if algotext is not None:
        if local_namespace:
            ip = get_ipython()  # noqa
            namespace = ip.user_ns
        else:
            namespace = {}

        for assign in defines:
            try:
                name, value = assign.split('=', 2)
            except ValueError:
                raise ValueError(
                    'invalid define %r, should be of the form name=value' %
                    assign,
                )
            try:
                # evaluate in the same namespace so names may refer to
                # eachother
                namespace[name] = eval(value, namespace)
            except Exception as e:
                raise ValueError(
                    'failed to execute definition for name %r: %s' %
                    (name, e),
                )
    elif defines:
        raise _RunAlgoError(
            'cannot pass define without `algotext`',
            "cannot pass '-D' / '--define' without '-t' / '--algotext'",
        )
    else:
        namespace = {}

    if algofile is not None:
        algotext = algofile.read()

    if print_algo:
        # Echo the algorithm source, syntax-highlighted when pygments is
        # available.
        if PYGMENTS:
            highlight(
                algotext,
                PythonLexer(),
                TerminalFormatter(),
                outfile=sys.stdout,
            )
        else:
            click.echo(algotext)

    first_trading_day = \
        bundle_data.equity_minute_bar_reader.first_trading_day

    data = DataPortal(
        bundle_data.asset_finder,
        trading_calendar=trading_calendar,
        first_trading_day=first_trading_day,
        equity_minute_reader=bundle_data.equity_minute_bar_reader,
        equity_daily_reader=bundle_data.equity_daily_bar_reader,
        adjustment_reader=bundle_data.adjustment_reader,
    )

    pipeline_loader = USEquityPricingLoader.without_fx(
        bundle_data.equity_daily_bar_reader,
        bundle_data.adjustment_reader,
    )

    def choose_loader(column):
        # Only the built-in US equity pricing columns are supported.
        if column in USEquityPricing.columns:
            return pipeline_loader
        raise ValueError("No PipelineLoader registered for column %s." %
                         column)

    # Strings select named metrics sets / blotters registered elsewhere.
    if isinstance(metrics_set, six.string_types):
        try:
            metrics_set = metrics.load(metrics_set)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    if isinstance(blotter, six.string_types):
        try:
            blotter = load(Blotter, blotter)
        except ValueError as e:
            raise _RunAlgoError(str(e))

    try:
        perf = TradingAlgorithm(
            namespace=namespace,
            data_portal=data,
            get_pipeline_loader=choose_loader,
            trading_calendar=trading_calendar,
            sim_params=SimulationParameters(
                start_session=start,
                end_session=end,
                trading_calendar=trading_calendar,
                capital_base=capital_base,
                data_frequency=data_frequency,
            ),
            metrics_set=metrics_set,
            blotter=blotter,
            benchmark_returns=benchmark_returns,
            benchmark_sid=benchmark_sid,
            **{
                'initialize': initialize,
                'handle_data': handle_data,
                'before_trading_start': before_trading_start,
                'analyze': analyze,
            } if algotext is None else {
                'algo_filename': getattr(algofile, 'name', '<algorithm>'),
                'script': algotext,
            }
        ).run()
    except NoBenchmark:
        raise _RunAlgoError(
            ('No ``benchmark_spec`` was provided, and'
             ' ``zipline.api.set_benchmark`` was not called in'
             ' ``initialize``.'),
            ("Neither '--benchmark-symbol' nor '--benchmark-sid' was"
             " provided, and ``zipline.api.set_benchmark`` was not called"
             " in ``initialize``. Did you mean to pass '--no-benchmark'?"),
        )

    if output == '-':
        click.echo(str(perf))
    elif output != os.devnull:  # make the zipline magic not write any data
        perf.to_pickle(output)

    return perf
def get_output(command): detail = highlight(command.command, BashLexer(), TerminalFormatter(bg="dark")) return '{}#{} {}{}\n{}'.format(BLUE, sid(command), command.summary, END, detail)
def run_pipeline(print_algo=True, **kwargs):
    """Runs a full zipline pipeline given configuration keyword arguments.

    1. Load data (start and end dates can be provided a strings as
    well as the source and symobls).

    2. Instantiate algorithm (supply either algo_text or algofile
    kwargs containing initialize() and handle_data() functions). If
    algofile is supplied, will try to look for algofile_analyze.py and
    append it.

    3. Run algorithm (supply capital_base as float).

    4. Return performance dataframe.

    :Arguments:
        * print_algo : bool <default=True>
           Whether to print the algorithm to command line. Will use
           pygments syntax coloring if pygments is found.
    """
    start = kwargs['start']
    end = kwargs['end']
    # Compare against None because strings/timestamps may have been given
    if start is not None:
        start = pd.Timestamp(start, tz='UTC')
    if end is not None:
        end = pd.Timestamp(end, tz='UTC')

    # Fail out if only one bound is provided
    if ((start is None) or (end is None)) and (start != end):
        raise PipelineDateError(start=start, end=end)

    # Check if start and end are provided, and if the sim_params need to read
    # a start and end from the DataSource
    if start is None:
        overwrite_sim_params = True
    else:
        overwrite_sim_params = False

    symbols = kwargs['symbols'].split(',')
    asset_identifier = kwargs['metadata_index']

    # Pull asset metadata
    asset_metadata = kwargs.get('asset_metadata', None)
    asset_metadata_path = kwargs['metadata_path']
    # Read in a CSV file, if applicable
    if asset_metadata_path is not None:
        if os.path.isfile(asset_metadata_path):
            asset_metadata = pd.read_csv(asset_metadata_path,
                                         index_col=asset_identifier)

    source_arg = kwargs['source']
    source_time_column = kwargs['source_time_column']

    # Choose the price source: yahoo download, a single CSV file, or a
    # folder of CSV files.
    if source_arg is None:
        raise NoSourceError()
    elif source_arg == 'yahoo':
        source = zipline.data.load_bars_from_yahoo(
            stocks=symbols, start=start, end=end)
    elif os.path.isfile(source_arg):
        source = zipline.data.load_prices_from_csv(
            filepath=source_arg,
            identifier_col=source_time_column)
    elif os.path.isdir(source_arg):
        source = zipline.data.load_prices_from_csv_folder(
            folderpath=source_arg,
            identifier_col=source_time_column)
    else:
        raise NotImplementedError(
            'Source %s not implemented.' % kwargs['source'])

    algo_text = kwargs.get('algo_text', None)
    if algo_text is None:
        # Expect algofile to be set
        algo_fname = kwargs['algofile']
        with open(algo_fname, 'r') as fd:
            algo_text = fd.read()

    if print_algo:
        # Echo the algorithm source, colorized when pygments is installed.
        if PYGMENTS:
            highlight(algo_text, PythonLexer(), TerminalFormatter(),
                      outfile=sys.stdout)
        else:
            print_(algo_text)

    algo = zipline.TradingAlgorithm(
        script=algo_text,
        namespace=kwargs.get('namespace', {}),
        capital_base=float(kwargs['capital_base']),
        algo_filename=kwargs.get('algofile'),
        equities_metadata=asset_metadata,
        start=start,
        end=end)

    perf = algo.run(source, overwrite_sim_params=overwrite_sim_params)

    output_fname = kwargs.get('output', None)
    if output_fname is not None:
        perf.to_pickle(output_fname)

    return perf
def main():
    """Command-line entry point: parse options and skip-patterns, collect
    the target ``.py`` files, reformat their docstrings, and print diffs /
    write changes / fail for CI as requested."""
    _config = ConfigParser()
    patterns = []
    if Path("setup.cfg").exists():
        _config.read("setup.cfg")
        patterns = [
            SkipPattern(x.strip())
            for x in _config.get("velin", "ignore_patterns", fallback="").split("\n")
            if x
        ]

    parser = argparse.ArgumentParser(description="reformat the docstrigns of some file")
    parser.add_argument(
        "paths",
        metavar="path",
        type=str,
        nargs="+",
        help="Files or folder to reformat",
    )
    parser.add_argument(
        "--context",
        metavar="context",
        type=int,
        default=3,
        help="Number of context lines in the diff",
    )
    parser.add_argument(
        "--unsafe",
        action="store_true",
        help="Lift some safety feature (don't fail if updating the docstring is not indempotent",
    )
    parser.add_argument(
        "--check",
        action="store_true",
        help="Print the list of files/lines number and exit with a non-0 exit status, Use it for CI.",
    )
    parser.add_argument(
        "--no-diff",
        action="store_false",
        dest="print_diff",
        help="Do not print the diff",
    )
    parser.add_argument(
        "--black",
        action="store_true",
        dest="run_black",
        help="Do not run black on examples",
    )
    parser.add_argument(
        "--with-placeholder",
        action="store_true",
        dest="with_placeholder",
        help="insert missing sections/parameters placehoders",
    )
    parser.add_argument("--no-color", action="store_false", dest="do_highlight")
    parser.add_argument("--compact", action="store_true", help="Please ignore")
    parser.add_argument("--no-fail", action="store_false", dest="fail")
    parser.add_argument(
        "--space-in-see-also-title", action="store_true", dest="space_in_see_also_title"
    )
    parser.add_argument(
        "--space-in-notes-title", action="store_true", dest="space_in_notes_title"
    )
    parser.add_argument(
        "--no-fixers",
        action="store_false",
        dest="run_fixers",
        help="try to only reformat and does not run fixers heuristics",
    )
    parser.add_argument(
        "--write",
        dest="write",
        action="store_true",
        help="Try to write the updated docstring to the files",
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        help="increase the verbosity of the output",
    )

    args = parser.parse_args()
    from types import SimpleNamespace

    config = Config(
        {
            "with_placeholder": args.with_placeholder,
            "compact_param": args.compact,
            "space_in_see_also_title": args.space_in_see_also_title,
            "space_in_notes_title": args.space_in_notes_title,
            "run_fixers": args.run_fixers,
        }
    )
    global BLACK_REFORMAT
    if args.run_black:
        BLACK_REFORMAT = True
    else:
        BLACK_REFORMAT = False

    # Optionally swap in the `there` debug print.
    global print
    if args.verbose:
        try:
            from there import print
        except ImportError:
            pass

    # Expand directories into every contained .py file.
    to_format = []
    for f in args.paths:
        p = Path(f)
        if p.is_dir():
            for sf in p.glob("**/*.py"):
                to_format.append(sf)
        else:
            to_format.append(p)

    def to_skip(file, patterns):
        # A matching file pattern with no object pattern skips the whole
        # file; with an object pattern the file is still processed.
        for p in patterns:
            if re.match(p.file, file):
                if p.obj_pattern is None:
                    return True
                else:
                    return False
        return False

    need_changes = []
    for file in to_format:
        if to_skip(str(file), patterns):
            print("ignoring", file)
            continue
        try:
            with open(file) as f:
                data = f.read()
        except Exception as e:
            # continue
            continue
            # NOTE(review): unreachable — the `continue` above always fires,
            # so read failures are silently skipped.
            raise RuntimeError(f"Fail reading {file}") from e

        obj_p = [p.obj_pattern for p in patterns if re.match(p.file, str(file))]
        new = reformat_file(
            data,
            file,
            args.compact,
            args.unsafe,
            fail=args.fail,
            config=config,
            obj_p=obj_p,
        )
        # test(docstring, file)
        if new != data:
            need_changes.append(str(file))

            dold = data.splitlines()
            dnew = new.splitlines()
            diffs = list(
                difflib.unified_diff(
                    dold, dnew, n=args.context, fromfile=str(file), tofile=str(file)
                ),
            )
            if args.print_diff and not args.write:
                code = "\n".join(diffs)

                if args.do_highlight:
                    from pygments import highlight
                    from pygments.formatters import TerminalFormatter
                    from pygments.lexers import DiffLexer

                    code = highlight(code, DiffLexer(), TerminalFormatter())

                print(code)
            if args.write:
                with open(file, "w") as f:
                    f.write(new)

    if args.check:
        if len(need_changes) != 0:
            sys.exit(
                "Some files/functions need updates:\n - "
                + "\n - ".join(need_changes)
            )
        else:
            sys.exit(0)
def create(fsid, url, data, head): data_json = json.dumps(data) req = requests.post(url, headers=head, data=data_json) details = json.dumps(req.json(), indent=4) print('Created volume ' + args.new_mountpoint[0]) print(highlight(details, JsonLexer(), TerminalFormatter()))
elif as_source: logging.debug("asked for lexer: %s" % pattern.lower()) assert(pattern.lower() in context["lexers"]) lexer = get_lexer_by_name(pattern.lower()) # Python => 256 colors, python => 8 colors ask_256 = pattern[0].isupper() if ask_256: logging.debug("256 colors mode") try: formatter = Terminal256Formatter(style=color) except: # style not found logging.warning("style %s not found, fallback to default style" % color) formatter = Terminal256Formatter() else: logging.debug("8 colors mode") formatter = TerminalFormatter() write_all( as_all, sys.stdin, sys.stdout, highlight, lexer, formatter ) # if color else: write_all( as_all, sys.stdin, sys.stdout, colorup, pattern, color, style, on_groups ) except UnknownColor as e: if debug: import traceback for var in context: print(var,context[var]) print(traceback.format_exc()) logging.error("unknown color: %s (maybe you forgot to install python3-pygments?)" % e ) sys.exit( error_codes["UnknownColor"] )
def _pprint_json(dics): json_str = json.dumps(dics, sort_keys=True, indent=4) print highlight(unicode(json_str, 'UTF-8'), JsonLexer(), TerminalFormatter())
def colorsql(value): if pygments: lexer = {'mysql': pygments.lexers.MySqlLexer}['mysql'] return pygments.highlight(value, lexer(), TerminalFormatter()) else: return value
def highlight_xml(xml_str): # Highlights a string containing XML, using terminal color codes return highlight(xml_str, XmlLexer(), TerminalFormatter())
def present(message, options, file=sys.stdout):
    """Write a message payload to the output, pretty printing and/or
    coloring it as configured in the options."""
    # Report a newly created resource location, if the response carries one.
    if not options.quiet and (message.opt.location_path or
                              message.opt.location_query):
        # FIXME: Percent encoding is completely missing; this would be done
        # most easily with a CRI library
        location_ref = "/" + "/".join(message.opt.location_path)
        if message.opt.location_query:
            location_ref += "?" + "&".join(message.opt.location_query)
        print(colored(
            f"Location options indicate new resource: {location_ref}",
            options, 'green'), file=sys.stderr)

    if not message.payload:
        return

    payload = None

    # Prefer the response content format, falling back to the request's.
    cf = message.opt.content_format or message.request.opt.content_format
    if cf is not None and cf.is_known():
        mime = cf.media_type
    else:
        mime = 'application/octet-stream'
    if options.pretty_print:
        from aiocoap.util.prettyprint import pretty_print
        prettyprinted = pretty_print(message)
        if prettyprinted is not None:
            # The pretty printer may refine both the mime type and the text.
            (infos, mime, payload) = prettyprinted
            if not options.quiet:
                for i in infos:
                    print(colored(i, options, 'grey', attrs=['bold']),
                          file=sys.stderr)

    color = options.color
    if color:
        from aiocoap.util.prettyprint import lexer_for_mime
        import pygments
        try:
            lexer = lexer_for_mime(mime)
        except pygments.util.ClassNotFound:
            color = False

    if color and payload is None:
        # Coloring requires a unicode-string style payload, either from the
        # mime type or from the pretty printer.
        try:
            payload = message.payload.decode('utf8')
        except UnicodeDecodeError:
            color = False

    if color:
        from pygments.formatters import TerminalFormatter
        from pygments import highlight
        highlit = highlight(
            payload,
            lexer,
            TerminalFormatter(),
        )
        # The TerminalFormatter already adds an end-of-line character, not
        # trying to add one for any missing trailing newlines.
        print(highlit, file=file, end="")
        file.flush()
    else:
        if payload is None:
            # Raw bytes path: write through the binary buffer.
            file.buffer.write(message.payload)
            if file.isatty() and message.payload[-1:] != b'\n':
                file.write("\n")
        else:
            file.write(payload)
            # NOTE(review): `payload[-1]` raises IndexError on an empty
            # string, while the bytes branch above uses a safe slice —
            # confirm the pretty printer never yields an empty payload.
            if file.isatty() and payload[-1] != '\n':
                file.write("\n")
try: infile = open(input_filename) except IOError, (errno, strerror): print "I/O in opening debugger input file %s" % input_filename print "error(%s): %s" % (errno, strerror) sys.exit(2) except: print "Unexpected error in opening output file %s" % out_filename sys.exit(2) pass pass else: infile = sys.stdin pass formatter = TerminalFormatter(bg=bg) if colors_file is not None and os.path.isfile(colors_file): try: execfile(colors_file) except: sys.exit(10) pass pass formatter.colorscheme = TERMINAL_COLORS for code_line in infile.readlines(): line = highlight(code_line, BashLexer(), formatter).strip("\r\n") outfile.write(line + "\n") # print line, pass outfile.close if out_filename: print out_filename
if outfn: if not fmter: try: fmter = get_formatter_for_filename(outfn, **parsed_opts) except (OptionError, ClassNotFound), err: print >> sys.stderr, "Error:", err return 1 try: outfile = open(outfn, "wb") except Exception, err: print >> sys.stderr, "Error: cannot open outfile:", err return 1 else: if not fmter: fmter = TerminalFormatter(**parsed_opts) outfile = sys.stdout # select lexer lexer = opts.pop("-l", None) if lexer: try: lexer = get_lexer_by_name(lexer, **parsed_opts) except (OptionError, ClassNotFound), err: print >> sys.stderr, "Error:", err return 1 if args: if len(args) > 1: print >> sys.stderr, usage return 2
def main(args=sys.argv):
    """
    Main command line entry point.

    Parses pygmentize-style options with getopt, dispatches the informational
    modes (-h/-V/-L/-H/-N/-S), then selects a formatter, output stream and
    lexer, and highlights the input file (or stdin) to the output.

    Returns 0 on success, 1 on processing errors, 2 on usage errors.

    NOTE(review): the source arrived with all newlines collapsed; the
    indentation below is reconstructed to match upstream Pygments
    cmdline.py -- structure at dedent points is inferred, confirm against
    the original file.
    """
    # pylint: disable-msg=R0911,R0912,R0915
    usage = USAGE % ((args[0],) * 6)

    if sys.platform in ['win32', 'cygwin']:
        try:
            # Provide coloring under Windows, if possible
            import colorama
            colorama.init()
        except ImportError:
            pass

    try:
        popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:hVHg")
    except getopt.GetoptError:
        print(usage, file=sys.stderr)
        return 2
    opts = {}
    O_opts = []
    P_opts = []
    F_opts = []
    # -O/-P/-F are repeatable, so they are collected in lists as well as
    # recorded (last occurrence only) in the opts dict.
    for opt, arg in popts:
        if opt == '-O':
            O_opts.append(arg)
        elif opt == '-P':
            P_opts.append(arg)
        elif opt == '-F':
            F_opts.append(arg)
        opts[opt] = arg

    if not opts and not args:
        print(usage)
        return 0

    if opts.pop('-h', None) is not None:
        print(usage)
        return 0

    if opts.pop('-V', None) is not None:
        print('Pygments version %s, (c) 2006-2014 by Georg Brandl.' % __version__)
        return 0

    # handle ``pygmentize -L``
    L_opt = opts.pop('-L', None)
    if L_opt is not None:
        if opts:
            print(usage, file=sys.stderr)
            return 2
        # print version
        main(['', '-V'])
        if not args:
            args = ['lexer', 'formatter', 'filter', 'style']
        for arg in args:
            _print_list(arg.rstrip('s'))
        return 0

    # handle ``pygmentize -H``
    H_opt = opts.pop('-H', None)
    if H_opt is not None:
        if opts or len(args) != 2:
            print(usage, file=sys.stderr)
            return 2
        what, name = args
        if what not in ('lexer', 'formatter', 'filter'):
            print(usage, file=sys.stderr)
            return 2
        _print_help(what, name)
        return 0

    # parse -O options
    parsed_opts = _parse_options(O_opts)
    opts.pop('-O', None)

    # parse -P options
    for p_opt in P_opts:
        try:
            name, value = p_opt.split('=', 1)
        except ValueError:
            # bare -P name (no '=') acts as a boolean flag
            parsed_opts[p_opt] = True
        else:
            parsed_opts[name] = value
    opts.pop('-P', None)

    # handle ``pygmentize -N``
    infn = opts.pop('-N', None)
    if infn is not None:
        try:
            lexer = get_lexer_for_filename(infn, **parsed_opts)
        except ClassNotFound as err:
            # NOTE(review): err is unused; unknown filenames silently fall
            # back to the plain-text lexer.
            lexer = TextLexer()
        except OptionError as err:
            print('Error:', err, file=sys.stderr)
            return 1
        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -S``
    S_opt = opts.pop('-S', None)
    a_opt = opts.pop('-a', None)
    if S_opt is not None:
        f_opt = opts.pop('-f', None)
        if not f_opt:
            print(usage, file=sys.stderr)
            return 2
        if opts or args:
            print(usage, file=sys.stderr)
            return 2
        try:
            parsed_opts['style'] = S_opt
            fmter = get_formatter_by_name(f_opt, **parsed_opts)
        except ClassNotFound as err:
            print(err, file=sys.stderr)
            return 1
        arg = a_opt or ''
        try:
            print(fmter.get_style_defs(arg))
        except Exception as err:
            print('Error:', err, file=sys.stderr)
            return 1
        return 0

    # if no -S is given, -a is not allowed
    if a_opt is not None:
        print(usage, file=sys.stderr)
        return 2

    # parse -F options
    F_opts = _parse_filters(F_opts)
    opts.pop('-F', None)

    # select formatter
    outfn = opts.pop('-o', None)
    fmter = opts.pop('-f', None)
    if fmter:
        try:
            fmter = get_formatter_by_name(fmter, **parsed_opts)
        except (OptionError, ClassNotFound) as err:
            print('Error:', err, file=sys.stderr)
            return 1

    if outfn:
        if not fmter:
            # infer formatter from the output file extension
            try:
                fmter = get_formatter_for_filename(outfn, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1
        try:
            outfile = open(outfn, 'wb')
        except Exception as err:
            print('Error: cannot open outfile:', err, file=sys.stderr)
            return 1
    else:
        if not fmter:
            fmter = TerminalFormatter(**parsed_opts)
        outfile = sys.stdout

    # select lexer
    lexer = opts.pop('-l', None)
    if lexer:
        try:
            lexer = get_lexer_by_name(lexer, **parsed_opts)
        except (OptionError, ClassNotFound) as err:
            print('Error:', err, file=sys.stderr)
            return 1

    if args:
        if len(args) > 1:
            print(usage, file=sys.stderr)
            return 2
        infn = args[0]
        try:
            # NOTE(review): file object is never closed explicitly
            code = open(infn, 'rb').read()
        except Exception as err:
            print('Error: cannot read infile:', err, file=sys.stderr)
            return 1
        if not lexer:
            try:
                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
            except ClassNotFound as err:
                if '-g' in opts:
                    # -g: fall back to content-based guessing
                    try:
                        lexer = guess_lexer(code, **parsed_opts)
                    except ClassNotFound:
                        lexer = TextLexer(**parsed_opts)
                else:
                    print('Error:', err, file=sys.stderr)
                    return 1
            except OptionError as err:
                print('Error:', err, file=sys.stderr)
                return 1
    else:
        if '-g' in opts:
            code = sys.stdin.read()
            try:
                lexer = guess_lexer(code, **parsed_opts)
            except ClassNotFound:
                lexer = TextLexer(**parsed_opts)
        elif not lexer:
            print('Error: no lexer name given and reading ' + \
                  'from stdin (try using -g or -l <lexer>)', file=sys.stderr)
            return 2
        else:
            code = sys.stdin.read()

    # No encoding given? Use latin1 if output file given,
    # stdin/stdout encoding otherwise.
    # (This is a compromise, I'm not too happy with it...)
    if 'encoding' not in parsed_opts and 'outencoding' not in parsed_opts:
        if outfn:
            # encoding pass-through
            fmter.encoding = 'latin1'
        else:
            if sys.version_info < (3,):
                # use terminal encoding; Python 3's terminals already do that
                lexer.encoding = getattr(sys.stdin, 'encoding', None) or 'ascii'
                fmter.encoding = getattr(sys.stdout, 'encoding', None) or 'ascii'
    elif not outfn and sys.version_info > (3,):
        # output to terminal with encoding -> use .buffer
        outfile = sys.stdout.buffer

    # ... and do it!
    try:
        # process filters
        for fname, fopts in F_opts:
            lexer.add_filter(fname, **fopts)
        highlight(code, lexer, fmter, outfile)
    except Exception:
        import traceback
        info = traceback.format_exception(*sys.exc_info())
        msg = info[-1].strip()
        if len(info) >= 3:
            # extract relevant file and position info
            msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
        print(file=sys.stderr)
        print('*** Error while highlighting:', file=sys.stderr)
        print(msg, file=sys.stderr)
        return 1
    return 0
def main():
    """Command-line entry point for the CloudStack API client.

    Reads the command name and options from sys.argv, issues the API call,
    optionally polls an asynchronous job until it finishes, and writes the
    JSON response (colorized when stdout is a terminal and pygments is
    available) to stdout.
    """
    usage = "Usage: {0} <command> [option1=value1 " \
            "[option2=value2] ...] [--async] [--post] " \
            "[--region=<region>]".format(sys.argv[0])

    if len(sys.argv) == 1:
        raise SystemExit(usage)
    command = sys.argv[1]

    # API parameters (repeatable -> set of values), bare --flags, and
    # --key=value client-side settings are collected separately.
    kwargs = defaultdict(set)
    flags = set()
    args = dict()
    for token in sys.argv[2:]:
        if token.startswith('--'):
            bare = token.strip('-')
            if '=' not in bare:
                flags.add(bare)
            else:
                key, value = bare.split('=', 1)
                if not value:
                    raise SystemExit(usage)
                args[key] = value
        else:
            if '=' not in token:
                raise SystemExit(usage)
            key, value = token.split('=', 1)
            kwargs[key].add(value.strip(" \"'"))

    # Region: --region beats the environment variable, which beats the default.
    region = args.get('region',
                      os.environ.get('CLOUDSTACK_REGION', 'cloudstack'))

    try:
        config = read_config(ini_group=region)
    except NoSectionError:
        raise SystemExit("Error: region '%s' not in config" % region)

    if 'post' in flags:
        config['method'] = 'post'

    cs = CloudStack(**config)
    try:
        response = getattr(cs, command)(**kwargs)
    except CloudStackException as e:
        # Keep going with the error payload so it is printed like a result.
        response = e.args[2]
        sys.stderr.write("Cloudstack error:\n")

    # Poll async jobs to completion unless --async was given.
    if 'Async' not in command and 'jobid' in response and 'async' not in flags:
        sys.stderr.write("Polling result... ^C to abort\n")
        while True:
            try:
                res = cs.queryAsyncJobResult(**response)
                if res['jobprocstatus'] == 0:
                    response = res
                    break
                time.sleep(3)
            except KeyboardInterrupt:
                sys.stderr.write("Result not ready yet.\n")
                break

    data = json.dumps(response, indent=2, sort_keys=True)
    if pygments and sys.stdout.isatty():
        data = pygments.highlight(data, JsonLexer(), TerminalFormatter())
    sys.stdout.write(data)
def main(args=sys.argv):
    """
    Main command line entry point.

    Parses pygmentize-style options with getopt, dispatches the informational
    modes (-h/-V/-L/-H/-N/-S), resolves lexer/formatter/encodings, and
    highlights the input (file, stdin, or -s line-by-line streaming) to the
    output.  Returns 0 on success, 1 on processing errors, 2 on usage errors.

    Fix in this revision: the outer ``except Exception`` handler contained a
    stray bare ``raise`` (leftover debugging) as its first statement, which
    made the entire friendly error report (traceback extraction,
    "*** Error while highlighting" message, ``return 1``) unreachable dead
    code.  The ``raise`` is removed so highlighting errors are reported as
    intended instead of dumping a raw traceback.
    """
    # pylint: disable-msg=R0911,R0912,R0915
    usage = USAGE % ((args[0],) * 6)

    try:
        popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:hVHgs")
    except getopt.GetoptError:
        print(usage, file=sys.stderr)
        return 2
    opts = {}
    O_opts = []
    P_opts = []
    F_opts = []
    # -O/-P/-F are repeatable, so collect them in lists as well as recording
    # (last occurrence only) in the opts dict.
    for opt, arg in popts:
        if opt == "-O":
            O_opts.append(arg)
        elif opt == "-P":
            P_opts.append(arg)
        elif opt == "-F":
            F_opts.append(arg)
        opts[opt] = arg

    if opts.pop("-h", None) is not None:
        print(usage)
        return 0

    if opts.pop("-V", None) is not None:
        print("Pygments version %s, (c) 2006-2014 by Georg Brandl." % __version__)
        return 0

    # handle ``pygmentize -L``
    L_opt = opts.pop("-L", None)
    if L_opt is not None:
        if opts:
            print(usage, file=sys.stderr)
            return 2
        # print version
        main(["", "-V"])
        if not args:
            args = ["lexer", "formatter", "filter", "style"]
        for arg in args:
            _print_list(arg.rstrip("s"))
        return 0

    # handle ``pygmentize -H``
    H_opt = opts.pop("-H", None)
    if H_opt is not None:
        if opts or len(args) != 2:
            print(usage, file=sys.stderr)
            return 2
        what, name = args
        if what not in ("lexer", "formatter", "filter"):
            print(usage, file=sys.stderr)
            return 2
        _print_help(what, name)
        return 0

    # parse -O options
    parsed_opts = _parse_options(O_opts)
    opts.pop("-O", None)

    # parse -P options
    for p_opt in P_opts:
        try:
            name, value = p_opt.split("=", 1)
        except ValueError:
            # bare -P name (no '=') acts as a boolean flag
            parsed_opts[p_opt] = True
        else:
            parsed_opts[name] = value
    opts.pop("-P", None)

    # encodings
    inencoding = parsed_opts.get("inencoding", parsed_opts.get("encoding"))
    outencoding = parsed_opts.get("outencoding", parsed_opts.get("encoding"))

    # handle ``pygmentize -N``
    infn = opts.pop("-N", None)
    if infn is not None:
        try:
            lexer = get_lexer_for_filename(infn, **parsed_opts)
        except ClassNotFound:
            # unknown filenames silently fall back to the plain-text lexer
            lexer = TextLexer()
        except OptionError as err:
            print("Error:", err, file=sys.stderr)
            return 1
        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -S``
    S_opt = opts.pop("-S", None)
    a_opt = opts.pop("-a", None)
    if S_opt is not None:
        f_opt = opts.pop("-f", None)
        if not f_opt:
            print(usage, file=sys.stderr)
            return 2
        if opts or args:
            print(usage, file=sys.stderr)
            return 2
        try:
            parsed_opts["style"] = S_opt
            fmter = get_formatter_by_name(f_opt, **parsed_opts)
        except ClassNotFound as err:
            print(err, file=sys.stderr)
            return 1
        arg = a_opt or ""
        try:
            print(fmter.get_style_defs(arg))
        except Exception as err:
            print("Error:", err, file=sys.stderr)
            return 1
        return 0

    # if no -S is given, -a is not allowed
    if a_opt is not None:
        print(usage, file=sys.stderr)
        return 2

    # parse -F options
    F_opts = _parse_filters(F_opts)
    opts.pop("-F", None)

    # select lexer
    lexer = opts.pop("-l", None)
    if lexer:
        try:
            lexer = get_lexer_by_name(lexer, **parsed_opts)
        except (OptionError, ClassNotFound) as err:
            print("Error:", err, file=sys.stderr)
            return 1

    # read input code
    code = None

    if args:
        if len(args) > 1:
            print(usage, file=sys.stderr)
            return 2
        if "-s" in opts:
            print("Error: -s option not usable when input file specified",
                  file=sys.stderr)
            return 1
        infn = args[0]
        try:
            with open(infn, "rb") as infp:
                code = infp.read()
        except Exception as err:
            print("Error: cannot read infile:", err, file=sys.stderr)
            return 1
        if not inencoding:
            code, inencoding = guess_decode(code)

        # do we have to guess the lexer?
        if not lexer:
            try:
                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
            except ClassNotFound as err:
                if "-g" in opts:
                    # -g: fall back to content-based guessing
                    try:
                        lexer = guess_lexer(code, **parsed_opts)
                    except ClassNotFound:
                        lexer = TextLexer(**parsed_opts)
                else:
                    print("Error:", err, file=sys.stderr)
                    return 1
            except OptionError as err:
                print("Error:", err, file=sys.stderr)
                return 1

    elif "-s" not in opts:  # treat stdin as full file (-s support is later)
        # read code from terminal, always in binary mode since we want to
        # decode ourselves and be tolerant with it
        if sys.version_info > (3,):
            # Python 3: we have to use .buffer to get a binary stream
            code = sys.stdin.buffer.read()
        else:
            code = sys.stdin.read()
        if not inencoding:
            code, inencoding = guess_decode_from_terminal(code, sys.stdin)
            # else the lexer will do the decoding
        if not lexer:
            try:
                lexer = guess_lexer(code, **parsed_opts)
            except ClassNotFound:
                lexer = TextLexer(**parsed_opts)

    # select formatter
    outfn = opts.pop("-o", None)
    fmter = opts.pop("-f", None)
    if fmter:
        try:
            fmter = get_formatter_by_name(fmter, **parsed_opts)
        except (OptionError, ClassNotFound) as err:
            print("Error:", err, file=sys.stderr)
            return 1
    if outfn:
        if not fmter:
            # infer formatter from the output file extension
            try:
                fmter = get_formatter_for_filename(outfn, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print("Error:", err, file=sys.stderr)
                return 1
        try:
            outfile = open(outfn, "wb")
        except Exception as err:
            print("Error: cannot open outfile:", err, file=sys.stderr)
            return 1
    else:
        if not fmter:
            fmter = TerminalFormatter(**parsed_opts)
        if sys.version_info > (3,):
            # Python 3: we have to use .buffer to get a binary stream
            outfile = sys.stdout.buffer
        else:
            outfile = sys.stdout

    # determine output encoding if not explicitly selected
    if not outencoding:
        if outfn:
            # output file? use lexer encoding for now (can still be None)
            fmter.encoding = inencoding
        else:
            # else use terminal encoding
            fmter.encoding = terminal_encoding(sys.stdout)

    # provide coloring under Windows, if possible
    if not outfn and sys.platform in ("win32", "cygwin") and \
            fmter.name in ("Terminal", "Terminal256"):
        # unfortunately colorama doesn't support binary streams on Py3
        if sys.version_info > (3,):
            import io
            outfile = io.TextIOWrapper(outfile, encoding=fmter.encoding)
            fmter.encoding = None
        try:
            import colorama.initialise
        except ImportError:
            pass
        else:
            outfile = colorama.initialise.wrap_stream(
                outfile, convert=None, strip=None, autoreset=False, wrap=True)

    # When using the LaTeX formatter and the option `escapeinside` is
    # specified, we need a special lexer which collects escaped text
    # before running the chosen language lexer.
    escapeinside = parsed_opts.get("escapeinside", "")
    if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
        left = escapeinside[0]
        right = escapeinside[1]
        lexer = LatexEmbeddedLexer(left, right, lexer)

    # ... and do it!
    try:
        # process filters
        for fname, fopts in F_opts:
            lexer.add_filter(fname, **fopts)

        if "-s" not in opts:
            # process whole input as per normal...
            highlight(code, lexer, fmter, outfile)
        else:
            if not lexer:
                print("Error: when using -s a lexer has to be selected with -l",
                      file=sys.stderr)
                return 1
            # line by line processing of stdin (eg: for 'tail -f')...
            try:
                while 1:
                    if sys.version_info > (3,):
                        # Python 3: we have to use .buffer to get a binary stream
                        line = sys.stdin.buffer.readline()
                    else:
                        line = sys.stdin.readline()
                    if not line:
                        break
                    if not inencoding:
                        line = guess_decode_from_terminal(line, sys.stdin)[0]
                    highlight(line, lexer, fmter, outfile)
                    if hasattr(outfile, "flush"):
                        outfile.flush()
            except KeyboardInterrupt:
                return 0
    except Exception:
        # (stray bare "raise" removed here -- it short-circuited everything
        # below and re-raised the exception to the caller)
        import traceback
        info = traceback.format_exception(*sys.exc_info())
        msg = info[-1].strip()
        if len(info) >= 3:
            # extract relevant file and position info
            msg += "\n (f%s)" % info[-2].split("\n")[0].strip()[1:]
        print(file=sys.stderr)
        print("*** Error while highlighting:", file=sys.stderr)
        print(msg, file=sys.stderr)
        return 1
    return 0
def main(args=sys.argv):
    """
    Main command line entry point.

    2013-era variant: parses pygmentize-style options with getopt, dispatches
    the informational modes (-h/-V/-L/-H/-N/-S), then selects a formatter,
    output stream and lexer, and highlights the input file (or stdin) to the
    output.  Returns 0 on success, 1 on processing errors, 2 on usage errors.

    NOTE(review): the source arrived with all newlines collapsed; the
    indentation below is reconstructed to match upstream Pygments
    cmdline.py -- structure at dedent points is inferred, confirm against
    the original file.
    """
    # pylint: disable-msg=R0911,R0912,R0915
    usage = USAGE % ((args[0],) * 6)

    if sys.platform in ["win32", "cygwin"]:
        try:
            # Provide coloring under Windows, if possible
            import colorama
            colorama.init()
        except ImportError:
            pass

    try:
        popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:hVHg")
    except getopt.GetoptError as err:
        # NOTE(review): err is bound but unused
        print(usage, file=sys.stderr)
        return 2
    opts = {}
    O_opts = []
    P_opts = []
    F_opts = []
    # -O/-P/-F are repeatable, so they are collected in lists as well as
    # recorded (last occurrence only) in the opts dict.
    for opt, arg in popts:
        if opt == "-O":
            O_opts.append(arg)
        elif opt == "-P":
            P_opts.append(arg)
        elif opt == "-F":
            F_opts.append(arg)
        opts[opt] = arg

    if not opts and not args:
        print(usage)
        return 0

    if opts.pop("-h", None) is not None:
        print(usage)
        return 0

    if opts.pop("-V", None) is not None:
        print("Pygments version %s, (c) 2006-2013 by Georg Brandl." % __version__)
        return 0

    # handle ``pygmentize -L``
    L_opt = opts.pop("-L", None)
    if L_opt is not None:
        if opts:
            print(usage, file=sys.stderr)
            return 2
        # print version
        main(["", "-V"])
        if not args:
            args = ["lexer", "formatter", "filter", "style"]
        for arg in args:
            _print_list(arg.rstrip("s"))
        return 0

    # handle ``pygmentize -H``
    H_opt = opts.pop("-H", None)
    if H_opt is not None:
        if opts or len(args) != 2:
            print(usage, file=sys.stderr)
            return 2
        what, name = args
        if what not in ("lexer", "formatter", "filter"):
            print(usage, file=sys.stderr)
            return 2
        _print_help(what, name)
        return 0

    # parse -O options
    parsed_opts = _parse_options(O_opts)
    opts.pop("-O", None)

    # parse -P options
    for p_opt in P_opts:
        try:
            name, value = p_opt.split("=", 1)
        except ValueError:
            # bare -P name (no '=') acts as a boolean flag
            parsed_opts[p_opt] = True
        else:
            parsed_opts[name] = value
    opts.pop("-P", None)

    # handle ``pygmentize -N``
    infn = opts.pop("-N", None)
    if infn is not None:
        try:
            lexer = get_lexer_for_filename(infn, **parsed_opts)
        except ClassNotFound as err:
            # NOTE(review): err is unused; unknown filenames silently fall
            # back to the plain-text lexer.
            lexer = TextLexer()
        except OptionError as err:
            print("Error:", err, file=sys.stderr)
            return 1
        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -S``
    S_opt = opts.pop("-S", None)
    a_opt = opts.pop("-a", None)
    if S_opt is not None:
        f_opt = opts.pop("-f", None)
        if not f_opt:
            print(usage, file=sys.stderr)
            return 2
        if opts or args:
            print(usage, file=sys.stderr)
            return 2
        try:
            parsed_opts["style"] = S_opt
            fmter = get_formatter_by_name(f_opt, **parsed_opts)
        except ClassNotFound as err:
            print(err, file=sys.stderr)
            return 1
        arg = a_opt or ""
        try:
            print(fmter.get_style_defs(arg))
        except Exception as err:
            print("Error:", err, file=sys.stderr)
            return 1
        return 0

    # if no -S is given, -a is not allowed
    if a_opt is not None:
        print(usage, file=sys.stderr)
        return 2

    # parse -F options
    F_opts = _parse_filters(F_opts)
    opts.pop("-F", None)

    # select formatter
    outfn = opts.pop("-o", None)
    fmter = opts.pop("-f", None)
    if fmter:
        try:
            fmter = get_formatter_by_name(fmter, **parsed_opts)
        except (OptionError, ClassNotFound) as err:
            print("Error:", err, file=sys.stderr)
            return 1

    if outfn:
        if not fmter:
            # infer formatter from the output file extension
            try:
                fmter = get_formatter_for_filename(outfn, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print("Error:", err, file=sys.stderr)
                return 1
        try:
            outfile = open(outfn, "wb")
        except Exception as err:
            print("Error: cannot open outfile:", err, file=sys.stderr)
            return 1
    else:
        if not fmter:
            fmter = TerminalFormatter(**parsed_opts)
        outfile = sys.stdout

    # select lexer
    lexer = opts.pop("-l", None)
    if lexer:
        try:
            lexer = get_lexer_by_name(lexer, **parsed_opts)
        except (OptionError, ClassNotFound) as err:
            print("Error:", err, file=sys.stderr)
            return 1

    if args:
        if len(args) > 1:
            print(usage, file=sys.stderr)
            return 2
        infn = args[0]
        try:
            # NOTE(review): file object is never closed explicitly
            code = open(infn, "rb").read()
        except Exception as err:
            print("Error: cannot read infile:", err, file=sys.stderr)
            return 1
        if not lexer:
            try:
                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
            except ClassNotFound as err:
                if "-g" in opts:
                    # -g: fall back to content-based guessing
                    try:
                        lexer = guess_lexer(code, **parsed_opts)
                    except ClassNotFound:
                        lexer = TextLexer(**parsed_opts)
                else:
                    print("Error:", err, file=sys.stderr)
                    return 1
            except OptionError as err:
                print("Error:", err, file=sys.stderr)
                return 1
    else:
        if "-g" in opts:
            code = sys.stdin.read()
            try:
                lexer = guess_lexer(code, **parsed_opts)
            except ClassNotFound:
                lexer = TextLexer(**parsed_opts)
        elif not lexer:
            print(
                "Error: no lexer name given and reading " +
                "from stdin (try using -g or -l <lexer>)", file=sys.stderr
            )
            return 2
        else:
            code = sys.stdin.read()

    # No encoding given? Use latin1 if output file given,
    # stdin/stdout encoding otherwise.
    # (This is a compromise, I'm not too happy with it...)
    if "encoding" not in parsed_opts and "outencoding" not in parsed_opts:
        if outfn:
            # encoding pass-through
            fmter.encoding = "latin1"
        else:
            if sys.version_info < (3,):
                # use terminal encoding; Python 3's terminals already do that
                lexer.encoding = getattr(sys.stdin, "encoding", None) or "ascii"
                fmter.encoding = getattr(sys.stdout, "encoding", None) or "ascii"
    elif not outfn and sys.version_info > (3,):
        # output to terminal with encoding -> use .buffer
        outfile = sys.stdout.buffer

    # ... and do it!
    try:
        # process filters
        for fname, fopts in F_opts:
            lexer.add_filter(fname, **fopts)
        highlight(code, lexer, fmter, outfile)
    except Exception as err:
        # NOTE(review): err is unused; the message is rebuilt from
        # sys.exc_info() below instead.
        import traceback
        info = traceback.format_exception(*sys.exc_info())
        msg = info[-1].strip()
        if len(info) >= 3:
            # extract relevant file and position info
            msg += "\n (f%s)" % info[-2].split("\n")[0].strip()[1:]
        print(file=sys.stderr)
        print("*** Error while highlighting:", file=sys.stderr)
        print(msg, file=sys.stderr)
        return 1
    return 0