# imports needed by the functions in this section; module paths follow the
# analysis_engine package layout referenced throughout this file
import argparse
import datetime
import importlib.machinery
import inspect
import json
import logging
import os
import sys
import types

import analysis_engine.algo as ae_algo
import analysis_engine.build_algo_request as build_algo_request
import analysis_engine.build_publish_request as build_publish_request
import analysis_engine.build_result as build_result
import analysis_engine.consts as ae_consts
import analysis_engine.plot_trading_history as plot_trading_history
import analysis_engine.run_algo as run_algo
import analysis_engine.work_tasks.get_celery_app as get_celery_app
import analysis_engine.work_tasks.get_task_results as get_task_results

# the original module builds its own logger; a stdlib logger keeps this
# section self-contained
log = logging.getLogger(__name__)


# note: in the original module this function is registered as a bound
# Celery task (it receives ``self`` as the first argument)
def run_distributed_algorithm(self, algo_req):
    """run_distributed_algorithm

    Process an Algorithm using a Celery task that is
    processed by a Celery worker

    :param algo_req: dictionary of key/values for
        running an algorithm using Celery workers
    """
    label = algo_req.get('name', 'ae-algo')
    verbose = algo_req.get('verbose_task', False)
    debug = algo_req.get('debug', False)

    # please be careful logging prod passwords:
    if debug:
        log.info(f'task - {label} - start algo_req={algo_req}')
    elif verbose:
        log.info(f'task - {label} - start ')
    # end of start log

    rec = {}
    res = build_result.build_result(
        status=ae_consts.NOT_RUN,
        err=None,
        rec=rec)

    created_algo_object = None
    custom_algo_module = None
    new_algo_object = None
    use_custom_algo = False
    found_algo_module = True  # assume the BaseAlgo

    should_publish_extract_dataset = False
    should_publish_history_dataset = False
    should_publish_report_dataset = False

    ticker = algo_req.get('ticker', 'SPY')
    num_days_back = algo_req.get('num_days_back', 75)
    name = algo_req.get('name', 'ae-algo')
    algo_module_path = algo_req.get('mod_path', None)
    module_name = algo_req.get('module_name', 'BaseAlgo')
    custom_algo_module = algo_req.get('custom_algo_module', None)
    new_algo_object = algo_req.get('new_algo_object', None)
    use_custom_algo = algo_req.get('use_custom_algo', False)
    should_publish_extract_dataset = algo_req.get(
        'should_publish_extract_dataset', False)
    should_publish_history_dataset = algo_req.get(
        'should_publish_history_dataset', False)
    should_publish_report_dataset = algo_req.get(
        'should_publish_report_dataset', False)
    start_date = algo_req.get('start_date', None)
    end_date = algo_req.get('end_date', None)
    raise_on_err = algo_req.get('raise_on_err', True)
    report_config = algo_req.get('report_config', None)
    history_config = algo_req.get('history_config', None)
    extract_config = algo_req.get('extract_config', None)

    err = None
    if algo_module_path:
        found_algo_module = False
        module_name = algo_module_path.split('/')[-1]
        loader = importlib.machinery.SourceFileLoader(
            module_name,
            algo_module_path)
        custom_algo_module = types.ModuleType(loader.name)
        loader.exec_module(custom_algo_module)
        use_custom_algo = True

        for member in inspect.getmembers(custom_algo_module):
            if module_name in str(member):
                found_algo_module = True
                break
        # for all members in this custom module file
    # if loading a custom algorithm module from a file on disk

    if not found_algo_module:
        err = (f'{label} - unable to find custom algorithm '
               f'module={custom_algo_module} '
               f'module_path={algo_module_path}')
        if algo_module_path:
            err = (
                f'{label} - analysis_engine.'
                'work_tasks.run_distributed_algorithm was unable '
                f'to find custom algorithm module={custom_algo_module} with '
                f'provided path to \n file: {algo_module_path} \n'
                '\n'
                'Please confirm '
                'that the class inherits from the BaseAlgo class like:\n'
                '\n'
                'import analysis_engine.algo\n'
                'class MyAlgo(analysis_engine.algo.BaseAlgo):\n '
                '\n'
                'If it is then please file an issue on github:\n '
                'https://github.com/AlgoTraders/stock-analysis-engine/'
                'issues/new \n\nFor now this error results in a shutdown'
                '\n')
        # if algo_module_path set

        log.error(err)
        res = build_result.build_result(
            status=ae_consts.ERR,
            err=err,
            rec=None)
        task_result = {
            'status': res['status'],
            'err': res['err'],
            'algo_req': algo_req,
            'rec': rec
        }
        return task_result
    # if not found_algo_module

    use_start_date = start_date
    use_end_date = end_date
    if not use_end_date:
        end_date = datetime.datetime.utcnow()
        use_end_date = end_date.strftime(
            ae_consts.COMMON_TICK_DATE_FORMAT)
    if not use_start_date:
        start_date = end_date - datetime.timedelta(
            days=num_days_back)
        use_start_date = start_date.strftime(
            ae_consts.COMMON_TICK_DATE_FORMAT)
    dataset_publish_extract = algo_req.get(
        'dataset_publish_extract', False)
    dataset_publish_history = algo_req.get(
        'dataset_publish_history', False)
    dataset_publish_report = algo_req.get(
        'dataset_publish_report', False)

    try:
        if use_custom_algo:
            if verbose:
                log.info(
                    f'inspecting {custom_algo_module} '
                    f'for class {module_name}')
            use_class_member_object = None
            for member in inspect.getmembers(custom_algo_module):
                if module_name in str(member):
                    if verbose:
                        log.info(f'start {name} with {member[1]}')
                    use_class_member_object = member
                    break
            # end of looking over the class definition but did not find it

            if use_class_member_object:
                # the second tuple entry is the class constructor
                if algo_req.get('backtest', False):
                    new_algo_object = use_class_member_object[1](
                        ticker=algo_req['ticker'],
                        config_dict=algo_req)
                else:
                    new_algo_object = use_class_member_object[1](
                        **algo_req)
            else:
                err = (f'{label} - did not find a derived '
                       'analysis_engine.algo.BaseAlgo '
                       f'class in the module file={algo_module_path} '
                       f'for ticker={ticker} algo_name={name}')
                log.error(err)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=err,
                    rec=None)
                task_result = {
                    'status': res['status'],
                    'err': res['err'],
                    'algo_req': algo_req,
                    'rec': rec
                }
                return task_result
            # end of finding a valid algorithm object
        else:
            new_algo_object = ae_algo.BaseAlgo(**algo_req)
        # if using a custom module path or the BaseAlgo

        if new_algo_object:
            # heads up - logging this might have passwords in the algo_req
            # log.debug(f'{name} algorithm request: {algo_req}')
            if verbose:
                log.info(f'{name} - run START ticker={ticker} '
                         f'from {use_start_date} to {use_end_date}')
            if algo_req.get('backtest', False):
                algo_res = run_algo.run_algo(
                    algo=new_algo_object,
                    config_dict=algo_req)
            else:
                algo_res = run_algo.run_algo(
                    algo=new_algo_object,
                    **algo_req)
            created_algo_object = new_algo_object
            if verbose:
                log.info(f'{name} - run DONE ticker={ticker} '
                         f'from {use_start_date} to {use_end_date}')
            if debug:
                if custom_algo_module:
                    log.info(f'{name} - done run_algo '
                             f'custom_algo_module={custom_algo_module} '
                             f'module_name={module_name} ticker={ticker} '
                             f'from {use_start_date} to {use_end_date}')
                else:
                    log.info(
                        f'{name} - done run_algo BaseAlgo ticker={ticker} '
                        f'from {use_start_date} to {use_end_date}')
        else:
            err = (
                f'{label} - missing a derived analysis_engine.algo.BaseAlgo '
                f'class in the module file={algo_module_path} for '
                f'ticker={ticker} algo_name={name}')
            log.error(err)
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=None)
            task_result = {
                'status': res['status'],
                'err': res['err'],
                'algo_req': algo_req,
                'rec': rec
            }
            return task_result
        # end of finding a valid algorithm object

        if not created_algo_object:
            err = (f'{label} - failed creating algorithm object - '
                   f'ticker={ticker} '
                   f'status='
                   f'{ae_consts.get_status(status=algo_res["status"])} '
                   f'error={algo_res["err"]} algo name={name} '
                   f'custom_algo_module={custom_algo_module} '
                   f'module_name={module_name} '
                   f'from {use_start_date} to {use_end_date}')
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=None)
            task_result = {
                'status': res['status'],
                'err': res['err'],
                'algo_req': algo_req,
                'rec': rec
            }
            return task_result
        # end of stop early

        if should_publish_extract_dataset or dataset_publish_extract:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            # note: a redis_db of 0 is falsy here and will disable
            # the redis publish below
            if (extract_config['redis_address'] and
                    extract_config['redis_db'] and
                    extract_config['redis_key']):
                redis_log = (f'redis://{extract_config["redis_address"]}'
                             f'@{extract_config["redis_db"]}'
                             f'/{extract_config["redis_key"]}')
                use_log += f' {redis_log}'
            else:
                extract_config['redis_enabled'] = False
            if (extract_config['s3_address'] and
                    extract_config['s3_bucket'] and
                    extract_config['s3_key']):
                s3_log = (f's3://{extract_config["s3_address"]}'
                          f'/{extract_config["s3_bucket"]}'
                          f'/{extract_config["s3_key"]}')
                use_log += f' {s3_log}'
            else:
                extract_config['s3_enabled'] = False
            if extract_config['output_file']:
                file_log = f'file:{extract_config["output_file"]}'
                use_log += f' {file_log}'

            if verbose:
                log.info(f'{name} - publish - start ticker={ticker} '
                         f'algorithm-ready {use_log}')

            publish_status = created_algo_object.publish_input_dataset(
                **extract_config)
            if publish_status != ae_consts.SUCCESS:
                msg = (
                    'failed to publish algorithm-ready datasets with '
                    f'status '
                    f'{ae_consts.get_status(status=publish_status)} '
                    f'attempted to {use_log}')
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,  # was err=err, which is None at this point
                    rec=None)
                task_result = {
                    'status': res['status'],
                    'err': res['err'],
                    'algo_req': algo_req,
                    'rec': rec
                }
                return task_result
            # end of stop early

            if verbose:
                log.info(f'{name} - publish - done ticker={ticker} '
                         f'algorithm-ready {use_log}')
        # if publishing the algorithm-ready dataset

        if should_publish_history_dataset or dataset_publish_history:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            if (history_config['redis_address'] and
                    history_config['redis_db'] and
                    history_config['redis_key']):
                redis_log = (f'redis://{history_config["redis_address"]}'
                             f'@{history_config["redis_db"]}'
                             f'/{history_config["redis_key"]}')
                use_log += f' {redis_log}'
            if (history_config['s3_address'] and
                    history_config['s3_bucket'] and
                    history_config['s3_key']):
                s3_log = (f's3://{history_config["s3_address"]}'
                          f'/{history_config["s3_bucket"]}'
                          f'/{history_config["s3_key"]}')
                use_log += f' {s3_log}'
            if history_config['output_file']:
                file_log = f'file:{history_config["output_file"]}'
                use_log += f' {file_log}'

            if verbose:
                log.info(f'{name} - publish - start ticker={ticker} '
                         f'trading history {use_log}')

            publish_status = \
                created_algo_object.publish_trade_history_dataset(
                    **history_config)
            if publish_status != ae_consts.SUCCESS:
                msg = (
                    'failed to publish trading history datasets with '
                    f'status '
                    f'{ae_consts.get_status(status=publish_status)} '
                    f'attempted to {use_log}')
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,  # was err=err, which is None at this point
                    rec=None)
                task_result = {
                    'status': res['status'],
                    'err': res['err'],
                    'algo_req': algo_req,
                    'rec': rec
                }
                return task_result
            # end of stop early

            if verbose:
                log.info(f'{name} - publish - done ticker={ticker} '
                         f'trading history {use_log}')
        # if publishing a trading history dataset

        if should_publish_report_dataset or dataset_publish_report:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            if (report_config['redis_address'] and
                    report_config['redis_db'] and
                    report_config['redis_key']):
                redis_log = (f'redis://{report_config["redis_address"]}'
                             f'@{report_config["redis_db"]}'
                             f'/{report_config["redis_key"]}')
                use_log += f' {redis_log}'
            if (report_config['s3_address'] and
                    report_config['s3_bucket'] and
                    report_config['s3_key']):
                s3_log = (f's3://{report_config["s3_address"]}'
                          f'/{report_config["s3_bucket"]}'
                          f'/{report_config["s3_key"]}')
                use_log += f' {s3_log}'
            if report_config['output_file']:
                file_log = f'file:{report_config["output_file"]}'
                use_log += f' {file_log}'

            if verbose:
                log.info(
                    f'{name} - publishing ticker={ticker} trading '
                    f'performance report {use_log}')

            publish_status = created_algo_object.publish_report_dataset(
                **report_config)
            if publish_status != ae_consts.SUCCESS:
                msg = (
                    'failed to publish trading performance '
                    'report datasets with '
                    f'status '
                    f'{ae_consts.get_status(status=publish_status)} '
                    f'attempted to {use_log}')
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,  # was err=err, which is None at this point
                    rec=None)
                task_result = {
                    'status': res['status'],
                    'err': res['err'],
                    'algo_req': algo_req,
                    'rec': rec
                }
                return task_result
            # end of stop early

            if verbose:
                log.info(f'{name} - publish - done ticker={ticker} '
                         f'trading performance report {use_log}')
        # if publishing a trading performance report dataset

        if verbose:
            log.info(
                f'{name} - done publishing datasets for ticker={ticker} '
                f'from {use_start_date} to {use_end_date}')

        rec['history_config'] = history_config
        rec['report_config'] = report_config

        res = build_result.build_result(
            status=ae_consts.SUCCESS,
            err=None,
            rec=rec)
    except Exception as e:
        res = build_result.build_result(
            status=ae_consts.ERR,
            err=('failed - run_distributed_algorithm '
                 f'dict={algo_req} with ex={e}'),
            rec=rec)
        if raise_on_err:
            raise e
        else:
            log.error(f'{label} - {res["err"]}')
    # end of try/ex

    if verbose:
        log.info('task - run_distributed_algorithm done - '
                 f'{label} - status={ae_consts.get_status(res["status"])}')

    task_result = {
        'status': res['status'],
        'err': res['err'],
        'algo_req': algo_req,
        'rec': rec
    }
    return task_result
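
# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): submitting an
# algorithm request to the engine workers. The get_celery_app call and the
# task name mirror the run_on_engine branch of run_custom_algo below; the
# algo_req values and the mod_path are illustrative only.
def _example_submit_distributed_algo():
    # build the Celery app the same way run_custom_algo does below
    app = get_celery_app.get_celery_app(
        name=__name__,
        auth_url=ae_consts.WORKER_BROKER_URL,
        backend_url=ae_consts.WORKER_BACKEND_URL,
        path_to_config_module=ae_consts.WORKER_CELERY_CONFIG_MODULE,
        ssl_options=ae_consts.SSL_OPTIONS,
        transport_options=ae_consts.TRANSPORT_OPTIONS,
        include_tasks=ae_consts.INCLUDE_TASKS)
    # key names match the algo_req.get(...) calls in the task above;
    # the mod_path is a hypothetical install location for the repo's
    # example minute algorithm
    algo_req = {
        'name': 'example-algo',
        'ticker': 'SPY',
        'mod_path': (
            '/opt/sa/analysis_engine/mocks/example_algo_minute.py'),
        'num_days_back': 30,
        'raise_on_err': True,
        'verbose_task': True,
    }
    job_id = app.send_task(
        'analysis_engine.work_tasks.task_run_algo.task_run_algo',
        (algo_req, ))
    return job_id
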
def run_backtest_and_plot_history(config_dict):
    """run_backtest_and_plot_history

    Run a derived algorithm with an algorithm config dictionary

    :param config_dict: algorithm config dictionary
    """
    log.debug('start - sa')

    parser = argparse.ArgumentParser(
        description=('stock analysis tool'))
    parser.add_argument(
        '-t',
        help=('ticker'),
        required=True,
        dest='ticker')
    parser.add_argument(
        '-e',
        help=('file path to extract an '
              'algorithm-ready dataset from redis'),
        required=False,
        dest='algo_extract_loc')
    parser.add_argument(
        '-l',
        help=('show dataset in this file'),
        required=False,
        dest='show_from_file')
    parser.add_argument(
        '-H',
        help=('show trading history dataset in this file'),
        required=False,
        dest='show_history_from_file')
    parser.add_argument(
        '-E',
        help=('show trading performance report dataset in this file'),
        required=False,
        dest='show_report_from_file')
    parser.add_argument(
        '-L',
        help=('restore an algorithm-ready dataset file back into redis'),
        required=False,
        dest='restore_algo_file')
    parser.add_argument(
        '-f',
        help=('save the trading history dataframe to this file'),
        required=False,
        dest='history_json_file')
    parser.add_argument(
        '-J',
        help=('plot action - after preparing you can use: '
              '-J show to open the image (good for debugging)'),
        required=False,
        dest='plot_action')
    parser.add_argument(
        '-b',
        help=('run a backtest using the dataset in '
              'a file path/s3 key/redis key format: '
              'file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
              's3://algoready/SPY-latest.json or '
              'redis://SPY-latest'),
        required=False,
        dest='backtest_loc')
    parser.add_argument(
        '-B',
        help=('optional - broker url for Celery'),
        required=False,
        dest='broker_url')
    parser.add_argument(
        '-C',
        help=('optional - backend url for Celery'),
        required=False,
        dest='backend_url')
    parser.add_argument(
        '-w',
        help=('optional - flag for publishing an algorithm job '
              'using Celery to the ae workers'),
        required=False,
        dest='run_on_engine',
        action='store_true')
    parser.add_argument(
        '-k',
        help=('optional - s3 access key'),
        required=False,
        dest='s3_access_key')
    parser.add_argument(
        '-K',
        help=('optional - s3 secret key'),
        required=False,
        dest='s3_secret_key')
    parser.add_argument(
        '-a',
        help=('optional - s3 address format: <host:port>'),
        required=False,
        dest='s3_address')
    parser.add_argument(
        '-Z',
        help=('optional - s3 secure: default False'),
        required=False,
        dest='s3_secure')
    parser.add_argument(
        '-s',
        help=('optional - start date: YYYY-MM-DD'),
        required=False,
        dest='start_date')
    parser.add_argument(
        '-n',
        help=('optional - end date: YYYY-MM-DD'),
        required=False,
        dest='end_date')
    parser.add_argument(
        '-u',
        help=('optional - s3 bucket name'),
        required=False,
        dest='s3_bucket_name')
    parser.add_argument(
        '-G',
        help=('optional - s3 region name'),
        required=False,
        dest='s3_region_name')
    parser.add_argument(
        '-g',
        help=('path to a custom algorithm module file '
              'on disk. This module must have a single '
              'class that inherits from: '
              'https://github.com/AlgoTraders/stock-analysis-engine/'
              'blob/master/'
              'analysis_engine/algo.py Additionally you '
              'can find the Example-Minute-Algorithm here: '
              'https://github.com/AlgoTraders/stock-analysis-engine/'
              'blob/master/analysis_engine/mocks/'
              'example_algo_minute.py'),
        required=False,
        dest='run_algo_in_file')
    parser.add_argument(
        '-p',
        help=('optional - s3 bucket/file for trading history'),
        required=False,
        dest='algo_history_loc')
    parser.add_argument(
        '-o',
        help=('optional - s3 bucket/file for trading performance report'),
        required=False,
        dest='algo_report_loc')
    parser.add_argument(
        '-r',
        help=('optional - redis_address format: <host:port>'),
        required=False,
        dest='redis_address')
    parser.add_argument(
        '-R',
        help=('optional - redis and s3 key name'),
        required=False,
        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument(
        '-x',
        help=('optional - redis expiration in seconds'),
        required=False,
        dest='redis_expire')
    parser.add_argument(
        '-c',
        help=('optional - algorithm config_file path for setting '
              'up internal algorithm trading strategies and '
              'indicators'),
        required=False,
        dest='config_file')
    parser.add_argument(
        '-v',
        help=('set the Algorithm to verbose logging'),
        required=False,
        dest='verbose_algo',
        action='store_true')
    parser.add_argument(
        '-P',
        help=('set the Algorithm\'s IndicatorProcessor to verbose logging'),
        required=False,
        dest='verbose_processor',
        action='store_true')
    parser.add_argument(
        '-I',
        help=('set all Algorithm\'s Indicators to verbose logging '
              '(note individual indicators support a \'verbose\' key '
              'that can be set to True to debug just one '
              'indicator)'),
        required=False,
        dest='verbose_indicators',
        action='store_true')
    parser.add_argument(
        '-V',
        help=('inspect the datasets an algorithm is processing - this '
              'will slow down processing to show debugging'),
        required=False,
        dest='inspect_datasets',
        action='store_true')
    parser.add_argument(
        '-j',
        help=('run the algorithm on just this specific date in the '
              'datasets - specify the date in a format: '
              'YYYY-MM-DD like: 2018-11-29'),
        required=False,
        dest='run_this_date')
    parser.add_argument(
        '-d',
        help=('debug'),
        required=False,
        dest='debug',
        action='store_true')
    args = parser.parse_args()

    ticker = ae_consts.TICKER
    use_balance = 10000.0
    use_commission = 6.0
    use_start_date = None
    use_end_date = None
    use_config_file = None
    debug = False
    verbose_algo = None
    verbose_processor = None
    verbose_indicators = None
    inspect_datasets = None
    history_json_file = None
    run_this_date = None

    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_secure = ae_consts.S3_SECURE
    redis_address = ae_consts.REDIS_ADDRESS
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE

    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.history_json_file:
        history_json_file = args.history_json_file
    if args.ticker:
        ticker = args.ticker.upper()
    if args.debug:
        debug = True
    if args.verbose_algo:
        verbose_algo = True
    if args.verbose_processor:
        verbose_processor = True
    if args.verbose_indicators:
        verbose_indicators = True
    if args.inspect_datasets:
        inspect_datasets = True
    if args.run_this_date:
        run_this_date = args.run_this_date

    if args.start_date:
        try:
            use_start_date = f'{str(args.start_date)} 00:00:00'
            datetime.datetime.strptime(
                args.start_date,
                ae_consts.COMMON_DATE_FORMAT)
        except Exception as e:
            msg = ('please use a start date formatted as: '
                   f'{ae_consts.COMMON_DATE_FORMAT}\n'
                   f'error was: {e}')
            log.error(msg)
            sys.exit(1)
        # end of testing for a valid date
    # end of args.start_date
    if args.end_date:
        try:
            use_end_date = f'{str(args.end_date)} 00:00:00'
            datetime.datetime.strptime(
                args.end_date,
                ae_consts.COMMON_DATE_FORMAT)
        except Exception as e:
            msg = ('please use an end date formatted as: '
                   f'{ae_consts.COMMON_DATE_FORMAT}\n'
                   f'error was: {e}')
            log.error(msg)
            sys.exit(1)
        # end of testing for a valid date
    # end of args.end_date

    if args.config_file:
        use_config_file = args.config_file
        if not os.path.exists(use_config_file):
            log.error(
                f'Failed: unable to find config file: -c {use_config_file}')
            sys.exit(1)

    if args.backtest_loc:
        backtest_loc = args.backtest_loc
        if ('file:/' not in backtest_loc and
                's3://' not in backtest_loc and
                'redis://' not in backtest_loc):
            log.error(
                'invalid -b <backtest dataset file> specified. '
                f'{backtest_loc} '
                'please use either: '
                '-b file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                '-b s3://algoready/SPY-latest.json or '
                '-b redis://SPY-latest')
            sys.exit(1)

        load_from_s3_bucket = None
        load_from_s3_key = None
        load_from_redis_key = None
        load_from_file = None

        if 's3://' in backtest_loc:
            load_from_s3_bucket = backtest_loc.split('/')[-2]
            load_from_s3_key = backtest_loc.split('/')[-1]
        elif 'redis://' in backtest_loc:
            load_from_redis_key = backtest_loc.split('/')[-1]
        elif 'file:/' in backtest_loc:
            load_from_file = backtest_loc.split(':')[-1]
        # end of parsing supported transport - loading an algo-ready

        load_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=load_from_file,
            s3_bucket=load_from_s3_bucket,
            s3_key=load_from_s3_key,
            redis_key=load_from_redis_key,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            verbose=debug,
            label=f'load-{backtest_loc}')
        if load_from_file:
            load_config['output_file'] = load_from_file
        if load_from_redis_key:
            load_config['redis_key'] = load_from_redis_key
            load_config['redis_enabled'] = True
        if load_from_s3_bucket and load_from_s3_key:
            load_config['s3_bucket'] = load_from_s3_bucket
            load_config['s3_key'] = load_from_s3_key
            load_config['s3_enabled'] = True

    if debug:
        log.info('starting algo')

    config_dict['ticker'] = ticker
    config_dict['balance'] = use_balance
    config_dict['commission'] = use_commission

    if verbose_algo:
        config_dict['verbose'] = verbose_algo
    if verbose_processor:
        config_dict['verbose_processor'] = verbose_processor
    if verbose_indicators:
        config_dict['verbose_indicators'] = verbose_indicators
    if inspect_datasets:
        config_dict['inspect_datasets'] = inspect_datasets
    if run_this_date:
        config_dict['run_this_date'] = run_this_date

    # ExampleCustomAlgo is a derived BaseAlgo class that the original
    # script defines or imports alongside this function
    algo_obj = ExampleCustomAlgo(
        ticker=config_dict['ticker'],
        config_dict=config_dict)

    algo_res = run_algo.run_algo(
        ticker=ticker,
        algo=algo_obj,
        start_date=use_start_date,
        end_date=use_end_date,
        raise_on_err=True)

    if algo_res['status'] != ae_consts.SUCCESS:
        log.error('failed running algo backtest '
                  f'{algo_obj.get_name()} hit status: '
                  f'{ae_consts.get_status(status=algo_res["status"])} '
                  f'error: {algo_res["err"]}')
        return
    # if not successful

    log.info(f'backtest: {algo_obj.get_name()} '
             f'{ae_consts.get_status(status=algo_res["status"])}')

    trading_history_dict = algo_obj.get_history_dataset()
    history_df = trading_history_dict[ticker]
    if not hasattr(history_df, 'to_json'):
        return

    if history_json_file:
        log.info(f'saving history to: {history_json_file}')
        history_df.to_json(
            history_json_file,
            orient='records',
            date_format='iso')

    log.info('plotting history')

    use_xcol = 'date'
    use_as_date_format = '%d\n%b'
    xlabel = f'Dates vs {trading_history_dict["algo_name"]} values'
    ylabel = f'Algo {trading_history_dict["algo_name"]}\nvalues'
    df_filter = (history_df['close'] > 0.01)
    first_date = history_df[df_filter]['date'].iloc[0]
    end_date = history_df[df_filter]['date'].iloc[-1]
    if config_dict['timeseries'] == 'minute':
        use_xcol = 'minute'
        use_as_date_format = '%d %H:%M:%S\n%b'
        first_date = history_df[df_filter]['minute'].iloc[0]
        end_date = history_df[df_filter]['minute'].iloc[-1]
    title = (f'Trading History {ticker} for Algo '
             f'{trading_history_dict["algo_name"]}\n'
             f'Backtest dates from {first_date} to {end_date}')

    # set default hloc columns:
    green = None
    orange = None
    red = 'close'
    blue = 'balance'

    if debug:
        for i, r in history_df.iterrows():
            log.debug(f'{r["minute"]} - {r["close"]}')

    plot_trading_history.plot_trading_history(
        title=title,
        df=history_df,
        red=red,
        blue=blue,
        green=green,
        orange=orange,
        date_col=use_xcol,
        date_format=use_as_date_format,
        xlabel=xlabel,
        ylabel=ylabel,
        df_filter=df_filter,
        show_plot=True,
        dropna_for_all=True)
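
# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): a minimal
# algorithm config dictionary for run_backtest_and_plot_history. Only the
# 'timeseries' key is read directly by the plotting code above; indicator
# layouts would come from the repo's example config files. Note the function
# also parses CLI flags, so the ticker arrives via -t on the command line.
def _example_plot_backtest():
    config_dict = {
        'timeseries': 'minute',  # 'minute' or 'day' picks the x-axis column
    }
    # illustrative shell usage for the flags parsed above:
    #   python my_backtest.py -t SPY -s 2018-11-01 -n 2018-11-29 -v
    run_backtest_and_plot_history(config_dict=config_dict)
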
def run_custom_algo(
        mod_path,
        ticker='SPY',
        balance=50000,
        commission=6.0,
        start_date=None,
        end_date=None,
        name='myalgo',
        auto_fill=True,
        config_file=None,
        config_dict=None,
        load_from_s3_bucket=None,
        load_from_s3_key=None,
        load_from_redis_key=None,
        load_from_file=None,
        load_compress=False,
        load_publish=True,
        load_config=None,
        report_redis_key=None,
        report_s3_bucket=None,
        report_s3_key=None,
        report_file=None,
        report_compress=False,
        report_publish=True,
        report_config=None,
        history_redis_key=None,
        history_s3_bucket=None,
        history_s3_key=None,
        history_file=None,
        history_compress=False,
        history_publish=True,
        history_config=None,
        extract_redis_key=None,
        extract_s3_bucket=None,
        extract_s3_key=None,
        extract_file=None,
        extract_save_dir=None,
        extract_compress=False,
        extract_publish=True,
        extract_config=None,
        publish_to_s3=True,
        publish_to_redis=True,
        publish_to_slack=True,
        dataset_type=ae_consts.SA_DATASET_TYPE_ALGO_READY,
        serialize_datasets=ae_consts.DEFAULT_SERIALIZED_DATASETS,
        compress=False,
        encoding='utf-8',
        redis_enabled=True,
        redis_key=None,
        redis_address=None,
        redis_db=None,
        redis_password=None,
        redis_expire=None,
        redis_serializer='json',
        redis_encoding='utf-8',
        s3_enabled=True,
        s3_key=None,
        s3_address=None,
        s3_bucket=None,
        s3_access_key=None,
        s3_secret_key=None,
        s3_region_name=None,
        s3_secure=False,
        slack_enabled=False,
        slack_code_block=False,
        slack_full_width=False,
        timeseries=None,
        trade_strategy=None,
        verbose=False,
        debug=False,
        dataset_publish_extract=False,
        dataset_publish_history=False,
        dataset_publish_report=False,
        run_on_engine=False,
        auth_url=ae_consts.WORKER_BROKER_URL,
        backend_url=ae_consts.WORKER_BACKEND_URL,
        include_tasks=ae_consts.INCLUDE_TASKS,
        ssl_options=ae_consts.SSL_OPTIONS,
        transport_options=ae_consts.TRANSPORT_OPTIONS,
        path_to_config_module=ae_consts.WORKER_CELERY_CONFIG_MODULE,
        raise_on_err=True):
    """run_custom_algo

    Run a custom algorithm that derives the
    ``analysis_engine.algo.BaseAlgo`` class

    .. note:: Make sure to only have **1**
        class defined in an algo module. Imports from
        other modules should work just fine.

    **Algorithm arguments**

    :param mod_path: file path to custom algorithm class module
    :param ticker: ticker symbol
    :param balance: float - starting balance capital
        for creating buys and sells
    :param commission: float - cost per buy or sell
    :param name: string - name for tracking algorithm
        in the logs
    :param start_date: string - start date for backtest with
        format ``YYYY-MM-DD HH:MM:SS``
    :param end_date: end date for backtest with
        format ``YYYY-MM-DD HH:MM:SS``
    :param auto_fill: optional - boolean for auto filling
        buy and sell orders for backtesting
        (default is ``True``)
    :param config_file: path to a json file
        containing custom algorithm object
        member values (like indicator configuration and
        predict future date units ahead for a backtest)
    :param config_dict: optional - dictionary that
        can be passed to derived class implementations
        of: ``def load_from_config(config_dict=config_dict)``

    **Timeseries**

    :param timeseries: optional - string to set ``day`` or
        ``minute`` backtesting or live trading
        (default is ``minute``)

    **Trading Strategy**

    :param trade_strategy: optional - string to set the type of
        ``Trading Strategy`` for backtesting or live trading
        (default is ``count``)

    **Running Distributed Algorithms on the Engine Workers**

    :param run_on_engine: optional - boolean flag for publishing
        custom algorithms to Celery ae workers for distributing
        algorithm workloads
        (default is ``False`` which will run algos locally)
        this is required for distributing algorithms
    :param auth_url: Celery broker address
        (default is ``redis://localhost:6379/11``
        or the ``analysis_engine.consts.WORKER_BROKER_URL``
        environment variable)
        this is required for distributing algorithms
    :param backend_url: Celery backend address
        (default is ``redis://localhost:6379/12``
        or the ``analysis_engine.consts.WORKER_BACKEND_URL``
        environment variable)
        this is required for distributing algorithms
    :param include_tasks: list of modules containing tasks to add
        (default is ``analysis_engine.consts.INCLUDE_TASKS``)
    :param ssl_options: security options dictionary
        (default is ``analysis_engine.consts.SSL_OPTIONS``)
    :param transport_options: transport options dictionary
        (default is ``analysis_engine.consts.TRANSPORT_OPTIONS``)
    :param path_to_config_module: config module for advanced
        Celery worker connectivity requirements
        (default is ``analysis_engine.work_tasks.celery_config``
        or ``analysis_engine.consts.WORKER_CELERY_CONFIG_MODULE``)

    **Load Algorithm-Ready Dataset From Source**

    Use these arguments to load algorithm-ready datasets
    from supported sources (file, s3 or redis)

    :param load_from_s3_bucket: optional - string load the algo from a
        previously-created s3 bucket holding an s3 key with an
        algorithm-ready dataset for use with: ``handle_data``
    :param load_from_s3_key: optional - string load the algo from a
        previously-created s3 key holding an
        algorithm-ready dataset for use with: ``handle_data``
    :param load_from_redis_key: optional - string load the algo from a
        previously-created redis key holding an
        algorithm-ready dataset for use with: ``handle_data``
    :param load_from_file: optional - string path to
        a previously-created local file holding an
        algorithm-ready dataset for use with: ``handle_data``
    :param load_compress: optional - boolean flag for toggling
        to decompress or not when loading an algorithm-ready
        dataset (``True`` means the dataset must be decompressed
        to load correctly inside an algorithm to run a backtest)
    :param load_publish: boolean - toggle publishing
        the load progress to slack, s3, redis or a file
        (default is ``True``)
    :param load_config: optional - dictionary for setting
        member variables to load an algorithm-ready dataset
        from a file, s3 or redis

    **Publishing Control Bool Flags**

    :param publish_to_s3: optional - boolean for
        toggling publishing to s3 on/off
        (default is ``True``)
    :param publish_to_redis: optional - boolean for
        publishing to redis on/off
        (default is ``True``)
    :param publish_to_slack: optional - boolean for
        publishing to slack
        (default is ``True``)

    **Algorithm Trade History Arguments**

    :param history_redis_key: optional - string where the algorithm
        trading history will be stored in a redis key
    :param history_s3_bucket: optional - string where the algorithm
        trading history will be stored in an s3 bucket
    :param history_s3_key: optional - string where the algorithm
        trading history will be stored in an s3 key
    :param history_file: optional - string key where the algorithm
        trading history will be stored in a file
        serialized as a json-string
    :param history_compress: optional - boolean flag for toggling
        to decompress or not when loading an algorithm-ready
        dataset (``True`` means the dataset will be compressed
        on publish)
    :param history_publish: boolean - toggle publishing
        the history to s3, redis or a file
        (default is ``True``)
    :param history_config: optional - dictionary for setting
        member variables to publish an algo ``trade history``
        to s3, redis, a file or slack

    **Algorithm Trade Performance Report Arguments (Output Dataset)**

    :param report_redis_key: optional - string where the algorithm
        ``trading performance report`` (report)
        will be stored in a redis key
    :param report_s3_bucket: optional - string where the algorithm
        report will be stored in an s3 bucket
    :param report_s3_key: optional - string where the algorithm
        report will be stored in an s3 key
    :param report_file: optional - string key where the algorithm
        report will be stored in a file
        serialized as a json-string
    :param report_compress: optional - boolean flag for toggling
        to decompress or not when loading an algorithm-ready
        dataset (``True`` means the dataset will be compressed
        on publish)
    :param report_publish: boolean - toggle publishing
        the ``trading performance report`` to s3, redis or a file
        (default is ``True``)
    :param report_config: optional - dictionary for setting
        member variables to publish an algo
        ``trading performance report`` to s3, redis, a file
        or slack

    **Extract an Algorithm-Ready Dataset Arguments**

    :param extract_redis_key: optional - string where the
        algorithm-ready dataset will be stored in a redis key
    :param extract_s3_bucket: optional - string where the
        algorithm-ready dataset will be stored in an s3 bucket
    :param extract_s3_key: optional - string where the
        algorithm-ready dataset will be stored in an s3 key
    :param extract_file: optional - string key where the
        algorithm-ready dataset will be stored in a file
        serialized as a json-string
    :param extract_save_dir: optional - string path to
        auto-generated files from the algo
    :param extract_compress: optional - boolean flag for toggling
        to decompress or not when loading an algorithm-ready
        dataset (``True`` means the dataset will be compressed
        on publish)
    :param extract_publish: boolean - toggle publishing
        the used ``algorithm-ready dataset`` to s3, redis or a file
        (default is ``True``)
    :param extract_config: optional - dictionary for setting
        member variables to publish an algo
        ``algorithm-ready dataset`` to s3, redis, a file or slack

    **Dataset Arguments**

    :param dataset_type: optional - dataset type
        (default is ``SA_DATASET_TYPE_ALGO_READY``)
    :param serialize_datasets: optional - list of dataset names to
        deserialize in the dataset
        (default is ``DEFAULT_SERIALIZED_DATASETS``)
    :param encoding: optional - string for data encoding

    **Publish Algorithm Datasets to S3, Redis or a File**

    :param dataset_publish_extract: optional - bool for publishing
        the algorithm's ``algorithm-ready``
        dataset to: s3, redis or file
    :param dataset_publish_history: optional - bool for publishing
        the algorithm's ``trading history``
        dataset to: s3, redis or file
    :param dataset_publish_report: optional - bool for publishing
        the algorithm's ``trading performance report``
        dataset to: s3, redis or file

    **Redis connectivity arguments**

    :param redis_enabled: bool - toggle for auto-caching all
        datasets in Redis
        (default is ``True``)
    :param redis_key: string - key to save the data in redis
        (default is ``None``)
    :param redis_address: Redis connection string format: ``host:port``
        (default is ``localhost:6379``)
    :param redis_db: Redis db to use
        (default is ``0``)
    :param redis_password: optional - Redis password
        (default is ``None``)
    :param redis_expire: optional - Redis expire value
        (default is ``None``)
    :param redis_serializer: not used yet - support for future
        pickle objects in redis
    :param redis_encoding: format of the encoded key in redis

    **Minio (S3) connectivity arguments**

    :param s3_enabled: bool - toggle for auto-archiving on Minio (S3)
        (default is ``True``)
    :param s3_key: string - key to save the data in s3
        (default is ``None``)
    :param s3_address: Minio S3 connection string format: ``host:port``
        (default is ``localhost:9000``)
    :param s3_bucket: S3 Bucket for storing the artifacts
        (default is ``dev``) which should be viewable on a browser:
        http://localhost:9000/minio/dev/
    :param s3_access_key: S3 Access key
        (default is ``trexaccesskey``)
    :param s3_secret_key: S3 Secret key
        (default is ``trex123321``)
    :param s3_region_name: S3 region name
        (default is ``us-east-1``)
    :param s3_secure: Transmit using tls encryption
        (default is ``False``)

    **Slack arguments**

    :param slack_enabled: optional - boolean for
        publishing to slack
    :param slack_code_block: optional - boolean for
        publishing as a code block in slack
    :param slack_full_width: optional - boolean for
        publishing to slack using the full width allowed

    **Debugging arguments**

    :param debug: optional - bool for debug tracking
    :param verbose: optional - bool for increasing logging
    :param raise_on_err: boolean - set this to ``False`` on prod
        to ensure exceptions do not interrupt services.
        With the default (``True``) any exceptions from the library
        and your own algorithm are sent back out immediately,
        exiting the backtest.
    """
    module_name = 'BaseAlgo'
    custom_algo_module = None
    new_algo_object = None
    use_custom_algo = False
    found_algo_module = True

    should_publish_extract_dataset = False
    should_publish_history_dataset = False
    should_publish_report_dataset = False

    use_config_file = None
    use_config_dict = config_dict
    if config_file:
        if os.path.exists(config_file):
            use_config_file = config_file
            if not config_dict:
                try:
                    with open(config_file, 'r') as f:
                        use_config_dict = json.loads(f.read())
                except Exception as e:
                    msg = (
                        f'failed parsing json config_file={config_file} '
                        f'with ex={e}')
                    log.error(msg)
                    raise Exception(msg)
    # end of loading the config_file

    err = None
    if mod_path:
        # reset before scanning the loaded module (mirrors the task
        # above); without this the not-found check below cannot trigger
        found_algo_module = False
        module_name = mod_path.split('/')[-1]
        loader = importlib.machinery.SourceFileLoader(
            module_name,
            mod_path)
        custom_algo_module = types.ModuleType(loader.name)
        loader.exec_module(custom_algo_module)
        use_custom_algo = True

        for member in inspect.getmembers(custom_algo_module):
            if module_name in str(member):
                found_algo_module = True
                break
        # for all members in this custom module file
    # if loading a custom algorithm module from a file on disk

    if not found_algo_module:
        err = (
            f'unable to find custom algorithm module={custom_algo_module}')
        if mod_path:
            err = (
                'analysis_engine.run_custom_algo.run_custom_algo was unable '
                f'to find custom algorithm module={custom_algo_module} with '
                f'provided path to \n file: {mod_path} \n'
                '\n'
                'Please confirm '
                'that the class inherits from the BaseAlgo class like:\n'
                '\n'
                'import analysis_engine.algo\n'
                'class MyAlgo(analysis_engine.algo.BaseAlgo):\n '
                '\n'
                'If it is then please file an issue on github:\n '
                'https://github.com/AlgoTraders/stock-analysis-engine/'
                'issues/new \n\nFor now this error results in a shutdown'
                '\n')
        # if mod_path set
        if verbose or debug:
            log.error(err)
        return build_result.build_result(
            status=ae_consts.ERR,
            err=err,
            rec=None)
    # if not found_algo_module

    use_start_date = start_date
    use_end_date = end_date
    if not use_end_date:
        end_date = datetime.datetime.utcnow()
        use_end_date = end_date.strftime(
            ae_consts.COMMON_TICK_DATE_FORMAT)
    if not use_start_date:
        start_date = end_date - datetime.timedelta(days=75)
        use_start_date = start_date.strftime(
            ae_consts.COMMON_TICK_DATE_FORMAT)
        if verbose:
            log.info(
                f'{name} {ticker} setting default '
                f'start_date={use_start_date}')

    # Load an algorithm-ready dataset from:
    # file, s3, or redis
    if not load_config:
        load_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=load_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label=f'load-{name}')
        if load_from_file:
            load_config['output_file'] = load_from_file
        if load_from_redis_key:
            load_config['redis_key'] = load_from_redis_key
            load_config['redis_enabled'] = True
        if load_from_s3_bucket and load_from_s3_key:
            load_config['s3_bucket'] = load_from_s3_bucket
            load_config['s3_key'] = load_from_s3_key
            load_config['s3_enabled'] = True
    # end of building load_config dictionary if not already set

    # Automatically save all datasets to an algorithm-ready:
    # file, s3, or redis
    if not extract_config:
        extract_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=extract_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label=f'extract-{name}')
        should_publish_extract_dataset = False
        if extract_file:
            extract_config['output_file'] = extract_file
            should_publish_extract_dataset = True
        if extract_redis_key and publish_to_redis:
            extract_config['redis_key'] = extract_redis_key
            extract_config['redis_enabled'] = True
            should_publish_extract_dataset = True
        if extract_s3_bucket and extract_s3_key and publish_to_s3:
            extract_config['s3_bucket'] = extract_s3_bucket
            extract_config['s3_key'] = extract_s3_key
            extract_config['s3_enabled'] = True
            should_publish_extract_dataset = True
        else:
            extract_config['s3_enabled'] = False
    # end of building extract_config dictionary if not already set

    # Automatically save the trading performance report:
    # file, s3, or redis
    if not report_config:
        report_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=report_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label=f'report-{name}')
        should_publish_report_dataset = False
        if report_file:
            report_config['output_file'] = report_file
            should_publish_report_dataset = True
        if report_redis_key and publish_to_redis:
            report_config['redis_key'] = report_redis_key
            report_config['redis_enabled'] = True
            should_publish_report_dataset = True
        if report_s3_bucket and report_s3_key and publish_to_s3:
            report_config['s3_bucket'] = report_s3_bucket
            report_config['s3_key'] = report_s3_key
            report_config['s3_enabled'] = True
            should_publish_report_dataset = True
    # end of building report_config dictionary if not already set

    # Automatically save the trade history:
    # file, s3, or redis
    if not history_config:
        history_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            # was compress=report_compress (copy-paste);
            # history_compress is the matching flag
            compress=history_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label=f'history-{name}')
        should_publish_history_dataset = False
        if history_file:
            history_config['output_file'] = history_file
            should_publish_history_dataset = True
        if history_redis_key and publish_to_redis:
            history_config['redis_key'] = history_redis_key
            history_config['redis_enabled'] = True
            should_publish_history_dataset = True
        if history_s3_bucket and history_s3_key and publish_to_s3:
            history_config['s3_bucket'] = history_s3_bucket
            history_config['s3_key'] = history_s3_key
            history_config['s3_enabled'] = True
            should_publish_history_dataset = True
    # end of building history_config dictionary if not already set

    if verbose:
        remove_vals = [
            's3_access_key',
            's3_secret_key',
            'redis_password'
        ]
        debug_extract_config = {}
        for k in extract_config:
            if k not in remove_vals:
                debug_extract_config[k] = extract_config[k]
        debug_report_config = {}
        for k in report_config:
            if k not in remove_vals:
                debug_report_config[k] = report_config[k]
        debug_history_config = {}
        for k in history_config:
            if k not in remove_vals:
                debug_history_config[k] = history_config[k]
        debug_load_config = {}
        for k in load_config:
            if k not in remove_vals:
                debug_load_config[k] = load_config[k]
        log.info(f'{name} {ticker} using extract config '
                 f'{ae_consts.ppj(debug_extract_config)}')
        log.info(f'{name} {ticker} using report config '
                 f'{ae_consts.ppj(debug_report_config)}')
        log.info(f'{name} {ticker} using trade history config '
                 f'{ae_consts.ppj(debug_history_config)}')
        log.info(f'{name} {ticker} using load config '
                 f'{ae_consts.ppj(debug_load_config)}')
        log.info(f'{name} {ticker} - building algo request')
    # end of verbose

    algo_req = build_algo_request.build_algo_request(
        ticker=ticker,
        balance=balance,
        commission=commission,
        start_date=use_start_date,
        end_date=use_end_date,
        timeseries=timeseries,
        trade_strategy=trade_strategy,
        config_file=use_config_file,
        config_dict=use_config_dict,
        load_config=load_config,
        history_config=history_config,
        report_config=report_config,
        extract_config=extract_config,
        label=name)
    algo_req['name'] = name
    algo_req['should_publish_extract_dataset'] = (
        should_publish_extract_dataset)
    algo_req['should_publish_history_dataset'] = (
        should_publish_history_dataset)
    algo_req['should_publish_report_dataset'] = (
        should_publish_report_dataset)

    algo_res = build_result.build_result(
        status=ae_consts.NOT_RUN,
        err=None,
        rec=None)

    if run_on_engine:
        rec = {
            'algo_req': algo_req,
            'task_id': None
        }
        task_name = ('analysis_engine.work_tasks.'
                     'task_run_algo.task_run_algo')
        if verbose:
            log.info(f'starting distributed algo task={task_name}')
        elif debug:
            log.info(
                'starting distributed algo by publishing to '
                f'task={task_name} broker={auth_url} backend={backend_url}')

        # Get the Celery app
        app = get_celery_app.get_celery_app(
            name=__name__,
            auth_url=auth_url,
            backend_url=backend_url,
            path_to_config_module=path_to_config_module,
            ssl_options=ssl_options,
            transport_options=transport_options,
            include_tasks=include_tasks)

        if debug:
            log.info(f'calling distributed algo task={task_name} '
                     f'request={ae_consts.ppj(algo_req)}')
        elif verbose:
            log.info(f'calling distributed algo task={task_name}')

        job_id = app.send_task(
            task_name,
            (algo_req, ))
        if verbose:
            log.info(f'calling task={task_name} - success job_id={job_id}')

        rec['task_id'] = job_id
        algo_res = build_result.build_result(
            status=ae_consts.SUCCESS,
            err=None,
            rec=rec)
        return algo_res
    # end of run_on_engine

    if use_custom_algo:
        if verbose:
            log.info(
                f'inspecting {custom_algo_module} for class {module_name}')
        use_class_member_object = None
        for member in inspect.getmembers(custom_algo_module):
            if module_name in str(member):
                if verbose:
                    log.info(f'start {name} with {member[1]}')
                use_class_member_object = member
                break
        # end of looking over the class definition but did not find it

        if use_class_member_object:
            # the second tuple entry is the class constructor
            new_algo_object = use_class_member_object[1](**algo_req)
        else:
            err = ('did not find a derived analysis_engine.algo.BaseAlgo '
                   f'class in the module file={mod_path} '
                   f'for ticker={ticker} algo_name={name}')
            if verbose or debug:
                log.error(err)
            return build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=None)
        # end of finding a valid algorithm object
    else:
        new_algo_object = ae_algo.BaseAlgo(**algo_req)
    # if using a custom module path or the BaseAlgo

    if new_algo_object:
        # heads up - logging this might have passwords in the algo_req
        # log.debug(
        #     f'{name} algorithm request: {algo_req}')
        if verbose:
            log.info(f'{name} - run ticker={ticker} from {use_start_date} '
                     f'to {use_end_date}')
        algo_res = run_algo.run_algo(
            algo=new_algo_object,
            raise_on_err=raise_on_err,
            **algo_req)
        algo_res['algo'] = new_algo_object
        if verbose:
            log.info(f'{name} - run ticker={ticker} from {use_start_date} '
                     f'to {use_end_date}')
        if custom_algo_module:
            if verbose:
                log.info(f'{name} - done run_algo '
                         f'custom_algo_module={custom_algo_module} '
                         f'module_name={module_name} ticker={ticker} '
                         f'from {use_start_date} to {use_end_date}')
        else:
            if verbose:
                log.info(f'{name} - done run_algo BaseAlgo ticker={ticker} '
                         f'from {use_start_date} to {use_end_date}')
    else:
        err = ('missing a derived analysis_engine.algo.BaseAlgo '
               f'class in the module file={mod_path} for ticker={ticker} '
               f'algo_name={name}')
        return build_result.build_result(
            status=ae_consts.ERR,
            err=err,
            rec=None)
    # end of finding a valid algorithm object

    algo = algo_res.get('algo', None)
    if not algo:
        err = (f'failed creating algorithm object - ticker={ticker} '
               f'status={ae_consts.get_status(status=algo_res["status"])} '
               f'error={algo_res["err"]} algo name={name} '
               f'custom_algo_module={custom_algo_module} '
               f'module_name={module_name} '
               f'from {use_start_date} to {use_end_date}')
        return build_result.build_result(
            status=ae_consts.ERR,
            err=err,
            rec=None)

    if should_publish_extract_dataset or dataset_publish_extract:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (extract_config['redis_address'] and
                extract_config['redis_db'] >= 0 and
                extract_config['redis_key']):
            redis_log = (
                f'redis://{extract_config["redis_address"]}'
                f'@{extract_config["redis_db"]}'
                f'/{extract_config["redis_key"]}')
            use_log += f' {redis_log}'
        else:
            extract_config['redis_enabled'] = False
        if (extract_config['s3_address'] and
                extract_config['s3_bucket'] and
                extract_config['s3_key']):
            s3_log = (
                f's3://{extract_config["s3_address"]}'
                f'/{extract_config["s3_bucket"]}'
                f'/{extract_config["s3_key"]}')
            use_log += f' {s3_log}'
        else:
            extract_config['s3_enabled'] = False
        if extract_config['output_file']:
            file_log = f'file:{extract_config["output_file"]}'
            use_log += f' {file_log}'

        if verbose:
            log.info(f'{name} - publish - start ticker={ticker} '
                     f'algorithm-ready {use_log}')

        publish_status = algo.publish_input_dataset(**extract_config)
        if publish_status != ae_consts.SUCCESS:
            msg = (
                'failed to publish algorithm-ready datasets '
                f'with status {ae_consts.get_status(status=publish_status)} '
                f'attempted to {use_log}')
            log.error(msg)
            return build_result.build_result(
                status=ae_consts.ERR,
                err=msg,  # was err=err, which is None at this point
                rec=None)

        if verbose:
            log.info(f'{name} - publish - done ticker={ticker} '
                     f'algorithm-ready {use_log}')
    # if publishing the algorithm-ready dataset

    if should_publish_history_dataset or dataset_publish_history:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (history_config['redis_address'] and
                history_config['redis_db'] >= 0 and
                history_config['redis_key']):
            redis_log = (
                f'redis://{history_config["redis_address"]}'
                f'@{history_config["redis_db"]}'
                f'/{history_config["redis_key"]}')
            use_log += f' {redis_log}'
        else:
            history_config['redis_enabled'] = False
        if (history_config['s3_address'] and
                history_config['s3_bucket'] and
                history_config['s3_key']):
            s3_log = (
                f's3://{history_config["s3_address"]}'
                f'/{history_config["s3_bucket"]}'
                f'/{history_config["s3_key"]}')
            use_log += f' {s3_log}'
        else:
            history_config['s3_enabled'] = False
        if history_config['output_file']:
            file_log = f'file:{history_config["output_file"]}'
            use_log += f' {file_log}'

        if verbose:
            log.info(f'{name} - publish - start ticker={ticker} trading '
                     f'history {use_log}')

        publish_status = algo.publish_trade_history_dataset(
            **history_config)
        if publish_status != ae_consts.SUCCESS:
            msg = (
                'failed to publish trading history datasets '
                f'with status {ae_consts.get_status(status=publish_status)} '
                f'attempted to {use_log}')
            log.error(msg)
            return build_result.build_result(
                status=ae_consts.ERR,
                err=msg,  # was err=err, which is None at this point
                rec=None)

        if verbose:
            log.info(f'{name} - publish - done ticker={ticker} trading '
                     f'history {use_log}')
    # if publishing a trading history dataset

    if should_publish_report_dataset or dataset_publish_report:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (report_config['redis_address'] and
                report_config['redis_db'] >= 0 and
                report_config['redis_key']):
            redis_log = (
                f'redis://{report_config["redis_address"]}'
                f'@{report_config["redis_db"]}'
                f'/{report_config["redis_key"]}')
            use_log += f' {redis_log}'
        else:
            report_config['redis_enabled'] = False
        if (report_config['s3_address'] and
                report_config['s3_bucket'] and
                report_config['s3_key']):
            s3_log = (
                f's3://{report_config["s3_address"]}'
                f'/{report_config["s3_bucket"]}'
                f'/{report_config["s3_key"]}')
            use_log += f' {s3_log}'
        else:
            report_config['s3_enabled'] = False
        if report_config['output_file']:
            file_log = f'file:{report_config["output_file"]}'
            use_log += f' {file_log}'

        if verbose:
            log.info(
                f'{name} - publishing ticker={ticker} trading performance '
                f'report {use_log}')

        publish_status = algo.publish_report_dataset(**report_config)
        if publish_status != ae_consts.SUCCESS:
            msg = (
                'failed to publish trading performance report datasets '
                f'with status {ae_consts.get_status(status=publish_status)} '
                f'attempted to {use_log}')
            log.error(msg)
            return build_result.build_result(
                status=ae_consts.ERR,
                err=msg,  # was err=err, which is None at this point
                rec=None)

        if verbose:
            log.info(
                f'{name} - publish - done ticker={ticker} trading '
                f'performance report {use_log}')
    # if publishing a trading performance report dataset

    if verbose:
        log.info(f'{name} - done publishing datasets for ticker={ticker} '
                 f'from {use_start_date} to {use_end_date}')

    return algo_res
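
# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): running a custom
# algorithm module locally through run_custom_algo and pulling its trading
# history. The keyword names come from the signature above; the mod_path,
# dates, and output file are illustrative only.
def _example_run_custom_algo():
    res = run_custom_algo(
        mod_path='/opt/sa/analysis_engine/mocks/example_algo_minute.py',
        ticker='SPY',
        balance=50000,
        start_date='2018-11-01 00:00:00',
        end_date='2018-11-29 00:00:00',
        history_file='/tmp/SPY-history.json',  # enables the history publish
        raise_on_err=True)
    if res['status'] == ae_consts.SUCCESS:
        # run_custom_algo attaches the algorithm object on success
        algo = res.get('algo')
        return algo.get_history_dataset()
    return None
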
def run_distributed_algorithm(self, algo_req): """run_distributed_algorithm Process a distributed Algorithm :param algo_req: dictionary for key/values for running an algorithm using Celery workers """ label = algo_req.get('name', 'ae-algo') verbose = algo_req.get('verbose', False) debug = algo_req.get('debug', False) # please be careful logging prod passwords: if verbose or debug: log.info('task - {} - start ' 'algo_req={}'.format(label, algo_req)) else: log.info('task - {} - start '.format(label)) # end of start log rec = {} res = build_result.build_result(status=ae_consts.NOT_RUN, err=None, rec=rec) created_algo_object = None custom_algo_module = None new_algo_object = None use_custom_algo = False found_algo_module = True # assume the BaseAlgo should_publish_extract_dataset = False should_publish_history_dataset = False should_publish_report_dataset = False ticker = algo_req.get('ticker', 'SPY') num_days_back = algo_req.get('num_days_back', 75) name = algo_req.get('name', 'ae-algo') algo_module_path = algo_req.get('mod_path', None) module_name = algo_req.get('module_name', 'BaseAlgo') custom_algo_module = algo_req.get('custom_algo_module', None) new_algo_object = algo_req.get('new_algo_object', None) use_custom_algo = algo_req.get('use_custom_algo', False) should_publish_extract_dataset = algo_req.get( 'should_publish_extract_dataset', False) should_publish_history_dataset = algo_req.get( 'should_publish_history_dataset', False) should_publish_report_dataset = algo_req.get( 'should_publish_report_dataset', False) start_date = algo_req.get('start_date', None) end_date = algo_req.get('end_date', None) raise_on_err = algo_req.get('raise_on_err', False) report_config = algo_req.get('report_config', None) history_config = algo_req.get('history_config', None) extract_config = algo_req.get('extract_config', None) err = None if algo_module_path: found_algo_module = False module_name = algo_module_path.split('/')[-1] loader = importlib.machinery.SourceFileLoader(module_name, algo_module_path) custom_algo_module = types.ModuleType(loader.name) loader.exec_module(custom_algo_module) use_custom_algo = True for member in inspect.getmembers(custom_algo_module): if module_name in str(member): found_algo_module = True break # for all members in this custom module file # if loading a custom algorithm module from a file on disk if not found_algo_module: err = ('{} - unable to find custom algorithm module={} ' 'module_path={}'.format(label, custom_algo_module, algo_module_path)) if algo_module_path: err = ( '{} - analysis_engine.work_tasks.run_distributed_algorithm ' 'was unable ' 'to find custom algorithm module={} with provided path to \n ' 'file: {} \n' '\n' 'Please confirm ' 'that the class inherits from the BaseAlgo class like:\n' '\n' 'import analysis_engine.algo\n' 'class MyAlgo(analysis_engine.algo.BaseAlgo):\n ' '\n' 'If it is then please file an issue on github:\n ' 'https://github.com/AlgoTraders/stock-analysis-engine/' 'issues/new \n\nFor now this error results in a shutdown' '\n'.format(label, custom_algo_module, algo_module_path)) # if algo_module_path set log.error(err) res = build_result.build_result(status=ae_consts.ERR, err=err, rec=None) return get_task_results.get_task_results(work_dict=algo_req, result=res) # if not found_algo_module use_start_date = start_date use_end_date = end_date if not use_end_date: end_date = datetime.datetime.utcnow() use_end_date = end_date.strftime(ae_consts.COMMON_TICK_DATE_FORMAT) if not use_start_date: start_date = end_date - 
datetime.timedelta(days=num_days_back) use_start_date = start_date.strftime(ae_consts.COMMON_TICK_DATE_FORMAT) dataset_publish_extract = algo_req.get('dataset_publish_extract', False) dataset_publish_history = algo_req.get('dataset_publish_history', False) dataset_publish_report = algo_req.get('dataset_publish_report', False) try: if use_custom_algo: log.info('inspecting {} for class {}'.format( custom_algo_module, module_name)) use_class_member_object = None for member in inspect.getmembers(custom_algo_module): if module_name in str(member): log.info('start {} with {}'.format(name, member[1])) use_class_member_object = member break # end of looking over the class definition but did not find it if use_class_member_object: new_algo_object = member[1](**algo_req) else: err = ('{} - did not find a derived ' 'analysis_engine.algo.BaseAlgo ' 'class in the module file={} ' 'for ticker={} algo_name={}'.format( label, algo_module_path, ticker, name)) log.error(err) res = build_result.build_result(status=ae_consts.ERR, err=err, rec=None) return get_task_results.get_task_results(work_dict=algo_req, result=res) # end of finding a valid algorithm object else: new_algo_object = ae_algo.BaseAlgo(**algo_req) # if using a custom module path or the BaseAlgo if new_algo_object: # heads up - logging this might have passwords in the algo_req # log.debug( # '{} algorithm request: {}'.format( # name, # algo_req)) log.info('{} - run ticker={} from {} to {}'.format( name, ticker, use_start_date, use_end_date)) algo_res = run_algo.run_algo(algo=new_algo_object, raise_on_err=raise_on_err, **algo_req) created_algo_object = new_algo_object log.info('{} - run ticker={} from {} to {}'.format( name, ticker, use_start_date, use_end_date)) if custom_algo_module: log.info( '{} - done run_algo custom_algo_module={} module_name={} ' 'ticker={} from {} to {}'.format(name, custom_algo_module, module_name, ticker, use_start_date, use_end_date)) else: log.info('{} - done run_algo BaseAlgo ticker={} from {} ' 'to {}'.format(name, ticker, use_start_date, use_end_date)) else: err = ('{} - missing a derived analysis_engine.algo.BaseAlgo ' 'class in the module file={} for ' 'ticker={} algo_name={}'.format(label, algo_module_path, ticker, name)) log.error(err) res = build_result.build_result(status=ae_consts.ERR, err=err, rec=None) return get_task_results.get_task_results(work_dict=algo_req, result=res) # end of finding a valid algorithm object if not created_algo_object: err = ('{} - failed creating algorithm object - ' 'ticker={} status={} error={}' 'algo name={} custom_algo_module={} module_name={} ' 'from {} to {}'.format( label, ticker, ae_consts.get_status(status=algo_res['status']), algo_res['err'], name, custom_algo_module, module_name, use_start_date, use_end_date)) res = build_result.build_result(status=ae_consts.ERR, err=err, rec=None) return get_task_results.get_task_results(work_dict=algo_req, result=res) # end of stop early if should_publish_extract_dataset or dataset_publish_extract: s3_log = '' redis_log = '' file_log = '' use_log = 'publish' if (extract_config['redis_address'] and extract_config['redis_db'] and extract_config['redis_key']): redis_log = 'redis://{}@{}/{}'.format( extract_config['redis_address'], extract_config['redis_db'], extract_config['redis_key']) use_log += ' {}'.format(redis_log) else: extract_config['redis_enabled'] = False if (extract_config['s3_address'] and extract_config['s3_bucket'] and extract_config['s3_key']): s3_log = 's3://{}/{}/{}'.format(extract_config['s3_address'], 
        if should_publish_extract_dataset or dataset_publish_extract:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            if (extract_config['redis_address'] and
                    extract_config['redis_db'] and
                    extract_config['redis_key']):
                redis_log = 'redis://{}@{}/{}'.format(
                    extract_config['redis_address'],
                    extract_config['redis_db'],
                    extract_config['redis_key'])
                use_log += ' {}'.format(redis_log)
            else:
                extract_config['redis_enabled'] = False
            if (extract_config['s3_address'] and
                    extract_config['s3_bucket'] and
                    extract_config['s3_key']):
                s3_log = 's3://{}/{}/{}'.format(
                    extract_config['s3_address'],
                    extract_config['s3_bucket'],
                    extract_config['s3_key'])
                use_log += ' {}'.format(s3_log)
            else:
                extract_config['s3_enabled'] = False
            if extract_config['output_file']:
                file_log = 'file:{}'.format(
                    extract_config['output_file'])
                use_log += ' {}'.format(file_log)

            log.info('{} - publish - start ticker={} algorithm-ready {}'
                     ''.format(name, ticker, use_log))
            publish_status = created_algo_object.publish_input_dataset(
                **extract_config)
            if publish_status != ae_consts.SUCCESS:
                msg = ('failed to publish algorithm-ready datasets '
                       'with status {} attempted to {}'.format(
                           ae_consts.get_status(status=publish_status),
                           use_log))
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,
                    rec=None)
                return get_task_results.get_task_results(
                    work_dict=algo_req,
                    result=res)
            # end of stop early

            log.info('{} - publish - done ticker={} algorithm-ready {}'
                     ''.format(name, ticker, use_log))
        # if publishing the algorithm-ready dataset

        if should_publish_history_dataset or dataset_publish_history:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            if (history_config['redis_address'] and
                    history_config['redis_db'] and
                    history_config['redis_key']):
                redis_log = 'redis://{}@{}/{}'.format(
                    history_config['redis_address'],
                    history_config['redis_db'],
                    history_config['redis_key'])
                use_log += ' {}'.format(redis_log)
            if (history_config['s3_address'] and
                    history_config['s3_bucket'] and
                    history_config['s3_key']):
                s3_log = 's3://{}/{}/{}'.format(
                    history_config['s3_address'],
                    history_config['s3_bucket'],
                    history_config['s3_key'])
                use_log += ' {}'.format(s3_log)
            if history_config['output_file']:
                file_log = 'file:{}'.format(
                    history_config['output_file'])
                use_log += ' {}'.format(file_log)

            log.info('{} - publish - start ticker={} trading history {}'
                     ''.format(name, ticker, use_log))
            publish_status = \
                created_algo_object.publish_trade_history_dataset(
                    **history_config)
            if publish_status != ae_consts.SUCCESS:
                msg = ('failed to publish trading history datasets '
                       'with status {} attempted to {}'.format(
                           ae_consts.get_status(status=publish_status),
                           use_log))
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,
                    rec=None)
                return get_task_results.get_task_results(
                    work_dict=algo_req,
                    result=res)
            # end of stop early

            log.info('{} - publish - done ticker={} trading history {}'
                     ''.format(name, ticker, use_log))
        # if publishing a trading history dataset

        if should_publish_report_dataset or dataset_publish_report:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            if (report_config['redis_address'] and
                    report_config['redis_db'] and
                    report_config['redis_key']):
                redis_log = 'redis://{}@{}/{}'.format(
                    report_config['redis_address'],
                    report_config['redis_db'],
                    report_config['redis_key'])
                use_log += ' {}'.format(redis_log)
            if (report_config['s3_address'] and
                    report_config['s3_bucket'] and
                    report_config['s3_key']):
                s3_log = 's3://{}/{}/{}'.format(
                    report_config['s3_address'],
                    report_config['s3_bucket'],
                    report_config['s3_key'])
                use_log += ' {}'.format(s3_log)
            if report_config['output_file']:
                file_log = 'file:{}'.format(
                    report_config['output_file'])
                use_log += ' {}'.format(file_log)

            log.info('{} - publish - start ticker={} trading performance '
                     'report {}'.format(name, ticker, use_log))
            publish_status = created_algo_object.publish_report_dataset(
                **report_config)
            if publish_status != ae_consts.SUCCESS:
                msg = ('failed to publish trading performance report '
                       'datasets with status {} attempted to {}'.format(
                           ae_consts.get_status(status=publish_status),
                           use_log))
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,
                    rec=None)
                return get_task_results.get_task_results(
                    work_dict=algo_req,
                    result=res)
            # end of stop early
            log.info('{} - publish - done ticker={} trading performance '
                     'report {}'.format(name, ticker, use_log))
        # if publishing a trading performance report dataset

        log.info('{} - done publishing datasets for ticker={} '
                 'from {} to {}'.format(
                     name,
                     ticker,
                     use_start_date,
                     use_end_date))

        res = build_result.build_result(
            status=ae_consts.SUCCESS,
            err=None,
            rec=rec)
    except Exception as e:
        res = build_result.build_result(
            status=ae_consts.ERR,
            err=('failed - run_distributed_algorithm '
                 'dict={} with ex={}'.format(algo_req, e)),
            rec=rec)
        log.error('{} - {}'.format(label, res['err']))
    # end of try/ex

    log.info('task - run_distributed_algorithm done - '
             '{} - status={}'.format(
                 label,
                 ae_consts.get_status(res['status'])))

    return get_task_results.get_task_results(
        work_dict=algo_req,
        result=res)
# end of run_distributed_algorithm
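

# A minimal sketch (not part of the original task) of the request
# dictionary run_distributed_algorithm expects; the module path below is
# a hypothetical placeholder and the class inside that file must derive
# from analysis_engine.algo.BaseAlgo:
def _example_build_algo_req():
    """Build an illustrative algo_req for run_distributed_algorithm"""
    return {
        'ticker': 'SPY',
        'name': 'my-algo',
        # hypothetical path to a custom algorithm module on disk:
        'mod_path': '/opt/sa/analysis_engine/mocks/example_algo_minute.py',
        'num_days_back': 75,
        'verbose': True,
        'raise_on_err': False,
    }
# end of _example_build_algo_req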


def run_backtest_and_plot_history(config_dict):
    """run_backtest_and_plot_history

    Run a derived algorithm with an algorithm config dictionary

    :param config_dict: algorithm config dictionary
    """

    log.debug('start - sa')

    parser = argparse.ArgumentParser(
        description=('stock analysis tool'))
    parser.add_argument(
        '-t',
        help=('ticker'),
        required=False,
        dest='ticker')
    parser.add_argument(
        '-e',
        help=('file path to extract an '
              'algorithm-ready dataset from redis'),
        required=False,
        dest='algo_extract_loc')
    parser.add_argument(
        '-l',
        help=('show dataset in this file'),
        required=False,
        dest='show_from_file')
    parser.add_argument(
        '-H',
        help=('show trading history dataset in this file'),
        required=False,
        dest='show_history_from_file')
    parser.add_argument(
        '-E',
        help=('show trading performance report dataset in this file'),
        required=False,
        dest='show_report_from_file')
    parser.add_argument(
        '-L',
        help=('restore an algorithm-ready dataset file back into redis'),
        required=False,
        dest='restore_algo_file')
    parser.add_argument(
        '-f',
        help=('save the trading history dataframe '
              'to this file'),
        required=False,
        dest='history_json_file')
    parser.add_argument(
        '-J',
        help=('plot action - after preparing you can use: '
              '-J show to open the image (good for debugging)'),
        required=False,
        dest='plot_action')
    parser.add_argument(
        '-b',
        help=('run a backtest using the dataset in '
              'a file path/s3 key/redis key formats: '
              'file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
              's3://algoready/SPY-latest.json or '
              'redis://SPY-latest'),
        required=False,
        dest='backtest_loc')
    parser.add_argument(
        '-B',
        help=('optional - broker url for Celery'),
        required=False,
        dest='broker_url')
    parser.add_argument(
        '-C',
        help=('optional - backend url for Celery'),
        required=False,
        dest='backend_url')
    parser.add_argument(
        '-w',
        help=('optional - flag for publishing an algorithm job '
              'using Celery to the analysis_engine workers'),
        required=False,
        dest='run_on_engine',
        action='store_true')
    parser.add_argument(
        '-k',
        help=('optional - s3 access key'),
        required=False,
        dest='s3_access_key')
    parser.add_argument(
        '-K',
        help=('optional - s3 secret key'),
        required=False,
        dest='s3_secret_key')
    parser.add_argument(
        '-a',
        help=('optional - s3 address format: <host:port>'),
        required=False,
        dest='s3_address')
    parser.add_argument(
        '-Z',
        help=('optional - s3 secure: default False'),
        required=False,
        dest='s3_secure')
    parser.add_argument(
        '-s',
        help=('optional - start date: YYYY-MM-DD'),
        required=False,
        dest='start_date')
    parser.add_argument(
        '-n',
        help=('optional - end date: YYYY-MM-DD'),
        required=False,
        dest='end_date')
    parser.add_argument(
        '-u',
        help=('optional - s3 bucket name'),
        required=False,
        dest='s3_bucket_name')
    parser.add_argument(
        '-G',
        help=('optional - s3 region name'),
        required=False,
        dest='s3_region_name')
    parser.add_argument(
        '-g',
        help=('Path to a custom algorithm module file '
              'on disk. This module must have a single '
              'class that inherits from: '
              'https://github.com/AlgoTraders/stock-ana'
              'lysis-engine/blob/master/'
              'analysis_engine/algo.py Additionally you '
              'can find the Example-Minute-Algorithm here: '
              'https://github.com/AlgoTraders/stock-anal'
              'ysis-engine/blob/master/analysis_engine/mocks/'
              'example_algo_minute.py'),
        required=False,
        dest='run_algo_in_file')
    parser.add_argument(
        '-p',
        help=('optional - s3 bucket/file for trading history'),
        required=False,
        dest='algo_history_loc')
    parser.add_argument(
        '-o',
        help=('optional - s3 bucket/file for trading performance report'),
        required=False,
        dest='algo_report_loc')
    parser.add_argument(
        '-r',
        help=('optional - redis_address format: <host:port>'),
        required=False,
        dest='redis_address')
    parser.add_argument(
        '-R',
        help=('optional - redis and s3 key name'),
        required=False,
        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument(
        '-x',
        help=('optional - redis expiration in seconds'),
        required=False,
        dest='redis_expire')
    parser.add_argument(
        '-c',
        help=('optional - algorithm config_file path for setting '
              'up internal algorithm trading strategies and '
              'indicators'),
        required=False,
        dest='config_file')
    parser.add_argument(
        '-v',
        help=('set the Algorithm to verbose logging'),
        required=False,
        dest='verbose_algo',
        action='store_true')
    parser.add_argument(
        '-P',
        help=('set the Algorithm\'s IndicatorProcessor to verbose logging'),
        required=False,
        dest='verbose_processor',
        action='store_true')
    parser.add_argument(
        '-I',
        help=('set all Algorithm\'s Indicators to verbose logging '
              '(note individual indicators support a \'verbose\' key '
              'that can be set to True to debug just one '
              'indicator)'),
        required=False,
        dest='verbose_indicators',
        action='store_true')
    parser.add_argument(
        '-V',
        help=('inspect the datasets an algorithm is processing - this '
              'will slow down processing to show debugging'),
        required=False,
        dest='inspect_datasets',
        action='store_true')
    parser.add_argument(
        '-j',
        help=('run the algorithm on just this specific date in the '
              'datasets - specify the date in a format: YYYY-MM-DD '
              'like: 2018-11-29'),
        required=False,
        dest='run_this_date')
    parser.add_argument(
        '-d',
        help=('debug'),
        required=False,
        dest='debug',
        action='store_true')
    args = parser.parse_args()

    ticker = None
    use_balance = 10000.0
    use_commission = 6.0
    use_start_date = None
    use_end_date = None
    use_config_file = None
    debug = False
    verbose_algo = None
    verbose_processor = None
    verbose_indicators = None
    inspect_datasets = None
    history_json_file = None
    run_this_date = None
    algo_obj = None
    algo_history_loc = 's3://algohistory'
    algo_report_loc = 's3://algoreport'
    algo_extract_loc = 's3://algoready'
    backtest_loc = None

    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    path_to_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS

    load_from_s3_bucket = None
    load_from_s3_key = None
    load_from_redis_key = None
    load_from_file = None
    load_compress = False
    load_publish = True
    load_config = None
    report_redis_key = None
    report_s3_bucket = None
    report_s3_key = None
    report_file = None
    report_compress = False
    report_publish = True
    report_config = None
    history_redis_key = None
    history_s3_bucket = None
    history_s3_key = None
    history_file = None
    history_compress = False
    history_publish = True
    history_config = None
    extract_redis_key = None
    extract_s3_bucket = None
    extract_s3_key = None
    extract_file = None
    extract_save_dir = None
    extract_compress = False
    extract_publish = True
    extract_config = None

    s3_enabled = True
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = None
    s3_secure = ae_consts.S3_SECURE
    redis_enabled = True
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = None
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    redis_serializer = 'json'
    redis_encoding = 'utf-8'
    publish_to_s3 = True
    publish_to_redis = True
    publish_to_slack = True
    slack_enabled = False
    slack_code_block = False
    slack_full_width = False
    dataset_type = ae_consts.SA_DATASET_TYPE_ALGO_READY
    serialize_datasets = ae_consts.DEFAULT_SERIALIZED_DATASETS
    compress = False
    encoding = 'utf-8'
    run_on_engine = False
    auto_fill = True
    timeseries = 'minute'
    trade_strategy = 'count'
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.history_json_file:
        history_json_file = args.history_json_file
    if args.ticker:
        ticker = args.ticker.upper()
    if args.debug:
        debug = True
    if args.verbose_algo:
        verbose_algo = True
    if args.verbose_processor:
        verbose_processor = True
    if args.verbose_indicators:
        verbose_indicators = True
    if args.inspect_datasets:
        inspect_datasets = True
    if args.run_this_date:
        run_this_date = args.run_this_date

    if args.start_date:
        try:
            use_start_date = '{} 00:00:00'.format(str(args.start_date))
            datetime.datetime.strptime(
                args.start_date,
                ae_consts.COMMON_DATE_FORMAT)
        except Exception as e:
            msg = ('please use a start date formatted as: {}'
                   '\n'
                   'error was: {}'.format(
                       ae_consts.COMMON_DATE_FORMAT,
                       e))
            log.error(msg)
            sys.exit(1)
        # end of testing for a valid date
    # end of args.start_date
    if args.end_date:
        try:
            use_end_date = '{} 00:00:00'.format(str(args.end_date))
            datetime.datetime.strptime(
                args.end_date,
                ae_consts.COMMON_DATE_FORMAT)
        except Exception as e:
            msg = ('please use an end date formatted as: {}'
                   '\n'
                   'error was: {}'.format(
                       ae_consts.COMMON_DATE_FORMAT,
                       e))
            log.error(msg)
            sys.exit(1)
        # end of testing for a valid date
    # end of args.end_date

    algo_mod_path = None
    if args.run_algo_in_file:
        if not os.path.exists(args.run_algo_in_file):
            log.error('missing algorithm module file: {}'.format(
                args.run_algo_in_file))
            sys.exit(1)
        algo_mod_path = args.run_algo_in_file
    if args.config_file:
        use_config_file = args.config_file
        if not os.path.exists(use_config_file):
            log.error('Failed: unable to find config file: -c {}'.format(
                use_config_file))
            sys.exit(1)
        with open(use_config_file) as config_file:
            config_dict = json.loads(config_file.read())
        algo_mod_path = config_dict.get('algo_path', algo_mod_path)
        if not algo_mod_path or not os.path.exists(algo_mod_path):
            log.error('missing algorithm module file from config: {}'.format(
                algo_mod_path))
            sys.exit(1)
    """
    Finalize the algo config
    """
    if config_dict:
        use_balance = float(
            config_dict.get('balance', use_balance))
        use_commission = float(
            config_dict.get('commission', use_commission))
        ticker = str(
            config_dict.get('ticker', ticker)).upper()
        config_dict['ticker'] = ticker
        config_dict['balance'] = use_balance
        config_dict['commission'] = use_commission
    else:
        if not ticker:
            ticker = str(ae_consts.TICKER).upper()
    if not ticker:
        log.error('usage error: please set a ticker with -t <TICKER>')
        sys.exit(1)

    if verbose_algo:
        config_dict['verbose'] = verbose_algo
    if verbose_processor:
        config_dict['verbose_processor'] = verbose_processor
    if verbose_indicators:
        config_dict['verbose_indicators'] = verbose_indicators
    if inspect_datasets:
        config_dict['inspect_datasets'] = inspect_datasets
    if run_this_date:
        config_dict['run_this_date'] = run_this_date

    """
    Run a custom algo module from disk
    """
    if algo_mod_path:
        if args.backtest_loc:
            backtest_loc = args.backtest_loc
            if ('file:/' not in backtest_loc and
                    's3://' not in backtest_loc and
                    'redis://' not in backtest_loc):
                log.error(
                    'invalid -b <backtest dataset file> specified. '
                    '{} '
                    'please use either: '
                    '-b file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-b s3://algoready/SPY-latest.json or '
                    '-b redis://SPY-latest'.format(backtest_loc))
                sys.exit(1)
            if 's3://' in backtest_loc:
                load_from_s3_bucket = backtest_loc.split('/')[-2]
                load_from_s3_key = backtest_loc.split('/')[-1]
            elif 'redis://' in backtest_loc:
                load_from_redis_key = backtest_loc.split('/')[-1]
            elif 'file:/' in backtest_loc:
                load_from_file = backtest_loc.split(':')[-1]
            load_publish = True
        # end of parsing supported transport - loading an algo-ready

        if args.algo_history_loc:
            algo_history_loc = args.algo_history_loc
            if ('file:/' not in algo_history_loc and
                    's3://' not in algo_history_loc and
                    'redis://' not in algo_history_loc):
                log.error(
                    'invalid -p <algo history location> specified. '
                    '{} '
                    'please use either: '
                    '-p file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-p s3://algoready/SPY-latest.json or '
                    '-p redis://SPY-latest'.format(algo_history_loc))
                sys.exit(1)
            if 's3://' in algo_history_loc:
                history_s3_bucket = algo_history_loc.split('/')[-2]
                history_s3_key = algo_history_loc.split('/')[-1]
            elif 'redis://' in algo_history_loc:
                history_redis_key = algo_history_loc.split('/')[-1]
            elif 'file:/' in algo_history_loc:
                history_file = algo_history_loc.split(':')[-1]
            history_publish = True
        # end of parsing supported transport - trading history

        if args.algo_report_loc:
            algo_report_loc = args.algo_report_loc
            if ('file:/' not in algo_report_loc and
                    's3://' not in algo_report_loc and
                    'redis://' not in algo_report_loc):
                log.error(
                    'invalid -o <algo report location> specified. '
                    '{} '
                    'please use either: '
                    '-o file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-o s3://algoready/SPY-latest.json or '
                    '-o redis://SPY-latest'.format(algo_report_loc))
                sys.exit(1)
            if 's3://' in algo_report_loc:
                report_s3_bucket = algo_report_loc.split('/')[-2]
                report_s3_key = algo_report_loc.split('/')[-1]
            elif 'redis://' in algo_report_loc:
                report_redis_key = algo_report_loc.split('/')[-1]
            elif 'file:/' in algo_report_loc:
                report_file = algo_report_loc.split(':')[-1]
            report_publish = True
        # end of parsing supported transport - trading performance report
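        # The extract parser below accepts the same location grammar as
        # the three parsers above; illustrative examples of each form:
        #   file:/opt/sa/tests/datasets/algo/SPY-latest.json
        #   s3://algoready/SPY-latest.json
        #   redis://SPY-latest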
        if args.algo_extract_loc:
            algo_extract_loc = args.algo_extract_loc
            if ('file:/' not in algo_extract_loc and
                    's3://' not in algo_extract_loc and
                    'redis://' not in algo_extract_loc):
                log.error(
                    'invalid -e <algorithm-ready extract location> '
                    'specified. '
                    '{} '
                    'please use either: '
                    '-e file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-e s3://algoready/SPY-latest.json or '
                    '-e redis://SPY-latest'.format(algo_extract_loc))
                sys.exit(1)
            if 's3://' in algo_extract_loc:
                extract_s3_bucket = algo_extract_loc.split('/')[-2]
                extract_s3_key = algo_extract_loc.split('/')[-1]
            elif 'redis://' in algo_extract_loc:
                extract_redis_key = algo_extract_loc.split('/')[-1]
            elif 'file:/' in algo_extract_loc:
                extract_file = algo_extract_loc.split(':')[-1]
            extract_publish = True
        # end of parsing supported transport - extract algorithm-ready

        if args.run_on_engine:
            run_on_engine = True
            if verbose_algo:
                log.info('starting algo on the engine')

        use_name = config_dict.get('name', 'missing-algo-name')
        auto_fill = config_dict.get('auto_fill', auto_fill)
        timeseries = config_dict.get('timeseries', timeseries)
        trade_strategy = config_dict.get('trade_strategy', trade_strategy)

        algo_res = run_custom_algo.run_custom_algo(
            mod_path=algo_mod_path,
            ticker=config_dict['ticker'],
            balance=config_dict['balance'],
            commission=config_dict['commission'],
            name=use_name,
            start_date=use_start_date,
            end_date=use_end_date,
            auto_fill=auto_fill,
            config_dict=config_dict,
            load_from_s3_bucket=load_from_s3_bucket,
            load_from_s3_key=load_from_s3_key,
            load_from_redis_key=load_from_redis_key,
            load_from_file=load_from_file,
            load_compress=load_compress,
            load_publish=load_publish,
            load_config=load_config,
            report_redis_key=report_redis_key,
            report_s3_bucket=report_s3_bucket,
            report_s3_key=report_s3_key,
            report_file=report_file,
            report_compress=report_compress,
            report_publish=report_publish,
            report_config=report_config,
            history_redis_key=history_redis_key,
            history_s3_bucket=history_s3_bucket,
            history_s3_key=history_s3_key,
            history_file=history_file,
            history_compress=history_compress,
            history_publish=history_publish,
            history_config=history_config,
            extract_redis_key=extract_redis_key,
            extract_s3_bucket=extract_s3_bucket,
            extract_s3_key=extract_s3_key,
            extract_file=extract_file,
            extract_save_dir=extract_save_dir,
            extract_compress=extract_compress,
            extract_publish=extract_publish,
            extract_config=extract_config,
            publish_to_slack=publish_to_slack,
            publish_to_s3=publish_to_s3,
            publish_to_redis=publish_to_redis,
            dataset_type=dataset_type,
            serialize_datasets=serialize_datasets,
            compress=compress,
            encoding=encoding,
            redis_enabled=redis_enabled,
            redis_key=redis_key,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=s3_enabled,
            s3_key=s3_key,
            s3_address=s3_address,
            s3_bucket=s3_bucket_name,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=slack_enabled,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            dataset_publish_extract=extract_publish,
            dataset_publish_history=history_publish,
            dataset_publish_report=report_publish,
            run_on_engine=run_on_engine,
            auth_url=broker_url,
            backend_url=backend_url,
            include_tasks=include_tasks,
            ssl_options=ssl_options,
            transport_options=transport_options,
            path_to_config_module=path_to_config_module,
            timeseries=timeseries,
            trade_strategy=trade_strategy,
            verbose=verbose_algo)

        show_label = 'algo.name={}'.format(use_name)
        show_extract = '{}'.format(algo_extract_loc)
        show_history = '{}'.format(algo_history_loc)
        show_report = '{}'.format(algo_report_loc)
        base_label = ('load={} '
                      'extract={} '
                      'history={} '
                      'report={}'.format(
                          args.run_algo_in_file,
                          show_extract,
                          show_history,
                          show_report))
        algo_obj = algo_res.get('algo', None)
        if not algo_obj:
            log.error(
                '{} - failed creating algorithm object'.format(show_label))
            sys.exit(1)

        if not run_on_engine:
            algo_trade_history_recs = algo_res['rec'].get('history', [])
            show_label = ('{} algo.name={} {} trade_history_len={}'.format(
                ticker,
                use_name,
                base_label,
                len(algo_trade_history_recs)))
        if args.debug:
            log.info('algo_res={}'.format(algo_res))
            if algo_res['status'] == ae_consts.SUCCESS:
                log.info('{} - done running {}'.format(
                    ae_consts.get_status(status=algo_res['status']),
                    show_label))
            else:
                log.error('{} - done running {}'.format(
                    ae_consts.get_status(status=algo_res['status']),
                    show_label))
        else:
            if algo_res['status'] == ae_consts.SUCCESS:
                log.info('{} - done running {}'.format(
                    ae_consts.get_status(status=algo_res['status']),
                    show_label))
            else:
                log.error('run_custom_algo returned error: {}'.format(
                    algo_res['err']))
                sys.exit(1)
        # end of running the custom algo handler
    # end if running a custom algorithm module
    else:
        if args.backtest_loc:
            backtest_loc = args.backtest_loc
            if ('file:/' not in backtest_loc and
                    's3://' not in backtest_loc and
                    'redis://' not in backtest_loc):
                log.error(
                    'invalid -b <backtest dataset file> specified. '
                    '{} '
                    'please use either: '
                    '-b file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-b s3://algoready/SPY-latest.json or '
                    '-b redis://SPY-latest'.format(backtest_loc))
                sys.exit(1)

            load_from_s3_bucket = None
            load_from_s3_key = None
            load_from_redis_key = None
            load_from_file = None

            if 's3://' in backtest_loc:
                load_from_s3_bucket = backtest_loc.split('/')[-2]
                load_from_s3_key = backtest_loc.split('/')[-1]
            elif 'redis://' in backtest_loc:
                load_from_redis_key = backtest_loc.split('/')[-1]
            elif 'file:/' in backtest_loc:
                load_from_file = backtest_loc.split(':')[-1]
            # end of parsing supported transport - loading an algo-ready
        # end of backtest_loc

        load_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=load_from_file,
            s3_bucket=load_from_s3_bucket,
            s3_key=load_from_s3_key,
            redis_key=load_from_redis_key,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            verbose=debug,
            label='load-{}'.format(backtest_loc))
        if load_from_file:
            load_config['output_file'] = load_from_file
        if load_from_redis_key:
            load_config['redis_key'] = load_from_redis_key
            load_config['redis_enabled'] = True
        if load_from_s3_bucket and load_from_s3_key:
            load_config['s3_bucket'] = load_from_s3_bucket
            load_config['s3_key'] = load_from_s3_key
            load_config['s3_enabled'] = True

        log.info('starting algo')

        algo_obj = ExampleCustomAlgo(
            ticker=config_dict['ticker'],
            config_dict=config_dict)

        algo_res = run_algo.run_algo(
            ticker=ticker,
            algo=algo_obj,
            start_date=use_start_date,
            end_date=use_end_date,
            raise_on_err=True)

        if algo_res['status'] != ae_consts.SUCCESS:
            log.error('failed running algo backtest '
                      '{} hit status: {} error: {}'.format(
                          algo_obj.get_name(),
                          ae_consts.get_status(status=algo_res['status']),
                          algo_res['err']))
            return
        # if not successful

        log.info('backtest: {} {}'.format(
            algo_obj.get_name(),
            ae_consts.get_status(status=algo_res['status'])))
    # end of use custom algo or not

    if algo_obj:
        trading_history_dict = algo_obj.get_history_dataset()
        history_df = trading_history_dict[ticker]
        if not hasattr(history_df, 'to_json'):
            return

        if history_json_file:
            log.info('saving history to: {}'.format(history_json_file))
            history_df.to_json(
                history_json_file,
                orient='records',
                date_format='iso')
        log.info('plotting history')

        first_date = history_df['date'].iloc[0]
        end_date = history_df['date'].iloc[-1]
        title = ('Trading History {} for Algo {}\n'
                 'Backtest dates from {} to {}'.format(
                     ticker,
                     trading_history_dict['algo_name'],
                     first_date,
                     end_date))
        use_xcol = 'date'
        use_as_date_format = '%d\n%b'
        if config_dict.get('timeseries', 'minute') == 'minute':
            use_xcol = 'minute'
            use_as_date_format = '%d %H:%M:%S\n%b'
        xlabel = 'Dates vs {} values'.format(
            trading_history_dict['algo_name'])
        ylabel = 'Algo {}\nvalues'.format(
            trading_history_dict['algo_name'])
        df_filter = (history_df['close'] > 0.01)

        # set default hloc columns:
        green = None
        orange = None
        red = 'close'
        blue = 'balance'

        if debug:
            for i, r in history_df.iterrows():
                log.debug('{} - {}'.format(
                    r['minute'],
                    r['close']))

        plot_trading_history.plot_trading_history(
            title=title,
            df=history_df,
            red=red,
            blue=blue,
            green=green,
            orange=orange,
            date_col=use_xcol,
            date_format=use_as_date_format,
            xlabel=xlabel,
            ylabel=ylabel,
            df_filter=df_filter,
            show_plot=True,
            dropna_for_all=True)
# end of run_backtest_and_plot_history
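

# A minimal sketch (not part of the original module) of an algorithm
# config dictionary that run_backtest_and_plot_history can finalize;
# the name is a hypothetical placeholder and real configs ship with
# the repo:
def _example_minimal_backtest_config():
    """Build an illustrative config_dict for run_backtest_and_plot_history"""
    return {
        'name': 'example-backtest',
        'ticker': 'SPY',
        'balance': 10000.0,
        'commission': 6.0,
        'timeseries': 'minute',
        'trade_strategy': 'count',
    }
# end of _example_minimal_backtest_config

# Illustrative command line, assuming a console script wires this parser
# up (flags match the argparse setup above; both paths are hypothetical):
#     sa -t SPY \
#        -g /opt/sa/analysis_engine/mocks/example_algo_minute.py \
#        -c /opt/sa/cfg/my_algo_config.json \
#        -s 2018-10-18 -n 2019-01-01 -d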