Example 1
def fetch_new_stock_datasets():
    """fetch_new_stock_datasets

    Collect datasets for a ticker from IEX Cloud or Tradier

    .. warning:: IEX Cloud charges per request. The example
        commands below help you monitor your usage while handling
        first-time setup and automation (intraday, daily, and weekly
        modes are supported).

    **Setup**

    ::

        export IEX_TOKEN=YOUR_IEX_CLOUD_TOKEN
        export TD_TOKEN=YOUR_TRADIER_TOKEN

    **Pull Data for a Ticker from IEX and Tradier**

    ::

        fetch -t TICKER

    **Pull from All Supported IEX Feeds**

    ::

        fetch -t TICKER -g iex-all

    **Pull from All Supported Tradier Feeds**

    ::

        fetch -t TICKER -g td

    **Intraday IEX and Tradier Feeds (only minute and news to reduce costs)**

    ::

        fetch -t TICKER -g intra
        # or manually:
        # fetch -t TICKER -g td,iex_min,iex_news

    **Daily IEX Feeds (daily and news)**

    ::

        fetch -t TICKER -g daily
        # or manually:
        # fetch -t TICKER -g iex_day,iex_news

    **Weekly IEX Feeds (company, financials, earnings, dividends, and peers)**

    ::

        fetch -t TICKER -g weekly
        # or manually:
        # fetch -t TICKER -g iex_fin,iex_earn,iex_div,iex_peers,iex_news,iex_comp

    **IEX Minute**

    ::

        fetch -t TICKER -g iex_min

    **IEX News**

    ::

        fetch -t TICKER -g iex_news

    **IEX Daily**

    ::

        fetch -t TICKER -g iex_day

    **IEX Stats**

    ::

        fetch -t TICKER -g iex_stats

    **IEX Peers**

    ::

        fetch -t TICKER -g iex_peers

    **IEX Financials**

    ::

        fetch -t TICKER -g iex_fin

    **IEX Earnings**

    ::

        fetch -t TICKER -g iex_earn

    **IEX Dividends**

    ::

        fetch -t TICKER -g iex_div

    **IEX Quote**

    ::

        fetch -t TICKER -g iex_quote

    **IEX Company**

    ::

        fetch -t TICKER -g iex_comp

    .. note:: This requires the following services to be listening on:

        - redis ``localhost:6379``
        - minio ``localhost:9000``

    """
    log.info('start - fetch_new_stock_datasets')

    parser = argparse.ArgumentParser(
        description=('Download and store the latest stock pricing, '
                     'news, and options chain data '
                     'and store it in Minio (S3) and Redis. '
                     'Also includes support for getting FinViz '
                     'screener tickers'))
    parser.add_argument('-t', help=('ticker'), required=False, dest='ticker')
    parser.add_argument(
        '-g',
        help=('optional - fetch mode: '
              'initial = default fetch from initial data feeds '
              '(IEX and Tradier), '
              'intra = fetch intraday from IEX and Tradier, '
              'daily or day = fetch daily from IEX, '
              'weekly = fetch weekly from IEX, '
              'all = fetch from all data feeds, '
              'td = fetch from Tradier feeds only, '
              'iex = fetch from IEX Cloud feeds only, '
              'min or minute or iex_min = fetch IEX Cloud intraday '
              'per-minute feed '
              'https://iexcloud.io/docs/api/#historical-prices, '
              'day or daily or iex_day = fetch IEX Cloud daily feed '
              'https://iexcloud.io/docs/api/#historical-prices, '
              'quote or iex_quote = fetch IEX Cloud quotes feed '
              'https://iexcloud.io/docs/api/#quote, '
              'stats or iex_stats = fetch IEX Cloud key stats feed '
              'https://iexcloud.io/docs/api/#key-stats, '
              'peers or iex_peers = fetch from just IEX Cloud peers feed '
              'https://iexcloud.io/docs/api/#peers, '
              'news or iex_news = fetch IEX Cloud news feed '
              'https://iexcloud.io/docs/api/#news, '
              'fin or iex_fin = fetch IEX Cloud financials feed '
              'https://iexcloud.io/docs/api/#financials, '
              'earn or iex_earn = fetch from just IEX Cloud earnings feed '
              'https://iexcloud.io/docs/api/#earnings, '
              'div or iex_div = fetch from just IEX Cloud dividends feed '
              'https://iexcloud.io/docs/api/#dividends, '
              'iex_comp = fetch from just IEX Cloud company feed '
              'https://iexcloud.io/docs/api/#company'),
        required=False,
        dest='fetch_mode')
    parser.add_argument('-i',
                        help=('optional - ticker id '
                              '(not used without a database)'),
                        required=False,
                        dest='ticker_id')
    parser.add_argument('-e',
                        help=('optional - options expiration date'),
                        required=False,
                        dest='exp_date_str')
    parser.add_argument('-l',
                        help=('optional - path to the log config file'),
                        required=False,
                        dest='log_config_path')
    parser.add_argument('-b',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-B',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-s',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-S',
                        help=('optional - s3 ssl or not'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-G',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument('-p',
                        help=('optional - redis_password'),
                        required=False,
                        dest='redis_password')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-n',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument('-z',
                        help=('optional - strike price'),
                        required=False,
                        dest='strike')
    parser.add_argument(
        '-c',
        help=('optional - contract type "C" for calls "P" for puts'),
        required=False,
        dest='contract_type')
    parser.add_argument(
        '-P',
        help=('optional - get pricing data if "1", disabled if "0"'),
        required=False,
        dest='get_pricing')
    parser.add_argument(
        '-N',
        help=('optional - get news data if "1", disabled if "0"'),
        required=False,
        dest='get_news')
    parser.add_argument(
        '-O',
        help=('optional - get options data if "1", disabled if "0"'),
        required=False,
        dest='get_options')
    parser.add_argument('-U',
                        help=('optional - s3 publishing enabled if "1", '
                              'disabled if "0"'),
                        required=False,
                        dest='s3_enabled')
    parser.add_argument(
        '-R',
        help=('optional - redis publishing enabled if "1", '
              'disabled if "0"'),
        required=False,
        dest='redis_enabled')
    parser.add_argument('-A',
                        help=('optional - run an analysis '
                              'supported modes: scn'),
                        required=False,
                        dest='analysis_type')
    parser.add_argument('-L',
                        help=('optional - screener urls to pull '
                              'tickers for analysis'),
                        required=False,
                        dest='urls')
    parser.add_argument(
        '-Z',
        help=('optional - run with the Celery engine (default is '
              'offline mode without an engine, for local testing '
              'and demos)'),
        required=False,
        dest='celery_enabled',
        action='store_true')
    parser.add_argument('-F',
                        help=('optional - backfill date for filling in '
                              'gaps for the IEX Cloud minute dataset '
                              'format is YYYY-MM-DD'),
                        required=False,
                        dest='backfill_date')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    run_offline = True
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    fetch_mode = 'initial'
    exp_date_str = ae_consts.NEXT_EXP_STR
    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    celery_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_secure = ae_consts.S3_SECURE
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = ae_consts.S3_KEY
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = ae_consts.REDIS_KEY
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    strike = None
    contract_type = None
    get_pricing = True
    get_news = True
    get_options = True
    s3_enabled = True
    redis_enabled = True
    analysis_type = None
    backfill_date = None
    debug = False

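    # CLI flags, when provided, override the environment-backed
    # defaults assigned above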
    if args.ticker:
        ticker = args.ticker.upper()
    if args.ticker_id:
        ticker_id = args.ticker_id
    if args.exp_date_str:
        exp_date_str = args.exp_date_str
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_password:
        redis_password = args.redis_password
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.strike:
        strike = args.strike
    if args.contract_type:
        contract_type = args.contract_type
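    # toggle flags arrive as the strings "1"/"0"; convert to booleans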
    if args.get_pricing:
        get_pricing = args.get_pricing == '1'
    if args.get_news:
        get_news = args.get_news == '1'
    if args.get_options:
        get_options = args.get_options == '1'
    if args.s3_enabled:
        s3_enabled = args.s3_enabled == '1'
    if args.redis_enabled:
        redis_enabled = args.redis_enabled == '1'
    if args.fetch_mode:
        fetch_mode = str(args.fetch_mode).lower()
    if args.analysis_type:
        analysis_type = str(args.analysis_type).lower()
    if args.celery_enabled:
        run_offline = False
    if args.backfill_date:
        backfill_date = args.backfill_date
    if args.debug:
        debug = True

    work = api_requests.build_get_new_pricing_request()

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_key'] = s3_key
    work['redis_key'] = redis_key
    work['strike'] = strike
    work['contract'] = contract_type
    work['exp_date'] = exp_date_str
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['get_pricing'] = get_pricing
    work['get_news'] = get_news
    work['get_options'] = get_options
    work['s3_enabled'] = s3_enabled
    work['redis_enabled'] = redis_enabled
    work['fetch_mode'] = fetch_mode
    work['analysis_type'] = analysis_type
    work['iex_datasets'] = iex_consts.DEFAULT_FETCH_DATASETS
    work['backfill_date'] = backfill_date
    work['debug'] = debug
    work['label'] = f'ticker={ticker}'

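    # screener mode builds a FinViz screener analysis request; every
    # other mode publishes a pricing fetch task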
    if analysis_type == 'scn':
        label = f'screener={work["ticker"]}'
        fv_urls = []
        if args.urls:
            fv_urls = str(args.urls).split('|')
        if len(fv_urls) == 0:
            fv_urls = os.getenv('SCREENER_URLS', '').split('|')
        screener_req = api_requests.build_screener_analysis_request(
            ticker=ticker, fv_urls=fv_urls, label=label)
        work.update(screener_req)
        start_screener_analysis(req=work)
    # end of analysis_type
    else:
        last_close_date = ae_utils.last_close()
        last_close_str = last_close_date.strftime(ae_consts.COMMON_DATE_FORMAT)
        cache_base_key = f'{ticker}_{last_close_str}'
        if not args.keyname:
            work['s3_key'] = cache_base_key
            work['redis_key'] = cache_base_key

        path_to_tasks = 'analysis_engine.work_tasks'
        task_name = (f'{path_to_tasks}'
                     f'.get_new_pricing_data.get_new_pricing_data')
        task_res = None
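        # run inline when Celery is disabled or offline mode was
        # requested; otherwise publish to the Celery broker below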
        if ae_consts.is_celery_disabled() or run_offline:
            work['celery_disabled'] = True
            work['verbose'] = debug
            log.debug(f'starting without celery work={ae_consts.ppj(work)} '
                      f'offline={run_offline}')
            task_res = task_pricing.get_new_pricing_data(work)
            status_str = ae_consts.get_status(status=task_res['status'])

            cur_date = backfill_date
            if not backfill_date:
                cur_date = ae_utils.get_last_close_str()
            redis_arr = work["redis_address"].split(':')
            include_results = ''
            if debug:
                include_results = task_res['rec']
            if task_res['status'] == ae_consts.SUCCESS:
                if task_res['rec']['num_success'] == 0:
                    log.error(f'failed fetching ticker={work["ticker"]} '
                              f'from {fetch_mode} - please check the '
                              'environment variables')
                else:
                    log.info(f'done fetching ticker={work["ticker"]} '
                             f'mode={fetch_mode} '
                             f'status={status_str} '
                             f'err={task_res["err"]} {include_results}')
                    print('View keys in redis with:\n'
                          f'redis-cli -h {redis_arr[0]} '
                          'keys '
                          f'"{work["ticker"]}_{cur_date}*"')
            elif task_res['status'] == ae_consts.MISSING_TOKEN:
                print('Set an IEX or Tradier token: '
                      '\n'
                      '  export IEX_TOKEN=YOUR_IEX_TOKEN\n'
                      '  export TD_TOKEN=YOUR_TD_TOKEN\n')
            else:
                log.error(f'done fetching ticker={work["ticker"]} '
                          f'mode={fetch_mode} '
                          f'status={status_str} '
                          f'err={task_res["err"]}')
            # if/else debug
        else:
            log.debug(f'connecting to broker={broker_url} '
                      f'backend={backend_url}')

            # Get the Celery app
            app = get_celery_app.get_celery_app(
                name=__name__,
                auth_url=broker_url,
                backend_url=backend_url,
                path_to_config_module=celery_config_module,
                ssl_options=ssl_options,
                transport_options=transport_options,
                include_tasks=include_tasks)

            log.debug(f'calling task={task_name} - work={ae_consts.ppj(work)}')
            job_id = app.send_task(task_name, (work, ))
            log.debug(f'task={task_name} - job_id={job_id}')
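
For reference, the offline branch above can be driven programmatically. This is a minimal sketch, not the package's documented API; the import paths are assumptions inferred from the identifiers in the example (the task name string confirms analysis_engine.work_tasks.get_new_pricing_data exists):

import analysis_engine.consts as ae_consts
import analysis_engine.api_requests as api_requests
import analysis_engine.work_tasks.get_new_pricing_data as task_pricing

# build the same work payload the CLI assembles, then run the task
# inline (no Celery broker required)
work = api_requests.build_get_new_pricing_request()
work['ticker'] = 'SPY'
work['fetch_mode'] = 'intra'    # minute and news feeds only
work['celery_disabled'] = True  # run without an engine
task_res = task_pricing.get_new_pricing_data(work)
print(ae_consts.get_status(status=task_res['status']))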
Example 2
def fetch_new_stock_datasets():
    """fetch_new_stock_datasets

    Collect all datasets for the ticker **SPY**:

    ::

        fetch_new_stock_datasets.py -t SPY

    .. note:: This requires the following services to be listening on:

        - redis ``localhost:6379``
        - minio ``localhost:9000``

    """
    log.info('start - fetch_new_stock_datasets')

    parser = argparse.ArgumentParser(
        description=('Download and store the latest stock pricing, '
                     'news, and options chain data '
                     'and store it in Minio (S3) and Redis. '
                     'Also includes support for getting FinViz '
                     'screener tickers'))
    parser.add_argument('-t', help=('ticker'), required=False, dest='ticker')
    parser.add_argument('-g',
                        help=('optional - fetch mode: '
                              'all = fetch from all data sources (default), '
                              'td = fetch from just Tradier sources, '
                              'iex = fetch from just IEX sources'),
                        required=False,
                        dest='fetch_mode')
    parser.add_argument('-i',
                        help=('optional - ticker id '
                              '(not used without a database)'),
                        required=False,
                        dest='ticker_id')
    parser.add_argument('-e',
                        help=('optional - options expiration date'),
                        required=False,
                        dest='exp_date_str')
    parser.add_argument('-l',
                        help=('optional - path to the log config file'),
                        required=False,
                        dest='log_config_path')
    parser.add_argument('-b',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-B',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-s',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-S',
                        help=('optional - s3 ssl or not'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-G',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument('-p',
                        help=('optional - redis_password'),
                        required=False,
                        dest='redis_password')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-n',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument('-z',
                        help=('optional - strike price'),
                        required=False,
                        dest='strike')
    parser.add_argument(
        '-c',
        help=('optional - contract type "C" for calls "P" for puts'),
        required=False,
        dest='contract_type')
    parser.add_argument(
        '-P',
        help=('optional - get pricing data if "1", disabled if "0"'),
        required=False,
        dest='get_pricing')
    parser.add_argument(
        '-N',
        help=('optional - get news data if "1", disabled if "0"'),
        required=False,
        dest='get_news')
    parser.add_argument(
        '-O',
        help=('optional - get options data if "1", disabled if "0"'),
        required=False,
        dest='get_options')
    parser.add_argument('-U',
                        help=('optional - s3 publishing enabled if "1", '
                              'disabled if "0"'),
                        required=False,
                        dest='s3_enabled')
    parser.add_argument(
        '-R',
        help=('optional - redis publishing enabled if "1", '
              'disabled if "0"'),
        required=False,
        dest='redis_enabled')
    parser.add_argument('-A',
                        help=('optional - run an analysis '
                              'supported modes: scn'),
                        required=False,
                        dest='analysis_type')
    parser.add_argument('-L',
                        help=('optional - screener urls to pull '
                              'tickers for analysis'),
                        required=False,
                        dest='urls')
    parser.add_argument(
        '-Z',
        help=('optional - run with the Celery engine (default is '
              'offline mode without an engine, for local testing '
              'and demos)'),
        required=False,
        dest='celery_enabled',
        action='store_true')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    run_offline = True
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    fetch_mode = 'all'
    exp_date_str = ae_consts.NEXT_EXP_STR
    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    celery_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_secure = ae_consts.S3_SECURE
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = ae_consts.S3_KEY
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = ae_consts.REDIS_KEY
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    strike = None
    contract_type = None
    get_pricing = True
    get_news = True
    get_options = True
    s3_enabled = True
    redis_enabled = True
    analysis_type = None
    debug = False

    if args.ticker:
        ticker = args.ticker.upper()
    if args.ticker_id:
        ticker_id = args.ticker_id
    if args.exp_date_str:
        exp_date_str = args.exp_date_str
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_password:
        redis_password = args.redis_password
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.strike:
        strike = args.strike
    if args.contract_type:
        contract_type = args.contract_type
    if args.get_pricing:
        get_pricing = args.get_pricing == '1'
    if args.get_news:
        get_news = args.get_news == '1'
    if args.get_options:
        get_options = args.get_options == '1'
    if args.s3_enabled:
        s3_enabled = args.s3_enabled == '1'
    if args.redis_enabled:
        redis_enabled = args.redis_enabled == '1'
    if args.fetch_mode:
        fetch_mode = str(args.fetch_mode).lower()
    if args.analysis_type:
        analysis_type = str(args.analysis_type).lower()
    if args.celery_enabled:
        run_offline = False
    if args.debug:
        debug = True

    work = api_requests.build_get_new_pricing_request()

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_key'] = s3_key
    work['redis_key'] = redis_key
    work['strike'] = strike
    work['contract'] = contract_type
    work['exp_date'] = exp_date_str
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['get_pricing'] = get_pricing
    work['get_news'] = get_news
    work['get_options'] = get_options
    work['s3_enabled'] = s3_enabled
    work['redis_enabled'] = redis_enabled
    work['fetch_mode'] = fetch_mode
    work['analysis_type'] = analysis_type
    work['iex_datasets'] = iex_consts.DEFAULT_FETCH_DATASETS
    work['debug'] = debug
    work['label'] = 'ticker={}'.format(ticker)

    if analysis_type == 'scn':
        label = 'screener={}'.format(work['ticker'])
        fv_urls = []
        if args.urls:
            fv_urls = str(args.urls).split('|')
        if len(fv_urls) == 0:
            fv_urls = os.getenv('SCREENER_URLS', '').split('|')
        screener_req = api_requests.build_screener_analysis_request(
            ticker=ticker, fv_urls=fv_urls, label=label)
        work.update(screener_req)
        start_screener_analysis(req=work)
    # end of analysis_type
    else:
        if not args.keyname:
            last_close_date = ae_utils.last_close()
            work['s3_key'] = '{}_{}'.format(
                work['ticker'],
                last_close_date.strftime(ae_consts.COMMON_DATE_FORMAT))
            work['redis_key'] = '{}_{}'.format(
                work['ticker'],
                last_close_date.strftime(ae_consts.COMMON_DATE_FORMAT))

        path_to_tasks = 'analysis_engine.work_tasks'
        task_name = ('{}.get_new_pricing_data.get_new_pricing_data'.format(
            path_to_tasks))
        task_res = None
        if ae_consts.is_celery_disabled() or run_offline:
            work['celery_disabled'] = True
            log.debug('starting without celery work={} offline={}'.format(
                ae_consts.ppj(work), run_offline))
            task_res = task_pricing.get_new_pricing_data(work)

            if debug:
                log.info('done - result={} '
                         'task={} status={} '
                         'err={} label={}'.format(
                             ae_consts.ppj(task_res), task_name,
                             ae_consts.get_status(status=task_res['status']),
                             task_res['err'], work['label']))
            else:
                log.info('done - result '
                         'task={} status={} '
                         'err={} label={}'.format(
                             task_name,
                             ae_consts.get_status(status=task_res['status']),
                             task_res['err'], work['label']))
            # if/else debug
        else:
            log.info('connecting to broker={} backend={}'.format(
                broker_url, backend_url))

            # Get the Celery app
            app = get_celery_app.get_celery_app(
                name=__name__,
                auth_url=broker_url,
                backend_url=backend_url,
                path_to_config_module=celery_config_module,
                ssl_options=ssl_options,
                transport_options=transport_options,
                include_tasks=include_tasks)

            log.info('calling task={} - work={}'.format(
                task_name, ae_consts.ppj(work)))
            job_id = app.send_task(task_name, (work, ))
            log.info('calling task={} - success job_id={}'.format(
                task_name, job_id))
def run_aws_backup():
    """run_aws_backup

    Run buy and sell analysis on a stock to send alerts to subscribed
    users

    """

    log.debug('start - aws-backup')

    parser = argparse.ArgumentParser(description=('stock analysis tool'))
    parser.add_argument('-t', help=('ticker'), required=True, dest='ticker')
    parser.add_argument('-e',
                        help=('file path to extract an '
                              'algorithm-ready datasets from redis'),
                        required=False,
                        dest='algo_extract_loc')
    parser.add_argument('-l',
                        help=('show dataset in this file'),
                        required=False,
                        dest='show_from_file')
    parser.add_argument('-H',
                        help=('show trading history dataset in this file'),
                        required=False,
                        dest='show_history_from_file')
    parser.add_argument(
        '-E',
        help=('show trading performance report dataset in this file'),
        required=False,
        dest='show_report_from_file')
    parser.add_argument(
        '-L',
        help=('restore an algorithm-ready dataset file back into redis'),
        required=False,
        dest='restore_algo_file')
    parser.add_argument('-f',
                        help=('run in mode: prepare dataset from '
                              'redis key or s3 key'),
                        required=False,
                        dest='prepare_mode',
                        action='store_true')
    parser.add_argument(
        '-J',
        help=('plot action - after preparing you can use: '
              '-J show to open the image (good for debugging)'),
        required=False,
        dest='plot_action')
    parser.add_argument(
        '-b',
        help=('run a backtest using a dataset specified as a '
              'file path, s3 key, or redis key, for example: '
              'file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
              's3://algoready/SPY-latest.json or '
              'redis://SPY-latest'),
        required=False,
        dest='backtest_loc')
    parser.add_argument('-B',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-C',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument(
        '-w',
        help=('optional - flag for publishing an algorithm job '
              'using Celery to the ae workers'),
        required=False,
        dest='run_on_engine',
        action='store_true')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-K',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-Z',
                        help=('optional - s3 secure: default False'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-s',
                        help=('optional - start date: YYYY-MM-DD'),
                        required=False,
                        dest='start_date')
    parser.add_argument('-n',
                        help=('optional - end date: YYYY-MM-DD'),
                        required=False,
                        dest='end_date')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-G',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument(
        '-g',
        help=('Path to a custom algorithm module file '
              'on disk. This module must have a single '
              'class that inherits from: '
              'https://github.com/AlgoTraders/stock-analysis-engine/'
              'blob/master/'
              'analysis_engine/algo.py Additionally you '
              'can find the Example-Minute-Algorithm here: '
              'https://github.com/AlgoTraders/stock-analysis-engine/'
              'blob/master/analysis_engine/mocks/'
              'example_algo_minute.py'),
        required=False,
        dest='run_algo_in_file')
    parser.add_argument('-p',
                        help=('optional - s3 bucket/file for trading history'),
                        required=False,
                        dest='algo_history_loc')
    parser.add_argument(
        '-o',
        help=('optional - s3 bucket/file for trading performance report'),
        required=False,
        dest='algo_report_loc')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-R',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument('-z',
                        help=('optional - strike price'),
                        required=False,
                        dest='strike')
    parser.add_argument(
        '-c',
        help=('optional - algorithm config_file path for setting '
              'up internal algorithm trading strategies and '
              'indicators'),
        required=False,
        dest='config_file')
    parser.add_argument(
        '-P',
        help=('optional - get pricing data if "1", disabled if "0"'),
        required=False,
        dest='get_pricing')
    parser.add_argument(
        '-N',
        help=('optional - get news data if "1", disabled if "0"'),
        required=False,
        dest='get_news')
    parser.add_argument(
        '-O',
        help=('optional - get options data if "1", disabled if "0"'),
        required=False,
        dest='get_options')
    parser.add_argument(
        '-i',
        help=('optional - ignore column names (comma separated)'),
        required=False,
        dest='ignore_columns')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    mode = 'prepare'
    plot_action = ae_consts.PLOT_ACTION_SHOW
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    path_to_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_secure = ae_consts.S3_SECURE
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = ae_consts.S3_KEY
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = ae_consts.REDIS_KEY
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    dataset_type = ae_consts.SA_DATASET_TYPE_ALGO_READY
    serialize_datasets = ae_consts.DEFAULT_SERIALIZED_DATASETS
    output_redis_key = None
    output_s3_bucket = None
    output_s3_key = None
    ignore_columns = None
    compress = False
    encoding = 'utf-8'
    slack_enabled = False
    slack_code_block = False
    slack_full_width = False
    verbose = False
    debug = False

    redis_serializer = 'json'
    redis_encoding = 'utf-8'
    s3_enabled = True
    redis_enabled = True

    run_on_engine = False
    show_from_file = None
    show_history_from_file = None
    show_report_from_file = None
    restore_algo_file = None
    backtest_loc = None
    use_custom_algo = False
    algo_history_loc = 's3://algohistory'
    algo_report_loc = 's3://algoreport'
    algo_extract_loc = 's3://algoready'

    use_balance = 5000.0
    use_commission = 6.0
    auto_fill = True
    use_start_date = '2018-11-01 00:00:00'
    use_end_date = None
    use_config_file = None
    use_name = 'myalgo'

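    # CLI flags, when provided, override the defaults assigned above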
    if args.ticker:
        ticker = args.ticker.upper()
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
        s3_enabled = True
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.prepare_mode:
        mode = ae_consts.SA_MODE_PREPARE
    if args.ignore_columns:
        ignore_columns_org = args.ignore_columns
        ignore_columns = ignore_columns_org.split(",")
    if args.plot_action:
        if str(args.plot_action).lower() == 'show':
            plot_action = ae_consts.PLOT_ACTION_SHOW
        elif str(args.plot_action).lower() == 's3':
            plot_action = ae_consts.PLOT_ACTION_SAVE_TO_S3
        elif str(args.plot_action).lower() == 'save':
            plot_action = ae_consts.PLOT_ACTION_SAVE_AS_FILE
        else:
            plot_action = ae_consts.PLOT_ACTION_SHOW
            log.warning(f'unsupported plot_action: {args.plot_action}')

    if args.debug:
        debug = True

    if args.algo_extract_loc:
        mode = ae_consts.SA_MODE_EXTRACT
    if args.show_from_file:
        show_from_file = args.show_from_file
        mode = ae_consts.SA_MODE_SHOW_DATASET
    if args.show_history_from_file:
        show_history_from_file = args.show_history_from_file
        mode = ae_consts.SA_MODE_SHOW_HISTORY_DATASET
    if args.show_report_from_file:
        show_report_from_file = args.show_report_from_file
        mode = ae_consts.SA_MODE_SHOW_REPORT_DATASET
    if args.restore_algo_file:
        restore_algo_file = args.restore_algo_file
        mode = ae_consts.SA_MODE_RESTORE_REDIS_DATASET
    if args.run_algo_in_file:
        mode = ae_consts.SA_MODE_RUN_ALGO
    if args.backtest_loc:
        mode = ae_consts.SA_MODE_RUN_ALGO
    if args.start_date:
        try:
            use_start_date = f'{str(args.start_date)} 00:00:00'
            datetime.datetime.strptime(args.start_date,
                                       ae_consts.COMMON_DATE_FORMAT)
        except Exception as e:
            msg = ('please use a start date formatted as: '
                   f'{ae_consts.COMMON_DATE_FORMAT}\n'
                   f'error was: {e}')
            log.error(msg)
            sys.exit(1)
        # end of testing for a valid date
    # end of args.start_date
    if args.end_date:
        try:
            use_end_date = f'{str(args.end_date)} 00:00:00'
            datetime.datetime.strptime(args.end_date,
                                       ae_consts.COMMON_DATE_FORMAT)
        except Exception as e:
            msg = ('please use an end date formatted as: '
                   f'{ae_consts.COMMON_DATE_FORMAT}\n'
                   f'error was: {e}')
            log.error(msg)
            sys.exit(1)
        # end of testing for a valid date
    # end of args.end_date
    if args.config_file:
        use_config_file = args.config_file
        if not os.path.exists(use_config_file):
            log.error(
                f'Failed: unable to find config file: -c {use_config_file}')
            sys.exit(1)

    config_dict = None
    load_from_s3_bucket = None
    load_from_s3_key = None
    load_from_redis_key = None
    load_from_file = None
    load_compress = False
    load_publish = True
    load_config = None
    report_redis_key = None
    report_s3_bucket = None
    report_s3_key = None
    report_file = None
    report_compress = False
    report_publish = False
    report_config = None
    history_redis_key = None
    history_s3_bucket = None
    history_s3_key = None
    history_file = None
    history_compress = False
    history_publish = False
    history_config = None
    extract_redis_key = None
    extract_s3_bucket = None
    extract_s3_key = None
    extract_file = None
    extract_save_dir = None
    extract_compress = False
    extract_publish = True
    extract_config = None
    publish_to_slack = False
    publish_to_s3 = True
    publish_to_redis = False
    use_timeseries = 'day'
    use_trade_strategy = 'count'

    valid = False
    required_task = False
    task_name = None
    work = {}
    path_to_tasks = 'analysis_engine.work_tasks'
    if mode == ae_consts.SA_MODE_PREPARE:
        task_name = (f'{path_to_tasks}.'
                     'prepare_pricing_dataset.prepare_pricing_dataset')
        work = api_requests.build_prepare_dataset_request()
        if output_s3_key:
            work['prepared_s3_key'] = output_s3_key
        if output_s3_bucket:
            work['prepared_s3_bucket'] = output_s3_bucket
        if output_redis_key:
            work['prepared_redis_key'] = output_redis_key
        work['ignore_columns'] = ignore_columns
        valid = True
        required_task = True
    elif mode == ae_consts.SA_MODE_EXTRACT:
        if args.algo_extract_loc:
            algo_extract_loc = args.algo_extract_loc
            if ('file:/' not in algo_extract_loc
                    and 's3://' not in algo_extract_loc
                    and 'redis://' not in algo_extract_loc):
                log.error(
                    'invalid -e <extract_to_file_or_s3_key_or_redis_key> '
                    'specified. please use either: '
                    '-e file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-e s3://algoready/SPY-latest.json or '
                    '-e redis://SPY-latest')
                sys.exit(1)
            if 's3://' in algo_extract_loc:
                extract_s3_bucket = algo_extract_loc.split('/')[-2]
                extract_s3_key = algo_extract_loc.split('/')[-1]
            elif 'redis://' in algo_extract_loc:
                extract_redis_key = algo_extract_loc.split('/')[-1]
            elif 'file:/' in algo_extract_loc:
                extract_file = algo_extract_loc.split(':')[-1]
        # end of parsing supported transport for loading

        use_custom_algo = True
    elif mode == ae_consts.SA_MODE_SHOW_DATASET:
        examine_dataset_in_file(ticker=ticker, path_to_file=show_from_file)
        log.info(f'done showing {ticker} dataset from file={show_from_file}')
        sys.exit(0)
    elif mode == ae_consts.SA_MODE_SHOW_HISTORY_DATASET:
        examine_dataset_in_file(
            ticker=ticker,
            dataset_type=ae_consts.SA_DATASET_TYPE_TRADING_HISTORY,
            path_to_file=show_history_from_file)
        log.info(f'done showing trading history {ticker} dataset from '
                 f'file={show_history_from_file}')
        sys.exit(0)
    elif mode == ae_consts.SA_MODE_SHOW_REPORT_DATASET:
        examine_dataset_in_file(
            ticker=ticker,
            dataset_type=ae_consts.SA_DATASET_TYPE_TRADING_REPORT,
            path_to_file=show_report_from_file)
        log.info(
            f'done showing trading performance report {ticker} dataset from '
            f'file={show_report_from_file}')
        sys.exit(0)
    elif mode == ae_consts.SA_MODE_RESTORE_REDIS_DATASET:
        restore_missing_dataset_values_from_algo_ready_file(
            ticker=ticker,
            path_to_file=restore_algo_file,
            redis_address=redis_address,
            redis_password=redis_password,
            redis_db=redis_db,
            output_redis_db=redis_db,
            dataset_type=ae_consts.SA_DATASET_TYPE_ALGO_READY,
            serialize_datasets=ae_consts.DEFAULT_SERIALIZED_DATASETS)
        log.info(
            f'done restoring {ticker} dataset from file={restore_algo_file} '
            f'into redis_db={redis_db}')
        sys.exit(0)
    elif mode == ae_consts.SA_MODE_RUN_ALGO:
        if args.run_algo_in_file:
            if not os.path.exists(args.run_algo_in_file):
                log.error(
                    f'missing algorithm module file: {args.run_algo_in_file}')
                sys.exit(1)

        if args.backtest_loc:
            backtest_loc = args.backtest_loc
            if ('file:/' not in backtest_loc and 's3://' not in backtest_loc
                    and 'redis://' not in backtest_loc):
                log.error(
                    'invalid -b <backtest dataset file> specified. '
                    f'{backtest_loc} '
                    'please use either: '
                    '-b file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-b s3://algoready/SPY-latest.json or '
                    '-b redis://SPY-latest')
                sys.exit(1)
            if 's3://' in backtest_loc:
                load_from_s3_bucket = backtest_loc.split('/')[-2]
                load_from_s3_key = backtest_loc.split('/')[-1]
            elif 'redis://' in backtest_loc:
                load_from_redis_key = backtest_loc.split('/')[-1]
            elif 'file:/' in backtest_loc:
                load_from_file = backtest_loc.split(':')[-1]
            load_publish = True
        # end of parsing supported transport - loading an algo-ready

        if args.algo_history_loc:
            algo_history_loc = args.algo_history_loc
            if ('file:/' not in algo_history_loc
                    and 's3://' not in algo_history_loc
                    and 'redis://' not in algo_history_loc):
                log.error(
                    'invalid -p <trading history dataset file> specified. '
                    f'{algo_history_loc} '
                    'please use either: '
                    '-p file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-p s3://algoready/SPY-latest.json or '
                    '-p redis://SPY-latest')
                sys.exit(1)
            if 's3://' in algo_history_loc:
                history_s3_bucket = algo_history_loc.split('/')[-2]
                history_s3_key = algo_history_loc.split('/')[-1]
            elif 'redis://' in algo_history_loc:
                history_redis_key = algo_history_loc.split('/')[-1]
            elif 'file:/' in algo_history_loc:
                history_file = algo_history_loc.split(':')[-1]
            history_publish = True
        # end of parsing supported transport - trading history

        if args.algo_report_loc:
            algo_report_loc = args.algo_report_loc
            if ('file:/' not in algo_report_loc
                    and 's3://' not in algo_report_loc
                    and 'redis://' not in algo_report_loc):
                log.error(
                    'invalid -o <trading performance report file> specified. '
                    f'{algo_report_loc} '
                    'please use either: '
                    '-o file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-o s3://algoready/SPY-latest.json or '
                    '-o redis://SPY-latest')
                sys.exit(1)
            if 's3://' in algo_report_loc:
                report_s3_bucket = algo_report_loc.split('/')[-2]
                report_s3_key = algo_report_loc.split('/')[-1]
            elif 'redis://' in algo_report_loc:
                report_redis_key = algo_report_loc.split('/')[-1]
            elif 'file:/' in algo_report_loc:
                report_file = algo_report_loc.split(':')[-1]
            report_publish = True
        # end of parsing supported transport - trading performance report

        if args.algo_extract_loc:
            algo_extract_loc = args.algo_extract_loc
            if ('file:/' not in algo_extract_loc
                    and 's3://' not in algo_extract_loc
                    and 'redis://' not in algo_extract_loc):
                log.error(
                    'invalid -e <algorithm-ready dataset file> specified. '
                    f'{algo_extract_loc} '
                    'please use either: '
                    '-e file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-e s3://algoready/SPY-latest.json or '
                    '-e redis://SPY-latest')
                sys.exit(1)
            if 's3://' in algo_extract_loc:
                extract_s3_bucket = algo_extract_loc.split('/')[-2]
                extract_s3_key = algo_extract_loc.split('/')[-1]
            elif 'redis://' in algo_extract_loc:
                extract_redis_key = algo_extract_loc.split('/')[-1]
            elif 'file:/' in algo_extract_loc:
                extract_file = algo_extract_loc.split(':')[-1]
            extract_publish = True
        # end of parsing supported transport - extract algorithm-ready

        use_custom_algo = True
    # end of set up for backtest
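
    # note: the file:/ s3:// and redis:// branches above all follow the
    # same split convention; a hypothetical helper (not part of the
    # original module) capturing it:
    #
    #     def parse_location(loc):
    #         if 's3://' in loc:
    #             # s3://bucket/key -> bucket is the 2nd-to-last segment
    #             return ('s3', loc.split('/')[-2], loc.split('/')[-1])
    #         elif 'redis://' in loc:
    #             return ('redis', None, loc.split('/')[-1])
    #         elif 'file:/' in loc:
    #             return ('file', None, loc.split(':')[-1])
    #         raise ValueError(f'unsupported location: {loc}')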

    if use_custom_algo:

        if args.run_on_engine:
            run_on_engine = True
            log.info('starting algo on the engine')
        else:
            log.info('starting algo')

        algo_res = run_custom_algo.run_custom_algo(
            mod_path=args.run_algo_in_file,
            ticker=ticker,
            balance=use_balance,
            commission=use_commission,
            start_date=use_start_date,
            end_date=use_end_date,
            config_file=use_config_file,
            name=use_name,
            auto_fill=auto_fill,
            config_dict=config_dict,
            load_from_s3_bucket=load_from_s3_bucket,
            load_from_s3_key=load_from_s3_key,
            load_from_redis_key=load_from_redis_key,
            load_from_file=load_from_file,
            load_compress=load_compress,
            load_publish=load_publish,
            load_config=load_config,
            report_redis_key=report_redis_key,
            report_s3_bucket=report_s3_bucket,
            report_s3_key=report_s3_key,
            report_file=report_file,
            report_compress=report_compress,
            report_publish=report_publish,
            report_config=report_config,
            history_redis_key=history_redis_key,
            history_s3_bucket=history_s3_bucket,
            history_s3_key=history_s3_key,
            history_file=history_file,
            history_compress=history_compress,
            history_publish=history_publish,
            history_config=history_config,
            extract_redis_key=extract_redis_key,
            extract_s3_bucket=extract_s3_bucket,
            extract_s3_key=extract_s3_key,
            extract_file=extract_file,
            extract_save_dir=extract_save_dir,
            extract_compress=extract_compress,
            extract_publish=extract_publish,
            extract_config=extract_config,
            publish_to_slack=publish_to_slack,
            publish_to_s3=publish_to_s3,
            publish_to_redis=publish_to_redis,
            dataset_type=dataset_type,
            serialize_datasets=serialize_datasets,
            compress=compress,
            encoding=encoding,
            redis_enabled=redis_enabled,
            redis_key=redis_key,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=s3_enabled,
            s3_key=s3_key,
            s3_address=s3_address,
            s3_bucket=s3_bucket_name,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=slack_enabled,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            dataset_publish_extract=extract_publish,
            dataset_publish_history=history_publish,
            dataset_publish_report=report_publish,
            run_on_engine=run_on_engine,
            auth_url=broker_url,
            backend_url=backend_url,
            include_tasks=include_tasks,
            ssl_options=ssl_options,
            transport_options=transport_options,
            path_to_config_module=path_to_config_module,
            timeseries=use_timeseries,
            trade_strategy=use_trade_strategy,
            verbose=verbose)

        show_label = f'algo.name={use_name}'
        show_extract = f'{algo_extract_loc}'
        show_history = f'{algo_history_loc}'
        show_report = f'{algo_report_loc}'
        base_label = (f'load={args.run_algo_in_file} extract={show_extract} '
                      f'history={show_history} report={show_report}')
        show_label = (
            f'{ticker} running in engine '
            f'task_id={algo_res["rec"].get("task_id", "missing-task-id")} '
            f'{base_label}')
        if not run_on_engine:
            algo_trade_history_recs = algo_res['rec'].get('history', [])
            show_label = (f'{ticker} algo.name={use_name} {base_label} '
                          f'trade_history_len={len(algo_trade_history_recs)}')
        if args.debug:
            log.info(f'algo_res={algo_res}')
            if algo_res['status'] == ae_consts.SUCCESS:
                log.info(
                    f'{ae_consts.get_status(status=algo_res["status"])} - '
                    f'done running {show_label}')
            else:
                log.error(
                    f'{ae_consts.get_status(status=algo_res["status"])} - '
                    f'done running {show_label}')
        else:
            if algo_res['status'] == ae_consts.SUCCESS:
                log.info(
                    f'{ae_consts.get_status(status=algo_res["status"])} - '
                    f'done running {show_label}')
            else:
                log.error(f'run_custom_algo returned error: {algo_res["err"]}')
                sys.exit(1)
        # end of running the custom algo handler

        if mode == ae_consts.SA_MODE_EXTRACT:
            log.info(f'done extracting dataset - {ticker}')
        elif mode == ae_consts.SA_MODE_RUN_ALGO:
            log.info(f'done running algo - {ticker}')

        sys.exit(0)
    # end of handling mode-specific arg assignments

    # sanity checking the work and task are valid
    if not valid:
        log.error('usage error: missing a supported mode: '
                  '-f (prepare a dataset)')
        sys.exit(1)
    if required_task and not task_name:
        log.error('usage error: missing a supported task_name')
        sys.exit(1)
    # end of sanity checks

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_key'] = s3_key
    work['redis_key'] = redis_key
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['s3_enabled'] = s3_enabled
    work['redis_enabled'] = redis_enabled
    work['debug'] = debug
    work['label'] = f'ticker={ticker}'

    task_res = None
    if ae_consts.is_celery_disabled():
        work['celery_disabled'] = True
        log.debug(f'starting without celery work={ae_consts.ppj(work)}')
        if mode == ae_consts.SA_MODE_PREPARE:
            task_res = prep_dataset.prepare_pricing_dataset(work)

        if debug:
            log.info(
                f'done - result={ae_consts.ppj(task_res)} task={task_name} '
                f'status={ae_consts.get_status(status=task_res["status"])} '
                f'err={task_res["err"]} label={work["label"]}')
        else:
            log.info(
                f'done - result task={task_name} '
                f'status={ae_consts.get_status(status=task_res["status"])} '
                f'err={task_res["err"]} label={work["label"]}')

        if task_res['status'] == ae_consts.SUCCESS:
            image_res = None
            label = work['label']
            ticker = work['ticker']
            if plot_action == ae_consts.PLOT_ACTION_SHOW:
                log.info('showing plot')
                """
                minute_key = f'{redis_key}_minute'
                minute_df_res = build_df.build_df_from_redis(
                    label=label,
                    address=redis_address,
                    db=redis_db,
                    key=minute_key)

                minute_df = None
                if (
                        minute_df_res['status'] == ae_consts.SUCCESS
                        and minute_df_res['rec']['valid_df']):
                    minute_df = minute_df_res['rec']['data']
                    print(minute_df.columns.values)
                    column_list = [
                        'close',
                        'date'
                    ]
                """
                today_str = datetime.datetime.now().strftime('%Y-%m-%d')
                # extract_req aliases work - updating the key here
                # also updates work before the extraction call
                extract_req = work
                extract_req['redis_key'] = f'{work["redis_key"]}_minute'
                extract_status, minute_df = \
                    extract_utils.extract_minute_dataset(
                        work_dict=extract_req)
                if extract_status == ae_consts.SUCCESS:
                    log.info(f'{label} - ticker={ticker} '
                             f'creating chart date={today_str}')
                    """
                    Plot Pricing with the Volume Overlay:
                    """
                    image_res = ae_charts.plot_overlay_pricing_and_volume(
                        log_label=label,
                        ticker=ticker,
                        date_format=ae_consts.IEX_MINUTE_DATE_FORMAT,
                        df=minute_df,
                        show_plot=True)
                    """
                    Plot the High-Low-Open-Close Pricing:
                    """
                    """
                    image_res = ae_charts.plot_hloc_pricing(
                        log_label=label,
                        ticker=ticker,
                        title=f'{ticker} - Minute Pricing - {today_str}',
                        df=minute_df,
                        show_plot=True)
                    """
                    """
                    Plot by custom columns in the DataFrame
                    """
                    """
                    column_list = minute_df.columns.values
                    column_list = [
                        'date',
                        'close',
                        'high',
                        'low',
                        'open'
                    ]
                    image_res = ae_charts.plot_df(
                        log_label=label,
                        title='Pricing Title',
                        column_list=column_list,
                        df=minute_df,
                        xcol='date',
                        xlabel='Date',
                        ylabel='Pricing',
                        show_plot=True)
                    """
            elif plot_action == ae_consts.PLOT_ACTION_SAVE_TO_S3:
                log.info('coming soon - support to save to s3')
            elif plot_action == ae_consts.PLOT_ACTION_SAVE_AS_FILE:
                log.info('coming soon - support to save as file')
            if image_res:
                log.info(f'{label} show plot - '
                         f'status={ae_consts.get_status(image_res["status"])} '
                         f'err={image_res["err"]}')
    else:
        log.info(f'connecting to broker={broker_url} backend={backend_url}')

        # Get the Celery app
        app = get_celery_app.get_celery_app(
            name=__name__,
            auth_url=broker_url,
            backend_url=backend_url,
            path_to_config_module=path_to_config_module,
            ssl_options=ssl_options,
            transport_options=transport_options,
            include_tasks=include_tasks)

        log.info(f'calling task={task_name} - work={ae_consts.ppj(work)}')
        job_id = app.send_task(task_name, (work, ))
        log.info(f'calling task={task_name} - success job_id={job_id}')


def publish_ticker_aggregate_from_s3():
    """publish_ticker_aggregate_from_s3

    Download all ticker data from S3 and publish its contents
    to Redis and back to S3
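
    Assuming this function is exposed as a console script (the
    entry-point name below is an assumption - check your packaging),
    a typical invocation looks like:

    ::

        publish_ticker_aggregate_from_s3 -t SPY -u YOUR_BUCKET \
            -c YOUR_COMPILED_BUCKET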

    """

    log.info('start - publish_ticker_aggregate_from_s3')

    parser = argparse.ArgumentParser(
        description=('Download and aggregate all ticker data, '
                     'and store it in S3 and Redis.'))
    parser.add_argument('-t', help=('ticker'), required=True, dest='ticker')
    parser.add_argument('-i',
                        help=('optional - ticker id '
                              'not used without a database'),
                        required=False,
                        dest='ticker_id')
    parser.add_argument('-l',
                        help=('optional - path to the log config file'),
                        required=False,
                        dest='log_config_path')
    parser.add_argument('-b',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-B',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-s',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-S',
                        help=('optional - s3 ssl or not'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-c',
                        help=('optional - s3 compiled bucket name'),
                        required=False,
                        dest='s3_compiled_bucket_name')
    parser.add_argument('-g',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument('-p',
                        help=('optional - redis_password'),
                        required=False,
                        dest='redis_password')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-n',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    ticker = TICKER
    ticker_id = TICKER_ID
    ssl_options = SSL_OPTIONS
    transport_options = TRANSPORT_OPTIONS
    broker_url = WORKER_BROKER_URL
    backend_url = WORKER_BACKEND_URL
    celery_config_module = WORKER_CELERY_CONFIG_MODULE
    include_tasks = INCLUDE_TASKS
    s3_access_key = S3_ACCESS_KEY
    s3_secret_key = S3_SECRET_KEY
    s3_region_name = S3_REGION_NAME
    s3_address = S3_ADDRESS
    s3_secure = S3_SECURE
    s3_bucket_name = S3_BUCKET
    s3_compiled_bucket_name = S3_COMPILED_BUCKET
    s3_key = S3_KEY
    redis_address = REDIS_ADDRESS
    redis_key = REDIS_KEY
    redis_password = REDIS_PASSWORD
    redis_db = REDIS_DB
    redis_expire = REDIS_EXPIRE
    debug = False

    if args.ticker:
        ticker = args.ticker.upper()
    if args.ticker_id:
        ticker_id = args.ticker_id
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.s3_compiled_bucket_name:
        s3_compiled_bucket_name = args.s3_compiled_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_password:
        redis_password = args.redis_password
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.debug:
        debug = True

    work = build_publish_ticker_aggregate_from_s3_request()

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_compiled_bucket'] = s3_compiled_bucket_name
    if args.keyname:
        work['s3_key'] = s3_key
        work['redis_key'] = redis_key
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['debug'] = debug
    work['label'] = 'ticker={}'.format(ticker)

    path_to_tasks = 'analysis_engine.work_tasks'
    task_name = ('{}.publish_ticker_aggregate_from_s3.'
                 'publish_ticker_aggregate_from_s3'.format(path_to_tasks))
    task_res = None
    if is_celery_disabled():
        work['celery_disabled'] = True
        log.debug('starting without celery work={}'.format(ppj(work)))
        task_res = task_publisher.publish_ticker_aggregate_from_s3(
            work_dict=work)
        if debug:
            log.info('done - result={} '
                     'task={} status={} '
                     'err={} label={}'.format(
                         ppj(task_res), task_name,
                         get_status(status=task_res['status']),
                         task_res['err'], work['label']))
        else:
            log.info('done - result '
                     'task={} status={} '
                     'err={} label={}'.format(
                         task_name, get_status(status=task_res['status']),
                         task_res['err'], work['label']))
        # if/else debug
    else:
        log.info('connecting to broker={} backend={}'.format(
            broker_url, backend_url))

        # Get the Celery app
        app = get_celery_app(name=__name__,
                             auth_url=broker_url,
                             backend_url=backend_url,
                             path_to_config_module=celery_config_module,
                             ssl_options=ssl_options,
                             transport_options=transport_options,
                             include_tasks=include_tasks)

        log.info('calling task={} - work={}'.format(task_name, ppj(work)))
        job_id = app.send_task(task_name, (work, ))
        log.info('calling task={} - success job_id={}'.format(
            task_name, job_id))
Example 5
# Disable celery log hijacking
# https://github.com/celery/celery/issues/2509
@celery.signals.setup_logging.connect
def setup_celery_logging(**kwargs):
    pass


log = log_utils.build_colorized_logger(name=consts.APP_NAME,
                                       log_config_path=consts.LOG_CONFIG_PATH)

log.info('start - {}'.format(consts.APP_NAME))

log.info('broker={} backend={} '
         'config={} include_tasks={}'.format(
             consts.WORKER_BROKER_URL, consts.WORKER_BACKEND_URL,
             consts.WORKER_CELERY_CONFIG_MODULE, consts.WORKER_TASKS))

# Get the Celery app from the project's get_celery_app module
app = get_celery_app.get_celery_app(
    name=consts.APP_NAME,
    path_to_config_module=consts.WORKER_CELERY_CONFIG_MODULE,
    auth_url=consts.WORKER_BROKER_URL,
    backend_url=consts.WORKER_BACKEND_URL,
    include_tasks=consts.INCLUDE_TASKS)

log.info('starting celery')
app.start()

log.info('end - {}'.format(consts.APP_NAME))
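
# a hedged launch sketch for this worker (the script path is a
# placeholder and the env var names mirror the consts used above):
#
#     export WORKER_BROKER_URL=redis://localhost:6379/11
#     export WORKER_BACKEND_URL=redis://localhost:6379/12
#     python ./start_workers.py
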
def run_custom_algo(
        mod_path,
        ticker='SPY',
        balance=50000,
        commission=6.0,
        start_date=None,
        end_date=None,
        name='myalgo',
        auto_fill=True,
        config_file=None,
        config_dict=None,
        load_from_s3_bucket=None,
        load_from_s3_key=None,
        load_from_redis_key=None,
        load_from_file=None,
        load_compress=False,
        load_publish=True,
        load_config=None,
        report_redis_key=None,
        report_s3_bucket=None,
        report_s3_key=None,
        report_file=None,
        report_compress=False,
        report_publish=True,
        report_config=None,
        history_redis_key=None,
        history_s3_bucket=None,
        history_s3_key=None,
        history_file=None,
        history_compress=False,
        history_publish=True,
        history_config=None,
        extract_redis_key=None,
        extract_s3_bucket=None,
        extract_s3_key=None,
        extract_file=None,
        extract_save_dir=None,
        extract_compress=False,
        extract_publish=True,
        extract_config=None,
        publish_to_s3=True,
        publish_to_redis=True,
        publish_to_slack=True,
        dataset_type=ae_consts.SA_DATASET_TYPE_ALGO_READY,
        serialize_datasets=ae_consts.DEFAULT_SERIALIZED_DATASETS,
        compress=False,
        encoding='utf-8',
        redis_enabled=True,
        redis_key=None,
        redis_address=None,
        redis_db=None,
        redis_password=None,
        redis_expire=None,
        redis_serializer='json',
        redis_encoding='utf-8',
        s3_enabled=True,
        s3_key=None,
        s3_address=None,
        s3_bucket=None,
        s3_access_key=None,
        s3_secret_key=None,
        s3_region_name=None,
        s3_secure=False,
        slack_enabled=False,
        slack_code_block=False,
        slack_full_width=False,
        timeseries=None,
        trade_strategy=None,
        verbose=False,
        debug=False,
        dataset_publish_extract=False,
        dataset_publish_history=False,
        dataset_publish_report=False,
        run_on_engine=False,
        auth_url=ae_consts.WORKER_BROKER_URL,
        backend_url=ae_consts.WORKER_BACKEND_URL,
        include_tasks=ae_consts.INCLUDE_TASKS,
        ssl_options=ae_consts.SSL_OPTIONS,
        transport_options=ae_consts.TRANSPORT_OPTIONS,
        path_to_config_module=ae_consts.WORKER_CELERY_CONFIG_MODULE,
        raise_on_err=True):
    """run_custom_algo

    Run a custom algorithm that derives the
    ``analysis_engine.algo.BaseAlgo`` class

    .. note:: Make sure to only have **1**
        class defined in an algo module. Imports from
        other modules should work just fine.
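
    A minimal module sketch that satisfies this rule (the ``process``
    override shown here is illustrative - consult the ``BaseAlgo``
    docs for the exact hook signature):

    ::

        import analysis_engine.algo

        class MyAlgo(analysis_engine.algo.BaseAlgo):
            def process(self, algo_id, ticker, dataset):
                pass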

    **Algorithm arguments**

    :param mod_path: file path to custom
        algorithm class module
    :param ticker: ticker symbol
    :param balance: float - starting balance capital
        for creating buys and sells
    :param commission: float - cost per buy or sell
    :param name: string - name for tracking algorithm
        in the logs
    :param start_date: string - start date for backtest with
        format ``YYYY-MM-DD HH:MM:SS``
    :param end_date: end date for backtest with
        format ``YYYY-MM-DD HH:MM:SS``
    :param auto_fill: optional - boolean for auto filling
        buy and sell orders for backtesting
        (default is ``True``)
    :param config_file: path to a json file
        containing custom algorithm object
        member values (like indicator configuration and
        predict future date units ahead for a backtest)
    :param config_dict: optional - dictionary that
        can be passed to derived class implementations
        of: ``def load_from_config(config_dict=config_dict)``
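
    A minimal ``config_dict`` sketch (the keys are illustrative
    assumptions - your derived algo decides what it reads in
    ``load_from_config``):

    ::

        config_dict = {
            'timeseries': 'minute',
            'trade_strategy': 'count'
        }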

    **Timeseries**

    :param timeseries: optional - string to
        set ``day`` or ``minute`` backtesting
        or live trading
        (default is ``minute``)

    **Trading Strategy**

    :param trade_strategy: optional - string to
        set the type of ``Trading Strategy``
        for backtesting or live trading
        (default is ``count``)

    **Running Distributed Algorithms on the Engine Workers**

    :param run_on_engine: optional - boolean
        flag for publishing custom algorithms
        to Celery ae workers for distributing
        algorithm workloads
        (default is ``False``, which runs algos locally);
        required for distributing algorithms
    :param auth_url: Celery broker address
        (default is ``redis://localhost:6379/11``
        or ``analysis_engine.consts.WORKER_BROKER_URL``
        environment variable)
        this is required for distributing algorithms
    :param backend_url: Celery backend address
        (default is ``redis://localhost:6379/12``
        or ``analysis_engine.consts.WORKER_BACKEND_URL``
        environment variable)
        this is required for distributing algorithms
    :param include_tasks: list of modules containing tasks to add
        (default is ``analysis_engine.consts.INCLUDE_TASKS``)
    :param ssl_options: security options dictionary
        (default is ``analysis_engine.consts.SSL_OPTIONS``)
    :param transport_options: transport options dictionary
        (default is ``analysis_engine.consts.TRANSPORT_OPTIONS``)
    :param path_to_config_module: config module for advanced
        Celery worker connectivity requirements
        (default is ``analysis_engine.work_tasks.celery_config``
        or ``analysis_engine.consts.WORKER_CELERY_CONFIG_MODULE``)
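
    A hedged sketch for distributing a backtest to the engine workers
    (the module path is a placeholder and the urls are the documented
    defaults):

    ::

        run_custom_algo(
            mod_path='/path/to/my_algo.py',
            ticker='SPY',
            run_on_engine=True,
            auth_url='redis://localhost:6379/11',
            backend_url='redis://localhost:6379/12')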

    **Load Algorithm-Ready Dataset From Source**

    Use these arguments to load algorithm-ready datasets
    from supported sources (file, s3 or redis)

    :param load_from_s3_bucket: optional - string to load the algo from
        a previously-created s3 bucket holding an s3 key with an
        algorithm-ready dataset for use with:
        ``handle_data``
    :param load_from_s3_key: optional - string to load the algo from
        a previously-created s3 key holding an
        algorithm-ready dataset for use with:
        ``handle_data``
    :param load_from_redis_key: optional - string to load the algo from
        a previously-created redis key holding an
        algorithm-ready dataset for use with:
        ``handle_data``
    :param load_from_file: optional - string path to
        a previously-created local file holding an
        algorithm-ready dataset for use with:
        ``handle_data``
    :param load_compress: optional - boolean
        flag for toggling to decompress
        or not when loading an algorithm-ready
        dataset (``True`` means the dataset
        must be decompressed to load correctly inside
        an algorithm to run a backtest)
    :param load_publish: boolean - toggle publishing
        the load progress to slack, s3, redis or a file
        (default is ``True``)
    :param load_config: optional - dictionary
        for setting member variables to load an
        algorithm-ready dataset from
        a file, s3 or redis
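
    A hedged sketch for loading a previously-extracted dataset from a
    local file (the path is a placeholder):

    ::

        run_custom_algo(
            mod_path='/path/to/my_algo.py',
            ticker='SPY',
            load_from_file='/tmp/SPY-latest-algo-ready.json')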

    **Publishing Control Bool Flags**

    :param publish_to_s3: optional - boolean for
        toggling publishing to s3 on/off
        (default is ``True``)
    :param publish_to_redis: optional - boolean for
        publishing to redis on/off
        (default is ``True``)
    :param publish_to_slack: optional - boolean for
        publishing to slack
        (default is ``True``)
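
    A hedged sketch for running fully locally with all publishing
    disabled:

    ::

        run_custom_algo(
            mod_path='/path/to/my_algo.py',
            publish_to_s3=False,
            publish_to_redis=False,
            publish_to_slack=False)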

    **Algorithm Trade History Arguments**

    :param history_redis_key: optional - string
        where the algorithm trading history will be stored in
        a redis key
    :param history_s3_bucket: optional - string
        where the algorithm trading history will be stored in
        an s3 bucket
    :param history_s3_key: optional - string
        where the algorithm trading history will be stored in
        an s3 key
    :param history_file: optional - string key
        where the algorithm trading history will be stored in
        a file serialized as a json-string
    :param history_compress: optional - boolean
        flag for compressing the trading history
        dataset on publish (``True`` means the dataset
        will be compressed)
    :param history_publish: boolean - toggle publishing
        the history to s3, redis or a file
        (default is ``True``)
    :param history_config: optional - dictionary
        for setting member variables to publish
        an algo ``trade history`` to s3, redis, a file
        or slack

    **Algorithm Trade Performance Report Arguments (Output Dataset)**

    :param report_redis_key: optional - string
        where the algorithm ``trading performance report`` (report)
        will be stored in a redis key
    :param report_s3_bucket: optional - string
        where the algorithm report will be stored in
        an s3 bucket
    :param report_s3_key: optional - string
        where the algorithm report will be stored in
        an s3 key
    :param report_file: optional - string key
        where the algorithm report will be stored in
        a file serialized as a json-string
    :param report_compress: optional - boolean
        flag for compressing the ``trading performance report``
        dataset on publish (``True`` means the dataset
        will be compressed)
    :param report_publish: boolean - toggle publishing
        the ``trading performance report`` s3, redis or a file
        (default is ``True``)
    :param report_config: optional - dictionary
        for setting member variables to publish
        an algo ``trading performance report`` to s3,
        redis, a file or slack

    **Extract an Algorithm-Ready Dataset Arguments**

    :param extract_redis_key: optional - string
        where the extracted algorithm-ready dataset will be stored in
        a redis key
    :param extract_s3_bucket: optional - string
        where the extracted algorithm-ready dataset will be stored in
        an s3 bucket
    :param extract_s3_key: optional - string
        where the extracted algorithm-ready dataset will be stored in
        an s3 key
    :param extract_file: optional - string key
        where the extracted algorithm-ready dataset will be stored in
        a file serialized as a json-string
    :param extract_save_dir: optional - string path to
        auto-generated files from the algo
    :param extract_compress: optional - boolean
        flag for compressing the extracted ``algorithm-ready``
        dataset on publish (``True`` means the dataset
        will be compressed)
    :param extract_publish: boolean - toggle publishing
        the used ``algorithm-ready dataset`` to s3, redis or a file
        (default is ``True``)
    :param extract_config: optional - dictionary
        for setting member variables to publish
        an algo ``algorithm-ready dataset`` to s3,
        redis, a file or slack
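
    A hedged sketch that writes all three output datasets to local
    files (the paths are placeholders):

    ::

        run_custom_algo(
            mod_path='/path/to/my_algo.py',
            ticker='SPY',
            extract_file='/tmp/SPY-algo-ready.json',
            history_file='/tmp/SPY-history.json',
            report_file='/tmp/SPY-report.json')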

    **Dataset Arguments**

    :param dataset_type: optional - dataset type
        (default is ``SA_DATASET_TYPE_ALGO_READY``)
    :param serialize_datasets: optional - list of dataset names to
        deserialize in the dataset
        (default is ``DEFAULT_SERIALIZED_DATASETS``)
    :param encoding: optional - string for data encoding

    **Publish Algorithm Datasets to S3, Redis or a File**

    :param dataset_publish_extract: optional - bool
        for publishing the algorithm's
        ``algorithm-ready``
        dataset to: s3, redis or file
    :param dataset_publish_history: optional - bool
        for publishing the algorithm's
        ``trading history``
        dataset to: s3, redis or file
    :param dataset_publish_report: optional - bool
        for publishing the algorithm's
        ``trading performance report``
        dataset to: s3, redis or file

    **Redis connectivity arguments**

    :param redis_enabled: bool - toggle for auto-caching all
        datasets in Redis
        (default is ``True``)
    :param redis_key: string - key to save the data in redis
        (default is ``None``)
    :param redis_address: Redis connection string format: ``host:port``
        (default is ``localhost:6379``)
    :param redis_db: Redis db to use
        (default is ``0``)
    :param redis_password: optional - Redis password
        (default is ``None``)
    :param redis_expire: optional - Redis expire value
        (default is ``None``)
    :param redis_serializer: not used yet - support for future
        pickle objects in redis
    :param redis_encoding: format of the encoded key in redis

    **Minio (S3) connectivity arguments**

    :param s3_enabled: bool - toggle for auto-archiving on Minio (S3)
        (default is ``True``)
    :param s3_key: string - key to save the data in s3
        (default is ``None``)
    :param s3_address: Minio S3 connection string format: ``host:port``
        (default is ``localhost:9000``)
    :param s3_bucket: S3 Bucket for storing the artifacts
        (default is ``dev``) which should be viewable on a browser:
        http://localhost:9000/minio/dev/
    :param s3_access_key: S3 Access key
        (default is ``trexaccesskey``)
    :param s3_secret_key: S3 Secret key
        (default is ``trex123321``)
    :param s3_region_name: S3 region name
        (default is ``us-east-1``)
    :param s3_secure: Transmit using tls encryption
        (default is ``False``)

    **Slack arguments**

    :param slack_enabled: optional - boolean for
        publishing to slack
    :param slack_code_block: optional - boolean for
        publishing as a code block in slack
    :param slack_full_width: optional - boolean for
        publishing to slack using the full
        width allowed

    **Debugging arguments**

    :param debug: optional - bool for debug tracking
    :param verbose: optional - bool for increasing
        logging
    :param raise_on_err: boolean - set this to ``False`` in production
        to ensure exceptions do not interrupt services.
        With the default (``True``) any exception from the library
        or your own algorithm is raised immediately, exiting
        the backtest.
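
    A hedged production-style sketch (the module path is a
    placeholder):

    ::

        algo_res = run_custom_algo(
            mod_path='/path/to/my_algo.py',
            raise_on_err=False,
            verbose=False)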
    """

    module_name = 'BaseAlgo'
    custom_algo_module = None
    new_algo_object = None
    use_custom_algo = False
    found_algo_module = True  # BaseAlgo is always available
    should_publish_extract_dataset = False
    should_publish_history_dataset = False
    should_publish_report_dataset = False
    use_config_file = None
    use_config_dict = config_dict
    if config_file:
        if os.path.exists(config_file):
            use_config_file = config_file
            if not config_dict:
                try:
                    # use a context manager so the file handle is closed
                    with open(config_file, 'r') as cf:
                        use_config_dict = json.loads(cf.read())
                except Exception as e:
                    msg = (f'failed parsing json config_file={config_file} '
                           f'with ex={e}')
                    log.error(msg)
                    raise Exception(msg)
    # end of loading the config_file

    err = None
    if mod_path:
        module_name = mod_path.split('/')[-1]
        loader = importlib.machinery.SourceFileLoader(module_name, mod_path)
        custom_algo_module = types.ModuleType(loader.name)
        loader.exec_module(custom_algo_module)
        use_custom_algo = True

        # reset so the scan below must actually find the class -
        # otherwise the not-found error path can never trigger
        found_algo_module = False
        for member in inspect.getmembers(custom_algo_module):
            if module_name in str(member):
                found_algo_module = True
                break
        # for all members in this custom module file
    # if loading a custom algorithm module from a file on disk

    if not found_algo_module:
        err = (f'unable to find custom algorithm module={custom_algo_module}')
        if mod_path:
            err = (
                'analysis_engine.run_custom_algo.run_custom_algo was unable '
                f'to find custom algorithm module={custom_algo_module} with '
                f'provided path to \n file: {mod_path} \n'
                '\n'
                'Please confirm '
                'that the class inherits from the BaseAlgo class like:\n'
                '\n'
                'import analysis_engine.algo\n'
                'class MyAlgo(analysis_engine.algo.BaseAlgo):\n '
                '\n'
                'If it is then please file an issue on github:\n '
                'https://github.com/AlgoTraders/stock-analysis-engine/'
                'issues/new \n\nFor now this error results in a shutdown'
                '\n')
        # if mod_path set

        if verbose or debug:
            log.error(err)
        return build_result.build_result(status=ae_consts.ERR,
                                         err=err,
                                         rec=None)
    # if not found_algo_module

    use_start_date = start_date
    use_end_date = end_date
    if not use_end_date:
        end_date = datetime.datetime.utcnow()
        use_end_date = end_date.strftime(ae_consts.COMMON_TICK_DATE_FORMAT)
    if not use_start_date:
        # end_date may still be a string here if the caller passed
        # end_date without start_date - normalize it to a datetime first
        if not isinstance(end_date, datetime.datetime):
            end_date = datetime.datetime.strptime(
                use_end_date, ae_consts.COMMON_TICK_DATE_FORMAT)
        start_date = end_date - datetime.timedelta(days=75)
        use_start_date = start_date.strftime(ae_consts.COMMON_TICK_DATE_FORMAT)
        if verbose:
            log.info(
                f'{name} {ticker} setting default start_date={use_start_date}')

    # Load an algorithm-ready dataset from:
    # file, s3, or redis
    if not load_config:
        load_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=load_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label=f'load-{name}')
        if load_from_file:
            load_config['output_file'] = load_from_file
        if load_from_redis_key:
            load_config['redis_key'] = load_from_redis_key
            load_config['redis_enabled'] = True
        if load_from_s3_bucket and load_from_s3_key:
            load_config['s3_bucket'] = load_from_s3_bucket
            load_config['s3_key'] = load_from_s3_key
            load_config['s3_enabled'] = True
    # end of building load_config dictionary if not already set

    # Automatically save all datasets to an algorithm-ready:
    # file, s3, or redis
    if not extract_config:
        extract_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=extract_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label=f'extract-{name}')
        should_publish_extract_dataset = False
        if extract_file:
            extract_config['output_file'] = extract_file
            should_publish_extract_dataset = True
        if extract_redis_key and publish_to_redis:
            extract_config['redis_key'] = extract_redis_key
            extract_config['redis_enabled'] = True
            should_publish_extract_dataset = True
        if extract_s3_bucket and extract_s3_key and publish_to_s3:
            extract_config['s3_bucket'] = extract_s3_bucket
            extract_config['s3_key'] = extract_s3_key
            extract_config['s3_enabled'] = True
            should_publish_extract_dataset = True
        else:
            extract_config['s3_enabled'] = False
    # end of building extract_config dictionary if not already set

    # Automatically save the trading performance report:
    # file, s3, or redis
    if not report_config:
        report_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=report_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label=f'report-{name}')
        should_publish_report_dataset = False
        if report_file:
            report_config['output_file'] = report_file
            should_publish_report_dataset = True
        if report_redis_key and publish_to_redis:
            report_config['redis_key'] = report_redis_key
            report_config['redis_enabled'] = True
            should_publish_report_dataset = True
        if report_s3_bucket and report_s3_key and publish_to_s3:
            report_config['s3_bucket'] = report_s3_bucket
            report_config['s3_key'] = report_s3_key
            report_config['s3_enabled'] = True
            should_publish_report_dataset = True
    # end of building report_config dictionary if not already set

    # Automatically save the trade history:
    # file, s3, or redis
    if not history_config:
        history_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=history_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label=f'history-{name}')
        should_publish_history_dataset = False
        if history_file:
            history_config['output_file'] = history_file
            should_publish_history_dataset = True
        if history_redis_key and publish_to_redis:
            history_config['redis_key'] = history_redis_key
            history_config['redis_enabled'] = True
            should_publish_history_dataset = True
        if history_s3_bucket and history_s3_key and publish_to_s3:
            history_config['s3_bucket'] = history_s3_bucket
            history_config['s3_key'] = history_s3_key
            history_config['s3_enabled'] = True
            should_publish_history_dataset = True
    # end of building history_config dictionary if not already set

    if verbose:
        remove_vals = ['s3_access_key', 's3_secret_key', 'redis_password']
        debug_extract_config = {}
        for k in extract_config:
            if k not in remove_vals:
                debug_extract_config[k] = extract_config[k]
        debug_report_config = {}
        for k in report_config:
            if k not in remove_vals:
                debug_report_config[k] = report_config[k]
        debug_history_config = {}
        for k in history_config:
            if k not in remove_vals:
                debug_history_config[k] = history_config[k]
        debug_load_config = {}
        for k in load_config:
            if k not in remove_vals:
                debug_load_config[k] = load_config[k]
        log.info(f'{name} {ticker} using extract config '
                 f'{ae_consts.ppj(debug_extract_config)}')
        log.info(f'{name} {ticker} using report config '
                 f'{ae_consts.ppj(debug_report_config)}')
        log.info(f'{name} {ticker} using trade history config '
                 f'{ae_consts.ppj(debug_history_config)}')
        log.info(f'{name} {ticker} using load config '
                 f'{ae_consts.ppj(debug_load_config)}')
        log.info(f'{name} {ticker} - building algo request')
    # end of verbose

    algo_req = build_algo_request.build_algo_request(
        ticker=ticker,
        balance=balance,
        commission=commission,
        start_date=use_start_date,
        end_date=use_end_date,
        timeseries=timeseries,
        trade_strategy=trade_strategy,
        config_file=use_config_file,
        config_dict=use_config_dict,
        load_config=load_config,
        history_config=history_config,
        report_config=report_config,
        extract_config=extract_config,
        label=name)

    algo_req['name'] = name
    algo_req['should_publish_extract_dataset'] = should_publish_extract_dataset
    algo_req['should_publish_history_dataset'] = should_publish_history_dataset
    algo_req['should_publish_report_dataset'] = should_publish_report_dataset

    algo_res = build_result.build_result(status=ae_consts.NOT_RUN,
                                         err=None,
                                         rec=None)

    if run_on_engine:
        rec = {'algo_req': algo_req, 'task_id': None}
        task_name = ('analysis_engine.work_tasks.'
                     'task_run_algo.task_run_algo')
        if verbose:
            log.info(f'starting distributed algo task={task_name}')
        elif debug:
            log.info(
                'starting distributed algo by publishing to '
                f'task={task_name} broker={auth_url} backend={backend_url}')

        # Get the Celery app
        app = get_celery_app.get_celery_app(
            name=__name__,
            auth_url=auth_url,
            backend_url=backend_url,
            path_to_config_module=path_to_config_module,
            ssl_options=ssl_options,
            transport_options=transport_options,
            include_tasks=include_tasks)

        if debug:
            log.info(f'calling distributed algo task={task_name} '
                     f'request={ae_consts.ppj(algo_req)}')
        elif verbose:
            log.info(f'calling distributed algo task={task_name}')

        job_id = app.send_task(task_name, (algo_req, ))
        if verbose:
            log.info(f'calling task={task_name} - success job_id={job_id}')
        rec['task_id'] = job_id
        algo_res = build_result.build_result(status=ae_consts.SUCCESS,
                                             err=None,
                                             rec=rec)
        return algo_res
    # end of run_on_engine

    if use_custom_algo:
        if verbose:
            log.info(
                f'inspecting {custom_algo_module} for class {module_name}')
        use_class_member_object = None
        for member in inspect.getmembers(custom_algo_module):
            if module_name in str(member):
                if verbose:
                    log.info(f'start {name} with {member[1]}')
                use_class_member_object = member
                break
        # end of searching the module members for the algo class

        if use_class_member_object:
            new_algo_object = use_class_member_object[1](**algo_req)
        else:
            err = ('did not find a derived analysis_engine.algo.BaseAlgo '
                   f'class in the module file={mod_path} '
                   f'for ticker={ticker} algo_name={name}')

            if verbose or debug:
                log.error(err)

            return build_result.build_result(status=ae_consts.ERR,
                                             err=err,
                                             rec=None)
        # end of finding a valid algorithm object
    else:
        new_algo_object = ae_algo.BaseAlgo(**algo_req)
    # if using a custom module path or the BaseAlgo

    if new_algo_object:
        # heads up - logging this might have passwords in the algo_req
        # log.debug(
        #     f'{name} algorithm request: {algo_req}')
        if verbose:
            log.info(f'{name} - run ticker={ticker} from {use_start_date} '
                     f'to {use_end_date}')
        algo_res = run_algo.run_algo(algo=new_algo_object,
                                     raise_on_err=raise_on_err,
                                     **algo_req)
        algo_res['algo'] = new_algo_object
        if verbose:
            log.info(f'{name} - done run ticker={ticker} from '
                     f'{use_start_date} to {use_end_date}')
        if custom_algo_module:
            if verbose:
                log.info(f'{name} - done run_algo '
                         f'custom_algo_module={custom_algo_module} '
                         f'module_name={module_name} ticker={ticker} '
                         f'from {use_start_date} to {use_end_date}')
        else:
            if verbose:
                log.info(f'{name} - done run_algo BaseAlgo ticker={ticker} '
                         f'from {use_start_date} to {use_end_date}')
    else:
        err = ('missing a derived analysis_engine.algo.BaseAlgo '
               f'class in the module file={mod_path} for ticker={ticker} '
               f'algo_name={name}')
        return build_result.build_result(status=ae_consts.ERR,
                                         err=err,
                                         rec=None)
    # end of finding a valid algorithm object

    algo = algo_res.get('algo', None)

    if not algo:
        err = (f'failed creating algorithm object - ticker={ticker} '
               f'status={ae_consts.get_status(status=algo_res["status"])} '
               f'error={algo_res["err"]} algo name={name} '
               f'custom_algo_module={custom_algo_module} '
               f'module_name={module_name} '
               f'from {use_start_date} to {use_end_date}')
        return build_result.build_result(status=ae_consts.ERR,
                                         err=err,
                                         rec=None)

    if should_publish_extract_dataset or dataset_publish_extract:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (extract_config['redis_address'] and extract_config['redis_db'] >= 0
                and extract_config['redis_key']):
            redis_log = (
                f'redis://{extract_config["redis_address"]}'
                f'@{extract_config["redis_db"]}/{extract_config["redis_key"]}')
            use_log += f' {redis_log}'
        else:
            extract_config['redis_enabled'] = False
        if (extract_config['s3_address'] and extract_config['s3_bucket']
                and extract_config['s3_key']):
            s3_log = (
                f's3://{extract_config["s3_address"]}'
                f'/{extract_config["s3_bucket"]}/{extract_config["s3_key"]}')
            use_log += f' {s3_log}'
        else:
            extract_config['s3_enabled'] = False
        if extract_config['output_file']:
            file_log = f'file:{extract_config["output_file"]}'
            use_log += f' {file_log}'

        if verbose:
            log.info(f'{name} - publish - start ticker={ticker} '
                     f'algorithm-ready {use_log}')

        publish_status = algo.publish_input_dataset(**extract_config)
        if publish_status != ae_consts.SUCCESS:
            msg = (
                'failed to publish algorithm-ready datasets '
                f'with status {ae_consts.get_status(status=publish_status)} '
                f'attempted to {use_log}')
            log.error(msg)
            return build_result.build_result(status=ae_consts.ERR,
                                             err=msg,
                                             rec=None)

        if verbose:
            log.info(f'{name} - publish - done ticker={ticker} '
                     f'algorithm-ready {use_log}')
    # if publish the algorithm-ready dataset

    if should_publish_history_dataset or dataset_publish_history:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (history_config['redis_address'] and history_config['redis_db'] >= 0
                and history_config['redis_key']):
            redis_log = (
                f'redis://{history_config["redis_address"]}'
                f'@{history_config["redis_db"]}/{history_config["redis_key"]}')
            use_log += f' {redis_log}'
        else:
            history_config['redis_enabled'] = False
        if (history_config['s3_address'] and history_config['s3_bucket']
                and history_config['s3_key']):
            s3_log = (
                f's3://{history_config["s3_address"]}'
                f'/{history_config["s3_bucket"]}/{history_config["s3_key"]}')
            use_log += f' {s3_log}'
        else:
            history_config['s3_enabled'] = False

        if history_config['output_file']:
            file_log = f'file:{history_config["output_file"]}'
            use_log += f' {file_log}'

        if verbose:
            log.info(f'{name} - publish - start ticker={ticker} trading '
                     f'history {use_log}')

        publish_status = algo.publish_trade_history_dataset(**history_config)
        if publish_status != ae_consts.SUCCESS:
            msg = (
                'failed to publish trading history datasets '
                f'with status {ae_consts.get_status(status=publish_status)} '
                f'attempted to {use_log}')
            log.error(msg)
            return build_result.build_result(status=ae_consts.ERR,
                                             err=msg,
                                             rec=None)

        if verbose:
            log.info(f'{name} - publish - done ticker={ticker} trading '
                     f'history {use_log}')
    # if publishing a trading history dataset

    if should_publish_report_dataset or dataset_publish_report:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (report_config['redis_address'] and report_config['redis_db'] >= 0
                and report_config['redis_key']):
            redis_log = (
                f'redis://{report_config["redis_address"]}'
                f'@{report_config["redis_db"]}/{report_config["redis_key"]}')
            use_log += f' {redis_log}'
        else:
            report_config['redis_enabled'] = False
        if (report_config['s3_address'] and report_config['s3_bucket']
                and report_config['s3_key']):
            s3_log = (
                f's3://{report_config["s3_address"]}'
                f'/{report_config["s3_bucket"]}/{report_config["s3_key"]}')
            use_log += f' {s3_log}'
        else:
            report_config['s3_enabled'] = False
        if report_config['output_file']:
            file_log = f'file:{report_config["output_file"]}'
            use_log += f' {file_log}'

        if verbose:
            log.info(
                f'{name} - publishing ticker={ticker} trading performance '
                f'report {use_log}')

        publish_status = algo.publish_report_dataset(**report_config)
        if publish_status != ae_consts.SUCCESS:
            msg = (
                'failed to publish trading performance report datasets '
                f'with status {ae_consts.get_status(status=publish_status)} '
                f'attempted to {use_log}')
            log.error(msg)
            return build_result.build_result(status=ae_consts.ERR,
                                             err=msg,
                                             rec=None)

        if verbose:
            log.info(
                f'{name} - publish - done ticker={ticker} trading performance '
                f'report {use_log}')
    # if publishing a trading performance report dataset

    if verbose:
        log.info(f'{name} - done publishing datasets for ticker={ticker} '
                 f'from {use_start_date} to {use_end_date}')

    return algo_res