def run_custom_algo(
        mod_path,
        ticker='SPY',
        balance=50000,
        commission=6.0,
        start_date=None,
        end_date=None,
        name='myalgo',
        auto_fill=True,
        config_file=None,
        config_dict=None,
        load_from_s3_bucket=None,
        load_from_s3_key=None,
        load_from_redis_key=None,
        load_from_file=None,
        load_compress=False,
        load_publish=True,
        load_config=None,
        report_redis_key=None,
        report_s3_bucket=None,
        report_s3_key=None,
        report_file=None,
        report_compress=False,
        report_publish=True,
        report_config=None,
        history_redis_key=None,
        history_s3_bucket=None,
        history_s3_key=None,
        history_file=None,
        history_compress=False,
        history_publish=True,
        history_config=None,
        extract_redis_key=None,
        extract_s3_bucket=None,
        extract_s3_key=None,
        extract_file=None,
        extract_save_dir=None,
        extract_compress=False,
        extract_publish=True,
        extract_config=None,
        publish_to_s3=True,
        publish_to_redis=True,
        publish_to_slack=True,
        dataset_type=ae_consts.SA_DATASET_TYPE_ALGO_READY,
        serialize_datasets=ae_consts.DEFAULT_SERIALIZED_DATASETS,
        compress=False,
        encoding='utf-8',
        redis_enabled=True,
        redis_key=None,
        redis_address=None,
        redis_db=None,
        redis_password=None,
        redis_expire=None,
        redis_serializer='json',
        redis_encoding='utf-8',
        s3_enabled=True,
        s3_key=None,
        s3_address=None,
        s3_bucket=None,
        s3_access_key=None,
        s3_secret_key=None,
        s3_region_name=None,
        s3_secure=False,
        slack_enabled=False,
        slack_code_block=False,
        slack_full_width=False,
        timeseries=None,
        trade_strategy=None,
        verbose=False,
        debug=False,
        dataset_publish_extract=False,
        dataset_publish_history=False,
        dataset_publish_report=False,
        run_on_engine=False,
        auth_url=ae_consts.WORKER_BROKER_URL,
        backend_url=ae_consts.WORKER_BACKEND_URL,
        include_tasks=ae_consts.INCLUDE_TASKS,
        ssl_options=ae_consts.SSL_OPTIONS,
        transport_options=ae_consts.TRANSPORT_OPTIONS,
        path_to_config_module=ae_consts.WORKER_CELERY_CONFIG_MODULE,
        raise_on_err=True):
    """run_custom_algo

    Run a custom algorithm that derives the
    ``analysis_engine.algo.BaseAlgo`` class

    .. note:: Make sure to only have **1**
        class defined in an algo module. Imports from
        other modules should work just fine.
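
    A minimal sketch of such a module (the class name and the
    ``load_from_config`` override below are illustrative only -
    ``load_from_config`` is the optional hook described under
    ``config_dict``):

    ::

        import analysis_engine.algo

        class MyAlgo(analysis_engine.algo.BaseAlgo):
            # optional hook - read custom member values
            # (indicator config, etc.) out of config_dict
            def load_from_config(self, config_dict=None):
                self.custom_values = config_dict or {}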

    **Algorithm arguments**

    :param mod_path: file path to custom
        algorithm class module
    :param ticker: ticker symbol
    :param balance: float - starting balance capital
        for creating buys and sells
    :param commission: float - cost per buy or sell
    :param name: string - name for tracking algorithm
        in the logs
    :param start_date: string - start date for backtest with
        format ``YYYY-MM-DD HH:MM:SS``
    :param end_date: string - end date for backtest with
        format ``YYYY-MM-DD HH:MM:SS``
    :param auto_fill: optional - boolean for auto filling
        buy and sell orders for backtesting
        (default is ``True``)
    :param config_file: path to a json file
        containing custom algorithm object
        member values (like indicator configuration and
        predict future date units ahead for a backtest)
    :param config_dict: optional - dictionary that
        can be passed to derived class implementations
        of: ``def load_from_config(config_dict=config_dict)``
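
    A hedged sketch of a ``config_dict`` (these key names are
    placeholders - your derived class decides which keys it reads):

    ::

        config_dict = {
            'indicator_window': 14,    # hypothetical member value
            'predict_units_ahead': 5,  # hypothetical member value
        }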

    **Timeseries**

    :param timeseries: optional - string to
        set ``day`` or ``minute`` backtesting
        or live trading
        (default is ``minute``)

    **Trading Strategy**

    :param trade_strategy: optional - string to
        set the type of ``Trading Strategy``
        for backtesting or live trading
        (default is ``count``)

    **Running Distributed Algorithms on the Engine Workers**

    :param run_on_engine: optional - boolean
        flag for publishing custom algorithms
        to Celery ae workers for distributing
        algorithm workloads
        (default is ``False`` which will run algos locally)
        this is required for distributing algorithms
    :param auth_url: Celery broker address
        (default is ``redis://localhost:6379/11``
        or ``analysis_engine.consts.WORKER_BROKER_URL``
        environment variable)
        this is required for distributing algorithms
    :param backend_url: Celery backend address
        (default is ``redis://localhost:6379/12``
        or ``analysis_engine.consts.WORKER_BACKEND_URL``
        environment variable)
        this is required for distributing algorithms
    :param include_tasks: list of modules containing tasks to add
        (default is ``analysis_engine.consts.INCLUDE_TASKS``)
    :param ssl_options: security options dictionary
        (default is ``analysis_engine.consts.SSL_OPTIONS``)
    :param transport_options: transport options dictionary
        (default is ``analysis_engine.consts.TRANSPORT_OPTIONS``)
    :param path_to_config_module: config module for advanced
        Celery worker connectivity requirements
        (default is ``analysis_engine.work_tasks.celery_config``
        or ``analysis_engine.consts.WORKER_CELERY_CONFIG_MODULE``)
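
    A sketch of a distributed run (the module path is hypothetical;
    the broker and backend addresses are the defaults above, and the
    returned ``rec`` dictionary will hold the Celery ``task_id``):

    ::

        res = run_custom_algo(
            mod_path='/path/to/my_algo.py',
            ticker='SPY',
            run_on_engine=True,
            auth_url='redis://localhost:6379/11',
            backend_url='redis://localhost:6379/12')
        task_id = res['rec']['task_id']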

    **Load Algorithm-Ready Dataset From Source**

    Use these arguments to load algorithm-ready datasets
    from supported sources (file, s3 or redis)

    :param load_from_s3_bucket: optional - string to load the algo from
        a previously-created s3 bucket holding an s3 key with an
        algorithm-ready dataset for use with:
        ``handle_data``
    :param load_from_s3_key: optional - string to load the algo from
        a previously-created s3 key holding an
        algorithm-ready dataset for use with:
        ``handle_data``
    :param load_from_redis_key: optional - string to load the algo from
        a previously-created redis key holding an
        algorithm-ready dataset for use with:
        ``handle_data``
    :param load_from_file: optional - string path to
        a previously-created local file holding an
        algorithm-ready dataset for use with:
        ``handle_data``
    :param load_compress: optional - boolean
        flag for decompressing when loading the
        algorithm-ready dataset (``True`` means the
        stored dataset is compressed and must be
        decompressed to load correctly inside
        an algorithm to run a backtest)
    :param load_publish: boolean - toggle publishing
        the load progress to slack, s3, redis or a file
        (default is ``True``)
    :param load_config: optional - dictionary
        for setting member variables to load an
        algorithm-ready dataset from
        a file, s3 or redis
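
    A sketch of replaying a previously-saved algorithm-ready dataset
    from a local file (``/tmp/SPY-latest.json`` is a hypothetical
    path):

    ::

        res = run_custom_algo(
            mod_path='/path/to/my_algo.py',
            ticker='SPY',
            load_from_file='/tmp/SPY-latest.json')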

    **Publishing Control Bool Flags**

    :param publish_to_s3: optional - boolean for
        toggling publishing to s3 on/off
        (default is ``True``)
    :param publish_to_redis: optional - boolean for
        toggling publishing to redis on/off
        (default is ``True``)
    :param publish_to_slack: optional - boolean for
        publishing to slack
        (default is ``True``)

    **Algorithm Trade History Arguments**

    :param history_redis_key: optional - string
        where the algorithm trading history will be stored in
        a redis key
    :param history_s3_bucket: optional - string
        where the algorithm trading history will be stored in
        an s3 bucket
    :param history_s3_key: optional - string
        where the algorithm trading history will be stored in
        an s3 key
    :param history_file: optional - string path to
        a file where the algorithm trading history will be stored,
        serialized as a json-string
    :param history_compress: optional - boolean
        flag for compressing the trading history
        dataset (``True`` means the dataset
        will be compressed on publish)
    :param history_publish: boolean - toggle publishing
        the history to s3, redis or a file
        (default is ``True``)
    :param history_config: optional - dictionary
        for setting member variables to publish
        an algo ``trade history`` to s3, redis, a file
        or slack

    **Algorithm Trade Performance Report Arguments (Output Dataset)**

    :param report_redis_key: optional - string
        where the algorithm ``trading performance report`` (report)
        will be stored in a redis key
    :param report_s3_bucket: optional - string
        where the algorithm report will be stored in
        an s3 bucket
    :param report_s3_key: optional - string
        where the algorithm report will be stored in
        an s3 key
    :param report_file: optional - string path to
        a file where the algorithm report will be stored,
        serialized as a json-string
    :param report_compress: optional - boolean
        flag for compressing the trading performance
        report dataset (``True`` means the dataset
        will be compressed on publish)
    :param report_publish: boolean - toggle publishing
        the ``trading performance report`` to s3, redis or a file
        (default is ``True``)
    :param report_config: optional - dictionary
        for setting member variables to publish
        an algo ``trading performance report`` to s3,
        redis, a file or slack

    **Extract an Algorithm-Ready Dataset Arguments**

    :param extract_redis_key: optional - string
        where the algorithm-ready dataset will be stored in
        a redis key
    :param extract_s3_bucket: optional - string
        where the algorithm-ready dataset will be stored in
        an s3 bucket
    :param extract_s3_key: optional - string
        where the algorithm-ready dataset will be stored in
        an s3 key
    :param extract_file: optional - string path to
        a file where the algorithm-ready dataset will be stored,
        serialized as a json-string
    :param extract_save_dir: optional - string path to
        auto-generated files from the algo
    :param extract_compress: optional - boolean
        flag for compressing the algorithm-ready
        dataset (``True`` means the dataset
        will be compressed on publish)
    :param extract_publish: boolean - toggle publishing
        the used ``algorithm-ready dataset`` to s3, redis or a file
        (default is ``True``)
    :param extract_config: optional - dictionary
        for setting member variables to publish
        an algo ``algorithm-ready dataset`` to s3,
        redis, a file or slack
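
    A sketch of saving the algorithm-ready dataset a backtest used so
    it can be replayed later with ``load_from_file``
    (``/tmp/SPY-latest.json`` is a hypothetical path):

    ::

        res = run_custom_algo(
            mod_path='/path/to/my_algo.py',
            ticker='SPY',
            extract_file='/tmp/SPY-latest.json',
            extract_publish=True)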

    **Dataset Arguments**

    :param dataset_type: optional - dataset type
        (default is ``SA_DATASET_TYPE_ALGO_READY``)
    :param serialize_datasets: optional - list of dataset names to
        serialize in the dataset
        (default is ``DEFAULT_SERIALIZED_DATASETS``)
    :param encoding: optional - string for data encoding

    **Publish Algorithm Datasets to S3, Redis or a File**

    :param dataset_publish_extract: optional - bool
        for publishing the algorithm's
        ``algorithm-ready``
        dataset to: s3, redis or file
    :param dataset_publish_history: optional - bool
        for publishing the algorithm's
        ``trading history``
        dataset to: s3, redis or file
    :param dataset_publish_report: optional - bool
        for publishing the algorithm's
        ``trading performance report``
        dataset to: s3, redis or file

    **Redis connectivity arguments**

    :param redis_enabled: bool - toggle for auto-caching all
        datasets in Redis
        (default is ``True``)
    :param redis_key: string - key to save the data in redis
        (default is ``None``)
    :param redis_address: Redis connection string format: ``host:port``
        (default is ``localhost:6379``)
    :param redis_db: Redis db to use
        (default is ``0``)
    :param redis_password: optional - Redis password
        (default is ``None``)
    :param redis_expire: optional - Redis expire value
        (default is ``None``)
    :param redis_serializer: not used yet - support for future
        pickle objects in redis
    :param redis_encoding: format of the encoded key in redis

    **Minio (S3) connectivity arguments**

    :param s3_enabled: bool - toggle for auto-archiving on Minio (S3)
        (default is ``True``)
    :param s3_key: string - key to save the data in s3
        (default is ``None``)
    :param s3_address: Minio S3 connection string format: ``host:port``
        (default is ``localhost:9000``)
    :param s3_bucket: S3 Bucket for storing the artifacts
        (default is ``dev``) which should be viewable on a browser:
        http://localhost:9000/minio/dev/
    :param s3_access_key: S3 Access key
        (default is ``trexaccesskey``)
    :param s3_secret_key: S3 Secret key
        (default is ``trex123321``)
    :param s3_region_name: S3 region name
        (default is ``us-east-1``)
    :param s3_secure: Transmit using tls encryption
        (default is ``False``)

    **Slack arguments**

    :param slack_enabled: optional - boolean for
        publishing to slack
    :param slack_code_block: optional - boolean for
        publishing as a code block in slack
    :param slack_full_width: optional - boolean for
        publishing to slack using the full
        width allowed

    **Debugging arguments**

    :param debug: optional - bool for debug tracking
    :param verbose: optional - bool for increasing
        logging
    :param raise_on_err: boolean - set this to ``False`` in production
        to ensure exceptions do not interrupt services.
        With the default (``True``), any exception from the library
        or your own algorithm is raised immediately, exiting
        the backtest.
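
    **Example**

    A minimal local backtest sketch (the module path is hypothetical;
    the returned dictionary carries the algorithm object under the
    ``algo`` key):

    ::

        res = run_custom_algo(
            mod_path='/path/to/my_algo.py',
            ticker='SPY',
            balance=50000.0,
            start_date='2018-01-01 00:00:00',
            end_date='2018-03-01 00:00:00',
            timeseries='day',
            verbose=True)
        if res['status'] == ae_consts.SUCCESS:
            algo = res['algo']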
    """

    module_name = 'BaseAlgo'
    custom_algo_module = None
    new_algo_object = None
    use_custom_algo = False
    found_algo_module = True
    should_publish_extract_dataset = False
    should_publish_history_dataset = False
    should_publish_report_dataset = False
    use_config_file = None
    use_config_dict = config_dict
    if config_file:
        if os.path.exists(config_file):
            use_config_file = config_file
            if not config_dict:
                try:
                    with open(config_file, 'r') as config_fp:
                        use_config_dict = json.load(config_fp)
                except Exception as e:
                    msg = ('failed parsing json config_file={} '
                           'with ex={}'.format(config_file, e))
                    log.error(msg)
                    raise Exception(msg)
    # end of loading the config_file

    err = None
    if mod_path:
        module_name = mod_path.split('/')[-1]
        loader = importlib.machinery.SourceFileLoader(module_name, mod_path)
        custom_algo_module = types.ModuleType(loader.name)
        loader.exec_module(custom_algo_module)
        use_custom_algo = True

        found_algo_module = False
        for member in inspect.getmembers(custom_algo_module):
            if module_name in str(member):
                found_algo_module = True
                break
        # for all members in this custom module file
    # if loading a custom algorithm module from a file on disk

    if not found_algo_module:
        err = ('unable to find custom algorithm module={}'.format(
            custom_algo_module))
        if mod_path:
            err = (
                'analysis_engine.run_custom_algo.run_custom_algo was unable '
                'to find custom algorithm module={} with provided path to \n '
                'file: {} \n'
                '\n'
                'Please confirm '
                'that the class inherits from the BaseAlgo class like:\n'
                '\n'
                'import analysis_engine.algo\n'
                'class MyAlgo(analysis_engine.algo.BaseAlgo):\n '
                '\n'
                'If it is then please file an issue on github:\n '
                'https://github.com/AlgoTraders/stock-analysis-engine/'
                'issues/new \n\nFor now this error results in a shutdown'
                '\n'.format(custom_algo_module, mod_path))
        # if mod_path set

        if verbose or debug:
            log.error(err)
        return build_result.build_result(status=ae_consts.ERR,
                                         err=err,
                                         rec=None)
    # if not found_algo_module

    use_start_date = start_date
    use_end_date = end_date
    if not use_end_date:
        end_date = datetime.datetime.utcnow()
        use_end_date = end_date.strftime(ae_consts.COMMON_TICK_DATE_FORMAT)
    else:
        # parse the caller's end_date so a default start_date
        # can be derived from it below
        end_date = datetime.datetime.strptime(
            use_end_date, ae_consts.COMMON_TICK_DATE_FORMAT)
    if not use_start_date:
        start_date = end_date - datetime.timedelta(days=75)
        use_start_date = start_date.strftime(ae_consts.COMMON_TICK_DATE_FORMAT)
        if verbose:
            log.info('{} {} setting default start_date={}'.format(
                name, ticker, use_start_date))

    # Load an algorithm-ready dataset from:
    # file, s3, or redis
    if not load_config:
        load_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=load_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label='load-{}'.format(name))
        if load_from_file:
            load_config['output_file'] = load_from_file
        if load_from_redis_key:
            load_config['redis_key'] = load_from_redis_key
            load_config['redis_enabled'] = True
        if load_from_s3_bucket and load_from_s3_key:
            load_config['s3_bucket'] = load_from_s3_bucket
            load_config['s3_key'] = load_from_s3_key
            load_config['s3_enabled'] = True
    # end of building load_config dictionary if not already set

    # Automatically save all datasets to an algorithm-ready:
    # file, s3, or redis
    if not extract_config:
        extract_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=extract_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label='extract-{}'.format(name))
        should_publish_extract_dataset = False
        if extract_file:
            extract_config['output_file'] = extract_file
            should_publish_extract_dataset = True
        if extract_redis_key and publish_to_redis:
            extract_config['redis_key'] = extract_redis_key
            extract_config['redis_enabled'] = True
            should_publish_extract_dataset = True
        if extract_s3_bucket and extract_s3_key and publish_to_s3:
            extract_config['s3_bucket'] = extract_s3_bucket
            extract_config['s3_key'] = extract_s3_key
            extract_config['s3_enabled'] = True
            should_publish_extract_dataset = True
        else:
            extract_config['s3_enabled'] = False
    # end of building extract_config dictionary if not already set

    # Automatically save the trading performance report:
    # file, s3, or redis
    if not report_config:
        report_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=report_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label='report-{}'.format(name))
        should_publish_report_dataset = False
        if report_file:
            report_config['output_file'] = report_file
            should_publish_report_dataset = True
        if report_redis_key and publish_to_redis:
            report_config['redis_key'] = report_redis_key
            report_config['redis_enabled'] = True
            should_publish_report_dataset = True
        if report_s3_bucket and report_s3_key and publish_to_s3:
            report_config['s3_bucket'] = report_s3_bucket
            report_config['s3_key'] = report_s3_key
            report_config['s3_enabled'] = True
            should_publish_report_dataset = True
    # end of building report_config dictionary if not already set

    # Automatically save the trade history:
    # file, s3, or redis
    if not history_config:
        history_config = build_publish_request.build_publish_request(
            ticker=ticker,
            output_file=None,
            s3_bucket=None,
            s3_key=None,
            redis_key=None,
            compress=history_compress,
            redis_enabled=publish_to_redis,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=publish_to_s3,
            s3_address=s3_address,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=publish_to_slack,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            verbose=verbose,
            label='history-{}'.format(name))
        should_publish_history_dataset = False
        if history_file:
            history_config['output_file'] = history_file
            should_publish_history_dataset = True
        if history_redis_key and publish_to_redis:
            history_config['redis_key'] = history_redis_key
            history_config['redis_enabled'] = True
            should_publish_history_dataset = True
        if history_s3_bucket and history_s3_key and publish_to_s3:
            history_config['s3_bucket'] = history_s3_bucket
            history_config['s3_key'] = history_s3_key
            history_config['s3_enabled'] = True
            should_publish_history_dataset = True
    # end of building history_config dictionary if not already set

    if verbose:
        remove_vals = ['s3_access_key', 's3_secret_key', 'redis_password']
        debug_extract_config = {}
        for k in extract_config:
            if k not in remove_vals:
                debug_extract_config[k] = extract_config[k]
        debug_report_config = {}
        for k in report_config:
            if k not in remove_vals:
                debug_report_config[k] = report_config[k]
        debug_history_config = {}
        for k in history_config:
            if k not in remove_vals:
                debug_history_config[k] = history_config[k]
        debug_load_config = {}
        for k in load_config:
            if k not in remove_vals:
                debug_load_config[k] = load_config[k]
        log.info('{} {} using extract config {}'.format(
            name, ticker, ae_consts.ppj(debug_extract_config)))
        log.info('{} {} using report config {}'.format(
            name, ticker, ae_consts.ppj(debug_report_config)))
        log.info('{} {} using trade history config {}'.format(
            name, ticker, ae_consts.ppj(debug_history_config)))
        log.info('{} {} using load config {}'.format(
            name, ticker, ae_consts.ppj(debug_load_config)))
        log.info('{} {} - building algo request'.format(name, ticker))
    # end of verbose

    algo_req = build_algo_request.build_algo_request(
        ticker=ticker,
        balance=balance,
        commission=commission,
        start_date=use_start_date,
        end_date=use_end_date,
        timeseries=timeseries,
        trade_strategy=trade_strategy,
        config_file=use_config_file,
        config_dict=use_config_dict,
        load_config=load_config,
        history_config=history_config,
        report_config=report_config,
        extract_config=extract_config,
        label=name)

    algo_req['name'] = name
    algo_req['should_publish_extract_dataset'] = should_publish_extract_dataset
    algo_req['should_publish_history_dataset'] = should_publish_history_dataset
    algo_req['should_publish_report_dataset'] = should_publish_report_dataset

    algo_res = build_result.build_result(status=ae_consts.NOT_RUN,
                                         err=None,
                                         rec=None)

    if run_on_engine:
        rec = {'algo_req': algo_req, 'task_id': None}
        task_name = ('analysis_engine.work_tasks.'
                     'task_run_algo.task_run_algo')
        if verbose:
            log.info('starting distributed algo task={}'.format(task_name))
        elif debug:
            log.info('starting distributed algo by publishing to '
                     'task={} broker={} backend={}'.format(
                         task_name, auth_url, backend_url))

        # Get the Celery app
        app = get_celery_app.get_celery_app(
            name=__name__,
            auth_url=auth_url,
            backend_url=backend_url,
            path_to_config_module=path_to_config_module,
            ssl_options=ssl_options,
            transport_options=transport_options,
            include_tasks=include_tasks)

        if debug:
            log.info('calling distributed algo task={} request={}'.format(
                task_name, ae_consts.ppj(algo_req)))
        elif verbose:
            log.info('calling distributed algo task={}'.format(task_name))

        job_id = app.send_task(task_name, (algo_req, ))
        if verbose:
            log.info('calling task={} - success job_id={}'.format(
                task_name, job_id))
        rec['task_id'] = job_id
        algo_res = build_result.build_result(status=ae_consts.SUCCESS,
                                             err=None,
                                             rec=rec)
        return algo_res
    # end of run_on_engine

    if use_custom_algo:
        if verbose:
            log.info('inspecting {} for class {}'.format(
                custom_algo_module, module_name))
        use_class_member_object = None
        for member in inspect.getmembers(custom_algo_module):
            if module_name in str(member):
                if verbose:
                    log.info('start {} with {}'.format(name, member[1]))
                use_class_member_object = member
                break
        # end of looking for the algorithm class in the module

        if use_class_member_object:
            new_algo_object = use_class_member_object[1](**algo_req)
        else:
            err = ('did not find a derived analysis_engine.algo.BaseAlgo '
                   'class in the module file={} '
                   'for ticker={} algo_name={}'.format(mod_path, ticker, name))

            if verbose or debug:
                log.error(err)

            return build_result.build_result(status=ae_consts.ERR,
                                             err=err,
                                             rec=None)
        # end of finding a valid algorithm object
    else:
        new_algo_object = ae_algo.BaseAlgo(**algo_req)
    # if using a custom module path or the BaseAlgo

    if new_algo_object:
        # heads up - logging this might have passwords in the algo_req
        # log.debug(
        #     '{} algorithm request: {}'.format(
        #         name,
        #         algo_req))
        if verbose:
            log.info('{} - run ticker={} from {} to {}'.format(
                name, ticker, use_start_date, use_end_date))
        algo_res = run_algo.run_algo(algo=new_algo_object,
                                     raise_on_err=raise_on_err,
                                     **algo_req)
        algo_res['algo'] = new_algo_object
        if custom_algo_module:
            if verbose:
                log.info('{} - done run_algo custom_algo_module={} '
                         'module_name={} '
                         'ticker={} from {} to {}'.format(
                             name, custom_algo_module, module_name, ticker,
                             use_start_date, use_end_date))
        else:
            if verbose:
                log.info('{} - done run_algo BaseAlgo ticker={} '
                         'from {} to {}'.format(name, ticker, use_start_date,
                                                use_end_date))
    else:
        err = ('missing a derived analysis_engine.algo.BaseAlgo '
               'class in the module file={} for ticker={} algo_name={}'.format(
                   mod_path, ticker, name))
        return build_result.build_result(status=ae_consts.ERR,
                                         err=err,
                                         rec=None)
    # end of finding a valid algorithm object

    algo = algo_res.get('algo', None)

    if not algo:
        err = ('failed creating algorithm object - '
               'ticker={} status={} error={} '
               'algo name={} custom_algo_module={} module_name={} '
               'from {} to {}'.format(
                   ticker, ae_consts.get_status(status=algo_res['status']),
                   algo_res['err'], name, custom_algo_module, module_name,
                   use_start_date, use_end_date))
        return build_result.build_result(status=ae_consts.ERR,
                                         err=err,
                                         rec=None)

    if should_publish_extract_dataset or dataset_publish_extract:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (extract_config['redis_address'] and extract_config['redis_db']
                and extract_config['redis_key']):
            redis_log = 'redis://{}@{}/{}'.format(
                extract_config['redis_address'], extract_config['redis_db'],
                extract_config['redis_key'])
            use_log += ' {}'.format(redis_log)
        else:
            extract_config['redis_enabled'] = False
        if (extract_config['s3_address'] and extract_config['s3_bucket']
                and extract_config['s3_key']):
            s3_log = 's3://{}/{}/{}'.format(extract_config['s3_address'],
                                            extract_config['s3_bucket'],
                                            extract_config['s3_key'])
            use_log += ' {}'.format(s3_log)
        else:
            extract_config['s3_enabled'] = False
        if extract_config['output_file']:
            file_log = 'file:{}'.format(extract_config['output_file'])
            use_log += ' {}'.format(file_log)

        if verbose:
            log.info('{} - publish - start ticker={} algorithm-ready {}'
                     ''.format(name, ticker, use_log))

        publish_status = algo.publish_input_dataset(**extract_config)
        if publish_status != ae_consts.SUCCESS:
            msg = ('failed to publish algorithm-ready datasets '
                   'with status {} attempted to {}'.format(
                       ae_consts.get_status(status=publish_status), use_log))
            log.error(msg)
            return build_result.build_result(status=ae_consts.ERR,
                                             err=msg,
                                             rec=None)

        if verbose:
            log.info('{} - publish - done ticker={} algorithm-ready {}'
                     ''.format(name, ticker, use_log))
    # if publishing the algorithm-ready dataset

    if should_publish_history_dataset or dataset_publish_history:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (history_config['redis_address'] and history_config['redis_db']
                and history_config['redis_key']):
            redis_log = 'redis://{}@{}/{}'.format(
                history_config['redis_address'], history_config['redis_db'],
                history_config['redis_key'])
            use_log += ' {}'.format(redis_log)
        else:
            history_config['redis_enabled'] = False
        if (history_config['s3_address'] and history_config['s3_bucket']
                and history_config['s3_key']):
            s3_log = 's3://{}/{}/{}'.format(history_config['s3_address'],
                                            history_config['s3_bucket'],
                                            history_config['s3_key'])
            use_log += ' {}'.format(s3_log)
        else:
            history_config['s3_enabled'] = False

        if history_config['output_file']:
            file_log = 'file:{}'.format(history_config['output_file'])
            use_log += ' {}'.format(file_log)

        if verbose:
            log.info('{} - publish - start ticker={} trading history {}'
                     ''.format(name, ticker, use_log))

        publish_status = algo.publish_trade_history_dataset(**history_config)
        if publish_status != ae_consts.SUCCESS:
            msg = ('failed to publish trading history datasets '
                   'with status {} attempted to {}'.format(
                       ae_consts.get_status(status=publish_status), use_log))
            log.error(msg)
            return build_result.build_result(status=ae_consts.ERR,
                                             err=msg,
                                             rec=None)

        if verbose:
            log.info('{} - publish - done ticker={} trading history {}'
                     ''.format(name, ticker, use_log))
    # if publishing the trading history dataset

    if should_publish_report_dataset or dataset_publish_report:
        s3_log = ''
        redis_log = ''
        file_log = ''
        use_log = 'publish'

        if (report_config['redis_address'] and report_config['redis_db']
                and report_config['redis_key']):
            redis_log = 'redis://{}@{}/{}'.format(
                report_config['redis_address'], report_config['redis_db'],
                report_config['redis_key'])
            use_log += ' {}'.format(redis_log)
        else:
            report_config['redis_enabled'] = False
        if (report_config['s3_address'] and report_config['s3_bucket']
                and report_config['s3_key']):
            s3_log = 's3://{}/{}/{}'.format(report_config['s3_address'],
                                            report_config['s3_bucket'],
                                            report_config['s3_key'])
            use_log += ' {}'.format(s3_log)
        else:
            report_config['s3_enabled'] = False
        if report_config['output_file']:
            file_log = 'file:{}'.format(report_config['output_file'])
            use_log += ' {}'.format(file_log)

        if verbose:
            log.info('{} - publish - start ticker={} trading performance '
                     'report {}'.format(name, ticker, use_log))

        publish_status = algo.publish_report_dataset(**report_config)
        if publish_status != ae_consts.SUCCESS:
            msg = ('failed to publish trading performance report datasets '
                   'with status {} attempted to {}'.format(
                       ae_consts.get_status(status=publish_status), use_log))
            log.error(msg)
            return build_result.build_result(status=ae_consts.ERR,
                                             err=msg,
                                             rec=None)

        if verbose:
            log.info(
                '{} - publish - done ticker={} trading performance report {}'
                ''.format(name, ticker, use_log))
    # if publishing the trading performance report dataset

    if verbose:
        log.info(
            '{} - done publishing datasets for ticker={} from {} to {}'.format(
                name, ticker, use_start_date, use_end_date))

    return algo_res
def perform_extract(df_type,
                    df_str,
                    work_dict,
                    dataset_id_key='ticker',
                    scrub_mode='sort-by-date'):
    """perform_extract

    Helper for extracting from Redis or S3

    :param df_type: datafeed type enum
    :param df_str: dataset string name
    :param work_dict: incoming work request dictionary
    :param dataset_id_key: configurable dataset identifier
                           key for tracking scrubbing and
                           debugging errors
    :param scrub_mode: scrubbing mode on extraction for
                       one-off cleanup before analysis
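
    A minimal caller sketch (``df_type`` comes from the datafeed
    enums, and the ``work_dict`` keys mirror the ``work_dict.get()``
    lookups below - the values here are placeholders):

    ::

        status, df = perform_extract(
            df_type=df_type,   # a datafeed type enum value
            df_str='daily',
            work_dict={
                'ticker': 'SPY',
                'redis_key': 'SPY_2018-01-19_daily',
                'label': 'extract'})
        if status == ae_consts.SUCCESS:
            print(df.head())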
    """
    status = ae_consts.FAILED
    ds_id = work_dict.get(dataset_id_key, None)
    label = work_dict.get('label', 'extract')
    s3_bucket = work_dict.get('s3_bucket', ae_consts.S3_BUCKET)
    s3_key = work_dict.get('s3_key', ae_consts.S3_KEY)
    redis_key = work_dict.get('redis_key', ae_consts.REDIS_KEY)
    s3_enabled = work_dict.get('s3_enabled', ae_consts.ENABLED_S3_UPLOAD)
    s3_access_key = work_dict.get('s3_access_key', ae_consts.S3_ACCESS_KEY)
    s3_secret_key = work_dict.get('s3_secret_key', ae_consts.S3_SECRET_KEY)
    s3_region_name = work_dict.get('s3_region_name', ae_consts.S3_REGION_NAME)
    s3_address = work_dict.get('s3_address', ae_consts.S3_ADDRESS)
    s3_secure = work_dict.get('s3_secure', ae_consts.S3_SECURE)
    redis_address = work_dict.get('redis_address', ae_consts.REDIS_ADDRESS)
    redis_password = work_dict.get('redis_password', ae_consts.REDIS_PASSWORD)
    redis_db = work_dict.get('redis_db', ae_consts.REDIS_DB)
    redis_expire = work_dict.get('redis_expire', ae_consts.REDIS_EXPIRE)

    log.debug('{} - {} - START - ds_id={} scrub_mode={} '
              'redis_address={}@{} redis_key={} '
              's3={} s3_address={} s3_bucket={} s3_key={}'.format(
                  label, df_str, ds_id, scrub_mode, redis_address, redis_db,
                  redis_key, s3_enabled, s3_address, s3_bucket, s3_key))

    if ae_consts.ev('DEBUG_REDIS_EXTRACT', '0') == '1':
        log.info('{} - {} - ds_id={} redis '
                 'pw={} expire={}'.format(label, df_str, ds_id, redis_password,
                                          redis_expire))

    if ae_consts.ev('DEBUG_S3_EXTRACT', '0') == '1':
        log.info('{} - {} - ds_id={} s3 '
                 'ak={} sk={} region={} secure={}'.format(
                     label, df_str, ds_id, s3_access_key, s3_secret_key,
                     s3_region_name, s3_secure))

    extract_res = None
    try:
        extract_res = build_df.build_df_from_redis(label=label,
                                                   address=redis_address,
                                                   db=redis_db,
                                                   key=redis_key)
    except Exception as e:
        extract_res = None
        log.error('{} - {} - ds_id={} failed extract from '
                  'redis={}@{} key={} ex={}'.format(label, df_str, ds_id,
                                                    redis_address, redis_db,
                                                    redis_key, e))
    # end of try/ex extract from redis

    if not extract_res:
        return status, None

    valid_df = (extract_res['status'] == ae_consts.SUCCESS
                and extract_res['rec']['valid_df'])

    if not valid_df:
        if ae_consts.ev('DEBUG_S3_EXTRACT', '0') == '1':
            log.error('{} - {} ds_id={} invalid df '
                      'status={} extract_res={}'.format(
                          label, df_str, ds_id,
                          ae_consts.get_status(status=extract_res['status']),
                          extract_res))
        return status, None

    extract_df = extract_res['rec']['data']

    log.debug('{} - {} ds_id={} extract scrub={}'.format(
        label, df_str, ds_id, scrub_mode))

    scrubbed_df = scrub_utils.extract_scrub_dataset(
        label=label,
        scrub_mode=scrub_mode,
        datafeed_type=df_type,
        msg_format='df={} date_str={}',
        ds_id=ds_id,
        df=extract_df)

    status = ae_consts.SUCCESS

    return status, scrubbed_df
def get_data_from_iex(work_dict):
    """get_data_from_iex

    Get pricing from iex

    :param work_dict: request dictionary
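
    A minimal ``work_dict`` sketch (keys mirror the
    ``work_dict.get()`` lookups below - the values are placeholders):

    ::

        res = get_data_from_iex({
            'ticker': 'SPY',
            'ft_type': 'daily',
            'label': 'get-iex-daily'})
        if res['status'] == SUCCESS:
            data_json = res['rec']['data']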
    """
    label = 'get_data_from_iex'

    log.info('task - {} - start work_dict={}'.format(label, work_dict))

    rec = {'data': None, 'updated': None}
    res = {'status': NOT_RUN, 'err': None, 'rec': rec}

    ticker = None
    field = None
    ft_type = None

    try:

        ticker = work_dict.get('ticker', TICKER)
        field = work_dict.get('field', 'daily')
        ft_type = work_dict.get('ft_type', None)
        ft_str = str(ft_type).lower()
        label = work_dict.get('label', label)
        orient = work_dict.get('orient', 'records')

        iex_req = None
        if ft_type == FETCH_DAILY or ft_str == 'daily':
            ft_type = FETCH_DAILY
            iex_req = api_requests.build_iex_fetch_daily_request(label=label)
        elif ft_type == FETCH_MINUTE or ft_str == 'minute':
            ft_type = FETCH_MINUTE
            iex_req = api_requests.build_iex_fetch_minute_request(label=label)
        elif ft_type == FETCH_QUOTE or ft_str == 'quote':
            ft_type = FETCH_QUOTE
            iex_req = api_requests.build_iex_fetch_quote_request(label=label)
        elif ft_type == FETCH_STATS or ft_str == 'stats':
            ft_type = FETCH_STATS
            iex_req = api_requests.build_iex_fetch_stats_request(label=label)
        elif ft_type == FETCH_PEERS or ft_str == 'peers':
            ft_type = FETCH_PEERS
            iex_req = api_requests.build_iex_fetch_peers_request(label=label)
        elif ft_type == FETCH_NEWS or ft_str == 'news':
            ft_type = FETCH_NEWS
            iex_req = api_requests.build_iex_fetch_news_request(label=label)
        elif ft_type == FETCH_FINANCIALS or ft_str == 'financials':
            ft_type = FETCH_FINANCIALS
            iex_req = api_requests.build_iex_fetch_financials_request(
                label=label)
        elif ft_type == FETCH_EARNINGS or ft_str == 'earnings':
            ft_type = FETCH_EARNINGS
            iex_req = api_requests.build_iex_fetch_earnings_request(
                label=label)
        elif ft_type == FETCH_DIVIDENDS or ft_str == 'dividends':
            ft_type = FETCH_DIVIDENDS
            iex_req = api_requests.build_iex_fetch_dividends_request(
                label=label)
        elif ft_type == FETCH_COMPANY or ft_str == 'company':
            ft_type = FETCH_COMPANY
            iex_req = api_requests.build_iex_fetch_company_request(label=label)
        else:
            log.error('{} - unsupported ft_type={} ft_str={} ticker={}'.format(
                label, ft_type, ft_str, ticker))
            raise NotImplementedError(
                'unsupported ft_type={} ft_str={}'.format(ft_type, ft_str))
        # if supported fetch request type

        # validate the request before cloning keys into it
        if not iex_req:
            err = ('{} - ticker={} did not build an IEX request '
                   'for work={}'.format(label, ticker, work_dict))
            log.error(err)
            res = build_result.build_result(status=ERR, err=err, rec=rec)
            return res
        # if invalid iex request

        clone_keys = [
            'ticker', 's3_address', 's3_bucket', 's3_key', 'redis_address',
            'redis_db', 'redis_password', 'redis_key'
        ]

        for k in clone_keys:
            iex_req[k] = work_dict.get(k, '{}-missing-in-{}'.format(k, label))
        # end of cloning keys

        log.info('{} - ticker={} field={} '
                 'orient={} fetch'.format(label, iex_req['ticker'], field,
                                          orient))

        df = None
        try:
            if 'from' in work_dict:
                iex_req['from'] = datetime.datetime.strptime(
                    work_dict['from'], '%Y-%m-%d %H:%M:%S')
            df = iex_fetch_data.fetch_data(work_dict=iex_req,
                                           fetch_type=ft_type)
            rec['data'] = df.to_json(orient=orient, date_format='iso')
            rec['updated'] = datetime.datetime.utcnow().strftime(
                '%Y-%m-%d %H:%M:%S')
        except Exception as f:
            log.error('{} - ticker={} field={} failed fetch_data '
                      'with ex={}'.format(label, iex_req['ticker'], ft_type,
                                          f))
        # end of try/ex

        if ev('DEBUG_IEX_DATA', '0') == '1':
            log.info('{} ticker={} field={} data={} to_json'.format(
                label, iex_req['ticker'], field, rec['data']))
        else:
            log.info('{} ticker={} field={} to_json'.format(
                label, iex_req['ticker'], field))
        # end of if/else found data

        upload_and_cache_req = copy.deepcopy(iex_req)
        upload_and_cache_req['celery_disabled'] = True
        upload_and_cache_req['data'] = rec['data']
        if not upload_and_cache_req['data']:
            upload_and_cache_req['data'] = '{}'
        use_field = field
        if use_field == 'news':
            use_field = 'news1'
        if 'redis_key' in work_dict:
            upload_and_cache_req['redis_key'] = '{}_{}'.format(
                work_dict.get('redis_key', iex_req['redis_key']), use_field)
        if 's3_key' in work_dict:
            upload_and_cache_req['s3_key'] = '{}_{}'.format(
                work_dict.get('s3_key', iex_req['s3_key']), use_field)

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=upload_and_cache_req)
            update_status = update_res.get('status', NOT_SET)
            log.info('{} publish update status={} data={}'.format(
                label, get_status(status=update_status), update_res))
        except Exception as f:
            err = ('{} - failed to upload iex data={} '
                   'to s3_key={} and redis_key={} ex={}'.format(
                       label, upload_and_cache_req,
                       upload_and_cache_req['s3_key'],
                       upload_and_cache_req['redis_key'], f))
            log.error(err)
        # end of try/ex to upload and cache

        if not rec['data']:
            log.info('{} - ticker={} no IEX data field={} to publish'.format(
                label, iex_req['ticker'], field))
        # end of if/else

        res = build_result.build_result(status=SUCCESS, err=None, rec=rec)

    except Exception as e:
        res = build_result.build_result(status=ERR,
                                        err=('failed - get_data_from_iex '
                                             'dict={} with ex={}').format(
                                                 work_dict, e),
                                        rec=rec)
    # end of try/ex

    log.info('task - get_data_from_iex done - '
             '{} - status={} err={}'.format(label, get_status(res['status']),
                                            res['err']))

    return res
def fetch_new_stock_datasets():
    """fetch_new_stock_datasets

    Collect all datasets for the ticker **SPY**:

    ::

        fetch_new_stock_datasets.py -t SPY

    .. note:: This requires the following services are listening on:

        - redis ``localhost:6379``
        - minio ``localhost:9000``

    """
    log.info('start - fetch_new_stock_datasets')

    parser = argparse.ArgumentParser(
        description=('Download and store the latest stock pricing, '
                     'news, and options chain data '
                     'and store it in Minio (S3) and Redis. '
                     'Also includes support for getting FinViz '
                     'screener tickers'))
    parser.add_argument('-t', help=('ticker'), required=False, dest='ticker')
    parser.add_argument('-g',
                        help=('optional - fetch mode: '
                              'all = fetch from all data sources (default), '
                              'yahoo = fetch from just Yahoo sources, '
                              'iex = fetch from just IEX sources'),
                        required=False,
                        dest='fetch_mode')
    parser.add_argument('-i',
                        help=('optional - ticker id '
                              'not used without a database'),
                        required=False,
                        dest='ticker_id')
    parser.add_argument('-e',
                        help=('optional - options expiration date'),
                        required=False,
                        dest='exp_date_str')
    parser.add_argument('-l',
                        help=('optional - path to the log config file'),
                        required=False,
                        dest='log_config_path')
    parser.add_argument('-b',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-B',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-s',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-S',
                        help=('optional - s3 ssl or not'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-G',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument('-p',
                        help=('optional - redis_password'),
                        required=False,
                        dest='redis_password')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-n',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument('-z',
                        help=('optional - strike price'),
                        required=False,
                        dest='strike')
    parser.add_argument(
        '-c',
        help=('optional - contract type "C" for calls "P" for puts'),
        required=False,
        dest='contract_type')
    parser.add_argument(
        '-P',
        help=('optional - get pricing data if "1" or "0" disabled'),
        required=False,
        dest='get_pricing')
    parser.add_argument(
        '-N',
        help=('optional - get news data if "1" or "0" disabled'),
        required=False,
        dest='get_news')
    parser.add_argument(
        '-O',
        help=('optional - get options data if "1" or "0" disabled'),
        required=False,
        dest='get_options')
    parser.add_argument('-U',
                        help=('optional - s3 enabled for publishing if "1" or '
                              '"0" is disabled'),
                        required=False,
                        dest='s3_enabled')
    parser.add_argument(
        '-R',
        help=('optional - redis enabled for publishing if "1" or '
              '"0" is disabled'),
        required=False,
        dest='redis_enabled')
    parser.add_argument('-A',
                        help=('optional - run an analysis '
                              'supported modes: scn'),
                        required=False,
                        dest='analysis_type')
    parser.add_argument('-L',
                        help=('optional - screener urls to pull '
                              'tickers for analysis'),
                        required=False,
                        dest='urls')
    parser.add_argument(
        '-Z',
        help=('optional - enable the Celery engine (by default '
              'runs offline without an engine for local testing '
              'and demos)'),
        required=False,
        dest='celery_enabled',
        action='store_true')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    run_offline = True
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    fetch_mode = 'all'
    exp_date_str = ae_consts.NEXT_EXP_STR
    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    celery_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_secure = ae_consts.S3_SECURE
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = ae_consts.S3_KEY
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = ae_consts.REDIS_KEY
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    strike = None
    contract_type = None
    get_pricing = True
    get_news = True
    get_options = True
    s3_enabled = True
    redis_enabled = True
    analysis_type = None
    debug = False

    if args.ticker:
        ticker = args.ticker.upper()
    if args.ticker_id:
        ticker_id = args.ticker_id
    if args.exp_date_str:
        exp_date_str = args.exp_date_str
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_password:
        redis_password = args.redis_password
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.strike:
        strike = args.strike
    if args.contract_type:
        contract_type = args.contract_type
    if args.get_pricing:
        get_pricing = args.get_pricing == '1'
    if args.get_news:
        get_news = args.get_news == '1'
    if args.get_options:
        get_options = args.get_options == '1'
    if args.s3_enabled:
        s3_enabled = args.s3_enabled == '1'
    if args.redis_enabled:
        redis_enabled = args.redis_enabled == '1'
    if args.fetch_mode:
        fetch_mode = str(args.fetch_mode).lower()
    if args.analysis_type:
        analysis_type = str(args.analysis_type).lower()
    if args.celery_enabled:
        run_offline = False
    if args.debug:
        debug = True

    work = api_requests.build_get_new_pricing_request()

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_key'] = s3_key
    work['redis_key'] = redis_key
    work['strike'] = strike
    work['contract'] = contract_type
    work['exp_date'] = exp_date_str
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['get_pricing'] = get_pricing
    work['get_news'] = get_news
    work['get_options'] = get_options
    work['s3_enabled'] = s3_enabled
    work['redis_enabled'] = redis_enabled
    work['fetch_mode'] = fetch_mode
    work['analysis_type'] = analysis_type
    work['debug'] = debug
    work['label'] = 'ticker={}'.format(ticker)

    if analysis_type == 'scn':
        label = 'screener={}'.format(work['ticker'])
        fv_urls = []
        if args.urls:
            fv_urls = str(args.urls).split('|')
        if len(fv_urls) == 0:
            fv_urls = os.getenv('SCREENER_URLS', '').split('|')
        screener_req = api_requests.build_screener_analysis_request(
            ticker=ticker, fv_urls=fv_urls, label=label)
        work.update(screener_req)
        start_screener_analysis(req=work)
    # end of analysis_type
    else:
        if not args.keyname:
            last_close_date = ae_utils.last_close()
            work['s3_key'] = '{}_{}'.format(
                work['ticker'],
                last_close_date.strftime(ae_consts.COMMON_DATE_FORMAT))
            work['redis_key'] = '{}_{}'.format(
                work['ticker'],
                last_close_date.strftime(ae_consts.COMMON_DATE_FORMAT))

        path_to_tasks = 'analysis_engine.work_tasks'
        task_name = ('{}.get_new_pricing_data.get_new_pricing_data'.format(
            path_to_tasks))
        task_res = None
        if ae_consts.is_celery_disabled() or run_offline:
            work['celery_disabled'] = True
            log.debug('starting without celery work={} offline={}'.format(
                ae_consts.ppj(work), run_offline))
            task_res = task_pricing.get_new_pricing_data(work)

            if debug:
                log.info('done - result={} '
                         'task={} status={} '
                         'err={} label={}'.format(
                             ae_consts.ppj(task_res), task_name,
                             ae_consts.get_status(status=task_res['status']),
                             task_res['err'], work['label']))
            else:
                log.info('done - result '
                         'task={} status={} '
                         'err={} label={}'.format(
                             task_name,
                             ae_consts.get_status(status=task_res['status']),
                             task_res['err'], work['label']))
            # if/else debug
        else:
            log.info('connecting to broker={} backend={}'.format(
                broker_url, backend_url))

            # Get the Celery app
            app = get_celery_app.get_celery_app(
                name=__name__,
                auth_url=broker_url,
                backend_url=backend_url,
                path_to_config_module=celery_config_module,
                ssl_options=ssl_options,
                transport_options=transport_options,
                include_tasks=include_tasks)

            log.info('calling task={} - work={}'.format(
                task_name, ae_consts.ppj(work)))
            job_id = app.send_task(task_name, (work, ))
            log.info('calling task={} - success job_id={}'.format(
                task_name, job_id))
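
# A hedged sketch of calling this fetch tool from a shell (the script name
# 'fetch_new_stock_datasets.py' is an assumption - use whatever entrypoint
# wraps this function in your deployment):
#
#   # fetch pricing, news and options for SPY without celery (offline mode):
#   python fetch_new_stock_datasets.py -t SPY -d
#
#   # publish the fetch job to the celery workers instead:
#   python fetch_new_stock_datasets.py -t SPY -Z
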
def start_distributed_algo():
    """start_distributed_algo

    Run a derived algorithm with an algorithm config dictionary

    :param config_dict: algorithm config dictionary
    """

    log.debug('start - sa')

    parser = argparse.ArgumentParser(description=('stock analysis tool'))
    parser.add_argument('-t', help=('ticker'), required=False, dest='ticker')
    parser.add_argument('-e',
                        help=('file path to extract an '
                              'algorithm-ready dataset from redis'),
                        required=False,
                        dest='algo_extract_loc')
    parser.add_argument('-l',
                        help=('show dataset in this file'),
                        required=False,
                        dest='show_from_file')
    parser.add_argument('-H',
                        help=('show trading history dataset in this file'),
                        required=False,
                        dest='show_history_from_file')
    parser.add_argument(
        '-E',
        help=('show trading performance report dataset in this file'),
        required=False,
        dest='show_report_from_file')
    parser.add_argument(
        '-L',
        help=('restore an algorithm-ready dataset file back into redis'),
        required=False,
        dest='restore_algo_file')
    parser.add_argument('-f',
                        help=('save the trading history dataframe '
                              'to this file'),
                        required=False,
                        dest='history_json_file')
    parser.add_argument(
        '-J',
        help=('plot action - after preparing you can use: '
              '-J show to open the image (good for debugging)'),
        required=False,
        dest='plot_action')
    parser.add_argument(
        '-b',
        help=('run a backtest using the dataset in '
              'a file path/s3 key/redis key formats: '
              'file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
              's3://algoready/SPY-latest.json or '
              'redis://SPY-latest'),
        required=False,
        dest='backtest_loc')
    parser.add_argument('-B',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-C',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument(
        '-w',
        help=('optional - flag for publishing an algorithm job '
              'using Celery to the ae workers'),
        required=False,
        dest='run_on_engine',
        action='store_true')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-K',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-Z',
                        help=('optional - s3 secure: default False'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-s',
                        help=('optional - start date: YYYY-MM-DD'),
                        required=False,
                        dest='start_date')
    parser.add_argument('-n',
                        help=('optional - end date: YYYY-MM-DD'),
                        required=False,
                        dest='end_date')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-G',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument(
        '-g',
        help=('Path to a custom algorithm module file '
              'on disk. This module must have a single '
              'class that inherits from: '
              'https://github.com/AlgoTraders/stock-analysis-engine/'
              'blob/master/'
              'analysis_engine/algo.py Additionally you '
              'can find the Example-Minute-Algorithm here: '
              'https://github.com/AlgoTraders/stock-analysis-engine/'
              'blob/master/analysis_engine/mocks/'
              'example_algo_minute.py'),
        required=False,
        dest='run_algo_in_file')
    parser.add_argument('-p',
                        help=('optional - s3 bucket/file for trading history'),
                        required=False,
                        dest='algo_history_loc')
    parser.add_argument(
        '-o',
        help=('optional - s3 bucket/file for trading performance report'),
        required=False,
        dest='algo_report_loc')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-R',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument(
        '-c',
        help=('optional - algorithm config_file path for setting '
              'up internal algorithm trading strategies and '
              'indicators'),
        required=False,
        dest='config_file')
    parser.add_argument('-v',
                        help=('set the Algorithm to verbose logging'),
                        required=False,
                        dest='verbose_algo',
                        action='store_true')
    parser.add_argument(
        '-P',
        help=('set the Algorithm\'s IndicatorProcessor to verbose logging'),
        required=False,
        dest='verbose_processor',
        action='store_true')
    parser.add_argument(
        '-I',
        help=('set all Algorithm\'s Indicators to verbose logging '
              '(note individual indicators support a \'verbose\' key '
              'that can be set to True to debug just one '
              'indicator)'),
        required=False,
        dest='verbose_indicators',
        action='store_true')
    parser.add_argument(
        '-V',
        help=('inspect the datasets an algorithm is processing - this '
              'will slow down processing to show debugging'),
        required=False,
        dest='inspect_datasets',
        action='store_true')
    parser.add_argument(
        '-j',
        help=('run the algorithm on just this specific date in the datasets '
              '- specify the date in a format: YYYY-MM-DD like: 2018-11-29'),
        required=False,
        dest='run_this_date')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    ticker = None
    use_balance = 10000.0
    use_commission = 6.0
    use_start_date = None
    use_end_date = None
    use_config_file = None
    debug = False
    verbose_algo = None
    verbose_processor = None
    verbose_indicators = None
    inspect_datasets = None
    history_json_file = None
    run_this_date = None
    algo_obj = None
    algo_history_loc = None
    algo_report_loc = None
    algo_extract_loc = None
    backtest_loc = None

    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    path_to_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS
    load_from_s3_bucket = None
    load_from_s3_key = None
    load_from_redis_key = None
    load_from_file = None
    load_compress = True
    load_publish = True
    load_config = None
    report_redis_key = None
    report_s3_bucket = None
    report_s3_key = None
    report_file = None
    report_compress = True
    report_publish = False
    report_config = None
    history_redis_key = None
    history_s3_bucket = None
    history_s3_key = None
    history_file = None
    history_compress = True
    history_publish = True
    history_config = None
    extract_redis_key = None
    extract_s3_bucket = None
    extract_s3_key = None
    extract_file = None
    extract_save_dir = None
    extract_compress = False
    extract_publish = False
    extract_config = None
    s3_enabled = True
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = None
    s3_secure = ae_consts.S3_SECURE
    redis_enabled = True
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = None
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    redis_serializer = 'json'
    redis_encoding = 'utf-8'
    publish_to_s3 = True
    publish_to_redis = False
    publish_to_slack = False
    slack_enabled = False
    slack_code_block = False
    slack_full_width = False

    dataset_type = ae_consts.SA_DATASET_TYPE_ALGO_READY
    serialize_datasets = ae_consts.DEFAULT_SERIALIZED_DATASETS
    compress = False
    encoding = 'utf-8'
    debug = False
    run_on_engine = False

    auto_fill = True
    timeseries = 'minute'
    trade_strategy = 'count'

    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.history_json_file:
        history_json_file = args.history_json_file
    if args.ticker:
        ticker = args.ticker.upper()
    if args.debug:
        debug = True
    if args.verbose_algo:
        verbose_algo = True
    if args.verbose_processor:
        verbose_processor = True
    if args.verbose_indicators:
        verbose_indicators = True
    if args.inspect_datasets:
        inspect_datasets = True
    if args.run_this_date:
        run_this_date = args.run_this_date
    if args.start_date:
        try:
            use_start_date = '{} 00:00:00'.format(str(args.start_date))
            datetime.datetime.strptime(args.start_date,
                                       ae_consts.COMMON_DATE_FORMAT)
        except Exception as e:
            msg = ('please use a start date formatted as: {}'
                   '\n'
                   'error was: {}'.format(ae_consts.COMMON_DATE_FORMAT, e))
            log.error(msg)
            sys.exit(1)
        # end of testing for a valid date
    # end of args.start_date
    if args.end_date:
        try:
            use_end_date = '{} 00:00:00'.format(str(args.end_date))
            datetime.datetime.strptime(args.end_date,
                                       ae_consts.COMMON_DATE_FORMAT)
        except Exception as e:
            msg = ('please use an end date formatted as: {}'
                   '\n'
                   'error was: {}'.format(ae_consts.COMMON_DATE_FORMAT, e))
            log.error(msg)
            sys.exit(1)
        # end of testing for a valid date
    # end of args.end_date
    algo_mod_path = None
    config_dict = None
    if args.run_algo_in_file:
        if not os.path.exists(args.run_algo_in_file):
            log.error('missing algorithm module file: {}'.format(
                args.run_algo_in_file))
            sys.exit(1)
        algo_mod_path = args.run_algo_in_file
    if args.config_file:
        use_config_file = args.config_file
        if not os.path.exists(use_config_file):
            log.error('Failed: unable to find config file: -c {}'.format(
                use_config_file))
            sys.exit(1)
        with open(use_config_file) as config_fp:
            config_dict = json.loads(config_fp.read())
        algo_mod_path = config_dict.get('algo_path', algo_mod_path)
        if not algo_mod_path or not os.path.exists(algo_mod_path):
            log.error('missing algorithm module file from config: {}'.format(
                algo_mod_path))
            sys.exit(1)
    """
    Finalize the algo config
    """
    if config_dict:
        use_balance = float(config_dict.get('balance', use_balance))
        use_commission = float(config_dict.get('commission', use_commission))
        ticker = str(config_dict.get('ticker', ticker)).upper()

        config_dict['ticker'] = ticker
        config_dict['balance'] = use_balance
        config_dict['commission'] = use_commission
    else:
        if not ticker:
            ticker = ae_consts.TICKER
    if not ticker:
        log.error('usage error: please set a ticker with -t <TICKER>')
        sys.exit(1)
    if config_dict is None:
        # no -c config file was provided - build a minimal config so the
        # flag assignments below and run_custom_algo still work
        config_dict = {
            'ticker': ticker,
            'balance': use_balance,
            'commission': use_commission
        }

    if verbose_algo:
        config_dict['verbose'] = verbose_algo
    if verbose_processor:
        config_dict['verbose_processor'] = verbose_processor
    if verbose_indicators:
        config_dict['verbose_indicators'] = verbose_indicators
    if inspect_datasets:
        config_dict['inspect_datasets'] = inspect_datasets
    if run_this_date:
        config_dict['run_this_date'] = run_this_date

    log.info('starting')
    """
    Run a custom algo module from disk
    """
    if algo_mod_path:

        if args.backtest_loc:
            backtest_loc = args.backtest_loc
            if ('file:/' not in backtest_loc and 's3://' not in backtest_loc
                    and 'redis://' not in backtest_loc):
                log.error(
                    'invalid -b <backtest dataset file> specified. '
                    '{} '
                    'please use either: '
                    '-b file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-b s3://algoready/SPY-latest.json or '
                    '-b redis://SPY-latest'.format(backtest_loc))
                sys.exit(1)
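            # a worked example of this parsing (illustrative values):
            # 's3://algoready/SPY-latest.json'.split('/') yields
            # ['s3:', '', 'algoready', 'SPY-latest.json'], so the bucket is
            # index [-2] ('algoready') and the key is index [-1]
            # ('SPY-latest.json')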
            if 's3://' in backtest_loc:
                load_from_s3_bucket = backtest_loc.split('/')[-2]
                load_from_s3_key = backtest_loc.split('/')[-1]
            elif 'redis://' in backtest_loc:
                load_from_redis_key = backtest_loc.split('/')[-1]
            elif 'file:/' in backtest_loc:
                load_from_file = backtest_loc.split(':')[-1]
            load_publish = True
        # end of parsing supported transport - loading an algo-ready

        if args.algo_history_loc:
            algo_history_loc = args.algo_history_loc
            if ('file:/' not in algo_history_loc
                    and 's3://' not in algo_history_loc
                    and 'redis://' not in algo_history_loc):
                log.error(
                    'invalid -p <trading history location> specified. '
                    '{} '
                    'please use either: '
                    '-p file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-p s3://algoready/SPY-latest.json or '
                    '-p redis://SPY-latest'.format(algo_history_loc))
                sys.exit(1)
            if 's3://' in algo_history_loc:
                history_s3_bucket = algo_history_loc.split('/')[-2]
                history_s3_key = algo_history_loc.split('/')[-1]
            elif 'redis://' in algo_history_loc:
                history_redis_key = algo_history_loc.split('/')[-1]
            elif 'file:/' in algo_history_loc:
                history_file = algo_history_loc.split(':')[-1]
            history_publish = True
        # end of parsing supported transport - trading history

        if args.algo_report_loc:
            algo_report_loc = args.algo_report_loc
            if ('file:/' not in algo_report_loc
                    and 's3://' not in algo_report_loc
                    and 'redis://' not in algo_report_loc):
                log.error(
                    'invalid -o <trading performance report location> '
                    'specified. '
                    '{} '
                    'please use either: '
                    '-o file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-o s3://algoready/SPY-latest.json or '
                    '-o redis://SPY-latest'.format(algo_report_loc))
                sys.exit(1)
            if 's3://' in algo_report_loc:
                report_s3_bucket = algo_report_loc.split('/')[-2]
                report_s3_key = algo_report_loc.split('/')[-1]
            elif 'redis://' in algo_report_loc:
                report_redis_key = algo_report_loc.split('/')[-1]
            elif 'file:/' in algo_report_loc:
                report_file = algo_report_loc.split(':')[-1]
            report_publish = True
        # end of parsing supported transport - trading performance report

        if args.algo_extract_loc:
            algo_extract_loc = args.algo_extract_loc
            if ('file:/' not in algo_extract_loc
                    and 's3://' not in algo_extract_loc
                    and 'redis://' not in algo_extract_loc):
                log.error(
                    'invalid -e <algorithm-ready extract location> '
                    'specified. '
                    '{} '
                    'please use either: '
                    '-e file:/opt/sa/tests/datasets/algo/SPY-latest.json or '
                    '-e s3://algoready/SPY-latest.json or '
                    '-e redis://SPY-latest'.format(algo_extract_loc))
                sys.exit(1)
            if 's3://' in algo_extract_loc:
                extract_s3_bucket = algo_extract_loc.split('/')[-2]
                extract_s3_key = algo_extract_loc.split('/')[-1]
            elif 'redis://' in algo_extract_loc:
                extract_redis_key = algo_extract_loc.split('/')[-1]
            elif 'file:/' in algo_extract_loc:
                extract_file = algo_extract_loc.split(':')[-1]
            extract_publish = True
        # end of parsing supported transport - extract algorithm-ready

        if args.run_on_engine:
            run_on_engine = True
            if verbose_algo:
                log.info('starting algo on the engine')

        use_name = config_dict.get('name', 'missing-algo-name')
        auto_fill = config_dict.get('auto_fill', auto_fill)
        timeseries = config_dict.get('timeseries', timeseries)
        trade_strategy = config_dict.get('trade_strategy', trade_strategy)

        algo_res = run_custom_algo.run_custom_algo(
            mod_path=algo_mod_path,
            ticker=config_dict['ticker'],
            balance=config_dict['balance'],
            commission=config_dict['commission'],
            name=use_name,
            start_date=use_start_date,
            end_date=use_end_date,
            auto_fill=auto_fill,
            config_dict=config_dict,
            load_from_s3_bucket=load_from_s3_bucket,
            load_from_s3_key=load_from_s3_key,
            load_from_redis_key=load_from_redis_key,
            load_from_file=load_from_file,
            load_compress=load_compress,
            load_publish=load_publish,
            load_config=load_config,
            report_redis_key=report_redis_key,
            report_s3_bucket=report_s3_bucket,
            report_s3_key=report_s3_key,
            report_file=report_file,
            report_compress=report_compress,
            report_publish=report_publish,
            report_config=report_config,
            history_redis_key=history_redis_key,
            history_s3_bucket=history_s3_bucket,
            history_s3_key=history_s3_key,
            history_file=history_file,
            history_compress=history_compress,
            history_publish=history_publish,
            history_config=history_config,
            extract_redis_key=extract_redis_key,
            extract_s3_bucket=extract_s3_bucket,
            extract_s3_key=extract_s3_key,
            extract_file=extract_file,
            extract_save_dir=extract_save_dir,
            extract_compress=extract_compress,
            extract_publish=extract_publish,
            extract_config=extract_config,
            publish_to_slack=publish_to_slack,
            publish_to_s3=publish_to_s3,
            publish_to_redis=publish_to_redis,
            dataset_type=dataset_type,
            serialize_datasets=serialize_datasets,
            compress=compress,
            encoding=encoding,
            redis_enabled=redis_enabled,
            redis_key=redis_key,
            redis_address=redis_address,
            redis_db=redis_db,
            redis_password=redis_password,
            redis_expire=redis_expire,
            redis_serializer=redis_serializer,
            redis_encoding=redis_encoding,
            s3_enabled=s3_enabled,
            s3_key=s3_key,
            s3_address=s3_address,
            s3_bucket=s3_bucket_name,
            s3_access_key=s3_access_key,
            s3_secret_key=s3_secret_key,
            s3_region_name=s3_region_name,
            s3_secure=s3_secure,
            slack_enabled=slack_enabled,
            slack_code_block=slack_code_block,
            slack_full_width=slack_full_width,
            dataset_publish_extract=extract_publish,
            dataset_publish_history=history_publish,
            dataset_publish_report=report_publish,
            run_on_engine=run_on_engine,
            auth_url=broker_url,
            backend_url=backend_url,
            include_tasks=include_tasks,
            ssl_options=ssl_options,
            transport_options=transport_options,
            path_to_config_module=path_to_config_module,
            timeseries=timeseries,
            trade_strategy=trade_strategy,
            verbose=verbose_algo)

        show_label = 'algo.name={}'.format(use_name)
        show_extract = '{}'.format(algo_extract_loc)
        show_history = '{}'.format(algo_history_loc)
        show_report = '{}'.format(algo_report_loc)
        base_label = ('load={} '
                      'extract={} '
                      'history={} '
                      'report={}'.format(args.run_algo_in_file, show_extract,
                                         show_history, show_report))

        task_id = None
        if run_on_engine:
            task_id = algo_res.get('rec', {}).get('task_id', None)
        if task_id:
            log.info('waiting on task_id={} to finish'.format(task_id))
            res = task_id.get()
            history_config = res.get('algo_req',
                                     {}).get('history_config', {})
            s3_bucket = history_config.get('s3_bucket', None)
            s3_key = history_config.get('s3_key', None)
            load_res = load_history_utils.load_history_dataset(
                s3_bucket=s3_bucket, s3_key=s3_key)

            history_df = load_res[ticker]
            log.info('plotting history df with columns: {}'.format(
                history_df.columns.values))

            history_df['date'] = pd.to_datetime(history_df['date'])
            if 'minute' in history_df:
                history_df['minute'] = pd.to_datetime(history_df['minute'])
            ticker = history_df['ticker'].iloc[0]

            first_date = history_df['date'].iloc[0]
            end_date = history_df['date'].iloc[-1]
            title = ('Trading History {}\n'
                     'Backtest dates from {} to {}'.format(
                         ticker, first_date, end_date))
            use_xcol = 'date'
            use_as_date_format = '%d\n%b'
            use_minute = False
            if 'minute' in history_df:
                found_valid_minute = history_df['minute'].iloc[0]
                if found_valid_minute:
                    use_minute = True

            if use_minute:
                use_xcol = 'minute'
                use_as_date_format = '%d %H:%M:%S\n%b'
            xlabel = 'Dates vs Algo values'
            ylabel = 'Algo values'
            df_filter = (history_df['close'] > 1.00)

            # set default columns:
            blue = None
            green = None
            orange = None

            red = 'balance'
            blue = 'close'

            plot_trading_history.plot_trading_history(
                title=title,
                df=history_df,
                red=red,
                blue=blue,
                green=green,
                orange=orange,
                date_col=use_xcol,
                date_format=use_as_date_format,
                xlabel=xlabel,
                ylabel=ylabel,
                df_filter=df_filter,
                show_plot=True,
                dropna_for_all=True)

            return
        else:
            algo_obj = algo_res.get('algo', None)
        # end of getting the algo results from s3

        if not algo_obj:
            log.error(
                '{} - did not create algorithm object'.format(show_label))
            sys.exit(1)

        if not run_on_engine:
            algo_trade_history_recs = algo_res['rec'].get('history', [])
            show_label = ('{} algo.name={} {} trade_history_len={}'.format(
                ticker, use_name, base_label, len(algo_trade_history_recs)))
        if args.debug:
            log.info('algo_res={}'.format(algo_res))
            if algo_res['status'] == ae_consts.SUCCESS:
                log.info('{} - done running {}'.format(
                    ae_consts.get_status(status=algo_res['status']),
                    show_label))
            else:
                log.error('{} - done running {}'.format(
                    ae_consts.get_status(status=algo_res['status']),
                    show_label))
        else:
            if algo_res['status'] == ae_consts.SUCCESS:
                log.info('{} - done running {}'.format(
                    ae_consts.get_status(status=algo_res['status']),
                    show_label))
            else:
                log.error('run_custom_algo returned error: {}'.format(
                    algo_res['err']))
                sys.exit(1)
        # end of running the custom algo handler

    # end if running a custom algorithm module

    if algo_obj:

        trading_history_dict = algo_obj.get_history_dataset()
        history_df = trading_history_dict[ticker]
        if not hasattr(history_df, 'to_json'):
            return

        if history_json_file:
            log.info('saving history to: {}'.format(history_json_file))
            history_df.to_json(history_json_file,
                               orient='records',
                               date_format='iso')

        log.info('plotting history')

        first_date = history_df['date'].iloc[0]
        end_date = history_df['date'].iloc[-1]
        title = ('Trading History {} for Algo {}\n'
                 'Backtest dates from {} to {}'.format(
                     ticker, trading_history_dict['algo_name'], first_date,
                     end_date))
        use_xcol = 'date'
        use_as_date_format = '%d\n%b'
        if config_dict.get('timeseries', timeseries) == 'minute':
            use_xcol = 'minute'
            use_as_date_format = '%d %H:%M:%S\n%b'
        xlabel = 'Dates vs {} values'.format(trading_history_dict['algo_name'])
        ylabel = 'Algo {}\nvalues'.format(trading_history_dict['algo_name'])
        df_filter = (history_df['close'] > 0.01)

        # set default hloc columns:
        blue = None
        green = None
        orange = None

        red = 'close'
        blue = 'balance'

        if debug:
            for i, r in history_df.iterrows():
                log.info('{} - {}'.format(r['minute'], r['close']))

        plot_trading_history.plot_trading_history(
            title=title,
            df=history_df,
            red=red,
            blue=blue,
            green=green,
            orange=orange,
            date_col=use_xcol,
            date_format=use_as_date_format,
            xlabel=xlabel,
            ylabel=ylabel,
            df_filter=df_filter,
            show_plot=True,
            dropna_for_all=True)
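
# A hedged example of the algorithm config file passed with -c above. The
# keys mirror how config_dict is read in start_distributed_algo ('name',
# 'ticker', 'balance', 'commission', 'timeseries', 'trade_strategy',
# 'algo_path'); the values and file contents are illustrative, not a
# definitive schema:
#
#   {
#       "name": "example-minute-algo",
#       "ticker": "SPY",
#       "balance": 10000.0,
#       "commission": 6.0,
#       "timeseries": "minute",
#       "trade_strategy": "count",
#       "algo_path": "/opt/sa/analysis_engine/mocks/example_algo_minute.py"
#   }
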
def load_history_dataset(
        history_dataset=None,
        dataset_type=None,
        serialize_datasets=None,
        path_to_file=None,
        compress=None,
        encoding='utf-8',
        redis_enabled=None,
        redis_key=None,
        redis_address=None,
        redis_db=None,
        redis_password=None,
        redis_expire=None,
        redis_serializer='json',
        redis_encoding='utf-8',
        s3_enabled=None,
        s3_key=None,
        s3_address=None,
        s3_bucket=None,
        s3_access_key=None,
        s3_secret_key=None,
        s3_region_name=None,
        s3_secure=None,
        slack_enabled=False,
        slack_code_block=False,
        slack_full_width=False,
        verbose=False):
    """load_history_dataset

    Load a ``Trading History`` Dataset from a file or s3 (note:
    redis is not supported yet)

    :param history_dataset: optional - already loaded history dataset
    :param dataset_type: optional - dataset type
        (default is ``analysis_engine.consts.SA_DATASET_TYPE_TRADING_HISTORY``)
    :param path_to_file: optional - path to a trading history dataset
        in a file
    :param serialize_datasets: optional - list of dataset names to
        deserialize in the dataset
    :param compress: optional - boolean flag for decompressing
        the contents of the ``path_to_file`` if necessary
        (default is ``analysis_engine.consts.ALGO_HISTORY_COMPRESS``
        and uses ``zlib`` for compression)
    :param encoding: optional - string for data encoding

    **(Optional) Redis connectivity arguments**

    :param redis_enabled: bool - toggle for auto-caching all
        datasets in Redis
        (default is ``analysis_engine.consts.ENABLED_REDIS_PUBLISH``)
    :param redis_key: string - key to save the data in redis
        (default is ``None``)
    :param redis_address: Redis connection string format: ``host:port``
        (default is ``analysis_engine.consts.REDIS_ADDRESS``)
    :param redis_db: Redis db to use
        (default is ``analysis_engine.consts.REDIS_DB``)
    :param redis_password: optional - Redis password
        (default is ``analysis_engine.consts.REDIS_PASSWORD``)
    :param redis_expire: optional - Redis expire value
        (default is ``None``)
    :param redis_serializer: not used yet - support for future
        pickle objects in redis
        (default is ``json``)
    :param redis_encoding: format of the encoded key in redis
        (default is ``utf-8``)

    **(Optional) Minio (S3) connectivity arguments**

    :param s3_enabled: bool - toggle for auto-archiving on Minio (S3)
        (default is ``analysis_engine.consts.ENABLED_S3_UPLOAD``)
    :param s3_key: string - key to load the data from in s3
        (default is ``None``)
    :param s3_address: Minio S3 connection string format: ``host:port``
        (default is ``analysis_engine.consts.S3_ADDRESS``)
    :param s3_bucket: S3 Bucket for storing the artifacts
        (default is ``analysis_engine.consts.S3_BUCKET``) which should be
        viewable on a browser:
        http://localhost:9000/minio/
    :param s3_access_key: S3 Access key
        (default is ``analysis_engine.consts.S3_ACCESS_KEY``)
    :param s3_secret_key: S3 Secret key
        (default is ``analysis_engine.consts.S3_SECRET_KEY``)
    :param s3_region_name: S3 region name
        (default is ``analysis_engine.consts.S3_REGION_NAME``)
    :param s3_secure: Transmit using tls encryption
        (default is ``analysis_engine.consts.S3_SECURE``)

    **(Optional) Slack arguments**

    :param slack_enabled: optional - boolean for
        publishing to slack
    :param slack_code_block: optional - boolean for
        publishing as a code block in slack
    :param slack_full_width: optional - boolean for
        publishing to slack using the full
        width allowed

    **Additional arguments**

    :param verbose: optional - bool for increasing
        logging
    """

    if not dataset_type:
        dataset_type = consts.SA_DATASET_TYPE_TRADING_HISTORY
    if not serialize_datasets:
        serialize_datasets = consts.DEFAULT_SERIALIZED_DATASETS
    if not redis_enabled:
        redis_enabled = consts.ENABLED_REDIS_PUBLISH
    if not redis_address:
        redis_address = consts.REDIS_ADDRESS
    if not redis_db:
        redis_db = consts.REDIS_DB
    if not redis_password:
        redis_password = consts.REDIS_PASSWORD
    if not s3_enabled:
        s3_enabled = consts.ENABLED_S3_UPLOAD
    if not s3_address:
        s3_address = consts.S3_ADDRESS
    if not s3_bucket:
        s3_bucket = consts.S3_BUCKET
    if not s3_access_key:
        s3_access_key = consts.S3_ACCESS_KEY
    if not s3_secret_key:
        s3_secret_key = consts.S3_SECRET_KEY
    if not s3_region_name:
        s3_region_name = consts.S3_REGION_NAME
    if not s3_secure:
        s3_secure = consts.S3_SECURE
    if compress is None:
        compress = consts.ALGO_HISTORY_COMPRESS

    use_ds = history_dataset
    if not use_ds:
        log.info(
            'loading {} from file={} s3={} redis={}'.format(
                consts.get_status(status=dataset_type),
                path_to_file,
                s3_key,
                redis_key))
    # load if not created

    supported_type = False
    if dataset_type == consts.SA_DATASET_TYPE_TRADING_HISTORY:
        supported_type = True
        if (path_to_file and
                not use_ds):
            if not os.path.exists(path_to_file):
                log.error('missing file: {}'.format(path_to_file))
                return None
            use_ds = file_utils.load_history_dataset_from_file(
                path_to_file=path_to_file,
                compress=compress,
                encoding=encoding,
                serialize_datasets=serialize_datasets)
        elif (s3_key and
                not use_ds):
            use_ds = s3_utils.load_history_dataset_from_s3(
                s3_key=s3_key,
                s3_address=s3_address,
                s3_bucket=s3_bucket,
                s3_access_key=s3_access_key,
                s3_secret_key=s3_secret_key,
                s3_region_name=s3_region_name,
                s3_secure=s3_secure,
                compress=compress,
                encoding=encoding,
                serialize_datasets=serialize_datasets)
    else:
        supported_type = False
        use_ds = None
        log.error(
            'unsupported dataset_type={} - unable to load '
            'from file={} s3={} redis={}'.format(
                dataset_type,
                path_to_file,
                s3_key,
                redis_key))
    # end of supported dataset types

    if not use_ds and supported_type:
        log.error(
            'unable to load a dataset from file={} '
            's3={} redis={}'.format(
                path_to_file,
                s3_key,
                redis_key))

    return use_ds
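
# A minimal usage sketch for load_history_dataset, assuming a local
# trading history file keyed by ticker (the path is illustrative):
#
#   res = load_history_dataset(
#       path_to_file='/opt/sa/tests/datasets/algo/SPY-latest.json',
#       compress=False)
#   if res:
#       history_df = res['SPY']  # loaded datasets are keyed by ticker
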
def build_sell_order(
        ticker,
        num_owned,
        close,
        balance,
        commission,
        date,
        details,
        use_key,
        minute=None,
        shares=None,
        version=1,
        auto_fill=True,
        is_live_trading=False,
        backtest_shares_default=10,
        reason=None):
    """build_sell_order

    Create an algorithm sell order as a dictionary

    :param ticker: ticker
    :param num_owned: integer current owned
        number of shares for this asset
    :param close: float closing price of the asset
    :param balance: float amount of available capital
    :param commission: float for commission costs
    :param date: string trade date for that row usually
        ``COMMON_DATE_FORMAT`` (``YYYY-MM-DD``)
    :param minute: optional - string with the minute that the
        order was placed. format is
        ``COMMON_TICK_DATE_FORMAT`` (``YYYY-MM-DD HH:MM:SS``)
    :param details: dictionary for full row of values to review
        all sells after the algorithm finishes.
        (usually ``row.to_json()``)
    :param use_key: string for redis and s3 publishing of the algorithm
        result dictionary as a json-serialized dictionary
    :param shares: optional - integer number of shares to sell.
        if ``None``, sell all ``num_owned`` shares at the ``close`` price
    :param version: optional - version tracking integer
    :param auto_fill: optional - bool for auto filling
        the sell order for backtesting (default ``True``)
    :param is_live_trading: optional - bool for filling trades
        for live trading or for backtest tuning filled
        (default ``False`` which is backtest mode)
    :param backtest_shares_default: optional - integer for
        simulating shares during a backtest even if there
        are not enough funds
        (default ``10``)
    :param reason: optional - string for recording why the algo
        decided to sell for review after the algorithm finishes
    """
    status = ae_consts.TRADE_OPEN
    s3_bucket_name = ae_consts.ALGO_SELLS_S3_BUCKET_NAME
    s3_key = use_key
    redis_key = use_key
    s3_enabled = True
    redis_enabled = True

    cost_of_trade = None
    sell_price = 0.0
    new_shares = num_owned
    new_balance = balance
    created_date = None

    tradable_funds = balance - commission

    if num_owned == 0:
        status = ae_consts.TRADE_NO_SHARES_TO_SELL
    elif close > 0.1 and tradable_funds > 10.0:
        cost_of_trade = commission
        if shares:
            if shares > num_owned:
                shares = num_owned
        else:
            shares = num_owned
        sell_price = ae_consts.to_f(
            val=(shares * close) + commission)
        if cost_of_trade > balance:
            status = ae_consts.TRADE_NOT_ENOUGH_FUNDS
        else:
            created_date = ae_utils.utc_now_str()
            if auto_fill:
                new_shares = num_owned - shares
                new_balance = ae_consts.to_f(balance + sell_price)
                status = ae_consts.TRADE_FILLED
            else:
                new_shares = shares
                new_balance = balance
    else:
        status = ae_consts.TRADE_NOT_ENOUGH_FUNDS

    order_dict = {
        'ticker': ticker,
        'status': status,
        'balance': new_balance,
        'shares': new_shares,
        'sell_price': sell_price,
        'prev_balance': balance,
        'prev_shares': num_owned,
        'close': close,
        'details': details,
        'reason': reason,
        'date': date,
        'minute': minute,
        'created': created_date,
        's3_bucket': s3_bucket_name,
        's3_key': s3_key,
        'redis_key': redis_key,
        's3_enabled': s3_enabled,
        'redis_enabled': redis_enabled,
        'version': version
    }

    use_date = minute
    if not use_date:
        use_date = date

    log.debug(
        '{} {} sell {} order={}'.format(
            ticker,
            use_date,
            ae_consts.get_status(status=order_dict['status']),
            ae_consts.ppj(order_dict)))

    return order_dict
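
# A minimal usage sketch for build_sell_order (all values illustrative).
# With the defaults (auto_fill=True) and 10 owned shares at a 280.50 close,
# the order fills: shares defaults to num_owned, sell_price is
# (10 * 280.50) + 6.00 commission = 2811.00, and the new balance becomes
# 1000.00 + 2811.00 = 3811.00 with status TRADE_FILLED:
#
#   order = build_sell_order(
#       ticker='SPY',
#       num_owned=10,
#       close=280.50,
#       balance=1000.00,
#       commission=6.00,
#       date='2018-11-29',
#       details={},
#       use_key='SPY_2018-11-29',
#       reason='example sell for review')
#   assert order['status'] == ae_consts.TRADE_FILLED
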
def publish(data,
            label=None,
            convert_to_json=False,
            is_df=False,
            output_file=None,
            df_compress=False,
            compress=False,
            redis_enabled=True,
            redis_key=None,
            redis_address=None,
            redis_db=None,
            redis_password=None,
            redis_expire=None,
            redis_serializer='json',
            redis_encoding='utf-8',
            s3_enabled=True,
            s3_key=None,
            s3_address=None,
            s3_bucket=None,
            s3_access_key=None,
            s3_secret_key=None,
            s3_region_name=None,
            s3_secure=False,
            slack_enabled=False,
            slack_code_block=False,
            slack_full_width=False,
            verbose=False,
            silent=False,
            **kwargs):
    """publish

    Publish ``data`` to multiple optional endpoints:
    - a local file path (``output_file``)
    - minio (``s3_bucket`` and ``s3_key``)
    - redis (``redis_key``)
    - slack

    :return: status value
    :param data: data to publish
    :param convert_to_json: convert ``data`` to a
        json-serialized string. this function will throw if
        ``json.dumps(data)`` fails
    :param is_df: convert ``pd.DataFrame`` using
        ``pd.DataFrame.to_json()`` to a
        json-serialized string. this function will throw if
        ``to_json()`` fails
    :param label: log tracking label
    :param output_file: path to save the data
        to a file
    :param df_compress: optional - compress data that is a
        ``pandas.DataFrame`` before publishing
    :param compress: optional - compress before publishing
        (default is ``False``)
    :param verbose: optional - boolean to log output
        (default is ``False``)
    :param silent: optional - boolean to disable log output
        (default is ``False``)
    :param kwargs: optional - future argument support

    **(Optional) Redis connectivity arguments**

    :param redis_enabled: bool - toggle for auto-caching all
        datasets in Redis
        (default is ``True``)
    :param redis_key: string - key to save the data in redis
        (default is ``None``)
    :param redis_address: Redis connection string format: ``host:port``
        (default is ``localhost:6379``)
    :param redis_db: Redis db to use
        (default is ``0``)
    :param redis_password: optional - Redis password
        (default is ``None``)
    :param redis_expire: optional - Redis expire value
        (default is ``None``)
    :param redis_serializer: not used yet - support for future
        pickle objects in redis
    :param redis_encoding: format of the encoded key in redis

    **(Optional) Minio (S3) connectivity arguments**

    :param s3_enabled: bool - toggle for auto-archiving on Minio (S3)
        (default is ``True``)
    :param s3_key: string - key to save the data in s3
        (default is ``None``)
    :param s3_address: Minio S3 connection string format: ``host:port``
        (default is ``localhost:9000``)
    :param s3_bucket: S3 Bucket for storing the artifacts
        (default is ``dev``) which should be viewable on a browser:
        http://localhost:9000/minio/dev/
    :param s3_access_key: S3 Access key
        (default is ``trexaccesskey``)
    :param s3_secret_key: S3 Secret key
        (default is ``trex123321``)
    :param s3_region_name: S3 region name
        (default is ``us-east-1``)
    :param s3_secure: Transmit using tls encryption
        (default is ``False``)

    **(Optional) Slack arguments**

    :param slack_enabled: optional - boolean for
        publishing to slack
    :param slack_code_block: optional - boolean for
        publishing as a code block in slack
    :param slack_full_width: optional - boolean for
        publishing to slack using the full
        width allowed
    """

    status = ae_consts.NOT_RUN
    use_data = data
    if (not df_compress and not is_df and not use_data):
        log.info('missing data')
        return ae_consts.INVALID

    if convert_to_json and not is_df:
        if verbose:
            log.debug('start convert to json')
        use_data = json.dumps(data)
        if verbose:
            log.debug('done convert to json')
    if is_df:
        if verbose:
            log.debug('start df to_json')
        use_data = data.to_json(orient='records', date_format='iso')
        if verbose:
            log.debug('done df to_json')

    if compress:
        if verbose:
            log.debug('compress start')
        use_data = zlib.compress(use_data.encode(redis_encoding), 9)
        if verbose:
            log.debug('compress end')

    already_compressed = False
    if df_compress:
        use_data = compress_data.compress_data(data=data)
        already_compressed = True

    num_bytes = len(use_data)
    num_mb = ae_consts.get_mb(num_bytes)

    if verbose:
        log.debug('start - file={} s3_key={} redis_key={} slack={} '
                  'compress={} size={}MB'.format(output_file, s3_key,
                                                 redis_key, slack_enabled,
                                                 compress, num_mb))

    if s3_enabled and s3_address and s3_bucket and s3_key:
        endpoint_url = 'http://{}'.format(s3_address)
        if s3_secure:
            endpoint_url = 'https://{}'.format(s3_address)

        if verbose:
            log.debug('s3 start - {} endpoint_url={} '
                      'region={}'.format(label, endpoint_url, s3_region_name))

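        # note: boto3 targets AWS by default; passing endpoint_url below
        # points the client at a local Minio (or any S3-compatible) server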
        s3 = boto3.resource(
            's3',
            endpoint_url=endpoint_url,
            aws_access_key_id=s3_access_key,
            aws_secret_access_key=s3_secret_key,
            region_name=s3_region_name,
            config=boto3.session.Config(signature_version='s3v4'))

        if s3.Bucket(s3_bucket) not in s3.buckets.all():
            if verbose:
                log.debug('s3 creating bucket={} {}'.format(s3_bucket, label))
            s3.create_bucket(Bucket=s3_bucket)

        if verbose:
            log.debug('s3 upload start - {}MB to {}:{} {}'.format(
                num_mb, s3_bucket, s3_key, label))

        s3.Bucket(s3_bucket).put_object(Key=s3_key, Body=use_data)

        if verbose:
            log.debug('s3 upload done - {}MB to {}:{} {}'.format(
                num_mb, s3_bucket, s3_key, label))

    # end of s3_enabled

    if redis_enabled and redis_address and redis_key:
        redis_split = redis_address.split(':')
        redis_host = redis_split[0]
        redis_port = int(redis_split[1])
        log.debug('{} redis={}:{}@{} connect '
                  'key={} expire={}'.format(label if label else '', redis_host,
                                            redis_port, redis_db, redis_key,
                                            redis_expire))

        rc = redis.Redis(host=redis_host,
                         port=redis_port,
                         password=redis_password,
                         db=redis_db)

        redis_res = redis_utils.set_data_in_redis_key(
            label=label,
            client=rc,
            key=redis_key,
            data=use_data,
            already_compressed=already_compressed,
            serializer=redis_serializer,
            encoding=redis_encoding,
            expire=redis_expire,
            px=None,
            nx=False,
            xx=False)

        if redis_res['status'] != ae_consts.SUCCESS:
            if verbose:
                log.debug('redis failed - {} {}'.format(
                    ae_consts.get_status(status=redis_res['status']),
                    redis_res['err']))
            return ae_consts.REDIS_FAILED
    # end of redis_enabled

    if output_file:
        if verbose:
            log.debug('file start - output_file={}'.format(output_file))
        file_exists = file_utils.write_to_file(output_file=output_file,
                                               data=data)
        if not file_exists:
            if verbose:
                log.debug('file failed - did not find output_file={}'.format(
                    output_file))
            return ae_consts.FILE_FAILED
        if verbose:
            log.debug('file done - output_file={}'.format(output_file))
    # end of writing to file

    if slack_enabled:
        if verbose:
            log.debug('slack start')
        slack_utils.post_success(msg=use_data,
                                 block=slack_code_block,
                                 full_width=slack_full_width)
        if verbose:
            log.debug('slack end')
    # end of sending to slack

    status = ae_consts.SUCCESS

    if verbose:
        log.debug('end - {} file={} s3_key={} redis_key={} slack={} '
                  'compress={} size={}MB'.format(
                      ae_consts.get_status(status=status), output_file, s3_key,
                      redis_key, slack_enabled, compress, num_mb))

    return status
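
# A minimal usage sketch for publish - json-serialize a dict and cache it
# in redis (the key and address are illustrative and assume a local
# redis server):
#
#   res = publish(
#       data={'ticker': 'SPY', 'close': 280.50},
#       convert_to_json=True,
#       redis_enabled=True,
#       redis_key='SPY_latest',
#       redis_address='localhost:6379',
#       redis_db=0,
#       s3_enabled=False,
#       verbose=True)
#   # res is ae_consts.SUCCESS when the redis set succeeds
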
def publish_from_s3_to_redis():
    """publish_from_s3_to_redis

    Download an S3 key and publish its contents to Redis

    """

    log.info('start - publish_from_s3_to_redis')

    parser = argparse.ArgumentParser(
        description=('Download and store the latest stock pricing, '
                     'news, and options chain data '
                     'and store it in S3 and Redis. '
                     'Once stored, this will also '
                     'start the buy and sell trading analysis.'))
    parser.add_argument('-t', help=('ticker'), required=True, dest='ticker')
    parser.add_argument('-i',
                        help=('optional - ticker id '
                              'not used without a database'),
                        required=False,
                        dest='ticker_id')
    parser.add_argument('-l',
                        help=('optional - path to the log config file'),
                        required=False,
                        dest='log_config_path')
    parser.add_argument('-b',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-B',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-s',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-S',
                        help=('optional - s3 secure: default False'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-g',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument('-p',
                        help=('optional - redis_password'),
                        required=False,
                        dest='redis_password')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-n',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    ticker = TICKER
    ticker_id = TICKER_ID
    ssl_options = SSL_OPTIONS
    transport_options = TRANSPORT_OPTIONS
    broker_url = WORKER_BROKER_URL
    backend_url = WORKER_BACKEND_URL
    celery_config_module = WORKER_CELERY_CONFIG_MODULE
    include_tasks = INCLUDE_TASKS
    s3_access_key = S3_ACCESS_KEY
    s3_secret_key = S3_SECRET_KEY
    s3_region_name = S3_REGION_NAME
    s3_address = S3_ADDRESS
    s3_secure = S3_SECURE
    s3_bucket_name = S3_BUCKET
    s3_key = S3_KEY
    redis_address = REDIS_ADDRESS
    redis_key = REDIS_KEY
    redis_password = REDIS_PASSWORD
    redis_db = REDIS_DB
    redis_expire = REDIS_EXPIRE
    debug = False

    if args.ticker:
        ticker = args.ticker.upper()
    if args.ticker_id:
        ticker_id = args.ticker_id
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_password:
        redis_password = args.redis_password
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.debug:
        debug = True

    work = build_publish_from_s3_to_redis_request()

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_key'] = s3_key
    work['redis_key'] = redis_key
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['debug'] = debug
    work['label'] = 'ticker={}'.format(ticker)

    path_to_tasks = 'analysis_engine.work_tasks'
    task_name = ('{}.publish_from_s3_to_redis.publish_from_s3_to_redis'.format(
        path_to_tasks))
    task_res = None
    if is_celery_disabled():
        work['celery_disabled'] = True
        log.debug('starting without celery work={}'.format(ppj(work)))
        task_res = task_publisher.publish_from_s3_to_redis(work_dict=work)
        if debug:
            log.info('done - result={} '
                     'task={} status={} '
                     'err={} label={}'.format(
                         ppj(task_res), task_name,
                         get_status(status=task_res['status']),
                         task_res['err'], work['label']))
        else:
            log.info('done - result '
                     'task={} status={} '
                     'err={} label={}'.format(
                         task_name, get_status(status=task_res['status']),
                         task_res['err'], work['label']))
        # if/else debug
    else:
        log.info('connecting to broker={} backend={}'.format(
            broker_url, backend_url))

        # Get the Celery app
        app = get_celery_app(name=__name__,
                             auth_url=broker_url,
                             backend_url=backend_url,
                             path_to_config_module=celery_config_module,
                             ssl_options=ssl_options,
                             transport_options=transport_options,
                             include_tasks=include_tasks)

        log.info('calling task={} - work={}'.format(task_name, ppj(work)))
        job_id = app.send_task(task_name, (work, ))
        log.info('calling task={} - success job_id={}'.format(
            task_name, job_id))
Example #10
def get_task_results(
        work_dict=None,
        result=None,
        **kwargs):
    """get_task_results

    If celery is disabled by the
    environment key ``export CELERY_DISABLED=1``
    or requested in the ``work_dict['celery_disabled'] = True`` then
    return the task result dictionary, otherwise
    return ``None``.

    This method is useful for allowing tests
    to override the returned payloads during task chaining
    using ``@mock.patch``.

    :param work_dict: task work dictionary
    :param result: task result dictionary
    :param kwargs: keyword arguments
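
    For example, a test can override the returned payload with a
    minimal sketch like this (the patched path matches how tasks in
    this engine call the helper):

    ::

        from unittest import mock

        @mock.patch(
            'analysis_engine.get_task_results.get_task_results')
        def test_task_chain(mock_get_task_results):
            mock_get_task_results.return_value = {
                'status': SUCCESS,
                'err': None,
                'rec': {}
            }
            # invoke the task under test here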
    """

    send_results_back = None
    cel_disabled = False
    if work_dict:
        if is_celery_disabled(
                work_dict=work_dict):
            send_results_back = result
            cel_disabled = True
    # end of sending back results if told to do so

    if ev('DEBUG_TASK', '0') == '1':
        status = NOT_SET
        err = None
        record = None
        label = None
        if result:
            status = result.get(
                'status',
                NOT_SET)
            err = result.get(
                'err',
                None)
            record = result.get(
                'rec',
                None)
        if work_dict:
            label = work_dict.get(
                'label',
                None)
        log_id = 'get_task_results'
        if label:
            log_id = '{} - get_task_results'.format(
                label)

        result_details = record
        if record:
            result_details = ppj(record)

        status_details = status
        if status:
            status_details = get_status(status=status)

        work_details = work_dict
        if work_dict:
            work_details = ppj(work_dict)

        if status == SUCCESS:
            log.info(
                '{} celery_disabled={} '
                'status={} err={} work_dict={} result={}'.format(
                    log_id,
                    cel_disabled,
                    status_details,
                    err,
                    work_details,
                    result_details))
        else:
            if cel_disabled:
                log.error(
                    '{} celery_disabled={} '
                    'status={} err={} work_dict={} result={}'.format(
                        log_id,
                        cel_disabled,
                        status_details,
                        err,
                        work_details,
                        result_details))
            else:
                log.info(
                    '{} celery_disabled={} '
                    'status={} err={} work_dict={} result={}'.format(
                        log_id,
                        cel_disabled,
                        status_details,
                        err,
                        work_details,
                        result_details))
    # end of if debugging the task results

    return send_results_back
Example #11
def build_option_spread_details(trade_type, spread_type, option_type, close,
                                num_contracts, low_strike, low_ask, low_bid,
                                high_strike, high_ask, high_bid):
    """build_option_spread_details

    Calculate pricing information for supported spreads
    including ``max loss``, ``max profit``, and ``mid price`` (break
    even coming soon)

    :param trade_type: entry (``TRADE_ENTRY``) or
        exit (``TRADE_EXIT``) of a spread position
    :param spread_type: vertical bull (``SPREAD_VERTICAL_BULL``)
        and vertical bear (``SPREAD_VERTICAL_BEAR``)
        are the only supported calculations for now
    :param option_type: call (``OPTION_CALL``) or put
        (``OPTION_PUT``)
    :param close: closing price of the underlying
        asset
    :param num_contracts: integer number of contracts
    :param low_strike: float - strike for
        the low leg of the spread
    :param low_ask: float - ask price for
        the low leg of the spread
    :param low_bid: float - bid price for
        the low leg of the spread
    :param high_strike: float - strike for
        the high leg of the spread
    :param high_ask: float - ask price for
        the high leg of the spread
    :param high_bid: float - bid price for
        the high leg of the spread
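
    A worked entry example (illustrative prices; the numbers follow
    the calculations in the function body):

    ::

        details = build_option_spread_details(
            trade_type=ae_consts.TRADE_ENTRY,
            spread_type=ae_consts.SPREAD_VERTICAL_BULL,
            option_type=ae_consts.OPTION_CALL,
            close=282.0,
            num_contracts=1,
            low_strike=280.0,
            low_ask=2.2,
            low_bid=2.0,
            high_strike=285.0,
            high_ask=1.0,
            high_bid=0.8)
        # leg midpoints: (2.0 + 2.2) / 2 = 2.1 and (0.8 + 1.0) / 2 = 0.9
        # mid price = abs(2.1 - 0.9) = 1.2 with strike width = 5.0
        # entry cost (max loss) = 100 * 1 * 1.2 = 120.0
        # entry revenue (max profit) = 100 * 1 * (5.0 - 1.2) = 380.0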
    """

    details = {
        'status': ae_consts.NOT_RUN,
        'trade_type': trade_type,
        'spread_type': spread_type,
        'option_type': option_type,
        'num_contracts': num_contracts,
        'low_strike': low_strike,
        'low_bid': low_bid,
        'low_ask': low_ask,
        'high_strike': high_strike,
        'high_bid': high_bid,
        'high_ask': high_ask,
        'cost': None,
        'revenue': None,
        'low_bidask_mid': None,
        'high_bidask_mid': None,
        'mid_price': None,
        'nat_price': None,
        'strike_width': None,
        'break_even': None,
        'max_loss': None,
        'max_profit': None,
        'spread_id': None
    }

    low_distance = int(close) - low_strike
    high_distance = high_strike - int(close)
    details['strike_width'] = ae_consts.to_f(high_strike - low_strike)
    details['spread_id'] = 'S_{}_O_{}_low_{}_high_{}_w_{}'.format(
        spread_type, option_type, low_distance, high_distance,
        details['strike_width'])
    # each leg's bid/ask midpoint is (bid + ask) / 2
    details['low_bidask_mid'] = ae_consts.to_f((low_bid + low_ask) / 2.0)
    details['high_bidask_mid'] = ae_consts.to_f((high_bid + high_ask) / 2.0)
    details['mid_price'] = ae_consts.to_f(
        abs(details['low_bidask_mid'] - details['high_bidask_mid']))
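    # note: the natural price below currently mirrors the mid price
    # calculation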
    details['nat_price'] = ae_consts.to_f(
        abs(details['low_bidask_mid'] - details['high_bidask_mid']))

    cost_of_contracts_at_mid_price = None
    revenue_of_contracts_at_mid_price = None

    if trade_type == ae_consts.TRADE_ENTRY:
        cost_of_contracts_at_mid_price = ae_consts.to_f(100.0 * num_contracts *
                                                        details['mid_price'])
        revenue_of_contracts_at_mid_price = ae_consts.to_f(
            100.0 * num_contracts *
            (details['strike_width'] - details['mid_price']))
        # every supported entry spread (bull/bear, call/put) uses the
        # same assignment: the mid-price cost is the max loss and the
        # remaining strike-width value is the max profit
        details['max_loss'] = cost_of_contracts_at_mid_price
        details['max_profit'] = revenue_of_contracts_at_mid_price

    else:  # trade exit calculations:
        revenue_of_contracts_at_mid_price = ae_consts.to_f(
            100.0 * num_contracts * details['mid_price'])
        cost_of_contracts_at_mid_price = ae_consts.to_f(
            100.0 * num_contracts *
            (details['strike_width'] - details['mid_price']))
        # every supported exit spread (bull/bear, call/put) uses the
        # same assignment: the mid-price value received is the max
        # profit and the remaining strike-width value is the max loss
        details['max_profit'] = revenue_of_contracts_at_mid_price
        details['max_loss'] = cost_of_contracts_at_mid_price
    # end of supported types of spreads

    details['cost'] = cost_of_contracts_at_mid_price
    details['revenue'] = revenue_of_contracts_at_mid_price

    log.debug('type={} spread={} option={} close={} spread_id={} '
              'revenue={} cost={} mid={} width={} '
              'max_profit={} max_loss={}'.format(
                  ae_consts.get_status(status=trade_type),
                  ae_consts.get_status(status=spread_type),
                  ae_consts.get_status(status=option_type), close,
                  details['spread_id'], revenue_of_contracts_at_mid_price,
                  cost_of_contracts_at_mid_price, details['mid_price'],
                  details['strike_width'], details['max_profit'],
                  details['max_loss']))

    return details
Example #12
def publish_from_s3_to_redis(self, work_dict):
    """publish_from_s3_to_redis

    Publish Ticker Data from S3 to Redis

    :param work_dict: dictionary for key/values
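
    A minimal ``work_dict`` sketch (only commonly-set keys are
    shown; anything omitted falls back to environment defaults):

    ::

        work = {
            'ticker': 'SPY',
            's3_bucket': 'pricing',
            's3_key': 'SPY_2019-01-04',
            'redis_key': 'SPY_2019-01-04',
            's3_address': 'localhost:9000',
            'redis_address': 'localhost:6379'
        }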
    """

    label = 'pub-s3-to-redis'

    log.info('task - {} - start work_dict={}'.format(label, work_dict))

    ticker = TICKER
    ticker_id = TICKER_ID
    rec = {
        'ticker': None,
        'ticker_id': None,
        's3_enabled': True,
        'redis_enabled': True,
        's3_bucket': None,
        's3_key': None,
        'redis_key': None,
        'updated': None
    }
    res = build_result.build_result(status=NOT_RUN, err=None, rec=rec)

    try:
        ticker = work_dict.get('ticker', TICKER)
        ticker_id = int(work_dict.get('ticker_id', TICKER_ID))

        if not ticker:
            res = build_result.build_result(status=ERR,
                                            err='missing ticker',
                                            rec=rec)
            return res

        s3_key = work_dict.get('s3_key', None)
        s3_bucket_name = work_dict.get('s3_bucket', 'pricing')
        redis_key = work_dict.get('redis_key', None)
        updated = work_dict.get('updated', None)
        serializer = work_dict.get('serializer', 'json')
        encoding = work_dict.get('encoding', 'utf-8')
        label = work_dict.get('label', label)

        enable_s3_read = True
        enable_redis_publish = True

        rec['ticker'] = ticker
        rec['ticker_id'] = ticker_id
        rec['s3_bucket'] = s3_bucket_name
        rec['s3_key'] = s3_key
        rec['redis_key'] = redis_key
        rec['updated'] = updated
        rec['s3_enabled'] = enable_s3_read
        rec['redis_enabled'] = enable_redis_publish

        data = None

        if enable_s3_read:

            log.info('{} parsing s3 values'.format(label))
            access_key = work_dict.get('s3_access_key', S3_ACCESS_KEY)
            secret_key = work_dict.get('s3_secret_key', S3_SECRET_KEY)
            region_name = work_dict.get('s3_region_name', S3_REGION_NAME)
            service_address = work_dict.get('s3_address', S3_ADDRESS)
            secure = work_dict.get('s3_secure', S3_SECURE) == '1'

            endpoint_url = 'http://{}'.format(service_address)
            if secure:
                endpoint_url = 'https://{}'.format(service_address)

            log.info('{} building s3 endpoint_url={} '
                     'region={}'.format(label, endpoint_url, region_name))

            s3 = boto3.resource(
                's3',
                endpoint_url=endpoint_url,
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key,
                region_name=region_name,
                config=boto3.session.Config(signature_version='s3v4'))

            try:
                log.info('{} checking bucket={} exists'.format(
                    label, s3_bucket_name))
                if s3.Bucket(s3_bucket_name) not in s3.buckets.all():
                    log.info('{} creating bucket={}'.format(
                        label, s3_bucket_name))
                    s3.create_bucket(Bucket=s3_bucket_name)
            except Exception as e:
                log.info('{} failed creating bucket={} '
                         'with ex={}'.format(label, s3_bucket_name, e))
            # end of try/ex for creating bucket

            try:
                log.info('{} reading from s3={}/{} '
                         'updated={}'.format(label, s3_bucket_name, s3_key,
                                             updated))
                data = s3_read_contents_from_key.s3_read_contents_from_key(
                    s3=s3,
                    s3_bucket_name=s3_bucket_name,
                    s3_key=s3_key,
                    encoding=encoding,
                    convert_as_json=True)

                initial_size_value = len(str(data)) / 1024000
                initial_size_str = to_f(initial_size_value)
                if ev('DEBUG_S3', '0') == '1':
                    log.info('{} read s3={}/{} data={}'.format(
                        label, s3_bucket_name, s3_key, ppj(data)))
                else:
                    log.info('{} read s3={}/{} data size={} MB'.format(
                        label, s3_bucket_name, s3_key, initial_size_str))
            except Exception as e:
                err = ('{} failed reading bucket={} '
                       'key={} ex={}').format(label, s3_bucket_name, s3_key, e)
                log.error(err)
                res = build_result.build_result(status=NOT_RUN,
                                                err=err,
                                                rec=rec)
            # end of try/ex for reading the s3 key
        else:
            log.info('{} SKIP S3 read bucket={} '
                     'key={}'.format(label, s3_bucket_name, s3_key))
        # end of if enable_s3_read

        if enable_redis_publish:
            redis_address = work_dict.get('redis_address', REDIS_ADDRESS)
            redis_key = work_dict.get('redis_key', REDIS_KEY)
            redis_password = work_dict.get('redis_password', REDIS_PASSWORD)
            redis_db = work_dict.get('redis_db', None)
            if not redis_db:
                redis_db = REDIS_DB
            redis_expire = None
            if 'redis_expire' in work_dict:
                redis_expire = work_dict.get('redis_expire', REDIS_EXPIRE)
            log.info('redis enabled address={}@{} '
                     'key={}'.format(redis_address, redis_db, redis_key))
            redis_host = redis_address.split(':')[0]
            redis_port = redis_address.split(':')[1]
            try:
                if ev('DEBUG_REDIS', '0') == '1':
                    log.info('{} publishing redis={}:{} '
                             'db={} key={} '
                             'updated={} expire={} '
                             'data={}'.format(label, redis_host, redis_port,
                                              redis_db, redis_key, updated,
                                              redis_expire, ppj(data)))
                else:
                    log.info('{} publishing redis={}:{} '
                             'db={} key={} '
                             'updated={} expire={}'.format(
                                 label, redis_host, redis_port, redis_db,
                                 redis_key, updated, redis_expire))
                # end of if/else

                rc = redis.Redis(host=redis_host,
                                 port=redis_port,
                                 password=redis_password,
                                 db=redis_db)

                redis_set_res = redis_set.set_data_in_redis_key(
                    label=label,
                    client=rc,
                    key=redis_key,
                    data=data,
                    serializer=serializer,
                    encoding=encoding,
                    expire=redis_expire,
                    px=None,
                    nx=False,
                    xx=False)

                log.info('{} redis_set status={} err={}'.format(
                    label, get_status(redis_set_res['status']),
                    redis_set_res['err']))

            except Exception as e:
                log.error('{} failed - redis publish to '
                          'key={} ex={}'.format(label, redis_key, e))
            # end of try/ex for publishing to redis
        else:
            log.info('{} SKIP REDIS publish '
                     'key={}'.format(label, redis_key))
        # end of if enable_redis_publish

        res = build_result.build_result(status=SUCCESS, err=None, rec=rec)

    except Exception as e:
        res = build_result.build_result(
            status=ERR,
            err=('failed - publish_from_s3_to_redis '
                 'dict={} with ex={}').format(work_dict, e),
            rec=rec)
        log.error('{} - {}'.format(label, res['err']))
    # end of try/ex

    log.info('task - publish_from_s3_to_redis done - '
             '{} - status={}'.format(label, get_status(res['status'])))

    return analysis_engine.get_task_results.get_task_results(
        work_dict=work_dict, result=res)
Example #13
def fetch_new_stock_datasets():
    """fetch_new_stock_datasets

    Collect datasets for a ticker from IEX Cloud or Tradier

    .. warning:: IEX Cloud charges per request. The example
        commands below can help you control usage when handling
        first-time users and automation (intraday, daily, and weekly
        options are supported).

    **Setup**

    ::

        export IEX_TOKEN=YOUR_IEX_CLOUD_TOKEN
        export TD_TOKEN=YOUR_TRADIER_TOKEN

    **Pull Data for a Ticker from IEX and Tradier**

    ::

        fetch -t TICKER

    **Pull from All Supported IEX Feeds**

    ::

        fetch -t TICKER -g iex-all

    **Pull from All Supported Tradier Feeds**

    ::

        fetch -t TICKER -g td

    **Intraday IEX and Tradier Feeds (only minute and news to reduce costs)**

    ::

        fetch -t TICKER -g intra
        # or manually:
        # fetch -t TICKER -g td,iex_min,iex_news

    **Daily IEX Feeds (daily and news)**

    ::

        fetch -t TICKER -g daily
        # or manually:
        # fetch -t TICKER -g iex_day,iex_news

    **Weekly IEX Feeds (company, financials, earnings, dividends, and peers)**

    ::

        fetch -t TICKER -g weekly
        # or manually:
        # fetch -t TICKER -g iex_fin,iex_earn,iex_div,iex_peers,iex_news,
        # iex_comp

    **IEX Minute**

    ::

        fetch -t TICKER -g iex_min

    **IEX News**

    ::

        fetch -t TICKER -g iex_news

    **IEX Daily**

    ::

        fetch -t TICKER -g iex_day

    **IEX Stats**

    ::

        fetch -t TICKER -g iex_stats

    **IEX Peers**

    ::

        fetch -t TICKER -g iex_peers

    **IEX Financials**

    ::

        fetch -t TICKER -g iex_fin

    **IEX Earnings**

    ::

        fetch -t TICKER -g iex_earn

    **IEX Dividends**

    ::

        fetch -t TICKER -g iex_div

    **IEX Quote**

    ::

        fetch -t TICKER -g iex_quote

    **IEX Company**

    ::

        fetch -t TICKER -g iex_comp

    .. note:: This requires the following services to be listening on:

        - redis ``localhost:6379``
        - minio ``localhost:9000``

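    A quick reachability check (``redis-cli`` ships with redis;
    minio responds to a plain HTTP request):

    ::

        redis-cli -h localhost -p 6379 ping
        curl -s http://localhost:9000
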
    """
    log.info(
        'start - fetch_new_stock_datasets')

    parser = argparse.ArgumentParser(
        description=(
            'Download and store the latest stock pricing, '
            'news, and options chain data '
            'and store it in Minio (S3) and Redis. '
            'Also includes support for getting FinViz '
            'screener tickers'))
    parser.add_argument(
        '-t',
        help=(
            'ticker'),
        required=False,
        dest='ticker')
    parser.add_argument(
        '-g',
        help=(
            'optional - fetch mode: '
            'initial = default fetch from initial data feeds '
            '(IEX and Tradier), '
            'intra = fetch intraday from IEX and Tradier, '
            'daily = fetch daily from IEX, '
            'weekly = fetch weekly from IEX, '
            'all = fetch from all data feeds, '
            'td = fetch from Tradier feeds only, '
            'iex = fetch from IEX Cloud feeds only, '
            'iex_min = fetch IEX Cloud intraday per-minute feed '
            'https://iexcloud.io/docs/api/#historical-prices '
            'iex_day = fetch IEX Cloud daily feed '
            'https://iexcloud.io/docs/api/#historical-prices '
            'iex_quote = fetch IEX Cloud quotes feed '
            'https://iexcloud.io/docs/api/#quote '
            'iex_stats = fetch IEX Cloud key stats feed '
            'https://iexcloud.io/docs/api/#key-stats '
            'iex_peers = fetch from just IEX Cloud peers feed '
            'https://iexcloud.io/docs/api/#peers '
            'iex_news = fetch IEX Cloud news feed '
            'https://iexcloud.io/docs/api/#news '
            'iex_fin = fetch IEX Cloud financials feed '
            'https://iexcloud.io/docs/api/#financials '
            'iex_earn = fetch from just IEX Cloud earnings feed '
            'https://iexcloud.io/docs/api/#earnings '
            'iex_div = fetch from just IEX Cloud dividends feed '
            'https://iexcloud.io/docs/api/#dividends '
            'iex_comp = fetch from just IEX Cloud company feed '
            'https://iexcloud.io/docs/api/#company'),
        required=False,
        dest='fetch_mode')
    parser.add_argument(
        '-i',
        help=(
            'optional - ticker id '
            'not used without a database'),
        required=False,
        dest='ticker_id')
    parser.add_argument(
        '-e',
        help=(
            'optional - options expiration date'),
        required=False,
        dest='exp_date_str')
    parser.add_argument(
        '-l',
        help=(
            'optional - path to the log config file'),
        required=False,
        dest='log_config_path')
    parser.add_argument(
        '-b',
        help=(
            'optional - broker url for Celery'),
        required=False,
        dest='broker_url')
    parser.add_argument(
        '-B',
        help=(
            'optional - backend url for Celery'),
        required=False,
        dest='backend_url')
    parser.add_argument(
        '-k',
        help=(
            'optional - s3 access key'),
        required=False,
        dest='s3_access_key')
    parser.add_argument(
        '-s',
        help=(
            'optional - s3 secret key'),
        required=False,
        dest='s3_secret_key')
    parser.add_argument(
        '-a',
        help=(
            'optional - s3 address format: <host:port>'),
        required=False,
        dest='s3_address')
    parser.add_argument(
        '-S',
        help=(
            'optional - use s3 SSL if "1"'),
        required=False,
        dest='s3_secure')
    parser.add_argument(
        '-u',
        help=(
            'optional - s3 bucket name'),
        required=False,
        dest='s3_bucket_name')
    parser.add_argument(
        '-G',
        help=(
            'optional - s3 region name'),
        required=False,
        dest='s3_region_name')
    parser.add_argument(
        '-p',
        help=(
            'optional - redis_password'),
        required=False,
        dest='redis_password')
    parser.add_argument(
        '-r',
        help=(
            'optional - redis_address format: <host:port>'),
        required=False,
        dest='redis_address')
    parser.add_argument(
        '-n',
        help=(
            'optional - redis and s3 key name'),
        required=False,
        dest='keyname')
    parser.add_argument(
        '-m',
        help=(
            'optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument(
        '-x',
        help=(
            'optional - redis expiration in seconds'),
        required=False,
        dest='redis_expire')
    parser.add_argument(
        '-z',
        help=(
            'optional - strike price'),
        required=False,
        dest='strike')
    parser.add_argument(
        '-c',
        help=(
            'optional - contract type "C" for calls "P" for puts'),
        required=False,
        dest='contract_type')
    parser.add_argument(
        '-P',
        help=(
            'optional - get pricing data if "1" or "0" disabled'),
        required=False,
        dest='get_pricing')
    parser.add_argument(
        '-N',
        help=(
            'optional - get news data if "1" or "0" disabled'),
        required=False,
        dest='get_news')
    parser.add_argument(
        '-O',
        help=(
            'optional - get options data if "1" or "0" disabled'),
        required=False,
        dest='get_options')
    parser.add_argument(
        '-U',
        help=(
            'optional - s3 enabled for publishing if "1" or '
            '"0" is disabled'),
        required=False,
        dest='s3_enabled')
    parser.add_argument(
        '-R',
        help=(
            'optional - redis enabled for publishing if "1" or '
            '"0" is disabled'),
        required=False,
        dest='redis_enabled')
    parser.add_argument(
        '-A',
        help=(
            'optional - run an analysis '
            'supported modes: scn'),
        required=False,
        dest='analysis_type')
    parser.add_argument(
        '-L',
        help=(
            'optional - screener urls to pull '
            'tickers for analysis'),
        required=False,
        dest='urls')
    parser.add_argument(
        '-Z',
        help=(
            'optional - run on the Celery engine; the default '
            'runs offline without an engine for local testing '
            'and demos'),
        required=False,
        dest='celery_enabled',
        action='store_true')
    parser.add_argument(
        '-d',
        help=(
            'debug'),
        required=False,
        dest='debug',
        action='store_true')
    args = parser.parse_args()

    run_offline = True
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    fetch_mode = 'initial'
    exp_date_str = ae_consts.NEXT_EXP_STR
    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    celery_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_secure = ae_consts.S3_SECURE
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = ae_consts.S3_KEY
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = ae_consts.REDIS_KEY
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    strike = None
    contract_type = None
    get_pricing = True
    get_news = True
    get_options = True
    s3_enabled = True
    redis_enabled = True
    analysis_type = None
    debug = False

    if args.ticker:
        ticker = args.ticker.upper()
    if args.ticker_id:
        ticker_id = args.ticker_id
    if args.exp_date_str:
        exp_date_str = args.exp_date_str
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_password:
        redis_password = args.redis_password
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.strike:
        strike = args.strike
    if args.contract_type:
        contract_type = args.contract_type
    if args.get_pricing:
        get_pricing = args.get_pricing == '1'
    if args.get_news:
        get_news = args.get_news == '1'
    if args.get_options:
        get_options = args.get_options == '1'
    if args.s3_enabled:
        s3_enabled = args.s3_enabled == '1'
    if args.redis_enabled:
        redis_enabled = args.redis_enabled == '1'
    if args.fetch_mode:
        fetch_mode = str(args.fetch_mode).lower()
    if args.analysis_type:
        analysis_type = str(args.analysis_type).lower()
    if args.celery_enabled:
        run_offline = False
    if args.debug:
        debug = True

    work = api_requests.build_get_new_pricing_request()

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_key'] = s3_key
    work['redis_key'] = redis_key
    work['strike'] = strike
    work['contract'] = contract_type
    work['exp_date'] = exp_date_str
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['get_pricing'] = get_pricing
    work['get_news'] = get_news
    work['get_options'] = get_options
    work['s3_enabled'] = s3_enabled
    work['redis_enabled'] = redis_enabled
    work['fetch_mode'] = fetch_mode
    work['analysis_type'] = analysis_type
    work['iex_datasets'] = iex_consts.DEFAULT_FETCH_DATASETS
    work['debug'] = debug
    work['label'] = f'ticker={ticker}'

    if analysis_type == 'scn':
        label = f'screener={work["ticker"]}'
        fv_urls = []
        if args.urls:
            fv_urls = str(args.urls).split('|')
        if len(fv_urls) == 0:
            fv_urls = os.getenv('SCREENER_URLS', '').split('|')
        screener_req = api_requests.build_screener_analysis_request(
            ticker=ticker,
            fv_urls=fv_urls,
            label=label)
        work.update(screener_req)
        start_screener_analysis(
            req=work)
    # end of analysis_type
    else:
        if not args.keyname:
            last_close_date = ae_utils.last_close()
            work['s3_key'] = f'''{work["ticker"]}_{last_close_date.strftime(
                ae_consts.COMMON_DATE_FORMAT)}'''
            work['redis_key'] = f'''{work["ticker"]}_{last_close_date.strftime(
                ae_consts.COMMON_DATE_FORMAT)}'''

        path_to_tasks = 'analysis_engine.work_tasks'
        task_name = (
            f'{path_to_tasks}'
            f'.get_new_pricing_data.get_new_pricing_data')
        task_res = None
        if ae_consts.is_celery_disabled() or run_offline:
            work['celery_disabled'] = True
            log.debug(
                f'starting without celery work={ae_consts.ppj(work)} '
                f'offline={run_offline}')
            task_res = task_pricing.get_new_pricing_data(
                work)
            status_str = ae_consts.get_status(status=task_res['status'])

            cur_date = ae_utils.get_last_close_str()
            redis_arr = work["redis_address"].split(':')
            include_results = ''
            if debug:
                include_results = task_res['rec']
            if task_res['status'] == ae_consts.SUCCESS:
                if task_res['rec']['num_success'] == 0:
                    log.error(
                        f'failed fetching ticker={work["ticker"]} '
                        f'from {fetch_mode} - please check the '
                        'environment variables')
                else:
                    log.info(
                        f'done fetching ticker={work["ticker"]} '
                        f'mode={fetch_mode} '
                        f'status={status_str} '
                        f'err={task_res["err"]} {include_results}')
                    print(
                        'View keys in redis with:\n'
                        f'redis-cli -h {redis_arr[0]} '
                        'keys '
                        f'"{work["ticker"]}_{cur_date}*"')
            elif task_res['status'] == ae_consts.MISSING_TOKEN:
                print(
                    'Set an IEX or Tradier token: '
                    '\n'
                    '  export IEX_TOKEN=YOUR_IEX_TOKEN\n'
                    '  export TD_TOKEN=YOUR_TD_TOKEN\n')
            else:
                log.error(
                    f'done fetching ticker={work["ticker"]} '
                    f'mode={fetch_mode} '
                    f'status={status_str} '
                    f'err={task_res["err"]}')
            # if/else debug
        else:
            log.debug(
                f'connecting to broker={broker_url} '
                f'backend={backend_url}')

            # Get the Celery app
            app = get_celery_app.get_celery_app(
                name=__name__,
                auth_url=broker_url,
                backend_url=backend_url,
                path_to_config_module=celery_config_module,
                ssl_options=ssl_options,
                transport_options=transport_options,
                include_tasks=include_tasks)

            log.debug(f'calling task={task_name} - work={ae_consts.ppj(work)}')
            job_id = app.send_task(
                task_name,
                (work,))
            log.debug(f'task={task_name} - job_id={job_id}')
Example #14
def task_run_algo(
        self,
        algo_req):
    """task_run_algo

    Process an Algorithm

    :param algo_req: dictionary for key/values for
        running an algorithm using Celery workers
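
    A minimal ``algo_req`` sketch (keys mirror the values read in
    the body below; the module path is hypothetical):

    ::

        algo_req = {
            'ticker': 'SPY',
            'name': 'my-algo',
            'mod_path': '/path/to/my_algo.py',
            'num_days_back': 75,
            'raise_on_err': True
        }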
    """

    label = algo_req.get(
        'name',
        'ae-algo')
    verbose = algo_req.get(
        'verbose_task',
        False)
    debug = algo_req.get(
        'debug',
        False)

    # please be careful logging prod passwords:
    if debug:
        log.info(
            'task - {} - start '
            'algo_req={}'.format(
                label,
                algo_req))
    elif verbose:
        log.info(
            'task - {} - start '.format(
                label))
    # end of start log

    rec = {}
    res = build_result.build_result(
        status=ae_consts.NOT_RUN,
        err=None,
        rec=rec)

    created_algo_object = None
    custom_algo_module = None
    new_algo_object = None
    use_custom_algo = False
    found_algo_module = True  # assume the BaseAlgo
    should_publish_extract_dataset = False
    should_publish_history_dataset = False
    should_publish_report_dataset = False

    ticker = algo_req.get(
        'ticker',
        'SPY')
    num_days_back = algo_req.get(
        'num_days_back',
        75)
    name = algo_req.get(
        'name',
        'ae-algo')
    algo_module_path = algo_req.get(
        'mod_path',
        None)
    module_name = algo_req.get(
        'module_name',
        'BaseAlgo')
    custom_algo_module = algo_req.get(
        'custom_algo_module',
        None)
    new_algo_object = algo_req.get(
        'new_algo_object',
        None)
    use_custom_algo = algo_req.get(
        'use_custom_algo',
        False)
    should_publish_extract_dataset = algo_req.get(
        'should_publish_extract_dataset',
        False)
    should_publish_history_dataset = algo_req.get(
        'should_publish_history_dataset',
        False)
    should_publish_report_dataset = algo_req.get(
        'should_publish_report_dataset',
        False)
    start_date = algo_req.get(
        'start_date',
        None)
    end_date = algo_req.get(
        'end_date',
        None)
    raise_on_err = algo_req.get(
        'raise_on_err',
        True)
    report_config = algo_req.get(
        'report_config',
        None)
    history_config = algo_req.get(
        'history_config',
        None)
    extract_config = algo_req.get(
        'extract_config',
        None)

    err = None
    if algo_module_path:
        found_algo_module = False
        module_name = algo_module_path.split('/')[-1]
        loader = importlib.machinery.SourceFileLoader(
            module_name,
            algo_module_path)
        custom_algo_module = types.ModuleType(
            loader.name)
        loader.exec_module(
            custom_algo_module)
        use_custom_algo = True

        for member in inspect.getmembers(custom_algo_module):
            if module_name in str(member):
                found_algo_module = True
                break
        # for all members in this custom module file
    # if loading a custom algorithm module from a file on disk

    if not found_algo_module:
        err = (
            '{} - unable to find custom algorithm module={} '
            'module_path={}'.format(
                label,
                custom_algo_module,
                algo_module_path))
        if algo_module_path:
            err = (
                '{} - analysis_engine.work_tasks.task_run_algo '
                'was unable '
                'to find custom algorithm module={} with provided path to \n '
                'file: {} \n'
                '\n'
                'Please confirm '
                'that the class inherits from the BaseAlgo class like:\n'
                '\n'
                'import analysis_engine.algo\n'
                'class MyAlgo(analysis_engine.algo.BaseAlgo):\n '
                '\n'
                'If it is then please file an issue on github:\n '
                'https://github.com/AlgoTraders/stock-analysis-engine/'
                'issues/new \n\nFor now this error results in a shutdown'
                '\n'.format(
                    label,
                    custom_algo_module,
                    algo_module_path))
        # if algo_module_path set

        log.error(err)
        res = build_result.build_result(
            status=ae_consts.ERR,
            err=err,
            rec=None)
        task_result = {
            'status': res['status'],
            'err': res['err'],
            'algo_req': algo_req,
            'rec': rec
        }
        return task_result
    # if not found_algo_module

    use_start_date = start_date
    use_end_date = end_date
    if not use_end_date:
        end_date = datetime.datetime.utcnow()
        use_end_date = end_date.strftime(
            ae_consts.COMMON_TICK_DATE_FORMAT)
    if not use_start_date:
        start_date = end_date - datetime.timedelta(days=num_days_back)
        use_start_date = start_date.strftime(
            ae_consts.COMMON_TICK_DATE_FORMAT)
    dataset_publish_extract = algo_req.get(
        'dataset_publish_extract',
        False)
    dataset_publish_history = algo_req.get(
        'dataset_publish_history',
        False)
    dataset_publish_report = algo_req.get(
        'dataset_publish_report',
        False)
    try:
        if use_custom_algo:
            if verbose:
                log.info(
                    'inspecting {} for class {}'.format(
                        custom_algo_module,
                        module_name))
            use_class_member_object = None
            for member in inspect.getmembers(custom_algo_module):
                if module_name in str(member):
                    if verbose:
                        log.info(
                            'start {} with {}'.format(
                                name,
                                member[1]))
                    use_class_member_object = member
                    break
            # end of looking through the module members for the class

            if use_class_member_object:
                if algo_req.get('backtest', False):
                    new_algo_object = use_class_member_object[1](
                        ticker=algo_req['ticker'],
                        config_dict=algo_req)
                else:
                    new_algo_object = use_class_member_object[1](
                        **algo_req)
            else:
                err = (
                    '{} - did not find a derived '
                    'analysis_engine.algo.BaseAlgo '
                    'class in the module file={} '
                    'for ticker={} algo_name={}'.format(
                        label,
                        algo_module_path,
                        ticker,
                        name))
                log.error(err)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=err,
                    rec=None)
                task_result = {
                    'status': res['status'],
                    'err': res['err'],
                    'algo_req': algo_req,
                    'rec': rec
                }
                return task_result
            # end of finding a valid algorithm object
        else:
            new_algo_object = ae_algo.BaseAlgo(
                **algo_req)
        # if using a custom module path or the BaseAlgo

        if new_algo_object:
            # heads up - logging this might have passwords in the algo_req
            # log.debug(
            #     '{} algorithm request: {}'.format(
            #         name,
            #         algo_req))
            if verbose:
                log.info(
                    '{} - run START ticker={} from {} to {}'.format(
                        name,
                        ticker,
                        use_start_date,
                        use_end_date))
            if algo_req.get('backtest', False):
                algo_res = run_algo.run_algo(
                    algo=new_algo_object,
                    config_dict=algo_req)
                created_algo_object = new_algo_object
            else:
                algo_res = run_algo.run_algo(
                    algo=new_algo_object,
                    **algo_req)
                created_algo_object = new_algo_object

            if verbose:
                log.info(
                    '{} - run DONE ticker={} from {} to {}'.format(
                        name,
                        ticker,
                        use_start_date,
                        use_end_date))
            if debug:
                if custom_algo_module:
                    log.info(
                        '{} - done run_algo custom_algo_module={} '
                        'module_name={} '
                        'ticker={} from {} to {}'.format(
                            name,
                            custom_algo_module,
                            module_name,
                            ticker,
                            use_start_date,
                            use_end_date))
                else:
                    log.info(
                        '{} - done run_algo BaseAlgo ticker={} from {} '
                        'to {}'.format(
                            name,
                            ticker,
                            use_start_date,
                            use_end_date))
        else:
            err = (
                '{} - missing a derived analysis_engine.algo.BaseAlgo '
                'class in the module file={} for '
                'ticker={} algo_name={}'.format(
                    label,
                    algo_module_path,
                    ticker,
                    name))
            log.error(err)
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=None)
            task_result = {
                'status': res['status'],
                'err': res['err'],
                'algo_req': algo_req,
                'rec': rec
            }
            return task_result
        # end of finding a valid algorithm object

        if not created_algo_object:
            err = (
                '{} - failed creating algorithm object - '
                'ticker={} status={} error={} '
                'algo name={} custom_algo_module={} module_name={} '
                'from {} to {}'.format(
                    label,
                    ticker,
                    ae_consts.get_status(status=algo_res['status']),
                    algo_res['err'],
                    name,
                    custom_algo_module,
                    module_name,
                    use_start_date,
                    use_end_date))
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=None)
            task_result = {
                'status': res['status'],
                'err': res['err'],
                'algo_req': algo_req,
                'rec': rec
            }
            return task_result
        # end of stop early

        if should_publish_extract_dataset or dataset_publish_extract:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            if (extract_config['redis_address'] and
                    extract_config['redis_db'] and
                    extract_config['redis_key']):
                redis_log = 'redis://{}@{}/{}'.format(
                    extract_config['redis_address'],
                    extract_config['redis_db'],
                    extract_config['redis_key'])
                use_log += ' {}'.format(
                    redis_log)
            else:
                extract_config['redis_enabled'] = False
            if (extract_config['s3_address'] and
                    extract_config['s3_bucket'] and
                    extract_config['s3_key']):
                s3_log = 's3://{}/{}/{}'.format(
                    extract_config['s3_address'],
                    extract_config['s3_bucket'],
                    extract_config['s3_key'])
                use_log += ' {}'.format(
                    s3_log)
            else:
                extract_config['s3_enabled'] = False
            if extract_config['output_file']:
                file_log = 'file:{}'.format(
                    extract_config['output_file'])
                use_log += ' {}'.format(
                    file_log)

            if verbose:
                log.info(
                    '{} - publish - start ticker={} algorithm-ready {}'
                    ''.format(
                        name,
                        ticker,
                        use_log))

            publish_status = created_algo_object.publish_input_dataset(
                **extract_config)
            if publish_status != ae_consts.SUCCESS:
                msg = (
                    'failed to publish algorithm-ready datasets '
                    'with status {} attempted to {}'.format(
                        ae_consts.get_status(status=publish_status),
                        use_log))
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,
                    rec=None)
                task_result = {
                    'status': res['status'],
                    'err': res['err'],
                    'algo_req': algo_req,
                    'rec': rec
                }
                return task_result
            # end of stop early

            if verbose:
                log.info(
                    '{} - publish - done ticker={} algorithm-ready {}'
                    ''.format(
                        name,
                        ticker,
                        use_log))
        # if publish the algorithm-ready dataset

        if should_publish_history_dataset or dataset_publish_history:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            if (history_config['redis_address'] and
                    history_config['redis_db'] and
                    history_config['redis_key']):
                redis_log = 'redis://{}@{}/{}'.format(
                    history_config['redis_address'],
                    history_config['redis_db'],
                    history_config['redis_key'])
                use_log += ' {}'.format(
                    redis_log)
            if (history_config['s3_address'] and
                    history_config['s3_bucket'] and
                    history_config['s3_key']):
                s3_log = 's3://{}/{}/{}'.format(
                    history_config['s3_address'],
                    history_config['s3_bucket'],
                    history_config['s3_key'])
                use_log += ' {}'.format(
                    s3_log)
            if history_config['output_file']:
                file_log = 'file:{}'.format(
                    history_config['output_file'])
                use_log += ' {}'.format(
                    file_log)

            if verbose:
                log.info(
                    '{} - publish - start ticker={} trading history {}'
                    ''.format(
                        name,
                        ticker,
                        use_log))

            publish_status = \
                created_algo_object.publish_trade_history_dataset(
                    **history_config)
            if publish_status != ae_consts.SUCCESS:
                msg = (
                    'failed to publish trading history datasets '
                    'with status {} attempted to {}'.format(
                        ae_consts.get_status(status=publish_status),
                        use_log))
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,
                    rec=None)
                task_result = {
                    'status': res['status'],
                    'err': res['err'],
                    'algo_req': algo_req,
                    'rec': rec
                }
                return task_result
            # end of stop early

            if verbose:
                log.info(
                    '{} - publish - done ticker={} trading history {}'
                    ''.format(
                        name,
                        ticker,
                        use_log))
        # if publishing the trading history dataset

        if should_publish_report_dataset or dataset_publish_report:
            s3_log = ''
            redis_log = ''
            file_log = ''
            use_log = 'publish'

            if (report_config['redis_address'] and
                    report_config['redis_db'] and
                    report_config['redis_key']):
                redis_log = 'redis://{}@{}/{}'.format(
                    report_config['redis_address'],
                    report_config['redis_db'],
                    report_config['redis_key'])
                use_log += ' {}'.format(
                    redis_log)
            if (report_config['s3_address'] and
                    report_config['s3_bucket'] and
                    report_config['s3_key']):
                s3_log = 's3://{}/{}/{}'.format(
                    report_config['s3_address'],
                    report_config['s3_bucket'],
                    report_config['s3_key'])
                use_log += ' {}'.format(
                    s3_log)
            if report_config['output_file']:
                file_log = 'file:{}'.format(
                    report_config['output_file'])
                use_log += ' {}'.format(
                    file_log)

            if verbose:
                log.info(
                    '{} - publishing ticker={} trading performance report {}'
                    ''.format(
                        name,
                        ticker,
                        use_log))

            publish_status = created_algo_object.publish_report_dataset(
                **report_config)
            if publish_status != ae_consts.SUCCESS:
                msg = (
                    'failed to publish trading performance report datasets '
                    'with status {} attempted to {}'.format(
                        ae_consts.get_status(status=publish_status),
                        use_log))
                log.error(msg)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=msg,
                    rec=None)
                task_result = {
                    'status': res['status'],
                    'err': res['err'],
                    'algo_req': algo_req,
                    'rec': rec
                }
                return task_result
            # end of stop early

            if verbose:
                log.info(
                    '{} - publish - done ticker={} trading '
                    'performance report {}'.format(
                        name,
                        ticker,
                        use_log))
        # end of publishing a trading performance report dataset

        if verbose:
            log.info(
                '{} - done publishing datasets for ticker={} '
                'from {} to {}'.format(
                    name,
                    ticker,
                    use_start_date,
                    use_end_date))

        rec['history_config'] = history_config
        rec['report_config'] = report_config

        res = build_result.build_result(
            status=ae_consts.SUCCESS,
            err=None,
            rec=rec)

    except Exception as e:
        res = build_result.build_result(
            status=ae_consts.ERR,
            err=(
                'failed - task_run_algo '
                'dict={} with ex={}').format(
                    algo_req,
                    e),
            rec=rec)
        if raise_on_err:
            raise e
        else:
            log.error(
                '{} - {}'.format(
                    label,
                    res['err']))
    # end of try/ex

    if verbose:
        log.info(
            'task - task_run_algo done - '
            '{} - status={}'.format(
                label,
                ae_consts.get_status(res['status'])))

    task_result = {
        'status': res['status'],
        'err': res['err'],
        'algo_req': algo_req,
        'rec': rec
    }
    return task_result
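
Callers can gate on the returned status before touching the record. A minimal sketch, assuming `task_result` is the dictionary returned above (the variable handling is illustrative, not part of the module):

if task_result['status'] != ae_consts.SUCCESS:
    log.error('task_run_algo failed - {}'.format(task_result['err']))
else:
    rec = task_result['rec']
    # on success, the publish configs were stored on the record above
    history_config = rec.get('history_config')
    report_config = rec.get('report_config')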
Example #15
def get_new_pricing_data(self, work_dict):
    """get_new_pricing_data

    Get Ticker information on:

    - prices - turn off with ``work_dict['get_pricing'] = False``
    - news - turn off with ``work_dict['get_news'] = False``
    - options - turn off with ``work_dict['get_options'] = False``

    :param work_dict: dictionary for key/values
    """

    label = 'get_new_pricing_data'

    log.info('task - {} - start work_dict={}'.format(label, work_dict))

    ticker = TICKER
    ticker_id = TICKER_ID
    rec = {
        'pricing': None,
        'options': None,
        'calls': None,
        'puts': None,
        'news': None,
        'daily': None,
        'minute': None,
        'quote': None,
        'stats': None,
        'peers': None,
        'iex_news': None,
        'financials': None,
        'earnings': None,
        'dividends': None,
        'company': None,
        'exp_date': None,
        'publish_pricing_update': None,
        'date': utc_now_str(),
        'updated': None,
        'version': DATASET_COLLECTION_VERSION
    }
    res = {'status': NOT_RUN, 'err': None, 'rec': rec}

    try:
        ticker = work_dict.get('ticker', ticker)
        ticker_id = work_dict.get('ticker_id', TICKER_ID)
        s3_bucket = work_dict.get('s3_bucket', S3_BUCKET)
        s3_key = work_dict.get('s3_key', S3_KEY)
        redis_key = work_dict.get('redis_key', REDIS_KEY)
        exp_date = work_dict.get('exp_date', None)
        cur_date = datetime.datetime.utcnow()
        cur_strike = work_dict.get('strike', None)
        contract_type = str(work_dict.get('contract', 'C')).upper()
        label = work_dict.get('label', label)
        iex_datasets = work_dict.get('iex_datasets', DEFAULT_FETCH_DATASETS)
        fetch_mode = work_dict.get('fetch_mode', FETCH_MODE_ALL)

        # control flags to deal with feed issues:
        get_yahoo_data = True
        get_iex_data = True

        if (fetch_mode == FETCH_MODE_ALL or str(fetch_mode).lower() == 'all'):
            get_yahoo_data = True
            get_iex_data = True
        elif (fetch_mode == FETCH_MODE_YHO
              or str(fetch_mode).lower() == 'yahoo'):
            get_yahoo_data = True
            get_iex_data = False
        elif (fetch_mode == FETCH_MODE_IEX
              or str(fetch_mode).lower() == 'iex'):
            get_yahoo_data = False
            get_iex_data = True
        else:
            log.debug('{} - unsupported fetch_mode={} value'.format(
                label, fetch_mode))
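            # note: an unsupported fetch_mode only logs a debug message
            # and leaves the defaults above in place, so both the yahoo
            # and iex feeds stay enabled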

        if not exp_date:
            exp_date = analysis_engine.options_dates.option_expiration(
                date=exp_date)
        else:
            exp_date = datetime.datetime.strptime(exp_date, '%Y-%m-%d')

        rec['updated'] = cur_date.strftime('%Y-%m-%d %H:%M:%S')
        log.info('{} getting pricing for ticker={} '
                 'cur_date={} exp_date={} '
                 'yahoo={} iex={}'.format(label, ticker, cur_date, exp_date,
                                          get_yahoo_data, get_iex_data))

        yahoo_rec = {
            'ticker': ticker,
            'pricing': None,
            'options': None,
            'calls': None,
            'puts': None,
            'news': None,
            'exp_date': None,
            'publish_pricing_update': None,
            'date': None,
            'updated': None
        }

        if get_yahoo_data:
            log.info('{} yahoo ticker={}'.format(label, ticker))
            yahoo_res = yahoo_data.get_data_from_yahoo(work_dict=work_dict)
            if yahoo_res['status'] == SUCCESS:
                yahoo_rec = yahoo_res['rec']
                log.info('{} yahoo ticker={} '
                         'status={} err={}'.format(
                             label, ticker,
                             get_status(status=yahoo_res['status']),
                             yahoo_res['err']))
                rec['pricing'] = yahoo_rec.get('pricing', '{}')
                rec['news'] = yahoo_rec.get('news', '{}')
                rec['options'] = yahoo_rec.get('options', '{}')
                rec['calls'] = rec['options'].get('calls', EMPTY_DF_STR)
                rec['puts'] = rec['options'].get('puts', EMPTY_DF_STR)
            else:
                log.error('{} failed YAHOO ticker={} '
                          'status={} err={}'.format(
                              label, ticker,
                              get_status(status=yahoo_res['status']),
                              yahoo_res['err']))
        # end of get from yahoo

        if get_iex_data:
            num_iex_ds = len(iex_datasets)
            log.debug('{} iex datasets={}'.format(label, num_iex_ds))
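            # fetch each configured iex dataset independently so one
            # failed dataset does not abort the rest of the loop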
            for idx, ft_type in enumerate(iex_datasets):
                dataset_field = get_ft_str(ft_type=ft_type)

                log.info('{} iex={}/{} field={} ticker={}'.format(
                    label, idx, num_iex_ds, dataset_field, ticker))
                iex_label = '{}-{}'.format(label, dataset_field)
                iex_req = copy.deepcopy(work_dict)
                iex_req['label'] = iex_label
                iex_req['ft_type'] = ft_type
                iex_req['field'] = dataset_field
                iex_req['ticker'] = ticker
                iex_res = iex_data.get_data_from_iex(work_dict=iex_req)

                if iex_res['status'] == SUCCESS:
                    iex_rec = iex_res['rec']
                    log.info('{} iex ticker={} field={} '
                             'status={} err={}'.format(
                                 label, ticker, dataset_field,
                                 get_status(status=iex_res['status']),
                                 iex_res['err']))
                    if dataset_field == 'news':
                        rec['iex_news'] = iex_rec['data']
                    else:
                        rec[dataset_field] = iex_rec['data']
                else:
                    log.debug('{} failed IEX ticker={} field={} '
                              'status={} err={}'.format(
                                  label, ticker, dataset_field,
                                  get_status(status=iex_res['status']),
                                  iex_res['err']))
                # end of if/else success
            # end idx, ft_type in enumerate(iex_datasets):
        # end of if get_iex_data
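
        # assemble the publish request: the collected record plus the
        # s3/redis connectivity settings pulled from work_dict defaults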

        update_req = {'data': rec}
        update_req['ticker'] = ticker
        update_req['ticker_id'] = ticker_id
        update_req['strike'] = cur_strike
        update_req['contract'] = contract_type
        update_req['s3_enabled'] = work_dict.get('s3_enabled',
                                                 ENABLED_S3_UPLOAD)
        update_req['redis_enabled'] = work_dict.get('redis_enabled',
                                                    ENABLED_REDIS_PUBLISH)
        update_req['s3_bucket'] = s3_bucket
        update_req['s3_key'] = s3_key
        update_req['s3_access_key'] = work_dict.get('s3_access_key',
                                                    S3_ACCESS_KEY)
        update_req['s3_secret_key'] = work_dict.get('s3_secret_key',
                                                    S3_SECRET_KEY)
        update_req['s3_region_name'] = work_dict.get('s3_region_name',
                                                     S3_REGION_NAME)
        update_req['s3_address'] = work_dict.get('s3_address', S3_ADDRESS)
        update_req['s3_secure'] = work_dict.get('s3_secure', S3_SECURE)
        update_req['redis_key'] = redis_key
        update_req['redis_address'] = work_dict.get('redis_address',
                                                    REDIS_ADDRESS)
        update_req['redis_password'] = work_dict.get('redis_password',
                                                     REDIS_PASSWORD)
        update_req['redis_db'] = int(work_dict.get('redis_db', REDIS_DB))
        update_req['redis_expire'] = work_dict.get('redis_expire',
                                                   REDIS_EXPIRE)
        update_req['updated'] = rec['updated']
        update_req['label'] = label
        update_req['celery_disabled'] = True
        update_status = NOT_SET

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=update_req)
            update_status = update_res.get('status', NOT_SET)
            if ev('DEBUG_RESULTS', '0') == '1':
                log.info('{} update_res status={} data={}'.format(
                    label, get_status(status=update_status), ppj(update_res)))
            else:
                log.info('{} run_publish_pricing_update status={}'.format(
                    label, get_status(status=update_status)))
            # end of if/else

            rec['publish_pricing_update'] = update_res
            res = build_result.build_result(status=SUCCESS, err=None, rec=rec)
        except Exception as f:
            err = ('{} publisher.run_publish_pricing_update failed '
                   'with ex={}'.format(label, f))
            log.error(err)
            res = build_result.build_result(status=ERR, err=err, rec=rec)
        # end of trying to publish results to connected services

    except Exception as e:
        res = build_result.build_result(status=ERR,
                                        err=('failed - get_new_pricing_data '
                                             'dict={} with ex={}').format(
                                                 work_dict, e),
                                        rec=rec)
        log.error('{} - {}'.format(label, res['err']))
    # end of try/ex

    if ev('DATASET_COLLECTION_SLACK_ALERTS', '0') == '1':
        env_name = 'DEV'
        if ev('PROD_SLACK_ALERTS', '1') == '1':
            env_name = 'PROD'
        done_msg = ('Dataset collected ticker=*{}* on env=*{}* '
                    'redis_key={} s3_key={} iex={} yahoo={}'.format(
                        ticker, env_name, redis_key, s3_key, get_iex_data,
                        get_yahoo_data))
        log.debug('{} sending slack msg={}'.format(label, done_msg))
        if res['status'] == SUCCESS:
            slack_utils.post_success(msg=done_msg, block=False, jupyter=True)
        else:
            slack_utils.post_failure(msg=done_msg, block=False, jupyter=True)
        # end of if/else success
    # end of publishing to slack

    log.info('task - get_new_pricing_data done - '
             '{} - status={}'.format(label, get_status(res['status'])))

    return analysis_engine.get_task_results.get_task_results(
        work_dict=work_dict, result=res)
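
A minimal request sketch for this task; the key names mirror the ``work_dict.get(...)`` lookups above, and every value is an illustrative assumption:

work_dict = {
    'ticker': 'SPY',            # falls back to the module TICKER default
    'fetch_mode': 'all',        # 'all', 'yahoo', or 'iex'
    's3_bucket': 'pricing',     # hypothetical bucket
    's3_key': 'SPY_latest',     # hypothetical object key
    'redis_key': 'SPY_latest',  # hypothetical cache key
    'label': 'collect-SPY'      # tag used in every log line
}
# 'self' is supplied by celery when this runs as a bound task;
# passing None here is only for the sketch
res = get_new_pricing_data(None, work_dict)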
Example #16
def get_data_from_td(work_dict):
    """get_data_from_td

    Get pricing data from Tradier

    :param work_dict: request dictionary
    """
    label = 'get_data_from_td'

    log.info('task - {} - start work_dict={}'.format(label, work_dict))

    rec = {'data': None, 'updated': None}
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    ticker = None
    field = None
    ft_type = None

    try:

        ticker = work_dict.get('ticker', ae_consts.TICKER)
        field = work_dict.get('field', 'daily')
        ft_type = work_dict.get('ft_type', None)
        ft_str = str(ft_type).lower()
        label = work_dict.get('label', label)
        orient = work_dict.get('orient', 'records')

        td_req = None
        if ft_type == td_consts.FETCH_TD_CALLS or ft_str == 'tdcalls':
            ft_type = td_consts.FETCH_TD_CALLS
            td_req = api_requests.build_td_fetch_calls_request(label=label)
        elif ft_type == td_consts.FETCH_TD_PUTS or ft_str == 'tdputs':
            ft_type = td_consts.FETCH_TD_PUTS
            td_req = api_requests.build_td_fetch_puts_request(label=label)
        else:
            log.error('{} - unsupported ft_type={} ft_str={} ticker={}'.format(
                label, ft_type, ft_str, ticker))
            raise NotImplementedError
        # if supported fetch request type
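
        # copy the caller's connection settings into the TD request so
        # the fetch and the publish below talk to the same s3/redis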

        clone_keys = [
            'ticker', 's3_address', 's3_bucket', 's3_key', 'redis_address',
            'redis_db', 'redis_password', 'redis_key'
        ]

        for k in clone_keys:
            td_req[k] = work_dict.get(k, '{}-missing-in-{}'.format(k, label))
        # end of cloning keys

        if not td_req:
            err = ('{} - ticker={} did not build a TD request '
                   'for work={}'.format(label, ticker, work_dict))
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
            return res
        else:
            log.info('{} - ticker={} field={} '
                     'orient={} fetch'.format(label, td_req['ticker'], field,
                                              orient))
        # if invalid td request

        df = None
        try:
            if 'from' in work_dict:
                td_req['from'] = datetime.datetime.strptime(
                    work_dict['from'], '%Y-%m-%d %H:%M:%S')
            status_df, df = td_fetch_data.fetch_data(work_dict=td_req,
                                                     fetch_type=ft_type)

            if status_df == ae_consts.SUCCESS:
                rec['data'] = df.to_json(orient=orient)
                rec['updated'] = datetime.datetime.utcnow().strftime(
                    '%Y-%m-%d %H:%M:%S')
            else:
                err = ('{} - ticker={} td_fetch_data.fetch_data field={} '
                       'failed'
                       ''.format(label, td_req['ticker'], field))
                log.critical(err)
                res = build_result.build_result(status=ae_consts.ERR,
                                                err=err,
                                                rec=rec)
                return res
        except Exception as f:
            err = ('{} - ticker={} field={} failed fetch_data '
                   'with ex={}'.format(label, td_req['ticker'], field, f))
            log.critical(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
            return res
        # end of try/ex

        if ae_consts.ev('DEBUG_TD_DATA', '0') == '1':
            log.info('{} ticker={} field={} data={} to_json'.format(
                label, td_req['ticker'], field, rec['data']))
        else:
            log.info('{} ticker={} field={} to_json'.format(
                label, td_req['ticker'], field))
        # end of if/else found data

        upload_and_cache_req = copy.deepcopy(td_req)
        upload_and_cache_req['celery_disabled'] = True
        upload_and_cache_req['data'] = rec['data']
        if not upload_and_cache_req['data']:
            upload_and_cache_req['data'] = '{}'
        use_field = field
        if use_field == 'news':
            use_field = 'news1'
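        # suffix the cache keys with the dataset field so each fetch
        # type gets its own entry (e.g. a key like <redis_key>_tdcalls)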
        if 'redis_key' in work_dict:
            upload_and_cache_req['redis_key'] = '{}_{}'.format(
                work_dict.get('redis_key', td_req['redis_key']), use_field)
        if 's3_key' in work_dict:
            upload_and_cache_req['s3_key'] = '{}_{}'.format(
                work_dict.get('s3_key', td_req['s3_key']), use_field)

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=upload_and_cache_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            log.info('{} publish update status={} data={}'.format(
                label, ae_consts.get_status(status=update_status), update_res))
        except Exception as f:
            err = ('{} - failed to upload td data={} '
                   'to s3_key={} and redis_key={} with ex={}'.format(
                       label, upload_and_cache_req,
                       upload_and_cache_req['s3_key'],
                       upload_and_cache_req['redis_key'],
                       f))
            log.error(err)
        # end of try/ex to upload and cache

        if not rec['data']:
            log.info(
                '{} - ticker={} no Tradier data field={} to publish'.format(
                    label, td_req['ticker'], field))
        # end of if/else

        res = build_result.build_result(status=ae_consts.SUCCESS,
                                        err=None,
                                        rec=rec)

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=('failed - get_data_from_td '
                                             'dict={} with ex={}').format(
                                                 work_dict, e),
                                        rec=rec)
    # end of try/ex

    log.info('task - get_data_from_td done - '
             '{} - status={} err={}'.format(
                 label, ae_consts.get_status(res['status']), res['err']))

    return res
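
A minimal request sketch for ``get_data_from_td``; the key names come from the ``work_dict.get(...)`` calls and ``clone_keys`` above, and the endpoint values are illustrative assumptions:

work_dict = {
    'ticker': 'SPY',
    'ft_type': 'tdcalls',              # or 'tdputs'
    'field': 'tdcalls',
    'orient': 'records',               # pandas DataFrame.to_json orient
    's3_address': 'localhost:9000',    # hypothetical minio endpoint
    's3_bucket': 'pricing',            # hypothetical bucket
    's3_key': 'SPY_td',                # hypothetical object key
    'redis_address': 'localhost:6379', # hypothetical redis endpoint
    'redis_db': 0,
    'redis_password': None,
    'redis_key': 'SPY_td'              # hypothetical cache key
}
res = get_data_from_td(work_dict=work_dict)
print(ae_consts.get_status(res['status']))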