def get_result(self):
        """get_result"""

        log.info('building results')
        finished_date = ae_utils.utc_now_str()
        self.result = {
            'name': self.name,
            'created': self.created_date,
            'updated': finished_date,
            'open_positions': self.positions,
            'buys': self.get_buys(),
            'sells': self.get_sells(),
            'num_processed': len(self.order_history),
            'history': self.order_history,
            'balance': self.balance,
            'commission': self.commission
        }

        return self.result
def build_buy_order(ticker,
                    num_owned,
                    close,
                    balance,
                    commission,
                    date,
                    details,
                    use_key,
                    minute=None,
                    shares=None,
                    version=1,
                    auto_fill=True,
                    is_live_trading=False,
                    backtest_shares_default=10,
                    reason=None):
    """build_buy_order

    Create an algorithm buy order as a dictionary

    .. note:: setting the ``minute`` is required to build
        a minute-by-minute ``Trading History``

    :param ticker: ticker
    :param num_owned: integer current owned
        number of shares for this asset
    :param close: float closing price of the asset
    :param balance: float amount of available capital
    :param commission: float for commission costs
    :param date: string trade date for that row usually
        ``COMMON_DATE_FORMAT`` (``YYYY-MM-DD``)
    :param minute: optional - string with the minute that the
        order was placed. format is
        ``COMMON_TICK_DATE_FORMAT`` (``YYYY-MM-DD HH:MM:SS``)
    :param details: dictionary for full row of values to review
        all buys after the algorithm finishes.
        (usually ``row.to_json()``)
    :param use_key: string for redis and s3 publishing of the algorithm
        result dictionary as a json-serialized dictionary
    :param shares: optional - integer number of shares to buy
        if None buy max number of shares at the ``close`` with the
        available ``balance`` amount.
    :param version: optional - version tracking integer
    :param auto_fill: optional - bool for not assuming the trade
        filled (default ``True``)
    :param is_live_trading: optional - bool for filling trades
        for live trading or for backtest tuning filled
        (default ``False`` which is backtest mode)
    :param backtest_shares_default: optional - integer for
        simulating shares during a backtest even if there
        are not enough funds
        (default ``10``)
    :param reason: optional - string for recording why the algo
        decided to buy for review after the algorithm finishes
    """
    status = ae_consts.TRADE_OPEN
    s3_bucket_name = ae_consts.ALGO_BUYS_S3_BUCKET_NAME
    s3_key = use_key
    redis_key = use_key
    s3_enabled = True
    redis_enabled = True

    cost_of_trade = None
    new_shares = num_owned
    new_balance = balance
    created_date = None

    tradable_funds = balance - (2.0 * commission)

    if not is_live_trading:
        if not shares:
            shares = backtest_shares_default
        tradable_funds = ((shares * close) + (2.0 * commission))

    if close > 0.1 and tradable_funds > 10.0:
        can_buy_num_shares = shares
        if not can_buy_num_shares:
            can_buy_num_shares = int(tradable_funds / close)
        cost_of_trade = ae_consts.to_f(val=(can_buy_num_shares * close) +
                                       commission)
        if can_buy_num_shares > 0:
            if cost_of_trade > balance:
                status = ae_consts.TRADE_NOT_ENOUGH_FUNDS
            else:
                created_date = ae_utils.utc_now_str()
                if auto_fill:
                    new_shares = int(num_owned + can_buy_num_shares)
                    new_balance = ae_consts.to_f(balance - cost_of_trade)
                    status = ae_consts.TRADE_FILLED
                else:
                    new_shares = shares
                    new_balance = balance
        else:
            status = ae_consts.TRADE_NOT_ENOUGH_FUNDS
    else:
        status = ae_consts.TRADE_NOT_ENOUGH_FUNDS

    order_dict = {
        'ticker': ticker,
        'status': status,
        'balance': new_balance,
        'shares': new_shares,
        'buy_price': cost_of_trade,
        'prev_balance': balance,
        'prev_shares': num_owned,
        'close': close,
        'details': details,
        'reason': reason,
        'date': date,
        'minute': minute,
        'created': created_date,
        's3_bucket': s3_bucket_name,
        's3_key': s3_key,
        'redis_key': redis_key,
        's3_enabled': s3_enabled,
        'redis_enabled': redis_enabled,
        'version': version
    }

    use_date = minute
    if not use_date:
        use_date = date

    log.debug('{} {} buy {} order={}'.format(
        ticker, use_date, ae_consts.get_status(status=order_dict['status']),
        ae_consts.ppj(order_dict)))

    return order_dict
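# Hypothetical usage sketch for ``build_buy_order`` (not from the source
# module) - the ticker, prices, and ``details`` row are made-up values and
# ``ae_consts`` is assumed to be imported as in the module above.
def _demo_build_buy_order():
    buy = build_buy_order(
        ticker='SPY',
        num_owned=0,
        close=288.09,
        balance=5000.0,
        commission=6.0,
        date='2019-02-15',
        minute='2019-02-15 15:59:00',
        details={'high': 288.98, 'low': 287.88},  # usually row.to_json()
        use_key='SPY_2019-02-15_buys',
        shares=None,  # backtest mode falls back to backtest_shares_default
        is_live_trading=False)
    return (ae_consts.get_status(status=buy['status']),
            buy['shares'],
            buy['balance'])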
def get_ds_dict(ticker,
                base_key=None,
                ds_id=None,
                label=None,
                service_dict=None):
    """get_ds_dict

    Build and return a dictionary holding all cache keys for a ticker.
    Use this method to decouple your apps from the underlying cache key
    implementations (when you do not need the keys directly).

    :param ticker: ticker
    :param base_key: optional - base key that is prepended
                     in all cache keys
    :param ds_id: optional - dataset id (useful for
                  external database id)
    :param label: optional - tracking label in the logs
    :param service_dict: optional - parent call functions and Celery
                         tasks can use this dictionary to seed the
                         common service routes and endpoints. Refer
                         to ``analysis_engine.consts.SERVICE_VALS``
                         for automatically-copied over keys by this
                         helper.
    """

    if not ticker:
        raise Exception('please pass in a ticker')

    use_base_key = base_key
    if not use_base_key:
        use_base_key = '{}_{}'.format(
            ticker,
            ae_utils.get_last_close_str(fmt=ae_consts.COMMON_DATE_FORMAT))

    date_str = ae_utils.utc_date_str(fmt=ae_consts.COMMON_DATE_FORMAT)
    now_str = ae_utils.utc_now_str(fmt=ae_consts.COMMON_TICK_DATE_FORMAT)

    daily_redis_key = '{}_{}'.format(use_base_key,
                                     ae_consts.DAILY_S3_BUCKET_NAME)
    minute_redis_key = '{}_{}'.format(use_base_key,
                                      ae_consts.MINUTE_S3_BUCKET_NAME)
    quote_redis_key = '{}_{}'.format(use_base_key,
                                     ae_consts.QUOTE_S3_BUCKET_NAME)
    stats_redis_key = '{}_{}'.format(use_base_key,
                                     ae_consts.STATS_S3_BUCKET_NAME)
    peers_redis_key = '{}_{}'.format(use_base_key,
                                     ae_consts.PEERS_S3_BUCKET_NAME)
    news_iex_redis_key = '{}_{}1'.format(use_base_key,
                                         ae_consts.NEWS_S3_BUCKET_NAME)
    financials_redis_key = '{}_{}'.format(use_base_key,
                                          ae_consts.FINANCIALS_S3_BUCKET_NAME)
    earnings_redis_key = '{}_{}'.format(use_base_key,
                                        ae_consts.EARNINGS_S3_BUCKET_NAME)
    dividends_redis_key = '{}_{}'.format(use_base_key,
                                         ae_consts.DIVIDENDS_S3_BUCKET_NAME)
    company_redis_key = '{}_{}'.format(use_base_key,
                                       ae_consts.COMPANY_S3_BUCKET_NAME)
    options_yahoo_redis_key = '{}_{}'.format(use_base_key,
                                             ae_consts.OPTIONS_S3_BUCKET_NAME)
    call_options_yahoo_redis_key = '{}_calls'.format(use_base_key)
    put_options_yahoo_redis_key = '{}_puts'.format(use_base_key)
    pricing_yahoo_redis_key = '{}_{}'.format(use_base_key,
                                             ae_consts.PRICING_S3_BUCKET_NAME)
    news_yahoo_redis_key = '{}_{}'.format(use_base_key,
                                          ae_consts.NEWS_S3_BUCKET_NAME)
    call_options_td_redis_key = '{}_tdcalls'.format(use_base_key)
    put_options_td_redis_key = '{}_tdputs'.format(use_base_key)

    ds_cache_dict = {
        'daily': daily_redis_key,
        'minute': minute_redis_key,
        'quote': quote_redis_key,
        'stats': stats_redis_key,
        'peers': peers_redis_key,
        'news1': news_iex_redis_key,
        'financials': financials_redis_key,
        'earnings': earnings_redis_key,
        'dividends': dividends_redis_key,
        'company': company_redis_key,
        'options': options_yahoo_redis_key,
        'calls': call_options_yahoo_redis_key,
        'puts': put_options_yahoo_redis_key,
        'pricing': pricing_yahoo_redis_key,
        'news': news_yahoo_redis_key,
        'tdcalls': call_options_td_redis_key,
        'tdputs': put_options_td_redis_key,
        'ticker': ticker,
        'ds_id': ds_id,
        'label': label,
        'created': now_str,
        'date': date_str,
        'manifest_key': use_base_key,
        'version': ae_consts.CACHE_DICT_VERSION
    }

    # set keys/values for redis/minio from the
    # service_dict - helper method for
    # launching job chains
    if service_dict:
        for k in ae_consts.SERVICE_VALS:
            ds_cache_dict[k] = service_dict[k]

    return ds_cache_dict
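# Hypothetical usage sketch for ``get_ds_dict`` (not part of the source).
# The ticker and label are made up; ``base_key`` is left as None so the
# helper derives one from the ticker and the last close date.
def _demo_get_ds_dict():
    cache_keys = get_ds_dict(
        ticker='SPY',
        base_key=None,
        ds_id=None,
        label='demo')
    # apps only need the logical names, not the underlying key layout:
    return (cache_keys['daily'],
            cache_keys['minute'],
            cache_keys['tdcalls'])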
def build_sell_order(ticker,
                     num_owned,
                     close,
                     balance,
                     commission,
                     date,
                     details,
                     use_key,
                     minute=None,
                     shares=None,
                     version=1,
                     auto_fill=True,
                     is_live_trading=False,
                     backtest_shares_default=10,
                     reason=None):
    """build_sell_order

    Create an algorithm sell order as a dictionary

    :param ticker: ticker
    :param num_owned: integer current owned
        number of shares for this asset
    :param close: float closing price of the asset
    :param balance: float amount of available capital
    :param commission: float for commission costs
    :param date: string trade date for that row usually
        ``COMMON_DATE_FORMAT`` (``YYYY-MM-DD``)
    :param minute: optional - string with the minute that the
        order was placed. format is
        ``COMMON_TICK_DATE_FORMAT`` (``YYYY-MM-DD HH:MM:SS``)
    :param details: dictionary for full row of values to review
        all sells after the algorithm finishes.
        (usually ``row.to_json()``)
    :param use_key: string for redis and s3 publishing of the algorithm
        result dictionary as a json-serialized dictionary
    :param shares: optional - integer number of shares to sell
        if None sell all ``num_owned`` shares at the ``close``.
    :param version: optional - version tracking integer
    :param auto_fill: optional - bool for not assuming the trade
        filled (default ``True``)
    :param is_live_trading: optional - bool for filling trades
        for live trading or for backtest tuning filled
        (default ``False`` which is backtest mode)
    :param backtest_shares_default: optional - integer for
        simulating shares during a backtest even if there
        are not enough funds
        (default ``10``)
    :param reason: optional - string for recording why the algo
        decided to sell for review after the algorithm finishes
    """
    status = ae_consts.TRADE_OPEN
    s3_bucket_name = ae_consts.ALGO_SELLS_S3_BUCKET_NAME
    s3_key = use_key
    redis_key = use_key
    s3_enabled = True
    redis_enabled = True

    cost_of_trade = None
    sell_price = 0.0
    new_shares = num_owned
    new_balance = balance
    created_date = None

    tradable_funds = balance - commission

    if num_owned == 0:
        status = ae_consts.TRADE_NO_SHARES_TO_SELL
    elif close > 0.1 and tradable_funds > 10.0:
        cost_of_trade = commission
        if shares:
            if shares > num_owned:
                shares = num_owned
        else:
            shares = num_owned
        sell_price = ae_consts.to_f(val=(shares * close) + commission)
        if cost_of_trade > balance:
            status = ae_consts.TRADE_NOT_ENOUGH_FUNDS
        else:
            created_date = ae_utils.utc_now_str()
            if auto_fill:
                new_shares = num_owned - shares
                new_balance = ae_consts.to_f(balance + sell_price)
                status = ae_consts.TRADE_FILLED
            else:
                new_shares = shares
                new_balance = balance
    else:
        status = ae_consts.TRADE_NOT_ENOUGH_FUNDS

    order_dict = {
        'ticker': ticker,
        'status': status,
        'balance': new_balance,
        'shares': new_shares,
        'sell_price': sell_price,
        'prev_balance': balance,
        'prev_shares': num_owned,
        'close': close,
        'details': details,
        'reason': reason,
        'date': date,
        'minute': minute,
        'created': created_date,
        's3_bucket': s3_bucket_name,
        's3_key': s3_key,
        'redis_key': redis_key,
        's3_enabled': s3_enabled,
        'redis_enabled': redis_enabled,
        'version': version
    }

    use_date = minute
    if not use_date:
        use_date = date

    log.debug(f'{ticker} {use_date} sell '
              f'{ae_consts.get_status(status=order_dict["status"])} '
              f'order={ae_consts.ppj(order_dict)}')

    return order_dict
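# Hypothetical usage sketch for ``build_sell_order`` (values are made up).
# Passing shares=None sells every share in ``num_owned`` at the ``close``.
def _demo_build_sell_order():
    sell = build_sell_order(
        ticker='SPY',
        num_owned=10,
        close=290.12,
        balance=100.0,
        commission=6.0,
        date='2019-02-20',
        minute='2019-02-20 15:59:00',
        details={'high': 291.00, 'low': 289.50},  # usually row.to_json()
        use_key='SPY_2019-02-20_sells',
        shares=None,
        reason='take profit')
    return (ae_consts.get_status(status=sell['status']),
            sell['shares'],
            sell['balance'])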
def build_indicator_node(node, label=None):
    """build_indicator_node

    Parse a dictionary in the algorithm config ``indicators`` list
    and return a dictionary

    Supported values found in:
    `analysis_engine/consts.py <https://github.com/AlgoTraders/
    stock-analysis-engine/blob/master/analysis_engine/consts.py>`__

    :param node: single dictionary from the config's ``indicators`` list
    :param label: optional - string log tracking
        this class in the logs (usually just the algo
        name is good enough to help debug issues
        when running distributed)
    :return: dictionary
    """
    if not label:
        label = 'build_indicator_node'

    name = node.get('name', None)
    if not name:
        raise Exception(
            '{} - missing "name" in indicator dictionary={}'.format(
                label, node))
    # end of name check

    ind_id = str(uuid.uuid4()).replace('-', '')
    uses_dataset_str = node.get('uses_data', 'daily')
    uses_dataset = ae_consts.get_indicator_uses_data_as_int(
        val=uses_dataset_str)
    if uses_dataset == ae_consts.INDICATOR_USES_DATA_UNSUPPORTED:
        uses_dataset = ae_consts.INDICATOR_USES_DATA_ANY
        log.debug('{} - unsupported indicator '
                  'uses_dataset={} defaulting to '
                  'INDICATOR_USES_DATA_ANY'.format(
                      label, uses_dataset_str))
    # end of supported indicator dataset types

    ind_category_str = node.get('category', 'momentum')
    ind_category = ae_consts.get_indicator_category_as_int(
        val=ind_category_str)
    if ind_category == ae_consts.INDICATOR_CATEGORY_UNKNOWN:
        ind_category = ae_consts.INDICATOR_CATEGORY_MOMENTUM
        log.debug('{} - unsupported indicator '
                  'category={} defaulting to "momentum"'.format(
                      label, ind_category_str))
    # end of supported indicator category

    ind_type_str = node.get('type', 'technical')
    ind_type = ae_consts.get_indicator_type_as_int(val=ind_type_str)
    if ind_type == ae_consts.INDICATOR_TYPE_UNKNOWN:
        ind_type = ae_consts.INDICATOR_TYPE_TECHNICAL
        log.debug('{} - unsupported indicator '
                  'type={} defaulting to "technical"'.format(
                      label, ind_type_str))
    # end of supported indicator type

    # allow easier key discovery
    use_unique_id = node.get('unique_id', False)
    ind_name = '{}_{}_{}_{}'.format(name, ind_category, ind_type, uses_dataset)
    if use_unique_id:
        ind_name = '{}_{}_{}_{}_{}'.format(name, ind_category, ind_type,
                                           uses_dataset, ind_id)

    use_module_name = None
    use_path_to_module = None

    # none will use the BaseIndicator which does nothing
    use_path_to_module = node.get('module_path',
                                  ae_consts.INDICATOR_BASE_MODULE_PATH)
    if not use_path_to_module:
        raise Exception('Failed building Indicator node with missing '
                        'module_path node={}'.format(node))
    use_module_name = node.get('module_name', node.get('name', ind_id))

    default_report_ignore_keys = \
        ae_consts.INDICATOR_IGNORED_CONIGURABLE_KEYS

    report_dict = {
        'id': ind_id,
        'name': ind_name,
        'created': ae_utils.utc_now_str(),
        'version': 1,
        'module_name': use_module_name,
        'path_to_module': use_path_to_module,
        'report_ignore_keys': default_report_ignore_keys,
        'metrics': {
            'type': ind_type,
            'category': ind_category,
            'uses_data': uses_dataset
        }
    }

    labeled_node = copy.deepcopy(node)

    # allow a node's sub report dir to be patched with this
    # tracking + reporting data
    # the algorithms will flatten:
    #    indicators[ind_name]['report']['metrics']
    # for trading performance report generation
    if 'report' in labeled_node:
        labeled_node['report'].update(report_dict)
    else:
        labeled_node['report'] = report_dict

    return labeled_node
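# Hypothetical usage sketch for ``build_indicator_node`` (not from the
# source). Only ``name`` is required; ``module_path`` here is a placeholder
# for a custom indicator file and the remaining keys are illustrative
# indicator-specific settings.
def _demo_build_indicator_node():
    node = {
        'name': 'willr',
        'module_path': '/opt/sa/custom_indicators/example_williams_r.py',
        'category': 'momentum',
        'type': 'technical',
        'uses_data': 'minute',
        'num_points': 15,
        'buy_below': -80,
        'sell_above': -20
    }
    labeled = build_indicator_node(node=node, label='demo-algo')
    # the patched-in report carries the generated id and metrics
    return (labeled['report']['id'],
            labeled['report']['metrics'])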
    def publish_trading_history(self,
                                records_for_history,
                                pt_s3_access_key=None,
                                pt_s3_secret_key=None,
                                pt_s3_address=None,
                                pt_s3_region=None,
                                pt_s3_bucket=None,
                                pt_s3_key=None,
                                pt_s3_secure=ae_consts.NOT_SET,
                                **kwargs):
        """publish_trading_history

        Helper for publishing a trading history
        to another S3 service like AWS

        :param records_for_history: list of dictionaries
            for the history file
        :param pt_s3_access_key: access key
        :param pt_s3_secret_key: secret
        :param pt_s3_address: address
        :param pt_s3_region: region
        :param pt_s3_bucket: bucket
        :param pt_s3_key: key
        :param pt_s3_secure: secure flag
        :param kwargs: support for keyword arg dict
        """
        use_s3_access_key = self.pt_s3_access_key
        use_s3_secret_key = self.pt_s3_secret_key
        use_s3_address = self.pt_s3_address
        use_s3_region = self.pt_s3_region
        use_s3_bucket = self.pt_s3_bucket
        use_s3_key = self.pt_s3_key
        use_s3_secure = self.pt_s3_secure

        use_s3_enabled = kwargs.get('s3_enabled', True)
        use_redis_enabled = kwargs.get('redis_enabled', False)
        use_redis_address = kwargs.get('redis_address', None)
        use_redis_db = kwargs.get('redis_db', None)
        use_redis_key = kwargs.get('redis_key', None)
        use_redis_password = kwargs.get('redis_password', None)
        use_redis_expire = kwargs.get('redis_expire', None)
        use_redis_serializer = kwargs.get('redis_serializer', 'json')
        use_redis_encoding = kwargs.get('redis_encoding', 'utf-8')
        verbose = kwargs.get('verbose', False)

        if pt_s3_access_key:
            use_s3_access_key = pt_s3_access_key
        if pt_s3_secret_key:
            use_s3_secret_key = pt_s3_secret_key
        if pt_s3_address:
            use_s3_address = pt_s3_address
        if pt_s3_region:
            use_s3_region = pt_s3_region
        if pt_s3_bucket:
            use_s3_bucket = pt_s3_bucket
        if pt_s3_key:
            use_s3_key = pt_s3_key
        if pt_s3_secure != ae_consts.NOT_SET:
            use_s3_secure = pt_s3_secure

        rec = {
            'tickers': self.ticker,
            'version': int(ae_consts.ALGO_HISTORY_VERSION),
            'last_trade_date': ae_utils.get_last_close_str(),
            'algo_config_dict': self.config_dict,
            'algo_name': self.use_name,
            'created': ae_utils.utc_now_str(),
            self.ticker: records_for_history
        }

        num_bytes = len(str(rec))
        num_mb = ae_consts.get_mb(num_bytes)

        msg = (
            f'publish - {self.ticker} - {rec["last_trade_date"]} '
            # f'{use_s3_access_key} with: {use_s3_secret_key} '
            f's3_loc={use_s3_address}/{use_s3_bucket}/{use_s3_key} '
            f'mb={num_mb}MB')
        log.info(msg)

        publish.publish(data=rec,
                        label='pub',
                        df_compress=True,
                        compress=False,
                        convert_to_dict=False,
                        output_file=None,
                        redis_enabled=use_redis_enabled,
                        redis_key=use_redis_key,
                        redis_address=use_redis_address,
                        redis_db=use_redis_db,
                        redis_password=use_redis_password,
                        redis_expire=use_redis_expire,
                        redis_serializer=use_redis_serializer,
                        redis_encoding=use_redis_encoding,
                        s3_enabled=use_s3_enabled,
                        s3_key=use_s3_key,
                        s3_address=use_s3_address,
                        s3_bucket=use_s3_bucket,
                        s3_access_key=use_s3_access_key,
                        s3_secret_key=use_s3_secret_key,
                        s3_region_name=use_s3_region,
                        s3_secure=use_s3_secure,
                        slack_enabled=False,
                        verbose=verbose)
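# Hypothetical usage sketch (placeholder endpoint and bucket values, not
# real credentials). ``algo`` is assumed to be an instance of the class
# that defines publish_trading_history above, with its order history built.
def _demo_publish_trading_history(algo):
    algo.publish_trading_history(
        records_for_history=algo.order_history,
        pt_s3_address='s3.us-east-1.amazonaws.com',
        pt_s3_bucket='algohistory',
        pt_s3_key='trade_history_SPY.json',
        redis_enabled=False,
        verbose=True)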
def get_new_pricing_data(self, work_dict):
    """get_new_pricing_data

    Get Ticker information on:

    - prices - turn off with ``work_dict['get_pricing'] = False``
    - news - turn off with ``work_dict['get_news'] = False``
    - options - turn off with ``work_dict['get_options'] = False``

    :param work_dict: dictionary for key/values
    """

    label = 'get_new_pricing_data'

    log.debug(f'task - {label} - start work_dict={work_dict}')

    num_success = 0
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    rec = {
        'pricing': None,
        'options': None,
        'calls': None,
        'puts': None,
        'news': None,
        'daily': None,
        'minute': None,
        'quote': None,
        'stats': None,
        'peers': None,
        'iex_news': None,
        'financials': None,
        'earnings': None,
        'dividends': None,
        'company': None,
        'exp_date': None,
        'publish_pricing_update': None,
        'num_success': num_success,
        'date': ae_utils.utc_now_str(),
        'updated': None,
        'version': ae_consts.DATASET_COLLECTION_VERSION
    }
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    try:
        ticker = work_dict.get('ticker', ticker)
        ticker_id = work_dict.get('ticker_id', ae_consts.TICKER_ID)
        s3_bucket = work_dict.get('s3_bucket', ae_consts.S3_BUCKET)
        s3_key = work_dict.get('s3_key', ae_consts.S3_KEY)
        redis_key = work_dict.get('redis_key', ae_consts.REDIS_KEY)
        exp_date = work_dict.get('exp_date', None)
        cur_date = ae_utils.last_close()
        cur_strike = work_dict.get('strike', None)
        contract_type = str(work_dict.get('contract', 'C')).upper()
        label = work_dict.get('label', label)
        iex_datasets = work_dict.get('iex_datasets',
                                     iex_consts.DEFAULT_FETCH_DATASETS)
        td_datasets = work_dict.get('td_datasets',
                                    td_consts.DEFAULT_FETCH_DATASETS_TD)
        fetch_mode = work_dict.get('fetch_mode', ae_consts.FETCH_MODE_ALL)
        iex_token = work_dict.get('iex_token', iex_consts.IEX_TOKEN)
        td_token = work_dict.get('td_token', td_consts.TD_TOKEN)
        str_fetch_mode = str(fetch_mode).lower()

        # control flags to deal with feed issues:
        get_iex_data = True
        get_td_data = True

        if (fetch_mode == ae_consts.FETCH_MODE_ALL
                or str_fetch_mode == 'initial'):
            get_iex_data = True
            get_td_data = True
            iex_datasets = ae_consts.IEX_INITIAL_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_ALL
              or str_fetch_mode == 'all'):
            get_iex_data = True
            get_td_data = True
            iex_datasets = ae_consts.IEX_DATASETS_DEFAULT
        elif (fetch_mode == ae_consts.FETCH_MODE_YHO
              or str_fetch_mode == 'yahoo'):
            get_iex_data = False
            get_td_data = False
        elif (fetch_mode == ae_consts.FETCH_MODE_IEX
              or str_fetch_mode == 'iex-all'):
            get_iex_data = True
            get_td_data = False
            iex_datasets = ae_consts.IEX_DATASETS_DEFAULT
        elif (fetch_mode == ae_consts.FETCH_MODE_IEX
              or str_fetch_mode == 'iex'):
            get_iex_data = True
            get_td_data = False
            iex_datasets = ae_consts.IEX_INTRADAY_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_INTRADAY
              or str_fetch_mode == 'intra'):
            get_iex_data = True
            get_td_data = True
            iex_datasets = ae_consts.IEX_INTRADAY_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_DAILY
              or str_fetch_mode == 'daily'):
            get_iex_data = True
            get_td_data = False
            iex_datasets = ae_consts.IEX_DAILY_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_WEEKLY
              or str_fetch_mode == 'weekly'):
            get_iex_data = True
            get_td_data = False
            iex_datasets = ae_consts.IEX_WEEKLY_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_TD or str_fetch_mode == 'td'):
            get_iex_data = False
            get_td_data = True
        else:
            get_iex_data = False
            get_td_data = False

            fetch_arr = str_fetch_mode.split(',')
            found_fetch = False
            iex_datasets = []
            for fetch_name in fetch_arr:
                if fetch_name not in iex_datasets:
                    if fetch_name == 'iex_min':
                        iex_datasets.append('minute')
                    elif fetch_name == 'iex_day':
                        iex_datasets.append('daily')
                    elif fetch_name == 'iex_quote':
                        iex_datasets.append('quote')
                    elif fetch_name == 'iex_stats':
                        iex_datasets.append('stats')
                    elif fetch_name == 'iex_peers':
                        iex_datasets.append('peers')
                    elif fetch_name == 'iex_news':
                        iex_datasets.append('news')
                    elif fetch_name == 'iex_fin':
                        iex_datasets.append('financials')
                    elif fetch_name == 'iex_earn':
                        iex_datasets.append('earnings')
                    elif fetch_name == 'iex_div':
                        iex_datasets.append('dividends')
                    elif fetch_name == 'iex_comp':
                        iex_datasets.append('company')
                    elif fetch_name == 'td':
                        get_td_data = True
                    else:
                        log.warn('unsupported IEX dataset ' f'{fetch_name}')
            found_fetch = (len(iex_datasets) != 0)
            if not found_fetch:
                log.error(f'{label} - unsupported '
                          f'fetch_mode={fetch_mode} value')
            else:
                get_iex_data = True
                log.debug(f'{label} - '
                          f'fetching={len(iex_datasets)} '
                          f'{iex_datasets} '
                          f'fetch_mode={fetch_mode}')
        # end of screening custom fetch_mode settings

        num_tokens = 0

        if get_iex_data:
            if not iex_token:
                log.warn(f'{label} - '
                         'please set a valid IEX Cloud Account token ('
                         'https://iexcloud.io/cloud-login/#/register'
                         ') to fetch data from IEX Cloud. It must be '
                         'set as an environment variable like: '
                         'export IEX_TOKEN=<token>')
                get_iex_data = False
            else:
                num_tokens += 1
        # sanity check - disable IEX fetch if the token is not set
        if get_td_data:
            missing_td_token = [
                'MISSING_TD_TOKEN', 'SETYOURTDTOKEN', 'SETYOURTRADIERTOKENHERE'
            ]
            if td_token in missing_td_token:
                log.warn(f'{label} - '
                         'please set a valid Tradier Account token ('
                         'https://developer.tradier.com/user/sign_up'
                         ') to fetch pricing data from Tradier. It must be '
                         'set as an environment variable like: '
                         'export TD_TOKEN=<token>')
                get_td_data = False
            else:
                num_tokens += 1
        # sanity check - disable Tradier fetch if the token is not set
        """
        as of Thursday, Jan. 3, 2019:
        https://developer.yahoo.com/yql/
        Important EOL Notice: As of Thursday, Jan. 3, 2019
        the YQL service at query.yahooapis.com will be retired
        """
        get_yahoo_data = False

        if (not get_iex_data and not get_td_data and not get_yahoo_data):
            err = None
            if num_tokens == 0:
                res['status'] = ae_consts.MISSING_TOKEN
                err = (f'Please set a valid IEX_TOKEN or TD_TOKEN '
                       f'environment variable')
            else:
                err = (f'Please set at least one supported datafeed from '
                       f'either: '
                       f'IEX Cloud (fetch -t TICKER -g iex) or '
                       f'Tradier (fetch -t TICKER -g td) '
                       f'for '
                       f'ticker={ticker} '
                       f'cur_date={cur_date} '
                       f'IEX enabled={get_iex_data} '
                       f'TD enabled={get_td_data} '
                       f'YHO enabled={get_yahoo_data}')
                res['status'] = ae_consts.ERR
                res['err'] = err
            return get_task_results.get_task_results(work_dict=work_dict,
                                                     result=res)
        # end of checking that there is at least 1 feed on

        if not exp_date:
            exp_date = opt_dates.option_expiration(date=exp_date)
        else:
            exp_date = datetime.datetime.strptime(exp_date, '%Y-%m-%d')

        rec['updated'] = cur_date.strftime('%Y-%m-%d %H:%M:%S')
        log.debug(f'{label} getting pricing for ticker={ticker} '
                  f'cur_date={cur_date} exp_date={exp_date} '
                  f'IEX={get_iex_data} '
                  f'TD={get_td_data} '
                  f'YHO={get_yahoo_data}')

        yahoo_rec = {
            'ticker': ticker,
            'pricing': None,
            'options': None,
            'calls': None,
            'puts': None,
            'news': None,
            'exp_date': None,
            'publish_pricing_update': None,
            'date': None,
            'updated': None
        }

        # disabled on 2019-01-03
        if get_yahoo_data:
            log.debug(f'{label} YHO ticker={ticker}')
            yahoo_res = yahoo_data.get_data_from_yahoo(work_dict=work_dict)
            status_str = ae_consts.get_status(status=yahoo_res['status'])
            if yahoo_res['status'] == ae_consts.SUCCESS:
                yahoo_rec = yahoo_res['rec']
                msg = (f'{label} YHO ticker={ticker} '
                       f'status={status_str} err={yahoo_res["err"]}')
                if ae_consts.ev('SHOW_SUCCESS', '0') == '1':
                    log.info(msg)
                else:
                    log.debug(msg)
                rec['pricing'] = yahoo_rec.get('pricing', '{}')
                rec['news'] = yahoo_rec.get('news', '{}')
                rec['options'] = yahoo_rec.get('options', '{}')
                rec['calls'] = rec['options'].get('calls',
                                                  ae_consts.EMPTY_DF_STR)
                rec['puts'] = rec['options'].get('puts',
                                                 ae_consts.EMPTY_DF_STR)
                num_success += 1
            else:
                log.error(f'{label} failed YHO ticker={ticker} '
                          f'status={status_str} err={yahoo_res["err"]}')
        # end of get from yahoo

        if get_iex_data:
            num_iex_ds = len(iex_datasets)
            log.debug(f'{label} IEX datasets={num_iex_ds}')
            for idx, ft_type in enumerate(iex_datasets):
                dataset_field = iex_consts.get_ft_str(ft_type=ft_type)

                log.debug(f'{label} IEX={idx}/{num_iex_ds} '
                          f'field={dataset_field} ticker={ticker}')
                iex_label = f'{label}-{dataset_field}'
                iex_req = copy.deepcopy(work_dict)
                iex_req['label'] = iex_label
                iex_req['ft_type'] = ft_type
                iex_req['field'] = dataset_field
                iex_req['ticker'] = ticker
                iex_res = iex_data.get_data_from_iex(work_dict=iex_req)

                status_str = (ae_consts.get_status(status=iex_res['status']))
                if iex_res['status'] == ae_consts.SUCCESS:
                    iex_rec = iex_res['rec']
                    msg = (f'{label} IEX ticker={ticker} '
                           f'field={dataset_field} '
                           f'status={status_str} '
                           f'err={iex_res["err"]}')
                    if ae_consts.ev('SHOW_SUCCESS', '0') == '1':
                        log.info(msg)
                    else:
                        log.debug(msg)
                    if dataset_field == 'news':
                        rec['iex_news'] = iex_rec['data']
                    else:
                        rec[dataset_field] = iex_rec['data']
                    num_success += 1
                else:
                    log.debug(f'{label} failed IEX ticker={ticker} '
                              f'field={dataset_field} '
                              f'status={status_str} err={iex_res["err"]}')
                # end of if/else success
            # end idx, ft_type in enumerate(iex_datasets):
        # end of if get_iex_data

        if get_td_data:
            num_td_ds = len(td_datasets)
            log.debug(f'{label} TD datasets={num_td_ds}')

            for idx, ft_type in enumerate(td_datasets):
                dataset_field = td_consts.get_ft_str_td(ft_type=ft_type)
                log.debug(f'{label} TD={idx}/{num_td_ds} '
                          f'field={dataset_field} ticker={ticker}')
                td_label = (f'{label}-{dataset_field}')
                td_req = copy.deepcopy(work_dict)
                td_req['label'] = td_label
                td_req['ft_type'] = ft_type
                td_req['field'] = dataset_field
                td_req['ticker'] = ticker
                td_res = td_data.get_data_from_td(work_dict=td_req)

                status_str = (ae_consts.get_status(status=td_res['status']))
                if td_res['status'] == ae_consts.SUCCESS:
                    td_rec = td_res['rec']
                    msg = (f'{label} TD ticker={ticker} '
                           f'field={dataset_field} '
                           f'status={status_str} '
                           f'err={td_res["err"]}')
                    if ae_consts.ev('SHOW_SUCCESS', '0') == '1':
                        log.info(msg)
                    else:
                        log.debug(msg)
                    if dataset_field == 'tdcalls':
                        rec['tdcalls'] = td_rec['data']
                    elif dataset_field == 'tdputs':
                        rec['tdputs'] = td_rec['data']
                    else:
                        rec[dataset_field] = td_rec['data']
                    num_success += 1
                else:
                    log.critical(f'{label} failed TD ticker={ticker} '
                                 f'field={dataset_field} '
                                 f'status={status_str} err={td_res["err"]}')
                # end of if/else success
            # end idx, ft_type in enumerate(td_datasets):
        # end of if get_td_data

        rec['num_success'] = num_success

        update_req = {'data': rec}
        update_req['ticker'] = ticker
        update_req['ticker_id'] = ticker_id
        update_req['strike'] = cur_strike
        update_req['contract'] = contract_type
        update_req['s3_enabled'] = work_dict.get('s3_enabled',
                                                 ae_consts.ENABLED_S3_UPLOAD)
        update_req['redis_enabled'] = work_dict.get(
            'redis_enabled', ae_consts.ENABLED_REDIS_PUBLISH)
        update_req['s3_bucket'] = s3_bucket
        update_req['s3_key'] = s3_key
        update_req['s3_access_key'] = work_dict.get('s3_access_key',
                                                    ae_consts.S3_ACCESS_KEY)
        update_req['s3_secret_key'] = work_dict.get('s3_secret_key',
                                                    ae_consts.S3_SECRET_KEY)
        update_req['s3_region_name'] = work_dict.get('s3_region_name',
                                                     ae_consts.S3_REGION_NAME)
        update_req['s3_address'] = work_dict.get('s3_address',
                                                 ae_consts.S3_ADDRESS)
        update_req['s3_secure'] = work_dict.get('s3_secure',
                                                ae_consts.S3_SECURE)
        update_req['redis_key'] = redis_key
        update_req['redis_address'] = work_dict.get('redis_address',
                                                    ae_consts.REDIS_ADDRESS)
        update_req['redis_password'] = work_dict.get('redis_password',
                                                     ae_consts.REDIS_PASSWORD)
        update_req['redis_db'] = int(
            work_dict.get('redis_db', ae_consts.REDIS_DB))
        update_req['redis_expire'] = work_dict.get('redis_expire',
                                                   ae_consts.REDIS_EXPIRE)
        update_req['updated'] = rec['updated']
        update_req['label'] = label
        update_req['celery_disabled'] = True
        update_status = ae_consts.NOT_SET

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=update_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            status_str = ae_consts.get_status(status=update_status)
            if ae_consts.ev('DEBUG_RESULTS', '0') == '1':
                log.debug(f'{label} update_res '
                          f'status={status_str} '
                          f'data={ae_consts.ppj(update_res)}')
            else:
                log.debug(f'{label} run_publish_pricing_update '
                          f'status={status_str}')
            # end of if/else

            rec['publish_pricing_update'] = update_res
            res = build_result.build_result(status=ae_consts.SUCCESS,
                                            err=None,
                                            rec=rec)
        except Exception as f:
            err = (f'{label} publisher.run_publish_pricing_update failed '
                   f'with ex={f}')
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
        # end of trying to publish results to connected services

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=('failed - get_new_pricing_data '
                                             f'dict={work_dict} with ex={e}'),
                                        rec=rec)
        log.error(f'{label} - {res["err"]}')
    # end of try/ex

    if ae_consts.ev('DATASET_COLLECTION_SLACK_ALERTS', '0') == '1':
        env_name = 'DEV'
        if ae_consts.ev('PROD_SLACK_ALERTS', '1') == '1':
            env_name = 'PROD'
        done_msg = (f'Dataset collected ticker=*{ticker}* on '
                    f'env=*{env_name}* '
                    f'redis_key={redis_key} s3_key={s3_key} '
                    f'IEX={get_iex_data} '
                    f'TD={get_td_data} '
                    f'YHO={get_yahoo_data}')
        log.debug(f'{label} sending slack msg={done_msg}')
        if res['status'] == ae_consts.SUCCESS:
            slack_utils.post_success(msg=done_msg, block=False, jupyter=True)
        else:
            slack_utils.post_failure(msg=done_msg, block=False, jupyter=True)
        # end of if/else success
    # end of publishing to slack

    log.debug('task - get_new_pricing_data done - '
              f'{label} - status={ae_consts.get_status(res["status"])}')

    return get_task_results.get_task_results(work_dict=work_dict, result=res)
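# Hypothetical sketch of a work_dict for the ``get_new_pricing_data`` task
# above (not from the source). The keys mirror the ``work_dict.get``
# lookups in the task body; anything left unset falls back to the
# ae_consts defaults (including IEX_TOKEN/TD_TOKEN from the environment).
def _demo_pricing_work_dict():
    return {
        'ticker': 'SPY',
        'label': 'demo-fetch',
        'fetch_mode': 'iex_min,iex_quote',  # custom comma-separated mode
        's3_bucket': 'pricing',
        's3_key': 'SPY_latest',
        'redis_key': 'SPY_latest',
        's3_enabled': True,
        'redis_enabled': True
    }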
def get_new_pricing_data(self, work_dict):
    """get_new_pricing_data

    Get Ticker information on:

    - prices - turn off with ``work_dict['get_pricing'] = False``
    - news - turn off with ``work_dict['get_news'] = False``
    - options - turn off with ``work_dict['get_options'] = False``

    :param work_dict: dictionary for key/values
    """

    label = 'get_new_pricing_data'

    log.info('task - {} - start ' 'work_dict={}'.format(label, work_dict))

    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    rec = {
        'pricing': None,
        'options': None,
        'calls': None,
        'puts': None,
        'news': None,
        'daily': None,
        'minute': None,
        'quote': None,
        'stats': None,
        'peers': None,
        'iex_news': None,
        'financials': None,
        'earnings': None,
        'dividends': None,
        'company': None,
        'exp_date': None,
        'publish_pricing_update': None,
        'date': ae_utils.utc_now_str(),
        'updated': None,
        'version': ae_consts.DATASET_COLLECTION_VERSION
    }
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    try:
        ticker = work_dict.get('ticker', ticker)
        ticker_id = work_dict.get('ticker_id', ae_consts.TICKER_ID)
        s3_bucket = work_dict.get('s3_bucket', ae_consts.S3_BUCKET)
        s3_key = work_dict.get('s3_key', ae_consts.S3_KEY)
        redis_key = work_dict.get('redis_key', ae_consts.REDIS_KEY)
        exp_date = work_dict.get('exp_date', None)
        cur_date = datetime.datetime.utcnow()
        cur_strike = work_dict.get('strike', None)
        contract_type = str(work_dict.get('contract', 'C')).upper()
        label = work_dict.get('label', label)
        iex_datasets = work_dict.get('iex_datasets',
                                     iex_consts.DEFAULT_FETCH_DATASETS)
        td_datasets = work_dict.get('td_datasets',
                                    td_consts.DEFAULT_FETCH_DATASETS_TD)
        fetch_mode = work_dict.get('fetch_mode', ae_consts.FETCH_MODE_ALL)

        # control flags to deal with feed issues:
        get_iex_data = True
        get_td_data = True

        if (fetch_mode == ae_consts.FETCH_MODE_ALL
                or str(fetch_mode).lower() == 'all'):
            get_iex_data = True
            get_td_data = True
        elif (fetch_mode == ae_consts.FETCH_MODE_YHO
              or str(fetch_mode).lower() == 'yahoo'):
            get_iex_data = False
            get_td_data = False
        elif (fetch_mode == ae_consts.FETCH_MODE_IEX
              or str(fetch_mode).lower() == 'iex'):
            get_iex_data = True
            get_td_data = False
        elif (fetch_mode == ae_consts.FETCH_MODE_TD
              or str(fetch_mode).lower() == 'td'):
            get_iex_data = False
            get_td_data = True
        else:
            log.debug('{} - unsupported fetch_mode={} value'.format(
                label, fetch_mode))
        """
        as of Thursday, Jan. 3, 2019:
        https://developer.yahoo.com/yql/
        Important EOL Notice: As of Thursday, Jan. 3, 2019
        the YQL service at query.yahooapis.com will be retired
        """
        get_yahoo_data = False

        if not exp_date:
            exp_date = opt_dates.option_expiration(date=exp_date)
        else:
            exp_date = datetime.datetime.strptime(exp_date, '%Y-%m-%d')

        rec['updated'] = cur_date.strftime('%Y-%m-%d %H:%M:%S')
        log.info('{} getting pricing for ticker={} '
                 'cur_date={} exp_date={} '
                 'yahoo={} iex={}'.format(label, ticker, cur_date, exp_date,
                                          get_yahoo_data, get_iex_data))

        yahoo_rec = {
            'ticker': ticker,
            'pricing': None,
            'options': None,
            'calls': None,
            'puts': None,
            'news': None,
            'exp_date': None,
            'publish_pricing_update': None,
            'date': None,
            'updated': None
        }

        # disabled on 2019-01-03
        if get_yahoo_data:
            log.info('{} yahoo ticker={}'.format(label, ticker))
            yahoo_res = yahoo_data.get_data_from_yahoo(work_dict=work_dict)
            if yahoo_res['status'] == ae_consts.SUCCESS:
                yahoo_rec = yahoo_res['rec']
                log.info('{} yahoo ticker={} '
                         'status={} err={}'.format(
                             label, ticker,
                             ae_consts.get_status(status=yahoo_res['status']),
                             yahoo_res['err']))
                rec['pricing'] = yahoo_rec.get('pricing', '{}')
                rec['news'] = yahoo_rec.get('news', '{}')
                rec['options'] = yahoo_rec.get('options', '{}')
                rec['calls'] = rec['options'].get('calls',
                                                  ae_consts.EMPTY_DF_STR)
                rec['puts'] = rec['options'].get('puts',
                                                 ae_consts.EMPTY_DF_STR)
            else:
                log.error('{} failed YAHOO ticker={} '
                          'status={} err={}'.format(
                              label, ticker,
                              ae_consts.get_status(status=yahoo_res['status']),
                              yahoo_res['err']))
        # end of get from yahoo

        if get_iex_data:
            num_iex_ds = len(iex_datasets)
            log.debug('{} iex datasets={}'.format(label, num_iex_ds))
            for idx, ft_type in enumerate(iex_datasets):
                dataset_field = iex_consts.get_ft_str(ft_type=ft_type)

                log.info('{} iex={}/{} field={} ticker={}'.format(
                    label, idx, num_iex_ds, dataset_field, ticker))
                iex_label = '{}-{}'.format(label, dataset_field)
                iex_req = copy.deepcopy(work_dict)
                iex_req['label'] = iex_label
                iex_req['ft_type'] = ft_type
                iex_req['field'] = dataset_field
                iex_req['ticker'] = ticker
                iex_res = iex_data.get_data_from_iex(work_dict=iex_req)

                if iex_res['status'] == ae_consts.SUCCESS:
                    iex_rec = iex_res['rec']
                    log.info(
                        '{} iex ticker={} field={} '
                        'status={} err={}'.format(
                            label, ticker, dataset_field,
                            ae_consts.get_status(status=iex_res['status']),
                            iex_res['err']))
                    if dataset_field == 'news':
                        rec['iex_news'] = iex_rec['data']
                    else:
                        rec[dataset_field] = iex_rec['data']
                else:
                    log.debug(
                        '{} failed IEX ticker={} field={} '
                        'status={} err={}'.format(
                            label, ticker, dataset_field,
                            ae_consts.get_status(status=iex_res['status']),
                            iex_res['err']))
                # end of if/else success
            # end idx, ft_type in enumerate(iex_datasets):
        # end of if get_iex_data

        if get_td_data:
            num_td_ds = len(td_datasets)
            log.debug('{} td datasets={}'.format(label, num_td_ds))
            for idx, ft_type in enumerate(td_datasets):
                dataset_field = td_consts.get_ft_str_td(ft_type=ft_type)

                log.info('{} td={}/{} field={} ticker={}'.format(
                    label, idx, num_td_ds, dataset_field, ticker))
                td_label = '{}-{}'.format(label, dataset_field)
                td_req = copy.deepcopy(work_dict)
                td_req['label'] = td_label
                td_req['ft_type'] = ft_type
                td_req['field'] = dataset_field
                td_req['ticker'] = ticker
                td_res = td_data.get_data_from_td(work_dict=td_req)

                if td_res['status'] == ae_consts.SUCCESS:
                    td_rec = td_res['rec']
                    log.info('{} td ticker={} field={} '
                             'status={} err={}'.format(
                                 label, ticker, dataset_field,
                                 ae_consts.get_status(status=td_res['status']),
                                 td_res['err']))
                    if dataset_field == 'tdcalls':
                        rec['tdcalls'] = td_rec['data']
                    elif dataset_field == 'tdputs':
                        rec['tdputs'] = td_rec['data']
                    else:
                        rec[dataset_field] = td_rec['data']
                else:
                    log.critical(
                        '{} failed TD ticker={} field={} '
                        'status={} err={}'.format(
                            label, ticker, dataset_field,
                            ae_consts.get_status(status=td_res['status']),
                            td_res['err']))
                # end of if/else success
            # end idx, ft_type in enumerate(td_datasets):
        # end of if get_td_data

        update_req = {'data': rec}
        update_req['ticker'] = ticker
        update_req['ticker_id'] = ticker_id
        update_req['strike'] = cur_strike
        update_req['contract'] = contract_type
        update_req['s3_enabled'] = work_dict.get('s3_enabled',
                                                 ae_consts.ENABLED_S3_UPLOAD)
        update_req['redis_enabled'] = work_dict.get(
            'redis_enabled', ae_consts.ENABLED_REDIS_PUBLISH)
        update_req['s3_bucket'] = s3_bucket
        update_req['s3_key'] = s3_key
        update_req['s3_access_key'] = work_dict.get('s3_access_key',
                                                    ae_consts.S3_ACCESS_KEY)
        update_req['s3_secret_key'] = work_dict.get('s3_secret_key',
                                                    ae_consts.S3_SECRET_KEY)
        update_req['s3_region_name'] = work_dict.get('s3_region_name',
                                                     ae_consts.S3_REGION_NAME)
        update_req['s3_address'] = work_dict.get('s3_address',
                                                 ae_consts.S3_ADDRESS)
        update_req['s3_secure'] = work_dict.get('s3_secure',
                                                ae_consts.S3_SECURE)
        update_req['redis_key'] = redis_key
        update_req['redis_address'] = work_dict.get('redis_address',
                                                    ae_consts.REDIS_ADDRESS)
        update_req['redis_password'] = work_dict.get('redis_password',
                                                     ae_consts.REDIS_PASSWORD)
        update_req['redis_db'] = int(
            work_dict.get('redis_db', ae_consts.REDIS_DB))
        update_req['redis_expire'] = work_dict.get('redis_expire',
                                                   ae_consts.REDIS_EXPIRE)
        update_req['updated'] = rec['updated']
        update_req['label'] = label
        update_req['celery_disabled'] = True
        update_status = ae_consts.NOT_SET

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=update_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            if ae_consts.ev('DEBUG_RESULTS', '0') == '1':
                log.info('{} update_res status={} data={}'.format(
                    label, ae_consts.get_status(status=update_status),
                    ae_consts.ppj(update_res)))
            else:
                log.info('{} run_publish_pricing_update status={}'.format(
                    label, ae_consts.get_status(status=update_status)))
            # end of if/else

            rec['publish_pricing_update'] = update_res
            res = build_result.build_result(status=ae_consts.SUCCESS,
                                            err=None,
                                            rec=rec)
        except Exception as f:
            err = ('{} publisher.run_publish_pricing_update failed '
                   'with ex={}'.format(label, f))
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
        # end of trying to publish results to connected services

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=('failed - get_new_pricing_data '
                                             'dict={} with ex={}').format(
                                                 work_dict, e),
                                        rec=rec)
        log.error('{} - {}'.format(label, res['err']))
    # end of try/ex

    if ae_consts.ev('DATASET_COLLECTION_SLACK_ALERTS', '0') == '1':
        env_name = 'DEV'
        if ae_consts.ev('PROD_SLACK_ALERTS', '1') == '1':
            env_name = 'PROD'
        done_msg = ('Dataset collected ticker=*{}* on env=*{}* '
                    'redis_key={} s3_key={} iex={} yahoo={}'.format(
                        ticker, env_name, redis_key, s3_key, get_iex_data,
                        get_yahoo_data))
        log.debug('{} sending slack msg={}'.format(label, done_msg))
        if res['status'] == ae_consts.SUCCESS:
            slack_utils.post_success(msg=done_msg, block=False, jupyter=True)
        else:
            slack_utils.post_failure(msg=done_msg, block=False, jupyter=True)
        # end of if/else success
    # end of publishing to slack

    log.info('task - get_new_pricing_data done - '
             '{} - status={}'.format(label,
                                     ae_consts.get_status(res['status'])))

    return get_task_results.get_task_results(work_dict=work_dict, result=res)