def test_success_publish_pricing_data(self):
    """test_success_publish_pricing_data"""
    work = build_publish_pricing_request()
    res = run_publish_pricing_update(work)
    self.assertTrue(res['status'] == SUCCESS)
    self.assertTrue(res['err'] is None)
    self.assertTrue(res['rec'] is not None)
    record = res['rec']
    self.assertEqual(record['ticker'], TICKER)
    self.assertEqual(record['s3_enabled'], False)
    self.assertEqual(record['redis_enabled'], False)
    self.assertEqual(record['s3_bucket'], work['s3_bucket'])
    self.assertEqual(record['s3_key'], work['s3_key'])
    self.assertEqual(record['redis_key'], work['redis_key'])

def test_err_publish_pricing_data(self):
    """test_err_publish_pricing_data"""
    work = build_publish_pricing_request()
    work['ticker'] = None
    res = run_publish_pricing_update(work)
    self.assertTrue(res['status'] == ERR)
    self.assertTrue(res['err'] == 'missing ticker')

def test_success_redis_set(self):
    """test_success_redis_set"""
    work = build_publish_pricing_request()
    work['s3_enabled'] = 0
    work['redis_enabled'] = 1
    work['redis_address'] = REDIS_ADDRESS
    work['redis_db'] = REDIS_DB
    work['redis_key'] = REDIS_KEY
    work['redis_password'] = REDIS_PASSWORD
    work['redis_expire'] = REDIS_EXPIRE
    res = run_publish_pricing_update(work)
    self.assertTrue(res['status'] == SUCCESS)

def test_success_s3_upload(self):
    """test_success_s3_upload"""
    work = build_publish_pricing_request()
    work['s3_enabled'] = 1
    work['redis_enabled'] = 0
    work['s3_access_key'] = S3_ACCESS_KEY
    work['s3_secret_key'] = S3_SECRET_KEY
    work['s3_region_name'] = S3_REGION_NAME
    work['s3_address'] = S3_ADDRESS
    work['s3_secure'] = S3_SECURE
    res = run_publish_pricing_update(work)
    self.assertTrue(res['status'] == SUCCESS)

def test_integration_redis_set(self):
    """test_integration_redis_set"""
    if ev('INT_TESTS', '0') == '1':
        work = build_publish_pricing_request()
        work['s3_enabled'] = 0
        work['redis_enabled'] = 1
        work['redis_address'] = REDIS_ADDRESS
        work['redis_db'] = REDIS_DB
        work['redis_key'] = REDIS_KEY
        work['redis_password'] = REDIS_PASSWORD
        work['redis_expire'] = REDIS_EXPIRE
        work['redis_key'] = 'integration-test-v1'
        work['s3_key'] = 'integration-test-v1'
        res = run_publish_pricing_update(work)
        self.assertTrue(res['status'] == SUCCESS)

def test_integration_s3_upload(self):
    """test_integration_s3_upload"""
    if ev('INT_TESTS', '0') == '1':
        work = build_publish_pricing_request()
        work['s3_enabled'] = 1
        work['redis_enabled'] = 0
        work['s3_access_key'] = S3_ACCESS_KEY
        work['s3_secret_key'] = S3_SECRET_KEY
        work['s3_region_name'] = S3_REGION_NAME
        work['s3_address'] = S3_ADDRESS
        work['s3_secure'] = S3_SECURE
        work['s3_bucket'] = 'integration-tests'
        work['s3_key'] = 'integration-test-v1'
        work['redis_key'] = 'integration-test-v1'
        os.environ.pop('AWS_DEFAULT_PROFILE', None)
        res = run_publish_pricing_update(work)
        self.assertTrue(res['status'] == SUCCESS)
Example #7
def get_data_from_iex(work_dict):
    """get_data_from_iex

    Get data from IEX - this requires an account

    :param work_dict: request dictionary
    """
    label = 'get_data_from_iex'

    log.debug(f'task - {label} - start work_dict={work_dict}')

    rec = {'data': None, 'updated': None}
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    ticker = None
    field = None
    ft_type = None

    try:

        ticker = work_dict.get('ticker', ae_consts.TICKER)
        field = work_dict.get('field', 'daily')
        ft_type = work_dict.get('ft_type', None)
        ft_str = str(ft_type).lower()
        label = work_dict.get('label', label)
        orient = work_dict.get('orient', 'records')
        backfill_date = work_dict.get('backfill_date', None)

        iex_req = None
        if ft_type == iex_consts.FETCH_DAILY or ft_str == 'daily':
            ft_type = iex_consts.FETCH_DAILY
            iex_req = api_requests.build_iex_fetch_daily_request(label=label)
        elif ft_type == iex_consts.FETCH_MINUTE or ft_str == 'minute':
            ft_type = iex_consts.FETCH_MINUTE
            iex_req = api_requests.build_iex_fetch_minute_request(label=label)
        elif ft_type == iex_consts.FETCH_QUOTE or ft_str == 'quote':
            ft_type = iex_consts.FETCH_QUOTE
            iex_req = api_requests.build_iex_fetch_quote_request(label=label)
        elif ft_type == iex_consts.FETCH_STATS or ft_str == 'stats':
            ft_type = iex_consts.FETCH_STATS
            iex_req = api_requests.build_iex_fetch_stats_request(label=label)
        elif ft_type == iex_consts.FETCH_PEERS or ft_str == 'peers':
            ft_type = iex_consts.FETCH_PEERS
            iex_req = api_requests.build_iex_fetch_peers_request(label=label)
        elif ft_type == iex_consts.FETCH_NEWS or ft_str == 'news':
            ft_type = iex_consts.FETCH_NEWS
            iex_req = api_requests.build_iex_fetch_news_request(label=label)
        elif ft_type == iex_consts.FETCH_FINANCIALS or ft_str == 'financials':
            ft_type = iex_consts.FETCH_FINANCIALS
            iex_req = api_requests.build_iex_fetch_financials_request(
                label=label)
        elif ft_type == iex_consts.FETCH_EARNINGS or ft_str == 'earnings':
            ft_type = iex_consts.FETCH_EARNINGS
            iex_req = api_requests.build_iex_fetch_earnings_request(
                label=label)
        elif ft_type == iex_consts.FETCH_DIVIDENDS or ft_str == 'dividends':
            ft_type = iex_consts.FETCH_DIVIDENDS
            iex_req = api_requests.build_iex_fetch_dividends_request(
                label=label)
        elif ft_type == iex_consts.FETCH_COMPANY or ft_str == 'company':
            ft_type = iex_consts.FETCH_COMPANY
            iex_req = api_requests.build_iex_fetch_company_request(label=label)
        else:
            log.error(f'{label} - unsupported ft_type={ft_type} '
                      f'ft_str={ft_str} ticker={ticker}')
            raise NotImplementedError
        # if supported fetch request type

        iex_req['ticker'] = ticker
        clone_keys = [
            'ticker', 's3_address', 's3_bucket', 's3_key', 'redis_address',
            'redis_db', 'redis_password', 'redis_key'
        ]

        for k in clone_keys:
            if k in iex_req:
                iex_req[k] = work_dict.get(k, f'{k}-missing-in-{label}')
        # end of cloning keys

        if not iex_req:
            err = (f'{label} - ticker={ticker} '
                   f'did not build an IEX request '
                   f'for work={work_dict}')
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
            return res
        else:
            log.debug(f'{label} - ticker={ticker} '
                      f'field={field} '
                      f'orient={orient} fetch')
        # if invalid iex request

        df = None
        try:
            if 'from' in work_dict:
                iex_req['from'] = datetime.datetime.strptime(
                    work_dict['from'], '%Y-%m-%d %H:%M:%S')
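            # when backfilling, rebuild the cache keys as
            # {ticker}_{backfill_date}_{field} before fetching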
            if backfill_date:
                iex_req['backfill_date'] = backfill_date
                iex_req['redis_key'] = (f'{ticker}_{backfill_date}_{field}')
                iex_req['s3_key'] = (f'{ticker}_{backfill_date}_{field}')

            if os.getenv('SHOW_SUCCESS', '0') == '1':
                log.info(f'fetching IEX {field} req={iex_req}')
            else:
                log.debug(f'fetching IEX {field} req={iex_req}')

            df = iex_fetch_data.fetch_data(work_dict=iex_req,
                                           fetch_type=ft_type)
            rec['data'] = df.to_json(orient=orient, date_format='iso')
            rec['updated'] = datetime.datetime.utcnow().strftime(
                '%Y-%m-%d %H:%M:%S')
        except Exception as f:
            log.error(f'{label} - ticker={ticker} field={ft_type} '
                      f'failed fetch_data '
                      f'with ex={f}')
        # end of try/ex

        if ae_consts.ev('DEBUG_IEX_DATA', '0') == '1':
            log.debug(f'{label} ticker={ticker} '
                      f'field={field} data={rec["data"]} to_json')
        else:
            log.debug(f'{label} ticker={ticker} field={field} to_json')
        # end of if/else found data

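        # build the publish request from the fetch request, attach the
        # serialized dataframe, and suffix redis_key/s3_key with the dataset
        # field before handing it to the pricing publisher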
        upload_and_cache_req = copy.deepcopy(iex_req)
        upload_and_cache_req['celery_disabled'] = True
        upload_and_cache_req['data'] = rec['data']
        if not upload_and_cache_req['data']:
            upload_and_cache_req['data'] = '{}'
        use_field = field
        if use_field == 'news':
            use_field = 'news1'
        if 'redis_key' in work_dict:
            rk = work_dict.get('redis_key', iex_req['redis_key'])
            if backfill_date:
                rk = f'{ticker}_{backfill_date}'
            upload_and_cache_req['redis_key'] = (f'{rk}_{use_field}')
        if 's3_key' in work_dict:
            sk = work_dict.get('s3_key', iex_req['s3_key'])
            if backfill_date:
                sk = f'{ticker}_{backfill_date}'
            upload_and_cache_req['s3_key'] = (f'{sk}_{use_field}')

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=upload_and_cache_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            log.debug(f'{label} publish update '
                      f'status={ae_consts.get_status(status=update_status)} '
                      f'data={update_res}')
        except Exception:
            err = (f'{label} - failed to upload iex '
                   f'data={upload_and_cache_req} to '
                   f'to s3_key={upload_and_cache_req["s3_key"]} '
                   f'and redis_key={upload_and_cache_req["redis_key"]}')
            log.error(err)
        # end of try/ex to upload and cache

        if not rec['data']:
            log.debug(f'{label} - ticker={ticker} no IEX data '
                      f'field={field} to publish')
        # end of if/else

        res = build_result.build_result(status=ae_consts.SUCCESS,
                                        err=None,
                                        rec=rec)

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=(f'failed - get_data_from_iex '
                                             f'dict={work_dict} with ex={e}'),
                                        rec=rec)
    # end of try/ex

    log.debug(f'task - get_data_from_iex done - '
              f'{label} - '
              f'status={ae_consts.get_status(res["status"])} '
              f'err={res["err"]}')

    return res
Example #8
def get_data_from_td(work_dict):
    """get_data_from_td

    Get pricing data from Tradier

    :param work_dict: request dictionary
    """
    label = 'get_data_from_td'

    log.debug(f'task - {label} - start work_dict={work_dict}')

    rec = {'data': None, 'updated': None}
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    ticker = None
    field = None
    ft_type = None

    try:

        ticker = work_dict.get('ticker', ae_consts.TICKER)
        field = work_dict.get('field', 'daily')
        ft_type = work_dict.get('ft_type', None)
        ft_str = str(ft_type).lower()
        label = work_dict.get('label', label)
        orient = work_dict.get('orient', 'records')

        td_req = None
        if ft_type == td_consts.FETCH_TD_CALLS or ft_str == 'tdcalls':
            ft_type = td_consts.FETCH_TD_CALLS
            td_req = api_requests.build_td_fetch_calls_request(label=label)
        elif ft_type == td_consts.FETCH_TD_PUTS or ft_str == 'tdputs':
            ft_type = td_consts.FETCH_TD_PUTS
            td_req = api_requests.build_td_fetch_puts_request(label=label)
        else:
            log.error(
                f'{label} - unsupported ft_type={ft_type} ft_str={ft_str} '
                f'ticker={ticker}')
            raise NotImplementedError
        # if supported fetch request type

        clone_keys = [
            'ticker', 's3_address', 's3_bucket', 's3_key', 'redis_address',
            'redis_db', 'redis_password', 'redis_key'
        ]

        for k in clone_keys:
            td_req[k] = work_dict.get(k, f'{k}-missing-in-{label}')
        # end of cloning keys

        if not td_req:
            err = (f'{label} - ticker={ticker} did not build a TD '
                   f'request for work={work_dict}')
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
            return res
        else:
            log.debug(f'{label} - ticker={td_req["ticker"]} field={field} '
                      f'orient={orient} fetch')
        # if invalid td request

        df = None
        try:
            if 'from' in work_dict:
                td_req['from'] = datetime.datetime.strptime(
                    work_dict['from'], '%Y-%m-%d %H:%M:%S')
            status_df, df = td_fetch_data.fetch_data(work_dict=td_req,
                                                     fetch_type=ft_type)

            if status_df == ae_consts.SUCCESS:
                rec['data'] = df.to_json(orient=orient)
                rec['updated'] = datetime.datetime.utcnow().strftime(
                    '%Y-%m-%d %H:%M:%S')
            elif status_df == ae_consts.EMPTY:
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err='did not fetch any data',
                    rec=rec)
                return res
            else:
                err = (f'{label} - ticker={td_req["ticker"]} '
                       f'td_fetch_data.fetch_data field={ft_type} '
                       'failed fetch_data')
                log.critical(err)
                res = build_result.build_result(status=ae_consts.ERR,
                                                err=err,
                                                rec=rec)
                return res
        except Exception as f:
            err = (f'{label} - ticker={td_req["ticker"]} field={ft_type} '
                   f'failed fetch_data with ex={f}')
            log.critical(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
            return res
        # end of try/ex

        if ae_consts.ev('DEBUG_TD_DATA', '0') == '1':
            log.debug(f'{label} ticker={td_req["ticker"]} field={field} '
                      f'data={rec["data"]} to_json')
        else:
            log.debug(
                f'{label} ticker={td_req["ticker"]} field={field} to_json')
        # end of if/else found data

        upload_and_cache_req = copy.deepcopy(td_req)
        upload_and_cache_req['celery_disabled'] = True
        upload_and_cache_req['data'] = rec['data']
        if not upload_and_cache_req['data']:
            upload_and_cache_req['data'] = '{}'
        use_field = field
        if use_field == 'news':
            use_field = 'news1'
        if 'redis_key' in work_dict:
            upload_and_cache_req['redis_key'] = (f'''{work_dict.get(
                    'redis_key',
                    td_req['redis_key'])}_'''
                                                 f'{use_field}')
        if 's3_key' in work_dict:
            upload_and_cache_req['s3_key'] = (f'''{work_dict.get(
                    's3_key',
                    td_req['s3_key'])}_'''
                                              f'{use_field}')

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=upload_and_cache_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            log.debug(f'{label} publish update '
                      f'status={ae_consts.get_status(status=update_status)} '
                      f'data={update_res}')
        except Exception:
            err = (
                f'{label} - failed to upload td data={upload_and_cache_req} '
                f'to s3_key={upload_and_cache_req["s3_key"]} and '
                f'redis_key={upload_and_cache_req["redis_key"]}')
            log.error(err)
        # end of try/ex to upload and cache

        if not rec['data']:
            log.debug(f'{label} - ticker={td_req["ticker"]} no Tradier data '
                      f'field={field} to publish')
        # end of if/else

        res = build_result.build_result(status=ae_consts.SUCCESS,
                                        err=None,
                                        rec=rec)

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=('failed - get_data_from_td '
                                             f'dict={work_dict} with ex={e}'),
                                        rec=rec)
    # end of try/ex

    log.debug('task - get_data_from_td done - '
              f'{label} - status={ae_consts.get_status(res["status"])} '
              f'err={res["err"]}')

    return res
def get_new_pricing_data(self, work_dict):
    """get_new_pricing_data

    Get Ticker information on:

    - prices - turn off with ``work_dict['get_pricing'] = False``
    - news - turn off with ``work_dict['get_news'] = False``
    - options - turn off with ``work_dict['get_options'] = False``

    :param work_dict: dictionary for key/values
    """

    label = 'get_new_pricing_data'

    log.debug(f'task - {label} - start work_dict={work_dict}')

    num_success = 0
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    rec = {
        'pricing': None,
        'options': None,
        'calls': None,
        'puts': None,
        'news': None,
        'daily': None,
        'minute': None,
        'quote': None,
        'stats': None,
        'peers': None,
        'iex_news': None,
        'financials': None,
        'earnings': None,
        'dividends': None,
        'company': None,
        'exp_date': None,
        'publish_pricing_update': None,
        'num_success': num_success,
        'date': ae_utils.utc_now_str(),
        'updated': None,
        'version': ae_consts.DATASET_COLLECTION_VERSION
    }
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    try:
        ticker = work_dict.get('ticker', ticker)
        ticker_id = work_dict.get('ticker_id', ae_consts.TICKER_ID)
        s3_bucket = work_dict.get('s3_bucket', ae_consts.S3_BUCKET)
        s3_key = work_dict.get('s3_key', ae_consts.S3_KEY)
        redis_key = work_dict.get('redis_key', ae_consts.REDIS_KEY)
        exp_date = work_dict.get('exp_date', None)
        cur_date = ae_utils.last_close()
        cur_strike = work_dict.get('strike', None)
        contract_type = str(work_dict.get('contract', 'C')).upper()
        label = work_dict.get('label', label)
        iex_datasets = work_dict.get('iex_datasets',
                                     iex_consts.DEFAULT_FETCH_DATASETS)
        td_datasets = work_dict.get('td_datasets',
                                    td_consts.DEFAULT_FETCH_DATASETS_TD)
        fetch_mode = work_dict.get('fetch_mode', ae_consts.FETCH_MODE_ALL)
        iex_token = work_dict.get('iex_token', iex_consts.IEX_TOKEN)
        td_token = work_dict.get('td_token', td_consts.TD_TOKEN)
        str_fetch_mode = str(fetch_mode).lower()

        # control flags to deal with feed issues:
        get_iex_data = True
        get_td_data = True

        if (fetch_mode == ae_consts.FETCH_MODE_ALL
                or str_fetch_mode == 'initial'):
            get_iex_data = True
            get_td_data = True
            iex_datasets = ae_consts.IEX_INITIAL_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_ALL
              or str_fetch_mode == 'all'):
            get_iex_data = True
            get_td_data = True
            iex_datasets = ae_consts.IEX_DATASETS_DEFAULT
        elif (fetch_mode == ae_consts.FETCH_MODE_YHO
              or str_fetch_mode == 'yahoo'):
            get_iex_data = False
            get_td_data = False
        elif (fetch_mode == ae_consts.FETCH_MODE_IEX
              or str_fetch_mode == 'iex-all'):
            get_iex_data = True
            get_td_data = False
            iex_datasets = ae_consts.IEX_DATASETS_DEFAULT
        elif (fetch_mode == ae_consts.FETCH_MODE_IEX
              or str_fetch_mode == 'iex'):
            get_iex_data = True
            get_td_data = False
            iex_datasets = ae_consts.IEX_INTRADAY_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_INTRADAY
              or str_fetch_mode == 'intra'):
            get_iex_data = True
            get_td_data = True
            iex_datasets = ae_consts.IEX_INTRADAY_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_DAILY
              or str_fetch_mode == 'daily'):
            get_iex_data = True
            get_td_data = False
            iex_datasets = ae_consts.IEX_DAILY_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_WEEKLY
              or str_fetch_mode == 'weekly'):
            get_iex_data = True
            get_td_data = False
            iex_datasets = ae_consts.IEX_WEEKLY_DATASETS
        elif (fetch_mode == ae_consts.FETCH_MODE_TD or str_fetch_mode == 'td'):
            get_iex_data = False
            get_td_data = True
        else:
            get_iex_data = False
            get_td_data = False

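            # fetch_mode did not match a named mode - treat it as a
            # comma-separated list of dataset aliases
            # (e.g. 'iex_day,iex_quote,td') and rebuild iex_datasets from it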
            fetch_arr = str_fetch_mode.split(',')
            found_fetch = False
            iex_datasets = []
            for fetch_name in fetch_arr:
                if fetch_name not in iex_datasets:
                    if fetch_name == 'iex_min':
                        iex_datasets.append('minute')
                    elif fetch_name == 'iex_day':
                        iex_datasets.append('daily')
                    elif fetch_name == 'iex_quote':
                        iex_datasets.append('quote')
                    elif fetch_name == 'iex_stats':
                        iex_datasets.append('stats')
                    elif fetch_name == 'iex_peers':
                        iex_datasets.append('peers')
                    elif fetch_name == 'iex_news':
                        iex_datasets.append('news')
                    elif fetch_name == 'iex_fin':
                        iex_datasets.append('financials')
                    elif fetch_name == 'iex_earn':
                        iex_datasets.append('earnings')
                    elif fetch_name == 'iex_div':
                        iex_datasets.append('dividends')
                    elif fetch_name == 'iex_comp':
                        iex_datasets.append('company')
                    elif fetch_name == 'td':
                        get_td_data = True
                    else:
                        log.warn(f'unsupported IEX dataset {fetch_name}')
            found_fetch = (len(iex_datasets) != 0)
            if not found_fetch:
                log.error(f'{label} - unsupported '
                          f'fetch_mode={fetch_mode} value')
            else:
                get_iex_data = True
                log.debug(f'{label} - '
                          f'fetching={len(iex_datasets)} '
                          f'{iex_datasets} '
                          f'fetch_mode={fetch_mode}')
        # end of screening custom fetch_mode settings

        num_tokens = 0

        if get_iex_data:
            if not iex_token:
                log.warn(f'{label} - '
                         'please set a valid IEX Cloud Account token ('
                         'https://iexcloud.io/cloud-login/#/register'
                         ') to fetch data from IEX Cloud. It must be '
                         'set as an environment variable like: '
                         'export IEX_TOKEN=<token>')
                get_iex_data = False
            else:
                num_tokens += 1
        # sanity check - disable IEX fetch if the token is not set
        if get_td_data:
            missing_td_token = [
                'MISSING_TD_TOKEN', 'SETYOURTDTOKEN', 'SETYOURTRADIERTOKENHERE'
            ]
            if td_token in missing_td_token:
                log.warn(f'{label} - '
                         'please set a valid Tradier Account token ('
                         'https://developer.tradier.com/user/sign_up'
                         ') to fetch pricing data from Tradier. It must be '
                         'set as an environment variable like: '
                         'export TD_TOKEN=<token>')
                get_td_data = False
            else:
                num_tokens += 1
        # sanity check - disable Tradier fetch if the token is not set
        """
        as of Thursday, Jan. 3, 2019:
        https://developer.yahoo.com/yql/
        Important EOL Notice: As of Thursday, Jan. 3, 2019
        the YQL service at query.yahooapis.com will be retired
        """
        get_yahoo_data = False

        if (not get_iex_data and not get_td_data and not get_yahoo_data):
            err = None
            if num_tokens == 0:
                res['status'] = ae_consts.MISSING_TOKEN
                err = (f'Please set a valid IEX_TOKEN or TD_TOKEN '
                       f'environment variable')
            else:
                err = (f'Please set at least one supported datafeed from '
                       f'either: '
                       f'IEX Cloud (fetch -t TICKER -g iex) or '
                       f'Tradier (fetch -t TICKER -g td) '
                       f'for '
                       f'ticker={ticker} '
                       f'cur_date={cur_date} '
                       f'IEX enabled={get_iex_data} '
                       f'TD enabled={get_td_data} '
                       f'YHO enabled={get_yahoo_data}')
                res['status'] = ae_consts.ERR
                res['err'] = err
            return get_task_results.get_task_results(work_dict=work_dict,
                                                     result=res)
        # end of checking that there is at least 1 feed on

        if not exp_date:
            exp_date = opt_dates.option_expiration(date=exp_date)
        else:
            exp_date = datetime.datetime.strptime(exp_date, '%Y-%m-%d')

        rec['updated'] = cur_date.strftime('%Y-%m-%d %H:%M:%S')
        log.debug(f'{label} getting pricing for ticker={ticker} '
                  f'cur_date={cur_date} exp_date={exp_date} '
                  f'IEX={get_iex_data} '
                  f'TD={get_td_data} '
                  f'YHO={get_yahoo_data}')

        yahoo_rec = {
            'ticker': ticker,
            'pricing': None,
            'options': None,
            'calls': None,
            'puts': None,
            'news': None,
            'exp_date': None,
            'publish_pricing_update': None,
            'date': None,
            'updated': None
        }

        # disabled on 2019-01-03
        if get_yahoo_data:
            log.debug(f'{label} YHO ticker={ticker}')
            yahoo_res = yahoo_data.get_data_from_yahoo(work_dict=work_dict)
            status_str = ae_consts.get_status(status=yahoo_res['status'])
            if yahoo_res['status'] == ae_consts.SUCCESS:
                yahoo_rec = yahoo_res['rec']
                msg = (f'{label} YHO ticker={ticker} '
                       f'status={status_str} err={yahoo_res["err"]}')
                if ae_consts.ev('SHOW_SUCCESS', '0') == '1':
                    log.info(msg)
                else:
                    log.debug(msg)
                rec['pricing'] = yahoo_rec.get('pricing', '{}')
                rec['news'] = yahoo_rec.get('news', '{}')
                rec['options'] = yahoo_rec.get('options', '{}')
                rec['calls'] = rec['options'].get('calls',
                                                  ae_consts.EMPTY_DF_STR)
                rec['puts'] = rec['options'].get('puts',
                                                 ae_consts.EMPTY_DF_STR)
                num_success += 1
            else:
                log.error(f'{label} failed YHO ticker={ticker} '
                          f'status={status_str} err={yahoo_res["err"]}')
        # end of get from yahoo

        if get_iex_data:
            num_iex_ds = len(iex_datasets)
            log.debug(f'{label} IEX datasets={num_iex_ds}')
            for idx, ft_type in enumerate(iex_datasets):
                dataset_field = iex_consts.get_ft_str(ft_type=ft_type)

                log.debug(f'{label} IEX={idx}/{num_iex_ds} '
                          f'field={dataset_field} ticker={ticker}')
                iex_label = f'{label}-{dataset_field}'
                iex_req = copy.deepcopy(work_dict)
                iex_req['label'] = iex_label
                iex_req['ft_type'] = ft_type
                iex_req['field'] = dataset_field
                iex_req['ticker'] = ticker
                iex_res = iex_data.get_data_from_iex(work_dict=iex_req)

                status_str = (ae_consts.get_status(status=iex_res['status']))
                if iex_res['status'] == ae_consts.SUCCESS:
                    iex_rec = iex_res['rec']
                    msg = (f'{label} IEX ticker={ticker} '
                           f'field={dataset_field} '
                           f'status={status_str} '
                           f'err={iex_res["err"]}')
                    if ae_consts.ev('SHOW_SUCCESS', '0') == '1':
                        log.info(msg)
                    else:
                        log.debug(msg)
                    if dataset_field == 'news':
                        rec['iex_news'] = iex_rec['data']
                    else:
                        rec[dataset_field] = iex_rec['data']
                    num_success += 1
                else:
                    log.debug(f'{label} failed IEX ticker={ticker} '
                              f'field={dataset_field} '
                              f'status={status_str} err={iex_res["err"]}')
                # end of if/else succcess
            # end idx, ft_type in enumerate(iex_datasets):
        # end of if get_iex_data

        if get_td_data:
            num_td_ds = len(td_datasets)
            log.debug(f'{label} TD datasets={num_td_ds}')

            for idx, ft_type in enumerate(td_datasets):
                dataset_field = td_consts.get_ft_str_td(ft_type=ft_type)
                log.debug(f'{label} TD={idx}/{num_td_ds} '
                          f'field={dataset_field} ticker={ticker}')
                td_label = (f'{label}-{dataset_field}')
                td_req = copy.deepcopy(work_dict)
                td_req['label'] = td_label
                td_req['ft_type'] = ft_type
                td_req['field'] = dataset_field
                td_req['ticker'] = ticker
                td_res = td_data.get_data_from_td(work_dict=td_req)

                status_str = (ae_consts.get_status(status=td_res['status']))
                if td_res['status'] == ae_consts.SUCCESS:
                    td_rec = td_res['rec']
                    msg = (f'{label} TD ticker={ticker} '
                           f'field={dataset_field} '
                           f'status={status_str} '
                           f'err={td_res["err"]}')
                    if ae_consts.ev('SHOW_SUCCESS', '0') == '1':
                        log.info(msg)
                    else:
                        log.debug(msg)
                    if dataset_field == 'tdcalls':
                        rec['tdcalls'] = td_rec['data']
                    elif dataset_field == 'tdputs':
                        rec['tdputs'] = td_rec['data']
                    else:
                        rec[dataset_field] = td_rec['data']
                    num_success += 1
                else:
                    log.critical(f'{label} failed TD ticker={ticker} '
                                 f'field={dataset_field} '
                                 f'status={status_str} err={td_res["err"]}')
                # end of if/else succcess
            # end idx, ft_type in enumerate(td_datasets):
        # end of if get_td_data

        rec['num_success'] = num_success

        update_req = {'data': rec}
        update_req['ticker'] = ticker
        update_req['ticker_id'] = ticker_id
        update_req['strike'] = cur_strike
        update_req['contract'] = contract_type
        update_req['s3_enabled'] = work_dict.get('s3_enabled',
                                                 ae_consts.ENABLED_S3_UPLOAD)
        update_req['redis_enabled'] = work_dict.get(
            'redis_enabled', ae_consts.ENABLED_REDIS_PUBLISH)
        update_req['s3_bucket'] = s3_bucket
        update_req['s3_key'] = s3_key
        update_req['s3_access_key'] = work_dict.get('s3_access_key',
                                                    ae_consts.S3_ACCESS_KEY)
        update_req['s3_secret_key'] = work_dict.get('s3_secret_key',
                                                    ae_consts.S3_SECRET_KEY)
        update_req['s3_region_name'] = work_dict.get('s3_region_name',
                                                     ae_consts.S3_REGION_NAME)
        update_req['s3_address'] = work_dict.get('s3_address',
                                                 ae_consts.S3_ADDRESS)
        update_req['s3_secure'] = work_dict.get('s3_secure',
                                                ae_consts.S3_SECURE)
        update_req['redis_key'] = redis_key
        update_req['redis_address'] = work_dict.get('redis_address',
                                                    ae_consts.REDIS_ADDRESS)
        update_req['redis_password'] = work_dict.get('redis_password',
                                                     ae_consts.REDIS_PASSWORD)
        update_req['redis_db'] = int(
            work_dict.get('redis_db', ae_consts.REDIS_DB))
        update_req['redis_expire'] = work_dict.get('redis_expire',
                                                   ae_consts.REDIS_EXPIRE)
        update_req['updated'] = rec['updated']
        update_req['label'] = label
        update_req['celery_disabled'] = True
        update_status = ae_consts.NOT_SET

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=update_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            status_str = ae_consts.get_status(status=update_status)
            if ae_consts.ev('DEBUG_RESULTS', '0') == '1':
                log.debug(f'{label} update_res '
                          f'status={status_str} '
                          f'data={ae_consts.ppj(update_res)}')
            else:
                log.debug(f'{label} run_publish_pricing_update '
                          f'status={status_str}')
            # end of if/else

            rec['publish_pricing_update'] = update_res
            res = build_result.build_result(status=ae_consts.SUCCESS,
                                            err=None,
                                            rec=rec)
        except Exception as f:
            err = (f'{label} publisher.run_publish_pricing_update failed '
                   f'with ex={f}')
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
        # end of trying to publish results to connected services

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=('failed - get_new_pricing_data '
                                             f'dict={work_dict} with ex={e}'),
                                        rec=rec)
        log.error(f'{label} - {res["err"]}')
    # end of try/ex

    if ae_consts.ev('DATASET_COLLECTION_SLACK_ALERTS', '0') == '1':
        env_name = 'DEV'
        if ae_consts.ev('PROD_SLACK_ALERTS', '1') == '1':
            env_name = 'PROD'
        done_msg = (f'Dataset collected ticker=*{ticker}* on '
                    f'env=*{env_name}* '
                    f'redis_key={redis_key} s3_key={s3_key} '
                    f'IEX={get_iex_data} '
                    f'TD={get_td_data} '
                    f'YHO={get_yahoo_data}')
        log.debug(f'{label} sending slack msg={done_msg}')
        if res['status'] == ae_consts.SUCCESS:
            slack_utils.post_success(msg=done_msg, block=False, jupyter=True)
        else:
            slack_utils.post_failure(msg=done_msg, block=False, jupyter=True)
        # end of if/else success
    # end of publishing to slack

    log.debug('task - get_new_pricing_data done - '
              f'{label} - status={ae_consts.get_status(res["status"])}')

    return get_task_results.get_task_results(work_dict=work_dict, result=res)
def get_data_from_yahoo(work_dict):
    """get_data_from_yahoo

    Get data from yahoo

    :param work_dict: request dictionary
    """
    label = 'get_data_from_yahoo'

    log.info('task - {} - start ' 'work_dict={}'.format(label, work_dict))

    num_news_rec = 0
    num_option_calls = 0
    num_option_puts = 0
    cur_high = -1
    cur_low = -1
    cur_open = -1
    cur_close = -1
    cur_volume = -1

    rec = {
        'pricing': None,
        'options': None,
        'calls': None,
        'puts': None,
        'news': None,
        'exp_date': None,
        'publish_pricing_update': None,
        'date': None,
        'updated': None
    }
    res = {'status': NOT_RUN, 'err': None, 'rec': rec}

    try:

        ticker = work_dict.get('ticker', TICKER)
        exp_date = work_dict.get('exp_date', None)
        cur_strike = work_dict.get('strike', None)
        contract_type = str(work_dict.get('contract', 'C')).upper()
        get_pricing = work_dict.get('get_pricing', True)
        get_news = work_dict.get('get_news', True)
        get_options = work_dict.get('get_options', True)
        orient = work_dict.get('orient', 'records')
        label = work_dict.get('label', label)

        ticker_results = pinance.Pinance(ticker)
        num_news_rec = 0

        use_date = exp_date
        if not exp_date:
            exp_date = opt_dates.option_expiration(date=exp_date)
            use_date = exp_date.strftime('%Y-%m-%d')
        """
        Debug control flags

        Quickly turn specific fetches off:

        get_news = False
        get_pricing = False
        get_options = False

        """
        if get_pricing:
            log.info('{} getting ticker={} pricing'.format(label, ticker))
            ticker_results.get_quotes()
            if ticker_results.quotes_data:
                pricing_dict = ticker_results.quotes_data

                cur_high = pricing_dict.get('regularMarketDayHigh', None)
                cur_low = pricing_dict.get('regularMarketDayLow', None)
                cur_open = pricing_dict.get('regularMarketOpen', None)
                cur_close = pricing_dict.get('regularMarketPreviousClose',
                                             None)
                cur_volume = pricing_dict.get('regularMarketVolume', None)
                pricing_dict['high'] = cur_high
                pricing_dict['low'] = cur_low
                pricing_dict['open'] = cur_open
                pricing_dict['close'] = cur_close
                pricing_dict['volume'] = cur_volume
                pricing_dict['date'] = get_last_close_str()
                if 'regularMarketTime' in pricing_dict:
                    pricing_dict['market_time'] = \
                        datetime.datetime.fromtimestamp(
                            pricing_dict['regularMarketTime']).strftime(
                                COMMON_TICK_DATE_FORMAT)
                if 'postMarketTime' in pricing_dict:
                    pricing_dict['post_market_time'] = \
                        datetime.datetime.fromtimestamp(
                            pricing_dict['postMarketTime']).strftime(
                                COMMON_TICK_DATE_FORMAT)

                log.info('{} ticker={} converting pricing to '
                         'df orient={}'.format(label, ticker, orient))

                try:
                    rec['pricing'] = pricing_dict
                except Exception as f:
                    rec['pricing'] = '{}'
                    log.info('{} ticker={} failed converting pricing '
                             'data={} to df ex={}'.format(
                                 label, ticker, ppj(pricing_dict), f))
                # try/ex

                log.info('{} ticker={} done converting pricing to '
                         'df orient={}'.format(label, ticker, orient))

            else:
                log.error('{} ticker={} missing quotes_data={}'.format(
                    label, ticker, ticker_results.quotes_data))
            # end of if ticker_results.quotes_data

            log.info('{} ticker={} close={} vol={}'.format(
                label, ticker, cur_close, cur_volume))
        else:
            log.info('{} skip - getting ticker={} pricing'.format(
                label, ticker))
        # if get_pricing

        if get_news:
            log.info('{} getting ticker={} news'.format(label, ticker))
            ticker_results.get_news()
            if ticker_results.news_data:
                news_list = None
                try:
                    news_list = ticker_results.news_data
                    log.info('{} ticker={} converting news to '
                             'df orient={}'.format(label, ticker, orient))

                    num_news_rec = len(news_list)

                    rec['news'] = news_list
                except Exception as f:
                    rec['news'] = '{}'
                    log.info('{} ticker={} failed converting news '
                             'data={} to df ex={}'.format(
                                 label, ticker, news_list, f))
                # try/ex

                log.info('{} ticker={} done converting news to '
                         'df orient={}'.format(label, ticker, orient))
            else:
                log.info('{} ticker={} Yahoo NO news={}'.format(
                    label, ticker, ticker_results.news_data))
            # end of if ticker_results.news_data
        else:
            log.info('{} skip - getting ticker={} news'.format(label, ticker))
        # end if get_news

        if get_options:

            get_all_strikes = True
            if get_all_strikes:
                cur_strike = None
            else:
                if cur_close:
                    cur_strike = int(cur_close)
                if not cur_strike:
                    cur_strike = 287

            log.info('{} ticker={} num_news={} get options close={} '
                     'exp_date={} contract={} strike={}'.format(
                         label, ticker, num_news_rec, cur_close, use_date,
                         contract_type, cur_strike))

            options_dict = \
                yahoo_get_pricing.get_options(
                    ticker=ticker,
                    exp_date_str=use_date,
                    contract_type=contract_type,
                    strike=cur_strike)

            rec['options'] = '{}'

            try:
                log.info('{} ticker={} converting options to '
                         'df orient={}'.format(label, ticker, orient))

                num_option_calls = options_dict.get('num_calls', None)
                num_option_puts = options_dict.get('num_puts', None)
                rec['options'] = {
                    'exp_date': options_dict.get('exp_date', None),
                    'calls': options_dict.get('calls', None),
                    'puts': options_dict.get('puts', None),
                    'num_calls': num_option_calls,
                    'num_puts': num_option_puts
                }
                rec['calls'] = rec['options'].get('calls', EMPTY_DF_STR)
                rec['puts'] = rec['options'].get('puts', EMPTY_DF_STR)
            except Exception as f:
                rec['options'] = '{}'
                log.info('{} ticker={} failed converting options '
                         'data={} to df ex={}'.format(label, ticker,
                                                      options_dict, f))
            # try/ex

            log.info('{} ticker={} done converting options to '
                     'df orient={} num_calls={} num_puts={}'.format(
                         label, ticker, orient, num_option_calls,
                         num_option_puts))

        else:
            log.info('{} skip - getting ticker={} options'.format(
                label, ticker))
        # end of if get_options

        log.info('{} yahoo pricing for ticker={} close={} '
                 'num_calls={} num_puts={} news={}'.format(
                     label, ticker, cur_close, num_option_calls,
                     num_option_puts, num_news_rec))

        fields_to_upload = ['pricing', 'options', 'calls', 'puts', 'news']

        for field_name in fields_to_upload:
            upload_and_cache_req = copy.deepcopy(work_dict)
            upload_and_cache_req['celery_disabled'] = True
            upload_and_cache_req['data'] = rec[field_name]
            if not upload_and_cache_req['data']:
                upload_and_cache_req['data'] = '{}'

            if 'redis_key' in work_dict:
                upload_and_cache_req['redis_key'] = '{}_{}'.format(
                    work_dict.get('redis_key',
                                  '{}_{}'.format(ticker, field_name)),
                    field_name)
            if 's3_key' in work_dict:
                upload_and_cache_req['s3_key'] = '{}_{}'.format(
                    work_dict.get('s3_key', '{}_{}'.format(ticker,
                                                           field_name)),
                    field_name)
            try:
                update_res = publisher.run_publish_pricing_update(
                    work_dict=upload_and_cache_req)
                update_status = update_res.get('status', NOT_SET)
                log.info('{} publish update status={} data={}'.format(
                    label, get_status(status=update_status), update_res))
            except Exception as f:
                err = ('{} - failed to upload YAHOO data={} to '
                       'to s3_key={} and redis_key={}'.format(
                           label, upload_and_cache_req,
                           upload_and_cache_req['s3_key'],
                           upload_and_cache_req['redis_key']))
                log.error(err)
            # end of try/ex to upload and cache
            if not rec[field_name]:
                log.debug('{} - ticker={} no data from YAHOO for '
                          'field_name={}'.format(label, ticker, field_name))
        # end of for all fields

        res = build_result.build_result(status=SUCCESS, err=None, rec=rec)
    except Exception as e:
        res = build_result.build_result(status=ERR,
                                        err=('failed - get_data_from_yahoo '
                                             'dict={} with ex={}').format(
                                                 work_dict, e),
                                        rec=rec)
        log.error('{} - {}'.format(label, res['err']))
    # end of try/ex

    log.info('task - get_data_from_yahoo done - '
             '{} - status={}'.format(label, get_status(res['status'])))

    return res
def get_new_pricing_data(self, work_dict):
    """get_new_pricing_data

    Get Ticker information on:

    - prices - turn off with ``work_dict['get_pricing'] = False``
    - news - turn off with ``work_dict['get_news'] = False``
    - options - turn off with ``work_dict['get_options'] = False``

    :param work_dict: dictionary for key/values
    """

    label = 'get_new_pricing_data'

    log.info('task - {} - start ' 'work_dict={}'.format(label, work_dict))

    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    rec = {
        'pricing': None,
        'options': None,
        'calls': None,
        'puts': None,
        'news': None,
        'daily': None,
        'minute': None,
        'quote': None,
        'stats': None,
        'peers': None,
        'iex_news': None,
        'financials': None,
        'earnings': None,
        'dividends': None,
        'company': None,
        'exp_date': None,
        'publish_pricing_update': None,
        'date': ae_utils.utc_now_str(),
        'updated': None,
        'version': ae_consts.DATASET_COLLECTION_VERSION
    }
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    try:
        ticker = work_dict.get('ticker', ticker)
        ticker_id = work_dict.get('ticker_id', ae_consts.TICKER_ID)
        s3_bucket = work_dict.get('s3_bucket', ae_consts.S3_BUCKET)
        s3_key = work_dict.get('s3_key', ae_consts.S3_KEY)
        redis_key = work_dict.get('redis_key', ae_consts.REDIS_KEY)
        exp_date = work_dict.get('exp_date', None)
        cur_date = datetime.datetime.utcnow()
        cur_strike = work_dict.get('strike', None)
        contract_type = str(work_dict.get('contract', 'C')).upper()
        label = work_dict.get('label', label)
        iex_datasets = work_dict.get('iex_datasets',
                                     iex_consts.DEFAULT_FETCH_DATASETS)
        td_datasets = work_dict.get('td_datasets',
                                    td_consts.DEFAULT_FETCH_DATASETS_TD)
        fetch_mode = work_dict.get('fetch_mode', ae_consts.FETCH_MODE_ALL)

        # control flags to deal with feed issues:
        get_iex_data = True
        get_td_data = True

        if (fetch_mode == ae_consts.FETCH_MODE_ALL
                or str(fetch_mode).lower() == 'all'):
            get_iex_data = True
            get_td_data = True
        elif (fetch_mode == ae_consts.FETCH_MODE_YHO
              or str(fetch_mode).lower() == 'yahoo'):
            get_iex_data = False
            get_td_data = False
        elif (fetch_mode == ae_consts.FETCH_MODE_IEX
              or str(fetch_mode).lower() == 'iex'):
            get_iex_data = True
            get_td_data = False
        elif (fetch_mode == ae_consts.FETCH_MODE_TD
              or str(fetch_mode).lower() == 'td'):
            get_iex_data = False
            get_td_data = True
        else:
            log.debug('{} - unsupported fetch_mode={} value'.format(
                label, fetch_mode))
        """
        as of Thursday, Jan. 3, 2019:
        https://developer.yahoo.com/yql/
        Important EOL Notice: As of Thursday, Jan. 3, 2019
        the YQL service at query.yahooapis.com will be retired
        """
        get_yahoo_data = False

        if not exp_date:
            exp_date = opt_dates.option_expiration(date=exp_date)
        else:
            exp_date = datetime.datetime.strptime(exp_date, '%Y-%m-%d')

        rec['updated'] = cur_date.strftime('%Y-%m-%d %H:%M:%S')
        log.info('{} getting pricing for ticker={} '
                 'cur_date={} exp_date={} '
                 'yahoo={} iex={}'.format(label, ticker, cur_date, exp_date,
                                          get_yahoo_data, get_iex_data))

        yahoo_rec = {
            'ticker': ticker,
            'pricing': None,
            'options': None,
            'calls': None,
            'puts': None,
            'news': None,
            'exp_date': None,
            'publish_pricing_update': None,
            'date': None,
            'updated': None
        }

        # disabled on 2019-01-03
        if get_yahoo_data:
            log.info('{} yahoo ticker={}'.format(label, ticker))
            yahoo_res = yahoo_data.get_data_from_yahoo(work_dict=work_dict)
            if yahoo_res['status'] == ae_consts.SUCCESS:
                yahoo_rec = yahoo_res['rec']
                log.info('{} yahoo ticker={} '
                         'status={} err={}'.format(
                             label, ticker,
                             ae_consts.get_status(status=yahoo_res['status']),
                             yahoo_res['err']))
                rec['pricing'] = yahoo_rec.get('pricing', '{}')
                rec['news'] = yahoo_rec.get('news', '{}')
                rec['options'] = yahoo_rec.get('options', '{}')
                rec['calls'] = rec['options'].get('calls',
                                                  ae_consts.EMPTY_DF_STR)
                rec['puts'] = rec['options'].get('puts',
                                                 ae_consts.EMPTY_DF_STR)
            else:
                log.error('{} failed YAHOO ticker={} '
                          'status={} err={}'.format(
                              label, ticker,
                              ae_consts.get_status(status=yahoo_res['status']),
                              yahoo_res['err']))
        # end of get from yahoo

        if get_iex_data:
            num_iex_ds = len(iex_datasets)
            log.debug('{} iex datasets={}'.format(label, num_iex_ds))
            for idx, ft_type in enumerate(iex_datasets):
                dataset_field = iex_consts.get_ft_str(ft_type=ft_type)

                log.info('{} iex={}/{} field={} ticker={}'.format(
                    label, idx, num_iex_ds, dataset_field, ticker))
                iex_label = '{}-{}'.format(label, dataset_field)
                iex_req = copy.deepcopy(work_dict)
                iex_req['label'] = iex_label
                iex_req['ft_type'] = ft_type
                iex_req['field'] = dataset_field
                iex_req['ticker'] = ticker
                iex_res = iex_data.get_data_from_iex(work_dict=iex_req)

                if iex_res['status'] == ae_consts.SUCCESS:
                    iex_rec = iex_res['rec']
                    log.info(
                        '{} iex ticker={} field={} '
                        'status={} err={}'.format(
                            label, ticker, dataset_field,
                            ae_consts.get_status(status=iex_res['status']),
                            iex_res['err']))
                    if dataset_field == 'news':
                        rec['iex_news'] = iex_rec['data']
                    else:
                        rec[dataset_field] = iex_rec['data']
                else:
                    log.debug(
                        '{} failed IEX ticker={} field={} '
                        'status={} err={}'.format(
                            label, ticker, dataset_field,
                            ae_consts.get_status(status=iex_res['status']),
                            iex_res['err']))
                # end of if/else succcess
            # end idx, ft_type in enumerate(iex_datasets):
        # end of if get_iex_data

        if get_td_data:
            num_td_ds = len(td_datasets)
            log.debug('{} td datasets={}'.format(label, num_td_ds))
            for idx, ft_type in enumerate(td_datasets):
                dataset_field = td_consts.get_ft_str_td(ft_type=ft_type)

                log.info('{} td={}/{} field={} ticker={}'.format(
                    label, idx, num_td_ds, dataset_field, ticker))
                td_label = '{}-{}'.format(label, dataset_field)
                td_req = copy.deepcopy(work_dict)
                td_req['label'] = td_label
                td_req['ft_type'] = ft_type
                td_req['field'] = dataset_field
                td_req['ticker'] = ticker
                td_res = td_data.get_data_from_td(work_dict=td_req)

                if td_res['status'] == ae_consts.SUCCESS:
                    td_rec = td_res['rec']
                    log.info('{} td ticker={} field={} '
                             'status={} err={}'.format(
                                 label, ticker, dataset_field,
                                 ae_consts.get_status(status=td_res['status']),
                                 td_res['err']))
                    if dataset_field == 'tdcalls':
                        rec['tdcalls'] = td_rec['data']
                    elif dataset_field == 'tdputs':
                        rec['tdputs'] = td_rec['data']
                    else:
                        rec[dataset_field] = td_rec['data']
                        rec[dataset_field] = td_rec['data']
                else:
                    log.critical(
                        '{} failed TD ticker={} field={} '
                        'status={} err={}'.format(
                            label, ticker, dataset_field,
                            ae_consts.get_status(status=td_res['status']),
                            td_res['err']))
                # end of if/else success
            # end idx, ft_type in enumerate(td_datasets):
        # end of if get_td_data

        update_req = {'data': rec}
        update_req['ticker'] = ticker
        update_req['ticker_id'] = ticker_id
        update_req['strike'] = cur_strike
        update_req['contract'] = contract_type
        update_req['s3_enabled'] = work_dict.get('s3_enabled',
                                                 ae_consts.ENABLED_S3_UPLOAD)
        update_req['redis_enabled'] = work_dict.get(
            'redis_enabled', ae_consts.ENABLED_REDIS_PUBLISH)
        update_req['s3_bucket'] = s3_bucket
        update_req['s3_key'] = s3_key
        update_req['s3_access_key'] = work_dict.get('s3_access_key',
                                                    ae_consts.S3_ACCESS_KEY)
        update_req['s3_secret_key'] = work_dict.get('s3_secret_key',
                                                    ae_consts.S3_SECRET_KEY)
        update_req['s3_region_name'] = work_dict.get('s3_region_name',
                                                     ae_consts.S3_REGION_NAME)
        update_req['s3_address'] = work_dict.get('s3_address',
                                                 ae_consts.S3_ADDRESS)
        update_req['s3_secure'] = work_dict.get('s3_secure',
                                                ae_consts.S3_SECURE)
        update_req['redis_key'] = redis_key
        update_req['redis_address'] = work_dict.get('redis_address',
                                                    ae_consts.REDIS_ADDRESS)
        update_req['redis_password'] = work_dict.get('redis_password',
                                                     ae_consts.REDIS_PASSWORD)
        update_req['redis_db'] = int(
            work_dict.get('redis_db', ae_consts.REDIS_DB))
        update_req['redis_expire'] = work_dict.get('redis_expire',
                                                   ae_consts.REDIS_EXPIRE)
        update_req['updated'] = rec['updated']
        update_req['label'] = label
        update_req['celery_disabled'] = True
        update_status = ae_consts.NOT_SET

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=update_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            if ae_consts.ev('DEBUG_RESULTS', '0') == '1':
                log.info('{} update_res status={} data={}'.format(
                    label, ae_consts.get_status(status=update_status),
                    ae_consts.ppj(update_res)))
            else:
                log.info('{} run_publish_pricing_update status={}'.format(
                    label, ae_consts.get_status(status=update_status)))
            # end of if/else

            rec['publish_pricing_update'] = update_res
            res = build_result.build_result(status=ae_consts.SUCCESS,
                                            err=None,
                                            rec=rec)
        except Exception as f:
            err = ('{} publisher.run_publish_pricing_update failed '
                   'with ex={}'.format(label, f))
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
        # end of trying to publish results to connected services

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=('failed - get_new_pricing_data '
                                             'dict={} with ex={}').format(
                                                 work_dict, e),
                                        rec=rec)
        log.error('{} - {}'.format(label, res['err']))
    # end of try/ex

    if ae_consts.ev('DATASET_COLLECTION_SLACK_ALERTS', '0') == '1':
        env_name = 'DEV'
        if ae_consts.ev('PROD_SLACK_ALERTS', '1') == '1':
            env_name = 'PROD'
        done_msg = ('Dataset collected ticker=*{}* on env=*{}* '
                    'redis_key={} s3_key={} iex={} yahoo={}'.format(
                        ticker, env_name, redis_key, s3_key, get_iex_data,
                        get_yahoo_data))
        log.debug('{} sending slack msg={}'.format(label, done_msg))
        if res['status'] == ae_consts.SUCCESS:
            slack_utils.post_success(msg=done_msg, block=False, jupyter=True)
        else:
            slack_utils.post_failure(msg=done_msg, block=False, jupyter=True)
        # end of if/else success
    # end of publishing to slack

    log.info('task - get_new_pricing_data done - '
             '{} - status={}'.format(label,
                                     ae_consts.get_status(res['status'])))

    return get_task_results.get_task_results(work_dict=work_dict, result=res)
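For reference, a minimal sketch of driving this collection task synchronously (no Celery broker); the module path, request builder, and sync wrapper names below are assumptions inferred from the log labels in this listing, not confirmed APIs:

# hedged sketch - module paths and helper names are assumptions
import analysis_engine.api_requests as api_requests
import analysis_engine.work_tasks.get_new_pricing_data as collect

work = api_requests.build_get_new_pricing_request()  # assumed request builder
work['ticker'] = 'SPY'
work['celery_disabled'] = True  # run inline instead of dispatching to a worker
res = collect.run_get_new_pricing_data(work_dict=work)  # assumed sync wrapper
print(res['status'], res['err'])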
def get_data_from_iex(work_dict):
    """get_data_from_iex

    Get pricing from iex

    :param work_dict: request dictionary
    """
    label = 'get_data_from_iex'

    log.info('task - {} - start work_dict={}'.format(label, work_dict))

    rec = {'data': None, 'updated': None}
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    ticker = None
    field = None
    ft_type = None

    try:

        ticker = work_dict.get('ticker', ae_consts.TICKER)
        field = work_dict.get('field', 'daily')
        ft_type = work_dict.get('ft_type', None)
        ft_str = str(ft_type).lower()
        label = work_dict.get('label', label)
        orient = work_dict.get('orient', 'records')

        iex_req = None
        if ft_type == iex_consts.FETCH_DAILY or ft_str == 'daily':
            ft_type = iex_consts.FETCH_DAILY
            iex_req = api_requests.build_iex_fetch_daily_request(label=label)
        elif ft_type == iex_consts.FETCH_MINUTE or ft_str == 'minute':
            ft_type = iex_consts.FETCH_MINUTE
            iex_req = api_requests.build_iex_fetch_minute_request(label=label)
        elif ft_type == iex_consts.FETCH_QUOTE or ft_str == 'quote':
            ft_type = iex_consts.FETCH_QUOTE
            iex_req = api_requests.build_iex_fetch_quote_request(label=label)
        elif ft_type == iex_consts.FETCH_STATS or ft_str == 'stats':
            ft_type = iex_consts.FETCH_STATS
            iex_req = api_requests.build_iex_fetch_stats_request(label=label)
        elif ft_type == iex_consts.FETCH_PEERS or ft_str == 'peers':
            ft_type = iex_consts.FETCH_PEERS
            iex_req = api_requests.build_iex_fetch_peers_request(label=label)
        elif ft_type == iex_consts.FETCH_NEWS or ft_str == 'news':
            ft_type = iex_consts.FETCH_NEWS
            iex_req = api_requests.build_iex_fetch_news_request(label=label)
        elif ft_type == iex_consts.FETCH_FINANCIALS or ft_str == 'financials':
            ft_type = iex_consts.FETCH_FINANCIALS
            iex_req = api_requests.build_iex_fetch_financials_request(
                label=label)
        elif ft_type == iex_consts.FETCH_EARNINGS or ft_str == 'earnings':
            ft_type = iex_consts.FETCH_EARNINGS
            iex_req = api_requests.build_iex_fetch_earnings_request(
                label=label)
        elif ft_type == iex_consts.FETCH_DIVIDENDS or ft_str == 'dividends':
            ft_type = iex_consts.FETCH_DIVIDENDS
            iex_req = api_requests.build_iex_fetch_dividends_request(
                label=label)
        elif ft_type == iex_consts.FETCH_COMPANY or ft_str == 'company':
            ft_type = iex_consts.FETCH_COMPANY
            iex_req = api_requests.build_iex_fetch_company_request(label=label)
        else:
            log.error('{} - unsupported ft_type={} ft_str={} ticker={}'.format(
                label, ft_type, ft_str, ticker))
            raise NotImplementedError
        # if supported fetch request type

        clone_keys = [
            'ticker', 's3_address', 's3_bucket', 's3_key', 'redis_address',
            'redis_db', 'redis_password', 'redis_key'
        ]

        for k in clone_keys:
            iex_req[k] = work_dict.get(k, '{}-missing-in-{}'.format(k, label))
        # end of cloning keys

        if not iex_req:
            err = ('{} - ticker={} did not build an IEX request '
                   'for work={}'.format(label, iex_req['ticker'], work_dict))
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
            return res
        else:
            log.info('{} - ticker={} field={} '
                     'orient={} fetch'.format(label, iex_req['ticker'], field,
                                              orient))
        # if invalid iex request

        df = None
        try:
            if 'from' in work_dict:
                iex_req['from'] = datetime.datetime.strptime(
                    work_dict['from'], '%Y-%m-%d %H:%M:%S')
            df = iex_fetch_data.fetch_data(work_dict=iex_req,
                                           fetch_type=ft_type)
            rec['data'] = df.to_json(orient=orient, date_format='iso')
            rec['updated'] = datetime.datetime.utcnow().strftime(
                '%Y-%m-%d %H:%M:%S')
        except Exception as f:
            log.error('{} - ticker={} field={} failed fetch_data '
                      'with ex={}'.format(label, iex_req['ticker'], ft_type,
                                          f))
        # end of try/ex

        if ae_consts.ev('DEBUG_IEX_DATA', '0') == '1':
            log.info('{} ticker={} field={} data={} to_json'.format(
                label, iex_req['ticker'], field, rec['data']))
        else:
            log.info('{} ticker={} field={} to_json'.format(
                label, iex_req['ticker'], field))
        # end of if/else found data

        upload_and_cache_req = copy.deepcopy(iex_req)
        upload_and_cache_req['celery_disabled'] = True
        upload_and_cache_req['data'] = rec['data']
        if not upload_and_cache_req['data']:
            upload_and_cache_req['data'] = '{}'
        use_field = field
        if use_field == 'news':
            use_field = 'news1'
        if 'redis_key' in work_dict:
            upload_and_cache_req['redis_key'] = '{}_{}'.format(
                work_dict.get('redis_key', iex_req['redis_key']), use_field)
        if 's3_key' in work_dict:
            upload_and_cache_req['s3_key'] = '{}_{}'.format(
                work_dict.get('s3_key', iex_req['s3_key']), use_field)

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=upload_and_cache_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            log.info('{} publish update status={} data={}'.format(
                label, ae_consts.get_status(status=update_status), update_res))
        except Exception as f:
            err = ('{} - failed to upload iex data={} to '
                   's3_key={} and redis_key={} ex={}'.format(
                       label, upload_and_cache_req,
                       upload_and_cache_req['s3_key'],
                       upload_and_cache_req['redis_key'],
                       f))
            log.error(err)
        # end of try/ex to upload and cache

        if not rec['data']:
            log.info('{} - ticker={} no IEX data field={} to publish'.format(
                label, iex_req['ticker'], field))
        # end of if/else

        res = build_result.build_result(status=ae_consts.SUCCESS,
                                        err=None,
                                        rec=rec)

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=('failed - get_data_from_iex '
                                             'dict={} with ex={}').format(
                                                 work_dict, e),
                                        rec=rec)
    # end of try/ex

    log.info('task - get_data_from_iex done - '
             '{} - status={} err={}'.format(
                 label, ae_consts.get_status(res['status']), res['err']))

    return res
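A minimal sketch of calling get_data_from_iex directly; the ticker, bucket, and key values are placeholders, and any cloned keys left out of work_dict simply fall back to the '-missing-in-' defaults set above:

# hedged usage sketch for get_data_from_iex - placeholder values only
work = {
    'ticker': 'SPY',
    'ft_type': 'daily',        # resolves to iex_consts.FETCH_DAILY
    'field': 'daily',
    's3_bucket': 'pricing',    # placeholder bucket
    's3_key': 'SPY_daily',     # placeholder key
    'redis_key': 'SPY_daily',  # placeholder key
    'label': 'iex-sketch'
}
res = get_data_from_iex(work_dict=work)
print(ae_consts.get_status(status=res['status']), res['err'])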
def get_data_from_yahoo(work_dict):
    """get_data_from_yahoo

    Get data from yahoo

    :param work_dict: request dictionary
    """
    label = 'get_data_from_yahoo'

    log.info(f'task - {label} - start work_dict={work_dict}')

    num_news_rec = 0
    num_option_calls = 0
    num_option_puts = 0
    cur_high = -1
    cur_low = -1
    cur_open = -1
    cur_close = -1
    cur_volume = -1

    rec = {
        'pricing': None,
        'options': None,
        'calls': None,
        'puts': None,
        'news': None,
        'exp_date': None,
        'publish_pricing_update': None,
        'date': None,
        'updated': None
    }
    res = {'status': NOT_RUN, 'err': None, 'rec': rec}
    log.error('sorry - yahoo is disabled and '
              'pinance is no longer supported '
              'https://github.com/neberej/pinance')
    return res
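    # NOTE: everything below this early return is unreachable while yahoo
    # support stays disabled; it is kept for reference only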

    try:

        ticker = work_dict.get('ticker', TICKER)
        exp_date = work_dict.get('exp_date', None)
        cur_strike = work_dict.get('strike', None)
        contract_type = str(work_dict.get('contract', 'C')).upper()
        get_pricing = work_dict.get('get_pricing', True)
        get_news = work_dict.get('get_news', True)
        get_options = work_dict.get('get_options', True)
        orient = work_dict.get('orient', 'records')
        label = work_dict.get('label', label)

        ticker_results = None
        num_news_rec = 0

        use_date = exp_date
        if not exp_date:
            exp_date = opt_dates.option_expiration(date=exp_date)
            use_date = exp_date.strftime('%Y-%m-%d')
        """
        Debug control flags

        Quickly turn specific fetches off:

        get_news = False
        get_pricing = False
        get_options = False

        """
        if get_pricing:
            log.info(f'{label} getting ticker={ticker} pricing')
            ticker_results.get_quotes()
            if ticker_results.quotes_data:
                pricing_dict = ticker_results.quotes_data

                cur_high = pricing_dict.get('regularMarketDayHigh', None)
                cur_low = pricing_dict.get('regularMarketDayLow', None)
                cur_open = pricing_dict.get('regularMarketOpen', None)
                cur_close = pricing_dict.get('regularMarketPreviousClose',
                                             None)
                cur_volume = pricing_dict.get('regularMarketVolume', None)
                pricing_dict['high'] = cur_high
                pricing_dict['low'] = cur_low
                pricing_dict['open'] = cur_open
                pricing_dict['close'] = cur_close
                pricing_dict['volume'] = cur_volume
                pricing_dict['date'] = get_last_close_str()
                if 'regularMarketTime' in pricing_dict:
                    pricing_dict['market_time'] = \
                        datetime.datetime.fromtimestamp(
                            pricing_dict['regularMarketTime']).strftime(
                                COMMON_TICK_DATE_FORMAT)
                if 'postMarketTime' in pricing_dict:
                    pricing_dict['post_market_time'] = \
                        datetime.datetime.fromtimestamp(
                            pricing_dict['postMarketTime']).strftime(
                                COMMON_TICK_DATE_FORMAT)

                log.info(f'{label} ticker={ticker} converting pricing to '
                         f'df orient={orient}')

                try:
                    rec['pricing'] = pricing_dict
                except Exception as f:
                    rec['pricing'] = '{}'
                    log.info(
                        f'{label} ticker={ticker} failed converting pricing '
                        f'data={ppj(pricing_dict)} to df ex={f}')
                # try/ex

                log.info(f'{label} ticker={ticker} done converting pricing to '
                         f'df orient={orient}')

            else:
                log.error(f'{label} ticker={ticker} '
                          f'missing quotes_data={ticker_results.quotes_data}')
            # end of if ticker_results.quotes_data

            log.info(
                f'{label} ticker={ticker} close={cur_close} vol={cur_volume}')
        else:
            log.info(f'{label} skip - getting ticker={ticker} pricing')
        # if get_pricing

        if get_news:
            log.info(f'{label} getting ticker={ticker} news')
            ticker_results.get_news()
            if ticker_results.news_data:
                news_list = None
                try:
                    news_list = ticker_results.news_data
                    log.info(f'{label} ticker={ticker} converting news to '
                             f'df orient={orient}')

                    num_news_rec = len(news_list)

                    rec['news'] = news_list
                except Exception as f:
                    rec['news'] = '{}'
                    log.info(f'{label} ticker={ticker} failed converting news '
                             f'data={news_list} to df ex={f}')
                # try/ex

                log.info(f'{label} ticker={ticker} done converting news to '
                         f'df orient={orient}')
            else:
                log.info(f'{label} ticker={ticker} Yahoo NO '
                         f'news={ticker_results.news_data}')
            # end of if ticker_results.news_data
        else:
            log.info(f'{label} skip - getting ticker={ticker} news')
        # end if get_news

        if get_options:

            get_all_strikes = True
            if get_all_strikes:
                cur_strike = None
            else:
                if cur_close:
                    cur_strike = int(cur_close)
                if not cur_strike:
                    cur_strike = 287

            log.info(
                f'{label} ticker={ticker} num_news={num_news_rec} get options '
                f'close={cur_close} exp_date={use_date} '
                f'contract={contract_type} strike={cur_strike}')

            options_dict = \
                yahoo_get_pricing.get_options(
                    ticker=ticker,
                    exp_date_str=use_date,
                    contract_type=contract_type,
                    strike=cur_strike)

            rec['options'] = '{}'

            try:
                log.info(f'{label} ticker={ticker} converting options to '
                         f'df orient={orient}')

                num_option_calls = options_dict.get('num_calls', None)
                num_option_puts = options_dict.get('num_puts', None)
                rec['options'] = {
                    'exp_date': options_dict.get('exp_date', None),
                    'calls': options_dict.get('calls', None),
                    'puts': options_dict.get('puts', None),
                    'num_calls': num_option_calls,
                    'num_puts': num_option_puts
                }
                rec['calls'] = rec['options'].get('calls', EMPTY_DF_STR)
                rec['puts'] = rec['options'].get('puts', EMPTY_DF_STR)
            except Exception as f:
                rec['options'] = '{}'
                log.info(f'{label} ticker={ticker} failed converting options '
                         f'data={options_dict} to df ex={f}')
            # try/ex

            log.info(f'{label} ticker={ticker} done converting options to '
                     f'df orient={orient} num_calls={num_option_calls} '
                     f'num_puts={num_option_puts}')

        else:
            log.info(f'{label} skip - getting ticker={ticker} options')
        # end of if get_options

        log.info(
            f'{label} yahoo pricing for ticker={ticker} close={cur_close} '
            f'num_calls={num_option_calls} num_puts={num_option_puts} '
            f'news={num_news_rec}')

        fields_to_upload = ['pricing', 'options', 'calls', 'puts', 'news']

        for field_name in fields_to_upload:
            upload_and_cache_req = copy.deepcopy(work_dict)
            upload_and_cache_req['celery_disabled'] = True
            upload_and_cache_req['data'] = rec[field_name]
            if not upload_and_cache_req['data']:
                upload_and_cache_req['data'] = '{}'

            if 'redis_key' in work_dict:
                upload_and_cache_req['redis_key'] = (
                    f'{work_dict["redis_key"]}_{field_name}')
            if 's3_key' in work_dict:
                upload_and_cache_req['s3_key'] = (
                    f'{work_dict["s3_key"]}_{field_name}')
            try:
                update_res = publisher.run_publish_pricing_update(
                    work_dict=upload_and_cache_req)
                update_status = update_res.get('status', NOT_SET)
                log.info(f'{label} publish update '
                         f'status={get_status(status=update_status)} '
                         f'data={update_res}')
            except Exception:
                err = (f'{label} - failed to upload YAHOO '
                       f'data={upload_and_cache_req} to '
                       f's3_key={upload_and_cache_req["s3_key"]} and '
                       f'redis_key={upload_and_cache_req["redis_key"]}')
                log.error(err)
            # end of try/ex to upload and cache
            if not rec[field_name]:
                log.debug(f'{label} - ticker={ticker} no data from YAHOO for '
                          f'field_name={field_name}')
        # end of for all fields

        res = build_result.build_result(status=SUCCESS, err=None, rec=rec)
    except Exception as e:
        res = build_result.build_result(status=ERR,
                                        err=('failed - get_data_from_yahoo '
                                             f'dict={work_dict} with ex={e}'),
                                        rec=rec)
        log.error(f'{label} - {res["err"]}')
    # end of try/ex

    log.info('task - get_data_from_yahoo done - '
             f'{label} - status={get_status(res["status"])}')

    return res
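The per-field publish loop above suffixes each Redis and S3 key with the field name so every dataset caches to its own location; a minimal sketch of that naming pattern with placeholder keys:

# hedged sketch of the per-field key-suffix naming used by the publish loops
base_redis_key = 'SPY_latest'  # placeholder
base_s3_key = 'SPY_latest'     # placeholder
for field_name in ['pricing', 'options', 'calls', 'puts', 'news']:
    redis_key = f'{base_redis_key}_{field_name}'
    s3_key = f'{base_s3_key}_{field_name}'
    print(redis_key, s3_key)   # e.g. SPY_latest_pricing SPY_latest_pricing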
def get_data_from_td(work_dict):
    """get_data_from_td

    Get pricing data from Tradier

    :param work_dict: request dictionary
    """
    label = 'get_data_from_td'

    log.info('task - {} - start work_dict={}'.format(label, work_dict))

    rec = {'data': None, 'updated': None}
    res = {'status': ae_consts.NOT_RUN, 'err': None, 'rec': rec}

    ticker = None
    field = None
    ft_type = None

    try:

        ticker = work_dict.get('ticker', ae_consts.TICKER)
        field = work_dict.get('field', 'daily')
        ft_type = work_dict.get('ft_type', None)
        ft_str = str(ft_type).lower()
        label = work_dict.get('label', label)
        orient = work_dict.get('orient', 'records')

        td_req = None
        if ft_type == td_consts.FETCH_TD_CALLS or ft_str == 'tdcalls':
            ft_type = td_consts.FETCH_TD_CALLS
            td_req = api_requests.build_td_fetch_calls_request(label=label)
        elif ft_type == td_consts.FETCH_TD_PUTS or ft_str == 'tdputs':
            ft_type = td_consts.FETCH_TD_PUTS
            td_req = api_requests.build_td_fetch_puts_request(label=label)
        else:
            log.error('{} - unsupported ft_type={} ft_str={} ticker={}'.format(
                label, ft_type, ft_str, ticker))
            raise NotImplementedError
        # if supported fetch request type

        clone_keys = [
            'ticker', 's3_address', 's3_bucket', 's3_key', 'redis_address',
            'redis_db', 'redis_password', 'redis_key'
        ]

        for k in clone_keys:
            td_req[k] = work_dict.get(k, '{}-missing-in-{}'.format(k, label))
        # end of cloning keys

        if not td_req:
            err = ('{} - ticker={} did not build a TD request '
                   'for work={}'.format(label, td_req['ticker'], work_dict))
            log.error(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
            return res
        else:
            log.info('{} - ticker={} field={} '
                     'orient={} fetch'.format(label, td_req['ticker'], field,
                                              orient))
        # if invalid td request

        df = None
        try:
            if 'from' in work_dict:
                td_req['from'] = datetime.datetime.strptime(
                    work_dict['from'], '%Y-%m-%d %H:%M:%S')
            status_df, df = td_fetch_data.fetch_data(work_dict=td_req,
                                                     fetch_type=ft_type)

            if status_df == ae_consts.SUCCESS:
                rec['data'] = df.to_json(orient=orient)
                rec['updated'] = datetime.datetime.utcnow().strftime(
                    '%Y-%m-%d %H:%M:%S')
            else:
                err = ('{} - ticker={} td_fetch_data.fetch_data failed '
                       'for field={}'.format(label, td_req['ticker'], ft_type))
                log.critical(err)
                res = build_result.build_result(status=ae_consts.ERR,
                                                err=err,
                                                rec=rec)
                return res
        except Exception as f:
            err = ('{} - ticker={} field={} failed fetch_data '
                   'with ex={}'.format(label, td_req['ticker'], ft_type, f))
            log.critical(err)
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)
            return res
        # end of try/ex

        if ae_consts.ev('DEBUG_TD_DATA', '0') == '1':
            log.info('{} ticker={} field={} data={} to_json'.format(
                label, td_req['ticker'], field, rec['data']))
        else:
            log.info('{} ticker={} field={} to_json'.format(
                label, td_req['ticker'], field))
        # end of if/else found data

        upload_and_cache_req = copy.deepcopy(td_req)
        upload_and_cache_req['celery_disabled'] = True
        upload_and_cache_req['data'] = rec['data']
        if not upload_and_cache_req['data']:
            upload_and_cache_req['data'] = '{}'
        use_field = field
        if use_field == 'news':
            use_field = 'news1'
        if 'redis_key' in work_dict:
            upload_and_cache_req['redis_key'] = '{}_{}'.format(
                work_dict.get('redis_key', td_req['redis_key']), use_field)
        if 's3_key' in work_dict:
            upload_and_cache_req['s3_key'] = '{}_{}'.format(
                work_dict.get('s3_key', td_req['s3_key']), use_field)

        try:
            update_res = publisher.run_publish_pricing_update(
                work_dict=upload_and_cache_req)
            update_status = update_res.get('status', ae_consts.NOT_SET)
            log.info('{} publish update status={} data={}'.format(
                label, ae_consts.get_status(status=update_status), update_res))
        except Exception as f:
            err = ('{} - failed to upload td data={} to '
                   's3_key={} and redis_key={} ex={}'.format(
                       label, upload_and_cache_req,
                       upload_and_cache_req['s3_key'],
                       upload_and_cache_req['redis_key'],
                       f))
            log.error(err)
        # end of try/ex to upload and cache

        if not rec['data']:
            log.info(
                '{} - ticker={} no Tradier data field={} to publish'.format(
                    label, td_req['ticker'], field))
        # end of if/else

        res = build_result.build_result(status=ae_consts.SUCCESS,
                                        err=None,
                                        rec=rec)

    except Exception as e:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err=('failed - get_data_from_td '
                                             'dict={} with ex={}').format(
                                                 work_dict, e),
                                        rec=rec)
    # end of try/ex

    log.info('task - get_data_from_td done - '
             '{} - status={} err={}'.format(
                 label, ae_consts.get_status(res['status']), res['err']))

    return res
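A minimal sketch of calling get_data_from_td directly; the values are placeholders, and Tradier credentials are assumed to already be configured through td_consts or the environment:

# hedged usage sketch for get_data_from_td - placeholder values only
work = {
    'ticker': 'SPY',
    'ft_type': 'tdcalls',       # resolves to td_consts.FETCH_TD_CALLS
    'field': 'tdcalls',
    'redis_key': 'SPY_latest',  # placeholder key
    's3_key': 'SPY_latest',     # placeholder key
    'label': 'td-sketch'
}
res = get_data_from_td(work_dict=work)
print(ae_consts.get_status(status=res['status']), res['err'])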