示例#1
0
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        """on_failure

        Custom failure hook for the task. When a task raises an
        exception this method decides how the error is logged and,
        if slack alerting is enabled, pushes a failure message.

        http://docs.celeryproject.org/en/latest/userguide/tasks.html#task-inheritance

        :param exc: exception
        :param task_id: task id
        :param args: arguments passed into task
        :param kwargs: keyword arguments passed into task
        :param einfo: exception info
        """

        self.build_log_label_from_args(args=args)

        exc_str = str(exc)
        debug_task = (ev('DEBUG_TASK', '0') == '1')
        if not debug_task:
            log.error('on_failure {} - exc={} '.format(self.log_label,
                                                       exc_str))
        else:
            # include the full call signature when task debugging is on
            log.error('on_failure {} - exc={} '
                      'args={} kwargs={}'.format(self.log_label, exc_str, args,
                                                 kwargs))
        if ev('PROD_SLACK_ALERTS', '0') == '1':
            post_failure([
                'on_failure {}'.format(self.log_label),
                'exc={}'.format(exc_str)
            ])
def run_screener_analysis(work_dict):
    """run_screener_analysis

    Celery wrapper for running without celery

    :param work_dict: task data
    """

    fn_name = 'run_screener_analysis'
    label = '{} - {}'.format(fn_name, work_dict.get('label', ''))

    log.info('{} - start'.format(label))

    # default result until the task actually runs
    response = build_result.build_result(
        status=ae_consts.NOT_RUN,
        err=None,
        rec={})
    task_res = {}

    if not ae_consts.is_celery_disabled(work_dict=work_dict):
        # celery path - queue the task and hand back the async handle
        task_res = task_screener_analysis.delay(work_dict=work_dict)
        response = build_result.build_result(
            status=ae_consts.SUCCESS,
            err=None,
            rec={'task_id': task_res})
    else:
        # synchronous path - run the task inline in this process
        work_dict['celery_disabled'] = True
        task_res = task_screener_analysis(work_dict)
        if not task_res:
            log.error('{} celery was disabled but the task={} '
                      'did not return anything'.format(label, response))
        else:
            response = task_res.get('result', task_res)
            if ae_consts.ev('DEBUG_RESULTS', '0') == '1':
                try:
                    response_details = ae_consts.ppj(response)
                except Exception:
                    response_details = response
                log.info('{} task result={}'.format(label, response_details))

    if not response:
        log.info('{} - done ' 'no response'.format(label))
    elif ae_consts.ev('DEBUG_RESULTS', '0') == '1':
        log.info('{} - done '
                 'status={} err={} rec={}'.format(
                     label, ae_consts.get_status(response['status']),
                     response['err'], response['rec']))
    else:
        log.info('{} - done '
                 'status={} err={}'.format(
                     label, ae_consts.get_status(response['status']),
                     response['err']))

    return response
示例#3
0
def run_screener_analysis(work_dict):
    """run_screener_analysis

    Celery wrapper for running without celery

    :param work_dict: task data
    """

    fn_name = 'run_screener_analysis'
    label = f'''{fn_name} - {work_dict.get(
        'label',
        '')}'''

    log.info(f'{label} - start')

    # default result until the task actually runs
    response = build_result.build_result(
        status=ae_consts.NOT_RUN,
        err=None,
        rec={})
    task_res = {}

    if not ae_consts.is_celery_disabled(work_dict=work_dict):
        # celery path - queue the task and hand back the async handle
        task_res = task_screener_analysis.delay(work_dict=work_dict)
        response = build_result.build_result(
            status=ae_consts.SUCCESS,
            err=None,
            rec={'task_id': task_res})
    else:
        # synchronous path - run the task inline in this process
        work_dict['celery_disabled'] = True
        task_res = task_screener_analysis(work_dict)
        if not task_res:
            log.error(f'{label} celery was disabled but the task={response} '
                      'did not return anything')
        else:
            response = task_res.get('result', task_res)
            if ae_consts.ev('DEBUG_RESULTS', '0') == '1':
                try:
                    response_details = ae_consts.ppj(response)
                except Exception:
                    response_details = response
                log.info(f'{label} task result={response_details}')

    if not response:
        log.info(f'{label} - done no response')
    elif ae_consts.ev('DEBUG_RESULTS', '0') == '1':
        log.info(f'{label} - done '
                 f'status={ae_consts.get_status(response["status"])} '
                 f'err={response["err"]} rec={response["rec"]}')
    else:
        log.info(f'{label} - done '
                 f'status={ae_consts.get_status(response["status"])} '
                 f'err={response["err"]}')

    return response
def run_get_new_pricing_data(work_dict):
    """run_get_new_pricing_data

    Celery wrapper for running without celery

    :param work_dict: task data
    """

    label = work_dict.get('label', '')

    log.debug(f'run_get_new_pricing_data - {label} - start')

    # default result until the task actually runs
    response = build_result.build_result(status=ae_consts.NOT_RUN,
                                         err=None,
                                         rec={})
    task_res = {}

    # allow running without celery
    if ae_consts.is_celery_disabled(work_dict=work_dict):
        work_dict['celery_disabled'] = True
        task_res = get_new_pricing_data(work_dict)
        if task_res:
            response = task_res.get('result', task_res)
            if ae_consts.ev('DEBUG_RESULTS', '0') == '1':
                response_details = response
                try:
                    response_details = ae_consts.ppj(response)
                except Exception:
                    response_details = response

                log.debug(f'{label} task result={response_details}')
        else:
            log.error(f'{label} celery was disabled but the task={response} '
                      'did not return anything')
        # end of if response
    else:
        task_res = get_new_pricing_data.delay(work_dict=work_dict)
        rec = {'task_id': task_res}
        response = build_result.build_result(status=ae_consts.SUCCESS,
                                             err=None,
                                             rec=rec)
    # if celery enabled

    if response:
        status_str = ae_consts.get_status(response['status'])
        if ae_consts.ev('DEBUG_RESULTS', '0') == '1':
            log.debug(f'run_get_new_pricing_data - {label} - done '
                      f'status={status_str} '
                      f'err={response["err"]} '
                      f'rec={response["rec"]}')
        else:
            # fix: log status + err (not the full rec payload) when
            # DEBUG_RESULTS is off, consistent with the other celery
            # wrappers - the rec can be a very large dataset dictionary
            log.debug(f'run_get_new_pricing_data - {label} - done '
                      f'status={status_str} '
                      f'err={response["err"]}')
    else:
        log.debug(f'run_get_new_pricing_data - {label} - done ' 'no response')
    # end of if/else response

    return response
def run_publish_ticker_aggregate_from_s3(work_dict):
    """run_publish_ticker_aggregate_from_s3

    Celery wrapper for running without celery

    :param work_dict: task data
    """

    label = work_dict.get('label', '')

    log.info('run_publish_ticker_aggregate_from_s3 - {} - start'.format(label))

    # default result until the task actually runs
    response = build_result.build_result(status=NOT_RUN, err=None, rec={})
    task_res = {}

    if not is_celery_disabled(work_dict=work_dict):
        # celery path - queue the task and hand back the async handle
        task_res = publish_ticker_aggregate_from_s3.delay(work_dict=work_dict)
        rec = {'task_id': task_res}
        response = build_result.build_result(status=SUCCESS, err=None, rec=rec)
    else:
        # synchronous path - run the task inline in this process
        work_dict['celery_disabled'] = True
        task_res = publish_ticker_aggregate_from_s3(work_dict=work_dict)
        if not task_res:
            log.error('{} celery was disabled but the task={} '
                      'did not return anything'.format(label, response))
        else:
            response = task_res.get('result', task_res)
            if ev('DEBUG_RESULTS', '0') == '1':
                try:
                    response_details = ppj(response)
                except Exception:
                    response_details = response
                log.info('{} task result={}'.format(label, response_details))

    if not response:
        log.info('run_publish_ticker_aggregate_from_s3 - {} - done '
                 'no response'.format(label))
    elif ev('DEBUG_RESULTS', '0') == '1':
        log.info('run_publish_ticker_aggregate_from_s3 - {} - done '
                 'status={} err={} rec={}'.format(
                     label, get_status(response['status']),
                     response['err'], response['rec']))
    else:
        log.info('run_publish_ticker_aggregate_from_s3 - {} - done '
                 'status={} err={}'.format(label,
                                           get_status(response['status']),
                                           response['err']))

    return response
示例#6
0
def add_footnote(
        fig=None,
        xpos=0.90,
        ypos=0.01,
        text=None,
        color='#888888',
        fontsize=8):
    """add_footnote

    Attach a footnote to a figure object. When ``text`` is not
    provided, the footnote text comes from the environment key:
    ``PLOT_FOOTNOTE``

    :param fig: add the footnote to this figure object
    :param xpos: x-axes position
    :param ypos: y-axis position
    :param text: text in the footnote
    :param color: font color
    :param fontsize: text size for the footnote text
    """
    # nothing to annotate without a figure
    if not fig:
        return

    footnote_text = text if text else ae_consts.ev(
        'PLOT_FOOTNOTE',
        'algotraders')

    fig.text(
        xpos,
        ypos,
        footnote_text,
        va='bottom',
        fontsize=fontsize,
        color=color)
    def test_integration_daily_indicator_with_algo_config(self):
        """test_integration_daily_indicator_with_algo_config"""
        if ae_consts.ev('INT_TESTS', '0') == '0':
            return

        config = self.algo_config_dict
        algo = base_algo.BaseAlgo(
            ticker=self.ticker,
            balance=self.balance,
            start_date_str=self.start_date_str,
            end_date_str=self.end_date_str,
            config_dict=config)
        self.assertEqual(algo.name, config['name'])
        self.assertEqual(algo.tickers, [self.ticker])

        algo.handle_data(data=self.data)

        res = algo.get_result()
        print(ae_consts.ppj(res))
        first_history_rec = res['history'][0]
        self.assertTrue(first_history_rec['total_sells'] >= 1)
        self.assertTrue(first_history_rec['total_buys'] == 0)
示例#8
0
    def test_integration_publish_from_s3_to_redis(self):
        """test_integration_publish_from_s3_to_redis

        Publish an s3 key into redis and verify the returned
        record. Runs only when ``INT_TESTS=1``.
        """
        if ev('INT_TESTS', '0') == '0':
            return

        work = build_publish_from_s3_to_redis_request()
        work['s3_enabled'] = 1
        work['redis_enabled'] = 1
        work['s3_access_key'] = S3_ACCESS_KEY
        work['s3_secret_key'] = S3_SECRET_KEY
        work['s3_region_name'] = S3_REGION_NAME
        work['s3_address'] = S3_ADDRESS
        work['s3_secure'] = S3_SECURE
        work['redis_address'] = REDIS_ADDRESS
        work['redis_db'] = REDIS_DB
        work['redis_password'] = REDIS_PASSWORD
        work['redis_expire'] = REDIS_EXPIRE
        work['s3_bucket'] = 'integration-tests'
        work['s3_key'] = 'integration-test-v1'
        # fix: removed the dead store work['redis_key'] = REDIS_KEY that
        # was immediately overwritten by this test-specific key
        work['redis_key'] = 'integration-test-v1'

        res = run_publish_from_s3_to_redis(work)
        self.assertTrue(res['status'] == SUCCESS)
        self.assertTrue(res['err'] is None)
        self.assertTrue(res['rec'] is not None)
        record = res['rec']
        self.assertEqual(record['ticker'], TICKER)
        self.assertEqual(record['s3_enabled'], True)
        self.assertEqual(record['redis_enabled'], True)
        self.assertEqual(record['s3_bucket'], work['s3_bucket'])
        self.assertEqual(record['s3_key'], work['s3_key'])
        self.assertEqual(record['redis_key'], work['redis_key'])
    def get(self, name=None):
        """get

        mock redis get

        :param name: name of the key to check
        """
        if not name:
            err = ('mock - MockRedis.get('
                   'name={}'
                   ') - missing a name'.format(name))
            log.error(err)
            raise Exception(err)

        # prefer the in-memory cache, then fall back to the environment
        cached_value = self.cache_dict.get(name, None)
        if cached_value:
            log.info('mock - MockRedis.get('
                     'name={}) '
                     'cached_value={}'.format(name, cached_value))
            return cached_value

        env_value = ae_consts.ev(name, None)
        log.info('mock - MockRedis.get('
                 'name={}) '
                 'env={}'.format(name, env_value))
        if env_value:
            # redis clients return bytes, so the mock does too
            return env_value.encode('utf-8')
        return None
示例#10
0
    def test_integration_test_fetch_tickers_from_screener(self):
        """test_integration_test_fetch_tickers_from_screener"""
        # 2020-09-05 - Finviz is kicking back:
        # 403 - Forbidden: Access is denied
        # sounds like they require some kind of auth now
        # NOTE(review): this early return deliberately disables the
        # test until the auth issue is resolved - everything below
        # it is currently unreachable
        return 0
        if ev('INT_TESTS', '0') == '0':
            return

        # default screener: NYSE strong-buy tickers with low
        # long-term debt/equity and 5-year sales growth over 10%
        default_url = ('https://finviz.com/screener.ashx?'
                       'v=111&'
                       'f=an_recom_strongbuy,'
                       'exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
                       'ft=4')
        url = ev('INT_TEST_FINVIZ_SCREEN_URL', default_url)
        res = fetch_tickers_from_screener(url=url)
        self.assertIsNotNone(res)
        self.assertTrue(len(res['rec']['data']) > 0)
        self.assertEqual(get_status(status=res['status']), 'SUCCESS')
        self.debug_df(df=res['rec']['data'])
    def test_integration_extract_minute_dataset(self):
        """test_integration_extract_minute_dataset"""
        if ev('INT_TESTS', '0') == '0':
            return

        label = 'IEX minute dataset'
        # build dataset cache dictionary
        work = get_ds_dict(
            ticker='NFLX',
            label=label)

        status, df = extract_minute_dataset(work_dict=work)
        self._check(
            df=df,
            status=status,
            label=label,
            work=work)
    def test_integration_fetch_company(self):
        """test_integration_fetch_company"""
        if ev('INT_TESTS', '0') == '0':
            return

        # build the request and fetch the company data
        work = build_iex_fetch_company_request(
            label='test_integration_fetch_company')
        res = fetch_data(work_dict=work)

        self.assertIsNotNone(res)
        self.debug_df(df=res)
    def test_integration_fetch_dividends(self):
        """test_integration_fetch_dividends"""
        if ev('INT_TESTS', '0') == '0':
            return

        # build the request and fetch dividends for AAPL
        work = build_iex_fetch_dividends_request(
            label='test_integration_fetch_dividends')
        work['ticker'] = 'AAPL'
        res = fetch_data(work_dict=work)

        self.assertIsNotNone(res)
        self.debug_df(df=res)
    def test_integration_test_fetch_tickers_from_screener(self):
        """test_integration_test_fetch_tickers_from_screener"""
        if ev('INT_TESTS', '0') == '0':
            return

        default_url = (
            'https://finviz.com/screener.ashx?'
            'v=111&'
            'f=an_recom_strongbuy,'
            'exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
            'ft=4')
        url = ev('INT_TEST_FINVIZ_SCREEN_URL', default_url)

        res = fetch_tickers_from_screener(url=url)
        self.assertIsNotNone(res)
        screen_rows = res['rec']['data']
        self.assertTrue(len(screen_rows) > 0)
        self.assertEqual(
            get_status(status=res['status']),
            'SUCCESS')
        self.debug_df(df=screen_rows)
示例#15
0
    def test_integration_get_financials_helper(self):
        """test_integration_get_financials_helper

        After running, there should be an updated timestamp on
        the s3 key:

        ::

            testing_<TICKER>_financials

        View the financials bucket:

        ::

            aws --endpoint-url http://localhost:9000 s3 ls s3://financials

        View the redis cache using the redis-cli:

        ::

            ./tools/redis-cli.sh
            127.0.0.1:6379> keys testing_TSLA_financials
            1) "testing_TSLA_financials"

        """
        if ev('INT_TESTS', '0') == '0':
            return

        label = 'test_integration_get_financials_helper'

        # build the request restricted to the IEX financials dataset
        work = build_get_new_pricing_request(
            label=label)
        work['fetch_mode'] = FETCH_MODE_IEX
        work['iex_datasets'] = [
            FETCH_FINANCIALS
        ]
        work['ticker'] = 'AAPL'
        work['s3_bucket'] = 'testing'
        work['s3_key'] = f'testing_{work["ticker"]}'
        work['redis_key'] = f'testing_{work["ticker"]}'
        work['celery_disabled'] = True
        dataset_results = get_new_pricing_data(
            work)

        self.assertIsNotNone(
            dataset_results)
        # fix: this previously used assertIsNotNone on the boolean
        # comparison, which can never be None and always passed
        self.assertTrue(
            len(dataset_results['rec']['financials']) >= 5)
 def test_integration_account_credentials(self):
     """test_integration_account_credentials"""
     if ae_consts.ev('INT_TESTS', '0') == '0':
         return

     # build an authenticated TD session
     headers = td_consts.get_auth_headers()
     session = requests.Session()
     session.headers = headers

     self.exp_date = opt_dates.option_expiration().strftime(
         ae_consts.COMMON_DATE_FORMAT)
     use_url = td_consts.TD_URLS['options'].format(
         self.ticker,
         self.exp_date)

     response = url_helper.url_helper(sess=session).get(use_url)
     self.assertEqual(response.status_code, 200)
     self.assertTrue(len(json.loads(response.text)) > 0)
    def test_integration_fetch_financials(self):
        """test_integration_fetch_financials"""
        if ev('INT_TESTS', '0') == '0':
            return

        # build the request and fetch financials for TSLA
        work = build_iex_fetch_financials_request(
            label='test_integration_fetch_financials')
        work['ticker'] = 'TSLA'
        res = fetch_data(work_dict=work)

        self.assertIsNotNone(res)
        self.debug_df(df=res)
示例#18
0
def post(attachments, jupyter=False):
    """Send created attachments to slack

    :param attachments: Values to post to slack
    :param jupyter: when ``True`` log only whether an attachment
        existed instead of its full payload
    """
    SLACK_WEBHOOK = ae_consts.ev('SLACK_WEBHOOK', None)
    result = {'status': ae_consts.FAILED}
    if not os.getenv('SLACK_WEBHOOK', False):
        log.info('post - please add a SLACK_WEBHOOK environment '
                 'variable to publish messages')
        return result
    if attachments and SLACK_WEBHOOK:
        try:
            for attachment in attachments:
                r = requests.post(SLACK_WEBHOOK, data=json.dumps(attachment))
                # fix: status_code is an int - compare it directly
                # instead of round-tripping through str()
                if r.status_code == 200:
                    result['status'] = ae_consts.SUCCESS
                else:
                    log.error(
                        ('Failed to post attachment={} '
                         'with status_code={}').format(
                             attachment if not jupyter else
                             True if attachment else False, r.status_code))
                    result['status'] = ae_consts.FAILED
                    break
        except Exception as e:
            log.error(('Failed to post attachments={} '
                       'with ex={}').format(
                           attachments if not jupyter else
                           True if attachments else False, e))
            result['status'] = ae_consts.ERR
            result['err'] = e
    else:
        log.info(('Skipping post to slack due to missing '
                  'attachments={} or SLACK_WEBHOOK missing={}').format(
                      attachments
                      if not jupyter else True if attachments else False,
                      False if SLACK_WEBHOOK else True))
    return result
    def test_integration_extract_minute_dataset(self):
        """test_integration_extract_minute_dataset"""
        if ev('INT_TESTS', '0') == '0':
            return

        label = 'IEX minute dataset'
        # build dataset cache dictionary
        work = get_ds_dict(
            ticker='NFLX',
            label=label)

        status, df = extract_minute_dataset(work_dict=work)
        if status != SUCCESS:
            log.critical('{} is missing in redis '
                         'for ticker={} status={}'.format(
                             label, work['ticker'], get_status(status=status)))
        else:
            self.assertIsNotNone(df)
            self.debug_df(df=df)
 def test_integration_redis_set(self):
     """test_integration_redis_set

     Publish pricing data to redis only (s3 disabled) and verify
     the task reports ``SUCCESS``. Runs only when ``INT_TESTS=1``.
     """
     if ev('INT_TESTS', '0') == '1':
         work = build_publish_pricing_request()
         work['s3_enabled'] = 0
         work['redis_enabled'] = 1
         work['redis_address'] = REDIS_ADDRESS
         work['redis_db'] = REDIS_DB
         work['redis_password'] = REDIS_PASSWORD
         work['redis_expire'] = REDIS_EXPIRE
         # fix: removed the dead store work['redis_key'] = REDIS_KEY that
         # was immediately overwritten by this test-specific key
         work['redis_key'] = 'integration-test-v1'
         work['s3_key'] = 'integration-test-v1'
         res = run_publish_pricing_update(
             work)
         self.assertTrue(
             res['status'] == SUCCESS)
示例#21
0
    def test_integration_extract_option_puts(self):
        """test_integration_extract_option_puts"""
        if ev('INT_TESTS', '0') == '0':
            return

        # build dataset cache dictionary
        work = get_ds_dict(
            ticker='NFLX',
            label='test_integration_extract_option_puts')

        status, df = extract_option_puts_dataset(work_dict=work)
        if status != SUCCESS:
            log.critical('Yahoo Option Puts are missing in redis '
                         'for ticker={} status={}'.format(
                             work['ticker'], get_status(status=status)))
        else:
            self.assertIsNotNone(df)
            self.debug_df(df=df)
示例#22
0
    def test_integration_extract_option_puts(self):
        """test_integration_extract_option_puts"""
        if ae_consts.ev('INT_TESTS', '0') == '0':
            return

        # build dataset cache dictionary
        work = api_requests.get_ds_dict(
            ticker='SPY',
            label='test_integration_extract_option_puts')

        status, df = yahoo_extract.extract_option_puts_dataset(work_dict=work)
        if status != ae_consts.SUCCESS:
            log.critical('Yahoo Option Puts are missing in redis '
                         f'for ticker={work["ticker"]} '
                         f'status={ae_consts.get_status(status=status)}')
        else:
            self.assertIsNotNone(df)
            self.debug_df(df=df)
    def test_integration_fetch_puts_dataset(self):
        """test_integration_fetch_puts_dataset"""
        if ae_consts.ev('INT_TESTS', '0') == '0':
            return

        label = 'TD puts dataset'
        # build dataset cache dictionary
        work = api_requests.get_ds_dict(
            ticker='SPY',
            label=label)

        status, df = td_fetch.fetch_data(work_dict=work, fetch_type='tdputs')
        if status != ae_consts.SUCCESS:
            log.critical(f'{label} is missing in redis '
                         f'for ticker={work["ticker"]} '
                         f'status={ae_consts.get_status(status=status)}')
        else:
            self.assertIsNotNone(df)
            self.debug_df(df=df)
def post(attachments, jupyter=False):
    """Send created attachments to slack

    :param attachments: Values to post to slack
    """
    SLACK_WEBHOOK = ae_consts.ev('SLACK_WEBHOOK', None)
    result = {'status': ae_consts.FAILED}
    if not os.getenv('SLACK_WEBHOOK', False):
        log.info(f'post - {publish_msg_error}')
        return result
    if attachments and SLACK_WEBHOOK:
        # when running in jupyter, log only whether the payload existed
        shown_attachments = (
            attachments if not jupyter else True if attachments else False)
        try:
            for attachment in attachments:
                r = requests.post(SLACK_WEBHOOK, data=json.dumps(attachment))
                if str(r.status_code) == "200":
                    result['status'] = ae_consts.SUCCESS
                else:
                    shown = (
                        attachment if not jupyter
                        else True if attachment else False)
                    log.error(f'Failed to post attachment={shown} '
                              f'with status_code={r.status_code}')
                    result['status'] = ae_consts.FAILED
                    break
        except Exception as e:
            log.error(f'Failed to post attachments={shown_attachments} '
                      f'with ex={e}')
            result['status'] = ae_consts.ERR
            result['err'] = e
    else:
        shown_attachments = (
            attachments if not jupyter else True if attachments else False)
        log.info('Skipping post to slack due to missing '
                 f'attachments={shown_attachments} or SLACK_WEBHOOK '
                 f'missing={False if SLACK_WEBHOOK else True}')
    return result
示例#25
0
def debug_msg(label, datafeed_type, msg_format, date_str, df):
    """debug_msg

    Debug helper for debugging scrubbing handlers

    :param label: log label
    :param datafeed_type: fetch type
    :param msg_format: message to include
    :param date_str: date string
    :param df: ``pandas DataFrame`` or ``None``
    """

    msg = msg_format.format('_', date_str)

    # resolve a human-readable name for the datafeed type
    dft_msg = ''
    if (datafeed_type == yahoo_consts.DATAFEED_PRICING_YAHOO
            or datafeed_type == yahoo_consts.DATAFEED_OPTIONS_YAHOO
            or datafeed_type == yahoo_consts.DATAFEED_NEWS_YAHOO):
        dft_msg = yahoo_consts.get_datafeed_str_yahoo(df_type=datafeed_type)
    elif (datafeed_type == td_consts.DATAFEED_TD_CALLS
          or datafeed_type == td_consts.DATAFEED_TD_PUTS):
        dft_msg = td_consts.get_datafeed_str_td(df_type=datafeed_type)
    else:
        dft_msg = iex_consts.get_datafeed_str(df_type=datafeed_type)

    if ae_consts.ev('DEBUG_FETCH', '0') == '1':
        if 'START' in msg:
            log.info('{} - {} -------------------------'
                     '------------------------------------'.format(
                         label, dft_msg))
        # fix: a trailing comma here previously turned msg into a
        # 1-tuple, which broke the 'END' in msg substring check below
        # and logged a tuple repr instead of the message
        msg = msg_format.format(df, date_str)
        if hasattr(df, 'empty'):
            log.info('{} - {} - {} found df={} '
                     'columns={}'.format(label, dft_msg, msg, df,
                                         df.columns.values))
        else:
            log.info('{} - {} - {} not df={}'.format(label, dft_msg, msg, df))

        if 'END' in msg:
            log.info('{} - {} -------------------------'
                     '------------------------------------'.format(
                         label, dft_msg))
    else:
        log.info('{} - {} - {}'.format(label, dft_msg, msg))
 def test_integration_s3_upload(self):
     """test_integration_s3_upload"""
     if ev('INT_TESTS', '0') == '1':
         # publish to s3 only - redis is disabled for this test
         work = build_publish_pricing_request()
         work['s3_enabled'] = 1
         work['redis_enabled'] = 0
         work['s3_access_key'] = S3_ACCESS_KEY
         work['s3_secret_key'] = S3_SECRET_KEY
         work['s3_region_name'] = S3_REGION_NAME
         work['s3_address'] = S3_ADDRESS
         work['s3_secure'] = S3_SECURE
         work['s3_bucket'] = 'integration-tests'
         work['s3_key'] = 'integration-test-v1'
         work['redis_key'] = 'integration-test-v1'
         # make sure a local aws profile does not override the creds
         os.environ.pop('AWS_DEFAULT_PROFILE', None)
         res = run_publish_pricing_update(work)
         self.assertTrue(res['status'] == SUCCESS)
示例#27
0
    def test_latest(self):
        """test_latest"""
        if ae_consts.ev('INT_TESTS', '0') == '0':
            return

        ticker = 'SPY'
        start_date = ae_utils.get_last_close_str()
        # build dataset cache dictionary
        runner = algo_runner.AlgoRunner(
            ticker=ticker,
            start_date=start_date,
            end_date=None,
            history_loc=self.algo_history_loc,
            algo_config=self.algo_config,
            verbose_algo=True,
            verbose_processor=False,
            verbose_indicators=False)

        df = runner.latest(
            ticker=ticker,
            date_str=start_date,
            start_row=-200)
        self.assertEqual(len(df.index), len(runner.get_history().index))
示例#28
0
    def on_success(self, retval, task_id, args, kwargs):
        """on_success

        Custom success hook for the task; logs a short summary or,
        in debug mode, the full return value and call arguments.

        http://docs.celeryproject.org/en/latest/reference/celery.app.task.html

        :param retval: return value
        :param task_id: celery task id
        :param args: arguments passed into task
        :param kwargs: keyword arguments passed into task
        """

        self.build_log_label_from_args(args=args)

        debug_task = (ae_consts.ev('DEBUG_TASK', '0') == '1')
        if not debug_task:
            log.info(f'on_success {self.log_label} - task_id={task_id}')
        else:
            log.info(f'on_success {self.log_label} - retval={retval} '
                     f'task_id={task_id} args={args} kwargs={kwargs}')
def mock_publish_from_s3_to_redis_err(work_dict):
    """mock_publish_from_s3_to_redis_err

    :param work_dict: dictionary for driving the task
    """

    env_key = 'TEST_S3_CONTENTS'
    redis_key = work_dict.get('redis_key', env_key)
    str_dict = ae_consts.ev(env_key, None)
    log.info('mock_publish_from_s3_to_redis_err - '
             f'setting key={redis_key} value={str_dict}')

    # mirror the payload into the environment so callers can read it back
    if str_dict:
        os.environ[redis_key] = str_dict
        data = str_dict.encode('utf-8')
    else:
        os.environ[redis_key] = ''
        data = None

    # always reports an error status
    return {
        'status': ae_consts.ERR,
        'err': None,
        'rec': {'data': data}}
def mock_publish_from_s3_to_redis(work_dict):
    """mock_publish_from_s3_to_redis

    :param work_dict: dictionary for driving the task
    """

    env_key = 'TEST_S3_CONTENTS'
    redis_key = work_dict.get('redis_key', env_key)
    str_dict = ev(env_key, None)
    log.info('mock_publish_from_s3_to_redis - '
             'setting key={} value={}'.format(redis_key, str_dict))

    # mirror the payload into the environment so callers can read it back
    if str_dict:
        os.environ[redis_key] = str_dict
        data = str_dict.encode('utf-8')
    else:
        os.environ[redis_key] = ''
        data = None

    # always reports success
    return {
        'status': SUCCESS,
        'err': None,
        'rec': {'data': data}}