Example No. 1
 def test_fetch_tickers_from_screener_exception(self):
     """test_fetch_tickers_from_screener_exception"""
     url = ('exception-'
            'https://finviz.com/screener.ashx?'
            'v=111&'
            'f=an_recom_strongbuy,'
            'exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
            'ft=4')
     res = fetch_tickers_from_screener(url=url)
     self.assertIsNotNone(res)
     self.assertEqual(get_status(status=res['status']), 'EX')
     self.assertIsNone(res['rec']['data'])
Example No. 2
 def test_fetch_tickers_from_screener_failure_data(self):
     """test_fetch_tickers_from_screener_failure_data"""
     url = ('failure-'
            'https://finviz.com/screener.ashx?'
            'v=111&'
            'f=an_recom_strongbuy,'
            'exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
            'ft=4')
     res = fetch_tickers_from_screener(url=url)
     self.assertIsNotNone(res)
     self.assertEqual(get_status(status=res['status']), 'ERR')
     self.assertTrue('finviz returned non-ok HTTP' in res['err'])
     self.assertIsNone(res['rec']['data'])
Example No. 3
 def test_fetch_tickers_from_screener_success(self):
     """test_fetch_tickers_from_screener_success"""
     url = ('success-'
            'https://finviz.com/screener.ashx?'
            'v=111&'
            'f=an_recom_strongbuy,'
            'exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
            'ft=4')
     res = fetch_tickers_from_screener(url=url)
     self.assertIsNotNone(res)
     self.assertTrue(len(res['rec']['data']) > 0)
     self.assertEqual(get_status(status=res['status']), 'SUCCESS')
     self.assertEqual(res['rec']['data']['ticker'][0], 'QS')
     self.assertEqual(res['rec']['data']['ticker'][1], 'SPY')
     self.assertEqual(res['rec']['data']['ticker'][2], 'VXX')
     self.assertEqual(res['rec']['tickers'][0], 'QS')
     self.assertEqual(res['rec']['tickers'][1], 'SPY')
     self.assertEqual(res['rec']['tickers'][2], 'VXX')
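The success test above pins down the shape of the result returned by fetch_tickers_from_screener: a 'status' code readable through get_status, an 'err' message, and a 'rec' payload holding the screener rows as a pandas DataFrame under 'data' plus a flat 'tickers' list. A minimal consumer sketch under those assumptions; the import paths are guesses, since the examples only reference the bare function names:

# import paths assumed - the examples above only reference
# fetch_tickers_from_screener and get_status by name
from analysis_engine.finviz.fetch_api import fetch_tickers_from_screener
from analysis_engine.consts import get_status

screen_url = (
    'https://finviz.com/screener.ashx?'
    'v=111&'
    'f=an_recom_strongbuy,exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
    'ft=4')

res = fetch_tickers_from_screener(url=screen_url)
if get_status(status=res['status']) == 'SUCCESS':
    # res['rec']['data'] holds the screener rows as a pandas.DataFrame
    # and res['rec']['tickers'] holds the ticker symbols from those rows
    print(res['rec']['tickers'])
else:
    # failures and exceptions come back with res['rec']['data'] set to None
    print('screener failed status={} err={}'.format(
        get_status(status=res['status']), res['err']))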
Example No. 4
    def test_integration_test_fetch_tickers_from_screener(self):
        """test_integration_test_fetch_tickers_from_screener"""
        # 2020-09-05 - Finviz is kicking back:
        # 403 - Forbidden: Access is denied
        # sounds like they require some kind of auth now
        return 0
        if ev('INT_TESTS', '0') == '0':
            return

        default_url = ('https://finviz.com/screener.ashx?'
                       'v=111&'
                       'f=an_recom_strongbuy,'
                       'exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
                       'ft=4')
        url = ev('INT_TEST_FINVIZ_SCREEN_URL', default_url)
        res = fetch_tickers_from_screener(url=url)
        self.assertIsNotNone(res)
        self.assertTrue(len(res['rec']['data']) > 0)
        self.assertEqual(get_status(status=res['status']), 'SUCCESS')
        self.debug_df(df=res['rec']['data'])
    def test_integration_test_fetch_tickers_from_screener(self):
        """test_integration_test_fetch_tickers_from_screener"""
        if ev('INT_TESTS', '0') == '0':
            return

        default_url = (
            'https://finviz.com/screener.ashx?'
            'v=111&'
            'f=an_recom_strongbuy,'
            'exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
            'ft=4')
        url = ev('INT_TEST_FINVIZ_SCREEN_URL', default_url)
        res = fetch_tickers_from_screener(url=url)
        self.assertIsNotNone(res)
        self.assertTrue(len(res['rec']['data']) > 0)
        self.assertEqual(get_status(status=res['status']), 'SUCCESS')
        self.debug_df(df=res['rec']['data'])
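Both variants of the integration test are gated on environment variables read through ev: INT_TESTS has to be set to something other than '0', and INT_TEST_FINVIZ_SCREEN_URL can point the run at a different screen (the first variant additionally short-circuits because of the Finviz 403). A sketch of flipping those switches before running the suite; the test module path is hypothetical and depends on where these tests live:

import os
import unittest

# enable integration tests - the ev('INT_TESTS', '0') guard skips them otherwise
os.environ['INT_TESTS'] = '1'

# optional: point the test at a different Finviz screen
os.environ['INT_TEST_FINVIZ_SCREEN_URL'] = (
    'https://finviz.com/screener.ashx?v=111&f=exch_nyse&ft=4')

# hypothetical module path - replace with the module that holds the tests above
unittest.main(module='analysis_engine.finviz.test_fetch_from_screener', exit=False)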
def task_screener_analysis(self, work_dict):
    """task_screener_analysis

    :param work_dict: task dictionary
    """

    label = work_dict.get('label', 'screener')

    log.info('{} - start'.format(label))

    rec = {}
    res = build_result.build_result(status=ae_consts.NOT_RUN,
                                    err=None,
                                    rec=rec)
    """
    Input - Set up dataset sources to collect
    """

    ticker = work_dict.get('ticker', None)
    org_tickers = work_dict.get('tickers', None)

    if not ticker and not org_tickers:
        res = build_result.build_result(status=ae_consts.ERR,
                                        err='missing ticker or tickers',
                                        rec=rec)

    tickers = []
    if not org_tickers:
        if ticker:
            tickers = [ticker]
    else:
        for t in org_tickers:
            upper_cased_ticker = str(t).upper()
            if upper_cased_ticker not in tickers:
                tickers.append(upper_cased_ticker)
        # build a unique ticker list
    # end of ensuring tickers is a unique list of
    # upper-cased ticker symbol strings

    # fetch from: 'all', 'iex' or 'yahoo'
    fetch_mode = work_dict.get('fetch_mode', os.getenv('FETCH_MODE', 'iex'))
    iex_datasets = work_dict.get(
        'iex_datasets',
        os.getenv('IEX_DATASETS_DEFAULT', ae_consts.IEX_DATASETS_DEFAULT))

    # if defined, these are task functions for
    # calling customized determine_sells/determine_buys Celery tasks
    determine_sells_callback = work_dict.get('determine_sells', None)
    determine_buys_callback = work_dict.get('determine_buys', None)

    try:

        log.info('{} fetch={} tickers={} '
                 'iex_datasets={} '
                 'sell_task={} '
                 'buy_task={}'.format(label, fetch_mode, tickers, iex_datasets,
                                      determine_sells_callback,
                                      determine_buys_callback))
        """
        Input - Set up required urls for building buckets
        """
        fv_urls = work_dict.get('urls', None)

        if not fv_urls:
            res = build_result.build_result(
                status=ae_consts.ERR,
                err='missing required urls list of screeners',
                rec=rec)

        # stop if the input validation above set an error - return through
        # the celery task-results helper, which also works with celery
        # turned off for debugging without an engine running
        if res['err']:
            log.error('{} - tickers={} fetch={} iex_datasets={} '
                      'hit validation err={}'.format(label, tickers,
                                                     fetch_mode, iex_datasets,
                                                     res['err']))

            return get_task_results.get_task_results(work_dict=work_dict,
                                                     result=res)
        # end of input validation checks

        num_urls = len(fv_urls)
        log.info('{} - running urls={}'.format(label, fv_urls))

        fv_dfs = []
        for uidx, url in enumerate(fv_urls):
            log.info('{} - url={}/{} url={}'.format(label, uidx, num_urls,
                                                    url))
            fv_res = finviz_utils.fetch_tickers_from_screener(url=url)
            if fv_res['status'] == ae_consts.SUCCESS:
                fv_dfs.append(fv_res['rec']['data'])
                for ft_tick in fv_res['rec']['tickers']:
                    upper_ft_ticker = ft_tick.upper()
                    if upper_ft_ticker not in tickers:
                        tickers.append(upper_ft_ticker)
                # end of for all found tickers
            else:
                log.error('{} - failed url={}/{} url={}'.format(
                    label, uidx, num_urls, url))
            # if success vs log the error
        # end of urls to get pandas.DataFrame and unique tickers
        """
        Find tickers in screens
        """

        num_tickers = len(tickers)

        log.info('{} - fetching tickers={} from urls={}'.format(
            label, num_tickers, num_urls))
        """
        pull ticker data
        """

        fetch_recs = fetch_utils.fetch(tickers=tickers,
                                       fetch_mode=fetch_mode,
                                       iex_datasets=iex_datasets)

        if fetch_recs:
            rec = fetch_recs
            """
            Output - Where is data getting cached and archived?
            (this helps to retroactively evaluate trading performance)
            """

            res = build_result.build_result(status=ae_consts.SUCCESS,
                                            err=None,
                                            rec=rec)
        else:
            err = ('{} - tickers={} failed fetch={} '
                   'iex_datasets={}'.format(label, tickers, fetch_mode,
                                            iex_datasets))
            res = build_result.build_result(status=ae_consts.ERR,
                                            err=err,
                                            rec=rec)

        log.info('{} - done'.format(label))
    except Exception as e:
        err = ('{} - tickers={} fetch={} hit ex={} '.format(
            label, tickers, fetch_mode, e))
        log.error(err)
        res = build_result.build_result(status=ae_consts.ERR, err=err, rec=rec)
    # end of try/ex

    return get_task_results.get_task_results(work_dict=work_dict, result=res)
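task_screener_analysis pulls everything it needs out of work_dict; the keys below are the ones the function reads above, with placeholder values (the screener URL reuses the filter string from the tests, and the direct call at the end is only a sketch since the Celery task decorator is not part of this excerpt):

# every key below maps to a work_dict.get(...) call in the function above;
# values are placeholders except the screener url, which reuses the filter
# string from the tests
work_dict = {
    'label': 'screener-analysis',  # log prefix, defaults to 'screener'
    'tickers': ['SPY', 'QS'],      # optional seed tickers ('ticker' works for one symbol)
    'fetch_mode': 'iex',           # 'all', 'iex' or 'yahoo'
    'urls': [                      # required - finviz screener urls to walk
        'https://finviz.com/screener.ashx?'
        'v=111&'
        'f=an_recom_strongbuy,exch_nyse,fa_ltdebteq_low,fa_sales5years_o10&'
        'ft=4'
    ],
    # also honored when present: 'iex_datasets', 'determine_sells', 'determine_buys'
}

# 'self' is unused in the body above, so a direct call can pass None;
# under Celery this would normally go through .delay(...) / .apply_async(...)
res = task_screener_analysis(None, work_dict)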