Code Example #1
def test_err_get_new_pricing(self):
    """test_err_get_new_pricing"""
    work = api_requests.build_get_new_pricing_request()
    work['label'] = 'test_err_get_new_pricing'
    res = run_get.run_get_new_pricing_data(work)
    self.assertIn(
        res['status'],
        [ae_consts.MISSING_TOKEN, ae_consts.ERR])
Code Example #2
def test_success_if_iex_errors(self):
    """test_success_if_iex_errors"""
    work = api_requests.build_get_new_pricing_request()
    work['label'] = 'test_success_if_iex_errors'
    res = run_get.run_get_new_pricing_data(work)
    self.assertEqual(res['status'], ae_consts.SUCCESS)
Code Example #3
def test_success_get_new_pricing(self):
    """test_success_get_new_pricing"""
    work = build_get_new_pricing_request()
    work['label'] = 'test_success_get_new_pricing'
    res = run_get_new_pricing_data(work)
    self.assertEqual(res['status'], SUCCESS)
    self.assertIsNone(res['err'])
    self.assertIsNotNone(res['rec']['news'])
    self.assertGreaterEqual(len(res['rec']['news']), 1)
    self.assertGreaterEqual(len(res['rec']['pricing']), 1)
    self.assertGreaterEqual(len(res['rec']['options']), 1)
Code Example #4
def test_success_get_new_pricing(self):
    """test_success_get_new_pricing"""
    # yahoo is disabled - exit early; the assertions below are
    # intentionally unreachable until the feed is re-enabled
    return 0
    work = api_requests.build_get_new_pricing_request()
    work['label'] = 'test_success_get_new_pricing'
    res = run_get.run_get_new_pricing_data(work)
    self.assertEqual(res['status'], ae_consts.SUCCESS)
    self.assertIsNone(res['err'])
    self.assertIsNotNone(res['rec']['news'])
    self.assertGreaterEqual(len(res['rec']['news']), 1)
    self.assertGreaterEqual(len(res['rec']['pricing']), 1)
    self.assertGreaterEqual(len(res['rec']['options']), 1)
Code Example #5
    def test_integration_get_financials_helper(self):
        """test_integration_get_financials_helper

        After running, there should be an updated timestamp on
        the s3 key:

        ::

            testing_<TICKER>_financials

        View the financials bucket:

        ::

            aws --endpoint-url http://localhost:9000 s3 ls s3://financials

        View the redis cache using the redis-cli:

        ::

            ./tools/redis-cli.sh
            127.0.0.1:6379> keys testing_TSLA_financials
            1) "testing_TSLA_financials"

        """
        if ev('INT_TESTS', '0') == '0':
            return

        label = 'test_integration_get_financials_helper'

        # store data
        work = build_get_new_pricing_request(
            label=label)

        work['fetch_mode'] = FETCH_MODE_IEX
        work['iex_datasets'] = [
            FETCH_FINANCIALS
        ]
        work['ticker'] = 'AAPL'
        work['s3_bucket'] = 'testing'
        work['s3_key'] = f'testing_{work["ticker"]}'
        work['redis_key'] = f'testing_{work["ticker"]}'
        work['celery_disabled'] = True
        dataset_results = get_new_pricing_data(
            work)

        self.assertIsNotNone(
            dataset_results)
        self.assertGreaterEqual(
            len(dataset_results['rec']['financials']), 5)
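
The docstring above verifies the cached financials with the redis-cli. A minimal Python equivalent of that check (an illustrative sketch, assuming a local redis on localhost:6379 and the redis-py client):

import redis

# connect to the same redis instance the integration test writes to
r = redis.Redis(host='localhost', port=6379, db=0)

# list the keys written by the test, e.g. [b'testing_AAPL']
print(r.keys('testing_AAPL*'))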
Code Example #6
def fetch_new_stock_datasets():
    """fetch_new_stock_datasets

    Collect datasets for a ticker from IEX Cloud or Tradier

    .. warning:: IEX Cloud charges per request. Here are example
        commands to help you monitor your usage, whether you are a
        first-time user or automating pulls (intraday, daily, and
        weekly modes are supported).

    **Setup**

    ::

        export IEX_TOKEN=YOUR_IEX_CLOUD_TOKEN
        export TD_TOKEN=YOUR_TRADIER_TOKEN

    **Pull Data for a Ticker from IEX and Tradier**

    ::

        fetch -t TICKER

    **Pull from All Supported IEX Feeds**

    ::

        fetch -t TICKER -g iex-all

    **Pull from All Supported Tradier Feeds**

    ::

        fetch -t TICKER -g td

    **Intraday IEX and Tradier Feeds (only minute and news to reduce costs)**

    ::

        fetch -t TICKER -g intra
        # or manually:
        # fetch -t TICKER -g td,iex_min,iex_news

    **Daily IEX Feeds (daily and news)**

    ::

        fetch -t TICKER -g daily
        # or manually:
        # fetch -t TICKER -g iex_day,iex_news

    **Weekly IEX Feeds (company, financials, earnings, dividends, and peers)**

    ::

        fetch -t TICKER -g weekly
        # or manually:
        # fetch -t TICKER -g iex_fin,iex_earn,iex_div,iex_peers,iex_news,
        # iex_comp

    **IEX Minute**

    ::

        fetch -t TICKER -g iex_min

    **IEX News**

    ::

        fetch -t TICKER -g iex_news

    **IEX Daily**

    ::

        fetch -t TICKER -g iex_day

    **IEX Stats**

    ::

        fetch -t TICKER -g iex_stats

    **IEX Peers**

    ::

        fetch -t TICKER -g iex_peers

    **IEX Financials**

    ::

        fetch -t TICKER -g iex_fin

    **IEX Earnings**

    ::

        fetch -t TICKER -g iex_earn

    **IEX Dividends**

    ::

        fetch -t TICKER -g iex_div

    **IEX Quote**

    ::

        fetch -t TICKER -g iex_quote

    **IEX Company**

    ::

        fetch -t TICKER -g iex_comp

    .. note:: This requires the following services to be listening on:

        - redis ``localhost:6379``
        - minio ``localhost:9000``

    """
    log.info('start - fetch_new_stock_datasets')

    parser = argparse.ArgumentParser(
        description=('Download and store the latest stock pricing, '
                     'news, and options chain data '
                     'and store it in Minio (S3) and Redis. '
                     'Also includes support for getting FinViz '
                     'screener tickers'))
    parser.add_argument('-t', help=('ticker'), required=False, dest='ticker')
    parser.add_argument(
        '-g',
        help=('optional - fetch mode: '
              'initial = default fetch from initial data feeds '
              '(IEX and Tradier), '
              'intra = fetch intraday from IEX and Tradier, '
              'daily or day = fetch daily from IEX, '
              'weekly = fetch weekly from IEX, '
              'all = fetch from all data feeds, '
              'td = fetch from Tradier feeds only, '
              'iex = fetch from IEX Cloud feeds only, '
              'min or minute or iex_min = fetch IEX Cloud intraday '
              'per-minute feed '
              'https://iexcloud.io/docs/api/#historical-prices, '
              'day or daily or iex_day = fetch IEX Cloud daily feed '
              'https://iexcloud.io/docs/api/#historical-prices, '
              'quote or iex_quote = fetch IEX Cloud quotes feed '
              'https://iexcloud.io/docs/api/#quote, '
              'stats or iex_stats = fetch IEX Cloud key stats feed '
              'https://iexcloud.io/docs/api/#key-stats, '
              'peers or iex_peers = fetch from just IEX Cloud peers feed '
              'https://iexcloud.io/docs/api/#peers, '
              'news or iex_news = fetch IEX Cloud news feed '
              'https://iexcloud.io/docs/api/#news, '
              'fin or iex_fin = fetch IEX Cloud financials feed '
              'https://iexcloud.io/docs/api/#financials, '
              'earn or iex_earn = fetch from just IEX Cloud earnings feed '
              'https://iexcloud.io/docs/api/#earnings, '
              'div or iex_div = fetch from just IEX Cloud dividends feed '
              'https://iexcloud.io/docs/api/#dividends, '
              'iex_comp = fetch from just IEX Cloud company feed '
              'https://iexcloud.io/docs/api/#company'),
        required=False,
        dest='fetch_mode')
    parser.add_argument('-i',
                        help=('optional - ticker id '
                              'not used without a database'),
                        required=False,
                        dest='ticker_id')
    parser.add_argument('-e',
                        help=('optional - options expiration date'),
                        required=False,
                        dest='exp_date_str')
    parser.add_argument('-l',
                        help=('optional - path to the log config file'),
                        required=False,
                        dest='log_config_path')
    parser.add_argument('-b',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-B',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-s',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-S',
                        help=('optional - s3 ssl or not'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-G',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument('-p',
                        help=('optional - redis_password'),
                        required=False,
                        dest='redis_password')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-n',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument('-z',
                        help=('optional - strike price'),
                        required=False,
                        dest='strike')
    parser.add_argument(
        '-c',
        help=('optional - contract type: "C" for calls, "P" for puts'),
        required=False,
        dest='contract_type')
    parser.add_argument(
        '-P',
        help=('optional - get pricing data if "1", or "0" to disable'),
        required=False,
        dest='get_pricing')
    parser.add_argument(
        '-N',
        help=('optional - get news data if "1", or "0" to disable'),
        required=False,
        dest='get_news')
    parser.add_argument(
        '-O',
        help=('optional - get options data if "1", or "0" to disable'),
        required=False,
        dest='get_options')
    parser.add_argument('-U',
                        help=('optional - s3 publishing enabled if "1", or '
                              '"0" to disable'),
                        required=False,
                        dest='s3_enabled')
    parser.add_argument(
        '-R',
        help=('optional - redis publishing enabled if "1", or '
              '"0" to disable'),
        required=False,
        dest='redis_enabled')
    parser.add_argument('-A',
                        help=('optional - run an analysis '
                              'supported modes: scn'),
                        required=False,
                        dest='analysis_type')
    parser.add_argument('-L',
                        help=('optional - screener urls to pull '
                              'tickers for analysis'),
                        required=False,
                        dest='urls')
    parser.add_argument(
        '-Z',
        help=('optional - run with the Celery engine (by default the '
              'fetch runs offline without an engine for local testing '
              'and demos)'),
        required=False,
        dest='celery_enabled',
        action='store_true')
    parser.add_argument('-F',
                        help=('optional - backfill date for filling in '
                              'gaps for the IEX Cloud minute dataset '
                              'format is YYYY-MM-DD'),
                        required=False,
                        dest='backfill_date')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    run_offline = True
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    fetch_mode = 'initial'
    exp_date_str = ae_consts.NEXT_EXP_STR
    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    celery_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_secure = ae_consts.S3_SECURE
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = ae_consts.S3_KEY
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = ae_consts.REDIS_KEY
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    strike = None
    contract_type = None
    get_pricing = True
    get_news = True
    get_options = True
    s3_enabled = True
    redis_enabled = True
    analysis_type = None
    backfill_date = None
    debug = False

    if args.ticker:
        ticker = args.ticker.upper()
    if args.ticker_id:
        ticker_id = args.ticker_id
    if args.exp_date_str:
        exp_date_str = args.exp_date_str
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_password:
        redis_password = args.redis_password
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.strike:
        strike = args.strike
    if args.contract_type:
        contract_type = args.contract_type
    if args.get_pricing:
        get_pricing = args.get_pricing == '1'
    if args.get_news:
        get_news = args.get_news == '1'
    if args.get_options:
        get_options = args.get_options == '1'
    if args.s3_enabled:
        s3_enabled = args.s3_enabled == '1'
    if args.redis_enabled:
        redis_enabled = args.redis_enabled == '1'
    if args.fetch_mode:
        fetch_mode = str(args.fetch_mode).lower()
    if args.analysis_type:
        analysis_type = str(args.analysis_type).lower()
    if args.celery_enabled:
        run_offline = False
    if args.backfill_date:
        backfill_date = args.backfill_date
    if args.debug:
        debug = True

    work = api_requests.build_get_new_pricing_request()

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_key'] = s3_key
    work['redis_key'] = redis_key
    work['strike'] = strike
    work['contract'] = contract_type
    work['exp_date'] = exp_date_str
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['get_pricing'] = get_pricing
    work['get_news'] = get_news
    work['get_options'] = get_options
    work['s3_enabled'] = s3_enabled
    work['redis_enabled'] = redis_enabled
    work['fetch_mode'] = fetch_mode
    work['analysis_type'] = analysis_type
    work['iex_datasets'] = iex_consts.DEFAULT_FETCH_DATASETS
    work['backfill_date'] = backfill_date
    work['debug'] = debug
    work['label'] = f'ticker={ticker}'

    if analysis_type == 'scn':
        label = f'screener={work["ticker"]}'
        fv_urls = []
        if args.urls:
            fv_urls = str(args.urls).split('|')
        if len(fv_urls) == 0:
            fv_urls = os.getenv('SCREENER_URLS', '').split('|')
        screener_req = api_requests.build_screener_analysis_request(
            ticker=ticker, fv_urls=fv_urls, label=label)
        work.update(screener_req)
        start_screener_analysis(req=work)
    # end of analysis_type
    else:
        last_close_date = ae_utils.last_close()
        last_close_str = last_close_date.strftime(ae_consts.COMMON_DATE_FORMAT)
        cache_base_key = f'{ticker}_{last_close_str}'
        if not args.keyname:
            work['s3_key'] = cache_base_key
            work['redis_key'] = cache_base_key

        path_to_tasks = 'analysis_engine.work_tasks'
        task_name = (f'{path_to_tasks}'
                     f'.get_new_pricing_data.get_new_pricing_data')
        task_res = None
        if ae_consts.is_celery_disabled() or run_offline:
            work['celery_disabled'] = True
            work['verbose'] = debug
            log.debug(f'starting without celery work={ae_consts.ppj(work)} '
                      f'offline={run_offline}')
            task_res = task_pricing.get_new_pricing_data(work)
            status_str = ae_consts.get_status(status=task_res['status'])

            cur_date = backfill_date
            if not backfill_date:
                cur_date = ae_utils.get_last_close_str()
            redis_arr = work["redis_address"].split(':')
            include_results = ''
            if debug:
                include_results = task_res['rec']
            if task_res['status'] == ae_consts.SUCCESS:
                if task_res['rec']['num_success'] == 0:
                    log.error(f'failed fetching ticker={work["ticker"]} '
                              f'from {fetch_mode} - please check the '
                              'environment variables')
                else:
                    log.info(f'done fetching ticker={work["ticker"]} '
                             f'mode={fetch_mode} '
                             f'status={status_str} '
                             f'err={task_res["err"]} {include_results}')
                    print('View keys in redis with:\n'
                          f'redis-cli -h {redis_arr[0]} '
                          'keys '
                          f'"{work["ticker"]}_{cur_date}*"')
            elif task_res['status'] == ae_consts.MISSING_TOKEN:
                print('Set an IEX or Tradier token: '
                      '\n'
                      '  export IEX_TOKEN=YOUR_IEX_TOKEN\n'
                      '  export TD_TOKEN=YOUR_TD_TOKEN\n')
            else:
                log.error(f'done fetching ticker={work["ticker"]} '
                          f'mode={fetch_mode} '
                          f'status={status_str} '
                          f'err={task_res["err"]}')
            # if/else debug
        else:
            log.debug(f'connecting to broker={broker_url} '
                      f'backend={backend_url}')

            # Get the Celery app
            app = get_celery_app.get_celery_app(
                name=__name__,
                auth_url=broker_url,
                backend_url=backend_url,
                path_to_config_module=celery_config_module,
                ssl_options=ssl_options,
                transport_options=transport_options,
                include_tasks=include_tasks)

            log.debug(f'calling task={task_name} - work={ae_consts.ppj(work)}')
            job_id = app.send_task(task_name, (work, ))
            log.debug(f'task={task_name} - job_id={job_id}')
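
The CLI above builds the same work request that the Celery task consumes, so the offline path can be driven straight from Python. A minimal sketch of the equivalent of fetch -t SPY -g daily (assuming the module paths used in the code above):

import analysis_engine.consts as ae_consts
import analysis_engine.api_requests as api_requests
import analysis_engine.work_tasks.get_new_pricing_data as task_pricing

# build the same request the CLI builds for: fetch -t SPY -g daily
work = api_requests.build_get_new_pricing_request()
work['ticker'] = 'SPY'
work['fetch_mode'] = 'daily'
work['celery_disabled'] = True  # run synchronously without an engine
work['label'] = 'ticker=SPY'

res = task_pricing.get_new_pricing_data(work)
print(ae_consts.get_status(status=res['status']))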
Code Example #7
def fetch_new_stock_datasets():
    """fetch_new_stock_datasets

    Collect all datasets for the ticker **SPY**:

    ::

        fetch_new_stock_datasets.py -t SPY

    .. note:: This requires the following services to be listening on:

        - redis ``localhost:6379``
        - minio ``localhost:9000``

    """
    log.info('start - fetch_new_stock_datasets')

    parser = argparse.ArgumentParser(
        description=('Download and store the latest stock pricing, '
                     'news, and options chain data '
                     'and store it in Minio (S3) and Redis. '
                     'Also includes support for getting FinViz '
                     'screener tickers'))
    parser.add_argument('-t', help=('ticker'), required=False, dest='ticker')
    parser.add_argument('-g',
                        help=('optional - fetch mode: '
                              'all = fetch from all data sources (default), '
                              'td = fetch from just Tradier sources, '
                              'iex = fetch from just IEX sources'),
                        required=False,
                        dest='fetch_mode')
    parser.add_argument('-i',
                        help=('optional - ticker id '
                              'not used without a database'),
                        required=False,
                        dest='ticker_id')
    parser.add_argument('-e',
                        help=('optional - options expiration date'),
                        required=False,
                        dest='exp_date_str')
    parser.add_argument('-l',
                        help=('optional - path to the log config file'),
                        required=False,
                        dest='log_config_path')
    parser.add_argument('-b',
                        help=('optional - broker url for Celery'),
                        required=False,
                        dest='broker_url')
    parser.add_argument('-B',
                        help=('optional - backend url for Celery'),
                        required=False,
                        dest='backend_url')
    parser.add_argument('-k',
                        help=('optional - s3 access key'),
                        required=False,
                        dest='s3_access_key')
    parser.add_argument('-s',
                        help=('optional - s3 secret key'),
                        required=False,
                        dest='s3_secret_key')
    parser.add_argument('-a',
                        help=('optional - s3 address format: <host:port>'),
                        required=False,
                        dest='s3_address')
    parser.add_argument('-S',
                        help=('optional - s3 ssl or not'),
                        required=False,
                        dest='s3_secure')
    parser.add_argument('-u',
                        help=('optional - s3 bucket name'),
                        required=False,
                        dest='s3_bucket_name')
    parser.add_argument('-G',
                        help=('optional - s3 region name'),
                        required=False,
                        dest='s3_region_name')
    parser.add_argument('-p',
                        help=('optional - redis_password'),
                        required=False,
                        dest='redis_password')
    parser.add_argument('-r',
                        help=('optional - redis_address format: <host:port>'),
                        required=False,
                        dest='redis_address')
    parser.add_argument('-n',
                        help=('optional - redis and s3 key name'),
                        required=False,
                        dest='keyname')
    parser.add_argument(
        '-m',
        help=('optional - redis database number (0 by default)'),
        required=False,
        dest='redis_db')
    parser.add_argument('-x',
                        help=('optional - redis expiration in seconds'),
                        required=False,
                        dest='redis_expire')
    parser.add_argument('-z',
                        help=('optional - strike price'),
                        required=False,
                        dest='strike')
    parser.add_argument(
        '-c',
        help=('optional - contract type: "C" for calls, "P" for puts'),
        required=False,
        dest='contract_type')
    parser.add_argument(
        '-P',
        help=('optional - get pricing data if "1", or "0" to disable'),
        required=False,
        dest='get_pricing')
    parser.add_argument(
        '-N',
        help=('optional - get news data if "1", or "0" to disable'),
        required=False,
        dest='get_news')
    parser.add_argument(
        '-O',
        help=('optional - get options data if "1", or "0" to disable'),
        required=False,
        dest='get_options')
    parser.add_argument('-U',
                        help=('optional - s3 publishing enabled if "1", or '
                              '"0" to disable'),
                        required=False,
                        dest='s3_enabled')
    parser.add_argument(
        '-R',
        help=('optional - redis publishing enabled if "1", or '
              '"0" to disable'),
        required=False,
        dest='redis_enabled')
    parser.add_argument('-A',
                        help=('optional - run an analysis '
                              'supported modes: scn'),
                        required=False,
                        dest='analysis_type')
    parser.add_argument('-L',
                        help=('optional - screener urls to pull '
                              'tickers for analysis'),
                        required=False,
                        dest='urls')
    parser.add_argument(
        '-Z',
        help=('optional - run with the Celery engine (by default the '
              'fetch runs offline without an engine for local testing '
              'and demos)'),
        required=False,
        dest='celery_enabled',
        action='store_true')
    parser.add_argument('-d',
                        help=('debug'),
                        required=False,
                        dest='debug',
                        action='store_true')
    args = parser.parse_args()

    run_offline = True
    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    fetch_mode = 'all'
    exp_date_str = ae_consts.NEXT_EXP_STR
    ssl_options = ae_consts.SSL_OPTIONS
    transport_options = ae_consts.TRANSPORT_OPTIONS
    broker_url = ae_consts.WORKER_BROKER_URL
    backend_url = ae_consts.WORKER_BACKEND_URL
    celery_config_module = ae_consts.WORKER_CELERY_CONFIG_MODULE
    include_tasks = ae_consts.INCLUDE_TASKS
    s3_access_key = ae_consts.S3_ACCESS_KEY
    s3_secret_key = ae_consts.S3_SECRET_KEY
    s3_region_name = ae_consts.S3_REGION_NAME
    s3_address = ae_consts.S3_ADDRESS
    s3_secure = ae_consts.S3_SECURE
    s3_bucket_name = ae_consts.S3_BUCKET
    s3_key = ae_consts.S3_KEY
    redis_address = ae_consts.REDIS_ADDRESS
    redis_key = ae_consts.REDIS_KEY
    redis_password = ae_consts.REDIS_PASSWORD
    redis_db = ae_consts.REDIS_DB
    redis_expire = ae_consts.REDIS_EXPIRE
    strike = None
    contract_type = None
    get_pricing = True
    get_news = True
    get_options = True
    s3_enabled = True
    redis_enabled = True
    analysis_type = None
    debug = False

    if args.ticker:
        ticker = args.ticker.upper()
    if args.ticker_id:
        ticker_id = args.ticker_id
    if args.exp_date_str:
        exp_date_str = args.exp_date_str
    if args.broker_url:
        broker_url = args.broker_url
    if args.backend_url:
        backend_url = args.backend_url
    if args.s3_access_key:
        s3_access_key = args.s3_access_key
    if args.s3_secret_key:
        s3_secret_key = args.s3_secret_key
    if args.s3_region_name:
        s3_region_name = args.s3_region_name
    if args.s3_address:
        s3_address = args.s3_address
    if args.s3_secure:
        s3_secure = args.s3_secure
    if args.s3_bucket_name:
        s3_bucket_name = args.s3_bucket_name
    if args.keyname:
        s3_key = args.keyname
        redis_key = args.keyname
    if args.redis_address:
        redis_address = args.redis_address
    if args.redis_password:
        redis_password = args.redis_password
    if args.redis_db:
        redis_db = args.redis_db
    if args.redis_expire:
        redis_expire = args.redis_expire
    if args.strike:
        strike = args.strike
    if args.contract_type:
        contract_type = args.contract_type
    if args.get_pricing:
        get_pricing = args.get_pricing == '1'
    if args.get_news:
        get_news = args.get_news == '1'
    if args.get_options:
        get_options = args.get_options == '1'
    if args.s3_enabled:
        s3_enabled = args.s3_enabled == '1'
    if args.redis_enabled:
        redis_enabled = args.redis_enabled == '1'
    if args.fetch_mode:
        fetch_mode = str(args.fetch_mode).lower()
    if args.analysis_type:
        analysis_type = str(args.analysis_type).lower()
    if args.celery_enabled:
        run_offline = False
    if args.debug:
        debug = True

    work = api_requests.build_get_new_pricing_request()

    work['ticker'] = ticker
    work['ticker_id'] = ticker_id
    work['s3_bucket'] = s3_bucket_name
    work['s3_key'] = s3_key
    work['redis_key'] = redis_key
    work['strike'] = strike
    work['contract'] = contract_type
    work['exp_date'] = exp_date_str
    work['s3_access_key'] = s3_access_key
    work['s3_secret_key'] = s3_secret_key
    work['s3_region_name'] = s3_region_name
    work['s3_address'] = s3_address
    work['s3_secure'] = s3_secure
    work['redis_address'] = redis_address
    work['redis_password'] = redis_password
    work['redis_db'] = redis_db
    work['redis_expire'] = redis_expire
    work['get_pricing'] = get_pricing
    work['get_news'] = get_news
    work['get_options'] = get_options
    work['s3_enabled'] = s3_enabled
    work['redis_enabled'] = redis_enabled
    work['fetch_mode'] = fetch_mode
    work['analysis_type'] = analysis_type
    work['iex_datasets'] = iex_consts.DEFAULT_FETCH_DATASETS
    work['debug'] = debug
    work['label'] = 'ticker={}'.format(ticker)

    if analysis_type == 'scn':
        label = 'screener={}'.format(work['ticker'])
        fv_urls = []
        if args.urls:
            fv_urls = str(args.urls).split('|')
        if len(fv_urls) == 0:
            fv_urls = os.getenv('SCREENER_URLS', '').split('|')
        screener_req = api_requests.build_screener_analysis_request(
            ticker=ticker, fv_urls=fv_urls, label=label)
        work.update(screener_req)
        start_screener_analysis(req=work)
    # end of analysis_type
    else:
        if not args.keyname:
            last_close_date = ae_utils.last_close()
            work['s3_key'] = '{}_{}'.format(
                work['ticker'],
                last_close_date.strftime(ae_consts.COMMON_DATE_FORMAT))
            work['redis_key'] = '{}_{}'.format(
                work['ticker'],
                last_close_date.strftime(ae_consts.COMMON_DATE_FORMAT))

        path_to_tasks = 'analysis_engine.work_tasks'
        task_name = ('{}.get_new_pricing_data.get_new_pricing_data'.format(
            path_to_tasks))
        task_res = None
        if ae_consts.is_celery_disabled() or run_offline:
            work['celery_disabled'] = True
            log.debug('starting without celery work={} offline={}'.format(
                ae_consts.ppj(work), run_offline))
            task_res = task_pricing.get_new_pricing_data(work)

            if debug:
                log.info('done - result={} '
                         'task={} status={} '
                         'err={} label={}'.format(
                             ae_consts.ppj(task_res), task_name,
                             ae_consts.get_status(status=task_res['status']),
                             task_res['err'], work['label']))
            else:
                log.info('done - result '
                         'task={} status={} '
                         'err={} label={}'.format(
                             task_name,
                             ae_consts.get_status(status=task_res['status']),
                             task_res['err'], work['label']))
            # if/else debug
        else:
            log.info('connecting to broker={} backend={}'.format(
                broker_url, backend_url))

            # Get the Celery app
            app = get_celery_app.get_celery_app(
                name=__name__,
                auth_url=broker_url,
                backend_url=backend_url,
                path_to_config_module=celery_config_module,
                ssl_options=ssl_options,
                transport_options=transport_options,
                include_tasks=include_tasks)

            log.info('calling task={} - work={}'.format(
                task_name, ae_consts.ppj(work)))
            job_id = app.send_task(task_name, (work, ))
            log.info('calling task={} - success job_id={}'.format(
                task_name, job_id))
Code Example #8
def fetch(ticker=None,
          tickers=None,
          fetch_mode=None,
          iex_datasets=None,
          redis_enabled=True,
          redis_address=None,
          redis_db=None,
          redis_password=None,
          redis_expire=None,
          s3_enabled=True,
          s3_address=None,
          s3_bucket=None,
          s3_access_key=None,
          s3_secret_key=None,
          s3_region_name=None,
          s3_secure=False,
          celery_disabled=True,
          broker_url=None,
          result_backend=None,
          label=None,
          verbose=False):
    """fetch

    Fetch all supported datasets for a stock ``ticker`` or
    a list of ``tickers`` and returns a dictionary. Once
    run, the datasets will all be cached in Redis and archived
    in Minio (S3) by default.

    Python example:

    .. code-block:: python

        from analysis_engine.fetch import fetch
        d = fetch(ticker='NFLX')
        print(d)
        for k in d['NFLX']:
            print('dataset key: {}'.format(k))

    By default, it synchronously automates:

        - fetching all datasets
        - caching all datasets in Redis
        - archiving all datasets in Minio (S3)
        - returning all datasets in a single dictionary

    This was created to reduce the amount of typing in
    Jupyter notebooks. It can also be set up for use with a
    distributed engine using the optional arguments, depending
    on your connectivity requirements.

    .. note:: Please ensure Redis and Minio are running
              before trying to extract tickers

    **Stock tickers to fetch**

    :param ticker: single stock ticker/symbol/ETF to fetch
    :param tickers: optional - list of tickers to fetch

    **(Optional) Data sources, datafeeds and datasets to gather**

    :param fetch_mode: data sources - default is ``all`` (both IEX
        and Yahoo), ``iex`` for only IEX, ``yahoo`` for only Yahoo.
    :param iex_datasets: list of strings for gathering specific `IEX
        datasets <https://iextrading.com/developer/docs/#stocks>`__
        which are set as consts: ``analysis_engine.iex.consts.FETCH_*``.

    **(Optional) Redis connectivity arguments**

    :param redis_enabled: bool - toggle for auto-caching all
        datasets in Redis
        (default is ``True``)
    :param redis_address: Redis connection string format: ``host:port``
        (default is ``localhost:6379``)
    :param redis_db: Redis db to use
        (default is ``0``)
    :param redis_password: optional - Redis password
        (default is ``None``)
    :param redis_expire: optional - Redis expire value
        (default is ``None``)

    **(Optional) Minio (S3) connectivity arguments**

    :param s3_enabled: bool - toggle for auto-archiving on Minio (S3)
        (default is ``True``)
    :param s3_address: Minio S3 connection string format: ``host:port``
        (default is ``localhost:9000``)
    :param s3_bucket: S3 Bucket for storing the artifacts
        (default is ``dev``) which should be viewable on a browser:
        http://localhost:9000/minio/dev/
    :param s3_access_key: S3 Access key
        (default is ``trexaccesskey``)
    :param s3_secret_key: S3 Secret key
        (default is ``trex123321``)
    :param s3_region_name: S3 region name
        (default is ``us-east-1``)
    :param s3_secure: Transmit using TLS encryption
        (default is ``False``)

    **(Optional) Celery worker broker connectivity arguments**

    :param celery_disabled: bool - toggle synchronous mode or publish
        to an engine connected to the `Celery broker and backend
        <https://github.com/celery/celery#transports-and-backends>`__
        (default is ``True`` - synchronous mode without an engine
        or need for a broker or backend for Celery)
    :param broker_url: Celery broker url
        (default is ``redis://0.0.0.0:6379/13``)
    :param result_backend: Celery backend url
        (default is ``redis://0.0.0.0:6379/14``)
    :param label: tracking log label

    **(Optional) Debugging**

    :param verbose: bool - show fetch warnings
        and other debug logging (default is False)

    **Supported environment variables**

    ::

        export REDIS_ADDRESS="localhost:6379"
        export REDIS_DB="0"
        export S3_ADDRESS="localhost:9000"
        export S3_BUCKET="dev"
        export AWS_ACCESS_KEY_ID="trexaccesskey"
        export AWS_SECRET_ACCESS_KEY="trex123321"
        export AWS_DEFAULT_REGION="us-east-1"
        export S3_SECURE="0"
        export WORKER_BROKER_URL="redis://0.0.0.0:6379/13"
        export WORKER_BACKEND_URL="redis://0.0.0.0:6379/14"
    """

    rec = {}

    extract_records = []

    use_tickers = tickers
    if ticker:
        use_tickers = [ticker]
    else:
        if not use_tickers:
            use_tickers = []

    default_iex_datasets = [
        'daily', 'minute', 'quote', 'stats', 'peers', 'news', 'financials',
        'earnings', 'dividends', 'company'
    ]

    if not iex_datasets:
        iex_datasets = default_iex_datasets
    if not fetch_mode:
        fetch_mode = 'all'

    if redis_enabled:
        if not redis_address:
            redis_address = os.getenv('REDIS_ADDRESS', 'localhost:6379')
        if not redis_password:
            redis_password = os.getenv('REDIS_PASSWORD', None)
        if not redis_db:
            redis_db = int(os.getenv('REDIS_DB', '0'))
        if not redis_expire:
            redis_expire = os.getenv('REDIS_EXPIRE', None)
    if s3_enabled:
        if not s3_address:
            s3_address = os.getenv('S3_ADDRESS', 'localhost:9000')
        if not s3_access_key:
            s3_access_key = os.getenv('AWS_ACCESS_KEY_ID', 'trexaccesskey')
        if not s3_secret_key:
            s3_secret_key = os.getenv('AWS_SECRET_ACCESS_KEY', 'trex123321')
        if not s3_region_name:
            s3_region_name = os.getenv('AWS_DEFAULT_REGION', 'us-east-1')
        if not s3_secure:
            s3_secure = os.getenv('S3_SECURE', '0') == '1'
        if not s3_bucket:
            s3_bucket = os.getenv('S3_BUCKET', 'dev')
    if not broker_url:
        broker_url = os.getenv('WORKER_BROKER_URL', 'redis://0.0.0.0:6379/13')
    if not result_backend:
        result_backend = os.getenv('WORKER_BACKEND_URL',
                                   'redis://0.0.0.0:6379/14')

    if not label:
        label = 'get-latest'

    num_tickers = len(use_tickers)
    last_close_str = get_last_close_str()

    if iex_datasets:
        log.info('{} - getting latest for tickers={} '
                 'iex={}'.format(label, num_tickers, json.dumps(iex_datasets)))
    else:
        log.info('{} - getting latest for tickers={}'.format(
            label, num_tickers))

    for ticker in use_tickers:

        ticker_key = '{}_{}'.format(ticker, last_close_str)

        fetch_req = build_get_new_pricing_request()
        fetch_req['base_key'] = ticker_key
        fetch_req['celery_disabled'] = celery_disabled
        fetch_req['ticker'] = ticker
        fetch_req['label'] = label
        fetch_req['fetch_mode'] = fetch_mode
        fetch_req['iex_datasets'] = iex_datasets
        fetch_req['s3_enabled'] = s3_enabled
        fetch_req['s3_bucket'] = s3_bucket
        fetch_req['s3_address'] = s3_address
        fetch_req['s3_secure'] = s3_secure
        fetch_req['s3_region_name'] = s3_region_name
        fetch_req['s3_access_key'] = s3_access_key
        fetch_req['s3_secret_key'] = s3_secret_key
        fetch_req['s3_key'] = ticker_key
        fetch_req['redis_enabled'] = redis_enabled
        fetch_req['redis_address'] = redis_address
        fetch_req['redis_password'] = redis_password
        fetch_req['redis_db'] = redis_db
        fetch_req['redis_key'] = ticker_key
        fetch_req['redis_expire'] = redis_expire

        log.info('{} - fetching ticker={} last_close={} '
                 'redis_address={} s3_address={}'.format(
                     label, ticker, last_close_str, fetch_req['redis_address'],
                     fetch_req['s3_address']))

        fetch_res = price_utils.run_get_new_pricing_data(work_dict=fetch_req)
        if fetch_res['status'] == SUCCESS:
            log.info('{} - fetched ticker={} '
                     'preparing for extraction'.format(label, ticker))
            extract_req = fetch_req
            extract_records.append(extract_req)
        else:
            log.warning('{} - failed getting ticker={} data '
                        'status={} err={}'.format(
                            label, ticker,
                            get_status(status=fetch_res['status']),
                            fetch_res['err']))
        # end of if worked or not
    # end for all tickers to fetch

    # Extract the fetched datasets from cache

    iex_daily_status = FAILED
    iex_minute_status = FAILED
    iex_quote_status = FAILED
    iex_stats_status = FAILED
    iex_peers_status = FAILED
    iex_news_status = FAILED
    iex_financials_status = FAILED
    iex_earnings_status = FAILED
    iex_dividends_status = FAILED
    iex_company_status = FAILED
    yahoo_news_status = FAILED
    yahoo_options_status = FAILED
    yahoo_pricing_status = FAILED

    iex_daily_df = None
    iex_minute_df = None
    iex_quote_df = None
    iex_stats_df = None
    iex_peers_df = None
    iex_news_df = None
    iex_financials_df = None
    iex_earnings_df = None
    iex_dividends_df = None
    iex_company_df = None
    yahoo_option_calls_df = None
    yahoo_option_puts_df = None
    yahoo_pricing_df = None
    yahoo_news_df = None

    extract_iex = True
    if fetch_mode not in ['all', 'iex']:
        extract_iex = False

    extract_yahoo = True
    if fetch_mode not in ['all', 'yahoo']:
        extract_yahoo = False

    for service_dict in extract_records:
        ticker_data = {}
        ticker = service_dict['ticker']

        extract_req = get_ds_dict(ticker=ticker,
                                  base_key=service_dict.get('base_key', None),
                                  ds_id=label,
                                  service_dict=service_dict)

        if 'daily' in iex_datasets or extract_iex:
            iex_daily_status, iex_daily_df = \
                iex_extract_utils.extract_daily_dataset(
                    extract_req)
            if iex_daily_status != SUCCESS:
                if verbose:
                    log.warning('unable to fetch iex_daily={}'.format(ticker))
        if 'minute' in iex_datasets or extract_iex:
            iex_minute_status, iex_minute_df = \
                iex_extract_utils.extract_minute_dataset(
                    extract_req)
            if iex_minute_status != SUCCESS:
                if verbose:
                    log.warning('unable to fetch iex_minute={}'.format(ticker))
        if 'quote' in iex_datasets or extract_iex:
            iex_quote_status, iex_quote_df = \
                iex_extract_utils.extract_quote_dataset(
                    extract_req)
            if iex_quote_status != SUCCESS:
                if verbose:
                    log.warning('unable to fetch iex_quote={}'.format(ticker))
        if 'stats' in iex_datasets or extract_iex:
            iex_stats_status, iex_stats_df = \
                iex_extract_utils.extract_stats_dataset(
                    extract_req)
            if iex_stats_status != SUCCESS:
                if verbose:
                    log.warning('unable to fetch iex_stats={}'.format(ticker))
        if 'peers' in iex_datasets or extract_iex:
            iex_peers_status, iex_peers_df = \
                iex_extract_utils.extract_peers_dataset(
                    extract_req)
            if iex_peers_status != SUCCESS:
                if verbose:
                    log.warning('unable to fetch iex_peers={}'.format(ticker))
        if 'news' in iex_datasets or extract_iex:
            iex_news_status, iex_news_df = \
                iex_extract_utils.extract_news_dataset(
                    extract_req)
            if iex_news_status != SUCCESS:
                if verbose:
                    log.warning('unable to fetch iex_news={}'.format(ticker))
        if 'financials' in iex_datasets or extract_iex:
            iex_financials_status, iex_financials_df = \
                iex_extract_utils.extract_financials_dataset(
                    extract_req)
            if iex_financials_status != SUCCESS:
                if verbose:
                    log.warning(
                        'unable to fetch iex_financials={}'.format(ticker))
        if 'earnings' in iex_datasets or extract_iex:
            iex_earnings_status, iex_earnings_df = \
                iex_extract_utils.extract_earnings_dataset(
                    extract_req)
            if iex_earnings_status != SUCCESS:
                if verbose:
                    log.warning(
                        'unable to fetch iex_earnings={}'.format(ticker))
        if 'dividends' in iex_datasets or extract_iex:
            iex_dividends_status, iex_dividends_df = \
                iex_extract_utils.extract_dividends_dataset(
                    extract_req)
            if iex_dividends_status != SUCCESS:
                if verbose:
                    log.warning(
                        'unable to fetch iex_dividends={}'.format(ticker))
        if 'company' in iex_datasets or extract_iex:
            iex_company_status, iex_company_df = \
                iex_extract_utils.extract_company_dataset(
                    extract_req)
            if iex_company_status != SUCCESS:
                if verbose:
                    log.warning(
                        'unable to fetch iex_company={}'.format(ticker))
        # end of iex extracts

        if extract_yahoo:
            yahoo_options_status, yahoo_option_calls_df = \
                yahoo_extract_utils.extract_option_calls_dataset(
                    extract_req)
            yahoo_options_status, yahoo_option_puts_df = \
                yahoo_extract_utils.extract_option_puts_dataset(
                    extract_req)
            if yahoo_options_status != SUCCESS:
                if verbose:
                    log.warning(
                        'unable to fetch yahoo_options={}'.format(ticker))
            yahoo_pricing_status, yahoo_pricing_df = \
                yahoo_extract_utils.extract_pricing_dataset(
                    extract_req)
            if yahoo_pricing_status != SUCCESS:
                if verbose:
                    log.warning(
                        'unable to fetch yahoo_pricing={}'.format(ticker))
            yahoo_news_status, yahoo_news_df = \
                yahoo_extract_utils.extract_yahoo_news_dataset(
                    extract_req)
            if yahoo_news_status != SUCCESS:
                if verbose:
                    log.warning('unable to fetch yahoo_news={}'.format(ticker))
        # end of yahoo extracts

        ticker_data['daily'] = iex_daily_df
        ticker_data['minute'] = iex_minute_df
        ticker_data['quote'] = iex_quote_df
        ticker_data['stats'] = iex_stats_df
        ticker_data['peers'] = iex_peers_df
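        # IEX news is stored under 'news1'; 'news' below holds yahoo news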
        ticker_data['news1'] = iex_news_df
        ticker_data['financials'] = iex_financials_df
        ticker_data['earnings'] = iex_earnings_df
        ticker_data['dividends'] = iex_dividends_df
        ticker_data['company'] = iex_company_df
        ticker_data['calls'] = yahoo_option_calls_df
        ticker_data['puts'] = yahoo_option_puts_df
        ticker_data['pricing'] = yahoo_pricing_df
        ticker_data['news'] = yahoo_news_df

        rec[ticker] = ticker_data
    # end of for service_dict in extract_records

    return rec
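
A short usage sketch building on the docstring's example (illustrative only; the string dataset names come from default_iex_datasets above):

from analysis_engine.fetch import fetch

# pull only the IEX daily and news datasets for two tickers,
# caching in redis and archiving in minio (s3) with the defaults
res = fetch(
    tickers=['SPY', 'NFLX'],
    fetch_mode='iex',
    iex_datasets=['daily', 'news'])

for ticker, datasets in res.items():
    # each value maps dataset names to pandas dataframes (or None)
    print(ticker, sorted(datasets))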
Code Example #9
def test_err_get_new_pricing(self):
    """test_err_get_new_pricing"""
    work = build_get_new_pricing_request()
    work['label'] = 'test_err_get_new_pricing'
    res = run_get_new_pricing_data(work)
    self.assertEqual(res['status'], ERR)