Example #1
def read_daily_option_quote(
    browser: ChromeDriver,
    symbol: str,
    option_type: str,
    strike_price: float,
    exp_date: int,
    use_barchart: bool = False,
    suppress_log: bool = False,
) -> DailyOptionQuote:
    option_quote = DailyOptionQuote(symbol, option_type, strike_price,
                                    exp_date, int(get_date_str()))
    if use_barchart:
        url = "https://www.barchart.com/stocks/quotes/"
        url += f"{symbol}/options?expiration={str(exp_date)}&moneyness=allRows"
        eclass = "bc-options-quotes"
        web_data = browser.download_data(
            url=url,
            wait_base=1,
            element_class=eclass,
            suppress_log=suppress_log,
        )
    else:
        datetime_diff = get_date(str(exp_date)) - get_datetime_start()
        date_url = int(datetime_diff.total_seconds())
        url = f"https://finance.yahoo.com/quote/{symbol}/options?date={date_url}"
        eid = 'Col1-1-OptionContracts-Proxy'
        web_data = browser.download_data(
            url=url,
            wait_base=1,
            element_id=eid,
            suppress_log=suppress_log,
        )
    parse_daily_option_quote(option_quote, web_data.splitlines(), symbol,
                             option_type, strike_price, exp_date, use_barchart)
    return option_quote
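A note on the Yahoo Finance branch above: the date query parameter appears to be the expiration date expressed in epoch seconds, computed from the repository helpers get_date() and get_datetime_start(). A minimal stand-alone sketch, assuming get_date() parses a YYYYMMDD string and get_datetime_start() returns the Unix epoch (both are assumptions about helpers not shown here):

from datetime import datetime, timezone

def expiration_to_yahoo_date(exp_date: int) -> int:
    # parse the YYYYMMDD expiration and express it as seconds since the Unix epoch
    expiration = datetime.strptime(str(exp_date), "%Y%m%d").replace(tzinfo=timezone.utc)
    epoch = datetime(1970, 1, 1, tzinfo=timezone.utc)
    return int((expiration - epoch).total_seconds())

expiration_to_yahoo_date(20220121)  # 1642723200 -> .../options?date=1642723200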
Example #2
def read_daily_option_info(
    browser: ChromeDriver,
    symbol: str,
    use_barchart: bool = True,
    suppress_all_log: bool = False,
    suppress_sub_log: bool = False,
) -> DailyOptionInfo:
    option_info = DailyOptionInfo(symbol, int(get_date_str()))
    if use_barchart:
        all_exp_dates = read_exp_dates(
            browser,
            symbol,
            suppress_log=suppress_all_log,
        )
        eclass = 'bc-futures-options-quotes-totals'
        pre_eclass = 'bc-datatable'
        for exp_date in all_exp_dates:
            url = "https://www.barchart.com/stocks/quotes/"
            url += f"{symbol}/options?expiration={str(exp_date)}"
            web_data = browser.download_data(
                url=url,
                wait_base=1,
                pre_element_class=pre_eclass,
                element_class=eclass,
                suppress_log=suppress_sub_log,
            )
            parse_daily_option_info(option_info, web_data.splitlines())
    else:
        logger.error('other data sources not implemented')
        raise ValueError(f"use_barchart={use_barchart}")
    return option_info
Example #3
def read_info(
    symbol_queue: mp.Queue,
    info_queue: mp.Queue,
):
    pname = mp.current_process().name
    logger.info(f"History builder worker-{pname} starting")
    num_items = 0
    with ChromeDriver() as browser:
        while True:
            try:
                symbol = symbol_queue.get(block=True, timeout=600)
            except Exception as e:
                logger.warning(f"Unexpected expcetion: {str(e)}")
                break
            if symbol is None:
                # propagate the shutdown sentinel so sibling workers also stop
                symbol_queue.put(None, block=True, timeout=60)
                break
            num_items += 1
            try:
                daily_option_info = read_daily_option_info(
                    browser,
                    symbol,
                    suppress_all_log=False,
                    suppress_sub_log=True,
                )
            except Exception as e:
                logger.info(f"Encountered exception ({str(e)}) on {symbol}")
                continue
            else:
                info_queue.put(daily_option_info)
    info_queue.put(None)
    logger.info(f"History builder worker-{pname} processed {num_items} items")
Example #4
def evaluate_option_activity(
    in_queue: mp.Queue,
    out_queue: mp.Queue,
    cache_queue: mp.Queue,
):
    pname = mp.current_process().name
    logger.info(f"OA evaluation worker-{pname} starting")
    cache = {}
    num_items = 0
    with ChromeDriver() as browser:
        while True:
            try:
                raw_data = in_queue.get(block=True, timeout=600)
            except Exception as e:
                logger.warning(f"Unexpected exception: {str(e)}")
                break
            if raw_data is None:
                break
            num_items += 1
            option_activity: OptionActivity = raw_data[0]
            avg_option_info: AvgOptionInfo = raw_data[1]
            # filtering
            if filter(option_activity, avg_option_info, browser, cache):
                out_queue.put(option_activity)
    out_queue.put(None)
    cache_queue.put(cache)
    logger.info(f"OA evaluation worker-{pname} processed {num_items} items")
Example #5
def read_effect(
    oa_queue: mp.Queue,
    effect_queue: mp.Queue,
):
    with ChromeDriver() as browser:
        while True:
            try:
                option_activity = oa_queue.get(block=True, timeout=600)
            except Exception as e:
                logger.warning(f"Unexpected expcetion on get: {str(e)}")
                break
            if option_activity is None:
                # propagate the shutdown sentinel so sibling workers also stop
                oa_queue.put(None, block=True, timeout=60)
                break
            option_quote = read_daily_option_quote(
                browser,
                option_activity.get("symbol"),
                option_activity.get("option_type"),
                option_activity.get("strike_price"),
                option_activity.get("exp_date"),
                use_barchart=True,
                suppress_log=False,
            )
            stock_quote = read_stock_quote(
                browser,
                option_activity.get("symbol"),
                suppress_log=False,
            )
            option_effect = OptionEffect(daily_option_quote=option_quote,
                                         daily_stock_quote=stock_quote)
            effect_queue.put(option_effect, block=True, timeout=600)
        effect_queue.put(None, block=True, timeout=600)
Example #6
def read_option_activity(browser: ChromeDriver,
                         save_file=False,
                         folder='logs') -> list:
    retry_timeout = 4
    url = 'https://www.barchart.com/options/unusual-activity/stocks?page=all'
    eid = 'main-content-column'
    #buttons = ['a.show-all']
    buttons = None
    option_activity_list = []
    num_retry = 0
    while num_retry < retry_timeout:
        num_retry += 1
        try:
            web_data = browser.download_data(url=url,
                                             wait_base=num_retry,
                                             button_css=buttons,
                                             element_id=eid)
        except Exception as e:
            logger.error(f'error {str(e)} (retry={num_retry}/{retry_timeout})')
            time.sleep(num_retry)
            continue
        else:
            option_activity_list = parse_option_activity(web_data.splitlines())
            if len(option_activity_list) > 0:
                logger.info(
                    'retrieved option activity list: # items={:d}'.format(
                        len(option_activity_list)))
                break
            else:
                logger.warning(
                    'option activity list empty? retry={}/{}'.format(
                        num_retry, retry_timeout))
                time.sleep(num_retry)
                continue
    if not option_activity_list:
        # all retries exhausted without usable data
        return []
    # save a copy
    if save_file:
        # try to remove duplicates if folder exists
        if os.path.exists(folder):
            today_str = get_date_str()
            for item in os.listdir(folder):
                if item.startswith('OA_%s' % (today_str)):
                    os.remove(os.path.join(folder, item))
        else:
            # otherwise create folder
            os.makedirs(folder)
        # write new one
        filename = os.path.join(folder, f'OA_{get_datetime_str()}.txt.gz')
        with openw(filename, 'wt') as fout:
            fout.write('\n'.join(option_activity_list))
        logger.info(f'{get_time_log()} save option activity to {filename}')
    return option_activity_list
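The file is written through openw, which (given the .txt.gz filename) appears to be a gzip-aware open helper defined elsewhere in the repository. A minimal stand-in under that assumption:

import gzip

def openw(path: str, mode: str = "wt"):
    # assumed behavior: transparently gzip-compress files whose name ends in .gz
    return gzip.open(path, mode) if path.endswith(".gz") else open(path, mode)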
Example #7
def read_stock_quote(
    browser: ChromeDriver,
    symbol: str,
    suppress_log: bool = False,
) -> DailyStockQuote:
    stock_quote = DailyStockQuote(symbol, int(get_date_str()))
    url = f"https://finance.yahoo.com/quote/{symbol}"
    eid = "quote-summary"
    web_data = browser.download_data(
        url=url,
        element_id=eid,
        suppress_log=suppress_log,
    )
    parse_stock_quote(stock_quote, web_data.splitlines())
    return stock_quote
Example #8
        if daily_stock_quote:
            for k in DailyStockQuote.fields.keys():
                if k in OptionEffect.fields:
                    self.__set(k, daily_stock_quote.get(k))

    def set(self, key, value):
        raise RuntimeError("Should not be used")

    def __set(self, key, value):
        if key not in OptionEffect.fields:
            raise KeyError(f"Invalid key={key}")
        self._values[key] = value


if __name__ == '__main__':
    metadata_dir = setup_metadata_dir()
    setup_logger(__file__)
    logger.setLevel(logging.DEBUG)
    logging.getLogger("web_chrome_driver").setLevel(logging.DEBUG)
    # test from online reading
    with ChromeDriver() as browser:
        option_quote = read_daily_option_quote(browser,
                                               "AMD",
                                               "Call",
                                               60.0,
                                               20220121,
                                               use_barchart=True)
        stock_quote = read_stock_quote(browser, "AMD")
        option_effect = OptionEffect(daily_option_quote=option_quote,
                                     daily_stock_quote=stock_quote)
        print(json.dumps(option_effect.__dict__, indent=4))
Example #9
def hunt_option_activity(
    max_oa: int = 0,
    option_activity_file: typing.Optional[str] = None,
) -> typing.Mapping[str, DailyOptionInfo]:
    logger.info("================================================")
    logger.info(f"{get_time_log()} Hunt or to be hunted !!!")
    # synchronized queues
    raw_oa_queues = [mp.Queue(maxsize=1000) for _ in range(mp.cpu_count())]
    filtered_oa_queue = mp.Queue(maxsize=1000)
    info_cache_queue = mp.Queue(maxsize=100)
    # pull option activity list
    option_activity_list = []
    if option_activity_file is None:
        with ChromeDriver() as browser:
            option_activity_list = read_option_activity(
                browser,
                save_file=True,
                folder="records",
            )
    else:
        with openw(option_activity_file, "rt") as fp:
            option_activity_list = fp.readlines()
    if max_oa > 0 and max_oa < len(option_activity_list):
        option_activity_list = option_activity_list[0:max_oa]
    # launch down-stream workers
    worker_procs = [
        mp.Process(
            target=evaluate_option_activity,
            args=(
                raw_oa_queues[idx],
                filtered_oa_queue,
                info_cache_queue,
            ),
            name=f"ProcWorker{idx}",
        ) for idx in range(len(raw_oa_queues))
    ]
    _ = [p.start() for p in worker_procs]
    # scan UOA list and enqueue candidates
    with DBMS(get_db_file()) as db:
        for line in option_activity_list:
            option_activity = OptionActivity()
            option_activity.from_activity_str(line)
            # lookup volume info
            symbol = option_activity.get('symbol')
            avg_option_info = query_avg_option_info(db, symbol)
            if avg_option_info.get("count") == 0:
                # add to tracking list
                add_to_symbol_table(db, symbol)
            # add to queue for processing
            worker_idx = hash_symbol(symbol) % len(raw_oa_queues)
            raw_oa_queues[worker_idx].put(
                [option_activity, avg_option_info],
                block=True,
            )
        _ = [x.put(None) for x in raw_oa_queues]
        _ = [p.join() for p in worker_procs]
        logger.info("Hunting done !!!")
        return process_unusual_option_activity(filtered_oa_queue,
                                               info_cache_queue,
                                               len(raw_oa_queues), db)
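One detail in the dispatch loop worth calling out: symbols are sharded to workers by hash_symbol(symbol) % len(raw_oa_queues), so a given symbol always lands on the same evaluate_option_activity process and that worker's local cache stays effective. hash_symbol is defined elsewhere in the repository; a stable stand-in (using a digest instead of Python's per-process randomized hash()) might look like:

import hashlib

def hash_symbol_example(symbol: str) -> int:
    # stable across runs, unlike built-in hash() under PYTHONHASHSEED randomization
    return int(hashlib.md5(symbol.encode("utf-8")).hexdigest(), 16)

num_queues = 4
worker_idx = hash_symbol_example("AMD") % num_queues  # same symbol -> same worker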