Beispiel #1
0
 def load_eoddata_files(self, sd, ed, stks='', batch=False):
     """Load AMEX/NASDAQ/NYSE eoddata files for each business day in [sd, ed].

     Stops (returns) at the first date for which any exchange file is
     missing.  After each fully loaded date, records that date in the
     'analyses' table under the 'eod_datafeed' key.
     """
     print('Loading eoddata files...')
     templates = [
         os.path.join(self.in_dir, '{0:s}_{{0:s}}.txt'.format(xchg))
         for xchg in ['AMEX', 'NASDAQ', 'NYSE']
     ]
     crt_dt = sd
     while crt_dt <= ed:
         print('eoddata: {0:s}'.format(crt_dt))
         dt_compact = crt_dt.replace('-', '')
         file_names = [tmpl.format(dt_compact) for tmpl in templates]
         missing = [fn for fn in file_names if not os.path.isfile(fn)]
         for fn in missing:
             print('Could not find file {0:s}'.format(fn))
         if missing:
             print('Data is missing for date {0:s}. Exiting.'.format(crt_dt))
             return
         for fn in file_names:
             self.load_eoddata_file(fn, crt_dt, dt_compact, stks, batch=batch)
         stxdb.db_write_cmd("update analyses set dt='{0:s}' where "
                            "analysis='eod_datafeed'".format(crt_dt))
         crt_dt = stxcal.next_busday(crt_dt)
     print('Loaded eoddata files')
Beispiel #2
0
 def __init__(self, test=False):
     """Set up data directories, atem commands, and the US exchange.

     - Create US exchange, if not already there
     - Define the directories from which we will be reading the data
     - Specify the atem commands used to get stock info and prices
     """
     env_var = 'NORGATE_TEST_DIR' if test else 'NORGATE_DIR'
     self.root_dir = os.getenv(env_var)
     self.upload_dir = '/tmp'
     # create the US exchange record on first run
     if not stxdb.db_read_cmd("select * from exchanges where name='US'"):
         stxdb.db_write_cmd("insert into exchanges values('US')")
     self.input_dirs = [
         'AMEX', 'Indices', 'NASDAQ', 'NYSE', 'NYSE Arca',
         'Delisted Securities'
     ]
     self.atem_prices_cmd = [
         'atem', '-o', '{0:s}/prices.txt'.format(self.upload_dir),
         '--format=symbol,date,open,high,low,close,volume,openint',
         '--float-openint'
     ]
     self.atem_info_cmd = [
         'atem', '-o', '{0:s}/names.txt'.format(self.upload_dir),
         '--format=symbol,long_name'
     ]
     # silence pandas SettingWithCopyWarning for slice assignments
     pd.options.mode.chained_assignment = None
Beispiel #3
0
 def load_mypivots_splits(cls, fname='/home/cma/mypivots_splits.csv'):
     """Insert split records from a mypivots CSV into the dividends table.

     Only rows whose ticker appears in cls.test_stx are inserted.
     """
     with open(fname, 'r') as f:
         lines = f.readlines()
     print('self.test_stx={0:s}'.format(cls.test_stx))
     # strip tuple punctuation from the test_stx repr, leaving a CSV list
     cleaned = cls.test_stx
     for ch in ('(', ')', ' ', "'"):
         cleaned = cleaned.replace(ch, '')
     stx_lst = cleaned.split(',')
     for line in lines:
         tokens = line.split()
         if tokens[0] in stx_lst:
             stxdb.db_write_cmd("insert into dividends values "
                                "('{0:s}', '{1:s}', {2:.4f}, 0)".format(
                                 tokens[0], tokens[1], float(tokens[2])))
Beispiel #4
0
 def parseeodline(self, line):
     """Parse one CSV end-of-day line and upsert it into the EOD table.

     Expected line format: stk,<ignored>,yyyymmdd,o,h,l,c,v,oi.
     Prices are stored as integer cents.  Raises Exception for a
     non-business day, a zero-volume stock, or a ticker longer than 8
     characters; silently skips non-'.US' tickers and records whose
     prices would overflow a 32-bit signed integer.
     """
     stk, _, dt, o, h, l, c, v, oi = line.split(',')
     # look only at the US stocks, for the time being
     if not stk.endswith('.US'):
         return
     # convert yyyymmdd into yyyy-mm-dd
     dt = '{0:s}-{1:s}-{2:s}'.format(dt[0:4], dt[4:6], dt[6:8])
     if not stxcal.is_busday(dt):
         raise Exception('{0:s} is not a business day'.format(dt))
     # store prices as integer cents
     o = int(100 * float(o))
     h = int(100 * float(h))
     l = int(100 * float(l))
     c = int(100 * float(c))
     # Make sure o and c are in the interval [l, h]
     o = o if o <= h and o >= l else (h if o > h else l)
     c = c if c <= h and c >= l else (h if c > h else l)
     # skip records that would overflow a 32-bit signed integer column
     if o >= 2147483647 or h >= 2147483647 or l >= 2147483647 or \
        c >= 2147483647:
         return
     v, oi = int(v), int(oi)
     if stk.endswith('.US'):  # process stock tickers, volume must be > 0
         # strip the '.US' suffix and normalize ticker punctuation
         stk = stk[:-3].replace("-.", ".P.").replace("_", ".").replace(
             '-', '.')
         if v == 0:
             raise Exception('Zero volume for stock')
         if len(stk) > 8:
             raise Exception('Ticker {0:s} too long'.format(stk))
         # volumes stored in thousands, minimum 1
         v = v // 1000
         if v == 0:
             v = 1
     # NOTE(review): the branches below are unreachable -- the early
     # return above filters out every non-'.US' ticker, so the 'if'
     # branch above always runs.  Kept as-is; confirm before removing.
     elif stk.endswith('.B'):  # multiply bond prices by 10000
         o, h, l, c = self.multiply_prices(o, h, l, c, 10000)
     elif stk.endswith('6.F'):  # multiply currency future prices by 10000
         o, h, l, c = self.multiply_prices(o, h, l, c, 10000)
     elif stk in ['HO.F', 'NG.F', 'RB.F']:  # express prices in cents
         o, h, l, c = self.multiply_prices(o, h, l, c, 100)
     elif stk.startswith('^'):  # divide index volumes by 1000
         v = 1 if v == 0 else v // 1000
     elif '.' not in stk and 'XAG' not in stk and 'XAU' not in stk:
         # multiply FX/Money Market prices by 10000
         o, h, l, c = self.multiply_prices(o, h, l, c, 10000)
     # all tickers ending in .F are futures, except the LME tickers
     v = 1 if v == 0 else v
     # if stk not in db_stx:
     #     insert_stx = "INSERT INTO equities VALUES "\
     #                  "('{0:s}', '', 'US Stocks', 'US')".format(stk)
     #     stxdb.db_write_cmd(insert_stx)
     # upsert: on a duplicate (stk, dt) only volume/open-interest change
     db_cmd = "insert into {0:s} values('{1:s}','{2:s}',{3:d},{4:d},"\
         "{5:d},{6:d},{7:d},{8:d}) on conflict (stk, dt) do update "\
         "set v={9:d}, oi={10:d}".format(
             self.eod_tbl, stk, dt, o, h, l, c, v, oi, v, oi)
     stxdb.db_write_cmd(db_cmd)
Beispiel #5
0
 def remove_invalid_days(self):
     """Delete a fixed list of known-bad 2002 dates from the spot and
     option tables."""
     bad_dates = "('2002-02-01', '2002-02-04', '2002-02-05', "\
                 "'2002-02-06', '2002-02-07', '2002-05-30', "\
                 "'2002-05-31', '2002-06-14', '2002-06-17', "\
                 "'2002-12-02', '2002-12-03', '2002-12-04', "\
                 "'2002-12-05', '2002-12-06', '2002-12-09', "\
                 "'2002-12-10')"
     print('{0:s}: removing invalid days from opt_spots and opts tables.'.
           format(stxcal.print_current_time()))
     # same delete is applied to both tables
     for tbl in [self.spot_tbl, self.opt_tbl]:
         stxdb.db_write_cmd('delete from {0:s} where dt in {1:s}'.format(
             tbl, bad_dates))
         print('{0:s}: removed invalid days from {1:s} table.'.format(
             stxcal.print_current_time(), tbl))
Beispiel #6
0
 def upload_prices(self, in_dir, stx):
     """Upload atem-generated prices for the given stocks into the DB.

     Reads {upload_dir}/prices.txt, normalizes ticker symbols, adjusts
     each stock's prices for splits, records the splits in the
     dividends table, and bulk-uploads the adjusted rows into 'eods'.

     :param in_dir: input directory (not used directly here)
     :param stx: iterable of ticker symbols to process
     """
     df = pd.read_csv('{0:s}/prices.txt'.format(self.upload_dir),
                      sep='\t',
                      header=0)
     # use literal (non-regex) replacement: with the pre-2.0 pandas
     # default regex=True, the '$' pattern is an end-of-string anchor
     # and would append '^' to EVERY symbol instead of replacing '$'
     df['symbol'] = df['symbol'].str.replace('_', '.', regex=False)
     df['symbol'] = df['symbol'].str.replace('-', '.', regex=False)
     df['symbol'] = df['symbol'].str.replace('$', '^', regex=False)
     for stk in stx:
         stk_df = df.query("symbol=='{0:s}'".format(stk))
         stk_df, splits = self.get_and_adjust_for_splits(stk_df)
         # record each split so it is not re-applied on a later run
         for dt, ratio in splits.items():
             stxdb.db_write_cmd(
                 "INSERT INTO dividends VALUES ('{0:s}','{1:s}',{2:f},0) ON"
                 " CONFLICT (stk, dt) DO NOTHING".format(stk, dt, ratio))
         fname = '{0:s}/eod_upload.txt'.format(self.upload_dir)
         stk_df.to_csv(fname, sep='\t', header=False, index=False)
         stxdb.db_upload_file(fname, 'eods')
Beispiel #7
0
def gen_cal(start_date='1984-12-31', end_date='2025-12-31'):
    """Populate the calendar table with encoded day numbering.

    For each calendar date in [start_date, end_date], stores the integer
    (busday_num << 16) | day_num, negated when the date is not a
    business day.
    """
    busday_cal = get_cal(start_date, end_date)
    crt = np.datetime64(start_date)
    last = np.datetime64(end_date)
    one_day = np.timedelta64(1, 'D')
    day_num, busday_num = -1, -1
    while crt <= last:
        day_num += 1
        is_bday = np.is_busday(crt, busdaycal=busday_cal)
        if is_bday:
            busday_num += 1
        # non-business days get the negated encoding
        sign = 1 if is_bday else -1
        encoded = sign * ((busday_num << 16) | day_num)
        stxdb.db_write_cmd("INSERT INTO calendar VALUES ('{0:s}', {1:d})".
                           format(str(crt), encoded))
        # progress report every 1000 calendar days
        if day_num % 1000 == 0:
            print('Inserted {0:s}'.format(str(crt)))
        crt += one_day
Beispiel #8
0
def apply_adjustments(adj_fname):
    """Execute the SQL statements listed in adj_fname, one per line.

    Lines already starting with '#' are skipped.  Each statement that
    executes successfully is rewritten prefixed with '# ' so a re-run
    skips it; failed statements are left unchanged and the error is
    printed.  The file is rewritten in place.
    """
    with open(adj_fname, 'r+') as f:
        lines = f.readlines()
        write_lines = []
        for line in lines:
            if line.startswith('#'):
                write_lines.append(line.strip())
                continue
            sql = line.strip()
            try:
                stxdb.db_write_cmd(sql)
                # mark as applied so the statement is skipped next time
                sql = '# ' + sql
            # narrow except: the original bare 'except:' would also
            # swallow KeyboardInterrupt/SystemExit
            except Exception as ex:
                print('Failed to execute {0:s}\n  Error: {1:s}'.
                      format(sql, str(ex)))
            write_lines.append(sql)
        f.seek(0)
        f.write('\n'.join(write_lines))
        f.truncate()
Beispiel #9
0
 def upload_splits(self):
     """Upload the split/dividend records in self.fname into the DB.

     Ensures every ticker referenced in the file has an equities
     record, then bulk-uploads the whole file into 'dividends'.
     """
     with open(self.fname, 'r') as f:
         lines = f.readlines()
     # dedupe lines, then collect the distinct tickers (first TAB field)
     tickers = {}
     for line in set(lines):
         tickers[line.split('\t')[0].strip()] = ''
     db_stx = {x[0]: 0 for x in stxdb.db_read_cmd("select * from equities")}
     print('len(dct) = {0:d}, len(db_stx) = {1:d}'.
           format(len(tickers), len(db_stx)))
     # create placeholder equities records for unknown tickers
     for stk in tickers:
         if stk not in db_stx:
             insert_stx = "INSERT INTO equities VALUES "\
                 "('{0:s}', '', 'US Stocks', 'US')".format(stk)
             print('insert_stx = {0:s}'.format(insert_stx))
             stxdb.db_write_cmd(insert_stx)
     try:
         stxdb.db_upload_file(self.fname, 'dividends', '\t')
     except Exception as ex:
         print('Failed to upload {0:s}: {1:s}'.format(self.fname, str(ex)))
Beispiel #10
0
 def upload_names(self, in_dir):
     """Load ticker -> long-name mappings from names.txt into equities.

     Tickers longer than 16 characters are reported and skipped.
     Returns the collection of tickers that were processed.
     """
     with open('{0:s}/names.txt'.format(self.upload_dir), 'r') as f:
         lines = f.readlines()
     dct = {}
     # skip the header line; normalize ticker punctuation
     for line in lines[1:]:
         tokens = line.strip().split('\t')
         ticker = tokens[0].replace('_', '.').replace('-', '.')
         ticker = ticker.replace('$', '^')
         if len(ticker) > 16:
             print('This ticker is too long: {0:s}'.format(ticker))
         else:
             dct[ticker] = tokens[1]
     print('{0:s}: processing {1:d} stocks'.format(in_dir, len(dct)))
     print('{0:s}'.format(','.join(sorted(dct.keys()))))
     # NOTE(review): 'US_Stocks' (underscore) differs from the
     # 'US Stocks' spelling used elsewhere -- confirm which one the DB
     # actually expects
     equity_type = 'US Indices' if 'Indices' in in_dir else 'US_Stocks'
     for ticker, name in dct.items():
         stxdb.db_write_cmd(
             "INSERT INTO equities VALUES ('{0:s}', '{1:s}', '{2:s}', "
             "'US') ON CONFLICT (ticker) DO NOTHING".format(
                 ticker, name.replace("'", ''), equity_type))
     return dct.keys()
Beispiel #11
0
 def upload_splits(self, splits_file):
     """Read split records from a text file and upsert into the DB.

     Each valid line has at least three whitespace-separated tokens:
     ticker, date, ratio.  The split is recorded on the business day
     preceding the given date.
     """
     print('Uploading stocks from file {0:s}'.format(splits_file))
     with open(splits_file, 'r') as f:
         lines = f.readlines()
     uploaded = 0
     for line in lines:
         tokens = line.split()
         if len(tokens) < 3:
             print('Skipping line {0:s}'.format(line))
             continue
         stk = tokens[0].strip()
         # splits are applied on the business day before the given date
         dt = stxcal.prev_busday(tokens[1].strip())
         ratio = float(tokens[2].strip())
         db_cmd = "insert into {0:s} values('{1:s}','{2:s}',{3:f},0) "\
             "on conflict (stk, dt) do update set ratio={4:f}".format(
             self.divi_tbl, stk, dt, ratio, ratio)
         try:
             stxdb.db_write_cmd(db_cmd)
             uploaded += 1
         except Exception as ex:
             print('Failed to upload split {0:s}, {1:s}, '
                   'error {2:s}'.format(stk, dt, str(ex)))
     print('Successfully uploaded {0:d} out of {1:d} stock splits'.format(
         uploaded, len(lines)))
Beispiel #12
0
 def move_downloaded_options(self, start_yymm, end_yymm):
     """Archive options between two year-months into downloaded_options.

     Copies all records in the options table dated within the whole
     months [start_yymm, end_yymm] into downloaded_options, then
     deletes them from options.

     :param start_yymm: first month, 'YYYY-MM'
     :param end_yymm: last month (inclusive), 'YYYY-MM'
     """
     start_yy, start_mm = start_yymm.split('-')
     start_year = int(start_yy)
     start_month = int(start_mm)
     end_yy, end_mm = end_yymm.split('-')
     end_year = int(end_yy)
     end_month = int(end_mm)
     start_date = '{0:d}-{1:02d}-01'.format(start_year, start_month)
     logging.info('start_date = {0:s}'.format(start_date))
     if not stxcal.is_busday(start_date):
         start_date = stxcal.next_busday(start_date)
     # advance to the first day of the month AFTER end_yymm; the old
     # formula ((end_month + 1) % 12) produced invalid month 00 when
     # end_month was 11
     if end_month == 12:
         end_year += 1
         end_month = 1
     else:
         end_month += 1
     end_date = '{0:d}-{1:02d}-01'.format(end_year, end_month)
     logging.info('end_date = {0:s}'.format(end_date))
     # last business day of the end month
     end_date = stxcal.prev_busday(end_date)
     logging.info('Moving to downloaded_options table all options dated '
                  'between {0:s} and {1:s}'.format(start_date, end_date))
     sql = 'INSERT INTO downloaded_options '\
         '(expiry, und, cp, strike, dt, bid, ask, v, oi) '\
         "SELECT * FROM options WHERE dt BETWEEN '{0:s}' AND '{1:s}' "\
         'ON CONFLICT (expiry, und, cp, strike, dt) DO NOTHING'.format(
         start_date, end_date)
     logging.info('sql cmd: {0:s}'.format(sql))
     stxdb.db_write_cmd(sql)
     logging.info('Moved to downloaded_options table all options dated '
                  'between {0:s} and {1:s}'.format(start_date, end_date))
     logging.info('Removing from options table all options downloaded '
                  'between {0:s} and {1:s}'.format(start_date, end_date))
     sql = "DELETE FROM options WHERE dt BETWEEN '{0:s}' AND '{1:s}'"\
         " ".format(start_date, end_date)
     logging.info('sql cmd: {0:s}'.format(sql))
     stxdb.db_write_cmd(sql)
     logging.info('Removed from options table all options downloaded '
                  'between {0:s} and {1:s}'.format(start_date, end_date))
Beispiel #13
0
    def parse_stooq_new(self, last_db_date):
        """Ingest a newly downloaded stooq daily data file into the DB.

        Reads data_d.txt from the configured download directory, keeps
        only daily US stock records, determines which dates after
        last_db_date are complete enough to upload, validates and
        cleans the rows, bulk-loads them into the eods table through a
        temporary table, and updates the 'eod_datafeed' watermark in
        the analyses table.

        :param last_db_date: last trading date already stored in the DB
        """
        logging.info('Checking if a new stooq file has been downloaded')
        # stooq_file = os.path.join(os.getenv('DOWNLOAD_DIR'), 'data_d.txt')
        download_dir = self.config.get('datafeed', 'download_dir')
        stooq_file = os.path.join(download_dir, 'data_d.txt')
        if not os.path.exists(stooq_file):
            logging.info('No new stooq data file found.  Nothing to do.')
            return
        logging.info('Reading stooq file, renaming columns, getting daily '
                     'US stocks data')
        df = pd.read_csv(stooq_file,
                         dtype={
                             "<TICKER>": "string",
                             "<PER>": "string",
                             "<DATE>": "string",
                             "<TIME>": "string",
                             "<OPEN>": float,
                             "<HIGH>": float,
                             "<LOW>": float,
                             "<CLOSE>": float,
                             "<VOL>": int,
                             "<OPENINT>": int
                         })
        # strip the angle brackets from the '<TICKER>'-style column names
        df.columns = [x[1:-1].lower() for x in df.columns]
        stx_df = df.query('ticker.str.endswith(".US") and per == "D"',
                          engine='python').copy()
        logging.info(
            'Getting {0:d} daily US stocks out of {1:d} records'.format(
                len(stx_df), len(df)))
        # convert yyyymmdd dates into yyyy-mm-dd
        stx_df['date'] = stx_df['date'].astype(str)
        stx_df['date'] = stx_df.apply(lambda r: '{0:s}-{1:s}-{2:s}'.format(
            r['date'][0:4], r['date'][4:6], r['date'][6:8]),
                                      axis=1)
        logging.info('Converted stx_df dates in yyyy-mm-dd format')
        # number of records available for each date in the file
        dates = stx_df.groupby(by='date')['ticker'].count()
        next_date = stxcal.next_busday(last_db_date)
        ix0, num_dates = 0, len(dates)
        logging.info('Data available for {0:d} dates, from {1:s} to {2:s}; DB '
                     'needs data starting from {3:s}'.format(
                         len(dates), dates.index[0],
                         dates.index[num_dates - 1], next_date))
        db_dates = []
        # skip past any dates the DB already has
        while ix0 < num_dates:
            if dates.index[ix0] == next_date:
                break
            ix0 += 1
        # collect consecutive business days with enough records; the 9000
        # threshold presumably rejects incomplete daily files -- confirm
        for ixx in range(ix0, num_dates):
            if dates.index[ixx] == next_date and dates.values[ixx] > 9000:
                db_dates.append(dates.index[ixx])
            else:
                if dates.index[ixx] != next_date:
                    logging.error(f'Missing date {next_date}; got '
                                  f'{dates.index[ixx]} instead')

                if dates.values[ixx] < 9000:
                    logging.error(f'Not enough records ({dates.values[ixx]}) '
                                  f'available for {dates.index[ixx]}')
                break
            next_date = stxcal.next_busday(next_date)

        if not db_dates:
            logging.info('No new data available for processing. Exiting')
            return
        logging.info('Check that there are no time gaps between DB data and '
                     'upload data')
        start_date = stxcal.next_busday(last_db_date)
        num_bdays = stxcal.num_busdays(start_date, db_dates[0])
        # NOTE(review): logging.warn is a deprecated alias of logging.warning
        if num_bdays > 0:
            logging.warn(
                'No data for {0:d} days ({1:s} - {2:s}). Exiting ...'.format(
                    num_bdays, start_date, stxcal.prev_busday(db_dates[0])))
            return
        logging.info('Check that there are no time gaps in the upload data')
        for ixx in range(len(db_dates) - 1):
            if stxcal.next_busday(db_dates[ixx]) != db_dates[ixx + 1]:
                logging.warn('Inconsistent dates {0:s} and {1:s} '
                             'at indexes {2:d} and {3:d}'.format(
                                 db_dates[ixx], db_dates[ixx + 1], ixx,
                                 ixx + 1))

        sel_stx_df = stx_df.query('date in @db_dates').copy()
        logging.info(
            '{0:d}/{1:d} records found for following dates: [{2:s}]'.format(
                len(sel_stx_df), len(stx_df), ', '.join(db_dates)))
        # flag rows with NaNs, zero volume, or open/close outside [low, high]
        sel_stx_df['invalid'] = sel_stx_df.apply(
            lambda r: np.isnan(r['open']) or np.isnan(r['high']) or np.
            isnan(r['low']) or np.isnan(r['close']) or np.isnan(r['vol']) or r[
                'vol'] == 0 or r['open'] > r['high'] or r['open'] < r[
                    'low'] or r['close'] > r['high'] or r['close'] < r['low'],
            axis=1)
        valid_stx_df = sel_stx_df.query('not invalid').copy()
        logging.info('Found {0:d} valid records out of {1:d} records'.format(
            len(valid_stx_df), len(sel_stx_df)))

        def process_row(r):
            # normalize ticker, prices to integer cents, volume to thousands
            stk = r['ticker'][:-3].replace("-.", ".P.").replace("_",
                                                                ".").replace(
                                                                    '-', '.')
            o = int(100 * r['open'])
            hi = int(100 * r['high'])
            lo = int(100 * r['low'])
            c = int(100 * r['close'])
            v = int(r['vol'])
            v = v // 1000
            if v == 0:
                v = 1
            lst = [stk, o, hi, lo, c, v]
            return pd.Series(lst)

        valid_stx_df[['ticker', 'open', 'high', 'low', 'close', 'vol']] = \
            valid_stx_df.apply(process_row, axis=1)
        valid_stx_df['openint'] = 2
        valid_stx_df.drop(columns=['per', 'time', 'invalid'],
                          axis=1,
                          inplace=True)
        valid_stx_df.columns = ['stk', 'dt', 'o', 'hi', 'lo', 'c', 'v', 'oi']

        # bulk-upload through a temporary table so existing rows are upserted
        with closing(stxdb.db_get_cnx().cursor()) as crs:
            sql = 'CREATE TEMPORARY TABLE temp_table ('\
                'stk VARCHAR(16) NOT NULL, '\
                'dt DATE NOT NULL, '\
                'o INTEGER NOT NULL, '\
                'hi INTEGER NOT NULL, '\
                'lo INTEGER NOT NULL, '\
                'c INTEGER NOT NULL, '\
                'v INTEGER, '\
                'oi INTEGER, '\
                'PRIMARY KEY(stk, dt))'
            crs.execute(sql)
            logging.info('Created temporary table')
            upload_data = valid_stx_df.values.tolist()
            execute_values(
                crs, 'INSERT INTO temp_table '
                '(stk, dt, o, hi, lo, c, v, oi) VALUES %s', upload_data)
            logging.info('Uploaded dataframe into temporary table')
            stxdb.db_write_cmd(
                'INSERT INTO eods (stk, dt, o, hi, lo, c, v, oi) '
                'SELECT * FROM temp_table ON CONFLICT (stk, dt) DO '
                'UPDATE SET o = EXCLUDED.o, hi = EXCLUDED.hi, '
                'lo = EXCLUDED.lo, c = EXCLUDED.c, v = EXCLUDED.v, '
                'oi = EXCLUDED.oi')
            logging.info('Uploaded data into eods table')
        last_upload_date = valid_stx_df['dt'].max()
        stxdb.db_write_cmd("UPDATE analyses SET dt='{0:s}' WHERE "
                           "analysis='eod_datafeed'".format(last_upload_date))
        logging.info('Updated latest eod datafeed date {0:s} in DB'.format(
            last_upload_date))
        self.rename_stooq_file(dates.index[0], dates.index[num_dates - 1])
Beispiel #14
0
def get_etf_holdings():
    """Populate the etfs and stk_etfs tables.

    Reads the ETF list from ~/reports/etf_list.txt into the etfs table,
    then scrapes each fund's holdings page on zacks.com and records the
    (stock, etf) pairs in stk_etfs.  Sleeps 2 seconds between requests.
    """
    filename = os.path.join(os.getenv('HOME'), 'reports', 'etf_list.txt')
    with open(filename, 'r') as f:
        text = f.read()
        lines = text.split('\n')
    # each valid line: ticker, name, category, ... (TAB-separated)
    for line in lines:
        tokens = line.split('\t')
        if len(tokens) > 4:
            etf_ticker = tokens[0]
            etf_name = tokens[1].replace("'", '')
            etf_category = tokens[2]
            stxdb.db_write_cmd(
                f"INSERT INTO etfs VALUES('{etf_ticker}', '{etf_name}', "
                f"'{etf_category}') ON CONFLICT (ticker) DO NOTHING"
            )
    # begin - parse the holdings info for each fund
    etf_list = stxdb.db_read_cmd('select ticker from etfs')
    num_etfs = len(etf_list)
    print(f'Getting data for {num_etfs} ETFs')
    # spoof a browser user agent (presumably required by zacks.com)
    headers = requests.utils.default_headers()
    headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:88.0) '\
        'Gecko/20100101 Firefox/88.0'
    # NOTE(review): dead initialization -- etf_name is overwritten on the
    # first loop iteration below
    etf_name = 'XLE'
    for i, etf_record in enumerate(etf_list):
        etf_name = etf_record[0]
        req = requests.get(
            f"https://www.zacks.com/funds/etf/{etf_name}/holding",
            headers=headers
        )
        if req.status_code != 200:
            print(f'Request for ETF {etf_name} failed with status '
                  f'{req.status_code}, error: {req.text}')
            continue
        lines = req.text.split('\n')
        # print(f'The request response has {len(lines)} lines')
        # find the javascript line that embeds the holdings data
        holdings_line = ''
        for line in lines:
            if line.startswith('etf_holdings.formatted_data'):
                holdings_line = line
        if not holdings_line:
            print(f'No holdings line found for {etf_name}, skipping...')
            continue
        # strip the JS prefix/suffix and split into per-holding chunks
        holdings_tokens = holdings_line[34: -5].split(' ] ,  [ ')
        # print(f'There are {len(holdings_tokens)} holdings tokens')
        hold_list = []
        for holding_row in holdings_tokens:
            # print(f'holding_row = {holding_row}')
            holding_tokens = holding_row.split(', ')
            # print(f'There are {len(holding_tokens)} holding tokens')
            if len(holding_tokens) < 2:
                continue
            ticker_token = holding_tokens[1]
            # print(f'Ticker token = {ticker_token}')
            # the ticker is embedded in an anchor tag's rel= attribute
            ticker_index = ticker_token.find('rel=')
            # print(f'Ticker index = {ticker_index}')
            if ticker_index == -1:
                continue
            ticker_tokens = ticker_token[ticker_index:].split('\\"')
            # print(f'There are {len(ticker_tokens)} ticker tokens')
            if len(ticker_tokens) >= 2:
                ticker = ticker_tokens[1]
                hold_list.append(ticker)
                stxdb.db_write_cmd(
                    f"INSERT INTO stk_etfs VALUES('{ticker}', '{etf_name}') "
                    f"ON CONFLICT(stk, etf) DO NOTHING"
                )
        print(f'ETF {etf_name} has {len(hold_list)} equity holdings: '
              f'{hold_list}')
        if i > 0 and (i % 100 == 0 or i == num_etfs - 1):
            print(f'Got data for {i} out of {num_etfs} ETFS')
        # throttle requests
        time.sleep(2)
Beispiel #15
0
 def test_5_drop_table(self):
     """Drop the test table and verify that no tables remain."""
     stxdb.db_write_cmd(self.sql_drop_tbl)
     remaining = stxdb.db_read_cmd(self.sql_tbls)
     self.assertEqual(len(remaining), 0)
Beispiel #16
0
 def get_quote(self, idx, sdt, edt):
     """Fetch daily quotes for idx from Yahoo Finance and upsert into eods.

     Retrieves v8 chart data between sdt and edt, validates that the
     OHLCV lists all match the timestamp list in length, converts
     prices to integer cents and volumes to thousands, and upserts one
     row per date into the eods table.

     :param idx: index/ticker symbol
     :param sdt: start date
     :param edt: end date
     """
     long_start_date = stxcal.long_date(sdt)
     long_end_date = stxcal.long_date(edt)
     # NOTE(review): the crumb query parameter is hard-coded; presumably
     # it can expire -- confirm requests still succeed with a stale crumb
     req = ''.join([
         "https://query2.finance.yahoo.com/v8/finance/chart/", idx,
         "?formatted=true&"
         "crumb=5dLyddyx4FN&", "lang=en-US&", "region=US&",
         "includeAdjustedClose=true&", "interval=1d&",
         f"period1={long_start_date}&", f"period2={long_end_date}&",
         "events=div|split&", "useYfid=true&",
         "corsDomain=finance.yahoo.com"
     ])
     res = requests.get(req, headers=self.headers)
     if res.status_code != 200:
         logging.error(f'Something went wrong for {idx}: {res.text}')
         return
     res_json = res.json()
     logging.info(f'res.json = \n{json.dumps(res_json, indent=2)}')
     res_error = res_json.get('chart').get('error')
     if res_error is not None:
         logging.error(f'Error in data for {idx}: {res_error}')
         return
     res_list = res_json.get('chart').get('result')
     if len(res_list) < 1:
         logging.error(f'Got empty data for {idx}')
         return
     res_data = res_list[0]
     res_dates = res_data.get('timestamp')
     res_quote_list = res_data.get('indicators').get('quote')
     if len(res_quote_list) < 1:
         logging.error(f'Got empty quote for {idx}')
         return
     res_quote = res_quote_list[0]
     res_hi = res_quote.get('high')
     res_close = res_quote.get('close')
     res_volume = res_quote.get('volume')
     res_lo = res_quote.get('low')
     res_open = res_quote.get('open')
     # every OHLCV list must align with the timestamp list
     if (len(res_dates) != len(res_hi) or len(res_dates) != len(res_close)
             or len(res_dates) != len(res_volume)
             or len(res_dates) != len(res_lo)
             or len(res_dates) != len(res_open)):
         logging.error(f'Inconsistent lists in quote for {idx}: '
                       f'lengths for dates ({len(res_dates)}), '
                       f'opens ({len(res_open)}), '
                       f'highs ({len(res_hi)}), '
                       f'lows ({len(res_lo)}), '
                       f'closes ({len(res_close)}), '
                       f'volumes ({len(res_volume)}) do not match')
         return
     # prices stored as integer cents; volumes in thousands
     date_list = [str(datetime.fromtimestamp(x).date()) for x in res_dates]
     open_list = [int(round(x * 100, 0)) for x in res_open]
     hi_list = [int(round(x * 100, 0)) for x in res_hi]
     lo_list = [int(round(x * 100, 0)) for x in res_lo]
     close_list = [int(round(x * 100, 0)) for x in res_close]
     volume_list = [x // 1000 for x in res_volume]
     for dt, o, hi, lo, c, v in zip(date_list, open_list, hi_list, lo_list,
                                    close_list, volume_list):
         # logging.info(f'{idx} {dt} {o} {hi} {lo} {c} {v}')
         # upsert: duplicate (stk, dt) rows get fully overwritten
         db_cmd = ''.join([
             "insert into eods values",
             f"('{idx}', '{dt}', {o}, {hi}, {lo}, {c}, {v}, 0)",
             "on conflict (stk, dt) do update set ",
             f"o={o}, hi={hi}, lo={lo}, c={c}, v={v}, oi=0"
         ])
         stxdb.db_write_cmd(db_cmd)
     logging.info(f'Updated {len(date_list)} records for {idx}')