Example #1
    def addleader(self, tdf, liq='PctLiq', min_rank=1, max_rank=1000):
        q = "select * from rm_ldrs where stk='%s' and rm_ten='%s' and " \
            "rank>=%d and rank<=%d" % (self.stk, liq, min_rank, max_rank)
        # print("addleader q=%s" % q)
        ldr = pd.read_sql(q, stxdb.db_get_cnx(), parse_dates=['exp'])
        # print("%5s read the leaders: %s" % (" ", datetime.datetime.now()))
        # print("Found %d leader time intervals for %s" % (len(ldr), self.stk))
        ldrs = tdf.merge(ldr, how='left', on=['exp'])
        # print("%5s merged with TDF: %s" % (" ", datetime.datetime.now()))
        ldrs.set_index('date', inplace=True)

        # print("%5s set the index: %s" % (" ", datetime.datetime.now()))

        def ldrfun1(x):
            return 0 if (np.isnan(x['rank']) or x['rank'] == 0) else 1

        ldrs['rank'] = ldrs['rank'].fillna(0)
        # print("%5s filled n/a: %s" % (" ", datetime.datetime.now()))
        ldrs['ldr'] = ldrs.apply(ldrfun1, axis=1)

        # print("%5s applied ldrfun1: %s" % (" ", datetime.datetime.now()))

        def ldrfun2(x):
            return -x['ldr'] if x['inv'] == 1 else x['ldr']

        ldrs['ldr'] = ldrs.apply(ldrfun2, axis=1)
        # print("%5s applied ldrfun2: %s" % (" ", datetime.datetime.now()))
        ldrs.drop(['exp', 'rm_ten', 'inv', 'stk'], axis=1, inplace=True)
        # print("%5s dropped columns: %s" % (" ", datetime.datetime.now()))
        self.df = self.df.merge(ldrs,
                                how='left',
                                left_index=True,
                                right_index=True)
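
The two row-wise apply calls above can also be written in vectorized form with numpy; a minimal, self-contained sketch of an equivalent on sample data, assuming the same rank/inv column names:

    import numpy as np
    import pandas as pd

    # Sample frame standing in for the merged ldrs dataframe (assumed data).
    ldrs = pd.DataFrame({'rank': [np.nan, 5, 0, 12], 'inv': [0, 0, 1, 1]})
    ldrs['rank'] = ldrs['rank'].fillna(0)
    # ldrfun1: 0 when rank is missing or zero, 1 otherwise
    ldrs['ldr'] = np.where(ldrs['rank'] == 0, 0, 1)
    # ldrfun2: flip the sign for inverse instruments
    ldrs['ldr'] = np.where(ldrs['inv'] == 1, -ldrs['ldr'], ldrs['ldr'])
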
Example #2
 def get_stk_data(self,
                  stk,
                  crt_date,
                  expiry,
                  exp_date,
                  save_eod=False,
                  save_opts=True):
     res = requests.get(self.yhoo_url.format(stk, expiry))
     if res.status_code != 200:
         print('Failed to get {0:s} data for {1:s}: {2:d}'.format(
             exp_date, stk, res.status_code))
         return
     res_json = json.loads(res.text)
     res_0 = res_json['optionChain']['result'][0]
     quote = res_0.get('quote', {})
     c = quote.get('regularMarketPrice', -1)
     if c == -1:
         print('Failed to get closing price for {0:s}'.format(stk))
         return
     if save_eod:
         v = quote.get('regularMarketVolume', -1)
         o = quote.get('regularMarketOpen', -1)
         hi = quote.get('regularMarketDayHigh', -1)
         lo = quote.get('regularMarketDayLow', -1)
         if o == -1 or hi == -1 or lo == -1 or v == -1:
             print('Failed to get EOD quote for {0:s}'.format(stk))
         else:
             stxdb.db_insert_eods(
                 [[stk, crt_date, o, hi, lo, c, v / 1000, -1]])
     if not save_opts:
         return
     opts = res_0.get('options', [{}])
     calls = opts[0].get('calls', [])
     puts = opts[0].get('puts', [])
     cnx = stxdb.db_get_cnx()
     with cnx.cursor() as crs:
         for call in calls:
             opt_volume = 0 if call.get('volume') is None \
                          else call['volume']['raw']
             crs.execute('insert into opt_cache values' +
                         crs.mogrify('(%s,%s,%s,%s,%s,%s,%s,%s)', [
                             call['expiration']['fmt'], stk, 'c',
                             call['strike']['raw'], crt_date,
                             call['bid']['raw'], call['ask']['raw'],
                             opt_volume
                         ]) + 'on conflict do nothing')
         for put in puts:
             opt_volume = 0 if put.get('volume') is None \
                          else put['volume']['raw']
             crs.execute('insert into opt_cache values' +
                         crs.mogrify('(%s,%s,%s,%s,%s,%s,%s,%s)', [
                             put['expiration']['fmt'], stk, 'p',
                             put['strike']['raw'], crt_date,
                             put['bid']['raw'], put['ask']['raw'],
                             opt_volume
                         ]) + 'on conflict do nothing')
     print('Got {0:d} calls and {1:d} puts for {2:s} exp {3:s}'.format(
         len(calls), len(puts), stk, exp_date))
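
One caveat: with psycopg2 on Python 3, cursor.mogrify returns bytes, so concatenating it with the str fragments above can raise a TypeError depending on the driver version. A hedged variant of one insert, decoding first (same names as in the loop above):

    # Hedged variant: decode the mogrify result before building the statement.
    values = crs.mogrify('(%s,%s,%s,%s,%s,%s,%s,%s)', [
        call['expiration']['fmt'], stk, 'c', call['strike']['raw'],
        crt_date, call['bid']['raw'], call['ask']['raw'], opt_volume
    ])
    crs.execute('insert into opt_cache values ' + values.decode() +
                ' on conflict do nothing')
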
Example #3
def get_etf_words(elim_dct={}):
    q = sql.Composed([sql.SQL("SELECT name FROM etfs")])
    res = stxdb.db_read_cmd(q.as_string(stxdb.db_get_cnx()))
    etf_words = ' '.join([x[0] for x in res])
    etf_words_list = [ x for x in etf_words.split() if x not in elim_dct ]
    dct = {}
    for w in etf_words_list:
        count = dct.get(w, 0)
        dct[w] = count + 1
    return dct
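
The counting loop is the standard dict.get accumulation idiom; a self-contained sketch of the same step with collections.Counter, with sample names standing in for the database result:

    from collections import Counter

    # Sample ETF names stand in for the SELECT result (assumed data).
    names = ['Energy Select Sector', 'Technology Select Sector']
    words = ' '.join(names).split()
    elim_dct = {'Sector': 1}                 # words to exclude
    counts = dict(Counter(w for w in words if w not in elim_dct))
    # counts == {'Energy': 1, 'Select': 2, 'Technology': 1}
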
Example #4
 def mergetbl(self, tbl_name, col_list, addl_cond=None):
     q = "select date,%s from %s where stk='%s' and date between '%s' and '%s'"
     " %s" % (','.join(col_list), tbl_name, self.stk, self.sd, self.ed,
              "" if addl_cond is None else addl_cond)
     sql_res = pd.read_sql(q,
                           stxdb.db_get_cnx(),
                           index_col='date',
                           parse_dates=['date'])
     self.df = self.df.merge(sql_res,
                             how='left',
                             left_index=True,
                             right_index=True)
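
A hedged alternative to the %-interpolation above is to let the driver bind the values through pandas' params argument; a sketch under the assumption that the connection still comes from stxdb (the table and columns here are placeholders):

    # Hypothetical parameterized variant; 'some_tbl' and its columns are placeholders.
    q = ("select date, col_a, col_b from some_tbl "
         "where stk=%(stk)s and date between %(sd)s and %(ed)s")
    sql_res = pd.read_sql(q, stxdb.db_get_cnx(),
                          index_col='date', parse_dates=['date'],
                          params={'stk': self.stk, 'sd': self.sd, 'ed': self.ed})
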
Example #5
 def get_rs_stx(self, dt):
     q = sql.Composed([
         sql.SQL("select stk, indicators->>'rs' as rs, "
                 "indicators->>'rs_rank' as rs_rank from indicators"),
         sql.SQL(' where dt='),
         sql.Literal(dt),
         sql.SQL(' and stk not in (select * from excludes)')
     ])
     rsdf = pd.read_sql(q, stxdb.db_get_cnx())
     rsdf[["rs", "rs_rank"]] = rsdf[["rs", "rs_rank"]].apply(pd.to_numeric)
     rsdf.sort_values(by=['rs'], ascending=False, inplace=True)
     return rsdf
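
The JSONB ->> operator returns text, which is why rs and rs_rank need pd.to_numeric before sorting; a self-contained sketch of that conversion on sample data:

    import pandas as pd

    # ->> yields strings, so numeric columns arrive as text (sample data).
    rsdf = pd.DataFrame({'stk': ['AAA', 'BBB'],
                         'rs': ['87.5', '92.1'],
                         'rs_rank': ['12', '3']})
    rsdf[['rs', 'rs_rank']] = rsdf[['rs', 'rs_rank']].apply(pd.to_numeric)
    rsdf.sort_values(by=['rs'], ascending=False, inplace=True)
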
Example #6
 def get_opt_spreads(self, crt_date, eod):
     exp_date = stxcal.next_expiry(crt_date, min_days=(1 if eod else 0))
     q = sql.Composed([
         sql.SQL('select stk, opt_spread from leaders '
                 'where expiry='),
         sql.Literal(exp_date)
     ])
     cnx = stxdb.db_get_cnx()
     with cnx.cursor() as crs:
         crs.execute(q.as_string(cnx))
         spread_dict = {x[0]: x[1] for x in crs}
     return spread_dict
Example #7
 def get_setups_for_tomorrow(self, dt):
     next_dt = stxcal.next_busday(dt)
     q = sql.Composed([
         sql.SQL('select * from setups where dt='),
         sql.Literal(next_dt),
         sql.SQL(' and setup in ('),
         sql.SQL(', ').join(
             [sql.Literal('JC_5DAYS'),
              sql.Literal('JC_1234')]),
         sql.SQL(')')
     ])
     df = pd.read_sql(q, stxdb.db_get_cnx())
     return df
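
The IN (...) list is assembled from psycopg2.sql fragments; a minimal sketch of the same composition (building the Composed object needs no connection, rendering or executing it does):

    from psycopg2 import sql

    setups = ['JC_5DAYS', 'JC_1234']
    q = sql.Composed([
        sql.SQL('select * from setups where dt='),
        sql.Literal('2021-03-02'),           # hypothetical next business day
        sql.SQL(' and setup in ('),
        sql.SQL(', ').join([sql.Literal(s) for s in setups]),
        sql.SQL(')'),
    ])
    # q.as_string(cnx) renders to:
    # select * from setups where dt='2021-03-02' and setup in ('JC_5DAYS', 'JC_1234')
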
Example #8
 def get_triggered_setups(self, dt):
     q = sql.Composed([
         sql.SQL('select * from setups where dt='),
         sql.Literal(dt),
         sql.SQL(' and setup in ('),
         sql.SQL(', ').join(
             [sql.Literal('JC_5DAYS'),
              sql.Literal('JC_1234')]),
         sql.SQL(') and triggered='),
         sql.Literal(True)
     ])
     df = pd.read_sql(q, stxdb.db_get_cnx())
     return df
Example #9
def stock_labels(stk, elim_dct=get_eliminated_words()):
    q = sql.Composed(
        [
            sql.SQL("SELECT name FROM etfs WHERE ticker IN "),
            sql.SQL("(SELECT etf FROM stk_etfs WHERE stk = "),
            sql.Literal(stk),
            sql.SQL(")")
        ]
    )
    res = stxdb.db_read_cmd(q.as_string(stxdb.db_get_cnx()))
    etf_words = ' '.join([x[0] for x in res])
    etf_words_list = etf_words.split()
    labels = [x for x in etf_words_list if x not in elim_dct]
    labels = list(set(labels))
    return labels
Example #10
 def find_all_liquid_stocks_as_of(self, selected_date):
     res = []
     q = "select * from eods where date = '{0:s}'".format(selected_date)
     df = pd.read_sql(q, stxdb.db_get_cnx())
     print('Found {0:d} stocks'.format(len(df)))
     df['rg'] = df['hi'] - df['lo']
     df_1 = df.query('volume>1000 & c>30 & rg>0.015*c')
     stx = df_1['stk'].tolist()
     print('Found {0:d} leaders'.format(len(stx)))
     start_date = stxcal.move_busdays(selected_date, -60)
     print('start_date is: {0:s}'.format(str(start_date)))
     ixx = 0
     for stk in stx:
         ixx += 1
         ts = StxTS(stk, start_date, selected_date)
         # adjust the whole thing for splits, etc.
         ts.set_day(str(ts.df.index[-1].date()))
         ts.df['hi_1'] = ts.df['hi'].shift(1)
         ts.df['lo_1'] = ts.df['lo'].shift(1)
         ts.df['rg'] = ts.df['hi'] - ts.df['lo']
         ts.df['act'] = ts.df['volume'] * ts.df['c']
         ts.df['avg_v'] = ts.df['volume'].rolling(50).mean()
         ts.df['avg_c'] = ts.df['c'].rolling(50).mean()
         ts.df['avg_rg'] = ts.df['rg'].rolling(50).mean()
         ts.df['avg_act'] = ts.df['act'].rolling(50).mean()
         rec = ts.df.iloc[-1]
         if rec.avg_v > 2000 and rec.avg_c > 40 and \
            rec.avg_act > 100000 and rec.avg_rg > 0.015 * rec.avg_c:
             res.append(stk)
             sc = StxCandles(stk)
             setup_ts = sc.calculate_setups(sd=start_date)
             setups = [
                 'gap', 'marubozu', 'hammer', 'doji', 'engulfing',
                 'piercing', 'harami', 'star', 'engulfharami', 'three_m',
                 'three_in', 'three_out', 'up_gap_two_crows'
             ]
             with open('/home/cma/setups/{0:s}.csv'.format(stk), 'w') as f:
                 for index, row in setup_ts.df.iterrows():
                     f.write('{0:s};'.format(str(index.date())))
                     for setup in setups:
                         if row[setup] != 0:
                             f.write('  {0:s}: {1:.0f} '.format(
                                 setup.upper(), row[setup]))
                     f.write('\n')
         if ixx == len(stx) or ixx % 50 == 0:
             print('Processed {0:d} leaders'.format(ixx))
     print('Found {0:d} super leaders'.format(len(res)))
     return res
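
The screen above hinges on 50-day rolling means and the last available row; a self-contained sketch of that filter on dummy data (same column names, with .iloc in place of the removed .ix indexer):

    import numpy as np
    import pandas as pd

    # Dummy OHLCV frame standing in for ts.df (assumed data).
    n = 60
    df = pd.DataFrame({'c': np.linspace(40, 50, n),
                       'hi': np.linspace(41, 51, n),
                       'lo': np.linspace(39, 49, n),
                       'volume': np.full(n, 2500)})
    df['rg'] = df['hi'] - df['lo']
    df['act'] = df['volume'] * df['c']
    df['avg_v'] = df['volume'].rolling(50).mean()
    df['avg_c'] = df['c'].rolling(50).mean()
    df['avg_rg'] = df['rg'].rolling(50).mean()
    df['avg_act'] = df['act'].rolling(50).mean()
    rec = df.iloc[-1]
    is_super_leader = (rec.avg_v > 2000 and rec.avg_c > 40 and
                       rec.avg_act > 100000 and rec.avg_rg > 0.015 * rec.avg_c)
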
Example #11
 def hiactfun(r):
     qha = sql.Composed([
         sql.SQL('select * from jl_setups where dt between '),
         sql.Literal(r['d_8']),
         sql.SQL(' and '),
         sql.Literal(r['dt']),
         sql.SQL(' and stk='),
         sql.Literal(r['stk']),
         sql.SQL(' and abs(score) > 100 and setup in ('),
         sql.SQL(',').join([
             sql.Literal('Gap'),
             sql.Literal('SC'),
             sql.Literal('RDay')
         ]),
         sql.SQL(')')
     ])
     db_df = pd.read_sql(qha, stxdb.db_get_cnx())
     return db_df['score'].sum() if len(db_df) > 0 else 0
Example #12
 def get_liq_leaders(self, ana_date, min_act=80000, min_rcr=0.015):
     stk_list = stxdb.db_read_cmd(
         "select distinct stk from eods where "
         "date='{0:s}' order by stk".format(ana_date))
     all_stocks = [
         s[0] for s in stk_list if re.match(r'^[A-Za-z]', str(s[0]))
     ]
     print('Found {0:d} stocks for {1:s}'.format(len(all_stocks), ana_date))
     next_exp = stxcal.next_expiry(ana_date)
     next_exp_busday = stxcal.move_busdays(next_exp, 0)
     num_stx = 0
     num = 0
     liq_leaders = []
     for s in all_stocks:
         num += 1
         ts = self.ts_dct.get(s)
         if ts is None:
             ts = StxTS(s, self.start_date, self.end_date)
             ts.set_day(str(ts.df.index[-1].date()))
             ts.df['activity'] = ts.df['volume'] * ts.df['c']
             ts.df['avg_act'] = ts.df['activity'].rolling(50).mean()
             ts.df['rg'] = ts.df['hi'] - ts.df['lo']
             ts.df['avg_rg'] = ts.df['rg'].rolling(50).mean()
             ts.df['rg_c_ratio'] = ts.df['avg_rg'] / ts.df['c']
             self.ts_dct[s] = ts
             num_stx += 1
         stk_act = [s]
         if self.is_liq_leader(ts, ana_date, min_act, min_rcr, stk_act):
             liq_leaders.append(stk_act)
         if num % 1000 == 0 or num == len(all_stocks):
             print('Processed {0:d} stocks, found {1:d} liquidity leaders'.
                   format(num, len(liq_leaders)))
     print('Found {0:d} liquidity leaders for {1:s}'.format(
         len(liq_leaders), ana_date))
     print('Loaded {0:d} stocks for {1:s}'.format(num_stx, ana_date))
     cnx = stxdb.db_get_cnx()
     with cnx.cursor() as crs:
         for ldr in liq_leaders:
             crs.execute(
                 'insert into leaders(exp,stk,activity,opt_spread) values '
                 + crs.mogrify('(%s,%s,%s,%s)',
                               [next_exp, ldr[0],
                                int(ldr[1]), -1000]) +
                 'on conflict do nothing')
Example #13
 def __init__(self, stk, sd, ed, eod_tbl='eods', split_tbl='dividends'):
     self.stk = stk
     self.sd = pd.to_datetime(sd)
     self.ed = pd.to_datetime(ed)
     q = "select * from {0:s} where stk='{1:s}' and dt "\
         "between '{2:s}' and '{3:s}' order by dt".format(
         eod_tbl, stk, sd, ed)
     df = pd.read_sql(q,
                      stxdb.db_get_cnx(),
                      index_col='dt',
                      parse_dates=['dt'])
     if self.sd < df.index[0]:
         self.sd = df.index[0]
     if self.ed > df.index[-1]:
         self.ed = df.index[-1]
     self.sd_str = str(self.sd.date())
     self.ed_str = str(self.ed.date())
     self.gaps = self.get_gaps(df)
     df.drop(['stk', 'prev_dt', 'prev_date', 'gap'], axis=1, inplace=True)
     s_lst = stxdb.db_read_cmd("select dt, ratio, divi_type from "
                               "{0:s} where stk='{1:s}'".format(
                                   split_tbl, stk))
     # print('stk = {0:s}, s_lst = {1:s}'.format(stk, str(s_lst)))
     self.splits = {
         pd.to_datetime(stxcal.next_busday(s[0])): [float(s[1]),
                                                    int(s[2])]
         for s in s_lst
     }
     self.df = self.fill_gaps(df)
     self.l = len(self.df)
     self.pos = 0
     self.num_gaps = [
         tuple([self.find(str(x[0].date())),
                self.find(str(x[1].date()))]) for x in self.gaps
     ]
     self.start = self.num_gaps[0][0]
     self.end = self.num_gaps[0][1]
     self.adj_splits = []
Example #14
 def get_leaders(self, ldr_date, get_for_all=True):
     ldr_expiry = stxcal.next_expiry(ldr_date)
     cnx = stxdb.db_get_cnx()
     if get_for_all:
         q = sql.Composed([
             sql.SQL('select stk from leaders where exp='),
             sql.Literal(ldr_expiry)
         ])
     else:
         q = sql.Composed([
             sql.SQL('select stk from leaders where exp='),
             sql.Literal(ldr_expiry),
             sql.SQL(' and opt_spread >= 0 and atm_price is not null '
                     'and atm_price<='),
             sql.Literal(self.max_atm_price),
             sql.SQL(' and stk not in (select * from exclusions) '
                     'order by opt_spread asc limit '),
             sql.Literal(self.num_stx)
         ])
     with cnx.cursor() as crs:
         crs.execute(q.as_string(cnx))
         ldrs = [x[0] for x in crs]
     return ldrs
Example #15
 def get_data(self,
              crt_date,
              get_eod=True,
              get_for_all=True,
              get_opts=True):
     expiries = stxcal.long_expiries()
     cnx = stxdb.db_get_cnx()
     ldrs = self.get_leaders(crt_date, get_for_all)
     exp_dates = [
         str(datetime.datetime.utcfromtimestamp(x).date())
         for x in expiries[:3]
     ]
     six = 1 if exp_dates[0] < crt_date else 0
     for ldr in ldrs:
         try:
             self.get_stk_data(ldr,
                               crt_date,
                               expiries[six],
                               exp_dates[six],
                               save_eod=get_eod,
                               save_opts=get_opts)
         except Exception:
             print(
                 'Failed to get options for {0:s} exp {1:s}: {2:s}'.format(
                     ldr, exp_dates[six], traceback.format_exc()))
         try:
             if get_opts:
                 self.get_stk_data(ldr,
                                   crt_date,
                                   expiries[six + 1],
                                   exp_dates[six + 1],
                                   save_eod=False,
                                   save_opts=get_opts)
         except Exception:
             print(
                 'Failed to get options for {0:s} exp {1:s}: {2:s}'.format(
                     ldr, exp_dates[six + 1], traceback.format_exc()))
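
The handlers format the traceback into the message; traceback.format_exc() returns the traceback as a string, whereas print_exc() prints it and returns None, which would break the {2:s} placeholder. A self-contained sketch:

    import traceback

    try:
        1 / 0
    except Exception:
        # format_exc() yields a str, safe to pass to str.format()
        print('Failed: {0:s}'.format(traceback.format_exc()))
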
Example #16
    def parse_stooq_new(self, last_db_date):
        logging.info('Checking if a new stooq file has been downloaded')
        # stooq_file = os.path.join(os.getenv('DOWNLOAD_DIR'), 'data_d.txt')
        download_dir = self.config.get('datafeed', 'download_dir')
        stooq_file = os.path.join(download_dir, 'data_d.txt')
        if not os.path.exists(stooq_file):
            logging.info('No new stooq data file found.  Nothing to do.')
            return
        logging.info('Reading stooq file, renaming columns, getting daily '
                     'US stocks data')
        df = pd.read_csv(stooq_file,
                         dtype={
                             "<TICKER>": "string",
                             "<PER>": "string",
                             "<DATE>": "string",
                             "<TIME>": "string",
                             "<OPEN>": float,
                             "<HIGH>": float,
                             "<LOW>": float,
                             "<CLOSE>": float,
                             "<VOL>": int,
                             "<OPENINT>": int
                         })
        df.columns = [x[1:-1].lower() for x in df.columns]
        stx_df = df.query('ticker.str.endswith(".US") and per == "D"',
                          engine='python').copy()
        logging.info(
            'Getting {0:d} daily US stocks out of {1:d} records'.format(
                len(stx_df), len(df)))
        stx_df['date'] = stx_df['date'].astype(str)
        stx_df['date'] = stx_df.apply(lambda r: '{0:s}-{1:s}-{2:s}'.format(
            r['date'][0:4], r['date'][4:6], r['date'][6:8]),
                                      axis=1)
        logging.info('Converted stx_df dates in yyyy-mm-dd format')
        dates = stx_df.groupby(by='date')['ticker'].count()
        next_date = stxcal.next_busday(last_db_date)
        ix0, num_dates = 0, len(dates)
        logging.info('Data available for {0:d} dates, from {1:s} to {2:s}; DB '
                     'needs data starting from {3:s}'.format(
                         len(dates), dates.index[0],
                         dates.index[num_dates - 1], next_date))
        db_dates = []
        while ix0 < num_dates:
            if dates.index[ix0] == next_date:
                break
            ix0 += 1
        for ixx in range(ix0, num_dates):
            if dates.index[ixx] == next_date and dates.values[ixx] > 9000:
                db_dates.append(dates.index[ixx])
            else:
                if dates.index[ixx] != next_date:
                    logging.error(f'Missing date {next_date}; got '
                                  f'{dates.index[ixx]} instead')

                if dates.values[ixx] < 9000:
                    logging.error(f'Not enough records ({dates.values[ixx]}) '
                                  f'available for {dates.index[ixx]}')
                break
            next_date = stxcal.next_busday(next_date)

        if not db_dates:
            logging.info('No new data available for processing. Exiting')
            return
        logging.info('Check that there are no time gaps between DB data and '
                     'upload data')
        start_date = stxcal.next_busday(last_db_date)
        num_bdays = stxcal.num_busdays(start_date, db_dates[0])
        if num_bdays > 0:
            logging.warning(
                'No data for {0:d} days ({1:s} - {2:s}). Exiting ...'.format(
                    num_bdays, start_date, stxcal.prev_busday(db_dates[0])))
            return
        logging.info('Check that there are no time gaps in the upload data')
        for ixx in range(len(db_dates) - 1):
            if stxcal.next_busday(db_dates[ixx]) != db_dates[ixx + 1]:
                logging.warning('Inconsistent dates {0:s} and {1:s} '
                                'at indexes {2:d} and {3:d}'.format(
                                 db_dates[ixx], db_dates[ixx + 1], ixx,
                                 ixx + 1))

        sel_stx_df = stx_df.query('date in @db_dates').copy()
        logging.info(
            '{0:d}/{1:d} records found for following dates: [{2:s}]'.format(
                len(sel_stx_df), len(stx_df), ', '.join(db_dates)))
        sel_stx_df['invalid'] = sel_stx_df.apply(
            lambda r: np.isnan(r['open']) or np.isnan(r['high'])
            or np.isnan(r['low']) or np.isnan(r['close'])
            or np.isnan(r['vol']) or r['vol'] == 0
            or r['open'] > r['high'] or r['open'] < r['low']
            or r['close'] > r['high'] or r['close'] < r['low'],
            axis=1)
        valid_stx_df = sel_stx_df.query('not invalid').copy()
        logging.info('Found {0:d} valid records out of {1:d} records'.format(
            len(valid_stx_df), len(sel_stx_df)))

        def process_row(r):
            stk = (r['ticker'][:-3].replace("-.", ".P.")
                   .replace("_", ".").replace('-', '.'))
            o = int(100 * r['open'])
            hi = int(100 * r['high'])
            lo = int(100 * r['low'])
            c = int(100 * r['close'])
            v = int(r['vol'])
            v = v // 1000
            if v == 0:
                v = 1
            lst = [stk, o, hi, lo, c, v]
            return pd.Series(lst)

        valid_stx_df[['ticker', 'open', 'high', 'low', 'close', 'vol']] = \
            valid_stx_df.apply(process_row, axis=1)
        valid_stx_df['openint'] = 2
        valid_stx_df.drop(columns=['per', 'time', 'invalid'],
                          axis=1,
                          inplace=True)
        valid_stx_df.columns = ['stk', 'dt', 'o', 'hi', 'lo', 'c', 'v', 'oi']

        with closing(stxdb.db_get_cnx().cursor()) as crs:
            create_tbl_sql = 'CREATE TEMPORARY TABLE temp_table ('\
                'stk VARCHAR(16) NOT NULL, '\
                'dt DATE NOT NULL, '\
                'o INTEGER NOT NULL, '\
                'hi INTEGER NOT NULL, '\
                'lo INTEGER NOT NULL, '\
                'c INTEGER NOT NULL, '\
                'v INTEGER, '\
                'oi INTEGER, '\
                'PRIMARY KEY(stk, dt))'
            crs.execute(create_tbl_sql)
            logging.info('Created temporary table')
            upload_data = valid_stx_df.values.tolist()
            execute_values(
                crs, 'INSERT INTO temp_table '
                '(stk, dt, o, hi, lo, c, v, oi) VALUES %s', upload_data)
            logging.info('Uploaded dataframe into temporary table')
            stxdb.db_write_cmd(
                'INSERT INTO eods (stk, dt, o, hi, lo, c, v, oi) '
                'SELECT * FROM temp_table ON CONFLICT (stk, dt) DO '
                'UPDATE SET o = EXCLUDED.o, hi = EXCLUDED.hi, '
                'lo = EXCLUDED.lo, c = EXCLUDED.c, v = EXCLUDED.v, '
                'oi = EXCLUDED.oi')
            logging.info('Uploaded data into eods table')
        last_upload_date = valid_stx_df['dt'].max()
        stxdb.db_write_cmd("UPDATE analyses SET dt='{0:s}' WHERE "
                           "analysis='eod_datafeed'".format(last_upload_date))
        logging.info('Updated latest eod datafeed date {0:s} in DB'.format(
            last_upload_date))
        self.rename_stooq_file(dates.index[0], dates.index[num_dates - 1])
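
The ticker filter uses engine='python' because the .str accessor is not supported by the default numexpr query engine; a self-contained sketch of that selection:

    import pandas as pd

    df = pd.DataFrame({'ticker': ['AAPL.US', 'BMW.DE', 'MSFT.US'],
                       'per': ['D', 'D', 'W']})
    stx_df = df.query('ticker.str.endswith(".US") and per == "D"',
                      engine='python').copy()
    # keeps only the daily .US rows (here: AAPL.US)
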
Example #17
 def ana_report(self, stk, start_date, end_date):
     res = '<table><tr>'
     jl_start_date = stxcal.move_busdays(end_date, -8)
     # add the A/D setups table
     res += '<td><table>'
     qad = sql.Composed([
         sql.SQL('select * from jl_setups where dt between '),
         sql.Literal(start_date),
         sql.SQL(' and '),
         sql.Literal(end_date),
         sql.SQL(' and setup in ('),
         sql.SQL(',').join(
             [sql.Literal('Gap'),
              sql.Literal('SC'),
              sql.Literal('RDay')]),
         sql.SQL(') and abs(score) >= 100 and stk='),
         sql.Literal(stk),
         sql.SQL(' order by dt, direction, setup')
     ])
     df_ad = pd.read_sql(qad, stxdb.db_get_cnx())
     for _, row in df_ad.iterrows():
         res += '<tr><td>{}</td><td>{}</td><td>{}</td><td>{}</td>'\
             '</tr>'.format(row['dt'].strftime('%b %d'), row['setup'],
                            row['direction'], row['score'])
     res += '</table></td>'
     # add the JL setups table
     res += '<td><table>'
     qjl = sql.Composed([
         sql.SQL('select * from jl_setups where dt between '),
         sql.Literal(jl_start_date),
         sql.SQL(' and '),
         sql.Literal(end_date),
         sql.SQL(' and setup in ('),
         sql.SQL(',').join([
             sql.Literal('JL_B'),
             sql.Literal('JL_P'),
             sql.Literal('JL_SR')
         ]),
         sql.SQL(') and stk='),
         sql.Literal(stk),
         sql.SQL(' order by dt, direction, setup, factor')
     ])
     df_jl = pd.read_sql(qjl, stxdb.db_get_cnx())
     for _, row in df_jl.iterrows():
         res += '<tr><td>{}</td><td>{}</td><td>{}</td><td>{}</td>'\
             '<td>{}</td></tr>'.format(row['dt'].strftime('%b %d'),
                                       row['setup'], row['direction'],
                                       row['factor'], row['score'])
     res += '</table></td>'
     # add the candlesticks setups table
     res += '<td><table>'
     qcs = sql.Composed([
         sql.SQL('select * from jl_setups where dt between '),
         sql.Literal(start_date),
         sql.SQL(' and '),
         sql.Literal(end_date),
         sql.SQL(' and setup in ('),
         sql.SQL(',').join([
             sql.Literal('EngHarami'),
             sql.Literal('Cbs'),
             sql.Literal('3out'),
             sql.Literal('3'),
             sql.Literal('Kicking'),
             sql.Literal('Piercing'),
             sql.Literal('Engulfing'),
             sql.Literal('Star')
         ]),
         sql.SQL(') and stk='),
         sql.Literal(stk),
         sql.SQL(' order by dt, direction, setup')
     ])
     df_cs = pd.read_sql(qcs, stxdb.db_get_cnx())
     for _, row in df_cs.iterrows():
         res += '<tr><td>{}</td><td>{}</td><td>{}</td></tr>'.format(
             row['dt'].strftime('%b %d'), row['setup'], row['direction'])
     res += '</table></td>'
     res += '</tr></table>'
     return res
Example #18
 def get_opt_spread_leaders(self, ldr_date):
     next_exp = stxcal.next_expiry(ldr_date)
     calc_exp = stxcal.next_expiry(ldr_date, 9)
     crt_date = stxcal.current_busdate()
     cnx = stxdb.db_get_cnx()
     stx = self.get_leaders(ldr_date)
     print('Calculating option spread for {0:d} stocks'.format(len(stx)))
     num = 0
     if ldr_date <= self.last_opt_date:
         opt_tbl_name = 'options'
         spot_tbl_name = 'opt_spots'
         spot_column = 'spot'
         opt_date_column = 'date'
     else:
         opt_tbl_name = 'opt_cache'
         spot_tbl_name = 'eods'
         spot_column = 'c'
         opt_date_column = 'dt'
     for stk in stx:
         print('stk = {0:s}'.format(stk))
         spot_q = sql.Composed([
             sql.SQL('select {} from {} where stk=').format(
                 sql.Identifier(spot_column),
                 sql.Identifier(spot_tbl_name)),
             sql.Literal(stk),
             sql.SQL(' and date='),
             sql.Literal(crt_date)
         ])
         with cnx.cursor() as crs:
             crs.execute(spot_q.as_string(cnx))
             spot_res = crs.fetchone()
             if spot_res is None:
                 continue
             spot = float(spot_res[0])
         tokens = stk.split('.')
         und = '.'.join(tokens[:-1]) if tokens[-1].isdigit() else stk
         opt_q = sql.Composed([
             sql.SQL('select * from {} where expiry=').format(
                 sql.Identifier(opt_tbl_name)),
             sql.Literal(calc_exp),
             sql.SQL(' and und='),
             sql.Literal(und),
             sql.SQL(' and {}=').format(sql.Identifier(opt_date_column)),
             sql.Literal(crt_date)
         ])
         opt_df = pd.read_sql(opt_q.as_string(cnx), cnx)
         if len(opt_df) < 6:
             continue
         opt_df['strike_spot'] = abs(opt_df['strike'] - spot)
         opt_df['spread'] = 100 * (1 - opt_df['bid'] / opt_df['ask'])
         opt_df.sort_values(by=['strike_spot'], inplace=True)
         opt_df['avg_spread'] = opt_df['spread'].rolling(6).mean()
         try:
             avg_spread = int(opt_df.iloc[5].avg_spread * 100)
             avg_atm_price = round(
                 (opt_df.iloc[0].ask + opt_df.iloc[1].ask) / 2, 2)
             with cnx.cursor() as crs:
                 crs.execute(
                     'update leaders set opt_spread=%s, '
                     'atm_price=%s where stk=%s and exp=%s',
                     (avg_spread, avg_atm_price, stk, next_exp))
         except Exception:
             print('Failed to calc avg_spread for {0:s}'.format(stk))
         num += 1
         if num % 100 == 0 or num == len(stx):
             print('Calculated option spread for {0:d} stocks'.format(num))
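
The spread metric is 100 * (1 - bid/ask), averaged over the six strikes closest to spot by sorting on moneyness and taking a rolling mean; a self-contained sketch on sample quotes:

    import pandas as pd

    spot = 52.0
    opt_df = pd.DataFrame({'strike': [45, 50, 55, 60, 65, 70],
                           'bid': [7.10, 3.20, 1.10, 0.40, 0.15, 0.05],
                           'ask': [7.40, 3.40, 1.25, 0.50, 0.25, 0.10]})
    opt_df['strike_spot'] = abs(opt_df['strike'] - spot)
    opt_df['spread'] = 100 * (1 - opt_df['bid'] / opt_df['ask'])
    opt_df.sort_values(by=['strike_spot'], inplace=True)
    opt_df['avg_spread'] = opt_df['spread'].rolling(6).mean()
    avg_spread = int(opt_df.iloc[5].avg_spread * 100)
    avg_atm_price = round((opt_df.iloc[0].ask + opt_df.iloc[1].ask) / 2, 2)
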