Example #1
def main(args):

    import argparse
    parser = argparse.ArgumentParser()

    # --dbpath option
    parser.add_argument("--dbpath",
                        help="Database URL to be used.",
                        dest="dbpath")

    args = parser.parse_args()

    # Make sure we can access the DB path if specified or else exit right here.
    db_metadata = None
    if args.dbpath:
        try:
            db_metadata = get_metadata(args.dbpath)
        except Exception as e:
            print("Not a valid DB URL: {} (Exception: {})".format(
                args.dbpath, e))
            return -1

    insert_statements = populate_all_scrips_table(db_metadata)
    results = execute_many_insert(insert_statements, engine=db_metadata.bind)
    for r in results:
        r.close()

    return 0
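A possible reading of the helpers used above: the sketch below is only an assumption based on how get_metadata and execute_many_insert are called in these examples (it is not the project's actual implementation) and presumes SQLAlchemy 1.x-style bound metadata.

from sqlalchemy import MetaData, create_engine

def get_metadata(db_url):
    # Assumed behaviour: create an Engine for db_url, bind it to a MetaData
    # object (so callers can use metadata.bind) and reflect existing tables.
    engine = create_engine(db_url)
    meta = MetaData(bind=engine)
    meta.reflect()
    return meta

def execute_many_insert(statements, engine):
    # Assumed behaviour: execute each statement on the engine and hand the
    # result objects back; the caller closes them, as the examples do.
    return [engine.execute(stmt) for stmt in statements]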
Example #2
def _update_bhavcopy(curdate, stocks_dict):
    """update bhavcopy Database date in DD-MM-YYYY format."""

    nse_eq_hist_data = create_or_get_nse_equities_hist_data(
        metadata=_DB_METADATA)

    # Delete any existing rows for this date before re-inserting.
    module_logger.debug("Deleting any old data for date %s.", curdate)
    d = nse_eq_hist_data.delete(nse_eq_hist_data.c.date == curdate)
    r = execute_one(d, engine=_DB_METADATA.bind)
    module_logger.debug("Deleted %d rows.", r.rowcount)

    insert_statements = []
    for k, v in stocks_dict.items():
        ins = nse_eq_hist_data.insert().values(symbol=k,
                                               date=curdate,
                                               open=v.open,
                                               high=v.high,
                                               low=v.low,
                                               close=v.close,
                                               volume=v.volume,
                                               delivery=v.deliv)
        insert_statements.append(ins)
        module_logger.debug(ins.compile().params)

    results = execute_many_insert(insert_statements, engine=_DB_METADATA.bind)
    for r in results:
        r.close()
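The stocks_dict argument is assumed to map each symbol to a per-day record exposing the attributes read above (open, high, low, close, volume, deliv). The namedtuple and values below are purely illustrative:

from collections import namedtuple

DayQuote = namedtuple('DayQuote', 'open high low close volume deliv')

stocks_dict = {
    'INFY': DayQuote(open=1450.0, high=1460.5, low=1441.0,
                     close=1455.2, volume=1234567, deliv=0.62),
}
# _update_bhavcopy('01-01-2021', stocks_dict)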
Example #3
def _apply_name_changes_to_db(syms):
    """Changes security names in nse_hist_data table so the name of the security
    is always the latest."""

    hist_data = create_or_get_nse_equities_hist_data(metadata=_DB_METADATA)

    update_statements = []
    for sym in syms:
        old = sym[0]
        new = sym[1]
        chdate = sym[2]

        chdt = dt.date(dt.strptime(chdate, '%d-%b-%Y'))

        upd = hist_data.update().values(symbol=new).\
            where(and_expr(hist_data.c.symbol == old,
                           hist_data.c.date < chdt))

        update_statements.append(upd)

    results = execute_many_insert(update_statements, engine=_DB_METADATA.bind)
    for r in results:
        r.close()
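Going by the '%d-%b-%Y' parsing above, each entry in syms is expected to be an (old_symbol, new_symbol, change_date) triple; the names and date below are placeholders, not real renames:

name_changes = [
    ('OLDSYMBOL', 'NEWSYMBOL', '01-Jun-2011'),
]
# _apply_name_changes_to_db(name_changes)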
Example #4
def main(args):

    import argparse
    parser = argparse.ArgumentParser()

    group = parser.add_mutually_exclusive_group()
    group.add_argument("--all",
                       help="Download data for all stocks. Usually you'd have "
                       "to do it only once.",
                       dest="all_stocks",
                       action="store_true")

    group.add_argument("--from",
                       help="Download data from this data. Date Format "
                       "'DD-MM-YYYY'.",
                       dest="from_date")

    # --dbpath option
    parser.add_argument("--dbpath",
                        help="Database URL to be used.",
                        dest="dbpath")

    args, unprocessed = parser.parse_known_args()

    # Make sure we can access the DB path if specified or else exit right here.
    if args.dbpath:
        try:
            db_meta = get_metadata(args.dbpath)
        except Exception as e:
            print("Not a valid DB URL: {} (Exception: {})".format(
                args.dbpath, e))
            return -1

    all_corp_actions = []
    if args.all_stocks:
        unprocessed = (x.symbol for x in nse_get_all_stocks_list())

    # Any symbols left on the command line (e.g. "--from DD-MM-YYYY infy")
    # end up in 'unprocessed' and are downloaded one by one here.
    for stock in unprocessed:
        time.sleep(random.randint(1, 5))
        try:
            corp_actions = get_corp_action_csv(sym_name=stock)
        except Exception as e:
            module_logger.exception(e)
            continue
        all_corp_actions.extend(corp_actions)

    if args.from_date:
        try:
            from_date = dt.date(dt.strptime(args.from_date, '%d-%m-%Y'))
            today = dt.date(dt.now())
            td = today - from_date
            if td.days < 0:
                print("From date cannot be greater than today.")
                return -1
            if td.days < 15:
                corp_actions = get_corp_action_csv(time_period='15_DAYS')
            else:
                corp_actions = get_corp_action_csv(time_period='3_MONTHS')

            all_corp_actions.extend(corp_actions)

        except ValueError as e:
            print("Date '{}' in unsupported format".format(args.from_date))
            return -1

    tbl = create_or_get_nse_corp_actions_hist_data(metadata=db_meta)

    all_insert_statements = []
    for corp_action in all_corp_actions:
        module_logger.debug("CorpAction :%s", str(corp_action))
        insert_st = tbl.insert().values(symbol=corp_action.sym,
                                        ex_date=corp_action.ex_date,
                                        action=corp_action.action,
                                        ratio=corp_action.ratio,
                                        delta=corp_action.delta)
        all_insert_statements.append(insert_st)
        module_logger.debug("insert_st : %s", insert_st.compile().params)

    results = execute_many_insert(all_insert_statements, engine=db_meta.bind)
    for result in results:
        result.close()

    return 0
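The insert above reads sym, ex_date, action, ratio and delta from each corporate action, so get_corp_action_csv is assumed to yield records of roughly this shape (a hypothetical stand-in with made-up values):

from collections import namedtuple

CorpAction = namedtuple('CorpAction', 'sym ex_date action ratio delta')

example_action = CorpAction(sym='ABC', ex_date='2018-09-04',
                            action='BONUS', ratio='1:1', delta=0.5)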
Example #5
def download_and_save_index(idx, db_meta, start_date=None, end_date=None):
    """
    Returns an iterator over the rows of the data

    The way this works is - we download data for 100 days at a time - something
    that fits in the table and then read that table using BS4. Then collect all
    such data and return back.
    """

    if idx not in _INDICES_DICT:
        module_logger.error("Index %s not found or not supported yet.", idx)
        module_logger.error("Supported indices are: %s",
                            ", ".join(_INDICES_DICT.keys()))
        return None

    start_dt = start_date or _INDICES_DICT[idx][1]
    s = dt.strptime(start_dt, _DATE_FMT)

    if not end_date:
        e = dt.now()
    else:
        e = dt.strptime(end_date, _DATE_FMT)

    e2 = s + td(days=_PREF_DAYS)
    if e2 > e:
        e2 = e

    all_data = []
    while e > s:
        e_ = e2.strftime(_DATE_FMT)
        s_ = s.strftime(_DATE_FMT)
        r = _do_get_index(idx, s_, e_)
        if r:
            module_logger.debug("Downloaded %d records", len(r))
            all_data.extend(r)
        else:
            module_logger.info(
                "Unable to download some records for "
                "%s (%s-%s)", idx, s_, e_)

        time.sleep(random.randint(1, 5))
        s = e2 + td(days=1)
        e2 = s + td(days=_PREF_DAYS)
        if e2 > e:
            e2 = e

    tbl = create_or_get_nse_indices_hist_data(metadata=db_meta)

    insert_statements = []
    for row in all_data:
        d = dt.date(dt.strptime(row[1].strip(), '%d-%b-%Y'))
        o = float(row[2])
        h = float(row[3])
        l = float(row[4])
        c = float(row[5])

        insert_st = tbl.insert().values(symbol=idx,
                                        date=d,
                                        open=o,
                                        high=h,
                                        low=l,
                                        close=c)
        insert_statements.append(insert_st)

    results = execute_many_insert(insert_statements, engine=db_meta.bind)
    for r in results:
        r.close()
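The while loop above is essentially a date-range chunker. The helper below isolates that idea as a sketch (it is not part of the original module) so the windowing logic is easier to follow:

from datetime import datetime as dt, timedelta as td

def date_windows(start, end, pref_days):
    # Yield (window_start, window_end) pairs covering start..end in chunks of
    # pref_days days, mirroring the s/e2 bookkeeping in the loop above.
    s = start
    while s <= end:
        e2 = min(s + td(days=pref_days), end)
        yield s, e2
        s = e2 + td(days=1)

# e.g. list(date_windows(dt(2020, 1, 1), dt(2020, 12, 31), 100))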