def main(args):
    """Populate the all-scrips table in the configured database.

    Command-line options are re-parsed from ``sys.argv`` (the incoming
    ``args`` value is shadowed, as in the original code).

    Returns:
        0 on success, -1 when the given ``--dbpath`` URL is invalid.
    """
    import argparse
    parser = argparse.ArgumentParser()

    # --dbpath option
    parser.add_argument("--dbpath",
                        help="Database URL to be used.",
                        dest="dbpath")

    args = parser.parse_args()

    # Make sure we can access the DB path if specified or else exit right here.
    db_metadata = None
    if args.dbpath:
        try:
            db_metadata = get_metadata(args.dbpath)
        except Exception as e:
            print("Not a valid DB URL: {} (Exception: {})".format(
                args.dbpath, e))
            return -1

    # NOTE(review): when --dbpath is omitted, db_metadata stays None and
    # `db_metadata.bind` below will raise AttributeError — same as the
    # original behavior; confirm whether a default DB should be used.
    insert_statements = populate_all_scrips_table(db_metadata)
    results = execute_many_insert(insert_statements, engine=db_metadata.bind)

    # FIX: close results in a plain loop instead of building a throwaway
    # list from a comprehension run only for its side effects.
    for result in results:
        result.close()

    return 0
def main(args):
    """Parse CLI options, validate the optional DB URL, then trigger a
    read of the historical data as dataframes.

    Returns:
        0 on success, -1 when the given ``--dbpath`` URL is invalid.
    """
    import argparse

    parser = argparse.ArgumentParser()
    # --dbpath option
    parser.add_argument("--dbpath",
                        help="Database URL to be used.",
                        dest="dbpath")
    args = parser.parse_args()

    # Validate the DB URL up front so we fail fast on a bad --dbpath.
    if args.dbpath:
        global _DB_METADATA
        try:
            _DB_METADATA = get_metadata(args.dbpath)
        except Exception as e:
            print("Not a valid DB URL: {} (Exception: {})".format(
                args.dbpath, e))
            return -1

    get_hist_data_as_dataframes_dict()
    return 0
def __init__(self, db_path=None, log_file=None):
    """Initialize worker state, logging and DB metadata.

    Args:
        db_path: database URL handed to ``get_metadata``.
        log_file: log file path; defaults to 'tickprocess_worker.log'.

    Raises:
        TickProcessWorkerExceptionDBInit: when DB metadata cannot be
            created from ``db_path``.
    """
    self.panels = {}
    self.symbols = []
    self._last_symbols = None

    # Setup a logger, we might need it immediately below
    log_file = log_file or 'tickprocess_worker.log'
    self.logger = get_logger(name=str(self.__class__), log_file=log_file)

    # DB related
    try:
        self._db_meta = get_metadata(db_path)
    except Exception as e:
        self.logger.error(e)
        self._db_meta = None
        # FIX: chain the original exception so the root cause is not
        # lost when callers inspect the raised error.
        raise TickProcessWorkerExceptionDBInit("Failed to Init DB") from e

    self._profiling = False
def main(args):
    """Download NSE corporate actions and insert them into the DB.

    Supports ``--all`` (every listed stock), ``--from DD-MM-YYYY`` (a
    recent window), positional stock symbols, and ``--dbpath``.

    Returns:
        0 on success, -1 on invalid DB URL or date input.
    """
    import argparse
    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--all",
                       help="Download data for all stocks. Usually you'd have "
                            "to do it only once.",
                       dest="all_stocks",
                       action="store_true")
    group.add_argument("--from",
                       help="Download data from this data. Date Format "
                            "'DD-MM-YYYY'.",
                       dest="from_date")
    # --dbpath option
    parser.add_argument("--dbpath",
                        help="Database URL to be used.",
                        dest="dbpath")

    args, unprocessed = parser.parse_known_args()

    # Make sure we can access the DB path if specified or else exit right
    # here.  FIX: db_meta used to be left unbound when --dbpath was
    # omitted, causing a NameError at the table-creation call below;
    # default it to None so the callee can use its own default metadata.
    db_meta = None
    if args.dbpath:
        try:
            db_meta = get_metadata(args.dbpath)
        except Exception as e:
            print("Not a valid DB URL: {} (Exception: {})".format(
                args.dbpath, e))
            return -1

    all_corp_actions = []
    if args.all_stocks:
        unprocessed = (x.symbol for x in nse_get_all_stocks_list())

    # It's possible to give --from DD-MM-YYYY infy (say), just help
    for stock in unprocessed:
        # Throttle requests so we do not hammer the remote server.
        time.sleep(random.randint(1, 5))
        try:
            corp_actions = get_corp_action_csv(sym_name=stock)
        except Exception as e:
            module_logger.exception(e)
            continue
        all_corp_actions.extend(corp_actions)

    if args.from_date:
        try:
            # FIX: call .date() on the parsed datetime instead of the
            # unbound-method form dt.date(dt.strptime(...)).
            from_date = dt.strptime(args.from_date, '%d-%m-%Y').date()
            today = dt.now().date()
            td = today - from_date
            if td.days < 0:
                print("From date cannot be greater than today.")
                return -1
            if td.days < 15:
                corp_actions = get_corp_action_csv(time_period='15_DAYS')
            else:
                corp_actions = get_corp_action_csv(time_period='3_MONTHS')
            all_corp_actions.extend(corp_actions)
        except ValueError:
            print("Date '{}' in unsupported format".format(args.from_date))
            return -1

    tbl = create_or_get_nse_corp_actions_hist_data(metadata=db_meta)
    all_insert_statements = []
    for corp_action in all_corp_actions:
        module_logger.debug("CorpAction :%s", str(corp_action))
        insert_st = tbl.insert().values(symbol=corp_action.sym,
                                        ex_date=corp_action.ex_date,
                                        action=corp_action.action,
                                        ratio=corp_action.ratio,
                                        delta=corp_action.delta)
        all_insert_statements.append(insert_st)
        module_logger.debug("insert_st : %s", insert_st.compile().params)

    # NOTE(review): db_meta.bind is None when --dbpath is omitted —
    # confirm execute_many_insert handles a None engine.
    results = execute_many_insert(all_insert_statements, engine=db_meta.bind)
    for result in results:
        result.close()

    return 0
def main(args):
    """Download historical data for one or more NSE indices.

    Options: ``--list`` (show supported indices), ``--all`` (all
    indices), ``--from``/``--to`` (DD-MM-YYYY range), ``--yes`` (skip
    confirmation) and ``--dbpath``.

    Returns:
        0 on success or user abort, -1 on an invalid DB URL; exits the
        process with -1 on bad dates (preserved from original).
    """
    import argparse
    parser = argparse.ArgumentParser()
    # -l or --list (list all indices)
    parser.add_argument('--list',
                        help="List all supported indices.",
                        dest="list_indices",
                        action="store_true")
    # --full option
    parser.add_argument("--full-to",
                        help="download full data from 1 Jan 2002",
                        action="store_true")
    # --from option
    parser.add_argument("--from",
                        help="From Date in DD-MM-YYYY format. "
                             "Default is 01-01-2002",
                        dest='fromdate',
                        default='')
    # --to option
    parser.add_argument("--to",
                        help="From Date in DD-MM-YYYY format. "
                             "Default is Today.",
                        dest='todate',
                        default="today")
    # --yes option
    parser.add_argument("--yes",
                        help="Answer yes to all questions.",
                        dest="sure",
                        action="store_true")
    # --all option
    parser.add_argument("--all",
                        help="Download all indices.",
                        dest="all_indices",
                        action="store_true")
    # --dbpath option
    parser.add_argument("--dbpath",
                        help="Database URL to be used.",
                        dest="dbpath")

    args, unprocessed = parser.parse_known_args()

    # Make sure we can access the DB path if specified or else exit right
    # here.  FIX: db_meta used to be left unbound when --dbpath was
    # omitted, producing a NameError at the get_indices() call below.
    db_meta = None
    if args.dbpath:
        try:
            db_meta = get_metadata(args.dbpath)
        except Exception as e:
            print("Not a valid DB URL: {} (Exception: {})".format(
                args.dbpath, e))
            return -1

    if args.list_indices:
        print(_format_indices())
        return 0

    try:
        if args.fromdate:
            from_date = dt.strptime(args.fromdate, _DATE_FMT)
        if args.todate.lower() == 'today':
            args.todate = dt.now().strftime(_DATE_FMT)
        to_date = dt.strptime(args.todate, _DATE_FMT)
    except ValueError:
        print(parser.format_usage())
        sys.exit(-1)

    # We are now ready to download data
    if args.fromdate and from_date > to_date:
        print(parser.format_usage())
        sys.exit(-1)

    if args.fromdate:
        num_days = to_date - from_date
        # Only ask for confirmation on large downloads unless --yes.
        if num_days.days > _WARN_DAYS and not args.sure:
            # FIX: "Tatal" -> "Total" typo in the user-facing prompt.
            answer = input("Total number of days for download is %1d. "
                           "Are you Sure?[y|N] " % num_days.days)
            sure = answer.lower() in ("y", "ye", "yes")
        else:
            sure = True
    else:
        answer = input("Downloading data from beginning for the Index. "
                       "Are you Sure?[y|N] ")
        sure = answer.lower() in ("y", "ye", "yes")

    if not sure:
        return 0

    if args.all_indices:
        unprocessed = _INDICES_DICT.keys()

    return get_indices(unprocessed, db_meta, args.fromdate, args.todate)
approaches separately and see why something seems more expensive. """ from __future__ import print_function import time import cProfile import pstats import StringIO import pandas as pd from read_sql_data import get_hist_data_as_dataframes_dict from tickerplot.sql.sqlalchemy_wrapper import get_metadata metadata = get_metadata('sqlite:///nse_hist_data.sqlite3') max_limit = 40 limit = 20 while limit < max_limit: scripdata_dict = get_hist_data_as_dataframes_dict(metadata=metadata, limit=limit) pan = pd.Panel(scripdata_dict) then0 = time.time() pr = cProfile.Profile() pr.enable() pan2 = pan.transpose(2, 0, 1) cl = pan2['close']
def __init__(self, method='cProfile', limit_rows=0, db_path=None):
    """Capture the profiling configuration and bind DB metadata.

    Args:
        method: profiler backend name (default 'cProfile').
        limit_rows: row cap for reads; presumably 0 means unlimited —
            confirm against callers.
        db_path: database URL handed to ``get_metadata``.
    """
    self.method_name = method
    self.limit_rows = limit_rows
    self.db_path = db_path
    # Resolve SQLAlchemy metadata up front so a bad URL fails early.
    self.metadata = get_metadata(self.db_path)
def main(args):
    """Download NSE bhavcopy data for a date range and update the DB.

    Options: ``--from``/``--to`` (DD-MM-YYYY, defaults 01-01-2002 and
    today), ``--yes`` (skip confirmation) and ``--dbpath``.  After the
    download, symbol name changes are applied to the DB.

    Returns:
        0 on success, -1 on invalid DB URL or dates; exits the process
        on user abort or missing name-change data (preserved from the
        original control flow).
    """
    # We run the full program
    import argparse
    parser = argparse.ArgumentParser()
    # --full option
    parser.add_argument("--full-to",
                        help="download full data from 1 Jan 2002",
                        action="store_true")
    # --from option
    parser.add_argument("--from",
                        help="From Date in DD-MM-YYYY format. "
                             "Default is 01-01-2002",
                        dest='fromdate',
                        default="01-01-2002")
    # --to option
    parser.add_argument("--to",
                        help="From Date in DD-MM-YYYY format. "
                             "Default is Today.",
                        dest='todate',
                        default="today")
    # --yes option
    parser.add_argument("--yes",
                        help="Answer yes to all questions.",
                        dest="sure",
                        action="store_true")
    # --dbpath option
    parser.add_argument("--dbpath",
                        help="Database URL to be used.",
                        dest="dbpath")

    args = parser.parse_args()
    print(args)

    # Make sure we can access the DB path if specified or else exit right here.
    if args.dbpath:
        try:
            global _DB_METADATA
            _DB_METADATA = get_metadata(args.dbpath)
        except Exception as e:
            print("Not a valid DB URL: {} (Exception: {})".format(
                args.dbpath, e))
            return -1

    try:
        from_date = dt.strptime(args.fromdate, _DATE_FMT)
        if args.todate.lower() == 'today':
            args.todate = dt.now().strftime(_DATE_FMT)
        to_date = dt.strptime(args.todate, _DATE_FMT)
    except ValueError:
        print(parser.format_usage())
        return -1

    # We are now ready to download data
    if from_date > to_date:
        print(parser.format_usage())
        return -1

    num_days = to_date - from_date
    # Only ask for confirmation on large downloads unless --yes given.
    if num_days.days > _WARN_DAYS and not args.sure:
        # FIX: "Tatal" -> "Total" typo in the user-facing prompt.
        answer = input("Total number of days for download is %1d. "
                       "Are you Sure?[y|N] " % num_days.days)
        sure = answer.lower() in ("y", "ye", "yes")
    else:
        sure = True

    if not sure:
        sys.exit(0)

    module_logger.info("Downloading data for %d days", num_days.days)

    cur_date = from_date
    while cur_date <= to_date:
        module_logger.debug("Getting data for %s", str(cur_date))
        scrips_dict = get_bhavcopy(cur_date)
        if scrips_dict is not None:
            _update_bhavcopy(cur_date, scrips_dict)
        # Throttle requests so we do not hammer the remote server.
        time.sleep(random.randrange(1, 10))
        cur_date += td(1)

    # Apply the name changes to the DB
    sym_change_tuples = nse_get_name_change_tuples()
    if len(sym_change_tuples) == 0:
        module_logger.info("No name change tuples found...")
        sys.exit(-1)

    _apply_name_changes_to_db(sym_change_tuples)

    return 0