def __init__(self, limit=None, limit_time=None):
    """Initialize the API rate limiter and start its timer.

    Args:
        limit: Maximum number of API calls per window. Defaults to the
            ``api_limit`` setting, read lazily at call time.
        limit_time: Window length (seconds, presumably — TODO confirm against
            ``start``/callers). Defaults to the ``api_limit_time`` setting.

    Fix: the original evaluated ``conf.settings()`` in the default-argument
    expressions, i.e. once at import time — a stale-config / import-order
    hazard. Using ``None`` sentinels fetches the settings when the object is
    actually constructed; callers passing explicit values are unaffected.
    """
    if limit is None:
        limit = conf.settings()["api_limit"]
    if limit_time is None:
        limit_time = conf.settings()["api_limit_time"]
    self._start_time = None
    self._limit = int(limit)
    self._limit_time = int(limit_time)
    self._call_counter = 0
    self._elapsed_time = 0
    # Begin timing immediately; start() is defined elsewhere in the class.
    self.start()
def get_ticker_to_update_df(_conn=conn, col_name="ticker", table_name="update_ticker_list", exchange=str(conf.settings()["exchange"]).lower()):
    """Return a DataFrame of *col_name* values from the newest batch in *table_name*.

    "Newest batch" means the rows whose ``create_date`` equals the maximum
    ``create_date`` in the table.

    NOTE(review): *exchange* is accepted but never referenced in the query —
    kept for interface compatibility; confirm whether it was meant to filter.
    """
    query = (
        f"select {col_name} from {table_name} "
        f"where create_date = (select max(create_date) from {table_name})"
    )
    return pd.read_sql(query, _conn)
def copy_csv_to_db(file, table_name, _conn=conn, sqlserver_engine=conf.settings()["db_engine"]):
    """Bulk-load a CSV file into *table_name* via PostgreSQL ``COPY``.

    The first line of the file is assumed to be a header row and is skipped.
    For any engine other than ``"postgresql"`` this is a silent no-op, as in
    the original code.

    Args:
        file: Path to a comma-separated CSV file with a header row.
        table_name: Destination table; column order must match the CSV.
        _conn: Open DB-API connection (module-level ``conn`` by default).
        sqlserver_engine: Engine name from config; only ``"postgresql"``
            triggers the copy.

    Fix: the original never closed the cursor (resource leak); the cursor is
    now released in a ``finally`` block whether or not the copy succeeds.
    """
    if sqlserver_engine != "postgresql":
        return
    cursor = _conn.cursor()
    try:
        # No `csv` module needed: copy_from streams the raw file object.
        with open(file, 'r') as f:
            next(f)  # Skip the header row.
            cursor.copy_from(f, table_name, sep=',')
        _conn.commit()
    finally:
        cursor.close()
def insert_df_to_db(df, table_name, sqlalchemy_engine=sqlalchemy_engine, sqlserver_engine=conf.settings()["db_engine"]):
    """Append the rows of *df* to *table_name* through SQLAlchemy.

    PostgreSQL gets ``method="multi"`` (multi-row INSERT statements); MSSQL
    uses the default row-by-row insert. Any other engine value is a no-op.
    """
    shared_kwargs = dict(if_exists="append", index=False)
    if sqlserver_engine == "postgresql":
        # Batched multi-value inserts for postgres.
        df.to_sql(table_name, sqlalchemy_engine, method="multi", **shared_kwargs)
    elif sqlserver_engine == "mssql":
        df.to_sql(table_name, sqlalchemy_engine, **shared_kwargs)
def __init__(self):
    """Resolve the configured database engine and its connection parameters."""
    engine_name = conf.settings()['db_engine']
    self.db_engine = engine_name
    # get_db_param() is defined elsewhere in the class and presumably reads
    # self.db_engine, so it must run after the assignment above.
    self.db_param = self.get_db_param()