def add_table(self, env_name, table_key, table_name, hash_name, hash_type='S', has_range=False, range_name=None, range_type='S'):
    """Create a DynamoDB table and register it in an existing environment.

    Args:
        env_name: name of the environment; must already exist in the envs table.
        table_key: logical key under which the table is stored in the env's db_dict.
        table_name: physical DynamoDB table name to create.
        hash_name / hash_type: partition-key attribute name and DynamoDB type.
        has_range / range_name / range_type: optional sort-key definition.

    Raises:
        ValueError: if the envs table is missing, the env does not exist,
            or table_name is already registered in the env.
    """
    (dbr, dbc, s3r, s3c, bucket) = self.init_aws_resources()
    DB = Client(dbc)
    tables = DB.list_tables()
    # Consistency fix: check the configured envs table name instead of the
    # hard-coded literal "envs" (the lookup below already uses the attribute).
    if self.envs_table_name not in tables:
        raise ValueError("You need to create envs table")
    table_envs = Table(dbr.Table(self.envs_table_name))
    existing_envs = table_envs.scan()
    names = [e["name"] for e in existing_envs]
    if env_name not in names:
        raise ValueError("This env does not exist")
    env = table_envs.get_item("name", env_name)
    db_dict = env["db_dict"]
    if table_name in db_dict.values():
        raise ValueError("This table exists already")
    try:
        DB.create_table(table_name, hash_name, hash_type, has_range, range_name, range_type)
    except Exception as e:
        # Bug fix: this was a bare `except:` that swallowed every error and the
        # code then registered the (never-created) table in db_dict anyway.
        # Now report the failure and skip the registration.
        print("unable to create table: " + str(e))
    else:
        # Only record the mapping once the table was actually created.
        db_dict[table_key] = table_name
        env["db_dict"] = db_dict
        table_envs.put_item(env)
def select_env(self, env_name):
    """Resolve an environment name into its AWS handles and table mapping.

    Args:
        env_name: name of the environment to look up in the envs table.

    Returns:
        Tuple ``(dbr, dbc, s3r, s3c, bucket, db_dict)`` — the AWS
        resources/clients plus the env's logical-name -> table-name mapping.

    Raises:
        ValueError: if the envs table or the requested env does not exist.
    """
    (dbr, dbc, s3r, s3c, bucket) = self.init_aws_resources()
    DB = Client(dbc)
    tables = DB.list_tables()
    # Consistency fix: check the configured envs table name instead of the
    # hard-coded literal "envs" (the lookup below already uses the attribute).
    if self.envs_table_name not in tables:
        raise ValueError("You need to create envs table")
    table_envs = Table(dbr.Table(self.envs_table_name))
    names = [e["name"] for e in table_envs.scan()]
    if env_name not in names:
        raise ValueError("You need to create this env")
    env = table_envs.get_item("name", env_name)
    return dbr, dbc, s3r, s3c, bucket, env["db_dict"]
def process_signal_all(ref_ticker, maturity_suffix):
    """Compute and persist signals for every ticker found in the vols table.

    The destination table ("signals_<ref_ticker>_<maturity_suffix>") and its
    "reverse" index are created on first use. One DataFrame batch is written
    per ticker.
    """
    table_name = "signals" + "_" + ref_ticker + "_" + maturity_suffix
    client = Client(dbc)
    # Create the destination table (ticker / trade_date keys) if missing.
    if table_name not in client.list_tables():
        client.create_table(table_name, "ticker", "S", True, "trade_date", "S")
        client.add_index(table_name, "reverse", "trade_date", "S", "ticker", "S")
    signal_table = Table(dbr.Table(table_name))
    # Signal selection and reference data are shared by every ticker.
    selected_signals = select_signals(
        maturity_suffix,
        param_rviv.pct_prefix_list,
        param_rviv.median_prefix_list,
        param_rviv.proba_1_prefix_list,
    )
    ref_data = query_rviv(ref_ticker)
    vols = Table(dbr.Table(db_dict["vols_table"]))
    tickers = list(set([row["ticker"] for row in vols.scan()]))
    for tick in tickers:
        # Compute this ticker's signals and write the batch.
        signal_table.put_df_batch(calc_signals(tick, ref_data, selected_signals))
def process_signal_ticker(ticker, ref_ticker, maturity_suffix):
    """Compute, persist and return the signals DataFrame for one ticker.

    The destination table ("signals_<ref_ticker>_<maturity_suffix>") and its
    "reverse" index are created on first use.
    """
    table_name = "signals" + "_" + ref_ticker + "_" + maturity_suffix
    client = Client(dbc)
    # Create the destination table (ticker / trade_date keys) if missing.
    if table_name not in client.list_tables():
        client.create_table(table_name, "ticker", "S", True, "trade_date", "S")
        client.add_index(table_name, "reverse", "trade_date", "S", "ticker", "S")
    signal_table = Table(dbr.Table(table_name))
    selected_signals = select_signals(
        maturity_suffix,
        param_rviv.pct_prefix_list,
        param_rviv.median_prefix_list,
        param_rviv.proba_1_prefix_list,
    )
    df = calc_signals(ticker, query_rviv(ref_ticker), selected_signals)
    signal_table.put_df_batch(df)
    return df
def create_env(self, env_name, db_dict, db_schema):
    """Create a new environment and every table its schema describes.

    Registers ``env_name`` (with its logical-name -> table-name mapping) in
    the envs table, then creates each table from ``db_schema``.

    Returns:
        Tuple ``(dbr, dbc, s3r, s3c, bucket, db_dict)``, mirroring
        ``select_env`` so callers can use either entry point.
    """
    (dbr, dbc, s3r, s3c, bucket) = self.init_aws_resources()
    client = Client(dbc)
    client.create_table(self.envs_table_name, "name", "S")
    envs = Table(dbr.Table(self.envs_table_name))
    envs.put_item({"name": env_name, "db_dict": db_dict})
    # Each schema entry is positional:
    # (table_name, hash_name, hash_type, has_range, range_name, range_type)
    # — presumably always six elements; TODO confirm against param.db_schema.
    for spec in db_schema.values():
        client.create_table(spec[0], spec[1], spec[2], spec[3], spec[4], spec[5])
    return dbr, dbc, s3r, s3c, bucket, db_dict
def process_ts_ticker(ticker, strikes, maturity_suffix, create_tables=False):
    """Build the time series for *ticker* at each strike and persist them.

    Failures never propagate: a whole-ticker calculation error or a
    per-strike write error is recorded in the "ts_errors" table instead.

    Args:
        ticker: instrument identifier.
        strikes: strikes to process; one destination table per strike.
        maturity_suffix: maturity tag used to derive the table names.
        create_tables: when True, create any missing destination tables
            (plus their "reverse" index) before processing.
    """
    errors_table = Table(dbr.Table("ts_errors"))
    table_names = [derive_table_name(s, maturity_suffix) for s in strikes]
    if create_tables:
        client = Client(dbc)
        existing = client.list_tables()
        for name in table_names:
            if name not in existing:
                client.create_table(name, "ticker", "S", True, "trade_date", "S")
                client.add_index(name, "reverse", "trade_date", "S", "ticker", "S")
    # Batch calculation; the per-strike loop stays inside the try so that any
    # unexpected failure is still recorded as a "ts_calc" error.
    try:
        output, _ = ticker_bo_ts(
            ticker,
            strikes,
            maturity_suffix,
            all_dates[0],
            all_dates[-1],
            timeserie.strike_change_threshold,
        )
        for idx, strike in enumerate(strikes):
            try:
                output[strike]["ticker"] = ticker
                Table(dbr.Table(table_names[idx])).put_df_batch(output[strike])
            except Exception as ee:
                # Per-strike failure: log it and continue with the next strike.
                errors_table.put_item({
                    "ticker": ticker,
                    "error": str(strike),
                    "exception": str(ee)
                })
    except Exception as e:
        errors_table.put_item({
            "ticker": ticker,
            "error": "ts_calc",
            "exception": str(e)
        })
import pandas as pd
from src.library.dynamo.Client import Client
from src.library.dynamo.Table import Table
from src.library.helpers.dates import get_dates
from src.library.helpers.general import bring_columns_first
from src.library.osmv.Osmv import Osmv
from src.library.params import param
from src.library.params import rviv as param_rviv

# Module-level setup: resolve the configured environment and open handles to
# the tables this module reads from.
osmv = Osmv(param.IS_LOCAL, param.BUCKET_NAME)
(dbr, dbc, s3r, s3c, bucket, db_dict) = osmv.select_env(param.ENV_USED)
(list_of_dates, list_obj_dates) = get_dates()
DB = Client(dbc)
data_table = Table(dbr.Table(db_dict["data_table"]))
vols_table = Table(dbr.Table(db_dict["vols_table"]))
rv_table = Table(dbr.Table(db_dict["rv_table"]))
iv_table = Table(dbr.Table(db_dict["iv_table"]))
# Derive realized-vol window lengths from the IV windows: each calendar-day
# window is scaled by AF/365 — presumably AF is an annualization factor in
# trading days; TODO confirm against param_rviv.
iv_windows = param_rviv.IV_WINDOWS
AF = param_rviv.AF
rv_windows = {("rv_" + k): int(AF / 365 * v) for k, v in iv_windows.items()}
rv_cols = list(rv_windows.keys())


def query_rviv(ticker):
    # Load realized-vol and implied-vol rows for one ticker, indexed by
    # trade_date, and join them column-wise.
    rv = pd.DataFrame(rv_table.query("ticker", ticker)).set_index("trade_date")
    iv = pd.DataFrame(iv_table.query("ticker", ticker)).set_index("trade_date")
    df = pd.concat((rv, iv), axis=1)
    # NOTE(review): no return statement is visible here — the function body
    # appears to continue beyond this chunk of the file.
import warnings
from pandas.core.common import SettingWithCopyWarning
from src.library.dynamo.Client import Client
from src.library.helpers.dates import get_dates
from src.library.osmv.Osmv import Osmv
from src.library.params import param

# One-off bootstrap script: silence pandas chained-assignment warnings, create
# the "test" environment with its configured tables, and add the extra index
# the environment creation does not cover.
warnings.simplefilter(action="ignore", category=SettingWithCopyWarning)
osmv = Osmv(param.IS_LOCAL,param.BUCKET_NAME)
(dbr, dbc, s3r, s3c, bucket, db_dict) = osmv.create_env("test",param.db_dict,param.db_schema)
# Secondary "reverse" index on data_test (trade_date hash, ref range).
Client(dbc).add_index("data_test", "reverse", "trade_date", "S", "ref", "S")
(list_of_dates,list_obj_dates) = get_dates()
# osmv = Osmv(param.IS_LOCAL, param.BUCKET_NAME) # # (dbr, dbc, s3r, s3c, bucket, db_dict) = osmv.select_env(param.ENV_USED) # list_of_dates = list_dates() # # from pandas.core.common import SettingWithCopyWarning # import warnings # # warnings.simplefilter(action="ignore", category=SettingWithCopyWarning) # data_table = Table(dbr.Table(db_dict["data_table"])) # stock_splits_table = Table(dbr.Table(db_dict["splits_table"])) # rv_table = Table(dbr.Table(db_dict["rv_table"])) # errors_table = Table(dbr.Table(db_dict["errors_table"])) if __name__ == '__main__': # df = pd.DataFrame( # data_table.query_index_range_begins_with("reverse", "trade_date", dd, "ref", ticker + "-")) # items = (Table(dbr.Table("ts_90_3m")).scan()) # tickers = sorted(set([i["ticker"] for i in items])) # print(tickers) # tickers = ['A', 'AAPL', 'ABX', 'ACN', 'ADBE', 'ADM', 'ADSK', 'AEM'] # DB = Client(dbc) # DB.create_table("data_test2","ref","S",True,"trade_date","S") (dbr, dbc, s3r, s3c, bucket) = osmv.init_aws_resources() DB = Client(dbc) DB.create_table("envs", "name", "S") print(DB.list_tables()) DB.create_table("dataset", "ref", "S", True, "trade_date", "S") print(DB.list_tables())