def archive_portfolio_strategy_signal_data(model=None,
                                           signal_data=None,
                                           signal_data_z=None,
                                           backtest_update_start_date=None):
    """Archive raw and z-scored signal data for a portfolio strategy.

    When ``backtest_update_start_date`` is given, only rows on or after
    that date are archived (incremental backtest update). Nothing is
    written if the filtered signal data is empty.
    """
    if backtest_update_start_date is not None:
        # Restrict both frames to the incremental update window; the
        # 'date' level of the MultiIndex carries the observation date.
        keep = (signal_data.index.get_level_values('date')
                >= backtest_update_start_date)
        signal_data = signal_data[keep]
        keep_z = (signal_data_z.index.get_level_values('date')
                  >= backtest_update_start_date)
        signal_data_z = signal_data_z[keep_z]

    if len(signal_data) == 0:
        return

    db_signals = get_strategy_signals(model_name=model.name)
    param_id = get_model_param_config_id(model_name=model.name,
                                         settings=model.settings)
    archive_df = model.process_signal_data_for_archive(
        signal_data=signal_data,
        signal_data_z=signal_data_z,
        db_signals=db_signals,
        model_param_id=param_id)
    db.execute_db_save(df=archive_df,
                       table=db.get_table('model_signal_data'),
                       time_series=True)
def archive_portfolio_strategy_security_master(model=None, sec_master=None):
    """Archive a strategy's security master.

    Instrument terms (underlying, start/maturity dates, strike) are
    packed into a single JSON 'instrument_attributes' column before the
    upsert keyed on instrument_name.
    """
    attribute_fields = ['underlying', 'start_date', 'maturity_date', 'strike']

    # Work on a copy; dates must be strings before JSON serialization.
    sec_master = sec_master.copy(deep=True)
    for date_col in ('start_date', 'maturity_date'):
        sec_master[date_col] = sec_master[date_col].astype(str)

    archive = utils.json_column_from_columns(
        df=sec_master,
        columns=attribute_fields,
        new_col_name='instrument_attributes')
    archive['model_id'] = get_model_id(model_name=model.name)
    archive['model_param_id'] = get_model_param_config_id(
        model_name=model.name, settings=model.settings)
    archive['instrument_type'] = 'volatility_swap'
    archive = archive.rename(columns={'instrument': 'instrument_name'})
    archive = archive[['model_id', 'model_param_id', 'instrument_name',
                       'instrument_type', 'instrument_attributes']]

    db.execute_db_save(df=archive,
                       table=db.get_table('model_portfolio_sec_master'),
                       use_column_as_key='instrument_name')
def archive_model_output_config():
    """Seed the model_output_config table with the output names each
    model publishes.

    The 'id' column is the row position, so the (model, output) list
    must stay in a fixed order for repeated runs to upsert the same
    rows rather than create new ones.
    """
    # Fixed order => stable row ids across runs.
    outputs_by_model = [
        ('vix_curve', ['pnl_net', 'pnl_gross', 'pnl_static', 'position']),
        ('equity_vs_vol', ['pnl_net', 'pnl_gross', 'pnl_static', 'position']),
        ('vol_rv', ['pnl_net', 'pnl_gross', 'vega_net', 'vega_gross']),
    ]
    rows = []
    for model_name, output_names in outputs_by_model:
        model_id = get_model_id(model_name)
        rows.extend([model_id, output_name] for output_name in output_names)

    df = pd.DataFrame(rows, columns=['model_id', 'output_name'])
    # Original used df.index.get_level_values(None), which only works
    # because the default index happens to be named None; the index
    # itself is the intended value.
    df['id'] = df.index
    table = db.get_table('model_output_config')
    db.execute_db_save(df=df, table=table)
def archive_strategy_signal_pnl(model=None, signal_pnl=None):
    """Archive per-signal PnL for a strategy into model_signal_data."""
    db_signals = get_strategy_signals(model_name=model.name)
    param_id = get_model_param_config_id(model_name=model.name,
                                         settings=model.settings)
    archive_df = model.process_signal_pnl_for_archive(
        signal_pnl=signal_pnl,
        db_signals=db_signals,
        model_param_id=param_id)
    db.execute_db_save(df=archive_df,
                       table=db.get_table('model_signal_data'),
                       time_series=True)
def archive_models():
    """Insert the static model registry rows, skipping any model name
    already present in the models table."""
    df = pd.DataFrame(columns=['id', 'model_name', 'ref_object_type'])
    df.loc[0] = [1, 'vix_curve', 'strategy']
    df.loc[1] = [2, 'equity_vs_vol', 'strategy']
    df.loc[2] = [3, 'vol_rv', 'equity']

    # Only insert names the database does not already know about.
    known_names = get_models()['model_name'].tolist()
    df = df[~df['model_name'].isin(known_names)]

    table = db.get_table('models')
    db.execute_bulk_insert(df=df, table=table)
def archive_portfolio_strategy_positions(model=None, positions=None):
    """Archive a strategy's positions into model_portfolio_outputs."""
    mid = get_model_id(model_name=model.name)
    param_id = get_model_param_config_id(model_name=model.name,
                                         settings=model.settings)
    sec_master = get_portfolio_strategy_security_master(model=model)

    archive_df = model.process_positions_for_archive(
        positions=positions,
        db_sec_master=sec_master,
        model_id=mid,
        model_param_id=param_id)
    db.execute_db_save(df=archive_df,
                       table=db.get_table('model_portfolio_outputs'),
                       time_series=True)
def archive_model_param_config(model_name=None, settings=None):
    """Insert a model's parameter settings (serialized to JSON) into
    model_param_config, unless an identical settings blob is already
    stored for that model.
    """
    export_settings = model_settings_to_json(settings)
    model_id = get_model_id(model_name)
    model_params = get_model_param_configs(model_name)
    if export_settings in model_params['model_params'].tolist():
        return  # identical config already archived; nothing to do

    # Next available id is max(id) + 1. On an empty table SQL max(id)
    # comes back NULL (None/NaN), in which case ids start at 0.
    # Original code hid this behind a bare `except:`; handle it
    # explicitly and catch only the plausible lookup failures.
    max_id_df = db.read_sql('select max(id) from model_param_config')
    try:
        max_id = max_id_df['max'][0]
        new_id = 0 if pd.isna(max_id) else max_id + 1
    except (KeyError, IndexError):
        # Result column/row missing entirely; fall back to 0 as before.
        new_id = 0

    new_row = pd.DataFrame(columns=['id', 'model_id', 'model_params'])
    new_row.loc[0] = [new_id, model_id, export_settings]
    table = db.get_table('model_param_config')
    db.execute_bulk_insert(df=new_row, table=table)
def archive_strategy_signals(model=None, signal_data=None):
    """Register any of this model's signal names (the columns of
    signal_data) that are not already in the model_signals table."""
    model_id = get_model_id(model.name)
    known = get_strategy_signals(model_name=model.name)['signal_name'].tolist()

    df = pd.DataFrame({'model_id': model_id,
                       'signal_name': signal_data.columns})
    # Keep only names the database has not seen yet.
    df = df[~df['signal_name'].isin(known)]

    if len(df) > 0:
        table = db.get_table('model_signals')
        db.execute_bulk_insert(df=df, table=table)
def archive_model_outputs(model=None, outputs=None):
    """Archive a model's processed outputs into model_outputs.

    Each row's model_output_id is resolved from the model's configured
    output fields by output_name; rows whose output_name has no
    configured field keep a model_output_id of 0.
    """
    # Id of this model's parameter configuration.
    param_config_id = get_model_param_config_id(model_name=model.name,
                                                settings=model.settings)

    df = model.process_model_outputs(outputs)
    df['model_param_id'] = param_config_id
    df['model_output_id'] = 0  # default for unconfigured output names
    df['model_id'] = get_model_id(model.name)

    # Map each configured output name onto its id. The original used
    # `for i in range(0, len(...))` with .iloc and accumulated a dict it
    # only ever read for the current key; iterate rows directly instead.
    output_fields = get_model_output_fields(model.name)
    for _, field_row in output_fields.iterrows():
        field = str(field_row['output_name'])
        mask = df['output_name'] == field
        df.loc[mask, 'model_output_id'] = field_row['id']

    table = db.get_table('model_outputs')
    db.execute_db_save(df=df, table=table, time_series=True)