def archive_portfolio_strategy_security_master(model=None, sec_master=None):
    """Persist a model's security master to 'model_portfolio_sec_master'.

    Selected instrument attributes are serialized into a JSON column, each
    row is tagged with the model / parameter-config identifiers, and the
    result is upserted keyed on instrument name.
    """
    # Fields rolled into the JSON 'instrument_attributes' column
    attribute_fields = ['underlying', 'start_date', 'maturity_date', 'strike']

    # Work on a deep copy; dates must be strings to serialize into JSON
    working = sec_master.copy(deep=True)
    for date_col in ('start_date', 'maturity_date'):
        working[date_col] = working[date_col].astype(str)

    archive_df = utils.json_column_from_columns(
        df=working,
        columns=attribute_fields,
        new_col_name='instrument_attributes')

    archive_df['model_id'] = get_model_id(model_name=model.name)
    archive_df['model_param_id'] = get_model_param_config_id(
        model_name=model.name, settings=model.settings)
    archive_df['instrument_type'] = 'volatility_swap'
    archive_df = archive_df.rename(columns={'instrument': 'instrument_name'})

    # Restrict to the archive table's column set, in schema order
    archive_df = archive_df[[
        'model_id', 'model_param_id', 'instrument_name', 'instrument_type',
        'instrument_attributes'
    ]]

    db.execute_db_save(df=archive_df,
                       table=db.get_table('model_portfolio_sec_master'),
                       use_column_as_key='instrument_name')
def archive_portfolio_strategy_signal_data(model=None,
                                           signal_data=None,
                                           signal_data_z=None,
                                           backtest_update_start_date=None):
    """Archive raw and z-scored signal data to 'model_signal_data'.

    When a backtest update start date is supplied, only observations on or
    after that date are archived (incremental update). Nothing is written
    when no rows remain after filtering.
    """
    if backtest_update_start_date is not None:
        # Trim both frames to the incremental-update window
        keep = (signal_data.index.get_level_values('date')
                >= backtest_update_start_date)
        signal_data = signal_data[keep]
        keep_z = (signal_data_z.index.get_level_values('date')
                  >= backtest_update_start_date)
        signal_data_z = signal_data_z[keep_z]

    if len(signal_data) == 0:
        return

    db_signals = get_strategy_signals(model_name=model.name)
    param_id = get_model_param_config_id(model_name=model.name,
                                         settings=model.settings)
    archive_df = model.process_signal_data_for_archive(
        signal_data=signal_data,
        signal_data_z=signal_data_z,
        db_signals=db_signals,
        model_param_id=param_id)
    db.execute_db_save(df=archive_df,
                       table=db.get_table('model_signal_data'),
                       time_series=True)
def archive_model_output_config():
    """(Re)write the 'model_output_config' mapping of models to outputs.

    Builds the static catalogue of output field names per model and upserts
    it; the row position doubles as the surrogate 'id' column.
    """
    # Static catalogue: model name -> output fields that model produces
    outputs_by_model = [
        ('vix_curve', ['pnl_net', 'pnl_gross', 'pnl_static', 'position']),
        ('equity_vs_vol', ['pnl_net', 'pnl_gross', 'pnl_static', 'position']),
        ('vol_rv', ['pnl_net', 'pnl_gross', 'vega_net', 'vega_gross']),
    ]

    df = pd.DataFrame(columns=['model_id', 'output_name'])
    row_num = 0
    for model_name, output_names in outputs_by_model:
        model_id = get_model_id(model_name)
        for output_name in output_names:
            df.loc[row_num] = [model_id, output_name]
            row_num += 1

    # Surrogate key from row position. Previously this used
    # df.index.get_level_values(None), which only worked because the flat
    # index happened to be unnamed; use the index directly.
    df['id'] = df.index
    table = db.get_table('model_output_config')
    db.execute_db_save(df=df, table=table)
def archive_strategy_signal_pnl(model=None, signal_pnl=None):
    """Archive per-signal PnL for a model into 'model_signal_data'."""
    db_signals = get_strategy_signals(model_name=model.name)
    param_id = get_model_param_config_id(model_name=model.name,
                                         settings=model.settings)
    archive_df = model.process_signal_pnl_for_archive(
        signal_pnl=signal_pnl,
        db_signals=db_signals,
        model_param_id=param_id)
    db.execute_db_save(df=archive_df,
                       table=db.get_table('model_signal_data'),
                       time_series=True)
def archive_portfolio_strategy_positions(model=None, positions=None):
    """Archive portfolio positions to 'model_portfolio_outputs'.

    Positions are mapped through the archived security master so rows carry
    the database instrument identifiers.
    """
    model_id = get_model_id(model_name=model.name)
    param_id = get_model_param_config_id(model_name=model.name,
                                         settings=model.settings)
    db_sec_master = get_portfolio_strategy_security_master(model=model)

    archive_df = model.process_positions_for_archive(
        positions=positions,
        db_sec_master=db_sec_master,
        model_id=model_id,
        model_param_id=param_id)

    db.execute_db_save(df=archive_df,
                       table=db.get_table('model_portfolio_outputs'),
                       time_series=True)
def archive_models():
    """Seed the 'models' table with the known model catalogue.

    Only model names not already present in the database are inserted.
    """
    catalogue = pd.DataFrame(columns=['id', 'model_name', 'ref_object_type'])
    for row_num, row in enumerate([[1, 'vix_curve', 'strategy'],
                                   [2, 'equity_vs_vol', 'strategy'],
                                   [3, 'vol_rv', 'equity']]):
        catalogue.loc[row_num] = row

    # Keep only names the database does not already know about
    known_names = get_models()['model_name'].tolist()
    is_new = ~catalogue['model_name'].isin(known_names)
    new_rows = catalogue.loc[catalogue.index[is_new]]

    db.execute_bulk_insert(df=new_rows, table=db.get_table('models'))
def get_futures_ivol_by_series(futures_series=None,
                               maturity_type=None,
                               start_date=history_default_start_date,
                               end_date=market_data_date,
                               option_type="call",
                               option_deltas=None):
    """Retrieve futures implied-vol-by-delta history for a series.

    :param futures_series: series identifier passed to db.get_futures_series
    :param maturity_type: 'constant_maturity' or 'futures_contract'
    :param start_date: inclusive lower bound on date
    :param end_date: inclusive upper bound on date
    :param option_type: 'call'/'put' (or 'c'/'p' shorthand)
    :param option_deltas: optional list of deltas; only those present in
        option_delta_grid are selected, otherwise all columns are returned
    :return: DataFrame ordered by date, days_to_maturity
    :raises ValueError: bad maturity_type or option_type
    """
    # Get series id
    series_id = db.get_futures_series(futures_series)['id'].values.tolist()

    # Handle maturity type choice
    if maturity_type == 'constant_maturity':
        table_name = "futures_ivol_constant_maturity_by_delta"
    elif maturity_type == 'futures_contract':
        table_name = "futures_ivol_fixed_maturity_by_delta"
    else:
        raise ValueError("maturity_type must be constant_maturity"
                         " or futures_contract")

    # TODO: move this normalization to someplace central
    if option_type.lower() == "c":
        option_type = "call"
    if option_type.lower() == "p":
        option_type = "put"
    if option_type.lower() not in ["put", "call"]:
        raise ValueError("option_type must be put or call")

    where_str = " option_type = '{0}'".format(option_type)
    where_str += " and date >= '{0}'".format(start_date)
    where_str += " and date <= '{0}'".format(end_date)
    where_str += " and series_id in {0}".format(
        dbutils.format_for_query(series_id))

    # Case: specific deltas requested
    if option_deltas is not None:
        # Map deltas onto column names, e.g. 5 -> 'ivol_05d', 25 -> 'ivol_25d'
        delta_cols = []
        for delta in option_deltas:
            if delta in option_delta_grid:
                prefix = 'ivol_0' if delta < 10 else 'ivol_'
                delta_cols.append(prefix + str(delta) + 'd')
        # BUG FIX: previously the raw Python list was interpolated into the
        # SQL, producing e.g. "select ..., ['ivol_10d', 'ivol_25d']", which
        # is invalid SQL. Join the column names instead.
        s = " select series_id, date, days_to_maturity, {0}".format(
            ", ".join(delta_cols))
        s += " from " + table_name
    else:
        s = " select * from " + table_name

    s += " where " + where_str + " order by date asc, days_to_maturity asc"
    data = db.read_sql(s)
    return data
def get_model_id(model_name=None):
    """Look up the integer id of a model by name in the 'models' table.

    :raises ValueError: when the model name is not registered
    """
    models = db.get_data(table_name='models')
    match = models[models['model_name'] == model_name]
    if match.empty:
        raise ValueError('model name not found!')
    return int(match['id'].values[0])
def archive_model_param_config(model_name=None, settings=None):
    """Register a model's parameter settings in 'model_param_config'.

    Settings are serialized to JSON; if that exact JSON is not already
    registered for the model, a new row is inserted with the next free id
    (0 when the table is empty).
    """
    export_settings = model_settings_to_json(settings)
    model_id = get_model_id(model_name)
    model_params = get_model_param_configs(model_name)

    if export_settings in model_params['model_params'].tolist():
        return  # already registered; nothing to do

    # Next surrogate id: max(id) + 1, or 0 for an empty table.
    # BUG FIX: the original used a bare `except:` around `max + 1`, which
    # both swallowed unrelated errors and silently produced NaN when the
    # SQL NULL came back as float NaN (NaN + 1 does not raise).
    max_id = db.read_sql('select max(id) from model_param_config')['max'][0]
    if pd.isnull(max_id):
        new_id = 0
    else:
        new_id = int(max_id) + 1

    new_row = pd.DataFrame(columns=['id', 'model_id', 'model_params'])
    new_row.loc[0] = [new_id, model_id, export_settings]
    table = db.get_table('model_param_config')
    db.execute_bulk_insert(df=new_row, table=table)
def archive_strategy_signals(model=None, signal_data=None):
    """Register any new signal names for a model in 'model_signals'.

    Signal names already present in the database are skipped; nothing is
    written when there are no new signals.
    """
    model_id = get_model_id(model.name)
    existing = get_strategy_signals(model_name=model.name)

    candidates = pd.DataFrame(columns=['model_id', 'signal_name'])
    candidates['signal_name'] = signal_data.columns
    candidates['model_id'] = model_id

    # Keep only names not yet registered
    is_new = ~candidates['signal_name'].isin(
        existing['signal_name'].tolist())
    new_signals = candidates.loc[candidates.index[is_new]]

    if len(new_signals) > 0:
        db.execute_bulk_insert(df=new_signals,
                               table=db.get_table('model_signals'))
def get_portfolio_strategy_security_master(model=None):
    """Fetch archived security-master rows for a model/param config."""
    model_id = get_model_id(model.name)
    model_param_id = get_model_param_config_id(model_name=model.name,
                                               settings=model.settings)
    query = ("select * from model_portfolio_sec_master "
             + " where model_id = {0} and model_param_id = {1}".format(
                 model_id, model_param_id))
    return db.read_sql(query)
def get_cftc_positioning_by_series(futures_series=None,
                                   start_date=market_data_date,
                                   end_date=None,
                                   cftc_financial=True,
                                   columns=None):
    """Retrieve CFTC positioning data for a futures series.

    :param cftc_financial: use the financial staging table when True,
        otherwise the commodity table
    :param columns: columns to select; a default positioning set when None
    :return: DataFrame with the report date renamed to 'date', or None when
        the futures series is unknown
    """
    if cftc_financial:
        table_name = 'staging_cftc_positioning_financial'
    else:
        table_name = 'staging_cftc_positioning_commodity'

    if columns is None:
        columns = ['report_date_as_mm_dd_yyyy',
                   'open_interest_all',
                   'dealer_positions_long_all',
                   'dealer_positions_short_all',
                   'asset_mgr_positions_long_all',
                   'asset_mgr_positions_short_all',
                   'lev_money_positions_long_all',
                   'lev_money_positions_short_all']

    # Reach back a business week so the most recent report is captured
    one_week_prior = start_date - BDay(5)

    fs = db.get_futures_series(futures_series)
    if len(fs) == 0:
        return None
    cftc_code = str(fs['cftc_code'].values[0])

    query = "select {0}".format(', '.join(columns))
    query += " from " + table_name
    query += ' where cftc_contract_market_code in {0}'.format(
        dbutils.format_for_query([cftc_code]))
    query += " and report_date_as_mm_dd_yyyy >= '{0}'".format(one_week_prior)
    if end_date is not None:
        query += " and report_date_as_mm_dd_yyyy <= '{0}'".format(end_date)
    query += ' order by report_date_as_mm_dd_yyyy asc'

    data = db.read_sql(query)
    data = data.rename(columns={'report_date_as_mm_dd_yyyy': 'date'})
    return data
def archive_model_outputs(model=None, outputs=None):
    """Archive a model's output time series to 'model_outputs'.

    Each row is tagged with the model id, the parameter-config id, and the
    per-output-field id taken from 'model_output_config'.
    """
    # Get id for param config
    param_config_id = get_model_param_config_id(model_name=model.name,
                                                settings=model.settings)

    # Ready to archive
    df = model.process_model_outputs(outputs)
    df['model_param_id'] = param_config_id
    df['model_output_id'] = 0
    df['model_id'] = get_model_id(model.name)

    # Map each output name onto its configured id
    output_fields = get_model_output_fields(model.name)
    for _, field_row in output_fields.iterrows():
        field_name = str(field_row['output_name'])
        matching = df.index[df['output_name'] == field_name]
        df.loc[matching, 'model_output_id'] = field_row['id']

    db.execute_db_save(df=df,
                       table=db.get_table('model_outputs'),
                       time_series=True)
def get_futures_contracts_by_series(futures_series=None,
                                    start_date=history_default_start_date,
                                    end_date=None):
    """Fetch futures contracts for a series, ordered by maturity date.

    Maturity dates are parsed to datetimes before returning.
    """
    query = ("select * from futures_contracts where series_id in "
             " (select id from futures_series where series in {0}".format(
                 dbutils.format_for_query(futures_series)) + ")")
    query += " and maturity_date >= '" + str(start_date) + "'"
    if end_date is not None:
        query += " and maturity_date <= '" + str(end_date) + "'"
    query += " order by maturity_date asc"

    data = db.read_sql(query)
    data['maturity_date'] = pd.to_datetime(data['maturity_date'])
    return data
def _get_time_series_data(s=None,
                          start_date=market_data_date,
                          end_date=None,
                          index_fields=None,
                          date_fields=None):
    """Append a date filter/ordering to a SQL query, run it, post-process.

    :param s: base SQL string already containing a WHERE clause
    :param start_date: inclusive lower bound on date
    :param end_date: optional inclusive upper bound on date
    :param index_fields: optional columns to promote to the index (a
        MultiIndex when more than one) and drop from the frame
    :param date_fields: columns to parse as datetimes; defaults to ['date'].
        BUG FIX: was a mutable default argument (list literal in the
        signature); replaced with a None sentinel.
    :return: DataFrame ordered by date ascending
    """
    if date_fields is None:
        date_fields = ['date']

    s += " and date >= '" + str(start_date) + "'"
    if end_date is not None:
        s += " and date <= '" + str(end_date) + "'"
    s += " order by date asc"

    data = db.read_sql(s)
    for field in date_fields:
        data[field] = pd.to_datetime(data[field])

    if index_fields is not None:
        # Promote the requested columns to the index, then drop them
        data.index = [data[field] for field in index_fields]
        for col in index_fields:
            del data[col]
    return data
# Project imports (interactive/script workflow)
import qfl.core.market_data as md
import qfl.core.portfolio_utils as putils
import qfl.core.database_interface as qfl_data
import qfl.core.market_data as mkt_data
import qfl.core.constants as constants
import qfl.utilities.basic_utilities as utils
import qfl.etl.data_ingest as etl
import qfl.etl.data_interfaces as data_int
from qfl.etl.data_interfaces import YahooApi, QuandlApi, DataScraper, NutmegAdapter
from qfl.core.database_interface import DatabaseInterface as db
# NOTE(review): Python-2-style builtin reload — REPL convenience, not needed in production
reload(qfl_data)
from qfl.etl.data_interfaces import NutmegAdapter
# Open the database connection before any queries below
db.initialize()

'''
--------------------------------------------------------------------------------
Nutmeg
--------------------------------------------------------------------------------
'''

# Load file
# NOTE(review): the file handle is never closed (script style) — consider `with open(...)`
xml_file = open("data/NH.xml")
na = NutmegAdapter(xml_file=xml_file)

# Account names
na.get_account_names()
account = na.get_account('Savings')

# Fund names
def get_futures_calendar_name(futures_series=None):
    """Return the exchange calendar name for a futures series.

    Maps the series' exchange code through DateUtils.exchange_calendar_map.
    """
    series_row = db.get_futures_series(futures_series=futures_series)
    exchange_code = series_row.iloc[0]['exchange']
    return utils.DateUtils.exchange_calendar_map[exchange_code]
def get_strategy_signals(model_name=None):
    """Fetch the 'model_signals' rows registered for a model."""
    model_id = get_model_id(model_name)
    where_clause = 'model_id = {0}'.format(model_id)
    return db.get_data(table_name='model_signals', where_str=where_clause)
def get_models():
    """Return the full contents of the 'models' table."""
    return db.read_sql('select * from models')
def get_model_output_fields(model_name=None):
    """Fetch the configured output fields for a model."""
    model_id = get_model_id(model_name)
    where_clause = 'model_id = {0}'.format(model_id)
    return db.get_data(table_name='model_output_config',
                       where_str=where_clause)
# Project imports (interactive/script workflow)
import qfl.core.portfolio_utils as putils
from qfl.etl.data_interfaces import QuandlApi, IBApi, DataScraper, NutmegAdapter, YahooApi
from qfl.core.database_interface import DatabaseInterface as db
from qfl.utilities.chart_utilities import format_plot
from qfl.utilities.nlp import DocumentsAnalyzer as da
from qfl.core.market_data import VolatilitySurfaceManager
from qfl.macro.macro_models import FundamentalMacroModel
import qfl.utilities.statistics as stats
import qfl.models.volatility_factor_model as vfm
from qfl.utilities.statistics import RollingFactorModel
from qfl.models.volatility_factor_model import VolatilityFactorModel
from scipy.interpolate import interp1d

# Open the database connection before the ingest below
db.initialize()

# Ingest generic futures prices from mid-2016 onward
# NOTE(review): assumes `etl` and `dt` are imported elsewhere in this file — confirm
etl.DailyGenericFuturesPriceIngest.ingest_data(start_date=dt.datetime(2016, 7, 1))

import qfl.utilities.bayesian_modeling as bayes
# NOTE(review): Python-2-style builtin reload — REPL convenience only
reload(bayes)
from qfl.utilities.bayesian_modeling import BayesianTimeSeriesDataCleaner

# strategy performance analysis
from qfl.strategies.strategy_master import StrategyMaster
sm = StrategyMaster()

# VIX strategies
# Project imports (interactive/script workflow)
import qfl.models.volatility_factor_model as vfm
from qfl.models.volatility_factor_model import VolatilityFactorModel
import qfl.strategies.strategies as strat
from qfl.strategies.vix_curve import VixCurveStrategy
from qfl.strategies.equity_vs_vol import VegaVsDeltaStrategy
from qfl.strategies.vol_rv import VolswapRvStrategy
from qfl.strategies.strategies import PortfolioOptimizer
from qfl.strategies.strategy_master import StrategyMaster
import qfl.models.model_data_manager as mdm
from qfl.core.marking import SecurityMaster, MarkingEngine
import logging

# Open the database connection and grab the root logger
db.initialize()
logger = logging.getLogger()

'''
--------------------------------------------------------------------------------
New centralized approach
--------------------------------------------------------------------------------
'''

# Load the volatility surface universe from 2010 onward
# NOTE(review): assumes `dt` and `md` are imported elsewhere in this file — confirm
start_date = dt.datetime(2010, 1, 1)
vsm = VolatilitySurfaceManager()
tickers = md.get_etf_vol_universe()
vsm.load_data(tickers=tickers, start_date=start_date)
# vsm.clean_data()
# clean_data = vsm.data
def get_model_param_configs(model_name=None):
    """Fetch the parameter configurations stored for a model.

    The 'model_params' JSON column is coerced to str so callers can do
    plain string comparisons against serialized settings.
    """
    model_id = get_model_id(model_name)
    configs = db.get_data(table_name='model_param_config',
                          where_str='model_id ={0}'.format(model_id))
    configs['model_params'] = configs['model_params'].astype(str)
    return configs