def create_strategy_output_dir(**kwargs):
    """Create (if needed) and return the output directory for a strategy class.

    kwargs:
        strategy_class: one of the known strategy class names.
        report_date: doubledate used to build the dated sub-folder.

    Raises ValueError for an unrecognized strategy_class (the original code
    left output_dir undefined and died with a NameError instead).
    """
    strategy_class = kwargs['strategy_class']
    report_date = kwargs['report_date']
    strategy_output_folder = dn.get_directory_name(ext='strategy_output')

    # Every recognized class maps to a sub-folder of the same name, so the
    # original if/elif chain collapses to a membership check.
    known_classes = ('futures_butterfly', 'curve_pca', 'spread_carry',
                     'vcs', 'scv', 'ifs', 'ibo')
    if strategy_class not in known_classes:
        raise ValueError('unrecognized strategy_class: ' + str(strategy_class))

    output_dir = (strategy_output_folder + '/' + strategy_class + '/'
                  + cu.get_directory_extension(report_date))

    # exist_ok avoids the race between the existence check and creation.
    os.makedirs(output_dir, exist_ok=True)
    return output_dir
def assign_trades_2strategies(**kwargs):
    """Allocate aggregated fills to strategy aliases per trade_allocation.xlsx.

    kwargs:
        trade_source: one of 'tt', 'cme_direct', 'manual_entry'.

    Returns one concatenated frame of selected trades, with an 'alias'
    column taken from the matching allocation row.

    Raises ValueError on an unknown trade_source or allocation criteria.
    (The original hit a NameError on unknown trade_source, and silently
    reused the previous row's selection on an unknown criteria value.)
    """
    trade_source = kwargs['trade_source']
    if trade_source == 'tt':
        formatted_fills = get_formatted_tt_fills(**kwargs)
    elif trade_source == 'cme_direct':
        formatted_fills = get_formatted_cme_direct_fills()
    elif trade_source == 'manual_entry':
        formatted_fills = get_formatted_manual_entry_fills()
    else:
        raise ValueError('unknown trade_source: ' + str(trade_source))

    aggregate_trades = formatted_fills['aggregate_trades']
    allocation_frame = pd.read_excel(dn.get_directory_name(ext='daily') + '/' + 'trade_allocation.xlsx')

    combined_list = [None] * len(allocation_frame.index)
    for i in range(len(allocation_frame.index)):
        if allocation_frame['criteria'][i] == 'tickerhead':
            selected_trades = aggregate_trades[aggregate_trades['ticker_head'] == allocation_frame['value'][i]]
        elif allocation_frame['criteria'][i] == 'ticker':
            selected_trades = aggregate_trades[aggregate_trades['ticker'] == allocation_frame['value'][i]]
        else:
            raise ValueError('unknown allocation criteria: ' + str(allocation_frame['criteria'][i]))
        # .copy() so the 'alias' assignment below never writes into a view
        # of aggregate_trades (pandas SettingWithCopy).
        combined_list[i] = selected_trades[['ticker', 'option_type', 'strike_price', 'trade_price',
                                            'trade_quantity', 'instrument', 'real_tradeQ']].copy()
        combined_list[i]['alias'] = allocation_frame['alias'][i]

    return pd.concat(combined_list).reset_index(drop=True)
def get_futures_price_preloaded(**kwargs):
    """Load pre-saved daily futures prices for a ticker head, with filters.

    kwargs:
        ticker_head or ticker (one required): instrument selector; a ticker
            is resolved to its head via cmi.get_contract_specs.
        futures_data_dictionary (optional): in-memory cache keyed by head;
            bypasses the pickle on disk.
        settle_date (optional): doubledate int or datetime exact-match filter.
        settle_date_from_exclusive (optional): strict lower-bound doubledate.

    Returns a DataFrame; empty when no pickle exists for the head.
    """
    if 'ticker_head' in kwargs:
        ticker_head = kwargs['ticker_head']
    else:
        ticker = kwargs['ticker']
        ticker_head = cmi.get_contract_specs(ticker)['ticker_head']

    if 'futures_data_dictionary' in kwargs:
        data_out = kwargs['futures_data_dictionary'][ticker_head]
    else:
        pickle_path = (dn.get_directory_name(ext='presaved_futures_data')
                       + '/' + ticker_head + '.pkl')
        if not os.path.isfile(pickle_path):
            # no saved data: return empty, skipping all filters
            return pd.DataFrame()
        data_out = pd.read_pickle(pickle_path)

    if 'settle_date' in kwargs:
        settle_date = kwargs['settle_date']
        if isinstance(settle_date, int):
            target = cu.convert_doubledate_2datetime(settle_date)
            data_out = data_out[data_out['settle_date'] == target]
        elif isinstance(settle_date, dt.datetime):
            data_out = data_out[data_out['settle_date'] == settle_date]

    if 'settle_date_from_exclusive' in kwargs:
        cutoff = cu.convert_doubledate_2datetime(kwargs['settle_date_from_exclusive'])
        data_out = data_out[data_out['settle_date'] > cutoff]

    if 'ticker' in kwargs:
        data_out = data_out[data_out['ticker'] == ticker]

    return data_out
def get_strategy_class_historical_pnls(**kwargs):
    """Aggregate daily PnL per strategy class and export it to Excel.

    Optional kwargs: as_of_date (default: previous business day), con
    (existing DB connection; closed here only when we opened it ourselves).

    Returns the merged PnL frame, one column per strategy class plus 'total'.
    """
    con = msu.get_my_sql_connection(**kwargs)
    as_of_date = kwargs['as_of_date'] if 'as_of_date' in kwargs else exp.doubledate_shift_bus_days()

    strategy_frame = ts.select_strategies(con=con, open_date_to=as_of_date)
    # strategy_class is embedded in the serialized description string
    strategy_frame['strategy_class'] = [
        sc.convert_from_string_to_dictionary(
            string_input=strategy_frame['description_string'][idx])['strategy_class']
        for idx in range(len(strategy_frame.index))]

    class_list = strategy_frame['strategy_class'].unique()
    class_pnl_series = []
    for strategy_class in class_list:
        class_strategies = strategy_frame[strategy_frame['strategy_class'] == strategy_class]
        pnl_frames = []
        for alias in class_strategies['alias']:
            pnl_frame = tpnl.get_strategy_pnl(
                alias=alias, as_of_date=as_of_date,
                con=con)['pnl_frame'][['settle_date', 'total_pnl']]
            pnl_frame.set_index('settle_date', drop=True, inplace=True)
            pnl_frames.append(pnl_frame)
        # sum across all strategies of the class, missing days count as 0
        class_pnl_series.append(pd.concat(pnl_frames, axis=1).fillna(0).sum(axis=1))

    if 'con' not in kwargs.keys():
        con.close()

    merged_pnl = pd.concat(class_pnl_series, axis=1, keys=class_list).fillna(0)
    merged_pnl['total'] = merged_pnl.sum(axis=1)

    output_dir = dn.get_directory_name(ext='daily')
    writer = pd.ExcelWriter(output_dir + '/historical_performance_' + str(as_of_date) + '.xlsx',
                            engine='xlsxwriter')
    merged_pnl.to_excel(writer, sheet_name='timeSeries')
    writer.save()
    return merged_pnl
def get_stock_price_preloaded(**kwargs):
    """Return cached daily stock prices for a ticker, refreshing stale pickles.

    kwargs:
        ticker (required); data_source (default 'iex');
        stock_data_dictionary (optional in-memory cache, skips freshness check);
        settle_date (doubledate int or datetime exact match);
        settle_date_from / settle_date_to (doubledate range bounds).
    """
    ticker = kwargs['ticker']
    data_source = kwargs['data_source'] if 'data_source' in kwargs else 'iex'

    if 'stock_data_dictionary' in kwargs:
        data_out = kwargs['stock_data_dictionary'][ticker]
    else:
        if data_source == 'iex':
            file_dir = dn.get_directory_name(ext='iex_stock_data')
        else:
            file_dir = dn.get_directory_name(ext='stock_data')
        pickle_path = file_dir + '/' + ticker + '.pkl'

        if not os.path.isfile(pickle_path):
            ssd.save_stock_data(symbol_list=[ticker], data_source=data_source)
        data_out = pd.read_pickle(pickle_path)

        # re-download when the pickle ends before the last business day
        report_date = exp.doubledate_shift_bus_days()
        last_saved = data_out['settle_datetime'].iloc[-1].to_pydatetime()
        if cu.convert_doubledate_2datetime(report_date) > last_saved:
            ssd.save_stock_data(symbol_list=[ticker], data_source=data_source)
            data_out = pd.read_pickle(pickle_path)

    if 'settle_date' in kwargs:
        settle_date = kwargs['settle_date']
        if isinstance(settle_date, int):
            target = cu.convert_doubledate_2datetime(settle_date)
            data_out = data_out[data_out['settle_datetime'] == target]
        elif isinstance(settle_date, dt.datetime):
            data_out = data_out[data_out['settle_datetime'] == settle_date]

    if 'settle_date_from' in kwargs:
        lower = cu.convert_doubledate_2datetime(kwargs['settle_date_from'])
        data_out = data_out[data_out['settle_datetime'] >= lower]

    if 'settle_date_to' in kwargs:
        upper = cu.convert_doubledate_2datetime(kwargs['settle_date_to'])
        data_out = data_out[data_out['settle_datetime'] <= upper]

    return data_out
def get_futures_price_preloaded(**kwargs):
    """Load pre-saved daily futures prices with optional date/ticker filters.

    kwargs:
        ticker_head or ticker (one required): when a ticker is given it is
            resolved to its head and to the ticker name used inside the
            saved file (mini contracts are stored under the full-size head).
        futures_data_dictionary (optional): in-memory cache keyed by head.
        settle_date (optional): exact-match filter, doubledate int or datetime.
        settle_date_from_exclusive / settle_date_from / settle_date_to
            (optional): range filters.

    Returns a DataFrame; empty when no pickle exists for the head.
    """
    if 'ticker_head' in kwargs.keys():
        ticker_head = kwargs['ticker_head']
    else:
        ticker = kwargs['ticker']
        contract_specs_output = cmi.get_contract_specs(ticker)
        ticker_head = contract_specs_output['ticker_head']
        # Rebuild the ticker as stored in the file: mini contracts map to
        # their full-size head via mini_contract_dictionary.
        file_ticker = cmi.mini_contract_dictionary.get(
            ticker_head, ticker_head) + contract_specs_output['ticker_month_str'] + str(
            contract_specs_output['ticker_year'])
    if 'futures_data_dictionary' in kwargs.keys():
        data_out = kwargs['futures_data_dictionary'][ticker_head]
    else:
        presaved_futures_data_folder = dn.get_directory_name(
            ext='presaved_futures_data')
        file_ticker_head = cmi.mini_contract_dictionary.get(
            ticker_head, ticker_head)
        if os.path.isfile(presaved_futures_data_folder + '/' + file_ticker_head + '.pkl'):
            data_out = pd.read_pickle(presaved_futures_data_folder + '/' + file_ticker_head + '.pkl')
        else:
            # no saved data: return empty immediately, skipping all filters
            data_out = pd.DataFrame()
            return data_out
    if 'settle_date' in kwargs.keys():
        settle_date = kwargs['settle_date']
        # int is interpreted as a doubledate (YYYYMMDD); other types are
        # silently ignored (no filtering) — matches the rest of the module.
        if isinstance(settle_date, int):
            data_out = data_out[data_out['settle_date'] == cu.convert_doubledate_2datetime(settle_date)]
        elif isinstance(settle_date, dt.datetime):
            data_out = data_out[data_out['settle_date'] == settle_date]
    if 'settle_date_from_exclusive' in kwargs.keys():
        data_out = data_out[
            data_out['settle_date'] > cu.convert_doubledate_2datetime(
                kwargs['settle_date_from_exclusive'])]
    if 'settle_date_from' in kwargs.keys():
        data_out = data_out[
            data_out['settle_date'] >= cu.convert_doubledate_2datetime(
                kwargs['settle_date_from'])]
    if 'settle_date_to' in kwargs.keys():
        settle_date_to = kwargs['settle_date_to']
        if isinstance(settle_date_to, int):
            data_out = data_out[
                data_out['settle_date'] <= cu.convert_doubledate_2datetime(
                    kwargs['settle_date_to'])]
        elif isinstance(settle_date_to, dt.datetime):
            data_out = data_out[data_out['settle_date'] <= settle_date_to]
    if 'ticker' in kwargs.keys():
        # NOTE(review): file_ticker is only defined when 'ticker_head' was
        # NOT passed; passing both ticker_head and ticker raises NameError
        # here — confirm callers never combine the two.
        data_out = data_out[data_out['ticker'] == file_ticker]
    return data_out
def load_and_convert_man_position_file(**kwargs):
    """Load the newest MAN position export and convert it to generalized tickers.

    Picks the most recently modified file in the man_positions directory.
    Returns a DataFrame with columns ['generalized_ticker', 'qty'],
    excluding zero-quantity positions.
    """
    positions_directory = dn.get_directory_name(ext='man_positions')
    file_list = os.listdir(positions_directory)
    # choose the most recently modified file in the directory
    time_list = [os.path.getmtime(positions_directory + '/' + x) for x in file_list]
    loc_latest_file = time_list.index(max(time_list))
    man_frame = pd.read_csv(positions_directory + '/' + file_list[loc_latest_file])

    man_frame['ticker_head'] = [conversion_from_man_ticker_head[x] for x in man_frame['Instrument']]
    man_frame['strike_multiplier'] = [man_strike_multiplier.get(x, 1) for x in man_frame['ticker_head']]
    # Prompt is YYYYMM: build ticker as head + month letter + year.
    man_frame['ticker'] = [
        man_frame['ticker_head'].iloc[x] +
        cmi.full_letter_month_list[int(man_frame['Prompt'].iloc[x] % 100) - 1] +
        str(m.floor(man_frame['Prompt'].iloc[x] / 100))
        for x in range(len(man_frame.index))]

    man_frame.rename(columns={'Strike': 'strike_price',
                              'OptionType': 'option_type',
                              'NetQty': 'qty'}, inplace=True)
    man_frame['strike_price'] = round(man_frame['strike_multiplier'] * man_frame['strike_price'], 4)

    # bug fix: the original used chained indexing (frame['col'][mask] = ...),
    # which pandas flags as SettingWithCopy and may silently not write;
    # .loc assigns reliably. ('instrumet' typo column is internal only and
    # is dropped on return.)
    man_frame['instrumet'] = 'F'
    option_indx = (man_frame['option_type'] == 'C') | (man_frame['option_type'] == 'P')
    man_frame.loc[option_indx, 'instrumet'] = 'O'

    man_frame['generalized_ticker'] = man_frame['ticker']
    man_frame.loc[option_indx, 'generalized_ticker'] = (
        man_frame['ticker'][option_indx] + '-' +
        man_frame['option_type'][option_indx] + '-' +
        man_frame['strike_price'][option_indx].astype(str))

    man_frame = man_frame[man_frame['qty'] != 0]
    # strip trailing zeros and a dangling point so '3.50' and '3.5' collapse
    man_frame['generalized_ticker'] = [x.rstrip('0').rstrip('.') for x in man_frame['generalized_ticker']]
    return man_frame[['generalized_ticker', 'qty']]
def load_cme__fills(**kwargs):
    """Read the CME Direct fill export and return non-strategy (outright) fills.

    Returns a DataFrame with columns
    ['ContractCode', 'Side', 'Price', 'FilledQuantity'].
    """
    fills_path = dn.get_directory_name(ext='daily') + '/' + cme_direct_fill_file_name
    # header=1: the export carries a title row above the real column headers
    fill_frame = pd.read_csv(fills_path, header=1)
    outrights = fill_frame[fill_frame['IsStrategy'] == False]
    outrights.reset_index(inplace=True, drop=True)
    return outrights[['ContractCode', 'Side', 'Price', 'FilledQuantity']]
def get_ticker_list(**kwargs):
    """Write the list of liquid CME futures tickers to intraday_tickers.xlsx.

    kwargs (optional, previously hard-coded constants):
        settle_date: doubledate for the volume snapshot (default 20160602).
        volume_filter: minimum volume threshold (default 100).

    Returns the combined ticker frame (the original returned None).
    """
    settle_date = kwargs.get('settle_date', 20160602)
    volume_filter = kwargs.get('volume_filter', 100)
    output_frame_list = [get_ticker_list_4tickerhead(ticker_head=x,
                                                     settle_date=settle_date,
                                                     volume_filter=volume_filter)
                         for x in cmi.cme_futures_tickerhead_list]
    output_frame = pd.concat(output_frame_list)
    output_dir = dn.get_directory_name(ext='daily')
    writer = pd.ExcelWriter(output_dir + '/intraday_tickers.xlsx', engine='xlsxwriter')
    output_frame.to_excel(writer, sheet_name='tickers')
    writer.save()
    return output_frame
def get_pca_seasonality_adjustments(**kwargs):
    """Compute seasonal mean z adjustments for curve PCA from backtest results.

    kwargs: ticker_head (required); file_date_to (default 20160219);
    years_back (default 10); date_to (default file_date_to).

    Returns a frame with columns monthSpread / ticker_month_front /
    z_seasonal_mean; all-zero fallback when no backtest pickle exists.
    """
    ticker_head = kwargs['ticker_head']
    file_date_to = kwargs.get('file_date_to', 20160219)
    years_back = kwargs.get('years_back', 10)
    date_to = kwargs.get('date_to', file_date_to)

    date5_years_ago = cu.doubledate_shift(date_to, 5 * 365)
    backtest_output_dir = dn.get_directory_name(ext='backtest_results')
    file_name = ticker_head + '_' + str(file_date_to) + '_' + str(years_back) + '_z'
    pickle_path = backtest_output_dir + '/curve_pca/' + file_name + '.pkl'

    if not os.path.isfile(pickle_path):
        # bug fix: pd.DataFrame.from_items was removed in pandas 1.0;
        # a dict literal preserves column order on python 3.7+.
        return pd.DataFrame({'monthSpread': [1] * 12 + [6] * 2,
                             'ticker_month_front': list(range(1, 13)) + [6, 12],
                             'z_seasonal_mean': [0] * 14})

    backtest_results = pd.read_pickle(pickle_path)
    entire_report = pd.concat(backtest_results['report_results_list'])
    # restrict to the trailing 5-year window ending at date_to
    selected_report = entire_report[(entire_report['report_date'] <= date_to) &
                                    (entire_report['report_date'] >= date5_years_ago)]
    selected_report = selected_report[(selected_report['tr_dte_front'] > 80) &
                                      (selected_report['monthSpread'] < 12)]
    grouped = selected_report.groupby(['monthSpread', 'ticker_month_front'])
    seasonality_adjustment = pd.DataFrame()
    seasonality_adjustment['monthSpread'] = (grouped['monthSpread'].first()).values
    seasonality_adjustment['ticker_month_front'] = (grouped['ticker_month_front'].first()).values
    seasonality_adjustment['z_seasonal_mean'] = (grouped['z'].mean()).values
    return seasonality_adjustment
def load_and_convert_abn_position_file(**kwargs):
    """Load the ABN position export and convert it to generalized tickers.

    Returns a DataFrame with columns ['generalized_ticker', 'qty'],
    excluding zero-quantity positions.
    """
    abn_frame = pd.read_csv(dn.get_directory_name(ext='daily') + '/' + position_file_name)
    abn_frame = abn_frame[abn_frame['Expiration'].notnull()]

    abn_frame['ticker_head'] = [conversion_from_abn_ticker_head[x] for x in abn_frame['Symbol']]
    abn_frame['strike_multiplier'] = [get_abn_strike_multiplier(x) for x in abn_frame['ticker_head']]
    # Expiration is YYYYMM: build ticker as head + month letter + year.
    abn_frame['ticker'] = [
        abn_frame['ticker_head'].iloc[x] +
        cmi.full_letter_month_list[int(abn_frame['Expiration'].iloc[x] % 100) - 1] +
        str(m.floor(abn_frame['Expiration'].iloc[x] / 100))
        for x in range(len(abn_frame.index))]

    # net quantity = longs minus shorts (Short column is reported positive)
    abn_frame['Short'] = -abn_frame['Short']
    abn_frame['Long'] = abn_frame['Long'].fillna(0).astype('int')
    abn_frame['Short'] = abn_frame['Short'].fillna(0).astype('int')
    abn_frame['qty'] = abn_frame['Long'] + abn_frame['Short']

    abn_frame.rename(columns={'Strike': 'strike_price',
                              'PutCall': 'option_type'}, inplace=True)
    abn_frame['strike_price'] = abn_frame['strike_multiplier'] * abn_frame['strike_price']

    # bug fix: chained indexing (frame['col'][mask] = ...) is unreliable in
    # pandas (SettingWithCopy); .loc assigns in place. ('instrumet' typo
    # column is internal only and is dropped on return.)
    abn_frame['instrumet'] = 'F'
    option_indx = abn_frame['strike_price'].notnull()
    abn_frame.loc[option_indx, 'instrumet'] = 'O'

    abn_frame['generalized_ticker'] = abn_frame['ticker']
    abn_frame.loc[option_indx, 'generalized_ticker'] = (
        abn_frame['ticker'][option_indx] + '-' +
        abn_frame['option_type'][option_indx] + '-' +
        abn_frame['strike_price'][option_indx].astype(str))

    abn_frame = abn_frame[abn_frame['qty'] != 0]
    # strip trailing zeros and a dangling point so '3.50' and '3.5' collapse
    abn_frame['generalized_ticker'] = [x.rstrip('0').rstrip('.') for x in abn_frame['generalized_ticker']]
    return abn_frame[['generalized_ticker', 'qty']]
def test_option_models(**kwargs):
    """Price saved aligned-option test cases with one engine and compute
    percentage deviations against the saved reference values.

    kwargs: engine_name (required), num_cases (optional cap).
    Results are cached as <engine_name>.pkl and returned from cache when
    present.
    """
    test_data_dir = dn.get_directory_name(ext='option_model_test_data')
    engine_name = kwargs['engine_name']
    cache_path = test_data_dir + '/' + engine_name + '.pkl'
    if os.path.isfile(cache_path):
        return pd.read_pickle(cache_path)

    data_frame_test = generate_test_cases_from_aligned_option_data()
    data_frame_test = data_frame_test[data_frame_test['strike'].notnull()]
    if 'num_cases' in kwargs.keys():
        data_frame_test = data_frame_test.iloc[0:kwargs['num_cases']]

    model_wrapper_output = []
    for no in range(len(data_frame_test.index)):
        print(no)  # progress indicator; repricing can be slow
        model_wrapper_output.append(omu.option_model_wrapper(
            ticker=data_frame_test['ticker'].iloc[no],
            calculation_date=int(data_frame_test['settleDates'].iloc[no]),
            underlying=data_frame_test['underlying'].iloc[no],
            strike=data_frame_test['strike'].iloc[no],
            option_price=data_frame_test['theoValue'].iloc[no],
            exercise_type=data_frame_test['exercise_type'].iloc[no],
            option_type=data_frame_test['option_type'].iloc[no],
            engine_name=engine_name))

    num_rows = len(data_frame_test.index)

    def pct_deviation(model_key, true_column):
        # percent deviation of model output vs saved reference value
        return [100 * (model_wrapper_output[no][model_key]
                       - data_frame_test[true_column].iloc[no])
                / data_frame_test[true_column].iloc[no]
                for no in range(num_rows)]

    data_frame_test['vol_deviation'] = pct_deviation('implied_vol', 'impVol')
    data_frame_test['dollar_gamma_deviation'] = pct_deviation('dollar_gamma', 'dollarGamma')
    data_frame_test['gamma_deviation'] = pct_deviation('gamma', 'gamma')
    # delta and interest rate deviations are absolute (scaled), not relative
    data_frame_test['delta_deviation'] = [
        100 * (model_wrapper_output[no]['delta'] - data_frame_test['delta'].iloc[no])
        for no in range(num_rows)]
    data_frame_test['dollar_vega_deviation'] = pct_deviation('dollar_vega', 'dollarVega')
    data_frame_test['dollar_theta_deviation'] = pct_deviation('dollar_theta', 'dollarTheta')
    data_frame_test['interest_rate_deviation'] = [
        100 * (model_wrapper_output[no]['interest_rate'] - data_frame_test['rate2OptExp'].iloc[no])
        for no in range(num_rows)]
    data_frame_test['cal_dte_deviation'] = [
        model_wrapper_output[no]['cal_dte'] - data_frame_test['calDTE'].iloc[no]
        for no in range(num_rows)]

    data_frame_test.to_pickle(cache_path)
    return data_frame_test
def get_strategy_class_historical_pnls(**kwargs):
    """Aggregate historical daily PnL per strategy class and export to Excel.

    Optional kwargs: as_of_date (default: previous business day), con
    (existing DB connection; closed here only when this function opened it).

    Returns the merged PnL frame indexed by settle_date, one column per
    strategy class plus a 'total' column.
    """
    con = msu.get_my_sql_connection(**kwargs)
    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
    strategy_frame = ts.select_strategies(con=con, open_date_to=as_of_date)
    # strategy_class is embedded in the serialized description string
    strategy_frame['strategy_class'] = [
        sc.convert_from_string_to_dictionary(
            string_input=strategy_frame['description_string'][x])
        ['strategy_class'] for x in range(len(strategy_frame.index))
    ]
    unique_strategy_class_list = strategy_frame['strategy_class'].unique()
    time_series_list = [None] * len(unique_strategy_class_list)
    for i in range(len(unique_strategy_class_list)):
        strategy_frame_selected = strategy_frame[
            strategy_frame['strategy_class'] == unique_strategy_class_list[i]]
        pnl_out = [
            tpnl.get_strategy_pnl(
                alias=x, as_of_date=as_of_date,
                con=con)['pnl_frame'][['settle_date', 'total_pnl']]
            for x in strategy_frame_selected['alias']
        ]
        # set_index mutates each frame in place; list comp used for side effect
        [x.set_index('settle_date', drop=True, inplace=True) for x in pnl_out]
        # sum across all strategies of the class, missing days count as 0
        time_series_list[i] = pd.concat(pnl_out, axis=1).fillna(0).sum(axis=1)
    # only close the connection if this function created it
    if 'con' not in kwargs.keys():
        con.close()
    merged_pnl = pd.concat(time_series_list, axis=1,
                           keys=unique_strategy_class_list).fillna(0)
    merged_pnl['total'] = merged_pnl.sum(axis=1)
    output_dir = dn.get_directory_name(ext='daily')
    writer = pd.ExcelWriter(output_dir + '/historical_performance_' + str(as_of_date) + '.xlsx',
                            engine='xlsxwriter')
    merged_pnl.to_excel(writer, sheet_name='timeSeries')
    writer.save()
    return merged_pnl
def generate_csv_file(**kwargs):
    """Price every aligned-option test case with the requested engine and
    write the results to option_model_test.xlsx.

    kwargs: engine_name (required), num_cases (optional cap).
    """
    output_dir = dn.get_directory_name(ext='test_data')
    engine_name = kwargs['engine_name']
    data_frame_test = generate_test_cases_from_aligned_option_data()
    data_frame_test = data_frame_test[data_frame_test['strike'].notnull()]
    if 'num_cases' in kwargs.keys():
        data_frame_test = data_frame_test.iloc[0:kwargs['num_cases']]

    model_wrapper_output = []
    for no in range(len(data_frame_test.index)):
        print(no)  # progress indicator; pricing can be slow
        model_wrapper_output.append(omu.option_model_wrapper(
            ticker=data_frame_test['ticker'].iloc[no],
            calculation_date=int(data_frame_test['settleDates'].iloc[no]),
            underlying=data_frame_test['underlying'].iloc[no],
            strike=data_frame_test['strike'].iloc[no],
            option_price=data_frame_test['theoValue'].iloc[no],
            exercise_type=data_frame_test['exercise_type'].iloc[no],
            option_type=data_frame_test['option_type'].iloc[no],
            engine_name=engine_name))

    num_rows = len(data_frame_test.index)
    data_frame_test['impVol'] = [model_wrapper_output[no]['implied_vol'] for no in range(num_rows)]
    data_frame_test['dollarGamma'] = [model_wrapper_output[no]['dollar_gamma'] for no in range(num_rows)]
    data_frame_test['gamma'] = [model_wrapper_output[no]['gamma'] for no in range(num_rows)]
    data_frame_test['delta'] = [model_wrapper_output[no]['delta'] for no in range(num_rows)]
    data_frame_test['dollarVega'] = [model_wrapper_output[no]['dollar_vega'] for no in range(num_rows)]
    data_frame_test['dollarTheta'] = [model_wrapper_output[no]['dollar_theta'] for no in range(num_rows)]
    data_frame_test['rate2OptExp'] = [model_wrapper_output[no]['interest_rate'] for no in range(num_rows)]
    data_frame_test['calDTE'] = [model_wrapper_output[no]['cal_dte'] for no in range(num_rows)]

    data_frame_test = data_frame_test[data_frame_test['impVol'].notnull()]
    # exclude the eurodollar family from the exported test set
    data_frame_test = data_frame_test[~data_frame_test['tickerHead'].isin(
        ['ED', 'E0', 'E1', 'E2', 'E3', 'E4', 'E5'])]
    data_frame_test = data_frame_test[['settleDates', 'ticker', 'option_type', 'strike',
                                       'underlying', 'theoValue', 'impVol', 'delta',
                                       'dollarVega', 'dollarTheta', 'dollarGamma', 'rate2OptExp']]
    data_frame_test.reset_index(drop=True, inplace=True)
    writer = pd.ExcelWriter(output_dir + '/' + 'option_model_test' + '.xlsx', engine='xlsxwriter')
    data_frame_test.to_excel(writer, sheet_name='all')
    # bug fix: without an explicit save() the workbook was never flushed to
    # disk (sibling functions in this codebase do call writer.save()).
    writer.save()
def get_ticker_list(**kwargs):
    """Collect liquid tickers for each CME futures ticker head and write
    them to intraday_tickers.xlsx in the daily directory."""
    frames = []
    for tickerhead in cmi.cme_futures_tickerhead_list:
        frames.append(get_ticker_list_4tickerhead(ticker_head=tickerhead,
                                                  settle_date=20160602,
                                                  volume_filter=100))
    output_frame = pd.concat(frames)
    daily_dir = dn.get_directory_name(ext='daily')
    writer = pd.ExcelWriter(daily_dir + '/intraday_tickers.xlsx', engine='xlsxwriter')
    output_frame.to_excel(writer, sheet_name='tickers')
    writer.save()
def get_formatted_manual_entry_fills(**kwargs):
    """Load manually entered trades and aggregate them per instrument/side.

    Returns {'raw_trades': original frame,
             'aggregate_trades': volume-weighted aggregates indexed by
             (generalized_ticker, side)}.
    """
    fill_frame = pd.read_csv(dn.get_directory_name(ext='daily') + '/' + manual_trade_entry_file_name)
    formatted_frame = fill_frame
    formatted_frame.rename(columns={'optionType': 'option_type',
                                    'strikePrice': 'strike_price',
                                    'tradePrice': 'trade_price',
                                    'quantity': 'trade_quantity'}, inplace=True)
    formatted_frame['strike_price'] = formatted_frame['strike_price'].astype('float64')
    # PQ = price*quantity, summed later to form a volume-weighted price
    formatted_frame['PQ'] = formatted_frame['trade_price'] * formatted_frame['trade_quantity']

    # instrument: option by default, future when no option_type, stock by ticker
    formatted_frame['instrument'] = 'O'
    formatted_frame.loc[formatted_frame['option_type'].isnull(), 'instrument'] = 'F'
    formatted_frame.loc[[cmi.is_stockQ(x) for x in formatted_frame['ticker']], 'instrument'] = 'S'

    option_type = formatted_frame['option_type']
    formatted_frame['option_type'] = option_type.where(pd.notnull(option_type), None)

    option_indx = formatted_frame['instrument'] == 'O'
    formatted_frame['generalized_ticker'] = formatted_frame['ticker']
    # bug fix: the original used chained indexing (frame['col'][mask] = ...),
    # which pandas flags as SettingWithCopy and may silently not write.
    formatted_frame.loc[option_indx, 'generalized_ticker'] = (
        formatted_frame['ticker'][option_indx] + '-' +
        formatted_frame['option_type'][option_indx] + '-' +
        formatted_frame['strike_price'][option_indx].astype(str))

    formatted_frame['side'] = np.sign(formatted_frame['trade_quantity'])
    formatted_frame['ticker_head'] = [cmi.get_contract_specs(x)['ticker_head']
                                      for x in formatted_frame['ticker']]

    grouped = formatted_frame.groupby(['generalized_ticker', 'side'])
    aggregate_trades = pd.DataFrame()
    # volume-weighted average price per (generalized_ticker, side)
    aggregate_trades['trade_price'] = grouped['PQ'].sum() / grouped['trade_quantity'].sum()
    aggregate_trades['trade_quantity'] = grouped['trade_quantity'].sum()
    aggregate_trades['ticker'] = grouped['ticker'].first()
    aggregate_trades['ticker_head'] = grouped['ticker_head'].first()
    aggregate_trades['instrument'] = grouped['instrument'].first()
    aggregate_trades['option_type'] = grouped['option_type'].first()
    aggregate_trades['strike_price'] = grouped['strike_price'].first()
    aggregate_trades['real_tradeQ'] = True
    return {'raw_trades': fill_frame, 'aggregate_trades': aggregate_trades}
def create_strategy_output_dir(**kwargs):
    """Create (if needed) and return the output directory for a strategy class.

    kwargs:
        strategy_class: strategy class name; 'intraday_futures_experimental'
            is shortened to the 'ife' folder, everything else maps to a
            folder of the same name ('futures_butterfly' kept explicit for
            clarity).
        report_date: doubledate used to build the dated sub-folder.
    """
    strategy_class = kwargs['strategy_class']
    report_date = kwargs['report_date']
    strategy_output_folder = dn.get_directory_name(ext='strategy_output')
    if strategy_class == 'futures_butterfly':
        output_dir = strategy_output_folder + '/futures_butterfly/' + cu.get_directory_extension(report_date)
    elif strategy_class == 'intraday_futures_experimental':
        output_dir = strategy_output_folder + '/ife/' + cu.get_directory_extension(report_date)
    else:
        output_dir = strategy_output_folder + '/' + strategy_class + '/' + cu.get_directory_extension(report_date)
    # bug fix: exist_ok removes the race between the original
    # os.path.exists check and os.makedirs (TOCTOU).
    os.makedirs(output_dir, exist_ok=True)
    return output_dir
def get_formatted_manual_entry_fills(**kwargs):
    """Load manually entered trades and aggregate them per instrument/side.

    Returns {'raw_trades': original frame,
             'aggregate_trades': volume-weighted aggregates indexed by
             (generalized_ticker, side)}.
    """
    fill_frame = pd.read_csv(dn.get_directory_name(ext='daily') + '/' + manual_trade_entry_file_name)
    formatted_frame = fill_frame
    formatted_frame.rename(columns={'optionType': 'option_type',
                                    'strikePrice': 'strike_price',
                                    'tradePrice': 'trade_price',
                                    'quantity': 'trade_quantity'}, inplace=True)
    formatted_frame['strike_price'] = formatted_frame['strike_price'].astype('float64')
    # PQ = price*quantity, summed later to form a volume-weighted price
    formatted_frame['PQ'] = formatted_frame['trade_price'] * formatted_frame['trade_quantity']

    # instrument: option by default, future when option_type is missing
    formatted_frame['instrument'] = 'O'
    formatted_frame.loc[formatted_frame['option_type'].isnull(), 'instrument'] = 'F'

    option_type = formatted_frame['option_type']
    formatted_frame['option_type'] = option_type.where(pd.notnull(option_type), None)

    option_indx = formatted_frame['instrument'] == 'O'
    formatted_frame['generalized_ticker'] = formatted_frame['ticker']
    # bug fix: the original used chained indexing (frame['col'][mask] = ...),
    # which pandas flags as SettingWithCopy and may silently not write.
    formatted_frame.loc[option_indx, 'generalized_ticker'] = (
        formatted_frame['ticker'][option_indx] + '-' +
        formatted_frame['option_type'][option_indx] + '-' +
        formatted_frame['strike_price'][option_indx].astype(str))

    formatted_frame['side'] = np.sign(formatted_frame['trade_quantity'])
    formatted_frame['ticker_head'] = [cmi.get_contract_specs(x)['ticker_head']
                                      for x in formatted_frame['ticker']]

    grouped = formatted_frame.groupby(['generalized_ticker', 'side'])
    aggregate_trades = pd.DataFrame()
    # volume-weighted average price per (generalized_ticker, side)
    aggregate_trades['trade_price'] = grouped['PQ'].sum() / grouped['trade_quantity'].sum()
    aggregate_trades['trade_quantity'] = grouped['trade_quantity'].sum()
    aggregate_trades['ticker'] = grouped['ticker'].first()
    aggregate_trades['ticker_head'] = grouped['ticker_head'].first()
    aggregate_trades['instrument'] = grouped['instrument'].first()
    aggregate_trades['option_type'] = grouped['option_type'].first()
    aggregate_trades['strike_price'] = grouped['strike_price'].first()
    aggregate_trades['real_tradeQ'] = True
    return {'raw_trades': fill_frame, 'aggregate_trades': aggregate_trades}
def get_stock_price(**kwargs):
    """Download daily price history for one symbol.

    kwargs: symbol (required); data_source in {'alpha_vantage', 'iex'};
    outputsize (required for alpha_vantage).

    Returns a frame with a 'settle_datetime' column (also the index for IEX).
    """
    symbol = kwargs['symbol']
    data_source = kwargs['data_source']

    if data_source == 'alpha_vantage':
        config_dir = dn.get_directory_name(ext='drop_box_trading')
        config_output = su.read_config_file(file_name=config_dir + '/' + 'apiKeys.txt')
        time_series = TimeSeries(key=config_output['alphaVantage'], output_format='pandas')
        t.sleep(1)  # throttle to respect the API rate limit
        result = time_series.get_daily_adjusted(symbol=symbol, outputsize=kwargs['outputsize'])
        data_out = result[0]
        data_out['settle_date'] = data_out.index
        # map the numbered Alpha Vantage column names to plain ones
        column_map = {'1. open': 'open',
                      '2. high': 'high',
                      '3. low': 'low',
                      '4. close': 'close',
                      '5. adjusted close': 'adjusted_close',
                      '6. volume': 'volume',
                      '7. dividend amount': 'dividend_amount',
                      '8. split coefficient': 'split_coefficient'}
        data_out.rename(columns=column_map, inplace=True)
        data_out['settle_datetime'] = [dt.datetime.strptime(value, '%Y-%m-%d')
                                       for value in data_out['settle_date']]
    elif data_source == 'iex':
        data_out = web.DataReader(symbol, data_source='iex', start='01/01/2013')
        data_out['settle_datetime'] = [dt.datetime.strptime(value, '%Y-%m-%d')
                                       for value in data_out.index]
        data_out.set_index('settle_datetime', drop=False, inplace=True)

    return data_out
def get_pca_seasonality_adjustments(**kwargs):
    """Compute seasonal mean z adjustments for curve PCA from backtest results.

    kwargs: ticker_head (required); file_date_to (default 20160219);
    years_back (default 10); date_to (default file_date_to).

    Returns a frame with columns monthSpread / ticker_month_front /
    z_seasonal_mean; all-zero fallback when no backtest pickle exists.
    """
    ticker_head = kwargs['ticker_head']
    file_date_to = kwargs.get('file_date_to', 20160219)
    years_back = kwargs.get('years_back', 10)
    date_to = kwargs.get('date_to', file_date_to)

    date5_years_ago = cu.doubledate_shift(date_to, 5 * 365)
    backtest_output_dir = dn.get_directory_name(ext='backtest_results')
    file_name = ticker_head + '_' + str(file_date_to) + '_' + str(years_back) + '_z'
    pickle_path = backtest_output_dir + '/curve_pca/' + file_name + '.pkl'

    if not os.path.isfile(pickle_path):
        # bug fix: pd.DataFrame.from_items was removed in pandas 1.0;
        # a dict literal preserves column order on python 3.7+.
        return pd.DataFrame({'monthSpread': [1] * 12 + [6] * 2,
                             'ticker_month_front': list(range(1, 13)) + [6, 12],
                             'z_seasonal_mean': [0] * 14})

    backtest_results = pd.read_pickle(pickle_path)
    entire_report = pd.concat(backtest_results['report_results_list'])
    # restrict to the trailing 5-year window ending at date_to
    selected_report = entire_report[(entire_report['report_date'] <= date_to) &
                                    (entire_report['report_date'] >= date5_years_ago)]
    selected_report = selected_report[(selected_report['tr_dte_front'] > 80) &
                                      (selected_report['monthSpread'] < 12)]
    grouped = selected_report.groupby(['monthSpread', 'ticker_month_front'])
    seasonality_adjustment = pd.DataFrame()
    seasonality_adjustment['monthSpread'] = (grouped['monthSpread'].first()).values
    seasonality_adjustment['ticker_month_front'] = (grouped['ticker_month_front'].first()).values
    seasonality_adjustment['z_seasonal_mean'] = (grouped['z'].mean()).values
    return seasonality_adjustment
def generate_test_file_4stir_rates(**kwargs):
    """Build an Excel test file of simple interest rates to option expirations.

    kwargs (optional, previously hard-coded constants):
        date_from / date_to: doubledate range (defaults 20100101 / 20160821).

    Every 6th business day in the range is sampled; per-date results are
    cached as pickles so reruns are cheap.
    """
    output_dir = dn.get_directory_name(ext='test_data')
    date_from = kwargs.get('date_from', 20100101)
    date_to = kwargs.get('date_to', 20160821)
    full_dates = exp.get_bus_day_list(date_from=date_from, date_to=date_to)
    bus_dates_select = full_dates[0::6]  # sample every 6th business day

    data_frame_list = []
    for i in range(len(bus_dates_select)):
        date_file_name = output_dir + '/' + str(bus_dates_select[i]) + '.pkl'
        if os.path.isfile(date_file_name):
            liquid_options = pd.read_pickle(date_file_name)
        else:
            liquid_options = cl.generate_liquid_options_list_dataframe(
                settle_date=bus_dates_select[i])
            liquid_options.drop_duplicates('expiration_date', inplace=True)
            liquid_options = liquid_options[['expiration_date']]
            liquid_options['settle_date'] = bus_dates_select[i]
            # expiration as doubledate YYYYMMDD
            liquid_options['exp_date'] = liquid_options['expiration_date'].apply(
                lambda x: 10000 * x.year + 100 * x.month + x.day)
            liquid_options['int_rate'] = liquid_options.apply(
                lambda x: grfs.get_simple_rate(as_of_date=x['settle_date'],
                                               date_to=x['exp_date'])['rate_output'],
                axis=1)
            liquid_options.to_pickle(date_file_name)
        data_frame_list.append(liquid_options)

    merged_data = pd.concat(data_frame_list)
    merged_data.reset_index(inplace=True, drop=True)
    merged_data = merged_data[['settle_date', 'exp_date', 'int_rate']]
    writer = pd.ExcelWriter(output_dir + '/' + 'stir_option_rate_test' + '.xlsx',
                            engine='xlsxwriter')
    merged_data.to_excel(writer, sheet_name='all')
    # bug fix: without an explicit save() the workbook was never flushed to disk
    writer.save()
def get_symbol_frame(**kwargs):
    """Return the cached NASDAQ/other symbol listing for a settle date.

    Downloads and caches the listing only when settle_date equals the most
    recent completed trading day; otherwise returns an empty frame.
    """
    frame_type = kwargs['frame_type']
    settle_date = kwargs['settle_date']
    if frame_type == 'nasdaq':
        symbol_address = nasdaq_symbol_address
    elif frame_type == 'other':
        symbol_address = other_symbol_address

    output_dir = dn.get_directory_name(ext='stock_data')
    file_name = output_dir + '/' + frame_type + '_' + str(settle_date) + '.pkl'
    if os.path.isfile(file_name):
        return pd.read_pickle(file_name)

    # most recent completed trading day: previous business day on weekends
    # and before ~9:30, today once past 9:30
    datetime_now = dt.datetime.now()
    if datetime_now.weekday() in [5, 6]:
        last_settle_date = exp.doubledate_shift_bus_days()
    elif 100 * datetime_now.hour + datetime_now.minute > 930:
        last_settle_date = cu.get_doubledate()
    else:
        last_settle_date = exp.doubledate_shift_bus_days()

    if settle_date != last_settle_date:
        # can only build a listing for the current settle date
        return pd.DataFrame()

    raw_lines = sd.download_txt_from_web(web_address=symbol_address)
    column_names = raw_lines[0].decode('iso-8859-1').split("|")
    # skip the header row and the trailing footer line of the download
    parsed_rows = [raw_lines[idx].decode('iso-8859-1').split("|")
                   for idx in range(1, len(raw_lines) - 1)]
    symbol_frame = pd.DataFrame(parsed_rows, columns=column_names)
    symbol_frame.to_pickle(file_name)
    return symbol_frame
def load_aligend_options_data_file(**kwargs):
    """Load an aligned option time-series .mat file into a DataFrame.

    kwargs: ticker_head and tr_dte_center (required); delta_center
    (default 0.5); model (default 'BS'); contract_month_letter (optional);
    column_names (optional, default from get_column_names_4option_data).

    Returns an empty frame (with columns) when the file does not exist.
    """
    ticker_head = kwargs['ticker_head']
    tr_dte_center = kwargs['tr_dte_center']
    option_data_dir = dn.get_directory_name(ext='aligned_time_series_output')
    delta_center = kwargs.get('delta_center', 0.5)
    model = kwargs.get('model', 'BS')
    if 'contract_month_letter' in kwargs.keys():
        contract_month_str = '_' + kwargs['contract_month_letter']
    else:
        contract_month_str = ''
    if 'column_names' in kwargs.keys():
        column_names = kwargs['column_names']
    else:
        column_names = get_column_names_4option_data()

    file_dir = (ticker_head + '_' + str(delta_center) + '_' + model +
                '_20_510204060_' + str(tr_dte_center) + contract_month_str + '.mat')
    file_path = option_data_dir + '/' + file_dir
    if not os.path.isfile(file_path):
        return pd.DataFrame(columns=column_names)

    try:
        mat_output = scipy.io.loadmat(file_path)
        data_frame_out = pd.DataFrame(mat_output['alignedDataMatrix'], columns=column_names)
    except Exception:
        # scipy.io.loadmat cannot read MATLAB v7.3 (HDF5) files; fall back
        # to h5py. Bug fixes vs the original: Dataset.value was removed in
        # h5py 3.0 (use ds[()]), and the file handle was never closed.
        with h5py.File(file_path, 'r') as h5_file:
            data_frame_out = pd.DataFrame(h5_file['alignedDataMatrix'][()].transpose(),
                                          columns=column_names)
    return data_frame_out
def send_hrsn_report(**kwargs):
    """E-mail the daily hrsn report: data-integrity text, an expiration
    warning line, and the pnl/followup workbooks as attachments.

    Optional kwarg report_date (defaults to the previous business day);
    kwargs['con'] is forwarded to the expiration-report query.
    """
    daily_dir = dn.get_directory_name(ext='daily')

    if 'report_date' in kwargs.keys():
        report_date = kwargs['report_date']
    else:
        report_date = exp.doubledate_shift_bus_days()

    # BUG FIX: strategy_class was 'os', which create_strategy_output_dir does
    # not recognize (its output_dir would be unbound).  The sibling report
    # routine uses 'ibo', matching this ibo_dir variable.
    ibo_dir = ts.create_strategy_output_dir(strategy_class='ibo', report_date=report_date)

    cov_data_integrity = ''
    try:
        with open(ibo_dir + '/' + 'covDataIntegrity.txt', 'r') as text_file:
            cov_data_integrity = text_file.read()
    except Exception:
        # A missing integrity file is tolerated; the e-mail simply omits it.
        pass

    try:
        expiration_report = ef.get_expiration_report(report_date=report_date, con=kwargs['con'])
        expiration_report = expiration_report[expiration_report['tr_days_2roll'] < 5]
        if expiration_report.empty:
            expiration_text = 'No near expirations.'
        else:
            expiration_text = 'Check for approaching expirations!'
    except Exception:
        expiration_text = 'Check expiration report for errors!'

    se.send_email_with_attachment(
        subject='hrsn_' + str(report_date),
        email_text='cov_data_integrity: ' + cov_data_integrity + "\r\n" + expiration_text,
        attachment_list=[daily_dir + '/' + 'pnl_' + str(report_date) + '.xlsx',
                         daily_dir + '/' + 'followup_' + str(report_date) + '.xlsx'])
def save_ib_data(**kwargs):
    """Select the most liquid contract per CME ticker head and stream its IB
    history through the Algo app.

    Optional kwarg duration_str (IB durationStr, default '2 M').
    """
    duration_str = kwargs['duration_str'] if 'duration_str' in kwargs.keys() else '2 M'

    app = algo.Algo()
    con = msu.get_my_sql_connection()
    # NOTE: unused locals date_now/datetime_now removed.
    report_date = exp.doubledate_shift_bus_days()

    ticker_head_list = cmi.cme_futures_tickerhead_list
    data_list = [gfp.get_futures_price_preloaded(ticker_head=x, settle_date=report_date)
                 for x in ticker_head_list]
    ticker_frame = pd.concat(data_list)

    # Drop short-dated eurodollars and the precious metals entirely.
    ticker_frame = ticker_frame[~((ticker_frame['ticker_head'] == 'ED') & (ticker_frame['tr_dte'] < 250))]
    ticker_frame = ticker_frame[~((ticker_frame['ticker_head'] == 'GC') | (ticker_frame['ticker_head'] == 'SI'))]

    # Keep the single highest-volume contract for each ticker head.
    ticker_frame.sort_values(['ticker_head', 'volume'], ascending=[True, False], inplace=True)
    ticker_frame.drop_duplicates(subset=['ticker_head'], keep='first', inplace=True)

    app.ticker_list = list(ticker_frame['ticker'])
    # NOTE(review): other routines here resolve folders via
    # dn.get_directory_name; confirm sd exposes the same helper.
    app.output_dir = sd.get_directory_name(ext='ib_data')
    app.durationStr = duration_str
    app.con = con
    app.connect(client_id=5)
    app.run()
def get_pnl_4_date_range(**kwargs):
    """Return the ifs pnl frame for the business-day window ending at date_to
    and reaching num_bus_days_back days back.

    Dates missing from the per-ticker-list pickle cache are computed via
    calc_pnl4date and persisted back to the cache.
    """
    ticker_list = kwargs['ticker_list']
    date_to = kwargs['date_to']
    num_bus_days_back = kwargs['num_bus_days_back']

    directory_name = dn.get_directory_name(ext='backtest_results')
    file_name = '_'.join(ticker_list)
    cache_path = directory_name + '/ifs_pnls/' + file_name + '.pkl'

    if os.path.isfile(cache_path):
        pnl_frame = pd.read_pickle(cache_path)
    else:
        pnl_frame = pd.DataFrame(columns=['pnl_date', 'long_pnl', 'short_pnl', 'total_pnl'])

    date_from = exp.doubledate_shift_bus_days(double_date=date_to, shift_in_days=num_bus_days_back)
    date_list = exp.get_bus_day_list(date_from=date_from, date_to=date_to)

    dates2calculate = list(set(date_list) - set(pnl_frame['pnl_date']))

    if not dates2calculate:
        # Everything already cached: just slice out the requested window.
        return pnl_frame[(pnl_frame['pnl_date'] >= date_list[0]) & (pnl_frame['pnl_date'] <= date_list[-1])]

    pnl_list = [calc_pnl4date(ticker_list=ticker_list, pnl_date=i) for i in dates2calculate]

    pnl_frame = pd.concat([pnl_frame, pd.DataFrame(pnl_list)])
    pnl_frame = pnl_frame[['pnl_date', 'long_pnl', 'short_pnl', 'total_pnl']]
    # BUG FIX: DataFrame.sort was removed in pandas 0.20 -- sort_values is
    # the supported replacement.
    pnl_frame.sort_values('pnl_date', ascending=True, inplace=True)
    pnl_frame.to_pickle(cache_path)

    return pnl_frame[(pnl_frame['pnl_date'] >= date_list[0]) & (pnl_frame['pnl_date'] <= date_list[-1])]
def send_hrsn_report(**kwargs):
    """Send the daily hrsn e-mail: covariance-data integrity text, an
    expiration warning line, and the pnl/followup workbooks attached."""
    daily_dir = dn.get_directory_name(ext='daily')

    report_date = kwargs['report_date'] if 'report_date' in kwargs.keys() else exp.doubledate_shift_bus_days()

    ibo_dir = ts.create_strategy_output_dir(strategy_class='ibo', report_date=report_date)

    cov_data_integrity = ''
    try:
        with open(ibo_dir + '/' + 'covDataIntegrity.txt', 'r') as handle:
            cov_data_integrity = handle.read()
    except Exception:
        pass  # integrity file may be absent; send the mail regardless

    try:
        near_expirations = ef.get_expiration_report(report_date=report_date)
        near_expirations = near_expirations[near_expirations['tr_dte'] < 10]
        expiration_text = 'No near expirations.' if near_expirations.empty else 'Check for approaching expirations!'
    except Exception:
        expiration_text = 'Check expiration report for errors!'

    attachments = [daily_dir + '/' + 'pnl_' + str(report_date) + '.xlsx',
                   daily_dir + '/' + 'followup_' + str(report_date) + '.xlsx']
    se.send_email_with_attachment(subject='hrsn_' + str(report_date),
                                  email_text='cov_data_integrity: ' + cov_data_integrity + '\r\n' + expiration_text,
                                  attachment_list=attachments)
def generate_test_file_4stir_rates(**kwargs):
    """Build (and cache per settle date) simple interest rates to option
    expirations for liquid options, then dump the merged table to an xlsx
    test file under the test_data folder.
    """
    output_dir = dn.get_directory_name(ext='test_data')

    # Every 6th business day over the sample window.
    full_dates = exp.get_bus_day_list(date_from=20100101, date_to=20160821)
    bus_dates_select = full_dates[0::6]

    data_frame_list = []

    for settle_date in bus_dates_select:
        date_file_name = output_dir + '/' + str(settle_date) + '.pkl'
        if os.path.isfile(date_file_name):
            liquid_options = pd.read_pickle(date_file_name)
        else:
            liquid_options = cl.generate_liquid_options_list_dataframe(settle_date=settle_date)
            liquid_options.drop_duplicates('expiration_date', inplace=True)
            liquid_options = liquid_options[['expiration_date']]
            liquid_options['settle_date'] = settle_date
            # Encode the expiration as a yyyymmdd int for the rate lookup.
            liquid_options['exp_date'] = liquid_options['expiration_date'].apply(
                lambda x: 10000 * x.year + 100 * x.month + x.day)
            liquid_options['int_rate'] = liquid_options.apply(
                lambda x: grfs.get_simple_rate(as_of_date=x['settle_date'], date_to=x['exp_date'])['rate_output'],
                axis=1)
            liquid_options.to_pickle(date_file_name)
        data_frame_list.append(liquid_options)

    merged_data = pd.concat(data_frame_list)
    merged_data.reset_index(inplace=True, drop=True)
    merged_data = merged_data[['settle_date', 'exp_date', 'int_rate']]

    writer = pd.ExcelWriter(output_dir + '/' + 'stir_option_rate_test' + '.xlsx', engine='xlsxwriter')
    merged_data.to_excel(writer, sheet_name='all')
    # BUG FIX: the writer was never flushed, so the xlsx file was never
    # actually written to disk.
    writer.save()
def load_and_convert_abn_position_file(**kwargs):
    """Load the ABN position csv and reduce it to (generalized_ticker, qty).

    Futures rows keep the plain ticker; option rows are encoded as
    ticker-option_type-strike with trailing '.0' trimmed from the strike.
    Rows with zero net quantity are dropped.
    """
    abn_frame = pd.read_csv(dn.get_directory_name(ext='daily') + '/' + position_file_name)
    abn_frame = abn_frame[abn_frame['Expiration'].notnull()]

    abn_frame['ticker_head'] = [conversion_from_abn_ticker_head[x] for x in abn_frame['Symbol']]
    abn_frame['strike_multiplier'] = [get_abn_strike_multiplier(x) for x in abn_frame['ticker_head']]

    # Expiration is a YYYYMM number: month letter from the last two digits,
    # year from the rest.
    abn_frame['ticker'] = [abn_frame['ticker_head'].iloc[x]
                           + cmi.full_letter_month_list[int(abn_frame['Expiration'].iloc[x] % 100) - 1]
                           + str(m.floor(abn_frame['Expiration'].iloc[x] / 100))
                           for x in range(len(abn_frame.index))]

    # Net quantity: shorts count negative, missing legs count zero.
    abn_frame['Short'] = -abn_frame['Short']
    abn_frame['Long'] = abn_frame['Long'].fillna(0).astype('int')
    abn_frame['Short'] = abn_frame['Short'].fillna(0).astype('int')
    abn_frame['qty'] = abn_frame['Long'] + abn_frame['Short']

    abn_frame.rename(columns={'Strike': 'strike_price', 'PutCall': 'option_type'}, inplace=True)
    abn_frame['strike_price'] = abn_frame['strike_multiplier'] * abn_frame['strike_price']

    # Rows with a strike are options; everything else is futures.  (.loc
    # replaces the old chained assignments, and the internal column typo
    # 'instrumet' is corrected -- it is not part of the returned frame.)
    option_indx = abn_frame['strike_price'].notnull()
    abn_frame['instrument'] = 'F'
    abn_frame.loc[option_indx, 'instrument'] = 'O'

    abn_frame['generalized_ticker'] = abn_frame['ticker']
    abn_frame.loc[option_indx, 'generalized_ticker'] = abn_frame['ticker'][option_indx] + '-' + \
        abn_frame['option_type'][option_indx] + '-' + \
        abn_frame['strike_price'][option_indx].astype(str)

    abn_frame = abn_frame[abn_frame['qty'] != 0]
    # BUG FIX: strip trailing zeros only from strike decimals; the old
    # unconditional rstrip('0') also mutilated futures tickers whose year
    # ends in 0 (e.g. ...2020 -> ...202).
    abn_frame['generalized_ticker'] = [x.rstrip('0').rstrip('.') if '.' in x else x
                                       for x in abn_frame['generalized_ticker']]
    return abn_frame[['generalized_ticker', 'qty']]
def main():
    """Build the daily vcs (volatility curve spread) candidate sheet and hand
    it to the IB Algo app for execution.

    NOTE(review): reconstructed formatting only -- code tokens are unchanged.
    """
    app = algo.Algo()
    report_date = exp.doubledate_shift_bus_days()
    todays_date = cu.get_doubledate()
    con = msu.get_my_sql_connection()

    # Candidate pairs for the report date, pre-filtered by long2/short2 rules.
    vcs_output = vcs.generate_vcs_sheet_4date(date_to=report_date)
    vcs_pairs = vcs_output['vcs_pairs']

    filter_out = of.get_vcs_filters(data_frame_input=vcs_pairs, filter_list=['long2', 'short2'])
    vcs_pairs = filter_out['selected_frame']

    # Require both risk numbers; drop short-dated and excluded asset classes.
    vcs_pairs = vcs_pairs[vcs_pairs['downside'].notnull() & vcs_pairs['upside'].notnull()]
    # &(vcs_pairs.tickerClass!='Energy')
    vcs_pairs = vcs_pairs[(vcs_pairs['trDte1'] >= 50) &
                          (vcs_pairs.tickerClass != 'Metal') &
                          (vcs_pairs.tickerClass != 'FX') &
                          (vcs_pairs.tickerClass != 'Energy')]
    # Keep pairs whose vol quantile disagrees with the forward-vol quantile.
    vcs_pairs = vcs_pairs[((vcs_pairs['Q'] <= 30) & (vcs_pairs['fwdVolQ'] >= 30)) |
                          ((vcs_pairs['Q'] >= 70) & (vcs_pairs['fwdVolQ'] <= 70))]
    vcs_pairs.reset_index(drop=True, inplace=True)

    # Underlying futures per leg and their preloaded price history.
    vcs_pairs['underlying_ticker1'] = [omu.get_option_underlying(ticker=x) for x in vcs_pairs['ticker1']]
    vcs_pairs['underlying_ticker2'] = [omu.get_option_underlying(ticker=x) for x in vcs_pairs['ticker2']]
    vcs_pairs['underlying_tickerhead'] = [cmi.get_contract_specs(x)['ticker_head']
                                          for x in vcs_pairs['underlying_ticker1']]
    futures_data_dictionary = {x: gfp.get_futures_price_preloaded(ticker_head=x)
                               for x in vcs_pairs['underlying_tickerhead'].unique()}

    # Proxy tickers (with an additive adjustment) per leg.
    proxy_output_list1 = [up.get_underlying_proxy_ticker(ticker=x, settle_date=report_date,
                                                         futures_data_dictionary=futures_data_dictionary)
                          for x in vcs_pairs['underlying_ticker1']]
    vcs_pairs['proxy_ticker1'] = [x['ticker'] for x in proxy_output_list1]
    vcs_pairs['add_2_proxy1'] = [x['add_2_proxy'] for x in proxy_output_list1]

    proxy_output_list2 = [up.get_underlying_proxy_ticker(ticker=x, settle_date=report_date,
                                                         futures_data_dictionary=futures_data_dictionary)
                          for x in vcs_pairs['underlying_ticker2']]
    vcs_pairs['proxy_ticker2'] = [x['ticker'] for x in proxy_output_list2]
    vcs_pairs['add_2_proxy2'] = [x['add_2_proxy'] for x in proxy_output_list2]

    # Option expirations (yyyymmdd ints) and the matching simple rates.
    vcs_pairs['expiration_date1'] = [int(exp.get_expiration_from_db(instrument='options', ticker=x, con=con).strftime('%Y%m%d'))
                                     for x in vcs_pairs['ticker1']]
    vcs_pairs['expiration_date2'] = [int(exp.get_expiration_from_db(instrument='options', ticker=x, con=con).strftime('%Y%m%d'))
                                     for x in vcs_pairs['ticker2']]
    vcs_pairs['interest_date1'] = [grfs.get_simple_rate(as_of_date=report_date, date_to=x)['rate_output']
                                   for x in vcs_pairs['expiration_date1']]
    vcs_pairs['interest_date2'] = [grfs.get_simple_rate(as_of_date=report_date, date_to=x)['rate_output']
                                   for x in vcs_pairs['expiration_date2']]
    vcs_pairs['exercise_type'] = [cmi.get_option_exercise_type(ticker_head=x) for x in vcs_pairs['tickerHead']]

    # Position sizing from the admin risk parameter file.
    admin_dir = dn.get_directory_name(ext='admin')
    risk_file_out = su.read_text_file(file_name=admin_dir + '/RiskParameter.txt')
    vcs_risk_parameter = 5 * 2 * float(risk_file_out[0])
    vcs_pairs['long_quantity'] = vcs_risk_parameter / abs(vcs_pairs['downside'])
    vcs_pairs['short_quantity'] = vcs_risk_parameter / vcs_pairs['upside']
    vcs_pairs['long_quantity'] = vcs_pairs['long_quantity'].round()
    vcs_pairs['short_quantity'] = vcs_pairs['short_quantity'].round()

    vcs_pairs['alias'] = [generate_vcs_alias(vcs_row=vcs_pairs.iloc[x]) for x in range(len(vcs_pairs.index))]

    # Placeholders filled in later with live market data.
    vcs_pairs['call_mid_price1'] = np.nan
    vcs_pairs['put_mid_price1'] = np.nan
    vcs_pairs['call_mid_price2'] = np.nan
    vcs_pairs['put_mid_price2'] = np.nan
    vcs_pairs['call_iv1'] = np.nan
    vcs_pairs['put_iv1'] = np.nan
    vcs_pairs['call_iv2'] = np.nan
    vcs_pairs['put_iv2'] = np.nan
    vcs_pairs['underlying_mid_price1'] = np.nan
    vcs_pairs['underlying_mid_price2'] = np.nan
    vcs_pairs['proxy_mid_price1'] = np.nan
    vcs_pairs['proxy_mid_price2'] = np.nan
    vcs_pairs['current_strike1'] = np.nan
    vcs_pairs['current_strike2'] = np.nan

    ta_folder = dn.get_dated_directory_extension(folder_date=todays_date, ext='ta')

    # Hand everything to the IB app and start the event loop.
    app.vcs_pairs = vcs_pairs
    app.con = con
    app.futures_data_dictionary = futures_data_dictionary
    app.report_date = report_date
    app.todays_date = todays_date
    app.log = lg.get_logger(file_identifier='vcs', log_level='INFO')
    app.trade_file = ta_folder + '/trade_dir.csv'
    app.vcs_risk_parameter = vcs_risk_parameter
    app.connect(client_id=3)
    app.run()
def get_cme_direct_prices(**kwargs):
    """Read the CME Direct price csv from the daily folder and return it."""
    return pd.read_csv(dn.get_directory_name(ext='daily') + '/' + price_file_name)
def backtest_curve_pca(**kwargs):
    """Backtest the curve-PCA strategy for one ticker head.

    Results are cached per (ticker_head, date_to, years_back, indicator) as a
    pickle under backtest_results/curve_pca.  Returns a dict with a
    'pnl_frame' DataFrame (one row per successful report date) and the raw
    'report_results_list'.
    """
    ticker_head = kwargs['ticker_head']
    date_to = kwargs['date_to']
    years_back = kwargs['years_back']
    indicator = kwargs['indicator']

    backtest_output_dir = dn.get_directory_name(ext='backtest_results')
    file_name = ticker_head + '_' + str(date_to) + '_' + str(years_back) + '_' + indicator
    cache_path = backtest_output_dir + '/curve_pca/' + file_name + '.pkl'

    if os.path.isfile(cache_path):
        return pd.read_pickle(cache_path)

    dates_output = bu.get_backtesting_dates(date_to=date_to, years_back=years_back)
    date_list = sorted(dates_output['double_dates'], reverse=False)

    if 'use_existing_filesQ' in kwargs.keys():
        use_existing_filesQ = kwargs['use_existing_filesQ']
    else:
        use_existing_filesQ = True

    report_results_list = []
    success_indx = []
    contract_multiplier = cmi.contract_multiplier[ticker_head]

    # Loop variable renamed from date_to, which shadowed the kwarg above.
    for backtest_date in date_list:
        report_out = cpc.get_curve_pca_report(ticker_head=ticker_head, date_to=backtest_date,
                                              use_existing_filesQ=use_existing_filesQ)
        success_indx.append(report_out['success'])
        if report_out['success']:
            pca_results = report_out['pca_results']
            pca_results['report_date'] = backtest_date
            report_results_list.append(pca_results)

    good_dates = [date_list[i] for i in range(len(date_list)) if success_indx[i]]

    total_pnl_list = []
    z_score_list = []
    residual_list = []
    num_contract_list = []
    short_side_weight_list = []

    for i in range(len(good_dates)):
        daily_report = report_results_list[i]

        # Only single-month spreads well away from the front expiration.
        daily_report_filtered = daily_report[(daily_report['tr_dte_front'] > 80) &
                                             (daily_report['monthSpread'] == 1)]
        # BUG FIX: DataFrame.sort was removed in pandas 0.20 -- use sort_values.
        daily_report_filtered.sort_values(indicator, ascending=True, inplace=True)

        # Long the bottom quartile of the indicator, short the top quartile,
        # weighting the short side to match first-factor exposure.
        num_contract_4side = round(len(daily_report_filtered.index)/4)
        long_side = daily_report_filtered.iloc[:num_contract_4side]
        short_side = daily_report_filtered.iloc[-num_contract_4side:]
        short_side_weight = long_side['factor_load1'].sum()/short_side['factor_load1'].sum()

        z_score_list.append(np.nanmean(short_side[indicator])-np.nanmean(long_side[indicator]))

        if any(np.isnan(long_side['change5'])) or any(np.isnan(short_side['change5'])):
            # BUG FIX: np.NAN alias was removed in NumPy 2.0 -- use np.nan.
            total_pnl_list.append(np.nan)
        else:
            total_pnl_list.append(np.nanmean(long_side['change5'])-short_side_weight*np.nanmean(short_side['change5']))

        residual_list.append(np.nanmean(short_side['residuals'])-np.nanmean(long_side['residuals']))
        num_contract_list.append(num_contract_4side)
        short_side_weight_list.append(short_side_weight)

    # BUG FIX: pd.DataFrame.from_items was removed in pandas 0.25; a plain
    # dict preserves column order on Python 3.7+.
    backtest_results = {'pnl_frame': pd.DataFrame({'settle_date': good_dates,
                                                   'num_contracts': num_contract_list,
                                                   'z': z_score_list,
                                                   'residual': residual_list,
                                                   'short_side_weight': short_side_weight_list,
                                                   'pnl': [x*contract_multiplier for x in total_pnl_list]}),
                        'report_results_list': report_results_list}

    with open(cache_path, 'wb') as handle:
        pickle.dump(backtest_results, handle)

    return backtest_results
import get_price.save_stock_data as ssd import contract_utilities.expiration as exp from pandas_finance import Equity import shared.directory_names as dn import pandas as pd import os.path directory_name = dn.get_directory_name(ext='fundamental_data') def create_sector_classification_file(**kwargs): file_name = directory_name + '\sector_classification.pkl' if os.path.isfile(file_name): return pd.read_pickle(file_name) if 'report_date' in kwargs.keys(): report_date = kwargs['report_date'] else: report_date = exp.doubledate_shift_bus_days() symbol_list = ssd.get_symbol_list_4date(settle_date=report_date) for i in range(len(symbol_list)): eqty = Equity(symbol_list[i]) try: sector_list.append(eqty.sector) industry_list.append(eqty.industry) except:
def get_boto_client():
    """Create an S3 client from credentials stored in the config folder."""
    credentials = su.read_config_file(file_name=dn.get_directory_name(ext='config') + '/s3_credentials.txt')
    return bt3.client('s3',
                      aws_access_key_id=credentials['aws_access_key_id'],
                      aws_secret_access_key=credentials['aws_secret_access_key'])
def get_spreads_4date(**kwargs):
    """Build the user-defined 2- and 3-leg inter-commodity spreads for a date.

    Legs are matched on delivery month (12*year + month) via the shiftable
    yearMonthMerge key, allowing the later legs to sit one month either side
    of the earlier leg.  Returns one row per combination with contract /
    ticker-head / volume columns.

    NOTE(review): reconstructed formatting only -- code tokens are unchanged.
    """
    futures_dataframe = cl.generate_futures_list_dataframe(**kwargs)

    # Optional liquidity screen.
    if 'volume_filter' in kwargs.keys():
        volume_filter = kwargs['volume_filter']
        futures_dataframe = futures_dataframe[futures_dataframe['volume'] > volume_filter]
        futures_dataframe.reset_index(drop=True, inplace=True)

    # yearMonth puts the delivery month on a single axis; yearMonthMerge is
    # the join key, re-pointed below to shift legs by one month.
    futures_dataframe['yearMonth'] = 12*futures_dataframe['ticker_year']+futures_dataframe['ticker_month']
    futures_dataframe['yearMonthMerge'] = futures_dataframe['yearMonth']
    futures_dataframe = futures_dataframe[['ticker','yearMonth','yearMonthMerge','ticker_head','volume']]

    spread_frame = pd.read_excel(dn.get_directory_name(ext='python_file') + '/opportunity_constructs/user_defined_spreads.xlsx')

    output_frame = pd.DataFrame()

    for i in range(len(spread_frame.index)):
        tickerhead1 = spread_frame['tickerHead1'].iloc[i]
        tickerhead2 = spread_frame['tickerHead2'].iloc[i]
        tickerhead3 = spread_frame['tickerHead3'].iloc[i]
        tickerhead4 = spread_frame['tickerHead4'].iloc[i]

        frame1 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead1]
        frame2 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead2]
        frame3 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead3]
        frame4 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead4]

        # Leg 1 x leg 2: same month, then leg 2 shifted +1 and -1 month.
        merged11 = pd.merge(frame1, frame2, how='inner', on='yearMonthMerge')
        frame2['yearMonthMerge'] = frame2['yearMonth']+1
        merged12 = pd.merge(frame1, frame2, how='inner', on='yearMonthMerge')
        frame2['yearMonthMerge'] = frame2['yearMonth']-1
        merged10 = pd.merge(frame1, frame2, how='inner', on='yearMonthMerge')

        if frame3.empty:
            # Two-leg spread: contract3/ticker_head3/volume3 stay empty.
            output_frame2 = pd.concat([merged11, merged12, merged10])
            spread_i = pd.DataFrame()
            spread_i['contract1'] = output_frame2['ticker_x']
            spread_i['contract2'] = output_frame2['ticker_y']
            spread_i['contract3'] = None
            spread_i['ticker_head1'] = tickerhead1
            spread_i['ticker_head2'] = tickerhead2
            spread_i['ticker_head3'] = None
            spread_i['volume1'] = output_frame2['volume_x']
            spread_i['volume2'] = output_frame2['volume_y']
            spread_i['volume3'] = None
            output_frame = pd.concat([output_frame, spread_i])
        elif frame4.empty:
            # Three-leg spread: re-select frame3 with a fresh merge key, then
            # combine the leg-1/leg-2 merges with leg 3 at 0/+1/-1 months.
            # NOTE(review): rows that define a fourth leg (frame4 non-empty)
            # fall through both branches and are silently skipped.
            frame3 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead3]
            merged111 = pd.merge(merged11, frame3, how='inner', on='yearMonthMerge')
            merged121 = pd.merge(merged12, frame3, how='inner', on='yearMonthMerge')
            merged101 = pd.merge(merged10, frame3, how='inner', on='yearMonthMerge')
            frame3['yearMonthMerge'] = frame3['yearMonth']+1
            merged112 = pd.merge(merged11, frame3, how='inner', on='yearMonthMerge')
            merged122 = pd.merge(merged12, frame3, how='inner', on='yearMonthMerge')
            frame3['yearMonthMerge'] = frame3['yearMonth']-1
            merged110 = pd.merge(merged11, frame3, how='inner', on='yearMonthMerge')
            merged100 = pd.merge(merged10, frame3, how='inner', on='yearMonthMerge')

            output_frame3 = pd.concat([merged111,merged121,merged101, merged112,merged122,merged110,merged100])
            spread_i = pd.DataFrame()
            spread_i['contract1'] = output_frame3['ticker_x']
            spread_i['contract2'] = output_frame3['ticker_y']
            spread_i['contract3'] = output_frame3['ticker']
            spread_i['ticker_head1'] = tickerhead1
            spread_i['ticker_head2'] = tickerhead2
            spread_i['ticker_head3'] = tickerhead3
            spread_i['volume1'] = output_frame3['volume_x']
            spread_i['volume2'] = output_frame3['volume_y']
            spread_i['volume3'] = output_frame3['volume']
            output_frame = pd.concat([output_frame,spread_i])

    output_frame.reset_index(drop=True,inplace=True)
    return output_frame
def construct_futures_butterfly_portfolio(**kwargs):
    """Aggregate daily futures-butterfly backtest sheets into a per-date,
    per-ticker-head pnl frame for one trading rule.

    Results are cached as backtest_results/futures_butterfly/portfolio<rule>.pkl.

    NOTE(review): reconstructed formatting only -- code tokens are unchanged.
    """
    rule_no = kwargs['rule_no']
    backtest_output = kwargs['backtest_output']
    pnl_field = kwargs['pnl_field']

    # Only rule 2 carries a real stop loss; the huge negative value for the
    # other rules effectively disables the stop.
    if rule_no in [1, 3, 4, 5, 6, 7, 8, 9]:
        stop_loss = -100000000000
    elif rule_no == 2:
        stop_loss = -1000

    backtest_results_folder = dn.get_directory_name(ext='backtest_results')

    if os.path.isfile(backtest_results_folder + '/futures_butterfly/portfolio' + str(rule_no) + '.pkl'):
        return pd.read_pickle(backtest_results_folder + '/futures_butterfly/portfolio' + str(rule_no) + '.pkl')
    elif not os.path.exists(backtest_results_folder + '/futures_butterfly'):
        os.makedirs(backtest_results_folder + '/futures_butterfly')

    date_list = kwargs['date_list']
    ticker_head_list = cmi.futures_butterfly_strategy_tickerhead_list

    # One row per report date; one column per ticker head plus the portfolio total.
    total_pnl_frame = pd.DataFrame({'report_date': date_list})
    total_pnl_frame['portfolio'] = 0
    for i in range(len(ticker_head_list)):
        total_pnl_frame[ticker_head_list[i]] = 0

    for i in range(len(date_list)):
        pnl_tickerhead_frame = pd.DataFrame({'ticker_head': ticker_head_list})
        pnl_tickerhead_frame['buy_mean_pnl'] = 0
        pnl_tickerhead_frame['sell_mean_pnl'] = 0
        pnl_tickerhead_frame['total_pnl'] = 0

        daily_sheet = backtest_output[i]

        for j in range(len(ticker_head_list)):
            ticker_head_results = daily_sheet[daily_sheet['tickerHead'] == ticker_head_list[j]]

            # Long/short trade selection by the rule's filters; only finite
            # pnl rows participate.
            filter_output_long = sf.get_futures_butterfly_filters(data_frame_input=ticker_head_results,
                                                                  filter_list=['long' + str(rule_no)])
            filter_output_short = sf.get_futures_butterfly_filters(data_frame_input=ticker_head_results,
                                                                   filter_list=['short' + str(rule_no)])

            selected_short_trades = ticker_head_results[filter_output_short['selection_indx'] &
                                                        (np.isfinite(ticker_head_results[pnl_field]))]
            selected_long_trades = ticker_head_results[filter_output_long['selection_indx'] &
                                                       (np.isfinite(ticker_head_results[pnl_field]))]

            # Trades breaching the first-period stop are re-marked with the
            # stopped-out second-period pnl.
            if len(selected_short_trades.index) > 0:
                selected_short_trades.loc[selected_short_trades['hold_pnl1short'] < stop_loss, pnl_field] = \
                    selected_short_trades.loc[selected_short_trades['hold_pnl1short'] < stop_loss, 'hold_pnl2short']
                pnl_tickerhead_frame['sell_mean_pnl'][j] = selected_short_trades[pnl_field].mean()

            if len(selected_long_trades.index) > 0:
                selected_long_trades.loc[selected_long_trades['hold_pnl1long'] < stop_loss, pnl_field] = \
                    selected_long_trades.loc[selected_long_trades['hold_pnl1long'] < stop_loss, 'hold_pnl2long']
                pnl_tickerhead_frame['buy_mean_pnl'][j] = selected_long_trades[pnl_field].mean()

            pnl_tickerhead_frame['total_pnl'][j] = pnl_tickerhead_frame['buy_mean_pnl'][j] + \
                pnl_tickerhead_frame['sell_mean_pnl'][j]
            total_pnl_frame[ticker_head_list[j]][i] = pnl_tickerhead_frame['total_pnl'][j]

        total_pnl_frame['portfolio'][i] = pnl_tickerhead_frame['total_pnl'].sum()

    total_pnl_frame.to_pickle(backtest_results_folder + '/futures_butterfly/portfolio' + str(rule_no) + '.pkl')
    return total_pnl_frame
import contract_utilities.expiration as exp import ta.strategy as ts import shutil as sutil import shared.directory_names as dn import formats.utils as futil daily_dir = dn.get_directory_name(ext='daily') def prepare_strategy_daily(**kwargs): strategy_class = kwargs['strategy_class'] if 'report_date' in kwargs.keys(): report_date = kwargs['report_date'] else: report_date = exp.doubledate_shift_bus_days() output_dir = ts.create_strategy_output_dir(strategy_class=strategy_class, report_date=report_date) sutil.copyfile( output_dir + '/' + futil.xls_file_names[strategy_class] + '.xlsx', daily_dir + '/' + futil.xls_file_names[strategy_class] + '_' + str(report_date) + '.xlsx') def move_from_dated_folder_2daily_folder(**kwargs): ext = kwargs['ext'] file_name_raw = kwargs['file_name']
def generate_test_cases_from_aligned_option_data(**kwargs):
    """Sample aligned option time series into a cached option-model test set.

    For every (ticker head, dte center, delta center) combination -- and per
    contract month where the product is month-specific -- four rows are taken
    at the quarter points of the series.  The merged result is pickled and
    reused on subsequent calls.

    NOTE(review): reconstructed formatting only -- code tokens are unchanged.
    """
    test_data_dir = dn.get_directory_name(ext='option_model_test_data')
    if os.path.isfile(test_data_dir + '/option_model_test_data.pkl'):
        data_frame_test = pd.read_pickle(test_data_dir + '/option_model_test_data.pkl')
        return data_frame_test

    ticker_head_list = cmi.cme_option_tickerhead_list
    tr_dte_center_list = [10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 90, 100, 120,
                          140, 180, 200, 220, 240, 260, 280, 300, 330, 360, 390]
    delta_list = [0.1, 0.15, 0.25, 0.35, 0.5, -0.1, -0.15, -0.25, -0.35, -0.5]
    data_frame_list = []

    aligned_column_names = ['TickerYear', 'TickerMonth', 'settleDates', 'calDTE', 'rate2OptExp',
                            'theoValue', 'impVol', 'atmVol', 'delta', 'strike', 'underlying',
                            'dollarVega', 'dollarTheta', 'gamma', 'dollarGamma',
                            'optionPnL', 'deltaPnL', 'gammaPnL', 'thetaPnL']
    #ticker_head_list = ticker_head_list[3:5]

    for i in range(len(ticker_head_list)):
        # Eurodollar-family products use the OU model; everything else BS.
        if ticker_head_list[i] in ['ED', 'E0', 'E2', 'E3', 'E4', 'E5']:
            model = 'OU'
        else:
            model = 'BS'

        ticker_class = cmi.ticker_class[ticker_head_list[i]]
        contract_multiplier = cmi.contract_multiplier[ticker_head_list[i]]

        # Livestock/Ag and NG series are stored per contract month.
        if ticker_class in ['Livestock', 'Ag'] or ticker_head_list[i] == 'NG':
            month_specificQ = True
            contract_month_list = cmi.get_option_contract_months(ticker_head=ticker_head_list[i])
        else:
            month_specificQ = False

        for j in range(len(tr_dte_center_list)):
            for k in range(len(delta_list)):
                if month_specificQ:
                    for contract_month in contract_month_list:
                        data_frame = gop.load_aligend_options_data_file(ticker_head=ticker_head_list[i],
                                                                        tr_dte_center=tr_dte_center_list[j],
                                                                        delta_center=delta_list[k],
                                                                        contract_month_letter=contract_month,
                                                                        model=model)
                        if data_frame.empty:
                            continue
                        else:
                            # Sample at the 1/4, 2/4, 3/4 points and the last row.
                            x1 = round(len(data_frame.index)/4)
                            x2 = round(2*len(data_frame.index)/4)
                            x3 = round(3*len(data_frame.index)/4)
                            data_frame_select = data_frame[aligned_column_names].iloc[[-x3, -x2, -x1, -1]]
                            data_frame_select['tickerHead'] = ticker_head_list[i]
                            data_frame_select['contractMultiplier'] = contract_multiplier
                            data_frame_select['exercise_type'] = cmi.get_option_exercise_type(ticker_head=ticker_head_list[i])
                            data_frame_list.append(data_frame_select)
                else:
                    data_frame = gop.load_aligend_options_data_file(ticker_head=ticker_head_list[i],
                                                                    tr_dte_center=tr_dte_center_list[j],
                                                                    delta_center=delta_list[k],
                                                                    model=model)
                    if data_frame.empty:
                        continue
                    else:
                        # Same quarter-point sampling as the month-specific branch.
                        x1 = round(len(data_frame.index)/4)
                        x2 = round(2*len(data_frame.index)/4)
                        x3 = round(3*len(data_frame.index)/4)
                        data_frame_select = data_frame[aligned_column_names].iloc[[-x3, -x2, -x1, -1]]
                        data_frame_select['tickerHead'] = ticker_head_list[i]
                        data_frame_select['contractMultiplier'] = contract_multiplier
                        data_frame_select['exercise_type'] = cmi.get_option_exercise_type(ticker_head=ticker_head_list[i])
                        data_frame_list.append(data_frame_select)

    data_frame_test = pd.concat(data_frame_list)
    # Scale pnl columns to dollars per contract.
    data_frame_test['optionPnL'] = data_frame_test['optionPnL']*data_frame_test['contractMultiplier']
    data_frame_test['deltaPnL'] = data_frame_test['deltaPnL']*data_frame_test['contractMultiplier']
    data_frame_test['ticker'] = [data_frame_test['tickerHead'].iloc[x]
                                 + cmi.letter_month_string[int(data_frame_test['TickerMonth'].iloc[x]-1)]
                                 + str(int(data_frame_test['TickerYear'].iloc[x]))
                                 for x in range(len(data_frame_test.index))]
    # Negative deltas are puts.
    data_frame_test['option_type'] = 'C'
    data_frame_test['option_type'][data_frame_test['delta'] < 0] = 'P'
    data_frame_test.to_pickle(test_data_dir + '/option_model_test_data.pkl')
    return data_frame_test
import contract_utilities.expiration as exp import ta.strategy as ts import shutil as sutil import shared.directory_names as dn import formats.utils as futil daily_dir = dn.get_directory_name(ext="daily") def prepare_strategy_daily(**kwargs): strategy_class = kwargs["strategy_class"] if "report_date" in kwargs.keys(): report_date = kwargs["report_date"] else: report_date = exp.doubledate_shift_bus_days() output_dir = ts.create_strategy_output_dir(strategy_class=strategy_class, report_date=report_date) sutil.copyfile( output_dir + "/" + futil.xls_file_names[strategy_class] + ".xlsx", daily_dir + "/" + futil.xls_file_names[strategy_class] + "_" + str(report_date) + ".xlsx", ) def move_from_dated_folder_2daily_folder(**kwargs): ext = kwargs["ext"] file_name_raw = kwargs["file_name"]
def main():
    """Build the overnight-calendar-spread (ocs) sheet, reconcile it against
    currently open strategies, and hand everything to the IB Algo app.

    NOTE(review): reconstructed formatting only -- code tokens are unchanged.
    """
    app = algo.Algo()
    admin_dir = dn.get_directory_name(ext='admin')
    risk_file_out = su.read_text_file(file_name=admin_dir + '/RiskParameter.txt')
    app.bet_size = float(risk_file_out[0])
    con = msu.get_my_sql_connection()
    date_now = cu.get_doubledate()
    report_date = exp.doubledate_shift_bus_days()

    # Candidate sheets for each of the last 9 business days; index 0 is the
    # most recent and becomes the working sheet below.
    report_date_list = [exp.doubledate_shift_bus_days(shift_in_days=x) for x in range(1, 10)]

    overnight_calendars_list = []

    for i in range(len(report_date_list)):
        ocs_output = ocs.generate_overnight_spreads_sheet_4date(date_to=report_date_list[i])
        overnight_calendars = ocs_output['overnight_calendars']
        # Restrict to the traded product universe and require both legs of
        # the front spread to exist.
        overnight_calendars = \
            overnight_calendars[overnight_calendars['tickerHead'].isin(['CL', 'HO', 'NG', 'C', 'W', 'KW',
                                                                        'S', 'SM', 'BO', 'LC', 'LN', 'FC'])]
        #isin(['CL', 'HO','NG', 'C', 'W', 'KW', 'S', 'SM', 'BO', 'LC', 'LN', 'FC'])]
        overnight_calendars = overnight_calendars[(overnight_calendars['ticker1L'] != '') &
                                                  (overnight_calendars['ticker2L'] != '')]

        # Live-price placeholders.
        overnight_calendars['back_spread_price'] = np.nan
        overnight_calendars['front_spread_price'] = np.nan
        overnight_calendars['mid_ticker_price'] = np.nan

        overnight_calendars['back_spread_ticker'] = [overnight_calendars['ticker1'].iloc[x] + '-' +
                                                     overnight_calendars['ticker2'].iloc[x]
                                                     for x in range(len(overnight_calendars.index))]
        overnight_calendars['front_spread_ticker'] = [overnight_calendars['ticker1L'].iloc[x] + '-' +
                                                      overnight_calendars['ticker2L'].iloc[x]
                                                      for x in range(len(overnight_calendars.index))]
        # Size by risk budget, capped at the user-confirmation volume limit.
        overnight_calendars['target_quantity'] = [min(mth.ceil(app.bet_size / x),
                                                      app.total_traded_volume_max_before_user_confirmation)
                                                  for x in overnight_calendars['dollarNoise100']]
        overnight_calendars['alias'] = [overnight_calendars['ticker1'].iloc[x] + '_' +
                                        overnight_calendars['ticker2'].iloc[x] + '_ocs'
                                        for x in range(len(overnight_calendars.index))]
        overnight_calendars['total_quantity'] = 0
        overnight_calendars['total_risk'] = 0
        overnight_calendars['holding_period'] = 0
        #overnight_calendars['expiring_position_q'] = 0
        overnight_calendars.reset_index(drop=True, inplace=True)
        overnight_calendars_list.append(overnight_calendars)

    # Most recent sheet is the working copy; older sheets back-fill positions
    # whose spread no longer appears in it.
    overnight_calendars = overnight_calendars_list.pop(0)

    open_strategy_frame = ts.get_filtered_open_strategies(strategy_class_list=['ocs'], as_of_date=date_now)

    for i in range(len(open_strategy_frame.index)):
        position_manager_output = pm.get_ocs_position(alias=open_strategy_frame['alias'].iloc[i],
                                                      as_of_date=date_now, con=con)
        trades_frame = ts.get_trades_4strategy_alias(alias=open_strategy_frame['alias'].iloc[i], con=con)
        datetime_now = cu.convert_doubledate_2datetime(date_now)
        holding_period = (datetime_now - trades_frame['trade_date'].min()).days

        if (not position_manager_output['empty_position_q']) & (not position_manager_output['correct_position_q']):
            # Position exists but does not match the expected ocs structure.
            print('Check ' + open_strategy_frame['alias'].iloc[i] + ' ! Position may be incorrect')
        elif position_manager_output['correct_position_q']:
            ticker_head = cmi.get_contract_specs(position_manager_output['sorted_position']['ticker'].iloc[0])['ticker_head']
            position_name = ''
            if position_manager_output['scale'] > 0:
                position_name = ticker_head + '_long'
            else:
                position_name = ticker_head + '_short'

            # Register the existing exposure in the per-ticker-head portfolio.
            app.ocs_portfolio.order_send(ticker=position_name, qty=abs(position_manager_output['scale']))
            app.ocs_portfolio.order_fill(ticker=position_name, qty=abs(position_manager_output['scale']))

            ticker1 = position_manager_output['sorted_position']['ticker'].iloc[0]
            ticker2 = position_manager_output['sorted_position']['ticker'].iloc[1]

            selection_indx = overnight_calendars['back_spread_ticker'] == ticker1 + '-' + ticker2

            if sum(selection_indx) == 1:
                # Spread is still on the current sheet: attach the live
                # position, risk, alias and holding period to that row.
                overnight_calendars.loc[selection_indx, 'total_quantity'] = position_manager_output['scale']
                overnight_calendars.loc[selection_indx, 'total_risk'] = position_manager_output['scale'] * \
                    overnight_calendars.loc[selection_indx, 'dollarNoise100']
                overnight_calendars.loc[selection_indx, 'alias'] = open_strategy_frame['alias'].iloc[i]
                overnight_calendars.loc[selection_indx, 'holding_period'] = holding_period
                app.ocs_risk_portfolio.order_send(ticker=position_name,
                                                  qty=abs(position_manager_output['scale'] *
                                                          overnight_calendars.loc[selection_indx, 'dollarNoise100']))
                app.ocs_risk_portfolio.order_fill(ticker=position_name,
                                                  qty=abs(position_manager_output['scale'] *
                                                          overnight_calendars.loc[selection_indx, 'dollarNoise100']))
            else:
                # Fall back to older sheets and pull the matching row into the
                # working sheet.
                for j in range(len(overnight_calendars_list)):
                    overnight_calendars_past = overnight_calendars_list[j]
                    selection_indx = overnight_calendars_past['back_spread_ticker'] == ticker1 + '-' + ticker2
                    if sum(selection_indx) == 1:
                        overnight_calendars_past.loc[selection_indx, 'total_quantity'] = position_manager_output['scale']
                        overnight_calendars_past.loc[selection_indx, 'total_risk'] = position_manager_output['scale'] * \
                            overnight_calendars_past.loc[selection_indx, 'dollarNoise100']
                        overnight_calendars_past.loc[selection_indx, 'alias'] = open_strategy_frame['alias'].iloc[i]
                        overnight_calendars_past.loc[selection_indx, 'holding_period'] = holding_period
                        app.ocs_risk_portfolio.order_send(ticker=position_name,
                                                          qty=abs(position_manager_output['scale'] *
                                                                  overnight_calendars_past.loc[selection_indx, 'dollarNoise100']))
                        app.ocs_risk_portfolio.order_fill(ticker=position_name,
                                                          qty=abs(position_manager_output['scale'] *
                                                                  overnight_calendars_past.loc[selection_indx, 'dollarNoise100']))
                        if j > 1:
                            # Sheets older than two days: indicator stats are stale.
                            overnight_calendars_past.loc[selection_indx, 'butterflyMean'] = np.nan
                            overnight_calendars_past.loc[selection_indx, 'butterflyNoise'] = np.nan
                        # NOTE(review): DataFrame.append was removed in pandas
                        # 2.0 -- pd.concat is the long-term replacement.
                        overnight_calendars = overnight_calendars.append(overnight_calendars_past[selection_indx])
                        break

    overnight_calendars.reset_index(drop=True, inplace=True)
    overnight_calendars['working_order_id'] = np.nan

    spread_ticker_list = list(set(overnight_calendars['back_spread_ticker']).union(overnight_calendars['front_spread_ticker']))
    back_spread_ticker_list = list(overnight_calendars['back_spread_ticker'])

    # One long and one short theme per back spread for the alias portfolio.
    theme_name_list = set([x + '_long' for x in back_spread_ticker_list]).union(
        set([x + '_short' for x in back_spread_ticker_list]))
    ocs_alias_portfolio = aup.portfolio(ticker_list=theme_name_list)

    # Seed the alias portfolio with existing net quantities.
    for i in range(len(overnight_calendars.index)):
        if overnight_calendars.loc[i, 'total_quantity'] > 0:
            position_name = overnight_calendars.loc[i, 'back_spread_ticker'] + '_long'
            ocs_alias_portfolio.order_send(ticker=position_name, qty=overnight_calendars.loc[i, 'total_quantity'])
            ocs_alias_portfolio.order_fill(ticker=position_name, qty=overnight_calendars.loc[i, 'total_quantity'])
        elif overnight_calendars.loc[i, 'total_quantity'] < 0:
            position_name = overnight_calendars.loc[i, 'back_spread_ticker'] + '_short'
            ocs_alias_portfolio.order_send(ticker=position_name, qty=-overnight_calendars.loc[i, 'total_quantity'])
            ocs_alias_portfolio.order_fill(ticker=position_name, qty=-overnight_calendars.loc[i, 'total_quantity'])

    # Hand everything to the IB app and start the event loop.
    app.price_request_dictionary['spread'] = spread_ticker_list
    app.price_request_dictionary['outright'] = overnight_calendars['ticker1'].values
    app.overnight_calendars = overnight_calendars
    app.open_strategy_list = list(open_strategy_frame['alias'])
    app.ocs_alias_portfolio = ocs_alias_portfolio
    app.ticker_list = list(set(overnight_calendars['ticker1']).union(overnight_calendars['ticker2'])
                           .union(set(overnight_calendars['ticker1L'])).union(set(overnight_calendars['ticker2L'])))
    app.output_dir = ts.create_strategy_output_dir(strategy_class='ocs', report_date=report_date)
    app.log = lg.get_logger(file_identifier='ib_ocs', log_level='INFO')
    app.con = con
    app.pnl_frame = tpm.get_daily_pnl_snapshot(as_of_date=report_date)
    print('Emre')
    app.connect(client_id=2)
    app.run()
import quandl_data.get_data_quandl as gdq import shared.directory_names as dn import contract_utilities.contract_meta_info as cmi import shared.calendar_utilities as cu import datetime as dt import os.path import pandas as pd presaved_cot_data_folder = dn.get_directory_name( ext='commitments_of_traders_data') db_2_quandl_dictionary = { 'GC': '088691', 'SI': '084691', 'EC': '099741', 'BP': '096742', 'JY': '097741', 'AD': '232741', 'CD': '090741', 'TU': '042601', 'FV': '044601', 'TY': '043602', 'US': '020601', 'ED': '132741', 'ES': '13874A', 'NQ': '209742', 'CL': '067651', 'HO': '022651', 'RB': '111659', 'NG': '023651', 'C': '002602',
__author__ = 'kocat_000' import os.path import get_price.get_futures_price as gfp import contract_utilities.contract_meta_info as cmi import my_sql_routines.my_sql_utilities as msu import contract_utilities.expiration as exp from pandas.tseries.offsets import CustomBusinessDay import shared.calendar_utilities as cu import shared.directory_names as dn import pandas as pd import numpy as np import datetime as dt pd.options.mode.chained_assignment = None presaved_futures_data_folder = dn.get_directory_name(ext='presaved_futures_data') dirty_data_points = pd.DataFrame([('BM2006', dt.datetime(2005, 11, 18), True), ('BM2006', dt.datetime(2005, 11, 21), True), ('BM2006', dt.datetime(2005, 11, 25), True), ('BM2006', dt.datetime(2005, 11, 28), True), ('BM2006', dt.datetime(2005, 11, 30), True), ('BM2006', dt.datetime(2005, 12, 1), True), ('BM2006', dt.datetime(2005, 12, 2), True), ('BM2006', dt.datetime(2005, 12, 6), True), ('BM2006', dt.datetime(2005, 12, 8), True), ('BM2006', dt.datetime(2005, 12, 9), True), ('BM2006', dt.datetime(2005, 12, 12), True), ('BM2006', dt.datetime(2005, 12, 13), True), ('BM2006', dt.datetime(2005, 12, 14), True), ('BM2006', dt.datetime(2005, 12, 15), True),
def get_spreads_4date(**kwargs):
    """Build the universe of user-defined inter-commodity spreads for one date.

    Reads the liquid futures list for the date (via
    cl.generate_futures_list_dataframe), pairs contracts across the ticker
    heads listed in user_defined_spreads.xlsx, and returns one row per spread
    with its legs and volumes.

    Keyword Args:
        volume_filter (optional): if present, contracts with volume <= this
            value are dropped before spreads are formed.
        (remaining kwargs are passed through to
        cl.generate_futures_list_dataframe — schema not visible here)

    Returns:
        pd.DataFrame with columns contract1..contract3, ticker_head1..3,
        volume1..3, spread_description, min_volume; one row per unique
        spread_description (the most liquid combination is kept).
    """
    futures_dataframe = cl.generate_futures_list_dataframe(**kwargs)

    if 'volume_filter' in kwargs.keys():
        volume_filter = kwargs['volume_filter']
        futures_dataframe = futures_dataframe[futures_dataframe['volume'] > volume_filter]
        futures_dataframe.reset_index(drop=True, inplace=True)

    # Linearized contract month (year*12 + month) so that "one month apart"
    # is a difference of exactly 1 in yearMonthMerge.
    futures_dataframe['yearMonth'] = 12*futures_dataframe['ticker_year']+futures_dataframe['ticker_month']
    futures_dataframe['yearMonthMerge'] = futures_dataframe['yearMonth']
    futures_dataframe = futures_dataframe[['ticker','yearMonth','yearMonthMerge','ticker_head','volume']]

    # Each spreadsheet row defines one 2-, 3- (or 4-) leg spread by ticker head.
    spread_frame = pd.read_excel(dn.get_directory_name(ext='python_file') + '/opportunity_constructs/user_defined_spreads.xlsx')

    output_frame = pd.DataFrame()

    for i in range(len(spread_frame.index)):
        tickerhead1 = spread_frame['tickerHead1'].iloc[i]
        tickerhead2 = spread_frame['tickerHead2'].iloc[i]
        tickerhead3 = spread_frame['tickerHead3'].iloc[i]
        tickerhead4 = spread_frame['tickerHead4'].iloc[i]

        frame1 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead1]
        frame2 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead2]
        frame3 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead3]
        frame4 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead4]

        # Leg-2 alignment: same month, then shifted +1 / -1 month relative to
        # leg 1. NOTE: the shifts work by mutating frame2's merge key in place
        # BETWEEN merges — statement order is load-bearing here.
        merged11 = pd.merge(frame1, frame2, how='inner', on='yearMonthMerge')
        frame2['yearMonthMerge'] = frame2['yearMonth']+1
        merged12 = pd.merge(frame1, frame2, how='inner', on='yearMonthMerge')
        frame2['yearMonthMerge'] = frame2['yearMonth']-1
        merged10 = pd.merge(frame1, frame2, how='inner', on='yearMonthMerge')

        if frame3.empty:
            # Two-leg spread: collect all three month alignments.
            output_frame2 = pd.concat([merged11, merged12, merged10])

            spread_i = pd.DataFrame()
            spread_i['contract1'] = output_frame2['ticker_x']
            spread_i['contract2'] = output_frame2['ticker_y']
            spread_i['contract3'] = None
            spread_i['ticker_head1'] = tickerhead1
            spread_i['ticker_head2'] = tickerhead2
            spread_i['ticker_head3'] = None
            spread_i['volume1'] = output_frame2['volume_x']
            spread_i['volume2'] = output_frame2['volume_y']
            spread_i['volume3'] = None
            output_frame = pd.concat([output_frame, spread_i])

        elif frame4.empty:
            # Three-leg spread. frame3 is re-selected to get a fresh,
            # unshifted merge key before the same +1/-1 mutation dance.
            frame3 = futures_dataframe[futures_dataframe['ticker_head'] == tickerhead3]

            merged111 = pd.merge(merged11, frame3, how='inner', on='yearMonthMerge')
            merged121 = pd.merge(merged12, frame3, how='inner', on='yearMonthMerge')
            merged101 = pd.merge(merged10, frame3, how='inner', on='yearMonthMerge')
            frame3['yearMonthMerge'] = frame3['yearMonth']+1
            merged112 = pd.merge(merged11, frame3, how='inner', on='yearMonthMerge')
            merged122 = pd.merge(merged12, frame3, how='inner', on='yearMonthMerge')
            frame3['yearMonthMerge'] = frame3['yearMonth']-1
            # NOTE(review): not every leg-2 x leg-3 shift combination is
            # merged here (e.g. merged12 with the -1 shift is skipped, and
            # merged10 with the +1 shift) — presumably intentional pruning;
            # confirm against strategy spec.
            merged110 = pd.merge(merged11, frame3, how='inner', on='yearMonthMerge')
            merged100 = pd.merge(merged10, frame3, how='inner', on='yearMonthMerge')

            output_frame3 = pd.concat([merged111,merged121,merged101,
                                       merged112,merged122,merged110,merged100])

            spread_i = pd.DataFrame()
            spread_i['contract1'] = output_frame3['ticker_x']
            spread_i['contract2'] = output_frame3['ticker_y']
            spread_i['contract3'] = output_frame3['ticker']
            spread_i['ticker_head1'] = tickerhead1
            spread_i['ticker_head2'] = tickerhead2
            spread_i['ticker_head3'] = tickerhead3
            spread_i['volume1'] = output_frame3['volume_x']
            spread_i['volume2'] = output_frame3['volume_y']
            spread_i['volume3'] = output_frame3['volume']
            output_frame = pd.concat([output_frame,spread_i])
        # NOTE(review): rows where tickerHead4 is also populated fall through
        # both branches and are silently ignored — 4-leg spreads are not
        # supported by this function as written.

    output_frame['spread_description'] = output_frame.apply(lambda x: x['ticker_head1']+ '_' +x['ticker_head2']
                                         if x['ticker_head3'] is None else
                                         x['ticker_head1']+ '_' +x['ticker_head2'] + '_' + x['ticker_head3']
                                         , axis=1)

    # Keep only the most liquid contract combination for each spread type:
    # rank by the thinnest leg, then deduplicate on the description.
    output_frame['min_volume'] = output_frame.apply(lambda x: min(x['volume1'],x['volume2'])
                                 if x['ticker_head3'] is None else
                                 min(x['volume1'],x['volume2'],x['volume3']),axis=1)

    output_frame.sort_values(['spread_description','min_volume'],ascending=[True, False],inplace=True)
    output_frame.drop_duplicates('spread_description',inplace=True)
    output_frame.reset_index(drop=True,inplace=True)

    return output_frame
def save_stock_data(**kwargs):
    """Download daily stock data and persist it as one pickle per symbol.

    Keyword Args:
        symbol_list (list, optional): symbols to download. If absent,
            'settle_date' must be supplied and the symbol universe for that
            date is used instead.
        settle_date (optional): date used to look up the symbol universe when
            'symbol_list' is not given.
        data_source (str): 'iex' refreshes each symbol wholesale into the iex
            folder; any other source merges a compact incremental pull into
            the existing pickle (full history download on first run), with a
            20-second back-off retry on API rate-limit errors.

    Raises:
        KeyError: if 'data_source' is missing, or if neither 'symbol_list'
            nor 'settle_date' is supplied.
    """
    if 'symbol_list' in kwargs.keys():
        symbol_list = kwargs['symbol_list']
    elif 'settle_date' in kwargs.keys():
        settle_date = kwargs['settle_date']
        symbol_list = get_symbol_list_4date(settle_date=settle_date)
    else:
        # Previously fell through to a NameError on symbol_list; fail loudly.
        raise KeyError("save_stock_data requires 'symbol_list' or 'settle_date'")

    data_source = kwargs['data_source']

    if data_source == 'iex':
        output_dir = dn.get_directory_name(ext='iex_stock_data')
    else:
        output_dir = dn.get_directory_name(ext='stock_data')

    for i, symbol in enumerate(symbol_list):
        file_name = output_dir + '/' + symbol + '.pkl'

        if data_source == 'iex':
            # iex pulls are cheap enough to refresh the whole history each time.
            data_out = get_stock_price(symbol=symbol, data_source=data_source)
            data_out.reset_index(drop=True, inplace=True)
            data_out.to_pickle(file_name)
        elif os.path.isfile(file_name):
            # Incremental update: merge a compact pull into the saved history.
            old_data = pd.read_pickle(file_name)
            while True:
                try:
                    new_data = get_stock_price(symbol=symbol, data_source=data_source, outputsize='compact')
                    # frame_indx marks provenance so drop_duplicates keeps the
                    # freshly-downloaded row when both frames share a settle_date.
                    new_data['frame_indx'] = 1
                    old_data['frame_indx'] = 0
                    merged_data = pd.concat([old_data, new_data], ignore_index=True, sort=False)
                    merged_data.sort_values(['settle_date', 'frame_indx'], ascending=[True, False], inplace=True)
                    merged_data.drop_duplicates(subset=['settle_date'], keep='first', inplace=True)
                    # BUG FIX: drop('frame_indx', 1) passed axis positionally,
                    # which was removed in pandas 2.0; use columns= instead.
                    merged_data = merged_data.drop(columns='frame_indx')
                    merged_data.reset_index(drop=True, inplace=True)
                    merged_data.to_pickle(file_name)
                    break
                except Exception as e:
                    print(e)
                    if ('API call frequency' in str(e)) or ('API call volume' in str(e)):
                        # Rate-limited: back off and retry the same symbol.
                        print('waiting 20 seconds...')
                        t.sleep(20)
                    else:
                        # Any other error: best-effort skip, matching the
                        # original behavior (logged but not raised).
                        break
        else:
            # First download for this symbol: pull the full history.
            print(str(i) + ': ' + symbol)
            while True:
                try:
                    data_out = get_stock_price(symbol=symbol, data_source=data_source, outputsize='full')
                    data_out.reset_index(drop=True, inplace=True)
                    data_out.to_pickle(file_name)
                    break
                except Exception as e:
                    print(e)
                    if ('API call frequency' in str(e)) or ('API call volume' in str(e)):
                        print('waiting 20 seconds...')
                        t.sleep(20)
                    else:
                        break
def construct_futures_butterfly_portfolio(**kwargs):
    """Backtest the futures-butterfly strategy and aggregate daily mean pnl.

    For each date in date_list, applies the long/short entry filters for the
    given rule to that day's backtest sheet, applies a one-day stop-loss
    (stopped trades realize the two-day hold pnl instead), and averages pnl
    per ticker head. Results are cached as a pickle keyed by rule_no.

    Keyword Args:
        rule_no (int): filter rule variant, 1-6. Rule 2 uses a -1000 stop;
            all other supported rules use a stop so deep it never triggers.
        backtest_output (list): one daily sheet (DataFrame) per date in
            date_list, each with 'tickerHead', pnl and hold-pnl columns.
        pnl_field (str): column holding the pnl figure to aggregate.
        date_list (list): report dates, aligned 1:1 with backtest_output.

    Returns:
        pd.DataFrame with one row per report_date, a 'portfolio' total and
        one column per ticker head (the cached frame, if present).

    Raises:
        ValueError: if rule_no is not one of the supported rules.
    """
    rule_no = kwargs['rule_no']
    backtest_output = kwargs['backtest_output']
    pnl_field = kwargs['pnl_field']

    if rule_no in [1, 3, 4, 5, 6]:
        stop_loss = -100000000000
    elif rule_no == 2:
        stop_loss = -1000
    else:
        # BUG FIX: unsupported rule_no previously fell through and raised a
        # NameError on stop_loss below; fail with a clear message instead.
        raise ValueError('unsupported rule_no: ' + str(rule_no))

    backtest_results_folder = dn.get_directory_name(ext='backtest_results')
    portfolio_file = backtest_results_folder + '/futures_butterfly/portfolio' + str(rule_no) + '.pkl'

    # Cached result short-circuits the whole backtest.
    if os.path.isfile(portfolio_file):
        return pd.read_pickle(portfolio_file)
    elif not os.path.exists(backtest_results_folder + '/futures_butterfly'):
        os.makedirs(backtest_results_folder + '/futures_butterfly')

    date_list = kwargs['date_list']
    ticker_head_list = cmi.futures_butterfly_strategy_tickerhead_list

    total_pnl_frame = pd.DataFrame({'report_date': date_list})
    total_pnl_frame['portfolio'] = 0
    for ticker_head in ticker_head_list:
        total_pnl_frame[ticker_head] = 0

    for i in range(len(date_list)):

        pnl_tickerhead_frame = pd.DataFrame({'ticker_head': ticker_head_list})
        pnl_tickerhead_frame['buy_mean_pnl'] = 0
        pnl_tickerhead_frame['sell_mean_pnl'] = 0
        pnl_tickerhead_frame['total_pnl'] = 0

        daily_sheet = backtest_output[i]

        for j in range(len(ticker_head_list)):

            ticker_head_results = daily_sheet[daily_sheet['tickerHead'] == ticker_head_list[j]]

            filter_output_long = sf.get_futures_butterfly_filters(data_frame_input=ticker_head_results,
                                                                  filter_list=['long' + str(rule_no)])
            filter_output_short = sf.get_futures_butterfly_filters(data_frame_input=ticker_head_results,
                                                                   filter_list=['short' + str(rule_no)])

            # .copy() so the stop-loss writes below are unambiguous instead of
            # relying on the suppressed SettingWithCopy behavior.
            selected_short_trades = ticker_head_results[filter_output_short['selection_indx'] &
                                                        (np.isfinite(ticker_head_results[pnl_field]))].copy()
            selected_long_trades = ticker_head_results[filter_output_long['selection_indx'] &
                                                       (np.isfinite(ticker_head_results[pnl_field]))].copy()

            if len(selected_short_trades.index) > 0:
                # Trades stopped out after day 1 realize the 2-day hold pnl instead.
                stopped = selected_short_trades['hold_pnl1short'] < stop_loss
                selected_short_trades.loc[stopped, pnl_field] = selected_short_trades.loc[stopped, 'hold_pnl2short']
                pnl_tickerhead_frame.loc[j, 'sell_mean_pnl'] = selected_short_trades[pnl_field].mean()

            if len(selected_long_trades.index) > 0:
                stopped = selected_long_trades['hold_pnl1long'] < stop_loss
                selected_long_trades.loc[stopped, pnl_field] = selected_long_trades.loc[stopped, 'hold_pnl2long']
                pnl_tickerhead_frame.loc[j, 'buy_mean_pnl'] = selected_long_trades[pnl_field].mean()

            # .loc scalar writes replace the chained frame['col'][j] = ...
            # pattern, which only worked with chained-assignment warnings off.
            pnl_tickerhead_frame.loc[j, 'total_pnl'] = (pnl_tickerhead_frame['buy_mean_pnl'][j] +
                                                        pnl_tickerhead_frame['sell_mean_pnl'][j])
            total_pnl_frame.loc[i, ticker_head_list[j]] = pnl_tickerhead_frame['total_pnl'][j]

        total_pnl_frame.loc[i, 'portfolio'] = pnl_tickerhead_frame['total_pnl'].sum()

    total_pnl_frame.to_pickle(portfolio_file)
    return total_pnl_frame