start_month = last_date.month + 1 else: start_year = last_date.year - year_delta + 1 start_month = 1 # 基础设定 history_data = History_Data[asset_list][ str(start_year) + '-' + str(start_month): last_date] predict_data = Predict_Data[asset_list][ str(start_year) + '-' + str(start_month): last_date] #cov_mat = history_data[asset_list].cov() * 12.0 cov_mat = pd.ewmcov(history_data, alpha=0.2).iloc[-3:] * 12.0 # print cov_mat omega = np.matrix(cov_mat.values) mkt_wgt = Risk_Parity_Weight(cov_mat) print mkt_wgt P = np.diag([1] * len(mkt_wgt)) conf_list = list() for each in asset_list: conf_temp = ((history_data[each][str(start_year) + '-' + str(start_month):] - predict_data[each][str(start_year) + '-' + str(start_month):])**2).mean() * 12.0 conf_list.append(conf_temp) conf_mat = np.matrix(np.diag(conf_list)) Q = np.matrix(Predict_Data[asset_list].loc[next_date]) com_ret, com_cov_mat = Combined_Return_Distribution( 2, cov_mat, tau, mkt_wgt, P, Q, conf_mat)
def Ms_RP(return_frame, switch_map):
    """Regime-dependent risk-parity weights.

    For each asset (column of `return_frame`), fit a two-state model:
    if `switch_map[asset]` is truthy, parameters come from the project
    helper `Ms_R` (presumably a Markov-switching fit — confirm); otherwise
    a degenerate two-regime setup is built where both regimes share the
    sample mean/std, the transition matrix is the identity and the smoothed
    probabilities are flat 0.5/0.5. Every combination of per-asset regimes
    (a "leaf" from `Tree_Gen`) yields a regime-conditional covariance
    matrix and a risk-parity weight vector; the function returns the
    probability-weighted sum of those weight vectors.

    Parameters
    ----------
    return_frame : pd.DataFrame
        Asset return series, one column per asset.
    switch_map : dict
        Maps column name -> truthy when that asset should use the
        regime-switching fit, falsy for the single-regime fallback.

    Returns
    -------
    Probability-weighted combination of the per-leaf risk-parity weight
    vectors (same index as `return_frame.columns`).

    NOTE(review): relies on `Ms_R`, `Cross_Cov`, `Tree_Gen` and
    `Risk_Parity_Weight` defined elsewhere in this file.
    """
    temp_columns = list(return_frame.columns)

    # Per-asset model parameters: [std, Coef, transMat, prob_smo] each.
    temp_Ms_list = []
    for each in temp_columns:
        if switch_map[each]:
            temp_std, temp_Coef, temp_transMat, temp_prob_smo = Ms_R(
                list(return_frame[each]))
        else:
            # Single-regime fallback: duplicate the sample moments into
            # two identical "regimes" so downstream code can stay uniform.
            temp_std = np.array([np.std(list(return_frame[each]))] * 2)
            temp_Coef = np.array([np.mean(list(return_frame[each]))] * 2)
            temp_transMat = np.array([[1, 0], [0, 1]]).reshape(2, 2)
            temp_prob_smo = np.array([[0.5] * len(return_frame[each]),
                                      [0.5] * len(return_frame[each])]).T
        temp_Ms_list.append(
            [temp_std, temp_Coef, temp_transMat, temp_prob_smo])
    # Columns of Ms_frame are assets; rows are 'std'/'Coef'/'transMat'/'prob_smo'.
    Ms_frame = pd.DataFrame(temp_Ms_list,
                            index=temp_columns,
                            columns=['std', 'Coef', 'transMat',
                                     'prob_smo']).T

    # Regime-conditional cross-covariances for each unordered asset pair,
    # appended in upper-triangular (row-major) order: (0,1), (0,2), ..., (1,2), ...
    temp_cov_list = []
    for each_i in temp_columns:
        for each_j in temp_columns[temp_columns.index(each_i) + 1:]:
            temp_cov_mat = Cross_Cov(return_frame[each_i],
                                     return_frame[each_j],
                                     Ms_frame[each_i]['Coef'],
                                     Ms_frame[each_j]['Coef'],
                                     Ms_frame[each_i]['prob_smo'],
                                     Ms_frame[each_j]['prob_smo'])
            temp_cov_list.append(temp_cov_mat)

    # One leaf per combination of per-asset regime states.
    Tree = Tree_Gen(len(temp_columns))
    rp_wgt_list = []
    for each_leaf in Tree:
        cov_mat_list = []
        for i in range(len(temp_columns)):
            for j in range(len(temp_columns)):
                if i == j:
                    # Diagonal: regime-specific variance.
                    cov_mat_list.append((Ms_frame[temp_columns[i]]['std'][int(
                        each_leaf[i])])**2)
                else:
                    if i < j:
                        # Flattened upper-triangular index of pair (i, j)
                        # within temp_cov_list (see fill order above).
                        location = len(temp_columns) * (i + 1) - sum(
                            range(i + 2)) - (len(temp_columns) - j)
                        cov_mat_list.append(
                            temp_cov_list[location][int(each_leaf[i]),
                                                    int(each_leaf[j])])
                    else:
                        # Symmetric entry: look up pair (j, i) instead.
                        location = len(temp_columns) * (j + 1) - sum(
                            range(j + 2)) - (len(temp_columns) - i)
                        cov_mat_list.append(
                            temp_cov_list[location][int(each_leaf[j]),
                                                    int(each_leaf[i])])
        cov_mat = np.array(cov_mat_list).reshape(len(temp_columns),
                                                 len(temp_columns))
        cov_mat = pd.DataFrame(cov_mat,
                               columns=temp_columns,
                               index=temp_columns)
        rp_wgt = Risk_Parity_Weight(cov_mat)
        rp_wgt_list.append(rp_wgt)

    # Leaf probability = product over assets of the one-step-ahead regime
    # probability (last smoothed probability propagated through transMat).
    prob_list = []
    for each_leaf in Tree:
        prob_leaf = 1
        for i in range(len(temp_columns)):
            stat = int(each_leaf[i])
            trans_mat = Ms_frame[temp_columns[i]]['transMat']
            temp_prob = Ms_frame[temp_columns[i]]['prob_smo'][
                -1, 0] * trans_mat[0, stat] + Ms_frame[
                    temp_columns[i]]['prob_smo'][-1, 1] * trans_mat[1, stat]
            prob_leaf = prob_leaf * temp_prob
        prob_list.append(prob_leaf)
    # Disabled alternative: winner-take-all on the most probable leaf.
    '''
    filt_prob_list = []
    for each_prob in prob_list:
        if each_prob == max(prob_list):
            filt_prob_list.append(1.0)
        else:
            filt_prob_list.append(0.0)
    prob_list = filt_prob_list
    '''
    # Probability-weighted mixture of the per-leaf weight vectors.
    prob_wgt_list = []
    for i in range(len(Tree)):
        prob_wgt_list.append(rp_wgt_list[i] * prob_list[i])
    return sum(prob_wgt_list)
rp_wgt = Ms_RP(data_frame, {'SP500':True, 'Barclays_US_bond':False}) ''' multi_wgt = Ms_Multi(data_frame, { 'SP500': True, 'London_gold': True, 'Barclays_US_bond': False }, 2) mu_wgt = multi_wgt["mu_wgt"] rp_wgt = multi_wgt["rp_wgt"] print mu_wgt print rp_wgt #data_frame = data[data.index[each-120]:data.index[each]] rp_wgt_bm = Risk_Parity_Weight(data_frame.cov()).round(3) mu_wgt_bm = Max_Utility_Weight_new(pd.DataFrame(data_frame.mean()), data_frame.cov(), 2, [(0.0, None)] * 3).round(3) ''' mu_wgt = Ms_MU(data_frame, {'SP500':True, 'London_gold':True, 'Barclays_US_bond':False}) rp_wgt = Ms_RP(data_frame, {'SP500':True, 'London_gold':True, 'Barclays_US_bond':False}) #data_frame = data[data.index[each-120]:data.index[each]] rp_wgt_bm = Risk_Parity_Weight(data_frame.cov()) mu_wgt_bm = Max_Utility_Weight_new(pd.DataFrame(data_frame.mean()), data_frame.cov(), 2, [(0.0,None)]*3) ''' mu_ms_return = np.sum(mu_wgt * data.loc[data.index[each + 1]]) mu_bm_return = np.sum(mu_wgt_bm * data.loc[data.index[each + 1]]) rp_ms_return = np.sum(rp_wgt * data.loc[data.index[each + 1]]) rp_bm_return = np.sum(rp_wgt_bm * data.loc[data.index[each + 1]])
def Ms_Multi(return_frame, switch_map, lam):
    """Regime-dependent risk-parity AND max-utility weights.

    Same regime-tree machinery as `Ms_RP`, but for each leaf it also
    builds the regime-conditional expected-return vector and solves a
    max-utility allocation (`Max_Utility_Weight` with risk aversion
    `lam`). Leaves whose solution contains NaN are dropped, and the
    surviving leaves' weights are renormalized by the surviving
    probability mass before averaging.

    Parameters
    ----------
    return_frame : pd.DataFrame
        Asset return series, one column per asset.
    switch_map : dict
        Per-asset switch; here a value != 1 means "use the switching
        model" and the value itself is forwarded to `Ms_R` (presumably
        the number of regimes — confirm against `Ms_R`'s signature).
    lam : float
        Risk-aversion parameter passed to `Max_Utility_Weight`.

    Returns
    -------
    dict with keys "rp_wgt" and "mu_wgt": probability-weighted,
    NaN-filtered, renormalized weight vectors rounded to 3 decimals.

    NOTE(review): relies on `Ms_R`, `Cross_Cov`, `Tree_Gen`,
    `Risk_Parity_Weight` and `Max_Utility_Weight` defined elsewhere.
    NOTE(review): here `Tree_Gen(switch_map, temp_columns)` takes two
    arguments while `Ms_RP` calls `Tree_Gen(len(...))` — confirm which
    signature is current.
    """
    temp_columns = list(return_frame.columns)

    # Per-asset model parameters: [std, Coef, transMat, prob_smo] each.
    temp_Ms_list = []
    for each in temp_columns:
        if switch_map[each] != 1:
            temp_std, temp_Coef, temp_transMat, temp_prob_smo = Ms_R(
                list(return_frame[each]), switch_map[each])
            #print temp_Coef
            #print temp_std
        else:
            # Single-regime fallback: duplicate sample moments into two
            # identical "regimes" so downstream code can stay uniform.
            temp_std = np.array([np.std(list(return_frame[each]))] * 2)
            temp_Coef = np.array([np.mean(list(return_frame[each]))] * 2)
            temp_transMat = np.array([[1, 0], [0, 1]]).reshape(2, 2)
            temp_prob_smo = np.array([[0.5] * len(return_frame[each]),
                                      [0.5] * len(return_frame[each])]).T
        temp_Ms_list.append(
            [temp_std, temp_Coef, temp_transMat, temp_prob_smo])
    # Columns of Ms_frame are assets; rows are 'std'/'Coef'/'transMat'/'prob_smo'.
    Ms_frame = pd.DataFrame(temp_Ms_list,
                            index=temp_columns,
                            columns=['std', 'Coef', 'transMat',
                                     'prob_smo']).T

    # Regime-conditional cross-covariances per unordered asset pair,
    # appended in upper-triangular (row-major) order.
    temp_cov_list = []
    for each_i in temp_columns:
        for each_j in temp_columns[temp_columns.index(each_i) + 1:]:
            temp_cov_mat = Cross_Cov(return_frame[each_i],
                                     return_frame[each_j],
                                     Ms_frame[each_i]['Coef'],
                                     Ms_frame[each_j]['Coef'],
                                     Ms_frame[each_i]['prob_smo'],
                                     Ms_frame[each_j]['prob_smo'])
            temp_cov_list.append(temp_cov_mat)
    #print temp_cov_list

    # One leaf per combination of per-asset regime states.
    Tree = Tree_Gen(switch_map, temp_columns)
    rp_wgt_list = []
    mu_wgt_list = []
    for each_leaf in Tree:
        cov_mat_temp = []
        exp_ret_temp = []
        for i in range(len(temp_columns)):
            for j in range(len(temp_columns)):
                if i == j:
                    # Diagonal: regime-specific variance; also collect the
                    # regime-specific expected return for this asset.
                    cov_mat_temp.append((Ms_frame[temp_columns[i]]['std'][int(
                        each_leaf[i])])**2)
                    exp_ret_temp.append(Ms_frame[temp_columns[i]]['Coef'][int(
                        each_leaf[i])])
                else:
                    if i < j:
                        # Flattened upper-triangular index of pair (i, j)
                        # within temp_cov_list (see fill order above).
                        location = len(temp_columns) * (i + 1) - sum(
                            range(i + 2)) - (len(temp_columns) - j)
                        cov_mat_temp.append(
                            temp_cov_list[location][int(each_leaf[i]),
                                                    int(each_leaf[j])])
                    else:
                        # Symmetric entry: look up pair (j, i) instead.
                        location = len(temp_columns) * (j + 1) - sum(
                            range(j + 2)) - (len(temp_columns) - i)
                        cov_mat_temp.append(
                            temp_cov_list[location][int(each_leaf[j]),
                                                    int(each_leaf[i])])
        exp_ret = pd.DataFrame(exp_ret_temp, index=temp_columns)
        cov_mat = np.array(cov_mat_temp).reshape(len(temp_columns),
                                                 len(temp_columns))
        cov_mat = pd.DataFrame(cov_mat,
                               columns=temp_columns,
                               index=temp_columns)
        rp_wgt = Risk_Parity_Weight(cov_mat)
        mu_wgt = Max_Utility_Weight(exp_ret, cov_mat, lam,
                                    [(0.0, None)] * len(temp_columns))
        #print mu_wgt
        #print "----"
        rp_wgt_list.append(rp_wgt)
        mu_wgt_list.append(mu_wgt)

    # Leaf probability = product over assets of the one-step-ahead regime
    # probability (last smoothed probability propagated through transMat).
    prob_list = []
    for each_leaf in Tree:
        prob_leaf = 1
        for i in range(len(temp_columns)):
            stat = int(each_leaf[i])
            trans_mat = Ms_frame[temp_columns[i]]['transMat']
            temp_prob = sum(Ms_frame[temp_columns[i]]['prob_smo'][-1, :] *
                            trans_mat[:, stat])
            #temp_prob = Ms_frame[temp_columns[i]]['prob_smo'][-1,0]*trans_mat[0,stat] + Ms_frame[temp_columns[i]]['prob_smo'][-1,1]*trans_mat[1,stat]
            prob_leaf = prob_leaf * temp_prob
        prob_list.append(prob_leaf)
    #print prob_list
    # Disabled alternative: winner-take-all on the most probable leaf.
    '''
    filt_prob_list = []
    for each_prob in prob_list:
        if each_prob == max(prob_list):
            filt_prob_list.append(1.0)
        else:
            filt_prob_list.append(0.0)
    prob_list = filt_prob_list
    '''
    # Drop leaves whose optimizer output contains NaN; keep the matching
    # probabilities so the mixture can be renormalized per output.
    prob_rp_wgt_list = []
    prob_mu_wgt_list = []
    prob_rp_list = []
    prob_mu_list = []
    for i in range(len(Tree)):
        if any(np.isnan(rp_wgt_list[i])):
            pass
        else:
            prob_rp_wgt_list.append(rp_wgt_list[i] * prob_list[i])
            prob_rp_list.append(prob_list[i])
        if any(np.isnan(mu_wgt_list[i])):
            pass
        else:
            prob_mu_wgt_list.append(mu_wgt_list[i] * prob_list[i])
            prob_mu_list.append(prob_list[i])
    #print np.sum(prob_mu_wgt_list)
    return {
        "rp_wgt": (sum(prob_rp_wgt_list) / sum(prob_rp_list)).round(3),
        "mu_wgt": (sum(prob_mu_wgt_list) / sum(prob_mu_list)).round(3)
    }
def Backtest_CPPI_BL_step(History_Data, Predict_Data, History_Data_D,
                          asset_list, risk_list, bnds, asset_level_1,
                          asset_level_2, year_delta, portfolio_name,
                          money_weight, up_per, target_ret, multiplier,
                          max_risk_weight):
    """Monthly backtest combining Black-Litterman allocation with a
    CPPI-style risk budget and per-asset drawdown stop-outs.

    Each month: build a risk-parity prior on `risk_list` from daily
    history, blend it with the model forecasts via
    `Combined_Return_Distribution` (Black-Litterman), solve a max-utility
    allocation, scale the risky sleeve by a CPPI cushion, and finally
    apply a three-state (1 / 0.5 / 0) per-asset drawdown position filter.

    Parameters
    ----------
    History_Data, Predict_Data : pd.DataFrame
        Monthly realized / forecast returns, datetime-indexed.
    History_Data_D : pd.DataFrame
        Daily returns used for the covariance estimate.
    asset_list : list
        All tradable assets; exactly one of them is the cash-like asset
        (asset_list minus risk_list must have length 1).
    risk_list : list
        Risky subset that gets the BL/CPPI treatment.
    bnds : list
        Weight bounds for `Max_Utility_Weight`.
    asset_level_1, asset_level_2 : mapping
        Drawdown thresholds (level_2 < level_1 <= 0) for halving /
        closing a position.
    year_delta : int
        Lookback window length in years.
    portfolio_name : str
        One of "wenjian" / "pingheng" / "jinqu"; selects risk aversion.
    money_weight, up_per, target_ret, multiplier, max_risk_weight :
        CPPI parameters (cash budget, ratchet step, target return,
        cushion multiplier, risky-weight cap).

    Returns
    -------
    pd.Series of monthly portfolio returns indexed by date.

    Raises
    ------
    Exception
        If `portfolio_name` is not one of the three known portfolios.

    NOTE(review): relies on `Risk_Parity_Weight`,
    `Combined_Return_Distribution`, `Max_Utility_Weight`, `Cushion_Cal`
    and `VaR_Cal` defined elsewhere in this file.
    """
    tau = 1.0
    # Risk aversion per portfolio: aggressive 1.7 / balanced 1.9 / conservative 2.3.
    if portfolio_name == "wenjian":
        lam = 2.3
    elif portfolio_name == "pingheng":
        lam = 1.9
    elif portfolio_name == "jinqu":
        lam = 1.7
    else:
        raise Exception("Wrong portfolio_name!")

    pct_list = []
    weight_list = []
    date_list = []
    # Running drawdown (<= 0) and position state (1 / 0.5 / 0) per asset.
    asset_drawdown = pd.Series([0.0] * len(asset_list), index=asset_list)
    asset_position = pd.Series([1.0] * len(asset_list), index=asset_list)

    for each_date in Predict_Data.index[60:-1]:
        # Date of the most recent realized month vs. the month being predicted.
        last_date = History_Data.index[list(
            Predict_Data.index).index(each_date) - 1]
        next_date = each_date
        # Start of the rolling lookback window, `year_delta` years back.
        if last_date.month <= 11:
            start_year = last_date.year - year_delta
            start_month = last_date.month + 1
        else:
            start_year = last_date.year - year_delta + 1
            start_month = 1

        # Window the data (pandas partial-string date slicing).
        history_data = History_Data[str(start_year) + '-' +
                                    str(start_month):last_date]
        history_data_d = History_Data_D[str(start_year) + '-' +
                                        str(start_month):last_date]
        predict_data = Predict_Data[str(start_year) + '-' +
                                    str(start_month):last_date]
        # Annualize daily covariance by (observations per year).
        # NOTE(review): under Python 2 this is integer division; presumably
        # an exact float ratio was intended — confirm.
        cov_mat = history_data_d[risk_list].cov() * (len(history_data_d) /
                                                     year_delta)

        # Compound each risky asset's drawdown with this month's return;
        # a new high resets it to 0.
        for each_asset in risk_list:
            temp_drawdown = (asset_drawdown[each_asset] + 1.0) * (
                history_data[each_asset][-1] + 1.0) - 1
            if temp_drawdown >= 0:
                temp_drawdown = 0
            asset_drawdown[each_asset] = temp_drawdown

        # Black-Litterman: risk-parity prior weights, identity pick matrix,
        # view confidence = annualized MSE of past forecasts.
        mkt_wgt = Risk_Parity_Weight(cov_mat)
        P = np.diag([1] * len(mkt_wgt))
        conf_list = list()
        for each in risk_list:
            conf_temp = ((
                history_data[each][str(start_year) + '-' + str(start_month):]
                - predict_data[each][str(start_year) + '-' +
                                     str(start_month):])**2).mean() * 12.0
            conf_list.append(conf_temp)
        conf_mat = np.matrix(np.diag(conf_list))
        Q = np.matrix(Predict_Data[risk_list].loc[next_date])
        com_ret, com_cov_mat = Combined_Return_Distribution(
            2, cov_mat, tau, mkt_wgt, P, Q, conf_mat)
        weight_bl = Max_Utility_Weight(com_ret, com_cov_mat, lam, bnds)

        # Annualized return of the single cash-like asset.
        rf_ret = history_data[list(set(asset_list) -
                                   set(risk_list))[0]].mean() * 12

        # CPPI: current NAV and ratcheting threshold.
        if len(pct_list) == 0:
            current_nv = 1
            nv_threshold = 1
        else:
            current_nv = (np.array(pct_list) + 1).prod()
        if current_nv > (1 + up_per) * nv_threshold:
            nv_threshold = (1 + up_per) * nv_threshold
        cushion_per = Cushion_Cal(nv=current_nv,
                                  nv_threshold=nv_threshold,
                                  rf_ret=rf_ret,
                                  target_ret=target_ret)
        # Diagnostic implied multiplier from 99% monthly VaR; currently unused.
        # FIX: horizon was `1 / 12`, which is 0 under Python 2 integer division.
        para_m = 0.01 / (-VaR_Cal(0.99, [0] * len(risk_list),
                                  history_data[risk_list].cov() * 12.0,
                                  1.0 / 12, weight_bl[risk_list]))

        # Scale the risky sleeve by the (capped) cushion; cash takes the rest
        # of the money budget.
        risk_weight = min(cushion_per * multiplier, max_risk_weight)
        weight_bl[risk_list] = weight_bl[risk_list] * risk_weight
        weight_bl[list(set(asset_list) -
                       set(risk_list))[0]] = money_weight - sum(
                           weight_bl[risk_list])

        # Per-asset position state machine driven by drawdown thresholds:
        # full -> half -> flat on deepening drawdown; re-enter on positive
        # forecast AND positive realized return (which also resets drawdown).
        for each_asset in asset_list:
            if asset_position[each_asset] == 1:
                if (asset_drawdown[each_asset] <= asset_level_1[each_asset]
                    ) and (asset_drawdown[each_asset] >
                           asset_level_2[each_asset]):
                    asset_position[each_asset] = 0.5
                elif asset_drawdown[each_asset] <= asset_level_2[each_asset]:
                    asset_position[each_asset] = 0.0
            elif asset_position[each_asset] == 0.5:
                # FIX: original compared asset_position (0.5) against the
                # drawdown threshold, so a half position could never be
                # stopped out; the drawdown is the intended quantity.
                if asset_drawdown[each_asset] <= asset_level_2[each_asset]:
                    asset_position[each_asset] = 0.0
                elif (predict_data[each_asset][-1] >
                      0) and (history_data[each_asset][-1] > 0):
                    asset_position[each_asset] = 1.0
                    asset_drawdown[each_asset] = 0.0
            elif asset_position[each_asset] == 0.0:
                if (predict_data[each_asset][-1] >
                        0) and (history_data[each_asset][-1] > 0):
                    asset_position[each_asset] = 0.5
                    asset_drawdown[each_asset] = 0.0

        weight_bl = (weight_bl * asset_position).round(2)
        # Unallocated weight earns the money-market return.
        port_ret = sum(weight_bl * History_Data[asset_list].loc[next_date]) + (
            1.0 - sum(weight_bl)) * History_Data["money"].loc[next_date]
        pct_list.append(port_ret)
        weight_list.append(list(weight_bl))
        date_list.append(next_date)
    return pd.Series(np.array(pct_list), index=date_list)
rho_matrix_dcc[i][j] = temp_rho cov_matrix_garch_dcc = np.matrix( np.diag(std_list_garch)) * np.matrix(rho_matrix_dcc) * np.matrix( np.diag(std_list_garch)) cov_matrix_garch = np.matrix(np.diag(std_list_garch)) * np.matrix( return_frame.corr()) * np.matrix(np.diag(std_list_garch)) cov_matrix_all_garch_dcc = return_frame_all.cov() cov_matrix_all_garch_dcc.loc[asset_col, asset_col] = cov_matrix_garch_dcc cov_matrix_all_garch = return_frame_all.cov() cov_matrix_all_garch.loc[asset_col, asset_col] = cov_matrix_garch rw = Risk_Parity_Weight(return_frame_all.cov()).round(3) rw_gd = Risk_Parity_Weight(cov_matrix_all_garch_dcc).round(3) rw_g = Risk_Parity_Weight(cov_matrix_all_garch).round(3) ''' rw = Max_Utility_Weight_new(pd.DataFrame(return_frame_all.mean()), return_frame_all.cov(), 5, [(0.0,None)]*len(asset_col_all)).round(3) rw_gd = Max_Utility_Weight_new(pd.DataFrame(return_frame_all.mean()), cov_matrix_all_garch_dcc, 5, [(0.0,None)]*len(asset_col_all)).round(3) rw_g = Max_Utility_Weight_new(pd.DataFrame(return_frame_all.mean()), cov_matrix_all_garch, 5, [(0.0,None)]*len(asset_col_all)).round(3) ''' mw = Max_Utility_Weight_new(pd.DataFrame(return_frame_all.mean()), return_frame_all.cov(), 2, [(0.0, None)] * len(asset_col_all)).round(3) mw_gd = Max_Utility_Weight_new( pd.DataFrame(return_frame_all.mean()), cov_matrix_all_garch_dcc, 2, [(0.0, None)] * len(asset_col_all)).round(3) mw_g = Max_Utility_Weight_new(pd.DataFrame(return_frame_all.mean()), cov_matrix_all_garch, 2, [(0.0, None)] *