def __init__(self, frequency='daily', list_remove_days=0, fillna_method='median',
             fillna_value=None, fillna_lookback=0):
    """Build the cleaned stock-return matrix used downstream.

    Pipeline: load close prices (data window 2010.1.1-2017.8.30), resample to
    *frequency*, drop recently listed names, drop suspended names, compute
    simple returns, then fill missing values with *fillna_method*.
    """
    # The y dict contains: 'stock_suspend_date', 'index_weight', 'benchmark',
    # 'stock_list_date', 'stock_close_price' (~90mb on disk).
    raw = gftIO.zload('Data/y_5.pkl')
    close_prices = raw['stock_close_price']
    resampled = self.resample(close_prices, frequency)  # (91, 3437)
    list_dates = gftIO.zload('Data/list_date.pkl').asColumnTab().dropna()  # 3442
    listed_only = self.listed_filter(resampled, list_dates, list_remove_days)
    suspend_info = gftIO.zload('Data/stock_suspend.pkl')
    tradable = self.suspended_filter(listed_only, suspend_info)  # [91 rows x 2637 columns]
    simple_rets = self.simple_return(tradable)  # [86 rows x 2566 columns]
    self.stock_cleaned = self.fillna(simple_rets, fillna_method, fillna_value,
                                     fillna_lookback)  # (86, 2566)
def debug__gsWrapper__():
    """Replay __gsWrapper__ with the inputs captured under /home/gft/data."""
    pkl_paths = (
        "/home/gft/data/context.pkl",
        "/home/gft/data/x0.pkl",
        "/home/gft/data/x1.pkl",
        "/home/gft/data/x2.pkl",
        "/home/gft/data/x3.pkl",
    )
    context, x0, x1, x2, x3 = (gftIO.zload(p) for p in pkl_paths)
    __gsWrapper__(context, x0, x1, x2, x3)
def get_data_from_gs(gscall, jgid, start_date, end_date, filename=None):
    """Request data *jgid* from the gs service, then load the resulting file.

    Parameters
    ----------
    gscall : object exposing ``call_vq`` -- the gs service client.
    jgid : str -- identifier of the data to fetch.
    start_date, end_date : str or pd.Timestamp -- requested date range.
    filename : str, optional -- target file name; defaults to ``jgid + '.pkl'``.

    Returns the unpickled object on success; raises Exception when the dates
    have the wrong type or the file was not produced by the server.
    """
    if filename is None or len(filename) == 0:
        filename = jgid + '.pkl'
    if isinstance(start_date, str):
        start_ts = pd.Timestamp(start_date)
    elif not isinstance(start_date, pd.Timestamp):
        raise Exception("Start date must be pd.Timestamp or str")
    else:
        start_ts = start_date
    if isinstance(end_date, str):
        end_ts = pd.Timestamp(end_date)
    elif not isinstance(end_date, pd.Timestamp):
        raise Exception("End date must be pd.Timestamp or str")
    else:
        # BUG FIX: this branch previously assigned start_date, silently
        # collapsing the requested range to a single instant.
        end_ts = end_date
    sever_ret = gscall.call_vq(
        4, 5, create_view_data_req(jgid, start_ts, end_ts, filename))
    filepath = data_root_path + filename
    if os.path.exists(filepath):
        return gftIO.zload(filepath)
    raise Exception("Load j from gs failed, server ret is : " +
                    (str(sever_ret)[:100]))
def data():
    """Return the positive entries of column 1500 of the first loaded table,
    with NaN treated as 0 (and therefore excluded)."""
    tables = gftIO.zload('Data/y_5.pkl')  # load data: dict of DataFrames
    first_table = list(tables.values())[0]
    column = list(np.nan_to_num(list(first_table.iloc[:, 1500])))
    return [v for v in column if v > 0]
def setUpClass(cls):
    """Load the statistics fixtures once for the whole test class and derive
    the rebalancing period from the return index spacing."""
    path = r'/home/weiwu/projects/simulate/data/stats/'
    cls.f_risk_free_rate = 0.0
    cls.f_risk_free_rate = gftIO.zload(path + 'f_risk_free_rate.pkl')
    # Each table is attached to the class only when it really is a GftTable,
    # mirroring the original per-variable isinstance checks.
    for table_name in ('df_single_period_return', 'benchmark_ret', 'holding',
                       'closing_price', 'market_capital'):
        loaded = gftIO.zload(path + table_name + '.pkl')
        if isinstance(loaded, gftIO.GftTable):
            setattr(cls, table_name, loaded.asMatrix().copy())
    # Average spacing between consecutive return dates decides the frequency.
    dt_diff = cls.df_single_period_return.index.to_series().diff().mean()
    if dt_diff < pd.Timedelta('3 days'):
        cls.periods = gsConst.Const.DAILY
    elif dt_diff > pd.Timedelta('3 days') and dt_diff < pd.Timedelta('10 days'):
        cls.periods = gsConst.Const.WEEKLY
    else:
        cls.periods = gsConst.Const.MONTHLY
def find_index():
    """Rank companies by how many valid (non-NaN, non-zero) close prices they
    have.

    Returns
    -------
    index1 : np.ndarray -- company column positions, ascending by valid count.
    y_value : np.ndarray -- the raw 2-D price array (rows=dates, cols=firms).
    """
    y_dic = gftIO.zload('y_5.pkl')  # load data (dict format)
    # One column per company's close-price series.
    y_value = np.array(y_dic['stock_close_price'])
    num = []
    for i in range(np.shape(y_value)[1]):  # iterate over companies
        col = np.nan_to_num(y_value[:, i])  # NaN -> 0
        # BUG FIX: the original removed zeros with y.remove(j) while iterating
        # over y, which skips consecutive zeros and over-counts. Counting the
        # non-zero entries directly gives the intended length.
        num.append(int(np.count_nonzero(col)))
    index1 = np.argsort(num)  # ascending sort of valid-price counts
    return index1, y_value
def __init__(self, file_path='database/data.pkl'):
    """Load the factor dictionary, relabel its columns, and build a cleaned
    (forward-filled, fully-populated) panel restricted to recent history."""
    tables = gftIO.zload(file_path)
    all_factors = list()
    decoded_columns = None
    for factor_name, frame in tables.items():
        if decoded_columns is None:
            # Decode the binary o-set labels once, from the first table;
            # every table is assumed to share the same column set.
            decoded_columns = self._bin_o_set_2_str(frame.columns.values)
        frame.columns = decoded_columns
        all_factors.append(factor_name)
    panel = pd.Panel(tables).transpose(0, 2, 1)
    # print(len(panel.major_axis), len(panel.minor_axis))
    # Cut off old data before the split date.
    split_date = pd.to_datetime('20150701', format='%Y%m%d')
    panel = panel.loc[:, :, panel.minor_axis >= split_date]
    filled = panel.fillna(axis=2, method='ffill')
    self.panel = filled.dropna(axis=1, how='any')
def debug__gsWrapper__():
    """Replay __gsWrapper__ with the inputs captured under /home/gft/data."""
    context = gftIO.zload("/home/gft/data/context.pkl")
    x0 = gftIO.zload("/home/gft/data/x0.pkl")
    x1 = gftIO.zload("/home/gft/data/x1.pkl")
    x2 = gftIO.zload("/home/gft/data/x2.pkl")
    x3 = gftIO.zload("/home/gft/data/x3.pkl")
    __gsWrapper__(context, x0, x1, x2, x3)


# In[14]:
from lib.gftTools import gftIO

# In[17]:
# Load the captured wrapper inputs at module level for interactive use.
context = gftIO.zload("/home/gft/data/context.pkl")
x0 = gftIO.zload("/home/gft/data/x0.pkl")
x1 = gftIO.zload("/home/gft/data/x1.pkl")
x2 = gftIO.zload("/home/gft/data/x2.pkl")
x3 = gftIO.zload("/home/gft/data/x3.pkl")

# In[16]:
df_ret = x0
df_expo = x1
int_max_iter = 2000
int_cv = 10

# In[30]:
test = Lasso(context, df_ret, df_expo, int_max_iter, int_cv)
benchmark_ret, f_risk_free_rate, periods) result[gsConst.Const.BenchmarkAnnualVolatility] = annual_volatility( benchmark_ret, period=periods) result[gsConst.Const.BenchmarStdReturn] = return_std(benchmark_ret) result[gsConst.Const.BenchmarkMaxDrawdownRate] = cal_max_dd( benchmark_ret) result[gsConst.Const.BenchmarkCumulativeReturn] = cum_returns( benchmark_ret) result[gsConst.Const.ExcessAnnualReturn] = excess_annual_return( df_single_period_return, benchmark_ret, period=periods) return result path = r'/home/weiwu/projects/simulate/data/stats/' df_single_period_return = gftIO.zload(path + 'df_single_period_return.pkl') f_risk_free_rate = gftIO.zload(path + 'f_risk_free_rate.pkl') benchmark_ret = gftIO.zload(path + 'benchmark_ret.pkl') holding = gftIO.zload(path + 'holding.pkl') closing_price = gftIO.zload(path + 'closing_price.pkl') market_capital = gftIO.zload(path + 'market_capital.pkl') if isinstance(df_single_period_return, gftIO.GftTable): df_single_period_return = df_single_period_return.asMatrix().copy() if isinstance(benchmark_ret, gftIO.GftTable): benchmark_ret = benchmark_ret.asMatrix().copy() if isinstance(holding, gftIO.GftTable): holding = holding.asMatrix().copy()
from copy import copy from lib.gftTools import gftIO from scipy import linalg import matplotlib.pyplot as plt from utils.winsorize_mad import winsorize_mad logger = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) if not logger.handlers: logger.addHandler(handler) logger.setLevel(logging.DEBUG) C = gftIO.zload("/home/weiwu/share/black_litterman/C.pkl") ROE = gftIO.zload( "/home/weiwu/share/black_litterman/ROE_daily_PIT_change.pkl") delta = gftIO.zload("/home/weiwu/share/black_litterman/delta.pkl") historical_ret = gftIO.zload( "/home/weiwu/share/black_litterman/historical_ret.pkl") # Q = gftIO.zload("/home/weiwu/share/black_litterman/Q.pkl") tau = gftIO.zload("/home/weiwu/share/black_litterman/tau.pkl") weq = gftIO.zload("/home/weiwu/share/black_litterman/weq.pkl") if isinstance(historical_ret, gftIO.GftTable): historical_ret = historical_ret.asMatrix().copy() # historical_ret.fillna(0) # In [139]: historical_ret.shape # Out[140]: (451, 3437) if isinstance(ROE, gftIO.GftTable):
if type(input_data) is gftIO.GftTable: input_data = input_data.asMatrix() input_data = input_data.dropna(axis=0, how='all') if input_data.columns.dtype == 'datetime64[ns]': return input_data.T else: return input_data elif type(input_data) is np.ndarray: return input_data.astype(datetime) elif type(input_data) is pd.tseries.index.DatetimeIndex: return input_data[0] else: return input_data data = gftIO.zload( r'd:\Wuwei\Project\simulator\data\monthly_rebalance_data_same_as_R_5.pkl') data = gftIO.transformDict4Name(data) for key, value in data.items(): data[key] = parse_data(value) data['begin_date'] = data.pop('x0') data['end_date'] = data.pop('x1') data['initial_holding'] = data.pop('x2') data['target_portfolio_weight'] = data.pop('x3') data['market_to_market_price'] = data.pop('x4') data['total_return_factor'] = data.pop('x5') data['execute_price'] = data.pop('x6') data['execute_price_return'] = data.pop('x7') data['trade_volume'] = data.pop('x8') data['trading_param'] = data.pop('x9') data['additional_Ts'] = data.pop('x10')
return {'weight':df_opts_weight.dropna(axis=0, how='all'), 'status':df_opts_status} # import datetime # time_start = datetime.datetime.now() logger = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) if not handler: logger.addHandler(handler) logger.setLevel(logging.DEBUG) logger.debug('start') # #if not context: context = gftIO.zload("/home/weiwu/share/optimize/context.pkl") mode = gftIO.zload("/home/weiwu/share/optimize/mode.pkl") position_limit = gftIO.zload("/home/weiwu/share/optimize/position_limit.pkl") forecast_return = gftIO.zload("/home/weiwu/share/optimize/forecast_return.pkl") #original_portfolio = gftIO.zload("/home/weiwu/share/optimize/original_portfolio.pkl") original_portfolio_hs300 = gftIO.zload("/home/weiwu/share/optimize/original_portfolio_hs300.pkl") target_risk = gftIO.zload("/home/weiwu/share/optimize/target_risk.pkl") target_return = gftIO.zload("/home/weiwu/share/optimize/target_return.pkl") X = gftIO.zload("/home/weiwu/share/optimize/X.pkl") covariance_matrix = gftIO.zload("/home/weiwu/share/optimize/covariance_matrix.pkl") delta = gftIO.zload("/home/weiwu/share/optimize/delta.pkl") constraint1 = gftIO.zload("/home/weiwu/share/optimize/individual_asset_weight_constraint.pkl") constraint2 = gftIO.zload("/home/weiwu/share/optimize/industry_weight_constraint.pkl") factor_exposure_constraint = gftIO.zload("/home/weiwu/share/optimize/factor_exposure_constraint.pkl") null = gftIO.zload("/home/weiwu/share/optimize/NULL.pkl")
replace_min_x = -madvalue * maxValue * (1 + (abs(min_x)).rank( ascending=True, method='average') / len(min_x) / 10000) result[result < -maxValue * madvalue] = replace_min_x else: absresult = abs(result) result = (np.sign(result)) * (absresult.where( absresult <= maxValue * madvalue, maxValue * madvalue)) result = result + medianvalue result = pd.DataFrame(result).reindex(ls_x_var).assign( date=date).reset_index() return result else: return raw_return def winsorize_mad(x, maxValue=5, keepOrder=0): x = x.asColumnTab().copy() ls_date = np.unique(x.idname) ls_mad_result = [Mad(x, maxValue, keepOrder, date) for date in ls_date] result = pd.concat(ls_mad_result) return result.pivot( index='date', columns='variable', values='value').dropna( how='all', axis=1, inplace=True) if __name__ == '__main__': ROE = gftIO.zload("/home/weiwu/share/black_litterman/ROE_cur_year.pkl") result = winsorize_mad(ROE)
# -*- coding: utf-8 -*- import pandas as pd import numpy as np import re import os import warnings from cvxopt import matrix, solvers, spmatrix, sparse from cvxopt.blas import dot from lib.gftTools import gftIO # fetch data <<<<<<< HEAD path = '/home/weiwu/share/optimize/' alpha_return = gftIO.zload(os.path.join(path, 'alpha_return.pkl')) asset_constraint = gftIO.zload(os.path.join(path, 'asset_constraint.pkl')) asset_return = gftIO.zload(os.path.join(path, 'asset_return.pkl')) asset_weight = gftIO.zload(os.path.join(path, 'asset_weight.pkl')) beta_transaction = gftIO.zload(os.path.join(path, 'beta_transaction.pkl')) exposure_constraint = gftIO.zload(os.path.join(path, 'exposure_constraint.pkl')) factor_exposure = gftIO.zload(os.path.join(path, 'factor_exposure.pkl')) group_constraint = gftIO.zload(os.path.join(path, 'group_constraint.pkl')) lambda_risk = gftIO.zload(os.path.join(path, 'lambda_risk.pkl')) position_limit = gftIO.zload(os.path.join(path, 'position_limit.pkl')) ======= path = '~/share/' target_mode = gftIO.zload(os.path.join(path, 'x0.pkl')) position_limit = gftIO.zload(os.path.join(path, 'x1.pkl'))
if 'idname' in multiplicand: multiplicand.drop('idname', axis=1, inplace=True) datetimeindex = multiplicand.index.intersection(multiplier_panel.items) product = pd.DataFrame(data=np.nan, index=datetimeindex, columns=multiplier_panel.minor_axis) for target_date in datetimeindex: product.ix[target_date] = multiplier_panel[target_date].loc[ multiplicand.columns, :].T.fillna(0).dot( multiplicand.ix[target_date].fillna(0)) return product context = gftIO.zload("/home/weiwu/share/optimize/context.pkl") multiplier_panel = gftIO.zload( "/home/weiwu/share/optimize/group_sparse_panel.pkl") multiplicand = gftIO.zload("/home/weiwu/share/optimize/hs300_weight.pkl") multiply_panel(context, multiplier_panel, multiplicand) # if isinstance(multiplicand, gftIO.GftTable): # multiplicand = multiplicand.asMatrix().copy() # if not isinstance(multiplicand.index, pd.DatetimeIndex): # multiplicand.set_index('idname', inplace=True) # if 'idname' in multiplicand: # multiplicand.drop('idname', axis=1, inplace=True) # datetimeindex = multiplicand.index.intersection(multiplier_panel.items) # product = pd.DataFrame(data=np.nan, index=datetimeindex, columns=multiplier_panel.minor_axis) # for target_date in datetimeindex: # product.ix[target_date] = multiplier_panel[target_date].loc[multiplicand.columns,:].T.fillna(0).dot(multiplicand.ix[target_date].fillna(0))
index=['constrain'], columns=sty_factor_name) df_wgt_con_fnl = pd.concat([df_wgt_con, df_con_add], axis=1) return pd.concat( [dict_df_fexpo[date], df_wgt_con_fnl.assign(countryfactor=0)], axis=0) """ Regression with xarray ---------------------------------------------------------------------- """ logger = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) if not logger.handlers: logger.addHandler(handler) logger.setLevel(logging.DEBUG) risk_model_path = '/home/weiwu/share/risk_model/' # keep from double loading logger.debug('load xarray data') xr_exposure = gftIO.zload(os.path.join(risk_model_path, 'risk_model.zpkl')) # get the datetimeindex for dim in X.dims: if X[dim].values.dtype == np.dtype('<M8[ns]'): date = dim
# -*- coding: utf-8 -*- """ Created on Tue Jul 18 13:21:31 2017 @author: gft """ ##change information ##当前要计算的日期范围和 传入的risk model计算的日期范围不一致,取最新一期的权重 from lib.gftTools import gftIO x0 = gftIO.zload("x0.pkl") x1 = gftIO.zload("x1.pkl") x2 = gftIO.zload("x2.pkl") x3 = gftIO.zload("x3.pkl") x4 = gftIO.zload("x4.pkl") ''' 49EFD5C6530545618490610BE4103358 merge_dicts by smp 0D9E9789D2EFE27F77B8AE71F7519EF0 riskmodel_merge by wjj newest gid:2F8CFDC0CC008CF1F219537670A4C57C ''' ''' from lib.gftTools import gftIO x0 = gftIO.zload("x0.pkl") x1 = gftIO.zload("x1.pkl") x2 = gftIO.zload("x2.pkl")
if isinstance(df_limit, pd.DataFrame): return [ obj >= df_limit.iloc[:, 0].values, obj <= df_limit.iloc[:, 1].values ] logger = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) if not logger.handlers: logger.addHandler(handler) logger.setLevel(logging.DEBUG) x0 = gftIO.zload("/home/weiwu/share/optimize/x0.pkl") x1 = gftIO.zload("/home/weiwu/share/optimize/x1.pkl") x2 = gftIO.zload("/home/weiwu/share/optimize/risk_model_201708.pkl") x3 = gftIO.zload("/home/weiwu/share/optimize/x3.pkl") x4 = gftIO.zload("/home/weiwu/share/optimize/x4.pkl") x5 = gftIO.zload("/home/weiwu/share/optimize/x5.pkl") x6 = gftIO.zload("/home/weiwu/share/optimize/x6.pkl") x7 = gftIO.zload("/home/weiwu/share/optimize//x7.pkl") x8 = gftIO.zload("/home/weiwu/share/optimize//x8.pkl") x9 = gftIO.zload("/home/weiwu/share/optimize//x9.pkl") x10 = gftIO.zload("/home/weiwu/share/optimize/exposure_constraint.pkl") risk_model_201707 = gftIO.zload("/home/weiwu/share/optimize/x2.pkl") target_mode = x0 position_limit = x1 risk_model = x2 asset_return = x3
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
from datetime import datetime
import scipy.optimize as sco
from lib.gftTools import gftIO

# fetch data -- the second assignment deliberately overrides the first path.
path = '~/projects/simulate/data/optimization/'
path = '~/share/optimize/'
target_mode = gftIO.zload(path + 'x0.pkl')
position_limit = gftIO.zload(path + 'x1.pkl')
covariance_matrix = gftIO.zload(path + 'x2.pkl')
asset_return = gftIO.zload(path + 'x3.pkl')
asset_weight = gftIO.zload(path + 'x4.pkl')
target_risk = gftIO.zload(path + 'x5.pkl')
target_return = gftIO.zload(path + 'x6.pkl')
risk_model = gftIO.zload(path + 'risk_model.pkl')

# Assign the specific rebalancing date used throughout the experiment.
target_date = datetime(year=2015, month=7, day=31)

# Load the initial portfolio weight allocation data.
data = gftIO.zload(path + 'monthly_rebalance_data_same_as_R_5.pkl')
data['target_portfolio_weight'] = data.pop('x3')
df_target_portfolio_weight = data['target_portfolio_weight'].\
    asMatrix().dropna(axis=0, how='all')

# Hard-coded targets override the loaded pickles above.
target_risk = 0.00087447
target_return = 0.0262495
la_period -- lookahead period, days stock_ret -- stock return """ pass logger = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) if not logger.handlers: logger.addHandler(handler) logger.setLevel(logging.DEBUG) C = gftIO.zload("/home/weiwu/share/black_litterman/C.pkl") ROE = gftIO.zload("/home/weiwu/share/black_litterman/ROE_cur_year.pkl") ROE_forecast = gftIO.zload( "/home/weiwu/share/black_litterman/ROE_cur_year.pkl") delta = gftIO.zload("/home/weiwu/share/black_litterman/delta.pkl") historical_ret = gftIO.zload( "/home/weiwu/share/black_litterman/historical_ret.pkl") # Q = gftIO.zload("/home/weiwu/share/black_litterman/Q.pkl") tau = gftIO.zload("/home/weiwu/share/black_litterman/tau.pkl") weq = gftIO.zload("/home/weiwu/share/black_litterman/weq.pkl") if isinstance(historical_ret, gftIO.GftTable): historical_ret = historical_ret.asMatrix().copy() # historical_ret.fillna(0) # In [139]: historical_ret.shape # Out[140]: (451, 3437)
"""----------------------------------------------------------------------""" logger = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) if not logger.handlers: logger.addHandler(handler) logger.setLevel(logging.DEBUG) risk_model_path = '/home/weiwu/share/risk_model/' # keep from double loading stock_return = gftIO.zload(os.path.join(risk_model_path, 'stock_return.pkl')) factors = gftIO.zload(os.path.join(risk_model_path, 'factors.pkl')) market_capital = gftIO.zload( os.path.join(risk_model_path, 'market_capital.pkl')) corr_half_life = gftIO.zload( os.path.join(risk_model_path, 'corr_half_life.pkl')) var_half_life = gftIO.zload(os.path.join(risk_model_path, 'var_half_life.pkl')) model = risk_model(stock_return, factors, market_capital, corr_half_life, var_half_life) ylog.debug('parse data') # get all factor names ls_fexponame = factors['osets'].asColumnTab()['O0'].apply( gftIO.gidInt2Str).tolist() ind_factor_name = factors[ls_fexponame[0]].asColumnTab()['O0'].apply(
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import re
import os
import warnings
from cvxopt import matrix, solvers, spmatrix, sparse
from cvxopt.blas import dot
from lib.gftTools import gftIO

# fetch data -- work relative to the share directory so zload gets a
# bare file name.
path = "/home/weiwu/share/"
os.chdir(path)
#target_mode = gftIO.zload(os.path.join(path, 'factor_exposure.pkl'))
target_mode = gftIO.zload('factor_exposure.pkl')
import matplotlib.pyplot as plt from future_position import create_future_rollover_position as rp logger = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) if not handler: logger.addHandler(handler) logger.setLevel(logging.DEBUG) logger.debug('start') path = r'/home/weiwu/projects/simulate/data/future/' start_date = gftIO.zload(os.path.join(path, 'start_date.pkl')) end_date = gftIO.zload(os.path.join(path, 'end_date.pkl')) data = gftIO.zload(os.path.join(path, 'contract_data.pkl')) target = gftIO.zload(os.path.join(path, 'target.pkl')) df_commission = gftIO.zload(os.path.join(path, 'df_commission_fee.pkl')) df_position = gftIO.zload(os.path.join(path, 'df_position.pkl')) df_price = gftIO.zload(os.path.join(path, 'df_price.pkl')) df_multiplier = gftIO.zload(os.path.join(path, 'df_multiplier.pkl')) if isinstance(df_commission, gftIO.GftTable): df_commission = df_commission.asColumnTab().copy() if isinstance(df_position, gftIO.GftTable): df_position = df_position.asMatrix().copy() if isinstance(df_price, gftIO.GftTable): df_price = df_price.asColumnTab().copy() if isinstance(df_multiplier, gftIO.GftTable):
# -*- coding: utf-8 -*-
"""
Spyder Editor
"""
import pandas as pd
import numpy as np
from datetime import datetime
import scipy.optimize as sco
from lib.gftTools import gftIO

# fetch the optimizer inputs from the Windows share
path = r'd:/share/optimize/'
target_mode = gftIO.zload(path + 'x0.pkl')
position_limit = gftIO.zload(path + 'x1.pkl')
asset_return = gftIO.zload(path + 'x3.pkl')
target_risk = gftIO.zload(path + 'x5.pkl')
target_return = gftIO.zload(path + 'x6.pkl')
risk_model = gftIO.zload(path + 'risk_model.pkl')

# assign the specific rebalancing date under study
target_date = datetime(year=2015, month=7, day=31)

# load the initial portfolio weight allocation data
data = gftIO.zload(
    r'd:\Wuwei\Project\simulator\data\monthly_rebalance_data_same_as_R_5.pkl')
data['target_portfolio_weight'] = data.pop('x3')
df_target_portfolio_weight = data['target_portfolio_weight'].\
    asMatrix().dropna(axis=0, how='all')
# Risk-model debug script: load the captured inputs and log progress.
# NOTE: duplicate imports (statsmodels.api, gftIO, datetime) from the
# original have been collapsed into one occurrence each.
import statsmodels.regression.linear_model as lm
import statsmodels.api as sm
from lib.gftTools import gftIO
import datetime
import pandas as pd
import numpy as np
import re
import os
import warnings
from functools import reduce
import math as mt
import logging
from ylib import ylog

ylog.set_level(logging.DEBUG)
ylog.console_on()
ylog.filelog_on("app")

risk_model_path = '/home/weiwu/share/risk_model/'
x0 = gftIO.zload(os.path.join(risk_model_path, 'stock_return.pkl'))
# BUG FIX: the original line was missing its closing parenthesis, which made
# the whole module a SyntaxError.
x1 = gftIO.zload(os.path.join(risk_model_path, 'factors.pkl'))
x2 = gftIO.zload(os.path.join(risk_model_path, 'market_capital.pkl'))
x3 = 4
x4 = 5
ylog.debug('parse data')
# -*- coding: utf-8 -*-
import logging
import numpy as np
import pandas as pd
from copy import copy
from lib.gftTools import gftIO
from scipy import linalg
import matplotlib.pyplot as plt
from utils.winsorize_mad import winsorize_mad

# Load the ROE view tables captured for the Black-Litterman experiment.
ROE = gftIO.zload("/home/weiwu/share/black_litterman/ROE_cur_year.pkl")
ROE_forecast = gftIO.zload(
    "/home/weiwu/share/black_litterman/ROE_cur_year.pkl")
ROE_daily_PIT_change = gftIO.zload(
    "/home/weiwu/share/black_litterman/ROE_daily_PIT_change.pkl")
ROE_daily_PIT_change2 = gftIO.zload(
    "/home/weiwu/share/black_litterman/ROE_daily_PIT_change2.pkl")
ROE_PIT = gftIO.zload("/home/weiwu/share/black_litterman/ROE_PIT.pkl")

if isinstance(ROE, gftIO.GftTable):
    # Views on all assets are not required; convert each table to a dense
    # matrix and forward-fill the gaps in place.
    ROE = ROE.asMatrix().copy()
    ROE.fillna(method='ffill', inplace=True)
    ROE_forecast = ROE_forecast.asMatrix().copy()
    ROE_forecast.fillna(method='ffill', inplace=True)
    ROE_daily_PIT_change = ROE_daily_PIT_change.asMatrix().copy()
    ROE_daily_PIT_change.fillna(method='ffill', inplace=True)
    ROE_PIT = ROE_PIT.asMatrix().copy()
    ROE_PIT.fillna(method='ffill', inplace=True)
    ROE_daily_PIT_change2 = ROE_daily_PIT_change2.asMatrix().copy()
    ROE_daily_PIT_change2.fillna(method='ffill', inplace=True)
import numpy as np
import pandas as pd


def PortfolioOptimize(target_mode, group_weight_min, position_limit,
                      target_risk, expected_return, covariance_matrix):
    """Portfolio optimizer stub -- the real implementation is elsewhere."""
    raise Exception("To be implemented")

# <codecell>
import numpy as np
import pandas as pd
from lib.gftTools import gftIO

# <codecell>
# Load the captured optimizer inputs x0..x6.
x0 = gftIO.zload("/home/jovyan/.gft/data/x0.pkl")
x1 = gftIO.zload("/home/jovyan/.gft/data/x1.pkl")
x2 = gftIO.zload("/home/jovyan/.gft/data/x2.pkl")
x3 = gftIO.zload("/home/jovyan/.gft/data/x3.pkl")
x4 = gftIO.zload("/home/jovyan/.gft/data/x4.pkl")
x5 = gftIO.zload("/home/jovyan/.gft/data/x5.pkl")
x6 = gftIO.zload("/home/jovyan/.gft/data/x6.pkl")
x6 = gftIO.transformDict4Name(x6)

# <codecell>
x6

# <codecell>
x6['g1'].asColumnTab()
# -*- coding: utf-8 -*-
from lib.gftTools import gftIO
import numpy as np
import pandas as pd

context = gftIO.zload("/home/gft/data/context.pkl")
x0 = gftIO.zload("/home/gft/data/x0.pkl")


def CreateDiagonalMatricePanel(context, otv):
    """Build a 3-D panel of per-date identity matrices.

    Steps: convert *otv* to a column table, take its unique datetime index,
    and for each date build an identity DataFrame labelled by that date's
    'variable' entries.
    """
    assets = otv.asColumnTab()
    assets = assets.set_index('idname')
    datetime_index = assets.index.unique()
    per_date_frames = {}
    for date in datetime_index:
        labels = assets.loc[date, 'variable']
        per_date_frames[date] = pd.DataFrame(
            np.eye(len(labels)), index=labels, columns=labels)
    return pd.Panel(per_date_frames)
0] = df_portfolioValue.ix[0, 0] / initHldValue - 1 result = {} result[gsConst.Const.Holding] = pd.concat( [df_holdings.replace(0, NA), df_holdingCash], axis=1) result[gsConst.Const.PortfolioValue] = df_portfolioValue result[gsConst.Const.Weights] = df_weights.replace(0, NA) result[gsConst.Const.SinglePeriodReturn] = df_singlePeriodRets result[gsConst.Const.CumulativeReturn] = df_cumRets result[gsConst.Const.Turnover] = df_turnoverPct print(df_cumRets.ix[-1]) return result dataPack = gftIO.zload( r'd:\Wuwei\Project\simulator\data\monthly_rebalance_data_execute_at_next_open_1.pkl' ) dataPack['begin_date'] = dataPack.pop('x0') dataPack['end_date'] = dataPack.pop('x1') dataPack['initial_holding'] = dataPack.pop('x2') dataPack['target_portfolio_weight'] = dataPack.pop('x3') dataPack['market_to_market_price'] = dataPack.pop('x4') dataPack['total_return_factor'] = dataPack.pop('x5') dataPack['execute_price'] = dataPack.pop('x6') dataPack['execute_price_return'] = dataPack.pop('x7') dataPack['trade_volume'] = dataPack.pop('x8') dataPack['trading_param'] = dataPack.pop('x9') dataPack['additional_Ts'] = dataPack.pop('x10') dataPack['trading_param']['execDelayPeriods'] = 1
if isinstance(array, list): array = np.array(array) idx = (np.abs(array-value)).argmin() return idx logger = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.DEBUG) solvers.options['show_progress'] = True x0 = gftIO.zload("/home/weiwu/share/optimize/x0.pkl") x1 = gftIO.zload("/home/weiwu/share/optimize/x1.pkl") x2 = gftIO.zload("/home/weiwu/share/optimize/x2.pkl") x3 = gftIO.zload("/home/weiwu/share/optimize/x3.pkl") x4 = gftIO.zload("/home/weiwu/share/optimize/x4.pkl") x5 = gftIO.zload("/home/weiwu/share/optimize/x5.pkl") x6 = gftIO.zload("/home/weiwu/share/optimize/x6.pkl") x7 = gftIO.zload("/home/weiwu/share/optimize//x7.pkl") x8 = gftIO.zload("/home/weiwu/share/optimize//x8.pkl") x9 = gftIO.zload("/home/weiwu/share/optimize//x9.pkl") x10 = gftIO.zload("/home/weiwu/share/optimize//x10.pkl") target_mode = x0 position_limit = x1 risk_model = x2 asset_return = x3