def markosolver(period_subset_data, moments_estimator, cleaning, must_haves,
                equalise_SR=False, equalise_vols=True, **ignored_args):
    """
    Returns the optimal portfolio for the returns data

    If equalise_SR=True then assumes all assets have the target SR;
    if False uses each asset's natural SR

    If equalise_vols=True then normalises returns to have same standard
    deviation; the weights returned will be 'risk weightings'

    :param period_subset_data: The data to optimise over
    :type period_subset_data: pd.DataFrame TxN

    :param moments_estimator: Object providing .moments() and .ann_target_SR
    :type moments_estimator: momentsEstimator

    :param cleaning: Should we clean correlations so can use incomplete data?
    :type cleaning: bool

    :param must_haves: The indices of things we must have weights for, used for cleaning
    :type must_haves: list of bool

    :param equalise_SR: Set all means equal before optimising (makes more stable)
    :type equalise_SR: bool

    :param equalise_vols: Set all vols equal before optimising (makes more stable)
    :type equalise_vols: bool

    Other arguments are kept so we can use **kwargs with other optimisation functions
    *_params passed through to data estimation functions

    :returns: (weights, diag) - list of weights plus a dict of diagnostics
    """

    # estimate (mean_list, corrmatrix, stdev_list) for this period's data
    rawmoments = moments_estimator.moments(period_subset_data)
    (mean_list, corrmatrix, stdev_list) = copy(rawmoments)

    # equalise vols first, so that if we also equalise SR the means are
    # derived from the already-normalised vols
    if equalise_vols:
        (mean_list, stdev_list) = vol_equaliser(mean_list, stdev_list)

    if equalise_SR:
        # moments are annualised
        ann_target_SR = moments_estimator.ann_target_SR
        mean_list = SR_equaliser(stdev_list, ann_target_SR)

    # rebuild the covariance matrix from (possibly equalised) vols and correlations
    sigma = sigma_from_corr_and_std(stdev_list, corrmatrix)

    unclean_weights = optimise(sigma, mean_list)

    # optionally replace weights for assets with insufficient data
    if cleaning:
        weights = clean_weights(unclean_weights, must_haves)
    else:
        weights = unclean_weights

    diag = dict(
        raw=rawmoments,
        sigma=sigma,
        mean_list=mean_list,
        unclean=unclean_weights,
        weights=weights,
    )

    return (weights, diag)
def opt_shrinkage(period_subset_data, moments_estimator, cleaning, must_haves,
                  shrinkage_SR=0.9, shrinkage_corr=0.5, equalise_vols=False,
                  **ignored_args):
    """
    Given dataframe of returns; returns_to_bs, performs a shrinkage optimisation

    :param period_subset_data: The data to optimise over
    :type period_subset_data: pd.DataFrame TxN

    :param moments_estimator: Object providing .moments() and .ann_target_SR
    :type moments_estimator: momentsEstimator

    :param cleaning: Should we clean correlations so can use incomplete data?
    :type cleaning: bool

    :param must_haves: The indices of things we must have weights for, used for cleaning
    :type must_haves: list of bool

    :param shrinkage_SR: Shrinkage factor to use with SR. 1.0 = full shrinkage
    :type shrinkage_SR: float

    :param shrinkage_corr: Shrinkage factor to use with correlations. 1.0 = full shrinkage
    :type shrinkage_corr: float

    :param equalise_vols: Set all vols equal before optimising (makes more stable)
    :type equalise_vols: bool

    Other arguments are kept so we can use **kwargs with other optimisation functions
    *_params passed through to data estimation functions

    :returns: (weights, diag) - list of weights plus a dict of diagnostics
    """

    # subset_data will be stacked up list, need to average
    rawmoments = moments_estimator.moments(period_subset_data)
    (mean_list, corrmatrix, stdev_list) = copy(rawmoments)

    # equalise vols first
    if equalise_vols:
        (mean_list, stdev_list) = vol_equaliser(mean_list, stdev_list)

    # shrinkage: pull means towards the target SR and correlations towards
    # the prior; everything is now annualised
    ann_target_SR = moments_estimator.ann_target_SR
    mean_list = shrink_SR(mean_list, stdev_list, shrinkage_SR, ann_target_SR)
    corrmatrix = shrink_corr(corrmatrix, shrinkage_corr)

    # get sigma matrix back
    sigma = sigma_from_corr_and_std(stdev_list, corrmatrix)

    unclean_weights = optimise(sigma, mean_list)

    # optionally replace weights for assets with insufficient data
    if cleaning:
        weights = clean_weights(unclean_weights, must_haves)
    else:
        weights = unclean_weights

    diag = dict(
        raw=rawmoments,
        sigma=sigma,
        mean_list=mean_list,
        unclean=unclean_weights,
        weights=weights,
    )

    return (weights, diag)