Ejemplo n.º 1
0
def get_annualised_risk_given_inputs(std_dev, cmatrix, weights):
    """
    Annualised portfolio standard deviation from asset vols, a correlation
    matrix, and portfolio weights.

    :param std_dev: per-asset standard deviations (annualised)
    :param cmatrix: correlation matrix, NxN
    :param weights: portfolio weights, length N
    :returns: float, portfolio standard deviation
    """
    weights = np.array(weights)
    std_dev = np.array(std_dev)

    # Replace/repair missing entries so the quadratic form below is defined
    std_dev, cmatrix, weights = clean_values(std_dev, cmatrix, weights)

    # Covariance matrix implied by the vols and correlations
    sigma = sigma_from_corr_and_std(std_dev, cmatrix)

    # Portfolio variance is w' Sigma w; take the square root for std dev
    variance = weights.dot(sigma).dot(weights.transpose())

    return variance ** 0.5
Ejemplo n.º 2
0
def combine_list_of_correlations_and_stdev(rolling_correlations,
                                           rolling_stdev):
    """
    Combine rolling correlation estimates with rolling vol estimates into a
    list of covariance matrices, one per correlation fit date.

    Missing vols are treated as 0.0 and missing correlations as 1.0 before
    the covariance matrix is formed.

    :param rolling_correlations: object with .fit_dates (list of dates) and
        .corr_list (one correlation matrix per fit date)
    :param rolling_stdev: pd.DataFrame of standard deviations indexed by date,
        one column per asset
    :returns: CorrelationList of covariance matrices, aligned with fit_dates
    """
    corr_index_dates = rolling_correlations.fit_dates

    sigma_list = []
    for corr_index, index_date in enumerate(corr_index_dates):
        # Vols for this fit date; NaN vols become zero
        std_dev = rolling_stdev.loc[index_date].values
        std_dev[np.isnan(std_dev)] = 0.0

        # Matching correlation matrix; NaN correlations become 1.0
        cmatrix = pd.DataFrame(rolling_correlations.corr_list[corr_index])
        cmatrix[cmatrix.isna()] = 1.0
        cmatrix = np.array(cmatrix)

        sigma_list.append(sigma_from_corr_and_std(std_dev, cmatrix))

    covariance_estimates = CorrelationList(sigma_list, rolling_stdev.columns,
                                           corr_index_dates)

    return covariance_estimates
Ejemplo n.º 3
0
def markosolver(period_subset_data,
                moments_estimator,
                cleaning,
                must_haves,
                equalise_SR=False,
                equalise_vols=True,
                **ignored_args):
    """
    Run a Markowitz optimisation over one period of returns data.

    With equalise_vols=True all vols are normalised first, so the weights
    returned are 'risk weightings'. With equalise_SR=True all means are set
    from a common annual target Sharpe ratio before optimising, which makes
    the result more stable.

    :param period_subset_data: the data to optimise over
    :type period_subset_data: pd.DataFrame TxN

    :param moments_estimator: provides .moments() and .ann_target_SR

    :param cleaning: clean correlations so incomplete data can be used?
    :type cleaning: bool

    :param must_haves: indices of assets that must receive weights (cleaning)
    :type must_haves: list of bool

    :param equalise_SR: set all means from the target SR before optimising
    :type equalise_SR: bool

    :param equalise_vols: set all vols equal before optimising
    :type equalise_vols: bool

    Remaining keyword arguments are accepted (and ignored) so this shares a
    **kwargs calling convention with the other optimisation functions.

    :returns: tuple (weights, diagnostics dict)
    """

    raw_moments = moments_estimator.moments(period_subset_data)
    mean_list, corrmatrix, stdev_list = copy(raw_moments)

    # Normalise vols before anything else so weights become risk weightings
    if equalise_vols:
        mean_list, stdev_list = vol_equaliser(mean_list, stdev_list)

    if equalise_SR:
        # Moments are annualised, so use the annualised target SR
        mean_list = SR_equaliser(stdev_list, moments_estimator.ann_target_SR)

    # Covariance matrix from (possibly equalised) vols and correlations
    sigma = sigma_from_corr_and_std(stdev_list, corrmatrix)

    unclean_weights = optimise(sigma, mean_list)

    if cleaning:
        weights = clean_weights(unclean_weights, must_haves)
    else:
        weights = unclean_weights

    # Diagnostics so callers can inspect every intermediate stage
    diag = dict(
        raw=raw_moments,
        sigma=sigma,
        mean_list=mean_list,
        unclean=unclean_weights,
        weights=weights,
    )

    return (weights, diag)
Ejemplo n.º 4
0
def optimise_using_correlation(mean_list, avg_correlation, std):
    """
    Optimise portfolio weights assuming every asset has the same vol and
    every pair of assets shares a single average correlation.

    :param mean_list: expected returns, one per asset
    :param avg_correlation: common off-diagonal correlation
    :param std: common standard deviation applied to every asset
    :returns: optimised weights
    """
    n_assets = len(mean_list)

    # Constant-correlation matrix with ones on the diagonal
    corr_matrix = boring_corr_matrix(n_assets, offdiag=avg_correlation)

    # Every asset gets the same vol
    sigma = sigma_from_corr_and_std([std] * n_assets, corr_matrix)

    return optimise(sigma, mean_list)
Ejemplo n.º 5
0
def opt_shrinkage(period_subset_data,
                  moments_estimator,
                  cleaning,
                  must_haves,
                  shrinkage_SR=0.9,
                  shrinkage_corr=0.5,
                  equalise_vols=False,
                  **ignored_args):
    """
    Shrinkage optimisation over one period of returns data: Sharpe ratios are
    shrunk towards a common annual target and correlations towards their
    average before the Markowitz optimisation is run.

    :param period_subset_data: the data to optimise over
    :type period_subset_data: pd.DataFrame TxN

    :param moments_estimator: provides .moments() and .ann_target_SR

    :param cleaning: clean correlations so incomplete data can be used?
    :type cleaning: bool

    :param must_haves: indices of assets that must receive weights (cleaning)
    :type must_haves: list of bool

    :param shrinkage_SR: shrinkage factor for SR; 1.0 = full shrinkage
    :type shrinkage_SR: float

    :param shrinkage_corr: shrinkage factor for correlations; 1.0 = full
    :type shrinkage_corr: float

    :param equalise_vols: set all vols equal before optimising
    :type equalise_vols: bool

    Remaining keyword arguments are accepted (and ignored) so this shares a
    **kwargs calling convention with the other optimisation functions.

    :returns: tuple (weights, diagnostics dict)
    """

    # period_subset_data is a stacked-up list; the estimator averages it
    raw_moments = moments_estimator.moments(period_subset_data)
    mean_list, corrmatrix, stdev_list = copy(raw_moments)

    # Normalise vols before shrinking, if requested
    if equalise_vols:
        mean_list, stdev_list = vol_equaliser(mean_list, stdev_list)

    # Shrinkage step: everything here is annualised
    ann_target_SR = moments_estimator.ann_target_SR
    mean_list = shrink_SR(mean_list, stdev_list, shrinkage_SR, ann_target_SR)
    corrmatrix = shrink_corr(corrmatrix, shrinkage_corr)

    # Rebuild the covariance matrix from the shrunk estimates
    sigma = sigma_from_corr_and_std(stdev_list, corrmatrix)

    unclean_weights = optimise(sigma, mean_list)

    if cleaning:
        weights = clean_weights(unclean_weights, must_haves)
    else:
        weights = unclean_weights

    # Diagnostics so callers can inspect every intermediate stage
    diag = dict(
        raw=raw_moments,
        sigma=sigma,
        mean_list=mean_list,
        unclean=unclean_weights,
        weights=weights,
    )

    return (weights, diag)