Example 1
def test_return_model_additional_kwargs():
    df = get_data()
    mkt_prices = get_benchmark_data()

    mu1 = expected_returns.return_model(df,
                                        method="capm_return",
                                        market_prices=mkt_prices,
                                        risk_free_rate=0.03)
    mu2 = expected_returns.capm_return(df,
                                       market_prices=mkt_prices,
                                       risk_free_rate=0.03)
    pd.testing.assert_series_equal(mu1, mu2)
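Note: these test excerpts omit their imports and data fixtures. A minimal, hypothetical sketch of what they rely on (the helper names get_data / get_benchmark_data and the CSV paths are placeholders, not necessarily the library's own test utilities):

import numpy as np
import pandas as pd
import pytest

from pypfopt import expected_returns


def get_data():
    # hypothetical fixture: daily adjusted close prices, one column per ticker
    return pd.read_csv("stock_prices.csv", parse_dates=True, index_col=0)


def get_benchmark_data():
    # hypothetical fixture: benchmark (e.g. an index ETF) prices with a DatetimeIndex
    return pd.read_csv("benchmark_prices.csv", parse_dates=True, index_col=0)
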
Example 2
def test_capm_with_benchmark():
    df = get_data()
    mkt_df = get_benchmark_data()
    mu = expected_returns.capm_return(df,
                                      market_prices=mkt_df,
                                      compounding=True)

    assert isinstance(mu, pd.Series)
    assert list(mu.index) == list(df.columns)
    assert mu.notnull().all()
    assert mu.dtype == "float64"
    correct_mu = np.array([
        0.09115799375654746,
        0.09905386632033128,
        0.05676282405265752,
        0.06291827346436336,
        0.13147799781014877,
        0.10239088012000815,
        0.1311567086884512,
        0.07339649698626659,
        0.1301248935078549,
        0.07620949056643983,
        0.07629095442513395,
        0.12163575425541985,
        0.10400070536161658,
        0.0781736030988492,
        0.09185177050469516,
        0.10245700691271296,
        0.11268307946677197,
        0.07870087187919145,
        0.1275598841214107,
        0.09536788741392595,
    ])
    np.testing.assert_array_almost_equal(mu.values, correct_mu)

    mu2 = expected_returns.capm_return(df,
                                       market_prices=mkt_df,
                                       compounding=False)
    assert (mu2 >= mu).all()
Example 3
def test_capm_no_benchmark():
    df = get_data()
    mu = expected_returns.capm_return(df)
    assert isinstance(mu, pd.Series)
    assert list(mu.index) == list(df.columns)
    assert mu.notnull().all()
    assert mu.dtype == "float64"
    correct_mu = np.array(
        [
            0.22148462799238577,
            0.2835429647498704,
            0.14693081977908462,
            0.1488989354304723,
            0.4162399750335195,
            0.22716772604184535,
            0.3970337136813829,
            0.16733214988182069,
            0.31791477659742146,
            0.17279931642386534,
            0.15271750464365566,
            0.351778014382922,
            0.32859883451716376,
            0.1501938182844417,
            0.268295486802897,
            0.31632339201710874,
            0.27753479916328516,
            0.16959588523287855,
            0.3089119447773357,
            0.2558719211959501,
        ]
    )
    np.testing.assert_array_almost_equal(mu.values, correct_mu)
    # Test the warning-triggering case where the input is not a DataFrame
    with pytest.warns(RuntimeWarning):
        mu_np = expected_returns.capm_return(df.to_numpy())
        mu_np.name = mu.name  # These will differ.
        mu_np.index = mu.index  # Index labels would be tickers.
        pd.testing.assert_series_equal(mu_np, mu)
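Outside of a test, the same estimator can be called on any price DataFrame. A minimal sketch with synthetic data, exercising the keyword arguments shown above (the tickers and all numbers are made up purely for illustration):

import numpy as np
import pandas as pd

from pypfopt import expected_returns

# synthetic daily prices purely for illustration
rng = np.random.default_rng(42)
idx = pd.date_range("2020-01-01", periods=500, freq="B")
prices = pd.DataFrame(
    100 * np.exp(np.cumsum(rng.normal(0.0003, 0.01, size=(500, 3)), axis=0)),
    index=idx,
    columns=["AAA", "BBB", "CCC"],
)
market_prices = pd.DataFrame(
    100 * np.exp(np.cumsum(rng.normal(0.0002, 0.008, size=500))),
    index=idx,
    columns=["MKT"],
)

mu = expected_returns.capm_return(
    prices,
    market_prices=market_prices,  # benchmark prices; if omitted, the assets' mean return proxies the market
    risk_free_rate=0.02,          # annualised risk-free rate
    compounding=True,             # geometric (True) vs arithmetic (False) mean returns
    frequency=252,                # trading periods per year used for annualisation
)
print(mu)
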
Example 4
def mean_var(my_portfolio, vol_max=0.15, perf=True) -> list:
    # changed to take in the desired date range; otherwise it would use all historical data

    ohlc = yf.download(
        my_portfolio.portfolio,
        start=my_portfolio.start_date,
        end=my_portfolio.end_date,
        progress=False,
    )
    prices = ohlc["Adj Close"].dropna(how="all")
    prices = prices.filter(my_portfolio.portfolio)

    # sometimes the chosen date range predates a company's listing; prices can't be set to 0, so fill missing values with 1
    prices = prices.fillna(1)

    mu = expected_returns.capm_return(prices)
    S = risk_models.CovarianceShrinkage(prices).ledoit_wolf()

    ef = EfficientFrontier(mu, S)
    ef.add_objective(objective_functions.L2_reg,
                     gamma=my_portfolio.diversification)
    if my_portfolio.min_weights is not None:
        ef.add_constraint(lambda x: x >= my_portfolio.min_weights)
    if my_portfolio.max_weights is not None:
        ef.add_constraint(lambda x: x <= my_portfolio.max_weights)
    ef.efficient_risk(vol_max)
    weights = ef.clean_weights()

    # collect the cleaned weight of each asset, in portfolio order
    result = [[weight] for weight in weights.values()]

    if perf is True:
        ef.portfolio_performance(verbose=True)

    return flatten(result)
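For context, a minimal sketch of how mean_var might be invoked. The portfolio object here is a hypothetical stand-in exposing just the attributes the function reads (portfolio, start_date, end_date, diversification, min_weights, max_weights); in the originating library it would be an instance of its own portfolio class, and flatten is assumed to be a small list-flattening helper imported alongside it.

from types import SimpleNamespace

# hypothetical stand-in for the portfolio object the function expects
my_portfolio = SimpleNamespace(
    portfolio=["AAPL", "MSFT", "GOOG"],  # tickers handed to yf.download
    start_date="2019-01-01",
    end_date="2022-01-01",
    diversification=1.0,                 # gamma for the L2 regularisation objective
    min_weights=None,                    # e.g. 0.0 to forbid short positions
    max_weights=None,                    # e.g. 0.3 to cap any single position
)

weights = mean_var(my_portfolio, vol_max=0.15, perf=True)
print(weights)
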
Example 5
import matplotlib.pyplot as plt
import numpy as np
import yfinance as yf

from pypfopt import EfficientFrontier, expected_returns, plotting, risk_models

# small portfolio to test
tickers = ["BLK", "BAC", "AAPL", "TM", "WMT", "JD"]
# additional tickers, currently commented out: "INTU", "MA", "UL", "CVS",
# "DIS", "AMD", "NVDA", "PBI", "TGT"

ohlc = yf.download(tickers, period="max")

prices = ohlc["Adj Close"]
prices.tail()

# various expected-return models; these are used as estimates of expected future returns
mu = expected_returns.james_stein_shrinkage(prices)
mum = expected_returns.mean_historical_return(prices)
mummy = expected_returns.capm_return(prices)

# different risk models; presumably the pandas .cov() method could also be used
S = risk_models.semicovariance(prices)
T = risk_models.CovarianceShrinkage(prices).ledoit_wolf()

plotting.plot_covariance(S)
plotting.plot_covariance(T)

#equal weights
initial_weights = np.array([1 / len(tickers)] * len(tickers))
print(initial_weights)

#transaction cost objective
ef = EfficientFrontier(mum, T)
# 1% broker commission
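The excerpt stops before the transaction-cost objective is actually attached. A minimal sketch of how it might continue, assuming PyPortfolioOpt's objective_functions.transaction_cost with k=0.01 for the 1% broker commission mentioned in the comment (an illustrative continuation, not necessarily the original author's code):

from pypfopt import objective_functions

# penalise turnover away from the equal-weight starting portfolio
ef.add_objective(objective_functions.transaction_cost,
                 w_prev=initial_weights, k=0.01)
ef.min_volatility()  # max_sharpe's variable transformation can clash with extra objectives
weights = ef.clean_weights()
ef.portfolio_performance(verbose=True)
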
Example 6
cov_matrix  # display the covariance matrix

# portfolio volatility
port_volatility = np.sqrt(np.dot(pesos.T, np.dot(cov_matrix, pesos)))

vol_ano = port_volatility * np.sqrt(252)  # annualised volatility, assuming 252 trading days

print(vol_ano)

# Max-Sharpe optimization
from pypfopt.expected_returns import capm_return

# Return estimators

#CAPM
mu = capm_return(carteira_cripto, risk_free_rate=0.00157)

from pypfopt import risk_models

# covariance matrix
from pypfopt.risk_models import CovarianceShrinkage

# Risk models
# covariance matrix estimation
S = CovarianceShrinkage(carteira_cripto).ledoit_wolf()

# Optimization technique - Efficient Frontier
from pypfopt.efficient_frontier import EfficientFrontier
from pypfopt import objective_functions
ef = EfficientFrontier(mu, S)
ef.add_objective(objective_functions.L2_reg, gamma=0.1)
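The excerpt ends before the optimization itself is run. A minimal, hypothetical continuation of the Max-Sharpe step (the variable name pesos_otimizados is illustrative; the risk-free rate reuses the 0.00157 from the CAPM call above; if the extra L2_reg objective interferes with max_sharpe's internal transformation, min_volatility() or efficient_risk() are common alternatives):

ef.max_sharpe(risk_free_rate=0.00157)
pesos_otimizados = ef.clean_weights()  # hypothetical variable name
print(pesos_otimizados)
ef.portfolio_performance(verbose=True, risk_free_rate=0.00157)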