def test_weight_bounds_minus_one_to_one():
    ef = EfficientFrontier(*setup_efficient_frontier(data_only=True),
                           weight_bounds=(-1, 1))
    assert ef.max_sharpe()
    assert ef.min_volatility()
    assert ef.efficient_return(0.05)
    assert ef.efficient_risk(0.05)
Example #2
def min_variance(ticker_list, period='1y', interval='1d', cash=10000000):
    x = Ticker(ticker_list,
               retry=20,
               status_forcelist=[404, 429, 500, 502, 503, 504])
    data = x.history(period=period, interval=interval)
    if len(ticker_list) > 1:
        data = yf.download(ticker_list,
                           period=period,
                           interval=interval,
                           group_by='ticker')
    new_data = []
    df = pd.DataFrame()
    weight = 1 / len(ticker_list)
    for i in ticker_list:
        stock_normal_ret = data['close'] / data.iloc[0]['close']
        df[i] = data['close']
        if len(ticker_list) > 1:
            stock_normal_ret = data[i]['close'] / data[i].iloc[0]['close']
            df[i] = data[i]['close']
        alloc = stock_normal_ret * weight
        balance = alloc * cash
        new_data.append(balance)

    mu = expected_returns.mean_historical_return(df)
    s = risk_models.sample_cov(df)
    ef = EfficientFrontier(mu, s)
    weights = ef.min_volatility()
    # NOTE: calling ef.max_sharpe() here would re-optimise an already solved frontier,
    # which PyPortfolioOpt does not support; use a fresh EfficientFrontier for that.
    cleaned_weights = ef.clean_weights()
    x = ef.portfolio_performance(verbose=False)
    return cleaned_weights, round(2.5 * x[2] / 15, 3)  # sharpe adjusted weight
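
A minimal usage sketch for min_variance follows; the ticker list and the cash amount are illustrative assumptions, and it presumes the imports the function relies on (yahooquery's Ticker, yfinance as yf, pandas as pd, and the pypfopt modules) are in scope. The second return value is just the portfolio's Sharpe ratio rescaled by 2.5/15, as the trailing comment notes.

# Illustrative call; tickers, period and cash are assumptions, not from the source.
cleaned_weights, sharpe_weight = min_variance(['AAPL', 'MSFT', 'GOOG'],
                                              period='1y',
                                              interval='1d',
                                              cash=10_000_000)
print(cleaned_weights)   # min-volatility weights after clean_weights()
print(sharpe_weight)     # Sharpe ratio scaled by 2.5 / 15 into a rough allocation fraction
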
Example #3
def getMinVolatilityPortfolio(data):
    mu, Sigma = getMuSigma(data)
    ef = EfficientFrontier(mu, Sigma)
    raw_weights = ef.min_volatility()
    weights = ef.clean_weights()
    performance = ef.portfolio_performance()
    return weights, performance
    def __min_vol(mu: pd.Series, cov_matrix: pd.DataFrame) -> pd.DataFrame:
        # Setup
        ef = EfficientFrontier(mu, cov_matrix)

        # Optimizes for minimum volatility
        min_ptf = ef.min_volatility()
        return pd.DataFrame.from_dict(data=min_ptf, orient='index').T
Example #5
def optimizePortEfficient(port, weights, start, plot = False, short = False, printBasicStats=True, how = 'Sharpe'):
	#Getting Data
	df = getData(port)
	#Plotting the portfolio
	if plot: 
		plotPort(df, port)
		
	if printBasicStats:
		basicStats(df, weights)

	#Optimization for Sharpe using Efficient Frontier
	if short: 
		bounds = (-1,1)
	else:
		bounds = (0,1)
	mu = df.pct_change().mean() * 252
	S = risk_models.sample_cov(df)

	if how == 'Sharpe':
		# Maximized on Sharpe Ratio
		ef = EfficientFrontier(mu, S, weight_bounds=bounds) #Here the weight bounds are being used to allow short positions as well
		weights = ef.max_sharpe()
		cleaned_weights = dict(ef.clean_weights())
		print("Weights of an optimal portfolio maximised on Sharpe Ratio:")
		print(cleaned_weights)
		ef.portfolio_performance(verbose = True)
		getDiscreteAllocations(df, weights)
	if how == "Vol":
		# Minimized on Volatility
		efi = EfficientFrontier(mu, S, weight_bounds=(-1,1))
		w = dict(efi.min_volatility())
		print("\nWeights of an optimal portfolio minimized on Volatilty (Risk):")
		print(w)
		efi.portfolio_performance(verbose = True)
		getDiscreteAllocations(df, w)
Example #6
 def calculate(self, date, universe):
     prices = universe.pricing['price'].unstack()[self.assets].iloc[-self.window:]
     mu = mean_historical_return(prices)
     S = CovarianceShrinkage(prices).ledoit_wolf()
     ef = EfficientFrontier(mu, S)
     weights = ef.min_volatility()
     weights = pd.Series(weights, index=self.assets)
     return weights
    def wf(self,
           train,
           optimizer,
           test,
           test_months,
           annual_risk_free_rate=0.02):
        """Walk-Forward backtesting method

        Args:
            train (pandas.Series): training data
            optimizer (str): portfolio optimizer for PyPortfolioOpt
            test (pandas.Series): test data
            test_months (int): number of testing months
            annual_risk_free_rate (float, optional): annual risk free rate used in calculating Sharpe ratio. Defaults to 0.02.

        Returns:
            [pandas.Series, float]: expected and realised asset performance
        """

        if optimizer == "hrp":
            returns = train.pct_change().dropna()
            hrp = HRPOpt(returns)
            weights = hrp.optimize()
            weights = pd.Series(weights)
            performance = hrp.portfolio_performance(verbose=True)

            realised_annual_return = sum(
                weights *
                ((test.iloc[-1] / test.iloc[0])**(12 / test_months) - 1))
            realised_annual_volatility = sum(
                weights * np.std(test.pct_change().dropna()) * np.sqrt(251))
            realised_sharpe_ratio = (
                realised_annual_return -
                annual_risk_free_rate) / realised_annual_volatility

            return weights, performance, realised_annual_return, realised_annual_volatility, realised_sharpe_ratio
        else:
            mu = mean_historical_return(train)
            S = CovarianceShrinkage(train).ledoit_wolf()
            ef = EfficientFrontier(mu, S)
            weights = ef.max_sharpe(
            ) if optimizer == "msr" else ef.min_volatility()
            weights = pd.Series(weights)
            performance = ef.portfolio_performance()

            realised_annual_return = sum(
                weights *
                ((test.iloc[-1] / test.iloc[0])**(12 / test_months) - 1))
            realised_annual_volatility = sum(
                weights * np.std(test.pct_change().dropna()) * np.sqrt(251))
            realised_sharpe_ratio = (
                realised_annual_return -
                annual_risk_free_rate) / realised_annual_volatility

            return weights, performance, realised_annual_return, realised_annual_volatility, realised_sharpe_ratio
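
A hedged sketch of how the walk-forward method above might be driven: the owning class name (Backtester), the monthly price DataFrame prices, and the three-month test window are assumptions for illustration; any optimizer value other than "hrp" or "msr" falls through to min_volatility().

import pandas as pd

# Hypothetical driver: split the last `test_months` months off as the test set.
test_months = 3
split_date = prices.index[-1] - pd.DateOffset(months=test_months)   # `prices` is assumed
train, test = prices.loc[:split_date], prices.loc[split_date:]

bt = Backtester()   # hypothetical owner of the wf() method above
weights, expected_perf, real_ret, real_vol, real_sharpe = bt.wf(
    train, optimizer="min_vol", test=test, test_months=test_months)
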
Example #8
def getPortfolio(assets, user_risk):
    priceDfFiltered = priceDf[assets]
    mu = expected_returns.mean_historical_return(priceDfFiltered)
    S = risk_models.sample_cov(priceDfFiltered)
    # Optimise for maximal Sharpe ratio
    ef = EfficientFrontier(mu, S)
    maxReturns = max(mu)
    portfoliosToReturn = {}
    raw_weights = ef.min_volatility()
    cleaned_weights = ef.clean_weights()
    for key in cleaned_weights:
        cleaned_weights[key] *= 100
    portfolioResults = ef.portfolio_performance()
    portfolioResultsOrdered = {
        "Expected annual return": portfolioResults[0],
        "Annual volatility": portfolioResults[1],
        "Sharpe Ratio": portfolioResults[2]
    }
    portfoliosToReturn["Lowest Volatility"] = {
        "Weights": cleaned_weights,
        "Portfolio": portfolioResultsOrdered
    }
    minReturns = portfolioResults[0]
    # A solved EfficientFrontier cannot be re-optimized with a new objective,
    # so build a fresh instance for the max-Sharpe portfolio.
    ef = EfficientFrontier(mu, S)
    raw_weights = ef.max_sharpe()
    cleaned_weights = ef.clean_weights()
    for key in cleaned_weights:
        cleaned_weights[key] *= 100
    portfolioResults = ef.portfolio_performance()
    portfolioResultsOrdered = {
        "Expected annual return": portfolioResults[0],
        "Annual volatility": portfolioResults[1],
        "Sharpe Ratio": portfolioResults[2]
    }
    portfoliosToReturn["Best Portfolio"] = {
        "Weights": cleaned_weights,
        "Portfolio": portfolioResultsOrdered
    }
    userReturns = (maxReturns - minReturns) * user_risk + minReturns
    ef = EfficientFrontier(mu, S)  # fresh instance for the target-return portfolio
    raw_weights = ef.efficient_return(userReturns)
    cleaned_weights = ef.clean_weights()
    for key in cleaned_weights:
        cleaned_weights[key] *= 100
    portfolioResults = ef.portfolio_performance()
    portfolioResultsOrdered = {
        "Expected annual return": portfolioResults[0],
        "Annual volatility": portfolioResults[1],
        "Sharpe Ratio": portfolioResults[2]
    }
    portfoliosToReturn["User Risk"] = {
        "Weights": cleaned_weights,
        "Portfolio": portfolioResultsOrdered
    }

    return portfoliosToReturn
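
The "User Risk" portfolio above targets a return interpolated linearly between the minimum-volatility portfolio's expected return and the best single-asset expected return; a quick worked check with made-up numbers:

# Illustrative numbers only: min-vol portfolio returns 6%, best single asset 18%.
minReturns, maxReturns, user_risk = 0.06, 0.18, 0.5
userReturns = (maxReturns - minReturns) * user_risk + minReturns
assert abs(userReturns - 0.12) < 1e-12   # user_risk = 0.5 lands halfway, at 12%
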
def calc(stockStartTime, stockEndTime):
    # calculate a window starting two years before the start date
    tempdate = dt.datetime.fromisoformat(stockStartTime)
    tempdate = tempdate - dt.timedelta(weeks=104)
    prices = ffn.get('pfe,ibm,wmt,msft,cat',
                     start=tempdate.strftime('%Y-%m-%d'),
                     end=stockStartTime)
    # Expected returns and sample covariance
    mu = expected_returns.mean_historical_return(prices)
    S = risk_models.sample_cov(prices)
    #Minimum volatility. May be useful if you're trying to get an idea of how low the volatility could be,
    #but in practice it makes a lot more sense to me to use the portfolio that maximises the Sharpe ratio.
    # Optimise the portfolio (min volatility here; ef.max_sharpe() would be the benchmark alternative)
    ef = EfficientFrontier(mu, S)
    raw_weights = ef.min_volatility()  #ef.max_sharpe()
    cleaned_weights = ef.clean_weights()
    print("Cleaned weights:\n", cleaned_weights)
    print("Portfolio Performance:\n", ef.portfolio_performance(verbose=True))
    #To achieve beta neutrality
    ef = EfficientFrontier(mu, S, weight_bounds=(-1, 1))
    print(" Weights: ",
          ef.efficient_return(target_return=0.15, market_neutral=True))
    weights = ef.efficient_return(target_return=0.2, market_neutral=True)
    weight_sum = sum(w for w in weights.values() if w > 0)
    #normalised_weights = {k:v/weight_sum for k,v in weights.items()}
    #print("Normalized weights: ",normalised_weights)
    #We then need to convert these weights into an actual allocation, telling you how many shares of each asset you should purchase.
    latest_prices = get_latest_prices(prices)
    da = DiscreteAllocation(weights,
                            latest_prices,
                            total_portfolio_value=1000000)
    allocation, leftover = da.lp_portfolio()
    #print(allocation)
    print("")
    for key, val in allocation.items():
        print("Number of positions in ", key, " stock: ", val)
    print("")
    print("Funds remaining: ${:.2f}".format(leftover))
    print("")
    prices2 = ffn.get('pfe,ibm,wmt,msft,cat',
                      start=stockStartTime,
                      end=stockEndTime)
    latest_prices2 = get_latest_prices(prices2)
    sum1 = 0
    for key, val in allocation.items():
        sum1 = sum1 - (latest_prices[key] * val)
    print("Value of Portfolio after short sales :\t", abs(sum1))
    new = 0
    for key, val in allocation.items():
        new = new + (latest_prices2[key] * val)
        sum1 = sum1 + (latest_prices2[key] * val)
    print("Value at end of period :\t\t", new)
    print("Profit at end of time period :\t\t", sum1)
    return sum1
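
With market_neutral=True, efficient_return constrains the weights to sum to zero rather than one (longs financed by shorts), which is why the code above sums only the positive weights. A small hedged check, reusing the weights dict returned above:

# Dollar-neutrality check for the market-neutral weights computed in calc().
gross_long = sum(w for w in weights.values() if w > 0)
gross_short = -sum(w for w in weights.values() if w < 0)
net_exposure = sum(weights.values())
print("long {:.2f}, short {:.2f}, net {:.4f}".format(gross_long, gross_short, net_exposure))
# net_exposure should be approximately zero
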
Example #10
    def min_volatility(self):
        prices = self.prices
        returns = self.returns
        rf = self.rf

        e_returns = self.e_returns
        cov = self.covariance
        ef = EfficientFrontier(e_returns, cov)

        optimal_weights = ef.min_volatility()
        perf = ef.portfolio_performance(verbose=False, risk_free_rate=rf)
        return optimal_weights, perf
def get_portfolio(universe, df_tr, port_value, cutoff, df_m):
    '''Create a portfolio using the stocks from the universe and the closing
    prices from df_tr, with a given portfolio value and a weight cutoff value,
    using the value of a momentum indicator to limit the quantity of the stocks.'''
    df_t = select_columns(df_tr, universe)
    mu = capm_returns(df_t)
    S = CovarianceShrinkage(df_t).ledoit_wolf()
    # Optimize the portfolio for min volatility
    ef = EfficientFrontier(mu, S)
    weights = ef.min_volatility()
    #weights = ef.max_sharpe()
    cleaned_weights = ef.clean_weights(cutoff=cutoff)
    # Allocate
    latest_prices = get_latest_prices(df_t)
    da = DiscreteAllocation(cleaned_weights,
                            latest_prices,
                            total_portfolio_value=port_value)
    allocation, non_trading_cash = da.greedy_portfolio()
    # Put the stocks and the number of shares from the portfolio into a df
    symbol_list = []
    mom = []
    w = []
    num_shares_list = []
    l_price = []
    tot_cash = []
    for symbol, num_shares in allocation.items():
        symbol_list.append(symbol)
        mom.append(df_m[df_m['stock'] == symbol].values[0])
        w.append(round(cleaned_weights[symbol], 4))
        num_shares_list.append(num_shares)
        l_price.append(latest_prices[symbol])
        tot_cash.append(num_shares * latest_prices[symbol])

    df_buy = pd.DataFrame()
    df_buy['stock'] = symbol_list
    df_buy['momentum'] = mom
    df_buy['weights'] = w
    df_buy['shares'] = num_shares_list
    df_buy['price'] = l_price
    df_buy['value'] = tot_cash
    cash_row = pd.DataFrame([{
        'stock': 'CASH',
        'momentum': 0,
        'weights': round(1 - df_buy['weights'].sum(), 4),
        'shares': 1,
        'price': round(non_trading_cash, 2),
        'value': round(non_trading_cash, 2)
    }])
    df_buy = pd.concat([df_buy, cash_row], ignore_index=True)  # DataFrame.append was removed in pandas 2.0
    df_buy = df_buy.set_index('stock')
    return df_buy, non_trading_cash
Example #12
def calculateInvestment(limit=10,
                        count=10,
                        write_to_file=True,
                        show_cla=False,
                        tpv=20000):
    symbols = getSymbolsFromDatabase()
    prices = createDataFrame(symbols[:limit], count)
    mu = expected_returns.mean_historical_return(prices)
    S = risk_models.CovarianceShrinkage(prices).ledoit_wolf()
    ef = EfficientFrontier(mu, S, weight_bounds=(-1, 1))
    ef.add_objective(objective_functions.L2_reg)
    ef.min_volatility()
    c_weights = ef.clean_weights()
    if write_to_file:
        ef.save_weights_to_file("weights.txt")
    if show_cla:
        cla = CLA(mu, S)
        ef_plot(cla)
    ef.portfolio_performance(verbose=True)
    latest_prices = disc_alloc.get_latest_prices(prices)
    allocation_minv, leftover = disc_alloc.DiscreteAllocation(
        c_weights, latest_prices, total_portfolio_value=tpv).lp_portfolio()
    return allocation_minv, leftover
Example #13
def test_min_volatility_short():
    ef = EfficientFrontier(*setup_efficient_frontier(data_only=True),
                           weight_bounds=(None, None))
    w = ef.min_volatility()
    assert isinstance(w, dict)
    assert set(w.keys()) == set(ef.tickers)
    assert set(w.keys()) == set(ef.expected_returns.index)
    np.testing.assert_almost_equal(ef.weights.sum(), 1)
    np.testing.assert_allclose(
        ef.portfolio_performance(),
        (0.1719799152621441, 0.1555954785460613, 0.9767630568850568),
    )

    # Shorting should reduce volatility
    volatility = ef.portfolio_performance()[1]
    ef_long_only = setup_efficient_frontier()
    ef_long_only.min_volatility()
    long_only_volatility = ef_long_only.portfolio_performance()[1]
    assert volatility < long_only_volatility
Example #14
def test_min_volatility_short():
    ef = EfficientFrontier(*setup_efficient_frontier(data_only=True),
                           weight_bounds=(None, None))
    w = ef.min_volatility()
    assert isinstance(w, dict)
    assert set(w.keys()) == set(ef.tickers)
    assert set(w.keys()) == set(ef.expected_returns.index)
    np.testing.assert_almost_equal(ef.weights.sum(), 1)
    np.testing.assert_allclose(
        ef.portfolio_performance(),
        (0.17225673749865328, 0.15559209747801794, 0.9752992044136976),
    )

    # Shorting should reduce volatility
    volatility = ef.portfolio_performance()[1]
    ef_long_only = setup_efficient_frontier()
    ef_long_only.min_volatility()
    long_only_volatility = ef_long_only.portfolio_performance()[1]
    assert volatility < long_only_volatility
def calculate_optimized_portfolio(
    tickers: Tuple[str],
    strategy: str,
    expected_return: str,
    risk_model: str,
    portfolio_value: float,
    risk_free: float,
    risk_aversion: float,
    target_risk: float,
    target_return: float,
    tiingo_api_key: Optional[str],
    verbose: bool,
) -> None:

    tiingo_api_key = tiingo_api_key or os.environ.get("TIINGO_API_KEY")
    if tiingo_api_key is None:
        raise RuntimeError(
            "Tiingo API key not found. Please pass in an api key or set the "
            "TIINGO_API_KEY environment variable")
    stock_data = get_stock_data(tickers, tiingo_api_key)
    expected_returns = EXPECTED_RETURN_METHODOLOGY[expected_return](stock_data)
    cov_matrix = RISK_MODELS[risk_model](stock_data)
    efficient_frontier = EfficientFrontier(expected_returns, cov_matrix)

    if strategy == "max_sharpe":
        raw_weights = efficient_frontier.max_sharpe(risk_free_rate=risk_free)
    elif strategy == "min_vol":
        raw_weights = efficient_frontier.min_volatility()
    elif strategy == "eff_risk":
        raw_weights = efficient_frontier.efficient_risk(
            target_risk=target_risk, risk_free_rate=risk_free)
    elif strategy == "eff_return":
        raw_weights = efficient_frontier.efficient_return(
            target_return=target_return)

    cleaned_weights = efficient_frontier.clean_weights()
    click.echo(cleaned_weights)
    latest_prices = get_latest_prices(stock_data)
    discrete_weights = DiscreteAllocation(cleaned_weights, latest_prices,
                                          portfolio_value)
    discrete_weights.lp_portfolio(verbose=verbose)
    efficient_frontier.portfolio_performance(verbose=True)
def test_min_volatility_short():
    ef = EfficientFrontier(
        *setup_efficient_frontier(data_only=True), weight_bounds=(None, None)
    )
    w = ef.min_volatility()
    assert isinstance(w, dict)
    assert set(w.keys()) == set(ef.tickers)
    assert set(w.keys()) == set(ef.expected_returns.index)
    np.testing.assert_almost_equal(ef.weights.sum(), 1)
    np.testing.assert_allclose(
        ef.portfolio_performance(),
        (0.1719799158957379, 0.15559547854162945, 0.9734986722620801),
    )

    # Shorting should reduce volatility
    volatility = ef.portfolio_performance()[1]
    ef_long_only = setup_efficient_frontier()
    ef_long_only.min_volatility()
    long_only_volatility = ef_long_only.portfolio_performance()[1]
    assert volatility < long_only_volatility
def handle_data(context, data):
    date = data.today()
    if date in context.balance_dates:
        temp = {}
        for code in context.stocks:
            history_price = data.history_bars(code,
                                              context.expected_return_days,
                                              '1d', 'close')
            if history_price is not None:
                temp.update({code: history_price})
        history_prices = pd.DataFrame(temp)
        mu = expected_returns.mean_historical_return(history_prices)
        if context.cov_method == 'sample':
            S = risk_models.sample_cov(history_prices)
        elif context.cov_method == 'semi':
            S = risk_models.semicovariance(history_prices)
        elif context.cov_method == 'exp_cov':
            S = risk_models.exp_cov(history_prices)

        ef = EfficientFrontier(mu, S)

        if context.opt_criterion == 'max_sharpe':
            weights = ef.max_sharpe()
        elif context.opt_criterion == 'efficient_return':
            weights = ef.efficient_return(context.target_return)
        elif context.opt_criterion == 'efficient_risk':
            weights = ef.efficient_risk(context.targe_risk,
                                        context.risk_free_rate)
        elif context.opt_criterion == 'min_volatility':
            weights = ef.min_volatility()

        if context.cleaned_weights is True:
            weights = ef.clean_weights()

        weight = []
        prices = []
        for code in context.stocks:
            weight.append(weights[code])
            prices.append(data.latest_price(code, "1d"))

        data.order_target_percent(context.stocks, weight, prices)
Example #18
def handle_bar(context, api):

    date = api.now()

    #if date in context.balance_dates:
    history_prices = {}
    for stock in context.stocks:
        history_price = api.history_bars(stock, context.expected_return_days,
                                         '1d', 'close')
        history_prices.update({stock: history_price})

    history_prices = pd.DataFrame(history_prices)
    mu = expected_returns.mean_historical_return(history_prices)
    if context.cov_method == 'sample':
        S = risk_models.sample_cov(history_prices)
    elif context.cov_method == 'semi':
        S = risk_models.semicovariance(history_prices)
    elif context.cov_method == 'exp_cov':
        S = risk_models.exp_cov(history_prices)

    ef = EfficientFrontier(mu, S)

    if context.opt_criterion == 'max_sharpe':
        weights = ef.max_sharpe()
    elif context.opt_criterion == 'efficient_return':
        weights = ef.efficient_return(context.target_return)
    elif context.opt_criterion == 'efficient_risk':
        weights = ef.efficient_risk(context.targe_risk, context.risk_free_rate)
    elif context.opt_criterion == 'min_volatility':
        weights = ef.min_volatility()

    if context.cleaned_weights is True:
        weights = ef.clean_weights()
    prices = []
    weight = []
    for stock in context.stocks:
        weight.append(weights[stock])
        prices.append(api.latest_price(stock, "1d"))
    api.order_target_percent(context.stocks, weight, prices)
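
The if/elif chains above that pick a covariance estimator (and an optimization criterion) could equally be written as dispatch tables; a sketch of that design choice, using the same pypfopt.risk_models functions, is:

from pypfopt import risk_models

# Dispatch-table alternative to the covariance if/elif chain (illustrative refactor).
COV_METHODS = {
    'sample': risk_models.sample_cov,
    'semi': risk_models.semicovariance,
    'exp_cov': risk_models.exp_cov,
}

def estimate_cov(history_prices, method='sample'):
    try:
        return COV_METHODS[method](history_prices)
    except KeyError:
        raise ValueError("unknown covariance method: %s" % method)
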
 def operate(self) -> tuple:
     """ポートフォリオを最適化する関数"""
     #平均リターンを求める
     #returns.mean() * 252
     mu = expected_returns.mean_historical_return(self.financial_data)
     #リスク(分散)を求める
     #Get the sample covariance matrix
     S = risk_models.sample_cov(self.financial_data)
     #効率的フロンティアの作成
     ef = EfficientFrontier(mu, S)
     weights = ef.min_volatility()
     cleaned_weights = ef.clean_weights()
     weight_keys = ["未選択", "未選択", "未選択", "未選択"]
     weight_values = ["ー", "ー", "ー", "ー"]
     count = 0
     for k, v in dict(cleaned_weights).items():
         weight_keys[count] = k
         weight_values[count] = v
         count += 1
     # Output the optimal portfolio's weights and its return / risk / Sharpe ratio
     return weight_keys, weight_values, ef.portfolio_performance(
         verbose=False)
def min_volatility(stocks_in_portfolio):
     stock_data = web.DataReader(stocks_in_portfolio,data_source='yahoo',start=start_date,end=end_date)['Adj Close']
     stock_data.sort_index(inplace=True)
     
     mu = expected_returns.mean_historical_return(stock_data)
     S = risk_models.sample_cov(stock_data)
     lower_bound=0.30/len(stocks_in_portfolio)

     # Optimise for minimum volatility, forcing a small minimum weight in every stock
     ef = EfficientFrontier(mu,S,weight_bounds=(lower_bound,1))
     #Need to change the risk free rate 
     raw_weights = ef.min_volatility()
     cleaned_weights = ef.clean_weights()
     cleaned_weights_df=pd.DataFrame.from_dict(cleaned_weights, orient='index')
     #remove weights with 0%
     cleaned_weights_df=cleaned_weights_df.loc[(cleaned_weights_df!=0).any(axis=1)]
     #print("Minimum-volatility portfolio with a per-stock lower bound\n")
     # print(cleaned_weights)
     final_return= ef.portfolio_performance(verbose=True)
     index=['Expected Annual Return','Expected Annual Volatility','Sharpe Ratio']
     final_return_df = pd.DataFrame(final_return,index=index)
     final_df=pd.concat([cleaned_weights_df,final_return_df])
     return final_df
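
Setting weight_bounds=(lower_bound, 1) with lower_bound = 0.30 / N forces every one of the N stocks to carry at least 30%/N of the portfolio, so no holding is optimised away entirely. A one-line worked check with an illustrative five-stock list:

# Worked check of the lower bound: with 5 stocks, every weight is forced to >= 6%.
stocks_in_portfolio = ['A', 'B', 'C', 'D', 'E']   # illustrative tickers
lower_bound = 0.30 / len(stocks_in_portfolio)
assert abs(lower_bound - 0.06) < 1e-12
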
Example #21
f = dfitems.drop_duplicates(subset=['date', 'code'], keep='last')
filtered = f.pivot(index='date', columns='code', values='price')

cols = filtered.columns
filtered[cols] = filtered[cols].apply(pd.to_numeric, errors='coerce')
filtered = filtered.dropna()
print(filtered)
mu = mean_historical_return(filtered)
S = CovarianceShrinkage(filtered).ledoit_wolf()
ef = EfficientFrontier(mu, S)
max_sharpe = ef.max_sharpe()
max_sharpe_performance = ef.portfolio_performance()

minVolFrontier = EfficientFrontier(mu, S)
min_volatility = minVolFrontier.min_volatility()
min_vol_performance = minVolFrontier.portfolio_performance()

print(max_sharpe_performance, "MAX SHARPE PERFORMANCE")
print(min_vol_performance, "MIN VOL PERFORMANCE")

addModelPrediction("max_sharpe_ratio", max_sharpe_performance[2],
                   max_sharpe_performance[0], max_sharpe_performance[1],
                   max_sharpe)
addModelPrediction("min_volatility", min_vol_performance[2],
                   min_vol_performance[0], min_vol_performance[1],
                   min_volatility)

url = "https://4lm8nt9c67.execute-api.eu-central-1.amazonaws.com/dev/scrape/daily"
codes = [
    "AEC", "AEE", "AEN", "AEP", "AEU", "AEZ", "AGL", "ALH", "ALI", "ALR",
Example #22
def mo_portfolio(budget, yesterday, end_period):

    # debug
    print(f'Hello from mo_portfolio')
    print(f'- budget:\t\t {budget}')
    print(f'- yesterday:\t {yesterday}')
    print(f'- end_period:\t {end_period}')

    if end_period is None:
        return 'False'

    global g_budget
    g_budget = budget
    global g_date
    g_date = [yesterday, end_period]
    # yesterday: day before the creation of the portfolio
    # end_period: end of investment

    chart = pd.DataFrame()
    # chart: adjusted closes up to yesterday
    for s in stocks:
        chart = pd.concat(
            [chart, dataframes[s]['Adj Close'].loc[:yesterday, ]], axis=1)
    chart.columns = stocks
    # compute continuously compounded (cc) returns grouped by year (freq = 'Y')
    chart_rt = {}
    for s in chart:
        tmp = chart[s].groupby(pd.Grouper(freq="Y"))
        tmp2 = tmp.mean()
        chart_rt[s] = np.log(tmp2 / tmp2.shift(1))
    chart_rt = pd.DataFrame.from_dict(chart_rt)
    chart_rt = chart_rt.dropna()
    chart_rt.columns = [
        "AAPL CC returns", "NVDA CC returns", "KO CC returns", "UL CC returns",
        "BAC CC returns", "AXP CC returns"
    ]

    # adding transaction costs (1.5% fee per share)
    chart = chart.apply(lambda x: x + (x * 0.015))

    # Optimal portfolio

    # computes CC return on year granularity
    avg_returns = expected_returns.mean_historical_return(chart)
    # sample covariance matrix
    S = risk_models.sample_cov(chart)
    ef = EfficientFrontier(avg_returns, S)

    # Minimize the volatily of the portfolio (Markowitz)
    weights = ef.min_volatility()
    # round the weight values; they may not add up to exactly 1 but should be close
    weights = ef.clean_weights()

    Mop_pw = weights

    opt_return, opt_risk, _ = ef.portfolio_performance(verbose=False)
    global g_expected_return_volat
    g_expected_return_volat = [opt_return, opt_risk]

    recap = {}
    for s in weights:
        # print(f'{s} budget {budget}, {type(budget)}')     # debug
        # print(f'{s} weights[s]/chart[s].iloc[-1] {weights[s]/chart[s].iloc[-1]}, {type(weights[s]/chart[s].iloc[-1])}')   # debug
        recap[s] = [int(np.floor(budget * weights[s] / chart[s].iloc[-1]))
                    ]  # number of shares
        price_no_fee = np.round(chart[s].iloc[-1] -
                                (chart[s].iloc[-1] * 1.5 / 101.5),
                                decimals=2)
        recap[s].append(price_no_fee)  # price for each shares
        recap[s].append(np.round(price_no_fee * 0.015,
                                 2))  # transaction costs 1.5%
        tot_cost = np.around(recap[s][0] * (recap[s][1] + recap[s][2]),
                             decimals=2)
        recap[s].append(
            tot_cost
        )  # total cost of the investment in s (shares * (price for each s + transaction costs))

    recap = pd.DataFrame.from_dict(recap, orient='index')
    recap.columns = [
        'Num of shares', 'Price for each share $', 'Transaction costs $',
        'Purchase cost $'
    ]

    global g_recap
    g_recap = recap

    total = 0
    for _, row in recap.iterrows():
        total += row['Purchase cost $']

    total = np.around(total, decimals=2)

    global g_spent
    g_spent = total
    global g_left
    g_left = str(np.around(budget - total, decimals=2))

    price_end = {}
    tot_port = 0
    for s in dataframes:
        price_end[s] = dataframes[s]['Adj Close'].loc[end_period]

    act_return = 0
    for index, row in recap.iterrows():
        tot_port += np.around(row['Num of shares'] *
                              (price_end[index] + row['Transaction costs $']),
                              decimals=2)
        rtn = (price_end[index] + row['Transaction costs $']
               ) / recap.loc[index, 'Price for each share $'] - 1
        act_return += weights[index] * rtn

    global g_returns
    g_returns = str(np.around(tot_port, decimals=2)) + ' [' + str(
        np.round(100 * act_return, decimals=2)) + '%]'
    print(g_returns)

    return "True"
Example #23
weights = ef.max_sharpe()
x = ef.portfolio_performance(verbose=True)
cleaned_weights = ef.clean_weights()
ef.save_weights_to_file("weights.txt")  # saves to file
print(cleaned_weights)

x = ef.portfolio_performance(verbose=True)
from pypfopt.discrete_allocation import DiscreteAllocation, get_latest_prices
latest_prices = get_latest_prices(df)
da = DiscreteAllocation(weights, latest_prices, total_portfolio_value=20000)
allocation, leftover = da.lp_portfolio()
print(allocation)
print(leftover)

print()
print('min_volatility')
ef = EfficientFrontier(mu, S)
ef.add_objective(
    objfunc.L2_reg,
    gamma=0.1)  # incentivize the optimizer to choose non-zero weights
w = ef.min_volatility()
cw = ef.clean_weights()
x = ef.portfolio_performance(verbose=True)
print(cw)
from pypfopt.discrete_allocation import DiscreteAllocation, get_latest_prices
latest_prices = get_latest_prices(df)
da = DiscreteAllocation(cw, latest_prices, total_portfolio_value=20000)
allocation, leftover = da.lp_portfolio()
print(allocation)
print(leftover)
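
objective_functions.L2_reg adds a gamma * ||w||^2 penalty to the objective, which discourages the optimizer from concentrating the portfolio in a handful of assets. A hedged sketch of how varying gamma changes the number of non-zero weights (it assumes the same mu and S as above):

from pypfopt import EfficientFrontier, objective_functions

# Sketch: larger gamma spreads weight across more assets (mu and S as above).
for gamma in (0.0, 0.1, 1.0):
    ef_reg = EfficientFrontier(mu, S)
    if gamma > 0:
        ef_reg.add_objective(objective_functions.L2_reg, gamma=gamma)
    ef_reg.min_volatility()
    n_nonzero = sum(1 for v in ef_reg.clean_weights().values() if v > 0)
    print("gamma={}: {} non-zero weights".format(gamma, n_nonzero))
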
Example #24
print(nullin_df.isnull().sum())
# Annual return
mu = expected_returns.mean_historical_return(df_stocks)
# Portfolio variance (sample covariance)
Sigma = risk_models.sample_cov(df_stocks)
# Maximum Sharpe ratio
ef = EfficientFrontier(
    mu, Sigma,
    weight_bounds=(0, 1))  # negative weight bounds would allow shorting of stocks
sharpe_pfolio = ef.max_sharpe(
)  # add_objective may be used to enforce minimum weightings for individual stocks
sharpe_pwt = ef.clean_weights()
print(sharpe_pwt)
ef.portfolio_performance(verbose=True)
ef1 = EfficientFrontier(mu, Sigma, weight_bounds=(0, 1))
minvol = ef1.min_volatility()
minvol_pwt = ef1.clean_weights()
print(minvol_pwt)
ef1.portfolio_performance(verbose=True, risk_free_rate=0.27)
cl_obj = CLA(mu, Sigma)
ax = pplt.plot_efficient_frontier(cl_obj, showfig=False)
ax.xaxis.set_major_formatter(FuncFormatter(lambda x, _: '{:.0%}'.format(x)))
ax.yaxis.set_major_formatter(FuncFormatter(lambda y, _: '{:.0%}'.format(y)))
latest_prices = get_latest_prices(df_stocks)
allocation_minv, rem_minv = DiscreteAllocation(
    minvol_pwt, latest_prices, total_portfolio_value=100000).lp_portfolio()
print(allocation_minv)
print(
    "Cash remaining after building the minimum-volatility portfolio: {:.2f} rubles"
    .format(rem_minv))
print()
Example #25
df = yf.download("AAPL,GOOG,FB", start=back_test_date, end=start_date)
df = df["Adj Close"].dropna(how="all")

test_df = yf.download("AAPL,GOOG,FB", start=start_date, end=end_date)
test_df = test_df["Adj Close"].dropna(how="all")

returns = df.pct_change().dropna()
hrp = HRPOpt(returns)
weights = hrp.optimize()
performance = hrp.portfolio_performance(verbose=True)

    optimizer = "mvo"
    mu = mean_historical_return(df)
    S = CovarianceShrinkage(df).ledoit_wolf()
    ef = EfficientFrontier(mu, S)
    weights = ef.max_sharpe() if optimizer == "msr" else ef.min_volatility()
    cleaned_weights = ef.clean_weights() # maybe remove this bc simplicity
    performance = ef.portfolio_performance()


weights = pd.Series(weights)
total_asset_returns = (test_df.iloc[-1]-test_df.iloc[0])/test_df.iloc[0]
test_months = 1

realized_annual_returns = (test_df.iloc[-1]/test_df.iloc[0])**(12/test_months)-1



annual_returns = (test_df.iloc[-1]/test_df.iloc[0])**(12/test_months)-1
weights*annual_returns
portfolio_returns = sum(weights*total_asset_returns)
"""
Expected annual return: 33.0%
Annual volatility: 21.7%
Sharpe Ratio: 1.43

Discrete allocation: {'MA': 14, 'FB': 12, 'PFE': 51, 'BABA': 5, 'AAPL': 5,
                      'AMZN': 0, 'BBY': 9, 'SBUX': 6, 'GOOG': 1}
Funds remaining: $12.15
"""

# Long-only minimum volatility portfolio, with a weight cap and regularisation
# e.g. if we want at least 15/20 tickers to have non-negligible weights, and no
# asset should have a weight greater than 10%
ef = EfficientFrontier(mu, S, weight_bounds=(0, 0.10), gamma=1)
weights = ef.min_volatility()
print(weights)
ef.portfolio_performance(verbose=True)

"""
{
    "GOOG": 0.07350956640872872,
    "AAPL": 0.030014017863649482,
    "FB": 0.1,
    "BABA": 0.1,
    "AMZN": 0.020555866446753328,
    "GE": 0.04052056082259943,
    "AMD": 0.00812443078787937,
    "WMT": 0.06506870608367901,
    "BAC": 0.008164561664321555,
    "GM": 0.1,
    ...
}
"""

c1, c2, c3, c4 = st.beta_columns((1, 1, 1, 1))
#----- Portfolio No. 1 (general)
#Calculate portofolio mu and S
mu = expected_returns.mean_historical_return(df_t)
if riskmo:
    S = CovarianceShrinkage(df_t).ledoit_wolf()
else:
    S = risk_models.sample_cov(df_t)
# Optimise the portfolio
ef = EfficientFrontier(mu, S, gamma=2)  # Use regularisation (gamma=2)
if weightsmo:
    weights = ef.max_sharpe()
else:
    weights = ef.min_volatility()
cleaned_weights = ef.clean_weights(cutoff=cutoff, rounding=3)
ef.portfolio_performance()

c1.subheader('Portfolio No. 1')
c1.write(
    'The portfolio proposed from the historical prices of the selected stocks has the following characteristics'
)
c1.write('Initial portfolio value: ' + str(port_value) + '€')
c1.write('Sharpe Ratio: ' + str(round(ef.portfolio_performance()[2], 2)))
c1.write('Portfolio return: ' +
         str(round(ef.portfolio_performance()[0] * 100, 2)) + '%')
c1.write('Portfolio volatility: ' +
         str(round(ef.portfolio_performance()[1] * 100, 2)) + '%')
# Allocate
latest_prices = get_latest_prices(df_t)
    def cv(self,
           back_test_months,
           data,
           optimizer,
           test_months,
           annual_risk_free_rate=0.02):
        """Cross-Validation backtesting method

        Args:
            back_test_months (int): number of backtesting months
            data (pandas.Series): data that includes both training and testing data
            optimizer (str): portfolio optimizer for PyPortfolioOpt
            test_months (int): number of testing months
            annual_risk_free_rate (float, optional): annual risk free rate used in calculating Sharpe ratio. Defaults to 0.02.

        Returns:
            [pandas.Series, float]: expected and realised asset performance
        """

        embargo = np.round(0.01 * len(data), decimals=0)
        all_weights = np.zeros((back_test_months, np.shape(data)[1]))
        all_realised_annual_return = np.zeros(back_test_months)
        all_realised_annual_volatility = np.zeros(back_test_months)
        all_realised_sharpe_ratio = np.zeros(back_test_months)

        for i in range(back_test_months):

            test_start = i * len(data) / back_test_months
            test_end = test_start + len(data) / back_test_months - 1
            test = data.iloc[int(test_start):int(test_end), :]
            train = data.iloc[np.r_[0:int(test_start),
                                    int(test_end) + int(embargo):len(data)], :]

            if optimizer == "hrp":
                train_returns = train.pct_change().dropna()
                hrp = HRPOpt(train_returns)
                weights = hrp.optimize()
                weights = pd.Series(weights)
                all_weights[i] = weights
                performance = hrp.portfolio_performance(verbose=True)
            else:
                mu = mean_historical_return(train)
                S = CovarianceShrinkage(train).ledoit_wolf()
                ef = EfficientFrontier(mu, S)
                weights = ef.max_sharpe(
                ) if optimizer == "msr" else ef.min_volatility()
                weights = pd.Series(weights)
                all_weights[i] = weights
                performance = ef.portfolio_performance()

            all_realised_annual_return[i] = sum(all_weights[i] * ((test.iloc[
                (len(test) - 1)] / test.iloc[0])**(12 / test_months) - 1))
            all_realised_annual_volatility[i] = sum(
                all_weights[i] * np.std(test.pct_change().dropna()) *
                np.sqrt(251))
            all_realised_sharpe_ratio[i] = (
                all_realised_annual_return[i] -
                annual_risk_free_rate) / all_realised_annual_volatility[i]

        weights = np.mean(all_weights, axis=0)  # per-asset average across folds
        realised_annual_return = np.mean(all_realised_annual_return)
        realised_annual_volatility = np.mean(all_realised_annual_volatility)
        realised_sharpe_ratio = np.mean(all_realised_sharpe_ratio)

        return weights, performance, realised_annual_return, realised_annual_volatility, realised_sharpe_ratio
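
The cv method holds out one contiguous test block per fold and also drops an "embargo" of roughly 1% of the observations immediately after that block from the training set, so information adjacent to the test period cannot leak into training. A small sketch of just the index arithmetic, with made-up sizes:

import numpy as np

# Illustrative embargoed split for fold i, mirroring the slicing in cv() above.
n_obs, back_test_months, i = 120, 12, 3        # made-up sizes
embargo = int(np.round(0.01 * n_obs))
fold_len = n_obs / back_test_months
test_start = int(i * fold_len)
test_end = int(test_start + fold_len - 1)
test_idx = np.arange(test_start, test_end)
train_idx = np.r_[0:test_start, test_end + embargo:n_obs]
print(len(test_idx), len(train_idx))           # the embargo rows belong to neither set
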
Example #29
    w.set_gid(w.get_label())

    # we don't want to draw the edge of the pie
    w.set_edgecolor("none")

for w in pies[0]:
    # create shadow patch
    s = Shadow(w, -0.01, -0.01)
    s.set_gid(w.get_gid() + "_shadow")
    s.set_zorder(w.get_zorder() - 0.1)
    ax.add_patch(s)
plt.show()
fig.savefig("Markovitz' portfolio with maximum return.png")

# Calculating weights for the minimum volatility portfolio
raw_weights_minvol = ef.min_volatility()
cleaned_weights_minvol = ef.clean_weights()

# Showing portfolio performance
print(cleaned_weights_minvol)
ef.portfolio_performance(verbose=True)
size = list(cleaned_weights_minvol.values())
print(size)
plt.pie(size, labels=l, autopct='%1.1f%%')
plt.title('Min Risk')
plt.show()

#Calculating an exponentially weighted portfolio
Sigma_ew = risk_models.exp_cov(df, span=180, frequency=252)
mu_ew = expected_returns.ema_historical_return(df, frequency=252, span=180)
# Calculate the efficient frontier
Example #30
cov_matrix_annual

from pypfopt.efficient_frontier import EfficientFrontier
from pypfopt import risk_models
from pypfopt import expected_returns

# Portfolio Optimization

# Calculate the expected returns and annualized sample covariance matrix of asset returns
mu = expected_returns.mean_historical_return(df, compounding=True)
S = risk_models.sample_cov(df)

# Optimize (minimum volatility here; max Sharpe is the commented-out alternative)
ef = EfficientFrontier(mu, S, weight_bounds=(0.05, 0.2))
# weights = ef.max_sharpe(risk_free_rate=0.01)
weights = ef.min_volatility()
ef.save_weights_to_file('weights.csv')
cleaned_weights = ef.clean_weights()
print(cleaned_weights)
ef.portfolio_performance(verbose=True)

# Get the discrete allocation of each share per stock
from pypfopt.discrete_allocation import DiscreteAllocation, get_latest_prices

latest_prices = get_latest_prices(df)
weights = cleaned_weights
da = DiscreteAllocation(weights, latest_prices, total_portfolio_value=15000)

allocation, leftover = da.lp_portfolio()
print('Discrete allocation:', allocation)
print('Funds remaining: ${:.2f}'.format(leftover))
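
DiscreteAllocation also offers greedy_portfolio(), an iterative-rounding alternative to the integer-programming lp_portfolio() used above; a short hedged sketch with the same inputs:

# Greedy-rounding alternative to lp_portfolio(), reusing weights and latest_prices from above.
da_greedy = DiscreteAllocation(weights, latest_prices, total_portfolio_value=15000)
allocation_greedy, leftover_greedy = da_greedy.greedy_portfolio()
print('Greedy discrete allocation:', allocation_greedy)
print('Funds remaining: ${:.2f}'.format(leftover_greedy))
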
Example #31
def optimize_min_volatility(expected_returns_df, cov_matrix):

    EFOptimizer = EfficientFrontier(expected_returns_df, cov_matrix)
    weights_dict = EFOptimizer.min_volatility()

    return weights_dict
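
A hedged end-to-end usage sketch for optimize_min_volatility: the price file and column layout are assumptions, and the inputs are built with the standard pypfopt helpers.

import pandas as pd
from pypfopt import expected_returns, risk_models

# `prices.csv` is a hypothetical file of daily close prices (dates as rows, tickers as columns).
prices = pd.read_csv("prices.csv", index_col=0, parse_dates=True)
expected_returns_df = expected_returns.mean_historical_return(prices)
cov_matrix = risk_models.sample_cov(prices)
weights_dict = optimize_min_volatility(expected_returns_df, cov_matrix)
print(weights_dict)
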
Example #32
def main():
    """ Runs the main descriptive stadistict about stocks and also get optimal portafolio
    """
        
    t0 = dt.datetime.now()

    args = utils.get_args()
    all_config = yaml.safe_load(open(args.config_file_path, "r"))

    DATA = all_config['output']['data_folder']
    input_data_path = all_config['stocks']['data']
    logger = logger_utils.log_creator(all_config['output']['log_folder'], log_name='get_stats')
    start_date, end_date = utils.get_start_end(all_config)

    df_ticks_path = os.path.join('./src/data', input_data_path)
    logger.info("Reading tick and weights from %s" % df_ticks_path)
    weights = utils.get_ticks_data(df_ticks_path)

    data_path = utils.filename_maker('stocks_', DATA, start_date, end_date)
    data_sp_path = utils.filename_maker('sp500_', DATA, start_date, end_date)

    df = pd.read_csv(data_path)
    sp = pd.read_csv(data_sp_path)

    df['Date'] = pd.to_datetime(df['Date'])
    df.set_index('Date', inplace=True)
    df_pc = df.pct_change().dropna()

    sp['Date'] = pd.to_datetime(sp['Date'])
    sp.set_index('Date', inplace=True)
    sp_pc = sp.pct_change().dropna()

    weights_np = weights['WEIGHT'].to_numpy()

    anual_cov_matrix = df_pc.cov()*252
    volatilidad_por_anual = np.sqrt(np.dot(weights_np.T, np.dot(anual_cov_matrix, weights_np)))
    logger.info("Anual portafolio volatility is %.2f" % volatilidad_por_anual)

    portafolio_anual_return = np.sum(df_pc.mean()*weights_np)*252
    logger.info("Anual portafolio return is %.2f" % portafolio_anual_return)

    
    logger.info("Mean historical return for each stock %s" % round((df_pc.mean()*252),2))
    logger.info("Anual volatility for each stock %s" % round(np.std(df_pc)*np.sqrt(252),2))
    
    # np.sum(df_pc.mean()*weights['WEIGHT'].to_numpy())*252
    ticks = weights['TICK'].to_list()
    skew_list = []
    kurtosis_list = []
    shapiro_list = []
    annual_vol = []
    annual_returns = []
    for tk in ticks:
        skewness = np.round(df_pc[tk].skew(), 3)
        kurt = np.round(df_pc[tk].kurtosis() + 3, 3)
        shap = np.round(shapiro(df_pc[tk])[1], 3)
        vol = np.round(df_pc[tk].std()*np.sqrt(252), 3)
        rtn = np.round((df_pc[tk].mean()*252), 3)
        
        skew_list.append(skewness)
        kurtosis_list.append(kurt)
        shapiro_list.append(shap)
        annual_vol.append(vol)
        annual_returns.append(rtn)

    logger.info("This is the summary of the stocks regarding the anual return, anual volatility, kurtosis, shapiro and skew.")
    stocks_summary = pd.DataFrame({'STOCK': ticks,
                                   'SKEW': skew_list,
                                   'KURTOSIS': kurtosis_list,
                                   'SHAPIRO': shapiro_list,
                                   'ANNUAL_VOL': annual_vol,
                                   'ANNUAL_RETURN': annual_returns})
    stocks_summary.set_index('STOCK', inplace=True)

    logger.info(stocks_summary)

    logger.info("Lets now calculate the anual covariance between stoks")
    cov_matriz = df_pc.cov()*252
    logger.info(cov_matriz)

    logger.info("Using Python Portafolio")
    mu = expected_returns.mean_historical_return(df) 
    sigma = risk_models.sample_cov(df)
    ef = EfficientFrontier(mu, sigma)
    
    logger.info("Showing portafolio with max sharpe rate")
    raw_weights_maxsharpe = ef.max_sharpe()
    cleaned_weights_maxsharpe = ef.clean_weights()
    logger.info(cleaned_weights_maxsharpe)
    # Show portfolio performance 
    logger.info(ef.portfolio_performance(verbose=True))

    desire_return = 0.20
    ef = EfficientFrontier(mu, sigma)  # fresh instance: a solved frontier cannot be re-optimised
    ef.efficient_return(desire_return)
    logger.info("Calculating the portfolio that should deliver a return of %s" % desire_return)
    logger.info(ef.clean_weights())

    logger.info("Showing portafolio with lowest risk for a return of %s" % desire_return)
    raw_weights_minvol = ef.min_volatility()
    cleaned_weights_minvol = ef.clean_weights()
    logger.info(cleaned_weights_minvol)
    logger.info(ef.portfolio_performance(verbose=True))


    t1 = dt.datetime.now()