def IFE_c(self, Rf=0, year_start=1996, year_finish=2016, window=10):
    ## With monthly data, calculate the Efficient frontier
    ## year by year. So let's do it.
    self.set_Rf(Rf)

    nf_flag = 1
    all_portfolios = []
    for year_test in range(year_start, year_finish - window + 1):  # +1 !!
        # Set the dates
        self.pf.set_interval(dt.datetime(year_test, 1, 1),
                             dt.datetime(year_test + window, 1, 1))
        # print(self.get_Returns()[1])  # Check that it works
        # portfolios = self.Lintnerian_efficient_frontier(norm="none", maxRf=0.00031)
        # optimal, portfolios = self.efficient_frontier(kind="Markowitz")
        # optimal, portfolios = self.efficient_frontier(kind="Normal")
        optimal, portfolios = self.efficient_frontier(kind="Tangent")
        all_portfolios.append(portfolios)
        self.plot_allocations(portfolios,
                              labels=["Efficient Frontiers", "Risk (std)", "Return (%)"],
                              legend=["Frontier " + str(year_test + window)],
                              nf=nf_flag)
        nf_flag = 0

    gl.savefig(folder_images + 'effAll.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])
def plot_learnt_function(X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                         x_grid, y_grid, cf_a, folder_images):
    gl.init_figure()
    ax1 = gl.scatter(X_data_tr, Y_data_tr, lw=3, legend=["tr points"],
                     labels=["Data", "X", "Y"], alpha=0.2)
    ax2 = gl.scatter(X_data_val, Y_data_val, lw=3, legend=["val points"], alpha=0.2)
    gl.set_fontSizes(ax=[ax1, ax2], title=20, xlabel=20, ylabel=20,
                     legend=20, xticks=12, yticks=12)
    gl.plot(x_grid, y_grid, legend=["training line"])
    gl.savefig(folder_images + 'Training_Example_Data.png',
               dpi=100, sizeInches=[14, 4])
def create_image_training_epoch(X_data_tr, Y_data_tr, X_data_val, Y_data_val,
                                tr_loss, val_loss, x_grid, y_grid, cf_a,
                                video_fotograms_folder, epoch_i):
    """
    Creates the image of the training and validation accuracy
    """
    gl.init_figure()
    ax1 = gl.subplot2grid((2, 1), (0, 0), rowspan=1, colspan=1)
    ax2 = gl.subplot2grid((2, 1), (1, 0), rowspan=1, colspan=1)
    plt.title("Training")

    ## First plot with the data and predictions !!!
    ax1 = gl.scatter(X_data_tr, Y_data_tr, ax=ax1, lw=3, legend=["tr points"],
                     labels=["Analysis of training", "X", "Y"])
    gl.scatter(X_data_val, Y_data_val, lw=3, legend=["val points"])
    gl.plot(x_grid, y_grid, legend=["Prediction function"])
    gl.set_zoom(xlimPad=[0.2, 0.2], ylimPad=[0.2, 0.2],
                X=X_data_tr, Y=Y_data_tr)

    ## Second plot with the evolution of parameters !!!
    ax2 = gl.plot([], tr_loss, ax=ax2, lw=3,
                  labels=["RMSE. lr: %.3f" % cf_a.lr, "epoch", "RMSE"],
                  legend=["train"])
    gl.plot([], val_loss, lw=3, legend=["validation"], loc=3)

    gl.set_fontSizes(ax=[ax1, ax2], title=20, xlabel=20, ylabel=20,
                     legend=20, xticks=12, yticks=12)

    # Set final properties and save figure
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                       wspace=.30, hspace=0.30)
    gl.savefig(video_fotograms_folder + '%i.png' % epoch_i,
               dpi=100, sizeInches=[14, 10], close=True, bbox_inches=None)
def plot_weights_network(model, folder_images):
    # Stack the weights and the bias of each layer into one matrix per layer
    weights = model.linear1.weight.detach().numpy()
    biases = model.linear1.bias.detach().numpy().reshape(-1, 1)
    neurons = np.concatenate((weights, biases), axis=1)

    weights2 = model.W2.detach().numpy()
    biases2 = model.b2.detach().numpy().reshape(-1, 1)
    neurons2 = np.concatenate((weights2, biases2), axis=0).T

    gl.init_figure()
    ax1 = gl.subplot2grid((1, 4), (0, 0), rowspan=1, colspan=2)
    ax2 = gl.subplot2grid((1, 4), (0, 3), rowspan=1, colspan=4)

    cmap = cm.get_cmap('coolwarm', 30)
    cax = ax1.imshow(neurons, interpolation="nearest", cmap=cmap)
    cax2 = ax2.imshow(neurons2, interpolation="nearest", cmap=cmap)

    # plt.xticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='vertical')
    # plt.yticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='horizontal')
    plt.colorbar(cax)
    # plt.colorbar(cax2)
    # ax1.set_xticks(data_df_train.columns)  # , rotation='vertical'
    # ax1.grid(True)
    plt.title('Weights')

    # labels = [str(x) for x in range(Nshow)]
    # ax1.set_xticklabels(labels, fontsize=20)
    # ax1.set_yticklabels(labels, fontsize=20)
    # Add colorbar, make sure to specify tick locations to match desired ticklabels
    plt.show()
    gl.savefig(folder_images + 'Weights.png',
               dpi=100, sizeInches=[2 * 8, 2 * 2])
def IFE_b(self, year_start=1996, year_finish=2016, window=10):
    ## Question b of the assignment
    all_returns = []
    all_covMatrices = []
    all_dates = []  # To store the dates of the estimation
    for year_test in range(year_start, year_finish - window + 1):  # +1 !!
        # Set the dates
        self.pf.set_interval(dt.datetime(year_test, 1, 1),
                             dt.datetime(year_test + window, 1, 1))
        ret = self.yearly_Return(self.get_MeanReturns())
        covMat = self.yearly_covMatrix(self.get_covMatrix())

        all_covMatrices.append(covMat)
        all_returns.append(ret)

        # Get the dates from any of the symbols of the portfolio
        dates = self.get_dates()
        all_dates.append(dates[-1])

    ## Plotting the returns
    all_returns = np.array(all_returns)
    # gl.plot(all_dates, all_returns[:, 0],
    #         labels=["Returns", "Time", "Return"],
    #         legend=[self.pf.symbols.keys()[0]])
    # gl.plot(all_dates, all_returns[:, 1],
    #         legend=[self.pf.symbols.keys()[1]], nf=0, na=0)

    ## 1) Plot the returns of all of them together for the eleven windows
    gl.plot(all_dates, all_returns,
            labels=["Average Return in 10 years", "Time (years)",
                    "Annual return of Assets"],
            legend=self.symbol_names)
    gl.savefig(folder_images + 'returnsAveAll.png',
               dpi=150, sizeInches=[2 * 8, 1.5 * 6])

    ## 2) Plot the covariance matrix for 9 years
    gl.set_subplots(2, 3)
    for i in range(6):
        gl.bar_3D(self.symbol_names, self.symbol_names, all_covMatrices[i],
                  labels=[str(year_start + window + i), "", ""],
                  fontsize=30, fontsizeA=19)
    gl.savefig(folder_images + 'covsAveAll.png',
               dpi=80, sizeInches=[4 * 8, 3 * 6])
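# yearly_Return and yearly_covMatrix are defined elsewhere; a minimal sketch
# of the usual monthly-to-annual conversion they presumably implement
# (compound the mean return, scale the covariance linearly in time).
# Function names here are assumptions for illustration:
def yearly_return(monthly_mean):
    # (1 + r_month)^12 - 1
    return (1.0 + monthly_mean) ** 12 - 1.0

def yearly_cov(monthly_cov):
    # Under i.i.d. monthly returns, covariance scales linearly with horizon
    return 12.0 * np.asarray(monthly_cov)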
def plot_evolution_RMSE(tr_loss, val_loss, cf_a, folder_images):
    gl.init_figure()
    ax1 = gl.plot([], tr_loss, lw=3,
                  labels=["RMSE loss and parameters. Learning rate: %.3f" % cf_a.lr,
                          "", "RMSE"],
                  legend=["train"])
    gl.plot([], val_loss, lw=3, legend=["validation"])

    gl.set_fontSizes(ax=[ax1], title=20, xlabel=20, ylabel=20,
                     legend=20, xticks=12, yticks=12)
    gl.savefig(folder_images + 'Training_Example_Parameters.png',
               dpi=100, sizeInches=[14, 7])
def generate_images(self, folder_path):
    self.sensor_images_path = folder_path
    # This function is supposed to generate the cleaning images:
    # basically the time series with the sensors.
    # It uses the pandas dataframe self.sensors_data_pd.
    # The images are stored in self.sensor_images_path using the column
    # names of the data as file names.
    for sensor_column in self.sensor_names:
        ## TODO: Big task... manage different windows in gl library
        # gl.init_figure()
        # gl.plot(self.sensors_data_pd[self.time_name],
        #         self.sensors_data_pd[sensor_column])
        path_image = self.sensor_images_path + sensor_column + ".png"
        gl.savefig(path_image, dpi=100, sizeInches=[])  # [2*8, 2*3]
def IFE_2c(self):
    ### Bond question !!
    myBond = CBond.CBOND(name="hola", freq=2, coupon=5.75, par=100.)
    # Set some properties
    myBond.set_price(95.0428)
    myBond.set_timeToMaturity(2.5)
    myBond.set_ytm(0.10)

    # Plot the compound price understanding
    ba.plot_compound_understanding()
    gl.savefig(folder_images + 'compoundUnders.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])
def IFE_2b(self):
    ### Calculate the convexity and duration of the bonds, and then get the
    # portfolio ones by adding the weighted values. (It is just a weighted
    # average of prices. Nothing more, nothing less.)
    myBond = CBond.CBOND(name="hola", freq=2, coupon=5.75, par=100.)
    # Set some properties
    myBond.set_price(95.0428)
    myBond.set_timeToMaturity(2.5)
    myBond.set_ytm(0.10)

    # Plot the compound price understanding
    ba.plot_compound_understanding()
    gl.savefig(folder_images + 'compoundUnders.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])
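# CBond's pricing internals are not shown here. For reference, the usual
# discounted-cash-flow price of a bond with annual coupon `coupon` paid in
# `freq` installments, yield-to-maturity `ytm` and `T` years to maturity is
# sketched below; the function name and conventions are assumptions:
def bond_price(coupon, par, ytm, T, freq=2):
    periods = int(round(T * freq))       # number of remaining coupon payments
    c = coupon / freq                    # coupon paid each period
    y = ytm / freq                       # per-period yield
    pv_coupons = sum(c / (1 + y) ** t for t in range(1, periods + 1))
    return pv_coupons + par / (1 + y) ** periods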
def IFE_i(self, Rf=0.0, year_start=1996, year_finish=2016, window=10):
    ### Timing. Check if, when the market had a big return, we increased
    ## the beta (higher return), and when the market had a negative return,
    ## our return was not so bad.
    ## The way to do this is to perform a quadratic curve fit.
    self.pf.set_interval(dt.datetime(year_start, 1, 1),
                         dt.datetime(year_finish, 1, 1))
    print(self.marketTiming())
    gl.savefig(folder_images + 'timingPosteriori.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    self.set_Rf(Rf)
    obtained_returns = []
    index_returns = []
    for year_test in range(year_start, year_finish - window):
        # Set the dates
        self.pf.set_interval(dt.datetime(year_test, 1, 1),
                             dt.datetime(year_test + window, 1, 1))
        # Obtain the market line !!
        w = self.TangentPortfolio(Rf=Rf)  # Obtain allocation
        self.set_allocation(w)
        # Once the model is found, we obtain the returns of the next year
        self.pf.set_interval(dt.datetime(year_test + window, 1, 1),
                             dt.datetime(year_test + window + 1, 1, 1))
        # self.pf.set_interval(dt.datetime(fin_year,1,1), dt.datetime(fin_year+1,1,1))
        returns = self.get_PortfolioReturn()  # Get the expected return for that year
        # dates = self.get_dates()
        # print(returns.shape)
        # print(returns.T.tolist()[0])
        obtained_returns.extend(returns.T.tolist()[0])
        index_returns.extend(self.get_indexReturns().T.tolist()[0])

    obtained_returns = np.array(obtained_returns)
    index_returns = np.array(index_returns)
    print(self.marketTiming(obtained_returns, index_returns))
    gl.savefig(folder_images + 'timingBacktest.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])
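# marketTiming is defined elsewhere; the comments above describe a quadratic
# fit of portfolio returns against index returns, i.e. a Treynor-Mazuy style
# regression R_p - R_f = alpha + beta (R_m - R_f) + gamma (R_m - R_f)^2,
# where gamma > 0 suggests timing ability. A minimal sketch (the function
# name and conventions are assumptions):
def market_timing_fit(port_returns, index_returns, Rf=0.0):
    x = np.asarray(index_returns).ravel() - Rf   # market excess returns
    y = np.asarray(port_returns).ravel() - Rf    # portfolio excess returns
    gamma, beta, alpha = np.polyfit(x, y, 2)     # coefficients, highest degree first
    return alpha, beta, gamma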
def IFE_a(self, year_start=1996, year_finish=2016, window=10):
    ## Basic, just look at the bloody graphs
    self.pf.set_interval(dt.datetime(year_start, 1, 1),
                         dt.datetime(year_finish, 1, 1))
    dates = self.get_dates()
    prices = self.pf.get_timeSeries(self.period)
    returns = self.get_Returns()
    # print(returns.shape)

    gl.plot(dates, prices,
            labels=["Monthly price of Symbols", "Time (years)", "Price (dollar)"],
            legend=list(self.pf.symbols.keys()), loc=2)
    gl.savefig(folder_images + 'pricesAll.png',
               dpi=150, sizeInches=[2 * 8, 1.5 * 6])

    gl.plot(dates, returns,
            labels=["Monthly return of the Symbols", "Time (years)", "Return (%)"],
            legend=list(self.pf.symbols.keys()))
    gl.savefig(folder_images + 'returnsAll.png',
               dpi=150, sizeInches=[2 * 8, 1.5 * 6])

    ## Distribution obtaining
    gl.set_subplots(2, 2)
    for i in range(4):
        gl.histogram(returns[:, i], labels=[self.symbol_names[i]])
    gl.savefig(folder_images + 'returnDistribution.png',
               dpi=150, sizeInches=[2 * 8, 1.5 * 6])

    ############## Possible Transformations ##################
    ws = [3, 4, 6, 8]
    gl.set_subplots(2, 2)
    for w in ws:
        means, ranges = bMl.get_meanRange(prices[:, 1], w)
        gl.scatter(means, ranges, lw=4,
                   labels=["", "mean", "range"],
                   legend=["w = %i" % (w)])
    gl.savefig(folder_images + 'rangeMean.png',
               dpi=150, sizeInches=[2 * 8, 1.5 * 6])
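# bMl.get_meanRange is external; presumably it chops the series into
# non-overlapping windows of length w and returns each window's mean and
# range (max - min), which the scatter above uses to look for a
# mean-dispersion relationship. A sketch under that assumption:
def get_mean_range(series, w):
    series = np.asarray(series).ravel()
    n = series.size // w
    chunks = series[:n * w].reshape(n, w)    # n windows of length w
    return chunks.mean(axis=1), np.ptp(chunks, axis=1)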
def send_email(self, data):
    # self.stop_reading_data(None)
    # return None

    #### Add the warning logo images !!
    logo_path = self.output_folder + "images_IoTubes/mail_warning.png"
    image = mpimg.imread(logo_path)
    ax_img = plt.axes([0.88, self.monitoring_y - 0.02, 0.08, 0.08])
    ax_img.imshow(image)
    ax_img.axis("off")

    logo_path = self.output_folder + "images_IoTubes/warning.png"
    image = mpimg.imread(logo_path)
    ax_img = plt.axes([0.77, 0.09, 0.12, 0.12])
    ax_img.imshow(image)
    ax_img.axis("off")

    ## Generate image
    folder_images = self.images_folder
    path_image = folder_images + 'Warning.png'
    gl.savefig(path_image, dpi=100, sizeInches=[])  # 2*8, 2*3

    ############### Send Email ####################
    myMail = Cemail.Cemail(self.email_config.user, self.email_config.pwd,
                           self.email_config.recipients)
    myMail.create_msgRoot(subject=self.email_config.subject +
                          " CID: " + self.cleaning_ID)
    # myMail.set_subject(subject)  # For some reason we can only initialize the subject
    myMail.add_HTML(self.email_config.body)
    myMail.add_image(filedir=path_image, inline=1)

    send_report_flag = True
    if (send_report_flag):
        self.generate_report(None)
        myMail.add_file(self.report_path)

    ########## YOU MAY HAVE TO ALLOW ACCESS FOR LESS SECURE APPS IN GMAIL #####
    myMail.send_email()
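# Cemail is a custom wrapper; for reference, the same HTML-body-plus-inline-
# image email can be built with only the standard library. The addresses and
# server settings below are placeholders, not values from this project:
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.image import MIMEImage

def send_warning_mail(user, pwd, recipients, subject, html_body, image_path):
    msg = MIMEMultipart("related")
    msg["Subject"] = subject
    msg["From"] = user
    msg["To"] = ", ".join(recipients)
    # HTML body referencing the inline image through its Content-ID
    msg.attach(MIMEText(html_body + '<img src="cid:warning">', "html"))
    with open(image_path, "rb") as f:
        img = MIMEImage(f.read())
    img.add_header("Content-ID", "<warning>")
    msg.attach(img)
    with smtplib.SMTP_SSL("smtp.gmail.com", 465) as server:
        server.login(user, pwd)
        server.sendmail(user, recipients, msg.as_string())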
def IFE_d(self, Rf=0.01, Rfs_list=[0], year_start=1996, year_finish=2016, window=10):
    ### The official one can be done executing exercise c with another Rf.
    ## Just another graph to show that now we should not use all the money.
    ## The efficient frontier is not going to change, only the market line,
    ## so we execute IFE_c again with the new Rf and plot some market lines !!

    # self.pf.set_interval(dt.datetime(1996,12,5), dt.datetime(2016,2,21))
    self.pf.set_interval(dt.datetime(year_start, 1, 1),
                         dt.datetime(year_finish, 1, 1))

    # Just plot some tangency lines to the portfolio !!
    ## First plot some data !!
    Nalloc = 100000
    self.set_Rf(Rf)
    alloc = self.get_random_allocations(Nalloc, short="yes", mode="gaussian")
    self.scatter_allocations(alloc, alpha=0.3, nf=1)

    # Get upper limit of std to plot market lines
    w = self.TangentPortfolio(Rf=Rf)
    self.set_allocation(w)
    stdR = self.get_PortfolioStd()

    Optimal_portfolios = []
    for Rf in Rfs_list:
        bias, slope = self.Market_line(Rf=Rf)
        Optimal_portfolios.append(self.TangentPortfolio(Rf=Rf))
        gl.plot([0, 4 * stdR], [bias, bias + slope * 4 * stdR],
                legend=["Mkt Line Rf: %0.3f, SR:%0.2f" % (Rf, slope)],
                nf=0, loc=2)

    optimal, portfolios = self.efficient_frontier(kind="Tangent", max_exp=20)
    self.plot_allocations(portfolios, nf=0, lw=4, color="k",
                          legend=["Efficient Frontier"])
    self.scatter_allocations(np.eye(self.Nsym),
                             legend=["Assets"], nf=0, alpha=1.0, lw=5)
    self.scatter_allocations(Optimal_portfolios,
                             legend=["Optimal portfolios"], nf=0, alpha=1.0, lw=5)
    gl.savefig(folder_images + 'marketLines.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    ### Only one market line
    Rf = 0
    bias, slope = self.Market_line(Rf=Rf)
    gl.plot([0, 4 * stdR], [bias, bias + slope * 4 * stdR],
            legend=["Mkt Line Rf: %0.3f, SR:%0.2f" % (Rf, slope)],
            nf=1, loc=2)
    optimal, portfolios = self.efficient_frontier(kind="Tangent", max_exp=20)
    self.plot_allocations(portfolios, nf=0, lw=4, color="k",
                          legend=["Efficient Frontier"])
    self.scatter_allocations(np.eye(self.Nsym),
                             legend=["Assets"], nf=0, alpha=1.0, lw=5)
    self.scatter_allocations([Optimal_portfolios[2]],
                             legend=["Optimal portfolios"], nf=0, alpha=1.0, lw=5)
    gl.savefig(folder_images + 'marketLine.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])
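# TangentPortfolio and Market_line are defined elsewhere; under standard
# mean-variance assumptions the tangency weights have the closed form
# w ~ Sigma^-1 (mu - Rf * 1) and the market line is R(sigma) = Rf + SR * sigma.
# A sketch of those formulas (the names here are assumptions):
def tangent_portfolio(mu, Sigma, Rf=0.0):
    w = np.linalg.solve(Sigma, np.asarray(mu) - Rf)  # Sigma^-1 (mu - Rf)
    return w / np.sum(w)                             # normalize to full investment

def market_line(mu, Sigma, Rf=0.0):
    w = tangent_portfolio(mu, Sigma, Rf)
    ret = w @ np.asarray(mu)
    std = np.sqrt(w @ Sigma @ w)
    slope = (ret - Rf) / std                         # Sharpe ratio of the tangency pf
    return Rf, slope                                 # (bias, slope)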
def plot_multiple_iterations(Xs, mus, covs, Ks, myDManager, logl,
                             theta_list, model_theta_list, folder_images):
    ######## Plot the original data #####
    gl.init_figure()
    gl.set_subplots(2, 3)
    Ngraph = 6

    colors = ["r", "b", "g"]
    K_G, K_W, K_vMF = Ks

    for i in range(Ngraph):
        indx = int(i * ((len(theta_list) - 1) / float(Ngraph - 1)))
        nf = 1
        for xi in range(len(Xs)):
            ## First cluster
            labels = ['EM Evolution. Kg:' + str(K_G) + ', Kw:' + str(K_W) +
                      ', K_vMF:' + str(K_vMF), "X1", "X2"]
            ax1 = gl.scatter(Xs[xi][0, :], Xs[xi][1, :], labels=["", "", ""],
                             color=colors[xi], alpha=0.2, nf=nf)
            nf = 0
            mean, w, h, theta = bMA.get_gaussian_ellipse_params(
                mu=mus[xi], Sigma=covs[xi], Chi2val=2.4477)
            r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
            gl.plot(r_ellipse[:, 0], r_ellipse[:, 1], ax=ax1, ls="--", lw=2,
                    AxesStyle="Normal2", color=colors[xi], alpha=0.7)

        # Only doable if the clusters don't die
        for k_c in myDManager.clusterk_to_Dname.keys():
            k = myDManager.clusterk_to_thetak[k_c]
            distribution_name = myDManager.clusterk_to_Dname[k_c]  # G W

            if (distribution_name == "Gaussian"):
                ## Plot the evolution of the mu
                #### Plot the Covariance of the clusters !
                mean, w, h, theta = bMA.get_gaussian_ellipse_params(
                    mu=theta_list[indx][k][0], Sigma=theta_list[indx][k][1],
                    Chi2val=2.4477)
                r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
                gl.plot(r_ellipse[:, 0], r_ellipse[:, 1], ax=ax1, ls="-.", lw=3,
                        AxesStyle="Normal2",
                        legend=["Kg(%i). pi:%0.2f" %
                                (k, float(model_theta_list[indx][0][0, k]))])

            elif (distribution_name == "Watson"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as a transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i_s in range(Nsa):  # i_s: avoid shadowing the outer loop index
                    probs.append(np.exp(Wad.Watson_pdf_log(Xgrid[:, i_s], [mu, kappa])))
                probs = np.array(probs)
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                gl.plot(X1_w, X2_w, alpha=1, lw=3, ls="-.",
                        legend=["Kw(%i). pi:%0.2f" %
                                (k, float(model_theta_list[indx][0][0, k]))])

            elif (distribution_name == "vonMisesFisher"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for von Mises-Fisher
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as a transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i_s in range(Nsa):
                    probs.append(np.exp(vMFd.vonMisesFisher_pdf_log(Xgrid[:, i_s],
                                                                    [mu, kappa])))
                probs = np.array(probs)
                probs = probs.reshape((probs.size, 1)).T
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                # print(X1_w.shape, X2_w.shape)
                gl.plot(X1_w, X2_w, alpha=1, lw=3, ls="-.",
                        legend=["Kvmf(%i). pi:%0.2f" %
                                (k, float(model_theta_list[indx][0][0, k]))])

        ax1.axis('equal')

    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                       wspace=.2, hspace=0.01)
    gl.savefig(folder_images + 'Final_State2. K_G:' + str(K_G) +
               ', K_W:' + str(K_W) + '.png',
               dpi=100, sizeInches=[18, 8])
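# bMA.get_gaussian_ellipse_params is external; Chi2val = 2.4477 above is
# sqrt(chi2.ppf(0.95, df=2)) ~ sqrt(5.991), i.e. a 95% confidence ellipse.
# A sketch of the standard eigendecomposition construction (names assumed):
def gaussian_ellipse_points(mu, Sigma, chi_val=2.4477, n=200):
    vals, vecs = np.linalg.eigh(Sigma)         # principal axes of the covariance
    t = np.linspace(0, 2 * np.pi, n)
    circle = np.array([np.cos(t), np.sin(t)])  # unit circle
    # Scale each axis by chi_val * sqrt(eigenvalue), rotate, then translate
    pts = vecs @ (chi_val * np.sqrt(vals)[:, None] * circle)
    return pts.T + np.asarray(mu).ravel()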
                 xlabel=18, ylabel=18, legend=15, xticks=14, yticks=14)
gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                   wspace=.20, hspace=0.10)
gl.format_yaxis(ax=ax3, Nticks=10)
gl.format_xaxis(ax=ax3, Nticks=len(em_train))
gl.savefig(folder_images + images_prefix + "Accuracies_epoch.png",
           dpi=100, sizeInches=[20, 5], close=False, bbox_inches="tight")

if (Batch_related):
    """
    ############################################# Batch plots
    """
    data_loss_batch = training_logger["train"]["loss_batch"]
    em_train_batches = 100 * np.array(training_logger["train"]["em_batch"])
    f1_train_batches = 100 * np.array(training_logger["train"]["f1_batch"])

    gl.init_figure()
    ax1 = gl.subplot2grid((1, 2), (0, 0), rowspan=1, colspan=1)
        mean_val_ll, nf=0, color="r",
        legend=["Mean Validation LL (EM)"], lw=3)
gl.plot(Klusters, mean_val_ll + 2 * std_val_ll, color="r", nf=0,
        lw=1, ls="--", legend=["Mean Validation LL +- 2std"])
gl.plot(Klusters, mean_val_ll - 2 * std_val_ll, color="r", nf=0, lw=1, ls="--")
gl.fill_between(Klusters, mean_val_ll - 2 * std_val_ll,
                mean_val_ll + 2 * std_val_ll, c="r", alpha=0.1)

for i in range(len(logl_tr_CVs)):
    for k_i in range(len(Klusters)):
        gl.scatter(np.ones((len(logl_val_CVs[i][k_i]), 1)) * Klusters[k_i],
                   logl_val_CVs[i][k_i],
                   color="r", alpha=0.5, lw=1)

gl.savefig(folder_images + 'EM_Gaussian_CV_artificial_data.png',
           dpi=100, sizeInches=[12, 6])
labels = ["","",""], legend = ["M: %.2e, std: %.2e"%(mean[0], cov[1,1])]) gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.01, hspace=0.01) x_grid, y_val = bMA.gaussian1D_points(X = ret2, std_K = 3) gl.plot(y_val, x_grid, color = "b", labels = ["","",""], legend = ["M: %.2e, std: %.2e"%(mean[0], cov[1,1])]) xx, yy, zz = bMA.kde2D(ret1,ret2, bandwidth = np.std(ret1)/kde_K, xbins=n_grids*1j, ybins=n_grids*1j) ax1.contour(xx, yy, zz) ax1.axis('equal') gl.savefig(folder_images +'KDEHistogramCLOSE.png', dpi = 100, sizeInches = [18, 14]) if(distribution_graph3D): ################ Contour plot of the scatter plot ################ xx, yy, zz = bMA.kde2D(ret1,ret2, bandwidth = np.std(ret1)/kde_K, xbins=n_grids*1j, ybins=n_grids*1j) ## Plot the 3D surface ax3D = gl.plot_3D(xx, yy, zz, nf = 1) ## Limits of the plotting ! xmin,xmax = [np.min(xx.flatten()), np.max(xx.flatten())] ymin,ymax = [np.min(yy.flatten()), np.max(yy.flatten())] zmin,zmax = [np.min(zz.flatten()), np.max(zz.flatten())] # Plot the marginalization of X x_grid = np.linspace(min(ret1),max(ret1),n_grids)
def IFE_g(self, Rf=0, year_start=1996, year_finish=2016, window=10):
    ## CAPM model question: calculate alphas and betas, and doubt everything you know
    self.set_Rf(Rf)
    self.pf.set_interval(dt.datetime(year_start, 1, 1),
                         dt.datetime(year_finish, 1, 1))

    # Plot the correlation between the index and the stock
    gl.set_subplots(2, 3)
    self.pf.set_interval(dt.datetime(year_start, 1, 1),
                         dt.datetime(year_start + window, 1, 1))
    for i in range(6):
        self.plot_corrab(self.symbol_names[i])
    gl.savefig(folder_images + 'SymbolAB.png',
               dpi=80, sizeInches=[2 * 8, 2 * 6])

    # Plot the Jensen alpha of some of the stocks
    gl.set_subplots(2, 3)
    self.pf.set_interval(dt.datetime(year_start, 1, 1),
                         dt.datetime(year_start + window, 1, 1))
    for i in range(6):
        JensenAlpha = self.get_symbol_JensenAlpha(self.symbol_names[i])
        gl.histogram(JensenAlpha, labels=[self.symbol_names[i]])
    gl.savefig(folder_images + 'JensenAlphasAll.png',
               dpi=80, sizeInches=[4 * 8, 3 * 6])

    ## We set a stupid initial portfolio (everything equal)
    param = self.get_symbol_ab(self.symbol_names[1])
    print("Params of %s" % self.symbol_names[1])
    print(param)

    ########## TEST ONE SYMBOL ######
    # self.test_symbol_ab(self.symbol_names[1])

    # Print the parameters of all symbols
    params = self.get_all_symbols_ab()
    print("All params")
    print(params)

    # Params of the stupid portfolio
    print("Params of stupid portfolio")
    self.set_allocation([])
    param = self.get_portfolio_ab(mode="normal")  # Obtained as definition
    print(param)
    param = self.get_portfolio_ab(mode="gaussian")  # Obtained first getting the cov matrix
    print(param)

    ########## TEST Portfolio ######
    # Test the Jensen's Alpha of the portfolio
    JensenAlpha = self.get_portfolio_JensenAlpha()

    ## IDEA !! Maybe use the portfolio in the frontier that maximizes
    ## the alpha and minimizes the beta !!! Maybe minimizing beta is not as important.
    ## In the CAPM we already have the total Exp and risk.
    ## Alpha and beta say: does our portfolio perform better than the market?
    ## If we just follow the market, investing everything on the index,
    ## thus investing in everything proportionally to their capital,
    ## then we have alpha = 0 and beta = 1.
    # CAPMillo.test_symbol_ab(symbols[2])

    # Plot random portfolios' correlation with the index
    alloc = self.get_random_allocations(100, short="yes", mode="gaussian")
    gl.set_subplots(2, 3)
    for i in range(6):
        self.set_allocation(alloc[i])
        self.plot_portfoliocorrab(nf=1)
    gl.savefig(folder_images + 'randomPortCorr.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    # Plot Jensen Alpha for random portfolios
    flag_nf = 1
    for i in range(5):
        self.set_allocation(alloc[i])
        self.test_Jensens_Alpha(nf=flag_nf)
        flag_nf = 0
    gl.savefig(folder_images + 'randomPortJA.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    ##############################################
    ########### ANALYZE 3 optimal portfolios #####
    ##############################################
    Rfs = [0, 0.002, 0.0031]
    flag_nf = 1
    for Rf in Rfs:
        # Do it again with an optimal portfolio
        w = self.TangentPortfolio(Rf=Rf)
        self.set_allocation(w)
        self.test_Jensens_Alpha(nf=flag_nf)
        flag_nf = 0
    gl.savefig(folder_images + 'optimalPortJA.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    gl.set_subplots(1, 3)
    for Rf in Rfs:
        # Do it again with an optimal portfolio
        w = self.TangentPortfolio(Rf=Rf)
        self.set_allocation(w)
        self.plot_portfoliocorrab(nf=1)
    gl.savefig(folder_images + 'optimalPortCorr.png',
               dpi=150, sizeInches=[2 * 8, 1 * 6])
def IFE_f(self, ObjectiveR=0.003, Rf=0.0, year_start=1996, year_finish=2016, window=10):
    ### The official one can be done executing exercise c with another Rf.
    ## Just another graph to show that now we should not use all the data.

    # Choose a desired return; using the training samples, calculate with the
    # market line the optimal portfolio for it. Then calculate the real return
    # of that portfolio over the next year. Do this for several years as well.
    self.set_Rf(Rf)

    nf_flag = 1
    All_stds = []
    PortfolioReturns = []
    IndexReturns = []
    all_dates = []
    for year_test in range(year_start, year_finish - window + 1 - 1):  # +1 !!
        # Set the dates
        self.pf.set_interval(dt.datetime(year_test, 1, 1),
                             dt.datetime(year_test + window, 1, 1))

        # Obtain the market line !!
        w = self.TangentPortfolio(Rf=Rf)  # Obtain allocation
        self.set_allocation(w)
        # Obtain the expected return and std when using all our money !
        expRet, stdRet = self.get_metrics(investRf="no")
        param = bMl.obtain_equation_line(Rf, expRet, stdRet)
        bias, slope = param
        X = (ObjectiveR - Rf) / (expRet - Rf)
        wdesired = w * X

        ## Check that the output of this portfolio is the desired one.
        self.set_allocation(wdesired)  # Set the allocation
        expRet, stdRet = self.get_metrics()  # Get the expected return for that year
        # print(ret)

        ## Now that we have the desired w*X, we will calculate the return of
        ## the portfolio in the following year.
        # To do so, we set the dates only to the next year, set the portfolio
        # allocation and calculate the yearly expected return !!

        # Set the dates to only the next year !!
        # Also, one month before in order to get the returns of the first month.
        self.pf.set_interval(dt.datetime(year_test + window, 1, 1),
                             dt.datetime(year_test + window + 1, 1, 1))
        self.set_allocation(wdesired)  # Set the allocation
        expRet, stdRet = self.get_metrics()  # Get the expected return for that year
        PortfolioRet = self.yearly_Return(expRet)  # Get yearly returns
        PortfolioReturns.append(PortfolioRet)

        All_stds.append(self.yearly_covMatrix(stdRet))

        indexRet = self.get_indexMeanReturn()
        indexRet = self.yearly_Return(indexRet)
        IndexReturns.append(indexRet)

        # dates = self.get_dates()
        all_dates.append(year_test + window + 1)

        ## Graph with the evolution of the portfolio price after the assignment
        gl.plot(range(1, 13), np.cumsum(self.get_PortfolioReturn()),
                nf=nf_flag,
                labels=["Evolution of returns by month", "Months passed",
                        "Cumulative Return"],
                legend=[str(year_test + window + 1)])
        nf_flag = 0
    gl.savefig(folder_images + 'returnsEvolMonth.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    ## Graph with the desired and obtained returns and the returns of the index
    gl.bar(all_dates[:], IndexReturns,
           labels=["Obtained returns", "Time (years)", "Return (%)"],
           legend=["Index Return"], alpha=0.8, nf=1)
    gl.bar(all_dates[:], PortfolioReturns,
           labels=["Returns of year", "Year", "Value"],
           legend=["Portfolio Return"], alpha=0.8, nf=0)
    gl.scatter(all_dates[:],
               self.yearly_Return(ObjectiveR) * np.ones((len(all_dates[:]), 1)),
               legend=["Objective Return"], nf=0)
    gl.scatter(all_dates[:], All_stds,
               legend=["Std of the portfolio return"], nf=0)
    gl.savefig(folder_images + 'returnsEvolYears.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    #### Crazy idea !! Let's plot where the f*****g efficient frontier went
    nf_flag = 1
    PortfolioReturns = []
    IndexReturns = []
    all_dates = []
    gl.set_subplots(2, 3)
    for year_test in range(year_start, year_start + 6):  # +1 !!
        # Set the dates
        self.pf.set_interval(dt.datetime(year_test, 1, 1),
                             dt.datetime(year_test + window, 1, 1))
        optimal, portfolios = self.efficient_frontier(kind="Tangent")
        self.plot_allocations(portfolios,
                              labels=["Evolution of the efficient frontier"],
                              legend=["Frontier " + str(year_test + window) + " before"],
                              color="k", nf=1)

        self.pf.set_interval(dt.datetime(year_test + window, 1, 1),
                             dt.datetime(year_test + window + 1, 1, 1))
        self.set_allocation(self.TangentPortfolio(Rf=Rf))
        self.plot_allocations(portfolios,
                              legend=["Frontier " + str(year_test + window) + " after"],
                              color="r", nf=0)
    gl.savefig(folder_images + 'effEvol.png',
               dpi=80, sizeInches=[4 * 8, 3 * 6])
            if (ll_train[ic, K_i] > ll_train_best[ic, K_i]):
                ll_train_best[ic, K_i] = copy.deepcopy(ll_train[ic, K_i])
                ll_test_best[ic, K_i] = copy.deepcopy(ll_test[ic, K_i])
                All_Ks_params_best[K_i] = copy.deepcopy(All_Ks_params[K_i])

    for ic in range(Nclasses):
        gl.plot(Klusters,
                np.array([ll_train_best[ic], ll_test_best[ic]]).T,
                legend=["tr", "Val"],
                labels=["EM class = " + str(ic), "States", "loglike"])
        gl.savefig(file_dir="./OnePerson_5fold_cluster" + str(ic) +
                            "/Iteration" + str(i) + ".png",
                   bbox_inches='tight',
                   sizeInches=[],  # The size in inches as a list
                   close=True,     # If we close the figure once saved
                   dpi=100)        # Density of pixels !! Same image but more quality !

loading_precomputed_centroids = 1
if (loading_precomputed_centroids):
    # pkl.store_pickle("./OnePerson1FoldEM.pkl",
    #                  [ll_train_best, ll_test_best, All_Ks_params_best])
    cosas = pkl.load_pickle("./OnePerson1FoldEM.pkl")
    class_i = 1
    n_cluster_opt = 5
    good_clusters_EM = cosas[2][n_cluster_opt][class_i]
    Ks_params = good_clusters_EM
    pi_opt = good_clusters_EM[0]
    mu_opt = good_clusters_EM[1][0]
def IFE_f2(self, ObjectiveRlist=[0.003], Rf=0.0, year_start=1996, year_finish=2016, window=10):
    ### The official one can be done executing exercise c with another Rf.
    ## Just another graph to show that now we should not use all the data.

    # Choose a desired return; using the training samples, calculate with the
    # market line the optimal portfolio for it. Then calculate the real return
    # of that portfolio over the next year. Do this for several years as well.
    self.set_Rf(Rf)

    All_returns = []
    All_vars = []
    windowslist = range(1, 13)
    ObjectiveR = 0.03
    for window in windowslist:
        PortfolioReturns = []
        all_dates = []
        for year_test in range(year_start, year_finish - window + 1 - 1):  # +1 !!
            # Set the dates
            self.pf.set_interval(dt.datetime(year_test, 1, 1),
                                 dt.datetime(year_test + window, 1, 1))
            # Obtain the market line !!
            w = self.TangentPortfolio(Rf=Rf)  # Obtain allocation
            self.set_allocation(w)
            # Obtain the expected return and std when using all our money !
            expRet, stdRet = self.get_metrics(investRf="no")
            param = bMl.obtain_equation_line(Rf, expRet, stdRet)
            bias, slope = param
            X = (ObjectiveR - Rf) / (expRet - Rf)
            wdesired = w * X

            self.pf.set_interval(dt.datetime(year_test + window, 1, 1),
                                 dt.datetime(year_test + window + 1, 1, 1))
            self.set_allocation(wdesired)  # Set the allocation
            expRet, stdRet = self.get_metrics()  # Get the expected return for that year
            PortfolioRet = self.yearly_Return(expRet)  # Get yearly returns
            PortfolioReturns.append(PortfolioRet)

            dates = self.get_dates()
            all_dates.append(dates[0])

        All_returns.append(np.mean(PortfolioReturns))
        # Note: np.sqrt(np.sqrt(12 * 12)) == np.sqrt(12), a standard-error style scaling
        All_vars.append(np.std(PortfolioReturns) / np.sqrt(np.sqrt(12 * 12)))

    # All_returns = np.array(All_returns).reshape(len(ObjectiveRlist), 10)
    # print(All_returns)
    All_means = All_returns
    print(All_returns)
    # All_means = np.mean(All_returns, axis=1)
    print(ul.fnp(All_returns).shape)
    print(All_means)
    # print(All_means - ObjectiveRlist)
    # All_means = np.divide((All_means - ObjectiveRlist), ObjectiveRlist)
    # print(All_means)

    ## Graph with the desired and obtained returns and the returns of the index
    gl.bar(windowslist, All_means,
           labels=["Obtained returns", "Time (years)", "Return (%)"],
           legend=["Index Return"], alpha=0.8, nf=1)
    gl.plot(windowslist, All_vars,
            labels=["Obtained returns", "Time (years)", "Return (%)"],
            legend=["Index Return"], alpha=0.8, nf=0)

    gl.savefig(folder_images + 'best_Objective.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])
def create_Bayesian_analysis_charts_simplified(model, train_dataset, validation_dataset,
                                               tr_loss, val_loss, KL_loss,
                                               folder_images, epoch_i=None):
    # Configurations of the plots
    alpha_points = 0.2
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likely = "y"

    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6, 3), (0, 0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6, 3), (3, 0), rowspan=3, colspan=1, sharex=ax1, sharey=ax1)
    ax3 = gl.subplot2grid((6, 3), (0, 1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6, 3), (2, 1), rowspan=2, colspan=1, sharex=ax3)
    ax5 = gl.subplot2grid((6, 3), (4, 1), rowspan=2, colspan=1, sharex=ax3)
    ax6 = gl.subplot2grid((6, 3), (0, 2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6, 3), (3, 2), rowspan=3, colspan=1, sharex=ax6)

    ####### ax1, ax2: Get confusion matrices ##########
    labels_classes, confusion = model.get_confusion_matrix(train_dataset)
    plot_confusion_matrix(confusion, labels_classes, ax1)
    labels_classes, confusion = model.get_confusion_matrix(validation_dataset)
    plot_confusion_matrix(confusion, labels_classes, ax2)

    ############## ax3 ax4 ax5: Loss Evolution !! ######################
    ## ax3: Evolution of the data loss
    gl.plot([], tr_loss, ax=ax3, lw=3,
            labels=["Losses", "", "Data loss (MSE)"],
            legend=["train"], color=color_train_loss)
    gl.plot([], val_loss, ax=ax3, lw=3, legend=["validation"],
            color=color_val_loss, AxesStyle="Normal - No xaxis")

    ## ax4: The evolution of the KL loss
    gl.plot([], KL_loss, ax=ax4, lw=3, labels=["", "", "KL loss"],
            legend=["Bayesian Weights"], AxesStyle="Normal - No xaxis", color="k")

    ## ax5: Evolution of the total loss
    gl.plot([], tr_loss, ax=ax5, lw=3,
            labels=["", "epoch", "Total Loss (Bayes)"],
            legend=["train"], color=color_train_loss)
    gl.plot([], val_loss, ax=ax5, lw=3, legend=["validation"], color=color_val_loss)

    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model, ax6, ax7)
    gl.set_zoom(ax=ax6, ylim=[-0.1, 10])
    gl.set_zoom(ax=ax7, xlim=[-2.5, 2.5], ylim=[-0.1, 0.5])

    # Set final properties and save figure
    gl.set_fontSizes(ax=[ax1, ax2, ax3, ax4, ax5, ax6, ax7],
                     title=20, xlabel=20, ylabel=20,
                     legend=10, xticks=12, yticks=12)
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                       wspace=.30, hspace=0.10)

    if (epoch_i is None):
        gl.savefig(folder_images + 'Training_Example_Data_Bayesian.png',
                   dpi=100, sizeInches=[20, 10])
    else:
        gl.savefig(folder_images + '%i.png' % epoch_i,
                   dpi=100, sizeInches=[20, 10], close=True, bbox_inches="tight")
gl.init_figure()
for i in range(Nx):
    X_i = X[:, [i]]
    x_grid, y_values = bMA.gaussian1D_points(mean=mus[i], std=stds[i],
                                             x_grid=x_grid)
    color = gl.get_color()
    gl.scatter(X_i, np.zeros(X_i.shape), alpha=0.1, lw=4, AxesStyle="Normal",
               color=color,
               labels=["3 independent Gaussian distributions", "x", "pdf(x)"])
    gl.plot(x_grid, y_values, color=color, fill=1, alpha=0.1,
            legend=["X%i: m:%.1f, std:%.1f" % (i + 1, mus[i], stds[i])])

gl.savefig(folder_images + 'Gaussians.png',
           dpi=100, sizeInches=[18, 10])

############################################################
################# PLOT DATA ###############################
############################################################
if (distribution_graph_2D):
    # Get the histogram and gaussian estimations !
    ## Scatter plot of the points
    # gl.init_figure()
    i_1 = 2
    i_2 = 0
    X_1, X_2 = X[:, [i_1]], X[:, [i_2]]
    mu_1, mu_2 = mus[i_1], mus[i_2]
    std_1, std_2 = stds[i_1], stds[i_2]
           xlimPad=[0.1, 0.3], ylimPad=[0.1, 0.1],
           marker=marker, AxesStyle="Normal2")
gl.stem([], diffw2, nf=1, sharex=ax1, sharey=ax1, lw=lw,
        labels=["MOMw(%i)" % ndiff2, "lag", ""],
        xlimPad=[0.1, 0.3], ylimPad=[0.1, 0.1],
        marker=marker, AxesStyle="Normal2 - No yaxis")
gl.stem([], diffw3, nf=1, sharex=ax1, sharey=ax1, lw=lw,
        labels=["MOMw(%i)" % ndiff3, "lag", ""],
        xlimPad=[0.1, 0.3], ylimPad=[0.1, 0.1],
        marker=marker, AxesStyle="Normal2 - No yaxis")

gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                   wspace=.050, hspace=0.01)
gl.savefig(folder_images + 'MOMw.png',
           dpi=100, sizeInches=[2 * 8, 2 * 2])

## Differentiation of 1
gl.set_subplots(1, 3)  # Plotting the 3 of them at the same time.
ax1 = gl.stem([], dSMAw, nf=1, lw=lw,
              labels=["SMASMA", "lag", "value"], legend=["SMASMA(%i)" % nHMA],
              xlimPad=[0.1, 0.3], ylimPad=[0.1, 0.1],
              marker=marker, AxesStyle="Normal2")
gl.stem([], dWMAw, nf=1, sharex=ax1, sharey=ax1, lw=lw,
        labels=["WMAWMA", "lag"], legend=["WMAWMA(%i)" % nHMA],
def IFE_e(self, ObjectiveR=0.003, Rf=0.0, year_start=1996, year_finish=2016, window=10):
    # Choose a desired return; using the training samples, calculate with the
    # market line the optimal portfolio for it.
    # Then, using also the last year (test), recalculate the portfolio needed
    # for that return; the difference between the two is the turnover.
    self.set_Rf(Rf)

    nf_flag = 1
    desired_Portfolios = []
    all_dates = []
    for year_test in range(year_start, year_finish - window + 1):  # +1 !!
        # Set the dates
        self.pf.set_interval(dt.datetime(year_test, 1, 1),
                             dt.datetime(year_test + window, 1, 1))

        # Obtain the market line !!
        w = self.TangentPortfolio(Rf=Rf)  # Obtain allocation
        # Obtain the expected return and std when using all our money !
        self.set_allocation(w)
        expRet, stdRet = self.get_metrics(investRf="no")
        param = bMl.obtain_equation_line(Rf, expRet, stdRet)
        bias, slope = param

        # Once we have the equation of the line, we obtain how much money
        # we need to use to reach the desired expected return.
        # Rt = (1 - X)Rf + XRp with X = sum(w)
        # For a desired Rt we solve the X
        X = (ObjectiveR - Rf) / (expRet - Rf)
        # print(X)
        # So the desired portfolio is:
        wdesired = w * X
        desired_Portfolios.append(wdesired)

        gl.plot([0, 1.3 * abs(X * stdRet)],
                [bias, bias + 1.3 * abs(slope * stdRet * X)],
                labels=["Desired Portfolios", "Risk (std)", "Return (%)"],
                legend=["%s, X: %0.3f" % ((year_test + window), X[0])],
                nf=nf_flag, loc=2)
        nf_flag = 0
        gl.scatter([abs(X * stdRet)], [ObjectiveR], nf=0)

        dates = self.get_dates()
        all_dates.append(dates[-1])
        # print(wdesired)
    gl.savefig(folder_images + 'desiredPortfolios.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    # Now we calculate the turnovers
    Turnovers = []
    prev_abs_alloc = []  # Previous, absolute allocation
    percentaje_changed = []
    Nport = len(desired_Portfolios)
    for i in range(Nport - 1):
        to = bMl.get_TurnOver(desired_Portfolios[i], desired_Portfolios[i + 1])
        Turnovers.append(to)
        prev_abs_alloc.append(np.sum(np.abs(desired_Portfolios[i])))
        percentaje_changed.append(Turnovers[-1] / prev_abs_alloc[-1])

    print(Turnovers)

    gl.set_subplots(1, 3)
    gl.bar(all_dates[1:], Turnovers, color="g",
           labels=["Portfolio turnovers", "Year", "Value"])
    gl.add_text([all_dates[1:][3], max(Turnovers) * 0.80],
                "Mean: %0.2f" % np.mean(Turnovers), 30)

    gl.bar(all_dates[0:-1], prev_abs_alloc, color="r",
           labels=["Absolute allocations", "Year", "Value"])

    gl.bar(all_dates[1:], percentaje_changed, color="b",
           labels=["Percentage turnover", "Year", "Value"])
    gl.add_text([all_dates[1:][3], max(percentaje_changed) * 0.80],
                "Mean: %0.2f" % np.mean(percentaje_changed), 30)
    gl.savefig(folder_images + 'turnovers.png',
               dpi=150, sizeInches=[2 * 8, 1 * 6])
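# The leverage solve above follows from two-fund separation:
# Rt = (1 - X) Rf + X Rp, so targeting Rt = ObjectiveR gives
# X = (ObjectiveR - Rf) / (Rp - Rf). bMl.get_TurnOver is external;
# turnover is presumably the L1 distance between consecutive allocations.
# A small numeric sketch with illustrative values only:
Rf_, Rp_, ObjectiveR_ = 0.0, 0.005, 0.003
X_ = (ObjectiveR_ - Rf_) / (Rp_ - Rf_)         # 0.6: put 60% in the tangency pf
w_now = np.array([0.5, 0.3, 0.2]) * X_
w_next = np.array([0.4, 0.4, 0.2]) * X_
turnover_ = np.sum(np.abs(w_next - w_now))     # L1 change in the allocation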
def IFE_h(self, Rf=0, mktcap=[], year_start=1996, year_finish=2016, window=10):
    ## Black-Litterman question !!
    # The optimal portfolio, let's say, is the one given by Markowitz.
    # mktcap is a dictionary with the market capitalization of the equities.
    self.pf.set_interval(dt.datetime(year_start, 1, 1),
                         dt.datetime(year_finish, 1, 1))

    ## Get the actual stuff !!
    ExpRet = self.get_MeanReturns()
    Sigma = self.get_covMatrix()
    woptimal = self.TangentPortfolio()

    self.set_allocation(woptimal)
    R, S = self.get_metrics()
    delta = (R - self.Rf) / np.power(S, 2)  # Optimal risk aversion

    ## Get the weights by the market capitalization
    if (len(mktcap) > 0):
        weq = []
        for sym in self.symbol_names:
            weq.append(mktcap[sym])
        weq = ul.fnp(weq) / np.sum(weq)
        weq = weq.T.tolist()[0]
        # print(weq)
    else:
        weq = woptimal  # Initial prior

    ############### PUT the proper BL prior instead ##########
    # Calculate the initial portfolio from the market capitalization.
    # Risk aversion of the market: we say it is the one of the portfolio.
    # The optimal portfolio is the market.
    # weq = np.ones((1, self.Nsym)) / self.Nsym
    # weq = weq.tolist()[0]

    # Coefficient of uncertainty in the prior estimate of the mean
    tau = 10

    ### Prior of our Views !!!
    P1 = np.zeros((2, self.Nsym))
    P1[0, 0] = -1; P1[0, 1] = 1
    P1[1, 1] = -1; P1[1, 2] = 1
    P1 = ul.fnp(P1)

    # If we invert P1 and Q1 at the same time we get the same
    Q1 = [0.0002, 0.0001]
    Q1 = ul.fnp(Q1)
    Omega1 = np.dot(np.dot(P1, Sigma), P1.T) * np.eye(Q1.shape[0])

    postPi, weqpost = self.BlackLitterman(
        weq, Sigma, delta,  # Prior portfolio variables
        tau,                # Uncertainty coefficient of the portfolio priors
        P1, Q1, Omega1)     # Prior views variables

    # Reference returns of the portfolio of the market:
    # they can just be calculated using the portfolio.
    # A priori the posterior expected return does not have to be bigger,
    # just more accurate to reality if our views are right :)
    refPi = delta * np.dot(Sigma, weq)

    Ereturn = np.dot(refPi, weq)
    EreturnPost = np.dot(postPi, weqpost)

    ## Plot the returns !!!
    # We will plot the real w returns, the Pi returns, and the post-returns
    gl.set_subplots(2, 3)
    gl.bar(list(self.pf.symbols.keys()), ExpRet, labels=["Optimal initial returns"])
    gl.bar(list(self.pf.symbols.keys()), refPi, labels=["Prior Returns"])
    gl.bar(list(self.pf.symbols.keys()), postPi, labels=["Posterior Returns"])
    # gl.savefig(folder_images + 'returnsBL.png',
    #            dpi=150, sizeInches=[2 * 8, 2 * 6])

    ## Plot the weights !!!
    # gl.set_subplots(1, 3)
    gl.bar(list(self.pf.symbols.keys()), woptimal, labels=["Optimal initial weights"])
    gl.bar(list(self.pf.symbols.keys()), weq, labels=["Prior Weights"])
    gl.bar(list(self.pf.symbols.keys()), weqpost, labels=["Posterior Weights"])
    # gl.savefig(folder_images + 'weightsBL.png',
    #            dpi=150, sizeInches=[2 * 8, 2 * 6])

    gl.savefig(folder_images + 'weightsreturnsBL.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])
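# self.BlackLitterman is defined elsewhere; the standard Black-Litterman
# update it presumably implements combines the equilibrium returns
# Pi = delta * Sigma * weq with the views (P, Q, Omega):
#   Pi_post = [(tau Sigma)^-1 + P' Omega^-1 P]^-1 [(tau Sigma)^-1 Pi + P' Omega^-1 Q]
# A minimal sketch under those assumptions (names are illustrative):
def black_litterman_posterior(weq, Sigma, delta, tau, P, Q, Omega):
    Pi = delta * Sigma @ np.asarray(weq)              # equilibrium excess returns
    tS_inv = np.linalg.inv(tau * Sigma)
    Om_inv = np.linalg.inv(Omega)
    A = tS_inv + P.T @ Om_inv @ P
    b = tS_inv @ Pi + P.T @ Om_inv @ np.asarray(Q).ravel()
    Pi_post = np.linalg.solve(A, b)
    w_post = np.linalg.solve(delta * Sigma, Pi_post)  # unconstrained posterior weights
    return Pi_post, w_post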
def IFE_f2(self, ObjectiveRlist=[0.003], Rf=0.0, year_start=1996, year_finish=2016, window=10):
    ### The official version can be obtained by executing exercise c with another Rf.
    ## Just another graph to show that now we should not use all the data.

    # Choose a desired return.
    # Using the training samples, calculate the optimal portfolio for that
    # return by means of the market line.
    # Then calculate, for the next year, the real return of that portfolio.
    # Do this for several years as well.
    self.set_Rf(Rf)

    All_returns = []
    All_vars = []
    # NOTE: the window argument is overridden by the sweep below,
    # and a single ObjectiveR is used instead of ObjectiveRlist.
    windowslist = range(1, 13)
    ObjectiveR = 0.03
    for window in windowslist:
        PortfolioReturns = []
        all_dates = []
        for year_test in range(year_start, year_finish - window + 1 - 1):  # +1 !!
            # Set the dates
            self.pf.set_interval(dt.datetime(year_test, 1, 1),
                                 dt.datetime(year_test + window, 1, 1))
            # Obtain the market line !!
            w = self.TangentPortfolio(Rf=Rf)  # Obtain allocation
            self.set_allocation(w)
            # Obtain the expected return and std when using all our money !
            expRet, stdRet = self.get_metrics(investRf="no")
            param = bMl.obtain_equation_line(Rf, expRet, stdRet)
            bias, slope = param
            X = (ObjectiveR - Rf) / (expRet - Rf)
            wdesired = w * X

            self.pf.set_interval(dt.datetime(year_test + window, 1, 1),
                                 dt.datetime(year_test + window + 1, 1, 1))
            self.set_allocation(wdesired)        # Set the allocation
            expRet, stdRet = self.get_metrics()  # Get the expected return for that year
            PortfolioRet = self.yearly_Return(expRet)  # Get yearly returns
            PortfolioReturns.append(PortfolioRet)

            dates = self.get_dates()
            all_dates.append(dates[0])

        All_returns.append(np.mean(PortfolioReturns))
        All_vars.append(np.std(PortfolioReturns) / np.sqrt(12))

    # All_returns = np.array(All_returns).reshape(len(ObjectiveRlist),10)
    # print(All_returns)
    All_means = All_returns
    print(All_returns)
    # All_means = np.mean(All_returns, axis = 1)
    print(ul.fnp(All_returns).shape)
    print(All_means)
    # print(All_means - ObjectiveRlist)
    # All_means = np.divide((All_means - ObjectiveRlist),ObjectiveRlist)
    # print(All_means)

    ## Graph with the desired and obtained returns and the returns of the index
    gl.bar(windowslist, All_means,
           labels=["Obtained returns", "Time (years)", "Return (%)"],
           legend=["Index Return"],
           alpha=0.8, nf=1)
    gl.plot(windowslist, All_vars,
            labels=["Obtained returns", "Time (years)", "Return (%)"],
            legend=["Index Return"],
            alpha=0.8, nf=0)
    gl.savefig(folder_images + 'best_Objective.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])
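# For context, a minimal sketch of the annualization a helper like
# yearly_Return() is assumed to perform on monthly figures (an assumption
# about the helper, not its actual code): compound the mean monthly return
# over 12 months, and scale the monthly std by sqrt(12).
import numpy as np

def yearly_return_sketch(monthly_mean_return):
    # (1 + r_month)^12 - 1: geometric compounding of a monthly return
    return (1.0 + monthly_mean_return) ** 12 - 1.0

def yearly_std_sketch(monthly_std):
    # Volatility of a sum of 12 independent monthly returns scales with sqrt(12)
    return monthly_std * np.sqrt(12)

# e.g. a 0.3% mean monthly return compounds to roughly 3.66% per year
assert abs(yearly_return_sketch(0.003) - 0.0366) < 1e-3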
tgrid = tgrid.reshape(tgrid.size, 1)
N = tgrid.size

# Create the signal
X = mean_function(tgrid, f1=1, f2=5, a1=0.4, a2=0.1,
                  phi2=2 * np.pi / 7, m=0.1)

if (plot_mean_signal and plot_flag):
    ## Plot the original function
    gl.scatter(tgrid, X, lw=1, alpha=0.9, color="k", nf=1,
               labels=["The true deterministic signal mu(t)", "t", "mu(t)"])
    gl.plot(tgrid, X, lw=2, color="k", ls="--", legend=["True signal"])

    gl.set_fontSizes(title=20, xlabel=20, ylabel=20,
                     legend=20, xticks=20, yticks=20)
    gl.savefig(folder_images + 'GP_mean.png',
               dpi=100, sizeInches=[2 * 8, 2 * 2])

###########################################################################
############### Generate the structural noise #############################
###########################################################################
"""
Now we generate the stochastic process that we add to X(t), producing the
noisy signal Y(t) = X(t) + e(t), where we assume e(t) is Gaussian with
mean 0: e(t) ~ N(0, sigma_t).

So we have a Gaussian process, since any finite set of samples is jointly
Gaussian. The relation between the noise values is given by the covariance
matrix C, which tells us how big the noise is and how the values relate to
each other. We will use a basic kernel for now.
"""
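# A minimal sketch of the "basic kernel" step announced above, under the
# assumption that the covariance C is a squared-exponential (RBF) kernel;
# the names (sigma_f, length) are illustrative, not from this codebase:
import numpy as np

def rbf_covariance(tgrid, sigma_f=0.1, length=1.0, jitter=1e-8):
    # C[i, j] = sigma_f^2 * exp(-(t_i - t_j)^2 / (2 * length^2))
    d = tgrid.reshape(-1, 1) - tgrid.reshape(1, -1)
    C = sigma_f ** 2 * np.exp(-0.5 * (d / length) ** 2)
    return C + jitter * np.eye(tgrid.size)  # jitter keeps the Cholesky stable

def sample_structural_noise(tgrid, rng=np.random.default_rng(0)):
    C = rbf_covariance(np.asarray(tgrid).ravel())
    L = np.linalg.cholesky(C)                      # C = L L^T
    return L.dot(rng.standard_normal(tgrid.size))  # e ~ N(0, C)

# Y(t) = X(t) + e(t) would then be X.ravel() + sample_structural_noise(tgrid)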
r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
# TODO: the legend below was probably meant to interpolate the correlation,
# e.g. "Corr: %.2f" % corr
gl.plot(r_ellipse[:, 0], r_ellipse[:, 1], ax=ax1, ls="--", color="k", lw=2,
        legend=["Corr: .2f"], AxesStyle="Normal2")

gl.plot([mean[0] - vecs[0, 0] * w, mean[0] + vecs[0, 0] * w],
        [mean[1] - vecs[0, 1] * w, mean[1] + vecs[0, 1] * w],
        ax=ax1, ls="--", color="k")
gl.plot([mean[0] - vecs[1, 0] * h, mean[0] + vecs[1, 0] * h],
        [mean[1] - vecs[1, 1] * h, mean[1] + vecs[1, 1] * h],
        ax=ax1, ls="--", color="k")

ax1.axis('equal')
gl.set_zoom(ax=ax1, X=r_ellipse[:, 0], Y=r_ellipse[:, 1],
            ylimPad=[0.2, 0.2], xlimPad=[0.2, 0.2])

gl.savefig(folder_images + 'RotatedProjection.png',
           dpi=100, sizeInches=[14, 7])

############################################################
################# PLOT DATA ################################
############################################################

## Now we are going to plot the projections and the final thing
gl.set_subplots(1, 3)

### First projections
ax1 = gl.scatter(Y[0, :], Y[1, :], alpha=0.5, lw=4, AxesStyle="Normal",
                 labels=["", "U1", "U2"],
                 legend=["%i points" % Nsam], nf=1)
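# For reference, a minimal sketch of how ellipse parameters like the ones
# consumed above (mean, w, h, theta) are typically derived from a 2D
# Gaussian's covariance; this is an assumption about what a helper such as
# bMA.get_gaussian_ellipse_params computes, not its actual code:
import numpy as np

def gaussian_ellipse_params_sketch(mu, Sigma, chi2_val=2.4477):
    vals, vecs = np.linalg.eigh(Sigma)          # eigendecomposition, ascending order
    order = np.argsort(vals)[::-1]              # largest eigenvalue first
    vals, vecs = vals[order], vecs[:, order]
    w = chi2_val * np.sqrt(vals[0])             # semi-axis along the main direction
    h = chi2_val * np.sqrt(vals[1])             # semi-axis along the minor direction
    theta = np.arctan2(vecs[1, 0], vecs[0, 0])  # rotation of the main axis
    return np.asarray(mu), w, h, theta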
ax2 = gl.subplot2grid((5, 1), (2, 0), rowspan=1, colspan=1, sharex=ax1)
gl.stem(dates, volume, ax=ax2, dataTransform=dataTransform,
        AxesStyle="Normal - No xaxis - Ny:4",
        labels=["", "", symbolIDs[0] + "(" + str(periods[0]) + "M)"],
        legend=["Volume"])

ax3 = gl.subplot2grid((5, 1), (3, 0), rowspan=2, colspan=1, sharex=ax1)
gl.stem(dates, ret1, ax=ax3, dataTransform=dataTransform,
        AxesStyle="Normal",
        labels=["", "", symbolIDs[0] + "(" + str(periods[0]) + "M)"],
        legend=["Return"])

# gl.set_fontSizes(ax = [ax1,ax2,ax3], title = 20, xlabel = 20, ylabel = 20, legend = 20, xticks = 10, yticks = 10)
gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                   wspace=.01, hspace=0.01)

gl.savefig(folder_images + 'PriceAndReturns1Symbol_EM.png',
           dpi=100, sizeInches=[22, 12])

##########################################################################
################# PREPROCESS DATA ########################################
##########################################################################

## Set GAP return as NaN
if (remove_gap_return):
    """
    We usually want to remove the gap returns when dealing with intraday
    data, since they are outliers for this distribution; they belong to a
    distribution with a longer time scale.
    """
    # If we had all the data properly, this would do.
    if (0):
        gap_ret = np.where(dates.time == opentime)[0]
def IFE_f(self, ObjectiveR=0.003, Rf=0.0, year_start=1996, year_finish=2016, window=10):
    ### The official version can be obtained by executing exercise c with another Rf.
    ## Just another graph to show that now we should not use all the data.

    # Choose a desired return.
    # Using the training samples, calculate the optimal portfolio for that
    # return by means of the market line.
    # Then calculate, for the next year, the real return of that portfolio.
    # Do this for several years as well.
    self.set_Rf(Rf)

    nf_flag = 1

    All_stds = []
    PortfolioReturns = []
    IndexReturns = []
    all_dates = []
    for year_test in range(year_start, year_finish - window + 1 - 1):  # +1 !!
        # Set the dates
        self.pf.set_interval(dt.datetime(year_test, 1, 1),
                             dt.datetime(year_test + window, 1, 1))

        # Obtain the market line !!
        w = self.TangentPortfolio(Rf=Rf)  # Obtain allocation
        self.set_allocation(w)
        # Obtain the expected return and std when using all our money !
        expRet, stdRet = self.get_metrics(investRf="no")
        param = bMl.obtain_equation_line(Rf, expRet, stdRet)
        bias, slope = param
        X = (ObjectiveR - Rf) / (expRet - Rf)
        wdesired = w * X

        ## Check that the output of this portfolio is the desired one.
        self.set_allocation(wdesired)        # Set the allocation
        expRet, stdRet = self.get_metrics()  # Get the expected return for that year
        # print ret

        ## Now that we have the desired w*X, we calculate the return of
        ## the portfolio in the following year.
        # To do so, we set the dates only to the next year, set the portfolio
        # allocation and calculate the yearly expected return !!

        # Set the dates to only the next year !!
        # Also, one month before, in order to get the returns of the first month.
        self.pf.set_interval(dt.datetime(year_test + window, 1, 1),
                             dt.datetime(year_test + window + 1, 1, 1))
        self.set_allocation(wdesired)        # Set the allocation
        expRet, stdRet = self.get_metrics()  # Get the expected return for that year
        PortfolioRet = self.yearly_Return(expRet)  # Get yearly returns
        PortfolioReturns.append(PortfolioRet)

        All_stds.append(self.yearly_covMatrix(stdRet))

        indexRet = self.get_indexMeanReturn()
        indexRet = self.yearly_Return(indexRet)
        IndexReturns.append(indexRet)

        # dates = self.get_dates()
        all_dates.append(year_test + window + 1)

        ## Graph with the evolution of the portfolio price after the assignment
        gl.plot(range(1, 13), np.cumsum(self.get_PortfolioReturn()),
                nf=nf_flag,
                labels=["Evolution of returns by month", "Months passed",
                        "Cumulative Return"],
                legend=[str(year_test + window + 1)])
        nf_flag = 0
        # print ret

    gl.savefig(folder_images + 'returnsEvolMonth.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    ## Graph with the desired and obtained returns and the returns of the index
    gl.bar(all_dates[:], IndexReturns,
           labels=["Obtained returns", "Time (years)", "Return (%)"],
           legend=["Index Return"],
           alpha=0.8, nf=1)
    gl.bar(all_dates[:], PortfolioReturns,
           labels=["Returns of year", "Year", "Value"],
           legend=["Portfolio Return"],
           alpha=0.8, nf=0)
    gl.scatter(all_dates[:],
               self.yearly_Return(ObjectiveR) * np.ones((len(all_dates[:]), 1)),
               legend=["Objective Return"], nf=0)
    gl.scatter(all_dates[:], All_stds,
               legend=["Std of the portfolio return"], nf=0)

    gl.savefig(folder_images + 'returnsEvolYears.png',
               dpi=150, sizeInches=[2 * 8, 2 * 6])

    #### Crazy idea !! Let's plot where the efficient frontier went
    nf_flag = 1
    PortfolioReturns = []
    IndexReturns = []
    all_dates = []
    gl.set_subplots(2, 3)
    for year_test in range(year_start, year_start + 6):  # +1 !!
        # Set the dates
        self.pf.set_interval(dt.datetime(year_test, 1, 1),
                             dt.datetime(year_test + window, 1, 1))
        optimal, portfolios = self.efficient_frontier(kind="Tangent")
        self.plot_allocations(portfolios,
                              labels=["Evolution of the efficient frontier"],
                              legend=["Frontier " + str(year_test + window) + " before"],
                              color="k", nf=1)

        self.pf.set_interval(dt.datetime(year_test + window, 1, 1),
                             dt.datetime(year_test + window + 1, 1, 1))
        self.set_allocation(self.TangentPortfolio(Rf=Rf))
        self.plot_allocations(portfolios,
                              legend=["Frontier " + str(year_test + window) + " after"],
                              color="r", nf=0)

    gl.savefig(folder_images + 'effEvol.png',
               dpi=80, sizeInches=[4 * 8, 3 * 6])
def generate_images_iterations_ll(Xs, mus, covs, Ks, myDManager, logl,
                                  theta_list, model_theta_list,
                                  folder_images_gif):
    # os.remove(folder_images_gif) # Remove previous images if existing
    """
    WARNING: MEANT FOR ONLY 3 DISTRIBUTIONS, due to the RGB color coding.
    """
    import shutil
    ul.create_folder_if_needed(folder_images_gif)
    shutil.rmtree(folder_images_gif)
    ul.create_folder_if_needed(folder_images_gif)

    ######## Plot the original data #####
    Xdata = np.concatenate(Xs, axis=1).T
    colors = ["r", "b", "g"]
    K_G, K_W, K_vMF = Ks

    ### FOR EACH ITERATION
    for i in range(len(theta_list)):  # theta_list
        indx = i
        gl.init_figure()
        ax1 = gl.subplot2grid((1, 2), (0, 0), rowspan=1, colspan=1)

        ## Get the relative log-likelihood of each cluster for every sample.
        ll = myDManager.pdf_log_K(Xdata, theta_list[indx])
        N, K = ll.shape
        # print ll.shape
        for j in range(N):  # For every sample
            # TODO: can this be done without a for loop?
            # Normalize the probability of the sample being generated by the clusters
            Marginal_xi_probability = gf.sum_logs(ll[j, :])
            ll[j, :] = ll[j, :] - Marginal_xi_probability

            ax1 = gl.scatter(
                Xdata[j, 0], Xdata[j, 1],
                labels=['EM Evolution. Kg:' + str(K_G) + ', Kw:' + str(K_W) +
                        ', K_vMF:' + str(K_vMF), "X1", "X2"],
                color=(np.exp(ll[j, 1]), np.exp(ll[j, 0]), np.exp(ll[j, 2])),
                alpha=1, nf=0)

        # Only doable if the clusters don't die
        for k_c in myDManager.clusterk_to_Dname.keys():
            k = myDManager.clusterk_to_thetak[k_c]
            distribution_name = myDManager.clusterk_to_Dname[k_c]  # G W

            if (distribution_name == "Gaussian"):
                ## Plot the evolution of the mu
                #### Plot the covariance of the clusters !
                mean, w, h, theta = bMA.get_gaussian_ellipse_params(
                    mu=theta_list[indx][k][0],
                    Sigma=theta_list[indx][k][1],
                    Chi2val=2.4477)
                r_ellipse = bMA.get_ellipse_points(mean, w, h, theta)
                gl.plot(r_ellipse[:, 0], r_ellipse[:, 1],
                        ax=ax1, ls="-.", lw=3, AxesStyle="Normal2",
                        legend=["Kg(%i). pi:%0.2f" %
                                (k, float(model_theta_list[indx][0][0, k]))])

            elif (distribution_name == "Watson"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for Watson
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[-1][k][0]
                Nsa = 1000
                # Draw 2D samples as a transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i_s in range(Nsa):
                    probs.append(np.exp(Wad.Watson_pdf_log(Xgrid[:, i_s], [mu, kappa])))

                probs = np.array(probs)
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)

                gl.plot(X1_w, X2_w, alpha=1, lw=3, ls="-.",
                        legend=["Kw(%i). pi:%0.2f" %
                                (k, float(model_theta_list[indx][0][0, k]))])

            elif (distribution_name == "vonMisesFisher"):
                #### Plot the pdf of the distribution !
                ## Distribution parameters for von Mises-Fisher
                kappa = float(theta_list[indx][k][1])
                mu = theta_list[indx][k][0]
                Nsa = 1000
                # Draw 2D samples as a transformation of the angle
                Xalpha = np.linspace(0, 2 * np.pi, Nsa)
                Xgrid = np.array([np.cos(Xalpha), np.sin(Xalpha)])

                probs = []  # Vector with probabilities
                for i_s in range(Nsa):
                    probs.append(np.exp(vMFd.vonMisesFisher_pdf_log(Xgrid[:, i_s], [mu, kappa])))

                probs = np.array(probs)
                probs = probs.reshape((probs.size, 1)).T
                # Plot it in polar coordinates
                X1_w = (1 + probs) * np.cos(Xalpha)
                X2_w = (1 + probs) * np.sin(Xalpha)
                # print X1_w.shape, X2_w.shape
                gl.plot(X1_w, X2_w, alpha=1, lw=3, ls="-.",
                        legend=["Kvmf(%i). pi:%0.2f" %
                                (k, float(model_theta_list[indx][0][0, k]))])

        gl.set_zoom(xlim=[-6, 6], ylim=[-6, 6], ax=ax1)

        ax2 = gl.subplot2grid((1, 2), (0, 1), rowspan=1, colspan=1)
        if (indx == 0):
            gl.add_text(positionXY=[0.1, .5],
                        text=r'Initialization. Incomplete LogLike: %.2f' % (logl[0]),
                        fontsize=15)
        elif (indx >= 1):
            gl.plot(range(1, np.array(logl).flatten()[1:].size + 1),
                    np.array(logl).flatten()[1:(indx + 1)],
                    ax=ax2,
                    legend=["Iteration %i, Incom LL: %.2f" % (indx, logl[indx])],
                    labels=["Convergence of LL with generated data",
                            "Iterations", "LL"],
                    lw=2)
            gl.scatter(1, logl[1], lw=2)
            pt = 0.05
            gl.set_zoom(xlim=[0, len(logl)],
                        ylim=[logl[1] - (logl[-1] - logl[1]) * pt,
                              logl[-1] + (logl[-1] - logl[1]) * pt],
                        ax=ax2)

        gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                           wspace=.2, hspace=0.01)

        gl.savefig(folder_images_gif + 'gif_' + str(indx) + '.png',
                   dpi=100, sizeInches=[16, 8], close="yes", bbox_inches=None)
        gl.close("all")
color="k", lw=5, alpha=0.7, labels=["Sine chart", "Time (s)", "Voltage(V)"], legend=["Rolling measurement"]) gl.stem(X2, Y2, nf=1, color="k", lw=2, alpha=0.7, labels=["Discrete window", "Sample (k)", "Amplitud"], legend=["Window values"]) gl.savefig(folder_images + 'subplot1.png', dpi=dpi, sizeInches=sizeInches) # Subplot Type 2 if (type_graph == 2): ax1 = gl.subplot2grid((1, 4), (0, 0), rowspan=1, colspan=3) gl.plot(X, Y, nf=0, color="k", lw=5, alpha=0.7, labels=["Sine chart", "Time (s)", "Voltage(V)"], legend=["Rolling measurement"]) # ax2 = gl.subplot2grid((1, 4), (0, 3), rowspan=1, colspan=1) gl.plot(X2, Y2,
symbolID = symbolIDs_pf[i] myTimeData = myPortfolio.get_symbols([symbolID])[0].get_timeData(period) returns = myTimeData.get_timeSeriesReturn(["Close"]) dates = myTimeData.get_dates() AxesStyle = " - No xaxis" if (i == len(symbolIDs_pf) -1): AxesStyle = "" if (i == 0): title = "Bar Chart. " + str(symbolIDs) + r" . Price ($\$$)" title2 = "Return" else: title = "" title2 = "" ylabel = symbolID + " (" + ul.period_dic[myTimeData.period] + ")" ax = gl.tradingBarChart(myTimeData, legend = ["Close price"], color = "k", nf = 1, sharex = axeshare, labels = [title,"",ylabel], AxesStyle = "Normal" + AxesStyle) # dataTransform = dataTransform) ax = gl.stem(dates, returns, legend = ["Return"], color = "k", nf = 1, sharex = axeshare, labels = [title2,"",""], AxesStyle = "Normal" + AxesStyle + " - No yaxis") # dataTransform = dataTransform) axeshare = ax gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.10, hspace=0) image_name = "differentSymbols.png" gl.savefig(folder_images + image_name, dpi = 100, sizeInches = [30, 12])
title = "Comparing MAs. " + str( symbols[0]) + "(" + ul5.period_dic[timeData.period] + ")" gl.plot(dates, [price, SMA1, MWA1, EMA1], nf=1, labels=[title, "", r"Price ($\$$)"], legend=[ r"$P_{CLOSE}$", "SMA(%i)" % nMA1, "WMA(%i)" % nMA1, "EMA(%i)" % nMA1 ], AxesStyle="Normal") # ls = "-", marker = ["*",5,None], fill= 1,AxesStyle = "Normal", alpha = 0.3) # TODO: Why is this one not shown ? gl.savefig(folder_images + 'comparingMAs.png', dpi=100, sizeInches=[10, 6]) if (comparing_lags): # Some basic indicators. price = timeData.get_timeSeries(["Close"]) dates = timeData.get_dates() nSMAs = [7, 20, 50] nEMAs = [7, 20, 50] # For lag and noise SMAs = [] for nMA_i in nSMAs: SMAs.append(timeData.SMA(seriesNames=["Close"], n=nMA_i)) EMAs = [] for nMA_i in nEMAs: EMAs.append(timeData.EMA(seriesNames=["Close"], n=nMA_i))
edate = dt.datetime.strptime("25-11-2016", "%d-%m-%Y")

######## CREATE THE OBJECT AND LOAD THE DATA ##########
# Tell which company and which period we want
timeData = CTD.CTimeData(symbols[0], periods[0])
TD = DBl.load_TD_from_csv(storage_folder, symbols[1], periods[0])
timeData.set_csv(storage_folder)  # Load the data into the model
timeData.set_TD(TD)

############## Obtain time series ###########################
price = timeData.get_timeSeries(["Close", "Average"])

############# Plot time series and save it to disk #########
gl.plot([], price)
datafolder = "../maildata/"
picdir = datafolder + "pene.png"
gl.savefig(picdir)

###########################################################################
############## BASIC PLOTTING FUNC ########################################
###########################################################################

user = "******"
pwd = "Goldenegg"
#user = "******"
#pwd = "manumon7g.@"

recipient = "*****@*****.**"
#recipient = "*****@*****.**"

subject = "[Trapyng] Update %s" % ("penesd")
def create_Bayesian_analysis_charts(model,
                                    X_data_tr, Y_data_tr,
                                    X_data_val, Y_data_val,
                                    tr_loss, val_loss, KL_loss,
                                    final_loss_tr, final_loss_val,
                                    xgrid_real_func, ygrid_real_func,
                                    folder_images,
                                    epoch_i=None):
    # Configuration of the plots
    alpha_points = 0.2
    color_points_train = "dark navy blue"
    color_points_val = "amber"
    color_train_loss = "cobalt blue"
    color_val_loss = "blood"
    color_truth = "k"
    color_mean = "b"
    color_most_likely = "y"

    ############################# Data computation #######################
    if (isinstance(X_data_tr, list)):
        pass
    else:
        if (X_data_tr.shape[1] == 1):  # Regression example
            x_grid, all_y_grid, most_likely_ygrid = compute_regression_1D_data(
                model, X_data_tr, X_data_val, Nsamples=100)
        elif (X_data_tr.shape[1] == 2):  # Classification example
            xx, yy, all_y_grid, most_likely_ygrid = compute_classification_2D_data(
                model, X_data_tr, X_data_val, Nsamples=100)
        else:  # RNN
            x_grid, all_y_grid, most_likely_ygrid = compute_RNN_1D_data(
                model, X_data_tr, X_data_val, Nsamples=100)

    ################################ Divide in plots ##############################
    gl.init_figure()
    ax1 = gl.subplot2grid((6, 3), (0, 0), rowspan=3, colspan=1)
    ax2 = gl.subplot2grid((6, 3), (3, 0), rowspan=3, colspan=1, sharex=ax1, sharey=ax1)

    ax3 = gl.subplot2grid((6, 3), (0, 1), rowspan=2, colspan=1)
    ax4 = gl.subplot2grid((6, 3), (2, 1), rowspan=2, colspan=1, sharex=ax3)
    ax5 = gl.subplot2grid((6, 3), (4, 1), rowspan=2, colspan=1, sharex=ax3)

    ax6 = gl.subplot2grid((6, 3), (0, 2), rowspan=3, colspan=1)
    ax7 = gl.subplot2grid((6, 3), (3, 2), rowspan=3, colspan=1, sharex=ax6)

    if (isinstance(X_data_tr, list)):
        Xtrain = [
            torch.tensor(X_data_tr[i], device=model.cf_a.device,
                         dtype=model.cf_a.dtype)
            for i in range(len(X_data_tr))
        ]
        Ytrain = torch.tensor(Y_data_tr, device=model.cf_a.device,
                              dtype=torch.int64)
        Xval = [
            torch.tensor(X_data_val[i], device=model.cf_a.device,
                         dtype=model.cf_a.dtype)
            for i in range(len(X_data_val))
        ]
        Yval = torch.tensor(Y_data_val, device=model.cf_a.device,
                            dtype=torch.int64)

        confusion = model.get_confusion_matrix(Xtrain, Ytrain)
        plot_confusion_matrix(confusion, model.languages, ax1)
        confusion = model.get_confusion_matrix(Xval, Yval)
        plot_confusion_matrix(confusion, model.languages, ax2)
    else:
        if (X_data_tr.shape[1] == 1):  # Regression example
            plot_data_regression_1d_2axes(
                X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func,
                X_data_val, Y_data_val,
                x_grid, all_y_grid, most_likely_ygrid,
                alpha_points, color_points_train, color_points_val,
                color_most_likely, color_mean, color_truth, ax1, ax2)
        elif (X_data_tr.shape[1] == 2):  # Classification example
            plot_data_classification_2d_2axes(
                X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func,
                X_data_val, Y_data_val,
                xx, yy, all_y_grid, most_likely_ygrid,
                alpha_points, color_points_train, color_points_val,
                color_most_likely, color_mean, color_truth, ax1, ax2)
        else:  # RNN example
            plot_data_RNN_1d_2axes(
                X_data_tr, Y_data_tr, xgrid_real_func, ygrid_real_func,
                X_data_val, Y_data_val,
                x_grid, all_y_grid, most_likely_ygrid,
                alpha_points, color_points_train, color_points_val,
                color_most_likely, color_mean, color_truth, ax1, ax2)

    # gl.fill_between (x_grid, [mean_samples_grid + 2*std_samples_grid, mean_samples_grid - 2*std_samples_grid]
    #                  , ax = ax2, alpha = 0.10, color = "b", legend = ["Mean realizations"])
    ## ax2: The uncertainty of the prediction !!
    # gl.plot (x_grid, std_samples_grid, ax = ax2, labels = ["Std (%i)"%(Nsamples),"X","f(X)"],
    #          legend = [" std predictions"], fill = 1, alpha = 0.3)

    ############## ax3 ax4 ax5: Loss Evolution !!
    ######################
    ## ax3: Evolution of the data loss
    gl.plot([], tr_loss, ax=ax3, lw=3,
            labels=["Losses", "", "Data loss"],
            legend=["train"], color=color_train_loss)
    gl.plot([], val_loss, ax=ax3, lw=3,
            legend=["validation"], color=color_val_loss,
            AxesStyle="Normal - No xaxis")

    ## ax4: Evolution of the KL loss
    gl.plot([], KL_loss, ax=ax4, lw=3,
            labels=["", "", "KL loss"],
            legend=["Bayesian Weights"],
            AxesStyle="Normal - No xaxis", color="k")

    ## ax5: Evolution of the total loss
    gl.plot([], final_loss_tr, ax=ax5, lw=3,
            labels=["", "epoch", "Total Loss (Bayes)"],
            legend=["train"], color=color_train_loss)
    gl.plot([], final_loss_val, ax=ax5, lw=3,
            legend=["validation"], color=color_val_loss)

    ############## ax6 ax7: Variational Weights !! ######################
    create_plot_variational_weights(model, ax6, ax7)

    ## Shade in chart 7 the region where |mu| < 2*sigma, i.e. sigma > |mu|/2:
    ## weights whose posterior mean is within two posterior stds of zero.
    mu_grid = np.linspace(-3, 3, 100)
    y_grid = np.abs(mu_grid) / 2

    gl.fill_between(mu_grid, 10 * np.ones(mu_grid.size), y_grid,
                    alpha=0.2, color="r", ax=ax7,
                    legend=["95% non-significant"])

    gl.set_zoom(ax=ax6, ylim=[-0.1, 10])
    gl.set_zoom(ax=ax7, xlim=[-2.5, 2.5],
                ylim=[-0.05,
                      np.exp(model.cf_a.input_layer_prior["log_sigma2"]) * (1 + 0.15)])
    # gl.set_zoom (ax = ax7, xlim = [-2.5, 2.5], ylim = [-0.1,2])

    # Set final properties and save figure
    gl.set_fontSizes(ax=[ax1, ax2, ax3, ax4, ax5, ax6, ax7],
                     title=20, xlabel=20, ylabel=20,
                     legend=10, xticks=12, yticks=12)

    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                       wspace=.30, hspace=0.10)

    if (epoch_i is None):
        gl.savefig(folder_images + "../" + 'Final_values_regression_1D_' +
                   str(model.cf_a.eta_KL) + '.png',
                   dpi=100, sizeInches=[20, 10])
    else:
        gl.savefig(folder_images + '%i.png' % epoch_i,
                   dpi=100, sizeInches=[20, 10],
                   close=True, bbox_inches="tight")
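# A small numeric illustration of the shaded "non-significant" region above
# (values are illustrative only): a variational weight with posterior mean mu
# and std sigma is flagged when |mu| < 2*sigma, i.e. zero lies inside the
# roughly-95% interval mu +/- 2*sigma.
import numpy as np

def is_non_significant(mu, sigma):
    return np.abs(mu) < 2.0 * sigma

assert is_non_significant(mu=0.3, sigma=0.5)      # 0 is well inside 0.3 +/- 1.0
assert not is_non_significant(mu=1.5, sigma=0.4)  # 0 is outside 1.5 +/- 0.8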
labels = ["Momentum Indicators MOM and ROC","","Price"], legend = ["Price", " Momentum", "ROC"]) gl.plot(dates, MOM , nf = 1, na = 0, legend = ["MOM(%i)"%nMOM]) # Normalize ROC to MOM ROC = ROC * np.max(np.abs(np.nan_to_num(MOM)))/ np.max(np.abs(np.nan_to_num(ROC))) gl.plot(dates, ROC, nf = 0, na = 0, legend = ["ROC(%i)"%nROC]) # The nect plot is just so that the vision starts in the first date gl.plot(dates, np.zeros((dates.size,1)) , nf = 0, na = 0) gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.20, hspace=0) gl.savefig(folder_images +'OscillatorsMOM.png', dpi = 100, sizeInches = [2*8, 2*2]) price = timeData.get_timeSeries(["Close"]); dates = timeData.get_dates() df = timeData.get_timeData() # Momentum and Rate of convergence obtained from the real price. nMOMs = [10, 20, 30] MOM1 = timeData.MOM(n = 1) EMAMOMs = [indl.get_EMA(MOM1, nMOMi) for nMOMi in nMOMs] # Normalize ROC to MOM
def create_image_weights_epoch(model, video_fotograms_folder2, epoch_i):
    """
    Creates the image of the layer weights at a given epoch.
    """
    N_Bayesian_layers = len(model.VBmodels)
    N_Normal_layers = len(model.LinearModels)

    # Compute the number of squares we will need:
    # 1 x linear layers, 2 x LSTMs
    gl.init_figure()
    cmap = cm.get_cmap('coolwarm', 30)

    all_axes = []
    for i in range(N_Bayesian_layers):
        layer = model.VBmodels[i]
        # if (layer.type_layer == "linear"):
        if ("linear" in type(layer).__name__.lower()):
            ax = gl.subplot2grid((1, N_Bayesian_layers + N_Normal_layers),
                                 (0, i), rowspan=1, colspan=1)
            weights = layer.weight.detach().cpu().numpy()
            biases = layer.bias.detach().cpu().numpy().reshape(-1, 1)
            neurons = np.concatenate((weights, biases), axis=1)
            cax = ax.imshow(neurons, interpolation="nearest",
                            cmap=cmap, vmin=-2, vmax=2)
            all_axes.append(ax)
        else:
            ax = gl.subplot2grid((1, N_Bayesian_layers + N_Normal_layers),
                                 (0, i), rowspan=1, colspan=1)
            weights_ih = layer.weight_ih.detach().cpu().numpy()
            biases_ih = layer.bias_ih.detach().cpu().numpy().reshape(-1, 1)
            weights_hh = layer.weight_hh.detach().cpu().numpy()
            biases_hh = layer.bias_hh.detach().cpu().numpy().reshape(-1, 1)

            weights = np.concatenate((weights_ih, weights_hh), axis=1)
            biases = np.concatenate((biases_ih, biases_hh), axis=1)
            neurons = np.concatenate((weights, biases), axis=1)
            cax = ax.imshow(neurons, interpolation="nearest",
                            cmap=cmap, vmin=-2, vmax=2)
            all_axes.append(ax)

    for i in range(N_Normal_layers):
        layer = model.LinearModels[i]
        if ("linear" in type(layer).__name__.lower()):
            ax = gl.subplot2grid((1, N_Bayesian_layers + N_Normal_layers),
                                 (0, N_Bayesian_layers + i), rowspan=1, colspan=1)
            weights = layer.weight.detach().cpu().numpy()
            biases = layer.bias.detach().cpu().numpy().reshape(-1, 1)
            neurons = np.concatenate((weights, biases), axis=1)
            cax = ax.imshow(neurons, interpolation="nearest",
                            cmap=cmap, vmin=-2, vmax=2)
            all_axes.append(ax)
        else:
            ax = gl.subplot2grid((1, N_Bayesian_layers + N_Normal_layers),
                                 (0, N_Bayesian_layers + i), rowspan=1, colspan=1)
            weights_ih = layer.weight_ih.detach().cpu().numpy()
            biases_ih = layer.bias_ih.detach().cpu().numpy().reshape(-1, 1)
            weights_hh = layer.weight_hh.detach().cpu().numpy()
            biases_hh = layer.bias_hh.detach().cpu().numpy().reshape(-1, 1)

            weights = np.concatenate((weights_ih, weights_hh), axis=1)
            biases = np.concatenate((biases_ih, biases_hh), axis=1)
            neurons = np.concatenate((weights, biases), axis=1)
            cax = ax.imshow(neurons, interpolation="nearest",
                            cmap=cmap, vmin=-2, vmax=2)
            all_axes.append(ax)

    # plt.xticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='vertical')
    # plt.yticks(range(data_df_train.shape[1]), data_df_train.columns, rotation='horizontal')
    plt.colorbar(cax)
    # plt.colorbar(cax2)
    # ax1.set_xticks(data_df_train.columns) # , rotation='vertical'
    # ax1.grid(True)
    plt.title('Weights')
    # labels=[str(x) for x in range(Nshow )]
    # ax1.set_xticklabels(labels,fontsize=20)
    # ax1.set_yticklabels(labels,fontsize=20)
    # Add colorbar, make sure to specify tick locations to match desired ticklabels
    plt.show()
    gl.set_fontSizes(ax=all_axes,
                     title=20, xlabel=20, ylabel=20,
                     legend=20, xticks=12, yticks=12)

    # Set final properties and save figure
    gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95,
                       wspace=.30, hspace=0.30)

    gl.savefig(video_fotograms_folder2 + '%i.png' % epoch_i,
               dpi=100, sizeInches=[14, 10],
               close=True, bbox_inches=None)
## Get the surface for the loss
####### PLOT THE DATA AND THE LEARNT LINE ############
gl.init_figure()
ax1 = gl.scatter(X_data_tr, Y_data_tr, lw=3, legend=["tr points"],
                 labels=["Data", "X", "Y"])
ax2 = gl.scatter(X_data_val, Y_data_val, lw=3, legend=["val points"])
gl.set_fontSizes(ax=[ax1, ax2], title=20, xlabel=20, ylabel=20,
                 legend=20, xticks=12, yticks=12)

x_grid = np.linspace(np.min([X_data_tr]) - 1, np.max([X_data_val]) + 1, 100)
y_grid = x_grid * W_values + b_values

gl.plot(x_grid, y_grid, legend=["training line"])
gl.savefig(folder_images + 'Training_Example_Data.png',
           dpi=100, sizeInches=[14, 4])

####### PLOT THE EVOLUTION OF RMSE AND PARAMETERS ############
gl.set_subplots(2, 1)
ax1 = gl.plot([], tr_loss, nf=1, lw=3,
              labels=["RMSE loss and parameters. Learning rate: %.3f" % train_config.lr,
                      "", "RMSE"],
              legend=["train"])
gl.plot([], val_loss, lw=3, legend=["validation"])

ax2 = gl.plot([], W_list, nf=1, lw=3, sharex=ax1,
              labels=["", "", "Parameters"], legend=["W"], color="b")
gl.plot([], b_list, lw=3,
        labels=["", "epochs", "Parameters"], legend=["b"], color="g")

gl.set_fontSizes(ax=[ax1, ax2], title=20, xlabel=20, ylabel=20,
                 legend=20, xticks=12, yticks=12)
gl.savefig(folder_images + 'Training_Example_Parameters.png',
           dpi=100, sizeInches=[14, 7])
gl.set_fontSizes(ax=[ax1, ax2, ax3], title=20, xlabel=20, ylabel=20, legend=10, xticks=15, yticks=15) gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.10) gl.savefig(folder_images + images_prefix + "Trimming_accuracies.png", dpi=100, sizeInches=[18, 6], close=False, bbox_inches="tight") #elif (Experiments_generate_results_data): # DataSet_statistics = fill_evaluation_data(model,device, dataset_iterable,num_batches, Evaluate_Model_Results, bayesian_ensemble = bayesian_ensemble) # EM = 100*np.mean(DataSet_statistics["em"]) # F1 = 100*np.mean(DataSet_statistics["f1"]) #metrics, data_loss = general_validation_runner(model) #print ("Loss validation:",data_loss) #print ("metrics: ", metrics) """ ################################################################################## ################# ANALYZE THE SYSTEM INTERNALS FOR A QUERY EXAMPLE ################### ####################################################################################
gl.scatter(Yjoint[0,:],Yjoint[1,:], alpha = 0.5, ax = ax2, lw = 4, AxesStyle = "Normal", labels = ["","X1", "X2"]) gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.01, hspace=0.01) xx, yy, zz = bMA.get_gaussian2D_pdf( xbins=40j, ybins=40j, mu = mu, cov = cov, std_K = std_K, x_grid = None) ax2.contour(xx, yy, zz, linewidths = 3, linestyles = "solid", alpha = 0.8, colors = None, zorder = 100) ax1.set_xlim(-6, 4) ax1.set_ylim(-4, 7) gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.2, hspace=0.01) gl.savefig(folder_images +'Gaussian_2DX_transform.png', dpi = 100, sizeInches = [18, 9]) ############################################################################### ############################ PLOT Decomposition ##################################### ############################################################################### # A = np.array([[0.9,2],[0.8,0.7]]) mu_b = [-1.5,2] U, s, R = np.linalg.svd(A) S = np.diag(s) Sigma = A.dot(A.T) s, R = np.linalg.eig(Sigma) S = np.diag(s) Sigma_rec = R.dot(S).dot(R.T) #
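# A quick self-contained check of the decomposition identities used above
# (A is a toy matrix mirroring the commented-out example; an illustration,
# not data from this codebase): if Sigma = A A^T, then Sigma is symmetric
# PSD, its eigendecomposition Sigma = R S R^T reconstructs it, and its
# eigenvalues are the squared singular values of A.
import numpy as np

A = np.array([[0.9, 2.0],
              [0.8, 0.7]])
Sigma = A.dot(A.T)

U, s_svd, Vt = np.linalg.svd(A)
vals, R = np.linalg.eigh(Sigma)   # eigh: the stable choice for symmetric matrices
S = np.diag(vals)

assert np.allclose(R.dot(S).dot(R.T), Sigma)             # Sigma_rec == Sigma
assert np.allclose(np.sort(vals), np.sort(s_svd ** 2))   # eigenvalues == singular^2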
wspace=.05, hspace=0.2) gl.set_fontSizes(ax=[ ax1, ax2, ], title=20, xlabel=20, ylabel=20, legend=15, xticks=18, yticks=12) gl.savefig(folder_images + 'Classifiers_performance.png', dpi=100, sizeInches=[3 * 8, 3 * 2]) if (plot_results): key_classifier = "QDA" # QDA # GNB RF classifier = cl_d[key_classifier] # Compute how well we have done in each sample using cross entropy Ypredict_test_proba = classifier.predict_proba( Xtest)[:, 1] # probability of 1 Ypredict_train_proba = classifier.predict_proba(Xtrain)[:, 1] Ypredict_test = classifier.predict(Xtest) Ypredict_train = classifier.predict(Xtrain) test_cross_entropy = Ytest * np.log(Ypredict_test_proba) + ( 1 - Ytest) * np.log(1 - Ypredict_test_proba)
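# Note on the per-sample "cross entropy" computed above: the expression
# y*log(p) + (1-y)*log(1-p) is the log-likelihood (cross-entropy is its
# negative), and it yields -inf when the classifier outputs a probability of
# exactly 0 or 1. A minimal sketch of a safer variant (the clipping threshold
# eps is an arbitrary choice, not from this codebase):
import numpy as np

def binary_cross_entropy(y_true, p_pred, eps=1e-12):
    p = np.clip(p_pred, eps, 1.0 - eps)  # avoid log(0)
    return -(y_true * np.log(p) + (1.0 - y_true) * np.log(1.0 - p))

# Example: a confident correct prediction has low loss, a wrong one high loss
print(binary_cross_entropy(np.array([1, 0]), np.array([0.9, 0.9])))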
else: # nu = np.array([-0.5*m*g/np.sqrt(-1 +4*l*l/(h*h)) , -m*g ]) nu_values = scipy.optimize.fsolve(get_error, nu) # nu_values = nu nu_values[0] = -np.abs(nu_values[0]) ax1 = print_chain(nu_values) print (" For N=%i: nu_guess = "%N,nu,", nu_final: ", nu_values) print("Costate vector: ",get_costate_value(nu_values,0) ) gl.set_fontSizes(ax = [ax1], title = 20, xlabel = 20, ylabel = 20, legend = 15, xticks = 12, yticks = 12) gl.subplots_adjust(left=.09, bottom=.10, right=.90, top=.95, wspace=.30, hspace=0.10) gl.savefig("P2_2.png", dpi = 100, sizeInches = [12, 7], close = False, bbox_inches = "tight") """ ################################ QUESTION 5 ############################## """ print (" ----------------- QUESTION 5 ----------------") def get_half_angle(i, nu_z): theta_i = np.arctan((m*g*(N/2-0.5-i) )/nu_z) return theta_i def get_half_final_position (nu_z, x0): for i in range(int(N/2)): theta_i =get_half_angle(i, nu_z)
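# The body of get_half_final_position is cut off above. For orientation, a
# hedged sketch of how such a routine typically accumulates the chain's end
# position: each link i of (assumed) length l contributes l*cos(theta_i)
# horizontally and l*sin(theta_i) vertically, with theta_i given by
# get_half_angle. The names (l, x0) and the sign conventions are assumptions,
# not recovered code:
import numpy as np

def get_half_final_position_sketch(nu_z, x0, N, l, get_half_angle):
    x, z = x0, 0.0
    for i in range(int(N / 2)):
        theta_i = get_half_angle(i, nu_z)
        x += l * np.cos(theta_i)  # horizontal advance of link i
        z += l * np.sin(theta_i)  # vertical displacement of link i
    return x, z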