def plot_x_query(lwr_theta):
    print('Welcome to the plot demo!\n================================\n'
          ' Locally weighted regression: lwr_theta for an x_query:\n%s' % lwr_theta)
    # Read data to make a scatter plot and LR fit
    x, y = read_data('/nosql/input/x_data.txt')
    x_2dim = x[:, [0, COLUMN_NUMBER]]  # because we can't plot more than two dimensions
    theta = batch_linear_regression(x_2dim, y)
    print('Simple linear regression produces the following theta for the regression line:\n%s' % theta)
    # Construct a line from intercept and coefficient
    x_pred = [min(x[:, 1])[0, 0], max(x[:, 1])[0, 0]]
    y_pred = [theta[0, 0] + v * theta[1, 0] for v in x_pred]
    plt.figure(figsize=(14, 10))
    plt.plot(x_2dim[:, 1], y, 'bo', label='Training Set')
    plt.plot(x_pred, y_pred, 'r-', label='Linear Regression')
    # Predict outcome for x_query
    y_query = x_query * lwr_theta
    print('Given the x_query: %s\nLWR predicts target: %s' % (x_query, y_query))
    plt.plot(x_query[:, COLUMN_NUMBER], y_query, 'go', markersize=10,
             label='Locally Weighted Linear Regression x_query Prediction')
    # Circle the prediction and fine-tune the plot
    circle = plt.Circle((x_query[:, COLUMN_NUMBER], y_query), 2, color='y', fill=False)
    plt.gca().add_artist(circle)
    plt.grid()
    plt.legend(loc=2)
    plt.tight_layout()
    plt.show()
def map_along_line(x, y, q, ax=None, cmap=None, norm=None,
                   time=None, max_step=1., missing=np.nan,
                   new_timebase=None, **kwargs):
    """Map some quantity q along x,y as a coloured line.
    With time set, perform linear interpolation of x,y,q onto new_timebase,
    filling with missing, and with max_step."""
    if ax is None:
        ax = plt.gca()
    if x.shape != y.shape:
        raise ValueError('Shape mismatch')
    if x.shape != q.shape:
        raise ValueError('Shape mismatch')
    if time is not None:
        if new_timebase is None:
            new_timebase = np.arange(time[0], time[-1], np.min(np.diff(time)))  # bit redundant
        x = interp_safe(new_timebase, time, x, max_step=max_step, missing=missing)
        y = interp_safe(new_timebase, time, y, max_step=max_step, missing=missing)
        q = interp_safe(new_timebase, time, q, max_step=max_step, missing=missing)
    # Build per-segment coordinates so each segment can be coloured by q
    points = np.array([x, y]).T.reshape(-1, 1, 2)
    segments = np.concatenate([points[:-1], points[1:]], axis=1)
    lc = LineCollection(segments, cmap=cmap, norm=norm, **kwargs)
    lc.set_array(q)
    ax.add_collection(lc)  # use the resolved axes rather than a second plt.gca()
    return lc
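# A minimal usage sketch for map_along_line, assuming numpy, matplotlib and
# LineCollection are available as the snippet above expects; the spiral data
# and the colour-by-parameter quantity are made up for illustration.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection

t = np.linspace(0, 4 * np.pi, 400)
x_demo, y_demo = t * np.cos(t), t * np.sin(t)
lc = map_along_line(x_demo, y_demo, t, cmap='viridis')  # colour the line by t
plt.gca().autoscale()   # LineCollection does not rescale the axes by itself
plt.colorbar(lc)
plt.show()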
def plot_svc(X, y, mysvc, bounds=None, grid=50):
    if bounds is None:
        xmin = np.min(X[:, 0], 0)
        xmax = np.max(X[:, 0], 0)
        ymin = np.min(X[:, 1], 0)
        ymax = np.max(X[:, 1], 0)
    else:
        xmin, ymin = bounds[0], bounds[0]
        xmax, ymax = bounds[1], bounds[1]
    aspect_ratio = (xmax - xmin) / (ymax - ymin)
    xgrid, ygrid = np.meshgrid(np.linspace(xmin, xmax, grid),
                               np.linspace(ymin, ymax, grid))
    plt.gca().set_aspect(aspect_ratio)  # plt.gca(aspect=...) no longer accepts kwargs
    plt.xlim(xmin, xmax)
    plt.ylim(ymin, ymax)
    plt.xticks([])
    plt.yticks([])
    # plt.hold(True) was removed from matplotlib; "hold" is now the default behaviour
    plt.plot(X[y == 1, 0], X[y == 1, 1], 'bo')
    plt.plot(X[y == -1, 0], X[y == -1, 1], 'ro')
    box_xy = np.append(xgrid.reshape(xgrid.size, 1),
                       ygrid.reshape(ygrid.size, 1), 1)
    if mysvc is not None:
        scores = mysvc.decision_function(box_xy)
    else:
        print('You must have a valid SVC object.')
        return None
    CS = plt.contourf(xgrid, ygrid, scores.reshape(xgrid.shape),
                      alpha=0.5, cmap='jet_r')
    plt.contour(xgrid, ygrid, scores.reshape(xgrid.shape), levels=[0],
                colors='k', linestyles='solid', linewidths=1.5)
    plt.contour(xgrid, ygrid, scores.reshape(xgrid.shape), levels=[-1, 1],
                colors='k', linestyles='dashed', linewidths=1)
    plt.plot(mysvc.support_vectors_[:, 0], mysvc.support_vectors_[:, 1],
             'ko', markerfacecolor='none', markersize=10)
    CB = plt.colorbar(CS)
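# A hedged usage sketch for plot_svc: train an sklearn SVC on a toy two-class
# problem with labels in {-1, +1}, which is what the plotting code expects.
# The dataset and kernel choice here are illustrative assumptions.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.svm import SVC

rng = np.random.RandomState(0)
X_demo = np.vstack([rng.randn(40, 2) + 2, rng.randn(40, 2) - 2])
y_demo = np.hstack([np.ones(40), -np.ones(40)])
clf = SVC(kernel='rbf', C=1.0).fit(X_demo, y_demo)
plot_svc(X_demo, y_demo, clf, grid=100)
plt.show()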
def on_circle_click(self, x, y):
    if not self.scale["length_of_scale_in_mu_meter"]:
        self.on_scala_click(x, y)
        return
    plt.scatter(x, y, color="white", s=60, edgecolor="blue", marker="x", lw=2.5)
    if len(self.current_circle_points) == 3:
        self.current_circle_points[:] = []
    self.current_circle_points.append((x, y))
    if len(self.current_circle_points) == 3:
        p = self.current_circle_points
        point, radius = cirle_from_three_points(p[0][0], p[0][1],
                                                p[1][0], p[1][1],
                                                p[2][0], p[2][1])
        color = self.colors[self.color_index % len(self.colors)]
        self.color_index += 1
        circle = matplotlib.patches.Circle(point, radius, lw=4,
                                           facecolor="none", edgecolor=color)
        # Convert the pixel radius to micrometres using the calibrated scale
        factor = (self.scale["length_of_scale_in_mu_meter"] /
                  self.scale["length_of_scale_in_px"])
        plt.text(point[0], point[1], "Radius: %.4fE-6 m" % (radius * factor),
                 horizontalalignment='center', color="black",
                 verticalalignment='center', fontsize=11,
                 bbox=dict(facecolor=color, alpha=0.85, edgecolor="0.7"))
        plt.gca().add_patch(circle)
    self.redraw()
def _gaussian_test():
    import matplotlib.pyplot as plt
    n = 10000
    mu_x = 0.0
    mu_y = 0.0
    # sig_x, sig_y = 1.5, 1.5
    tau = 0.0
    seeing = 1.5
    # NOTE: the conventional FWHM-to-sigma factor is 2*sqrt(2*ln(2)); the
    # original code uses 2*sqrt(2*e) and is kept unchanged here.
    sigma = seeing / (2. * np.sqrt(2. * np.e))
    slit_width = 0.2
    slit_height = 10.0
    slit_x, slit_y = slit_gaussian_psf(n, mu_x, mu_y, sigma, sigma, tau,
                                       slit_width, slit_height)
    log.info("x range: [%s, %s]", slit_x.min(), slit_x.max())
    log.info("y range: [%s, %s]", slit_y.min(), slit_y.max())
    plt.scatter(slit_x, slit_y, alpha=0.8)
    plt.fill([-slit_width/2, slit_width/2, slit_width/2, -slit_width/2],
             [-slit_height/2, -slit_height/2, slit_height/2, slit_height/2],
             'r', alpha=0.10, edgecolor='k')
    plt.gca().set_aspect("equal")
    plt.title("Gaussian distribution")
    plt.xlim([-slit_height/2., slit_height/2])
    plt.show()
def check_models(self):
    temp = np.logspace(0, np.log10(600))
    num = len(self.available_models())
    fig, ax = plt.subplots(1)
    self.plotting_colours(num, fig, ax, repeats=2)
    for author in self.available_models():
        Nc, Nv = self.update(temp=temp, author=author)
        # print Nc.shape, Nv.shape, temp.shape
        ax.plot(temp, Nc, '--')
        ax.plot(temp, Nv, '.', label=author)
    ax.loglog()
    leg1 = ax.legend(loc=0, title='colour legend')
    # Invisible points used only to build a second, line-style legend
    Nc, = ax.plot(np.inf, np.inf, 'k--', label='Nc')
    Nv, = ax.plot(np.inf, np.inf, 'k.', label='Nv')
    plt.legend([Nc, Nv], ['Nc', 'Nv'], loc=4, title='Line legend')
    plt.gca().add_artist(leg1)
    ax.set_xlabel('Temperature (K)')
    ax.set_ylabel('Density of states (cm$^{-3}$)')
    plt.show()
def plot_std_meshlines(self, step=0.1):
    '''Plot mesh circles for the standard deviation.'''
    color = self.std_color
    nstdmax = self.stdmax
    if self.negative:
        axmin = -np.pi / 2.
    else:
        axmin = 0.
    th = np.arange(axmin, np.pi / 2, 0.01)
    for ra in np.arange(0, nstdmax + 0.1 * step, step):
        self.ax.plot(ra * np.sin(th), ra * np.cos(th), ':', color=color)
    if self.normalize:
        # raw strings so matplotlib receives the TeX backslashes intact
        self.ax.set_ylabel(r'$\sigma / \sigma_{obs}$', color=color)
        self.ax.set_xlabel(r'$\sigma / \sigma_{obs}$', color=color)
    else:
        self.ax.set_ylabel('Standard Deviation', color=color)
        self.ax.set_xlabel('Standard Deviation', color=color)
    xticklabels = plt.getp(plt.gca(), 'xticklabels')
    plt.setp(xticklabels, color=color)
    yticklabels = plt.getp(plt.gca(), 'yticklabels')
    plt.setp(yticklabels, color=color)
def __init__(self, history_length=100):
    self.history_length = history_length
    self.root = Tk.Tk()
    self.root.wm_title("GPU MEMORY DISPLAY")
    self.root.protocol('WM_DELETE_WINDOW', self.quit_button_cb)
    fig = plt.figure(figsize=(8, 3))
    self.subplot = plt.subplot(211)
    plt.gca().invert_xaxis()
    self.canvas = FigureCanvasTkAgg(fig, master=self.root)
    self.max_gpu_mem = None
    self.current_gpu_mem = None
    self.mem_data = [0] * self.history_length
    self.mem_range = list(reversed(range(self.history_length)))
    self.canvas.get_tk_widget().pack(side=Tk.TOP, fill=Tk.BOTH, expand=1)
    self.update()
    Tk.mainloop()
def plot_timeseries(self, ax=None, vmin=None, vmax=None,
                    colorbar=False, label=True):
    if vmin is None:
        vmin = self.vmin
    if vmax is None:
        vmax = self.vmax
    if ax is None:
        ax = plt.gca()
    plt.sca(ax)
    plt.cla()
    plt.imshow(self.tser_arr[::-1, :], vmin=vmin, vmax=vmax,
               interpolation='nearest', extent=self.extent,
               origin='upper', aspect='auto')
    plt.xlim(self.extent[0], self.extent[1])
    plt.ylim(self.extent[2], self.extent[3])
    # plt.vlines(self.ionogram_list[0].time, self.extent[2], self.extent[3], 'r')
    if label:
        celsius.ylabel('f / MHz')
    if colorbar:
        old_ax = plt.gca()
        plt.colorbar(cax=celsius.make_colorbar_cax(),
                     ticks=self.cbar_ticks
                     ).set_label(r"$Log_{10} V^2 m^{-2} Hz^{-1}$")
        plt.sca(old_ax)
def AR_predict(d, coeffs, names=None, plot=False):
    """Plot the auto-regression predictions and the actual data."""
    predictions, ax1, ax2 = [], None, None
    for i, c in enumerate(coeffs):
        p = len(c)
        y_predict = np.convolve(d, c[::-1], mode='valid')
        y_predict = y_predict[:-1]  # discard the last value: it is outside our domain
        predictions.append(y_predict)
        if plot:
            series_name = names[i] if names is not None else ""
            y_gt = d[p:]
            N = len(y_gt)
            plt.subplot(2, 1, 1)
            if ax1 is None:
                ax1 = plt.gca()
            ax1.plot(np.arange(N), y_gt, label="actual")
            ax1.plot(np.arange(N), y_predict,
                     label="prediction %s (p=%d)" % (series_name, p))
            ax1.legend()
            plt.subplot(2, 1, 2)
            if ax2 is None:
                ax2 = plt.gca()
            ax2.plot(np.arange(p), c[::-1], label=series_name + ' coefficients')
            ax2.legend()
    if plot:
        plt.show()
    return predictions
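# A minimal, hedged usage sketch for AR_predict: fit AR(p) coefficients to a
# synthetic series with ordinary least squares and compare the prediction.
# The lag-matrix construction below is an illustrative assumption, not part
# of the original module.
import numpy as np
import matplotlib.pylab as plt

rng = np.random.RandomState(1)
d = np.zeros(300)
for t in range(2, 300):  # AR(2) process: x_t = 0.6 x_{t-1} - 0.3 x_{t-2} + noise
    d[t] = 0.6 * d[t - 1] - 0.3 * d[t - 2] + rng.randn()

p = 2
# Column k holds lag (k+1) of each target sample, so columns are [lag 1, lag 2]
lags = np.column_stack([d[p - k - 1:len(d) - k - 1] for k in range(p)])
theta, *_ = np.linalg.lstsq(lags, d[p:], rcond=None)
# AR_predict applies the coefficients oldest-lag-first, hence the reversal
AR_predict(d, [theta[::-1]], names=["AR(2)"], plot=True)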
def showladderpca(self):
    import mdp
    from matplotlib import pylab as plt
    import pprint
    import math

    cerr('calculating PCA & plotting')
    peak_sizes = sorted([x.rtime for x in self.alleles])
    # peak_sizes = sorted(peak_sizes)[:-5]
    # pprint.pprint(peak_sizes)
    # comps = algo.simple_pca(peak_sizes)
    # algo.plot_pca(comps, peak_sizes)

    from fatools.lib import const
    std_sizes = const.ladders['LIZ600']['sizes']
    x = std_sizes
    y = [p * 0.1 for p in peak_sizes]  # renamed loop variable so it no longer shadows x
    D = np.zeros((len(y), len(x)))
    for i in range(len(y)):
        for j in range(len(x)):
            # NOTE: a Gaussian similarity kernel would normally use a negative
            # exponent, exp(-(...)**2); the original positive sign is kept here.
            D[i, j] = math.exp(((x[j] - y[i]) * 0.001) ** 2)
    pprint.pprint(D)
    im = plt.imshow(D, interpolation='nearest', cmap='Reds')
    plt.gca().invert_yaxis()
    plt.xlabel("STD")
    plt.ylabel("PEAK")
    plt.grid()
    plt.colorbar()
    plt.show()
def mwpc_last_spills(df, filename, n_spills):
    spacing = 6.0  # mm
    L = spacing * len(df.T)
    y_max = df.max().max()
    fig = plt.figure()
    ax = fig.add_subplot(111)
    x = ((np.array(df.sum().index)) * spacing) - (L / 2.0) + (spacing / 2)
    colormap = plt.cm.gist_ncar
    # set_color_cycle is deprecated; set_prop_cycle is the current API
    plt.gca().set_prop_cycle(
        color=[colormap(i) for i in np.linspace(0, 0.9, n_spills)])
    for i, index in enumerate(df.index):
        y = df.iloc[i]  # .ix is deprecated; use positional indexing
        index = str(index)
        stime = index.split(' ')[1].split('.')[0]
        sdate = index.split(' ')[0]
        ax.plot(x, y, drawstyle='steps-mid', label=stime, linewidth=2)
    ax.grid()
    ax.set_xlabel('x [mm]')
    ax.set_ylabel('y')
    ax.set_title('MWPC ({}) {}'.format(sdate, filename))
    ax.legend(fancybox=True, framealpha=0.5)
    ax.set_ylim(top=y_max * 1.2)
    figname = filename.split('_')[1].split('.')[0]
    filename_figure = '{}{}_1.png'.format(directory, figname)
    try:
        fig.savefig(filename_figure)
    except IOError:
        print('{} access denied!'.format(filename_figure))
    plt.close(fig)
    return df
def graphical_test(satisfactory=0):
    from matplotlib import cm, pylab

    def cons():
        return np.random.random(2) * 4 - 2

    def foo(x, y, a, b):
        "banana function"
        tmp = a - x
        tmp *= tmp
        out = -x * x
        out += y
        out *= out
        out *= b
        out += tmp
        return out * (abs(np.cos((x - 1)**2 + (y - 1)**2)) + 10.0 / b)

    def f(params):
        return foo(params[0], params[1], 1, 100)

    optimizer = optimize(f, cons, verbose=False, its=1, hillWalks=0,
                         satisfactory=satisfactory, finalWalk=0)
    bgx, bgy = np.mgrid[-2:2:1000j, -2:2:1000j]
    bg = foo(bgx, bgy, 1, 100)
    for i in range(20):  # xrange is Python 2 only
        pylab.clf()
        pylab.imshow(bg, cmap=cm.RdBu, vmax=bg.mean() / 10)
        for x in optimizer.pool:
            pylab.plot((x[2] + 2) / 4 * 1000, (x[1] + 2) / 4 * 1000, 'gx')
        print(optimizer.pool[0], optimizer.muterate)
        pylab.gca().set_xbound(0, 1000)
        pylab.gca().set_ybound(0, 1000)
        pylab.draw()
        pylab.colorbar()
        optimizer.run()
        input('enter to advance')  # raw_input is Python 2 only
    return optimizer
def mass_flux_plot(*args, **kwargs):
    fltm = idls.read(args[0])
    injm = idls.read(args[1])
    f1 = plt.figure()

    ax1 = f1.add_subplot(211)
    plt.plot(injm.nt_sc, injm.nmf_rscale, 'r')
    plt.plot(injm.nt_sc, injm.nmf_zscale, 'b')
    plt.plot(injm.nt_sc, injm.nmf_z0scale, 'k')
    plt.plot(injm.nt_sc, (injm.nmf_rscale + injm.nmf_zscale), 'g')
    plt.axis([0.0, 160.0, 0.0, 3.5e-5])
    plt.minorticks_on()
    locs, labels = plt.yticks()
    plt.yticks(locs, ["%.1f" % x for x in locs * 1e5])  # list, not a Python 3 map object
    plt.text(0.0, 1.03, r'$10^{-5}$', transform=plt.gca().transAxes)
    plt.xlabel(r'Time [yr]', labelpad=6)
    plt.ylabel(r'$\dot{\rm M}_{\rm out} [\rm{M}_{\odot} \rm{yr}^{-1}]$', labelpad=15)

    ax2 = f1.add_subplot(212)
    plt.plot(fltm.nt_sc, fltm.nmf_rscale, 'r')
    plt.plot(fltm.nt_sc, fltm.nmf_zscale, 'b')
    plt.plot(fltm.nt_sc, fltm.nmf_z0scale, 'k')
    plt.plot(fltm.nt_sc, (fltm.nmf_rscale + fltm.nmf_zscale), 'g')
    plt.axis([0.0, 160.0, 0.0, 4.0e-5])
    plt.minorticks_on()
    locs, labels = plt.yticks()
    plt.yticks(locs, ["%.1f" % x for x in locs * 1e5])
    plt.text(0.0, 1.03, r'$10^{-5}$', transform=plt.gca().transAxes)
    plt.xlabel(r'Time [yr]', labelpad=6)
    plt.ylabel(r'$\dot{\rm M}_{\rm out} [\rm{M}_{\odot} \rm{yr}^{-1}]$', labelpad=15)
def dateticks(fmt='%Y-%m', **kwargs):
    '''Set up the date ticks.'''
    dateticker = ticker.FuncFormatter(
        lambda numdate, _: num2date(numdate).strftime(fmt))
    pylab.gca().xaxis.set_major_formatter(dateticker)
    # pylab.gcf().autofmt_xdate()
    tmp = dict(rotation=30, ha='right')
    tmp.update(kwargs)
    pylab.setp(pylab.xticks()[1], **tmp)
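# A short usage sketch for dateticks, assuming matplotlib's pylab, the ticker
# module and num2date are imported under the names the function above uses.
import datetime
import matplotlib.pylab as pylab
from matplotlib import ticker
from matplotlib.dates import num2date, date2num

days = [datetime.datetime(2020, 1, 1) + datetime.timedelta(d) for d in range(120)]
pylab.plot(date2num(days), range(120))
dateticks('%Y-%m-%d', rotation=45)  # extra kwargs are forwarded to the tick labels
pylab.show()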
def hideaxis(pos=None):
    # hide the x and y axes
    if pos:
        # list() guards against Python 3 dict views
        df = pd.DataFrame(list(pos.values()), columns=['x', 'y'])
        plt.xlim([df['x'].min() - 5, df['x'].max() + 5])
        plt.ylim([df['y'].min() - 5, df['y'].max() + 5])
    plt.gca().xaxis.set_major_locator(plt.NullLocator())
    plt.gca().yaxis.set_major_locator(plt.NullLocator())
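# A hedged usage sketch for hideaxis: pos is assumed to be a mapping from a
# node/label to an (x, y) pair, as in a networkx-style layout dict.
import pandas as pd
import matplotlib.pyplot as plt

pos_demo = {'a': (0, 0), 'b': (10, 5), 'c': (4, 8)}
xs, ys = zip(*pos_demo.values())
plt.scatter(xs, ys)
hideaxis(pos_demo)  # fixes the limits and removes the tick locators
plt.show()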
def continuous_calibration():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_start = utc.time(YMDhms(2015, 10, 1))
    dt = deltahours(1)
    n_obs = int(round((t_fc_start - t_start) / dt))
    obs_time_axis = Timeaxis(t_start, dt, n_obs + 1)
    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())
    ptgsk = create_tistel_simulator(
        PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start,
                                  utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)

    num_opt_days = 30
    # Step forward num_opt_days days and store the state for each day:
    recal_start = t_start + deltahours(num_opt_days * 24)
    t = t_start
    state = initial_state
    opt_states = {t: state}
    while t < recal_start:
        ptgsk.run(Timeaxis(t, dt, 24), state)
        t += deltahours(24)
        state = ptgsk.reg_model_state
        opt_states[t] = state

    recal_stop = utc.time(YMDhms(2011, 10, 30))  # overridden just below
    recal_stop = utc.time(YMDhms(2012, 5, 30))
    curr_time = recal_start
    q_obs_avg = TsTransform().to_average(t_start, dt, n_obs + 1, q_obs_m3s_ts)
    target_spec = TargetSpecificationPts(q_obs_avg, IntVector([0]), 1.0, KLING_GUPTA)
    target_spec_vec = TargetSpecificationVector([target_spec])
    i = 0
    times = []
    values = []
    p, p_min, p_max = construct_calibration_parameters(ptgsk)
    while curr_time < recal_stop:
        print(i)
        i += 1
        opt_start = curr_time - deltahours(24 * num_opt_days)
        opt_state = opt_states.pop(opt_start)
        p = ptgsk.region_model.get_region_parameter()
        p_opt = ptgsk.optimize(Timeaxis(opt_start, dt, 24 * num_opt_days),
                               opt_state, target_spec_vec, p, p_min, p_max,
                               tr_stop=1.0e-5)
        ptgsk.region_model.set_region_parameter(p_opt)
        corr_state = adjust_simulator_state(ptgsk, curr_time, q_obs_m3s_ts)
        ptgsk.run(Timeaxis(curr_time, dt, 24), corr_state)
        curr_time += deltahours(24)
        opt_states[curr_time] = ptgsk.reg_model_state
        discharge = ptgsk.region_model.statistics.discharge([0])
        times.extend(discharge.time(i) for i in range(discharge.size()))
        values.extend(list(np.array(discharge.v)))
    plt.plot(utc_to_greg(times), values)
    plot_results(None, q_obs=observed_tistel_discharge(
        UtcPeriod(recal_start, recal_stop)))
    set_calendar_formatter(Calendar())
    # plt.interactive(1)
    plt.title("Continuously recalibrated discharge vs observed")
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$",
               verticalalignment="top", rotation="horizontal")
    plt.gca().yaxis.set_label_coords(0, 1.1)
def build_normalized_histogram(digit_freq, labels):
    fig = plt.figure()
    # list() guards against Python 3 dict views
    plt.hist(list(digit_freq.keys()), weights=list(digit_freq.values()))
    plt.title('Normalized Histogram')
    plt.xlabel('Digit #')
    plt.ylabel('Frequency')
    plt.gca().set_xlim([0, 9])
    fig.savefig('norm_hist.png')
def plot_exon(row):
    start = int(row["exon_start"])
    stop = int(row["exon_stop"])
    size = stop - start
    # print(start, stop)
    rectangle = plt.Rectangle((start, -20), size, 10, fc='red')
    plt.gca().add_patch(rectangle)
def panel(no, xn, yn, bs=True):
    plt.subplot(no, aspect="equal")
    if bs:
        plt.plot(x, y, "k--")
    plt.xlabel(xn + r"$ / R_M$")
    celsius.ylabel(yn + r"$ / R_M$", offset=off)
    plt.xlim(*lims)
    plt.ylim(*lims)
    plt.gca().add_patch(plt.Circle((0.0, 0.0), 1.0, fill=False))
def plot_column_elem_degree_distribution(self, logscale=True, bins=None):
    column_elem_degrees = self.degrees_of_column_elems()
    if bins is None:
        bins = int(numpy.max(column_elem_degrees))
    pylab.hist(column_elem_degrees, bins=bins)
    if logscale:
        pylab.gca().set_yscale("log")
        pylab.gca().set_xscale("log")
    pylab.show()
def plot_contours(obj, top_bottom=True):
    '''Plot the BRF as an azimuthal projection with contours over the
    TOC and soil.
    Input: rt_layers object; top_bottom - True plots the TOC and soil
    panels side by side, False plots a single panel.
    Output: contour plot of brf.
    '''
    sun = ((np.pi - obj.sun0[0]) * np.cos(obj.sun0[1] + np.pi),
           (np.pi - obj.sun0[0]) * np.sin(obj.sun0[1] + np.pi))
    theta = obj.views[:, 0]
    x = np.cos(obj.views[:, 1]) * theta
    y = np.sin(obj.views[:, 1]) * theta
    z = obj.I_top_bottom  # * -obj.mu_s
    if top_bottom == True:
        if np.max(z) > 1.:  # the original compared the function np.max itself
            maxz = np.max(z)
        else:
            maxz = 1.
    else:
        maxz = np.max(z[:obj.n // 2])  # integer division for indexing
    minz = 0.  # np.min(z)
    space = np.linspace(minz, maxz, 11)
    x = x[:obj.n // 2]
    y = y[:obj.n // 2]
    zt = z[:obj.n // 2]
    zb = z[obj.n // 2:]
    fig = plt.figure()
    if top_bottom == True:
        plt.subplot(121)
    plt.plot(sun[0], sun[1], 'ro')
    triang = tri.Triangulation(x, y)
    plt.gca().set_aspect('equal')
    plt.tricontourf(triang, zt, space, vmax=maxz, vmin=minz)
    plt.title('TOC BRF')
    plt.ylabel('Y')
    plt.xlabel('X')
    if top_bottom == True:
        plt.subplot(122)
        plt.plot(sun[0], sun[1], 'ro')
        plt.gca().set_aspect('equal')
        plt.tricontourf(triang, zb, space, vmax=maxz, vmin=minz)
        plt.title('Soil Absorption')
        plt.ylabel('Y')
        plt.xlabel('X')
    s = obj.__repr__()
    if top_bottom == True:
        cbaxes = fig.add_axes([0.11, 0.1, 0.85, 0.05])
        plt.suptitle(s, x=0.5, y=0.93)
        plt.colorbar(orientation='horizontal', ticks=space,
                     cax=cbaxes, format='%.3f')
    else:
        plt.suptitle(s, x=0.5, y=0.13)
        plt.colorbar(orientation='horizontal', ticks=space, format='%.3f')
    # plt.tight_layout()
    plt.show()
def bifurcate(x0, ntransient, nplot, r0, rmax, dr):
    global x
    count = int((rmax - r0) / dr)
    fig = [_Plot(r0 + dr * n, x0, ntransient, nplot) for n in range(count + 1)]
    plt.gca().set_xlim(r0, rmax)
    plt.gca().set_ylim(np.min(x), np.max(x))
    plt.xlabel('r')
    plt.ylabel('x')
    plt.title('Bifurcation Diagram')
    plt.show()
def pltAgainst(a, label, clr):
    X = a['X']
    low_25 = a['low_25']
    high_25 = a['high_25']
    medians = a['medians']
    plt.plot(X[:, 1], low_25, ':', color=clr)
    plt.plot(X[:, 1], high_25, ':', color=clr)
    plt.plot(X[:, 1], medians, color=clr, label=label)
    # Render y ticks as percentages
    plt.gca().yaxis.set_major_formatter(
        plt.FuncFormatter(lambda x, pos: '%0.2f%%' % (x * 100)))
def plotK(JDict, xscale='linear', n='', **kwargs):
    paths = list(JDict.values())   # list() so the views can be indexed (Python 3)
    names = list(JDict.keys())
    bnpy.viz.PlotTrace.plotJobs(MakePaths(paths, n), names, MakeStyles(names),
                                yvar='K', tickfontsize=tickfontsize,
                                density=1, **kwargs)
    set_xscale(xscale)
    pylab.ylim(Klims)
    pylab.yticks(Kticks)
    pylab.gca().yaxis.grid()  # horizontal lines
    pylab.gcf().set_size_inches(W, H)
def plotter_deturing(lam, AB, lam_meas, conv_meas, name):
    fig = plt.figure(figsize=(20.0, 10.0))
    plt.gca().get_xaxis().get_major_formatter().set_useOffset(False)
    plt.plot(lam * 1e9, AB)
    plt.plot(lam_meas, conv_meas, 'o')
    plt.xlabel(r'$\left|\lambda_p - \lambda_i \right| (nm)$')
    plt.ylabel('Conv_eff (dB)')
    plt.savefig(name + '.png', bbox_inches='tight')
    plt.close(fig)
    return 0
def Plot(func, x0, ntransient, nplot, r0, rmax, dr):
    global x
    count = int((rmax - r0) / dr)
    fig = [_Plot(func, r0 + dr * n, x0, ntransient, nplot) for n in range(count + 1)]
    plt.gca().set_xlim(r0, rmax)
    plt.gca().set_ylim(np.min(x), np.max(x))
    plt.xlabel(r'$r$', fontsize=16)
    plt.ylabel(r'$x$', fontsize=16)
    plt.title('Bifurcation Diagram')
    plt.show()
def plot_daily_means(time_series):
    ext_ts = time_series.copy()
    ext_ts['weekday'] = ext_ts.index.weekday
    ext_ts.boxplot(column=['Internet traffic data (in GB)'], by='weekday')
    plt.title('')
    plt.ylabel('data [GB]')
    plt.xlabel('time')
    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%a'))
    plt.show()
def plotCorrEnh(self, parsList=None, **plotArgs):
    """
    Produces enhanced correlation plots.

    Parameters
    ----------
    parsList : list of string, optional
        If not given, all available traces are used.
        Otherwise a list of at least two parameters has to be specified.
    plotArgs : dict, optional
        Keyword arguments handed to plot procedures of pylab. The following
        keywords are available: contour, bins, cmap, origin, interpolation,
        colors
    """
    if not ic.check["matplotlib"]:
        PE.warn(PE.PyARequiredImport(
            "To use 'plotCorr', matplotlib has to be installed.",
            solution="Install matplotlib."))
        return
    tracesDic = {}
    if parsList is not None:
        for parm in parsList:
            self._parmCheck(parm)
            tracesDic[parm] = self[parm]
        if len(tracesDic) < 2:
            raise(PE.PyAValError(
                "For plotting correlations, at least two valid parameters are needed.",
                where="TraceAnalysis::plotCorr"))
    else:
        # Use all available traces
        for parm in self.availableParameters():
            tracesDic[parm] = self[parm]

    pars = list(tracesDic.keys())     # list() so the keys can be indexed (Python 3)
    traces = list(tracesDic.values())

    fontmap = {1: 10, 2: 9, 3: 8, 4: 8, 5: 8}
    if not len(tracesDic) - 1 in fontmap:
        fontmap[len(tracesDic) - 1] = 8

    k = 1
    for j in range(len(tracesDic)):
        for i in range(len(tracesDic)):
            if i > j:
                plt.subplot(len(tracesDic) - 1, len(tracesDic) - 1, k)
                # plt.title("Pearson's R: %1.5f" % self.pearsonr(pars[j], pars[i])[0], fontsize='x-small')
                plt.xlabel(pars[j], fontsize=fontmap[len(tracesDic) - 1])
                plt.ylabel(pars[i], fontsize=fontmap[len(tracesDic) - 1])
                tlabels = plt.gca().get_xticklabels()
                plt.setp(tlabels, 'fontsize', fontmap[len(tracesDic) - 1])
                tlabels = plt.gca().get_yticklabels()
                plt.setp(tlabels, 'fontsize', fontmap[len(tracesDic) - 1])
                # plt.plot(traces[j], traces[i], '.', **plotArgs)
                self.__hist2d(traces[j], traces[i], **plotArgs)
            if i != j:
                k += 1
def plot_S(S, path=None):
    from matplotlib import pylab as plt
    im1 = plt.imshow(S, interpolation='nearest', cmap='Reds')
    # im2 = plt.plot(path[0], path[1], 'k')
    plt.gca().invert_yaxis()
    plt.xlabel("STD")
    plt.ylabel("PEAK")
    plt.grid()
    plt.colorbar()
    plt.show()
def makeplot(tsim, X1, label, pf, *var, **kwargs):
    """
    SUMMARY:
    It constructs the plot where tsim is on the x-axis,
    X1,X2,X3 on the y-axis, and label is the label of the y-axis

    SYNTAX:
    makeplot(tsim,X1,label,*var)

    ARGUMENTS:
    + tsim   - x-axis vector (time of the simulation (min))
    + X1,X2,X3 - y-axis vectors.
               X1 represents the actual value,
               X2 the target (eventual),
               X3 the setpoint (eventual)
    + label  - label for the y-axis
    + pf     - path where the plots are saved
    + var    - positional variables to include another vector/s X2 and X3
               to plot together with X1
    + kwargs - plot options including linestyle and changing the default
               legend values
    """
    linetype = '-'      # default value for linetype
    lableg = 'Target'   # default value for legend label
    for kwkey in kwargs:
        if kwkey == 'pltopt':
            linetype = kwargs['pltopt']
        if kwkey == 'lableg':
            lableg = kwargs['lableg']
    nt = int(tsim.size)
    X1 = np.array(X1)
    sz = old_div(X1.size, nt)
    Xout1 = np.zeros((nt, sz))
    Xout2 = np.zeros((nt, sz))
    Xout3 = np.zeros((nt, sz))
    for k in range(sz):
        x1 = X1[k::sz]
        plt.figure()
        plt.plot(tsim, x1)
        plt.xlabel('Time ')
        plt.ylabel(label + str(k + 1))
        plt.gca().set_xlim(left=0, right=tsim[-1])
        Xout1[:, k] = np.reshape(x1, (nt,))
        for i_var in range(len(var)):
            # extract dimension of var
            var_i = var[i_var]
            Xi = np.array(var_i)
            xi = Xi[k::sz]
            plt.plot(tsim, xi, ls=linetype)
            if i_var == 0:
                plt.legend(('Actual', lableg))
                plt.gca().set_xlim(left=0, right=tsim[-1])
                Xout2[:, k] = np.reshape(xi, (nt,))
            elif i_var == 1:
                plt.legend(('Actual', 'Target', 'Set-Point'))
                plt.gca().set_xlim(left=0, right=tsim[-1])
                Xout3[:, k] = np.reshape(xi, (nt,))
        plt.grid(True)
        plt.savefig(pf + label + str(k + 1) + '.pdf', format='pdf',
                    transparent=True, bbox_inches='tight')
    return [Xout1, Xout2, Xout3]
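# A hedged usage sketch for makeplot: a single simulated signal plus a target
# trace, written to /tmp. old_div comes from the python-future "past" package,
# matching the function body above; the signal itself is illustrative.
import numpy as np
import matplotlib.pylab as plt
from past.utils import old_div

tsim = np.linspace(0, 60, 61)  # minutes
actual = np.sin(tsim / 10.0)
target = np.zeros_like(tsim)
makeplot(tsim, actual, 'Signal', '/tmp/', target, pltopt='--', lableg='Reference')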
pylab.imshow(Z, interpolation='nearest',
             extent=(mx.min(), mx.max(), my.min(), my.max()),
             cmap=pylab.cm.Blues, aspect='auto', origin='lower')

c2a, c2b, c2c = km.cluster_centers_
pylab.scatter(km.cluster_centers_[:, 0], km.cluster_centers_[:, 1],
              marker='x', linewidth=2, s=100, color='black')
# Arrows from the previous centroids (c1a, c1b, c1c) to the new ones
pylab.gca().add_patch(
    pylab.Arrow(c1a[0], c1a[1], c2a[0] - c1a[0], c2a[1] - c1a[1], width=0.1))
pylab.gca().add_patch(
    pylab.Arrow(c1b[0], c1b[1], c2b[0] - c1b[0], c2b[1] - c1b[1], width=0.1))
pylab.gca().add_patch(
    pylab.Arrow(c1c[0], c1c[1], c2c[0] - c1c[0], c2c[1] - c1c[1], width=0.1))

pylab.savefig(os.path.join(CHART_DIR, "1400_03_0%i.png" % i))
pylab.clf()
i += 1

# 3 iterations ####################
km = KMeans(init='random', n_clusters=num_clusters, verbose=1, n_init=1,
# In[110]:

get_ipython().run_line_magic('matplotlib', 'inline')
plt.rcParams['figure.figsize'] = [9.5, 13]
plt.rcParams['figure.subplot.left'] = plt.rcParams['figure.subplot.bottom'] = .1
plt.rcParams['figure.subplot.right'] = plt.rcParams['figure.subplot.top'] = .9

# In[111]:

startTime = time.time()
plt.figure(figsize=(20, 4))
ax = plt.gca()
visualize_model(model, ax)
plt.axis('tight')
plt.axis('off')
plt.show()
print('elapsed time: {}'.format(time.time() - startTime))

# # Determine Target Device for Training

# In[112]:

targetDeviceCPU = torch.device('cpu')
targetDeviceGPU = torch.device('cuda:0')
def clean_ticks():
    ax = plt.gca()
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
def plot_results(ptxsk, q_obs=None):
    h_obs = None
    n_temp = 1
    temp = []
    precip = None
    discharge = None
    plt.figure()  # start a new plot window
    if ptxsk is not None:
        plt.subplot(8, 1, 1)  # 8 rows of plots
        discharge = ptxsk.region_model.statistics.discharge([])  # sum discharge over all areas
        n_temp = ptxsk.region_model.size()
        temp = [ptxsk.region_model.statistics.temperature([i]) for i in range(n_temp)]
        precip = [ptxsk.region_model.statistics.precipitation([i]) for i in range(n_temp)]
        radiation = [ptxsk.region_model.statistics.radiation([i]) for i in range(n_temp)]
        rel_hum = [ptxsk.region_model.statistics.rel_hum([i]) for i in range(n_temp)]
        wind_speed = [ptxsk.region_model.statistics.wind_speed([i]) for i in range(n_temp)]
        snow_sca = [ptxsk.region_model.hbv_snow_state.sca([i]) for i in range(n_temp)]
        snow_swe = [ptxsk.region_model.hbv_snow_state.swe([i]) for i in range(n_temp)]
        # Results are on the same time axis, so we only need one
        times = utc_to_greg([discharge.time(i) for i in range(discharge.size())])
        plt.plot(times, np.array(discharge.v), color='blue')
        ax = plt.gca()
        ax.set_xlim(times[0], times[-1])
        plt.ylabel(r"Discharge in $\mathbf{m^3/s}$")
        set_calendar_formatter(Calendar())
    if q_obs is not None:
        obs_times = utc_to_greg([q_obs.time(i) for i in range(q_obs.size())])
        ovs = [q_obs.value(i) for i in range(q_obs.size())]
        h_obs, = plt.plot(obs_times, ovs, linewidth=2, color='green')
    if ptxsk is not None:
        plt.subplot(8, 1, 2, sharex=ax)
        for i in range(n_temp):
            plt.plot(times, np.array(temp[i].v))
        # set_calendar_formatter(Calendar())
        plt.ylabel(r"Temperature in C")
        plt.subplot(8, 1, 3, sharex=ax)
        for i in range(n_temp):
            plt.plot(times, np.array(precip[i].v))
        plt.ylabel(r"Precipitation in mm")
        plt.subplot(8, 1, 4, sharex=ax)
        for i in range(n_temp):
            plt.plot(times, np.array(radiation[i].v))
        plt.ylabel(r"Radiation w/m2")
        plt.subplot(8, 1, 5, sharex=ax)
        for i in range(n_temp):
            plt.plot(times, np.array(rel_hum[i].v))
        plt.ylabel(r"Rel.hum %")
        plt.subplot(8, 1, 6, sharex=ax)
        for i in range(n_temp):
            plt.plot(times, np.array(wind_speed[i].v))
        plt.ylabel(r"Wind speed m/s")
        plt.subplot(8, 1, 7, sharex=ax)
        for i in range(n_temp):
            plt.plot(times, np.asarray(snow_swe[i].v)[:-1])
        plt.ylabel(r"SWE mm")
        plt.subplot(8, 1, 8, sharex=ax)
        for i in range(n_temp):
            plt.plot(times, np.asarray(snow_sca[i].v)[:-1])
        plt.ylabel(r"SCA %")
    return h_obs
def add_yaxis(fsp=None, position='right', yscale=None, basey=10, subsy=None):
    """
    Adds a second y-axis to a :class:`Subplot`.

    This function can also be used as a method.

    Parameters
    ----------
    fsp : {None, Subplot}
        Subplot to which the secondary y-axis is added.
        If None, the current subplot is selected: in that case, it should
        be a valid :class:`Subplot`.
        When used as a :class:`Subplot` method, this parameter points
        automatically to the calling subplot.
    position : {string}
        Position of the new axis, as either ``'left'`` or ``'right'``.
    yscale : {string}
        Scale of the new axis, as either ``'log'``, ``'linear'`` or ``None``.
        If None, uses the same scale as the first y axis.
    basey : {integer}
        Base of the logarithm for the new axis (if needed).
    subsy : {sequence}
        Sequence of the location of the minor ticks; None defaults to
        autosubs, which depend on the number of decades in the plot.
        E.g. for base 10, ``subsy=(1,2,5)`` will put minor ticks on 1, 2, 5,
        11, 12, 15, 21, .... To turn off minor ticking, set ``subsy=[]``.

    Raises
    ------
    TypeError
        If the selected subplot is not a valid :class:`Subplot` object.
    """
    if fsp is None:
        fsp = pylab.gca()
    fig = fsp.figure
    axisini = fsp.axis()
    fsp_alt_args = (fsp._rows, fsp._cols, fsp._num + 1)
    fsp_alt = fig.add_subplot(frameon=False, position=fsp.get_position(),
                              sharex=fsp, *fsp_alt_args)
    # Set position ....................
    if position.lower() == 'right':
        (inipos, newpos) = ('left', 'right')
    else:
        (inipos, newpos) = ('right', 'left')
    # Force scale ticks to one side ...
    fsp.yaxis.set_ticks_position(inipos)
    fsp.yaxis.set_label_position(inipos)
    # Force 2nd ticks to the other side..
    fsp_alt.yaxis.set_ticks_position(newpos)
    fsp_alt.yaxis.set_label_position(newpos)
    # Force period axis scale..........
    if yscale is None:
        yscale = fsp.get_yscale()
        try:
            basey = fsp.yaxis.get_major_locator()._base
        except AttributeError:
            basey = 10.
    fsp_alt.set_yscale(yscale, basey=basey, subsy=subsy)
    pylab.draw_if_interactive()
    return fsp_alt
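# A hedged usage sketch for add_yaxis. It assumes a matplotlib version
# contemporary with this code: the private _rows/_cols/_num subplot attributes
# and the basey/subsy keywords used above belong to older matplotlib releases.
import numpy as np
import matplotlib.pylab as pylab

ax = pylab.subplot(1, 1, 1)
t = np.linspace(0, 10, 200)
ax.plot(t, np.exp(t / 3.0))
ax_right = add_yaxis(ax, position='right', yscale='log')
ax_right.set_ylabel('log scale (right)')
pylab.show()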
def run_test_3(self):
    """
    Add an anomaly at a certain point (constant additional demand) and
    compare the evolution of clusters (with partial clustering) to the
    normal evolution of clusters, observing the velocity of change; that
    gives the time until steady-state error.
    """
    day_start_deviation = 10
    day_end = 20
    deviation = 200
    # iterations = day_end - day_start_deviation
    iterations = day_end
    deviation_element_vect = [None] * iterations
    deviation_total_vect = [None] * iterations
    res_partial_whole_vect = [None] * iterations
    res_partial_whole_anomaly_vect = [None] * iterations
    data = copy.deepcopy(self.data[0]["series"])
    data_with_constant_anomaly = copy.deepcopy(self.data[0]["series"])
    # add constant deviation (anomaly) to the second data set, starting with
    # day_start_deviation (index of the day the anomaly starts)
    for i, d in enumerate(range(day_start_deviation, day_end)):
        for j in range(len(data_with_constant_anomaly[d])):
            data_with_constant_anomaly[d][j] += deviation
    centroids_init = dcluster.reinit(data[0:day_start_deviation - 1], 2, 5)
    res_partial_whole, a = dcluster.k_means_clust_dynamic()
    # for i, d in enumerate(range(day_start_deviation, day_end)):
    for i, d in enumerate(range(day_end)):
        print(str(i) + "," + str(d))
        res_partial_whole_vect[i] = copy.deepcopy(
            dcluster.k_means_clust_dynamic_partial_update_whole(data[d]))
    # run clustering update for the second data set, with anomalies
    dcluster.reinit(data_with_constant_anomaly[0:day_start_deviation - 1], 2, 5,
                    centroids_init)
    res_partial_whole_anomaly, a = dcluster.k_means_clust_dynamic()
    # for i, d in enumerate(range(day_start_deviation, day_end)):
    for i, d in enumerate(range(day_end)):
        print(str(i) + "," + str(d))
        res_partial_whole_anomaly_vect[i] = copy.deepcopy(
            dcluster.k_means_clust_dynamic_partial_update_whole(
                data_with_constant_anomaly[d]))
    # plot the results (deviation between the two data sets)
    for i, d in enumerate(range(day_end)):
        total_deviation, average_deviation, deviation = get_comp(
            res_partial_whole_anomaly_vect[i], res_partial_whole_vect[i])
        print(average_deviation)
        deviation_total_vect[i] = average_deviation
        deviation_element_vect[i] = deviation
        plt.clf()
        for ts in res_partial_whole_vect[i]:
            plt.plot(ts)
        for ts in res_partial_whole_anomaly_vect[i]:
            plt.plot(ts)
        plt.gca().set_title("deviation " + str(d) + ", with anomaly from day " +
                            str(day_start_deviation))
        plt.pause(0.2)
    print(deviation_total_vect)
    plt.clf()
    # plt.subplot(212)
    plt.plot(deviation_total_vect)
    # plt.ylim([-100, 100])
    plt.gca().set_title("anomaly transient effect on cluster centroids")
    plt.xlabel("time (days)")
    plt.ylabel("standard deviation")
    plt.show()
def percent_ylabel():
    plt.gca().set_yticklabels(
        ['%d%%' % (x * 100) for x in plt.gca().get_yticks()])
print('\ndepletion rates = \n', beta)

# for j in range(0, m/2):
#     aux = (radius[j])/5
#     for i in range(0, 360):
#         x[i][j] = (radius[j]*np.cos(np.deg2rad(i))) + a[j]  # polar coordinates
#         y[i][j] = (radius[j]*np.sin(np.deg2rad(i)) + e[j]) + aux

# plotting
plt.figure(1)
plt.subplot(2, 1, 1)
plt.xlabel('Semimajor Axis (AU)')
plt.ylabel('Eccentricity')
plt.title('Initial Disk')
axes = plt.gca()
axes.set_xlim([0, 5])
axes.set_ylim([0, 0.2])
plt.scatter(a, b, s=400, edgecolors='none', color=(0.2, 0.1, 0.8))  # embryos
plt.scatter(plan, c, s=30, color=(0.6, 0.9, 0), edgecolors='none', alpha=0.8)  # planetesimals

plt.subplot(2, 2, 3)
plt.xlabel('Semimajor Axis (AU)')
plt.ylabel('Earth Mass')
plt.title('Initial Disk')
axes = plt.gca()
axes.set_xlim([0, 5])
axes.set_yscale('log')
axes.set_ylim([0.001, 1])
plt.scatter(a, mass_old, s=50, edgecolors='none',
# split data into train and test sets
pred_train, pred_test, tar_train, tar_test = train_test_split(
    predictors, target, test_size=.3, random_state=123)

# specify the lasso regression model
model = LassoLarsCV(cv=10, precompute=False).fit(pred_train, tar_train)

# print variable names and regression coefficients
dict(zip(predictors.columns, model.coef_))

# plot coefficient progression
m_log_alphas = -np.log10(model.alphas_)
ax = plt.gca()
plt.plot(m_log_alphas, model.coef_path_.T, label=model.coef_path_.T)
plt.axvline(-np.log10(model.alpha_), linestyle='--', color='k', label='alpha CV')
plt.ylabel('Regression Coefficients')
plt.xlabel('-log(alpha)')
plt.title('Regression Coefficients Progression for Lasso Paths')

# plot mean square error for each fold
m_log_alphascv = -np.log10(model.cv_alphas_)
plt.figure()
plt.plot(m_log_alphascv, model.cv_mse_path_, ':')
plt.plot(m_log_alphascv, model.cv_mse_path_.mean(axis=-1),
def plot_options_greedy(self, sess, coord, saver):
    eigenvectors_path = os.path.join(
        os.path.join(self.config.stage_logdir, "models"), "eigenvectors.npy")
    eigenvalues_path = os.path.join(
        os.path.join(self.config.stage_logdir, "models"), "eigenvalues.npy")
    eigenvectors = np.load(eigenvectors_path)
    eigenvalues = np.load(eigenvalues_path)
    for k in ["poz", "neg"]:
        for option in range(len(eigenvalues)):
            # eigenvalue = eigenvalues[option]
            eigenvector = eigenvectors[option] if k == "poz" else -eigenvectors[option]
            prefix = str(option) + '_' + k + "_"
            plt.clf()
            with sess.as_default(), sess.graph.as_default():
                for idx in range(self.nb_states):
                    dx = 0
                    dy = 0
                    d = False
                    s, i, j = self.env.get_state(idx)
                    if not self.env.not_wall(i, j):
                        plt.gca().add_patch(
                            patches.Rectangle(
                                (j, self.config.input_size[0] - i - 1),  # (x, y)
                                1.0,  # width
                                1.0,  # height
                                facecolor="gray"))
                        continue
                    # Image.fromarray(np.asarray(scipy.misc.imresize(s, [512, 512], interp='nearest'), np.uint8)).show()
                    feed_dict = {self.orig_net.observation: np.stack([s])}
                    fi = sess.run(self.orig_net.fi, feed_dict=feed_dict)[0]
                    transitions = []
                    terminations = []
                    for a in range(self.action_size):
                        s1, r, d, _ = self.env.fake_step(a)
                        feed_dict = {self.orig_net.observation: np.stack([s1])}
                        fi1 = sess.run(self.orig_net.fi, feed_dict=feed_dict)[0]
                        transitions.append(
                            self.cosine_similarity((fi1 - fi), eigenvector))
                        terminations.append(d)
                    transitions.append(
                        self.cosine_similarity(np.zeros_like(fi), eigenvector))
                    terminations.append(True)
                    a = np.argmax(transitions)
                    # if a == 4:
                    #     d = True
                    if a == 0:    # up
                        dy = 0.35
                    elif a == 1:  # right
                        dx = 0.35
                    elif a == 2:  # down
                        dy = -0.35
                    elif a == 3:  # left
                        dx = -0.35
                    if terminations[a] or np.all(
                            transitions[a] == np.zeros_like(fi)):  # termination
                        circle = plt.Circle(
                            (j + 0.5, self.config.input_size[0] - i + 0.5 - 1),
                            0.025, color='k')
                        plt.gca().add_artist(circle)
                        continue
                    plt.arrow(j + 0.5, self.config.input_size[0] - i + 0.5 - 1,
                              dx, dy, head_width=0.05, head_length=0.05,
                              fc='k', ec='k')
                plt.xlim([0, self.config.input_size[1]])
                plt.ylim([0, self.config.input_size[0]])
                for i in range(self.config.input_size[1]):
                    plt.axvline(i, color='k', linestyle=':')
                plt.axvline(self.config.input_size[1], color='k', linestyle=':')
                for j in range(self.config.input_size[0]):
                    plt.axhline(j, color='k', linestyle=':')
                plt.axhline(self.config.input_size[0], color='k', linestyle=':')
                plt.savefig(os.path.join(
                    self.summary_path,
                    "SuccessorFeatures_" + prefix + 'policy.png'))
                plt.close()
plt.savefig(path, format="png", dpi=300)

# Declare datasets
X1D = np.linspace(-4, 4, 9).reshape(-1, 1)
X2D = np.c_[X1D, X1D**2]
y = np.array([0, 0, 1, 1, 1, 1, 1, 0, 0])

plt.figure(figsize=(11, 4))

plt.subplot(121)
plt.grid(True, which='both')
plt.axhline(y=0, color='k')
plt.plot(X1D[:, 0][y == 0], np.zeros(4), "bs")
plt.plot(X1D[:, 0][y == 1], np.zeros(5), "g^")
plt.gca().get_yaxis().set_ticks([])
plt.xlabel(r"$x_1$", fontsize=20)
plt.axis([-4.5, 4.5, -0.2, 0.2])

plt.subplot(122)
plt.grid(True, which='both')
plt.axhline(y=0, color='k')
plt.axvline(x=0, color='k')
plt.plot(X2D[:, 0][y == 0], X2D[:, 1][y == 0], "bs")
plt.plot(X2D[:, 0][y == 1], X2D[:, 1][y == 1], "g^")
plt.xlabel(r"$x_1$", fontsize=20)
plt.ylabel(r"$x_2$", fontsize=20, rotation=0)
plt.gca().get_yaxis().set_ticks([0, 4, 8, 12, 16])
plt.plot([-4.5, 4.5], [6.5, 6.5], "r--", linewidth=3)
plt.axis([-4.5, 4.5, -1, 17])
dataframe = read_csv('../file/final_data/lstm/purchase/all_purchase230.csv',
                     index_col='report_date', parse_dates=[0])
# dataset = dataframe.values
sub_total_purchase_residual_tmp = dataframe['residual2']
sub_total_purchase_residual = sub_total_purchase_residual_tmp[:]
print(sub_total_purchase_residual.describe())
stationarity_test(sub_total_purchase_residual)
sub_total_purchase_residual.describe()
sub_total_purchase_residual.plot()
plt.title('Purchase Residual By Lstm')
plt.show()

# histogram: check whether the residuals are normally distributed
sub_total_purchase_residual.hist()
plt.title('Purchase Residual Histogram By Lstm')
plt.show()

# autocorrelation
plot_acf(sub_total_purchase_residual, ax=plt.gca(), lags=60)
plt.title('Purchase Residual ACF By Lstm')
plt.show()

# Ljung-Box Q test
from statsmodels.stats import diagnostic
print(diagnostic.acorr_ljungbox(sub_total_purchase_residual,
                                lags=None, boxpierce=True))
ax1.grid(axis="x", linestyle="--", color='black', linewidth=0.25, alpha=0.5)
ax1.grid(axis="y", linestyle="--", color='black', linewidth=0.25, alpha=0.5)

# Show the minor grid lines with very faint and almost transparent grey lines
plt.minorticks_on()
plt.grid(b=True, which='minor', color='#999999', linestyle='-', alpha=0.2)

# xposition = [1970, 2000, 2010, 2040, 2070]
# for xc in xposition:
#     plt.axvline(x=xc, color='k', linestyle='--')

# for label in ax1.get_yticklabels():
#     label.set_fontsize(20)
# for tick in ax1.xaxis.get_major_ticks():
#     tick.label.set_fontsize(20)

plt.setp(plt.gca().get_xticklabels(), rotation=0, ha="right")
plt.xlabel('Année', fontsize=20, color='black', weight='semibold')
plt.ylabel('', fontsize=20, color='black', weight='semibold')
plt.title(
    'Variabilités interannuelles des anomalies standardisées de l\'épaisseur '
    'de neige au 31 mars par rapport à la normale 1981-2010 \n',
    fontsize=20, color='black', weight='semibold')
for label in ax1.get_yticklabels():
    label.set_fontsize(20)
plt.savefig(
    'K:/PROJETS/PROJET_CLIMHUNOR/Atlas/Atlas_figures//VI_Ano_Stand_SD_31mars_vs_1981-2010.png',
    bbox_inches='tight', format='png',
plt.ylabel('Distancia (km)')
plt.title('Distancia al centro de la tierra del satélite vs. el tiempo')
plt.show()

plt.figure(dpi=300)
num_segmentos = 1000
rad = r + 80000
cx = 0
cy = 0
angulo = np.linspace(0, 2 * np.pi, num_segmentos + 1)
x1 = rad * np.cos(angulo) + cx
y1 = rad * np.sin(angulo) + cy
plt.plot(x1, y1, color='g', label='Superficie terrestre')
plt.xlabel('X (km)')
plt.ylabel('Y (km)')
plt.yticks((-r - 80000, r + 80000), ('7 km', '7 km'))
plt.xticks((-r - 80000, r + 80000), ('7 km', '7 km'))
plt.ylim(top=8000000, bottom=-8000000)
plt.gca().set_aspect('equal')
plt.grid()
plt.plot(x, y, c='lightblue', label='Trayectoria satelite', alpha=0.7)
plt.legend()
plt.tight_layout()
plt.show()
V = zeros((L, L), float)            # make initial grid + BC
rho = zeros(V.shape, float)         # make rho
rho[25:76, 37] = linear(rho[25:76, 37], A)   # set plate 1
rho[25:76, 63] = -rho[25:76, 37]             # set plate 2
poisson(V, rho)
plot(V, "Parallel Plate Capacitor w/ Triangular Charge Density",
     "cs4_6", elev=el, azim=ang)

# Make plot of charge densities
cs = ones(51, float) * 100                    # normal parallel plate
fg = gaussian(zeros(51, float), A, 0, s)      # fitted gaussian
lg = gaussian(zeros(51, float), A, 0, 2*s)    # large gaussian
pb = parab(zeros(51, float), A)               # parabolic
tg = linear(zeros(51, float), A)              # triangular
fig = p.figure()                              # make fig
ax = p.gca()
p.plot(arange(25, 76), cs, arange(25, 76), fg, arange(25, 76), lg,
       arange(25, 76), pb, arange(25, 76), tg)
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_xlim(25, 75)
ax.set_ylim(0, 100.5)
p.xticks(arange(25, 76, 5))
p.yticks(arange(0, 101, 10))
ax.set_title("Charge Distributions")
p.grid(True)
p.legend(["Constant", "Fitted Gaussian", "Large Gaussian",
          "Parabolic", "Triangular"])
p.savefig('figures/cs4_cd.png', bbox_inches='tight')

# Tried separation of variables just to see the solution
L = 40
sC = pi / L
C1 = 100 / (1 - exp(-2 * sC * L))
def plot_SeaIce_argo_QC_temp_sal(plevIntp, temp2d, psal2d, platformDf_plev,
                                 tdelta, seaice_val, platform_number):
    temp2d_new = np.empty((len(plevIntp), 1))
    psal2d_new = np.empty((len(plevIntp), 1))
    temp2d_new[:, 0] = temp2d[:, 0]
    psal2d_new[:, 0] = psal2d[:, 0]
    time_new = pd.Series(pd.to_datetime(platformDf_plev['date'][0]))
    for i in np.arange(1, len(platformDf_plev['date'])):
        nnans_days = (pd.to_datetime(platformDf_plev['date'][i]) -
                      pd.to_datetime(platformDf_plev['date'][i - 1])).days
        # In the following we add 1 profile of NaN between profiles that are
        # too far apart in time (if more NaN profiles are needed, edit
        # np.empty and the number of iterations in the j-loop).
        if nnans_days > tdelta:
            temp2d_new = np.append(temp2d_new,
                                   np.nan * np.empty((len(plevIntp), 1)), axis=1)
            psal2d_new = np.append(psal2d_new,
                                   np.nan * np.empty((len(plevIntp), 1)), axis=1)
            bfr_date = pd.to_datetime(platformDf_plev['date'][i - 1])
            # this loop only runs once, as we only need to add one profile of NaNs
            for j in np.arange(1, 2, 1):
                bfr_date += timedelta(days=1)
                time_new = time_new.append(pd.Series(bfr_date))
        temp2d_new = np.append(temp2d_new, temp2d[:, np.newaxis, i], axis=1)
        psal2d_new = np.append(psal2d_new, psal2d[:, np.newaxis, i], axis=1)
        time_new = time_new.append(
            pd.Series(pd.to_datetime(platformDf_plev['date'][i])))

    plt.figure(figsize=(30, 20))
    for i in [1, 2, 3]:
        if i == 1:
            ax = plt.subplot(311)
            color = 'tab:red'
            plt.plot(pd.to_datetime(platformDf_plev['date']),
                     platformDf_plev['position_qc'], 'sr', markersize=14)
            ax.set_ylabel('Position QC flag', size=24, labelpad=0)
            ax.yaxis.label.set_color('red')
            ax.tick_params(axis='y', colors='red')
            ax2 = ax.twinx()
            color = 'tab:blue'
            ax2.plot(pd.to_datetime(platformDf_plev['date']),
                     np.stack(seaice_val), 'ob', markersize=12)
            ax2.set_ylabel('SOSE sea-ice fraction', size=24, labelpad=0)
            ax2.yaxis.label.set_color('blue')
            ax2.tick_params(axis='y', colors='blue')
            plt.title('Float #' + platform_number, size=24)
            plt.grid()
        if i == 2:
            ax = plt.subplot(312)
            mt = np.ma.masked_where(np.isnan(temp2d_new), temp2d_new)
            plt.pcolor(pd.to_datetime(time_new), plevIntp, mt, cmap='plasma')
            # plt.pcolormesh(pd.to_datetime(platformDf_plev['date']), plevIntp, temp2d)
            # plt.title('Temperature, degC', size=24)
            cbar = plt.colorbar(orientation="horizontal", pad=0.2)
            cbar.ax.tick_params(labelsize=24)
            cbar.set_label('Temperature, degC', fontsize=24)
        if i == 3:
            ax = plt.subplot(313)
            ms = np.ma.masked_where(np.isnan(psal2d_new), psal2d_new)
            plt.pcolor(pd.to_datetime(time_new), plevIntp, ms, cmap='viridis_r')
            # plt.pcolor(pd.to_datetime(platformDf_plev['date']), plevIntp, psal2d)
            # plt.title('Salinity, psu', size=24)
            cbar = plt.colorbar(orientation="horizontal", pad=0.2)
            cbar.ax.tick_params(labelsize=24)
            cbar.set_label('Salinity, PSU', fontsize=24)
        if i == 2 or i == 3:
            plt.gca().invert_yaxis()
            ax.set_ylabel('Pressure, dbar', size=24, labelpad=0)
        # change tick label fonts
        for tick in ax.xaxis.get_majorticklabels():
            tick.set_fontsize(24)
        for tick in ax.yaxis.get_majorticklabels():
            tick.set_fontsize(24)
        for tick in ax2.yaxis.get_majorticklabels():
            tick.set_fontsize(24)
        plt.xlim([min(pd.to_datetime(platformDf_plev['date'])),
                  max(pd.to_datetime(platformDf_plev['date']))])
    plt.subplots_adjust(left=0.1, bottom=0.1, right=0.9, top=0.9,
                        wspace=0.4, hspace=0.4)
    plt.show()
def shap_deep_explainer(self, model_no, num_reference, img_input,
                        norm_reverse=True, blend_original_image=False,
                        gif_fps=1, ranked_outputs=1,
                        base_dir_save='/tmp/DeepExplain'):
    # region mini-batch because of GPU memory limitation
    list_shap_values = []
    batch_size = self.dicts_models[model_no]['batch_size']
    split_times = math.ceil(num_reference / batch_size)
    for i in range(split_times):
        # shap 0.26
        # shap 0.4: check_additivity=False
        # shap_values_tmp1 = self.list_e[model_no][i].shap_values(
        #     img_input, ranked_outputs=ranked_outputs, check_additivity=check_additivity)
        shap_values_tmp1 = self.list_e[model_no][i].shap_values(
            img_input, ranked_outputs=ranked_outputs)
        # shap_values ranked_outputs
        # [0] [0] (1,299,299,3)
        # [1] predict_class array
        shap_values_copy = copy.deepcopy(shap_values_tmp1)
        list_shap_values.append(shap_values_copy)

    for i in range(ranked_outputs):
        for j in range(len(list_shap_values)):
            if j == 0:
                shap_values_tmp2 = list_shap_values[0][0][i]
            else:
                shap_values_tmp2 += list_shap_values[j][0][i]
        shap_values_results = copy.deepcopy(list_shap_values[0])
        shap_values_results[0][i] = shap_values_tmp2 / split_times
    # endregion

    # region save files
    str_uuid = str(uuid.uuid1())
    list_classes = []
    list_images = []
    for i in range(ranked_outputs):
        predict_class = int(shap_values_results[1][0][i])  # numpy int64 -> int
        list_classes.append(predict_class)
        save_filename = os.path.join(
            base_dir_save, str_uuid,
            'Shap_Deep_Explainer{}.jpg'.format(predict_class))
        os.makedirs(os.path.dirname(save_filename), exist_ok=True)
        list_images.append(save_filename)

    pred_class_num = len(shap_values_results[0])
    if blend_original_image:
        if norm_reverse:
            img_original = np.uint8(input_norm_reverse(img_input[0]))
        else:
            img_original = np.uint8(img_input[0])
        img_original_file = os.path.join(os.path.dirname(list_images[0]),
                                         'deepshap_original.jpg')
        cv2.imwrite(img_original_file, img_original)

    for i in range(pred_class_num):
        # predict_max_class = attributions[1][0][i]
        attribution1 = shap_values_results[0][i]
        # attributions.shape: (1, 299, 299, 3)
        data = attribution1[0]
        data = np.mean(data, -1)
        abs_max = np.percentile(np.abs(data), 100)
        abs_min = abs_max
        # cmap = 'RdBu_r'
        # cmap = 'gray'
        cmap = 'seismic'
        plt.axis('off')
        plt.gca().xaxis.set_major_locator(plt.NullLocator())
        plt.gca().yaxis.set_major_locator(plt.NullLocator())
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        if blend_original_image:
            plt.imshow(data, interpolation='none', cmap=cmap,
                       vmin=-abs_min, vmax=abs_max)
            save_filename1 = list_images[i]
            plt.savefig(save_filename1, bbox_inches='tight', pad_inches=0)
            plt.close()
            img_heatmap = cv2.imread(list_images[i])
            (tmp_height, tmp_width) = img_original.shape[:-1]
            img_heatmap = cv2.resize(img_heatmap, (tmp_width, tmp_height))
            img_heatmap_file = os.path.join(os.path.dirname(list_images[i]),
                                            'deepshap_{0}.jpg'.format(i))
            cv2.imwrite(img_heatmap_file, img_heatmap)
            dst = cv2.addWeighted(img_original, 0.65, img_heatmap, 0.35, 0)
            img_blend_file = os.path.join(os.path.dirname(list_images[i]),
                                          'deepshap_blend_{0}.jpg'.format(i))
            cv2.imwrite(img_blend_file, dst)

            # region create gif
            import imageio
            mg_paths = [img_original_file, img_heatmap_file, img_blend_file]
            gif_images = []
            for path in mg_paths:
                gif_images.append(imageio.imread(path))
            img_file_gif = os.path.join(os.path.dirname(list_images[i]),
                                        'deepshap_{0}.gif'.format(i))
            imageio.mimsave(img_file_gif, gif_images, fps=gif_fps)
            list_images[i] = img_file_gif
            # endregion
        else:
            plt.imshow(data, interpolation='none', cmap=cmap,
                       vmin=-abs_min, vmax=abs_max)
            save_filename1 = list_images[i]
            plt.savefig(save_filename1, bbox_inches='tight', pad_inches=0)
            plt.close()
    # endregion

    return list_classes, list_images
def plot_many(bins, linestyles, bin_labels, age_then=None, z_func=None,
              colors=None, **kwargs):
    import numpy as np
    import matplotlib.pylab as plt
    from seren3.array import SimArray
    from seren3.utils.plot_utils import ncols

    ax1 = plt.gca()
    legendArtists = []
    for i in range(len(bins)):
        ls = linestyles[i]
        plot_mean_integrated_fesc(bins[i], linestyle=ls, colors=colors,
                                  label=False, legend=False, **kwargs)
        legendArtists.append(
            plt.Line2D((0, 1), (0, 0), color='k', linestyle=ls))
    handles, labels = ax1.get_legend_handles_labels()
    display = tuple(range(len(bins[0])))
    ax1.set_xlim(0.2, 0.7)

    # Shrink current axis's height by 10% on the bottom
    box = ax1.get_position()
    ax1.set_position(
        [box.x0, box.y0 + box.height * 0.1, box.width, box.height * 0.9])
    # Put a legend below the current axis
    legend_kwargs = {"title": r"log$_{10}$(Mvir)", "loc": "upper center",
                     "bbox_to_anchor": (0.5, -0.1), "fancybox": True,
                     "shadow": True, "ncol": 4}
    ax1.legend([handle for i, handle in enumerate(handles) if i in display] + legendArtists,
               [label for i, label in enumerate(labels) if i in display] + bin_labels,
               **legend_kwargs)

    # Redshift axis
    if (z_func is not None) and (age_then is not None):
        ax2 = ax1.twiny()
        ax2.set_position(
            [box.x0, box.y0 + box.height * 0.1, box.width, box.height * 0.9])

        def tick_function(X):
            # return ["%1.1f" % i for i in z_func(age_now - X)]
            # We've scaled the x-axis by the age of halos, so need
            # to account for the zero-point in age
            return ["%1.1f" % i for i in z_func(SimArray(X + age_then, "Gyr"))]

        xtickpos = ax1.get_xticks()
        print(xtickpos)
        new_tick_locations = np.linspace(0.215, 0.685, 5)
        print(new_tick_locations)
        # new_tick_locations = np.array(ax1.get_xticks())
        ax2.set_xlim(ax1.get_xlim())
        ax2.set_xticks(new_tick_locations)
        ax2.set_xticklabels(tick_function(new_tick_locations))
        ax2.set_xlabel(r"Redshift")
    ax1.set_xlabel(r"t$_{\mathrm{H}}$ [Gyr]")
    ax1.set_ylabel(
        r"$\langle \mathrm{f}_{\mathrm{esc}} \rangle$ (<t$_{\mathrm{H}}$ [%])")
def save_plot(title, out_dir=os.path.join('/', 'tmp')):
    plt.title(title, fontsize=11)
    min_y = plt.gca().get_ylim()[0]
    if min_y > 0:
        plt.gca().set_ylim([0, plt.gca().get_ylim()[1]])
    plt.gca().spines['left'].set_visible(False)
    plt.gca().spines['right'].set_visible(False)
    plt.gca().spines['top'].set_visible(False)
    plt.gca().spines['bottom'].set_visible(False)
    plt.gca().tick_params('x', length=0)
    plt.gca().tick_params('y', length=0)
    plt.tight_layout()
    plt.savefig(os.path.join(out_dir, title.replace(' ', '_') + '.png'))
    plt.cla()
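# A short usage sketch for save_plot; the data are illustrative and the file
# lands in /tmp by default ("My First Plot" -> /tmp/My_First_Plot.png).
import os
import matplotlib.pyplot as plt

plt.plot([1, 2, 3], [2, 4, 8])
save_plot('My First Plot')  # strips the spines/ticks and saves the figure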
def plot_waveforms(waveforms, sensors, evt, *, wf_type="PMT", range=(None,),
                   overlay=False, sum=False, zoomx=False, zoomy=False,
                   dual=False):
    range = slice(*range)
    wfsize = waveforms.shape[2]
    time = np.arange(wfsize).astype(float)
    if wf_type == "PMT":
        time /= 40
    elif wf_type == "BLR":
        time /= 40
    elif wf_type == "SiPM":
        pass
    else:
        raise ValueError("Unrecognized wf type {}. ".format(wf_type) +
                         "Valid options: are 'PMT', 'BLR' and 'SiPM'")
    if sum:
        wf_type += " SUM"

    gmin, gmax = float("inf"), -float("inf")
    plt.ion()
    ax1 = plt.gca()
    if sum:
        sum_wf = np.zeros(waveforms.shape[2])
    if dual:
        for wf, wf_dual, ID, color in zip(waveforms[0][range],
                                          waveforms[1][range],
                                          sensors[range], _colors):
            ymin, ymax = min(wf_dual), max(wf_dual)
            if ymin < gmin:
                gmin = ymin
            if ymax > gmax:
                gmax = ymax
            plt.plot(wf, drawstyle="steps", label=str(ID[0]), c=color)
            plt.plot(wf_dual, drawstyle="steps", label=str(ID[0]), c=next(_colors))
            ylim = (0.99 * ymin, 1.01 * ymax)
            customize_plot(zoomx, zoomy if zoomy else ylim, wf_type, evt, ID[0])
            show_and_wait()
    else:
        for wf, ID, color in zip(waveforms[0][range], sensors[range], _colors):
            ymin, ymax = min(wf), max(wf)
            if ymin < gmin:
                gmin = ymin
            if ymax > gmax:
                gmax = ymax
            if sum:
                bls_wf = wf - mode(wf)
                sum_wf = sum_wf + bls_wf * (1 if "SiPM" in wf_type else -1)
            else:
                plt.plot(wf, drawstyle="steps", label=str(ID[0]), c=color)
            if not overlay and not sum:
                ylim = (0.99 * ymin, 1.01 * ymax)
                customize_plot(zoomx, zoomy if zoomy else ylim, wf_type, evt, ID[0])
                show_and_wait()
    if overlay:
        ylim = 0.99 * gmin, 1.01 * gmax
        customize_plot(zoomx, zoomy if zoomy else ylim, wf_type, evt)
        show_and_wait()
    if sum:
        ylim = np.min(sum_wf) - 50, np.max(sum_wf) + 50
        plt.plot(sum_wf, drawstyle="steps", c="k")
        customize_plot(zoomx, zoomy if zoomy else ylim, wf_type, evt)
        show_and_wait()
def plot_results(detector, img, x, small, mixcomp=None, bounding_boxes=[],
                 img_resized=None):
    # Get max peak
    # print(ix, iy)
    plt.clf()
    if small is None and x is None:
        plt.subplot(111)
    else:
        plt.subplot(121)
    plt.title('Input image')
    plt.imshow(img, cmap=plt.cm.gray)
    for dbb in bounding_boxes[::-1]:
        bb = dbb.box
        color = 'cyan' if dbb.correct else 'red'
        plt.gca().add_patch(
            plt.Rectangle((bb[1], bb[0]), bb[3] - bb[1], bb[2] - bb[0],
                          facecolor='none', edgecolor=color, linewidth=2.0))
        # plt.text(bb[1], bb[0], "{0:.2f}".format(dbb.confidence), color='white',
        #          backgroundcolor=color, size=8, ha='left', va='bottom')
        plt.text(bb[1], bb[0], "{0:.2f}".format(dbb.confidence),
                 color='yellow', size=6, ha='left', va='bottom')
    if x is not None:
        plt.subplot(122)
        # plt.title('Response map ({:.2f}, {:.2f})'.format(float(x.min()), float(x.max())))
        plt.title('Response map')
        plt.imshow(x, interpolation='nearest')  # , vmin=-40000, vmax=-36000
        # plt.colorbar()
    if 0:
        if small is not None:
            plt.subplot(223)
            plt.title('Feature activity')
            plt.imshow(small.sum(axis=-1), interpolation='nearest')
            plt.colorbar()
        if img_resized is not None:
            plt.subplot(224)
            plt.title('Resized image')
            plt.imshow(img_resized, interpolation='nearest', cmap=plt.cm.gray)
    if 0:
        plt.title('Normalized stuff')
        plt.imshow(x / np.clip(small.sum(axis=-1), 5, np.inf),
                   interpolation='nearest')
        plt.colorbar()
    else:
        # if mixcomp is not None:
        #     plt.title('Kernel Bernoulli probability averages')
        #     plt.imshow(detector.kernels[mixcomp].mean(axis=-1),
        #                interpolation='nearest', cmap=plt.cm.RdBu, vmin=0, vmax=1)
        #     plt.colorbar()
        pass
import sys
from os import listdir
from os.path import isfile, join

import numpy as np
import matplotlib.pylab as plt

if __name__ == "__main__":
    data_path = sys.argv[1]
    data_files = []
    for f in listdir(data_path):
        if isfile(join(data_path, f)):
            data_files.append(join(data_path, f))

    fig, ax = plt.subplots()
    for data_file in data_files:
        data = np.loadtxt(open(data_file, "r"), delimiter=",")
        track = np.array([data[:, 0], data[:, 1]]).T
        plt.plot(track[:, 0], track[:, 1])

    # ax.suptitle("Feature Track")
    ax.set_xlim([-1.0, 1.0])
    ax.set_ylim([-1.0, 1.0])
    ax.xaxis.tick_top()
    plt.gca().invert_yaxis()
    plt.show()
def wiggle(Data, SH={}, maxval=-1, skipt=1, lwidth=.5, x=[], t=[], gain=1,
           type='VA', color='black', ntmax=1e+9):
    """
    wiggle(Data, SH)
    """
    import matplotlib.pylab as plt
    import numpy as np
    import copy

    yl = 'Sample number'
    ns = Data.shape[0]
    ntraces = Data.shape[1]
    if ntmax < ntraces:
        skipt = int(np.floor(ntraces / ntmax))
        if skipt < 1:
            skipt = 1
    if len(x) == 0:
        x = range(0, ntraces)
    if len(t) == 0:
        t = range(0, ns)
    else:
        yl = 'Time [s]'
    # overrule time from SegyHeader
    if 'time' in SH:
        t = SH['time']
        yl = 'Time [s]'
    dx = x[1] - x[0]
    if maxval <= 0:
        Dmax = np.nanmax(Data)
        maxval = -1 * maxval * Dmax
        print('segypy.wiggle: maxval = %g' % maxval)
    # fig, (ax1) = plt.subplots(1, 1)
    fig = plt.gcf()
    ax1 = plt.gca()
    for i in range(0, ntraces, skipt):
        # use copy to avoid truncating the data
        trace = copy.copy(Data[:, i])
        trace[0] = 0
        trace[-1] = 0
        ax1.plot(x[i] + gain * skipt * dx * trace / maxval, t,
                 color=color, linewidth=lwidth)
        if type == 'VA':
            for a in range(len(trace)):
                if trace[a] < 0:
                    trace[a] = 0
            # pylab.fill(i+Data[:,i]/maxval, t, color='k', facecolor='g')
            # ax1.fill(x[i] + dx * Data[:, i] / maxval, t, 'k', linewidth=0, color=color)
            ax1.fill(x[i] + gain * skipt * dx * trace / maxval, t, 'k',
                     linewidth=0, color=color)
    ax1.grid(True)
    ax1.invert_yaxis()
    plt.ylim([np.max(t), np.min(t)])
    plt.xlabel('Trace number')
    plt.ylabel(yl)
    if 'filename' in SH:
        plt.title(SH['filename'])
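# A hedged usage sketch for wiggle: variable-area display of a synthetic shot
# gather. The random-walk "reflections" are purely illustrative.
import numpy as np
import matplotlib.pylab as plt

rng = np.random.RandomState(0)
Data = rng.randn(200, 30).cumsum(axis=0)  # 200 samples x 30 traces
wiggle(Data, gain=1, type='VA', color='black')
plt.show()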
def streamplot(
    UV,
    ax=None,
    map=None,
    geodata=None,
    drawlonlatlines=False,
    basemap_resolution="l",
    cartopy_scale="50m",
    lw=0.5,
    cartopy_subplot=(1, 1, 1),
    axis="on",
    **kwargs,
):
    """Function to plot a motion field as streamlines.

    .. _`mpl_toolkits.basemap`: https://matplotlib.org/basemap/api/basemap_api.html#module-mpl_toolkits.basemap

    .. _SubplotSpec: https://matplotlib.org/api/_as_gen/matplotlib.gridspec.SubplotSpec.html?highlight=subplotspec#matplotlib.gridspec.SubplotSpec

    .. _cartopy: https://scitools.org.uk/cartopy/docs/latest/

    Parameters
    ----------
    UV : array-like
        Array of shape (2, m, n) containing the input motion field.
    ax : axis object
        Optional axis object to use for plotting.
    map : {'basemap', 'cartopy'}, optional
        Optional method for plotting a map: 'basemap' or 'cartopy'. The former
        uses `mpl_toolkits.basemap`_, while the latter uses cartopy_.
    geodata : dictionary
        Optional dictionary containing geographical information about the
        field. If geodata is not None, it must contain the following
        key-value pairs (the code below also reads the xpixelsize and
        ypixelsize keys):

        +----------------+----------------------------------------------------+
        |       Key      |                     Value                          |
        +================+====================================================+
        |   projection   | PROJ.4-compatible projection definition            |
        +----------------+----------------------------------------------------+
        |   x1           | x-coordinate of the lower-left corner of the data  |
        |                | raster                                             |
        +----------------+----------------------------------------------------+
        |   y1           | y-coordinate of the lower-left corner of the data  |
        |                | raster                                             |
        +----------------+----------------------------------------------------+
        |   x2           | x-coordinate of the upper-right corner of the data |
        |                | raster                                             |
        +----------------+----------------------------------------------------+
        |   y2           | y-coordinate of the upper-right corner of the data |
        |                | raster                                             |
        +----------------+----------------------------------------------------+
        |   yorigin      | location of the first element in the data raster   |
        |                | w.r.t. y-axis:                                     |
        |                | 'upper' = upper border, 'lower' = lower border     |
        +----------------+----------------------------------------------------+

    drawlonlatlines : bool, optional
        If set to True, draw longitude and latitude lines. Applicable if map
        is 'basemap' or 'cartopy'.
    basemap_resolution : str, optional
        The resolution of the basemap, see the documentation of
        `mpl_toolkits.basemap`_. Applicable if map is 'basemap'.
    cartopy_scale : {'10m', '50m', '110m'}, optional
        The scale (resolution) of the map. The available options are '10m',
        '50m', and '110m'. Applicable if map is 'cartopy'.
    lw : float, optional
        Linewidth of the map (administrative boundaries and coastlines).
    cartopy_subplot : tuple or SubplotSpec_ instance, optional
        Cartopy subplot. Applicable if map is 'cartopy'.
    axis : {'off','on'}, optional
        Whether to turn off or on the x and y axis.

    Other Parameters
    ----------------
    density : float
        Controls the closeness of streamlines. Default: 1.5.
    color : string
        Optional streamline color. This is a synonym for the PolyCollection
        facecolor kwarg in matplotlib.collections. Default: black.

    Returns
    -------
    out : axis object
        Figure axes. Needed if one wants to add e.g. text inside the plot.
""" if map is not None and geodata is None: raise ValueError("map!=None but geodata=None") # defaults density = kwargs.get("density", 1.5) color = kwargs.get("color", "black") # prepare x y coordinates reproject = False if geodata is not None: x = ( np.linspace(geodata["x1"], geodata["x2"], UV.shape[2]) + geodata["xpixelsize"] / 2.0 ) y = ( np.linspace(geodata["y1"], geodata["y2"], UV.shape[1]) + geodata["ypixelsize"] / 2.0 ) extent = (geodata["x1"], geodata["x2"], geodata["y1"], geodata["y2"]) # check geodata and project if different from axes if ax is not None and map is None: if type(ax).__name__ == "GeoAxesSubplot": try: ccrs = utils.proj4_to_cartopy(geodata["projection"]) except UnsupportedSomercProjection: # Define fall-back projection for Swiss data(EPSG:3035) # This will work reasonably well for Europe only. t_proj4str = "+proj=laea +lat_0=52 +lon_0=10 +x_0=4321000 +y_0=3210000 +ellps=GRS80 +units=m +no_defs" reproject = True elif type(ax).__name__ == "Basemap": utils.proj4_to_basemap(geodata["projection"]) if reproject: geodata = utils.reproject_geodata( geodata, t_proj4str, return_grid="coords" ) extent = (geodata["x1"], geodata["x2"], geodata["y1"], geodata["y2"]) X, Y = geodata["X_grid"], geodata["Y_grid"] else: x = np.arange(UV.shape[2]) y = np.arange(UV.shape[1]) if not reproject: X, Y = np.meshgrid(x, y) # draw basemaps if map is not None: try: ax = basemaps.plot_geography( map, geodata["projection"], extent, UV.shape[1:], drawlonlatlines, basemap_resolution, cartopy_scale, lw, cartopy_subplot, ) except UnsupportedSomercProjection: # Define default fall-back projection for Swiss data(EPSG:3035) # This will work reasonably well for Europe only. t_proj4str = "+proj=laea +lat_0=52 +lon_0=10 +x_0=4321000 +y_0=3210000 +ellps=GRS80 +units=m +no_defs" geodata = utils.reproject_geodata(geodata, t_proj4str, return_grid="coords") extent = (geodata["x1"], geodata["x2"], geodata["y1"], geodata["y2"]) X, Y = geodata["X_grid"], geodata["Y_grid"] ax = basemaps.plot_geography( map, geodata["projection"], extent, UV.shape[1:], drawlonlatlines, basemap_resolution, cartopy_scale, lw, cartopy_subplot, ) else: ax = plt.gca() # plot streamplot ax.streamplot( x, np.flipud(y), UV[0, :, :], -UV[1, :, :], density=density, color=color, zorder=1e6, ) if geodata is None or axis == "off": axes = plt.gca() axes.xaxis.set_ticks([]) axes.xaxis.set_ticklabels([]) axes.yaxis.set_ticks([]) axes.yaxis.set_ticklabels([]) return plt.gca()
def plot_network(self,
                 state_sizes=None, state_scale=1.0, state_colors='#ff5500',
                 state_labels='auto',
                 arrow_scale=1.0, arrow_curvature=1.0, arrow_labels='weights',
                 arrow_label_format='%10.2f', max_width=12, max_height=12,
                 max_flux=None, figpadding=0.2, xticks=False, yticks=False,
                 show_frame=False, **textkwargs):
    """
    Draws a network using discs and curved arrows.

    The thicknesses and labels of the arrows are taken from the off-diagonal
    matrix elements in A.
    """
    plt = self.plt
    if self.pos is None:
        self.layout_automatic()
    # number of nodes
    n = len(self.pos)
    # get bounds and pad figure
    xmin = np.min(self.pos[:, 0])
    xmax = np.max(self.pos[:, 0])
    Dx = xmax - xmin
    xmin -= Dx * figpadding
    xmax += Dx * figpadding
    Dx *= 1 + figpadding
    ymin = np.min(self.pos[:, 1])
    ymax = np.max(self.pos[:, 1])
    Dy = ymax - ymin
    ymin -= Dy * figpadding
    ymax += Dy * figpadding
    Dy *= 1 + figpadding
    # sizes of nodes
    if state_sizes is None:
        state_sizes = 0.5 * state_scale * min(Dx, Dy) ** 2 * np.ones(n) / float(n)
    else:
        state_sizes = 0.5 * state_scale * \
            state_sizes / (np.max(state_sizes) * float(n))
    # Note (JFR): the automatic arrow rescaling that used to live here was
    # confusing and has been removed.
    # size figure
    if Dx / max_width > Dy / max_height:
        figsize = (max_width, Dy * (max_width / Dx))
    else:
        figsize = (Dx / Dy * max_height, max_height)
    fig = plt.gcf()
    fig.set_size_inches(figsize, forward=True)
    # font sizes
    from matplotlib import rcParams
    old_fontsize = rcParams['font.size']
    rcParams['font.size'] = 20
    # remove axis labels
    frame = plt.gca()
    if not xticks:
        frame.axes.get_xaxis().set_ticks([])
    if not yticks:
        frame.axes.get_yaxis().set_ticks([])
    # show or suppress frame
    frame.set_frame_on(show_frame)
    # set node labels ('is' comparison against a string literal was a bug)
    if state_labels == 'auto':
        state_labels = [str(i) for i in np.arange(n)]
    else:
        assert len(state_labels) == n, \
            "Mismatch between nstates and nr. state_labels (%u vs %u)" % (
                n, len(state_labels))
    # set node colors
    if state_colors is None:
        state_colors = '#ff5500'  # None is not acceptable
    if isinstance(state_colors, str):
        state_colors = [state_colors] * n
    if isinstance(state_colors, list):
        assert len(state_colors) == n, \
            "Mismatch between nstates and nr. state_colors (%u vs %u)" % (
                n, len(state_colors))
    try:
        colorscales = _types.ensure_ndarray(state_colors, ndim=1, kind='numeric')
        colorscales /= colorscales.max()
        state_colors = [
            plt.cm.binary(int(256.0 * colorscales[i])) for i in range(n)
        ]
    except Exception:
        pass  # not numeric; assume we have a list of color strings
    # set arrow labels
    if isinstance(arrow_labels, np.ndarray):
        L = arrow_labels
    else:
        L = np.empty(np.shape(self.A), dtype=object)
        if arrow_labels is None:
            L[:, :] = ''
        elif arrow_labels.lower() == 'weights':
            for i in range(n):
                for j in range(n):
                    L[i, j] = arrow_label_format % self.A[i, j]
        else:
            rcParams['font.size'] = old_fontsize
            raise ValueError('invalid arrow label format')

    # Set the default values for the text dictionary
    textkwargs.setdefault('size', 14)
    textkwargs.setdefault('horizontalalignment', 'center')
    textkwargs.setdefault('verticalalignment', 'center')
    textkwargs.setdefault('color', 'black')

    # draw circles
    circles = []
    for i in range(n):
        fig = plt.gcf()
        c = plt.Circle(self.pos[i],
                       radius=math.sqrt(0.5 * state_sizes[i]) / 2.0,
                       color=state_colors[i], zorder=2)
        circles.append(c)
        fig.gca().add_artist(c)
        # add annotation
        plt.text(self.pos[i][0], self.pos[i][1], state_labels[i],
                 zorder=3, **textkwargs)
    assert len(circles) == n, "%i != %i" % (len(circles), n)

    # draw arrows, coloured through a custom discretised colormap
    import matplotlib
    from matplotlib import pyplot as plt
    if np.all(self.A >= 0):
        # non-negative weights: sequential colormap
        mycmap = plt.cm.Greys
        mycmap_max = np.max(np.abs(self.A))
        mycmap_min = -1. * mycmap_max
    else:
        # signed weights (e.g. negative delta-F): diverging colormap
        mycmap = plt.cm.bwr
        if max_flux is None:
            mycmap_max = np.max(np.abs(self.A))
        else:
            mycmap_max = max_flux
        mycmap_min = -mycmap_max
    # extract all colors from the base map and rebuild it as a listed map
    mycmaplist = [mycmap(i) for i in range(mycmap.N)]
    mycmap = mycmap.from_list('Custom cmap', mycmaplist, mycmap.N)
    # define the bins and normalize
    bounds = np.linspace(mycmap_min, mycmap_max, mycmap.N)
    norm = matplotlib.colors.BoundaryNorm(bounds, mycmap.N)
    dx = bounds[1] - bounds[0]
    for i in range(n):
        for j in range(i + 1, n):
            if abs(self.A[i, j]) > 0:  # JFR - allow for negative delta-F
                grid = int((self.A[i, j] - mycmap_min) / dx + 0.5)
                color = mycmaplist[grid]
                self._draw_arrow(
                    self.pos[i, 0], self.pos[i, 1],
                    self.pos[j, 0], self.pos[j, 1], Dx, Dy,
                    label=str(L[i, j]),
                    width=arrow_scale * abs(self.A[i, j]),
                    color=color, arrow_curvature=arrow_curvature,
                    patchA=circles[i], patchB=circles[j],
                    shrinkA=3, shrinkB=0)
            if abs(self.A[j, i]) > 0:
                grid = int((self.A[j, i] - mycmap_min) / dx + 0.5)
                color = mycmaplist[grid]
                self._draw_arrow(
                    self.pos[j, 0], self.pos[j, 1],
                    self.pos[i, 0], self.pos[i, 1], Dx, Dy,
                    label=str(L[j, i]),
                    width=arrow_scale * abs(self.A[j, i]),
                    color=color, arrow_curvature=arrow_curvature,
                    patchA=circles[j], patchB=circles[i],
                    shrinkA=3, shrinkB=0)

    # plot
    plt.xlim(xmin, xmax)
    plt.ylim(ymin, ymax)
    rcParams['font.size'] = old_fontsize
    return fig
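The arrow colouring above amounts to a discretised colormap lookup; the same idea in isolation, with a made-up 2x2 coupling matrix and using BoundaryNorm for the bin lookup instead of the manual index arithmetic:

import numpy as np
import matplotlib
import matplotlib.pyplot as plt

A = np.array([[0.0, 0.5], [-0.8, 0.0]])  # example signed coupling matrix
cmap = plt.cm.bwr
vmax = np.max(np.abs(A))
# Symmetric bins around zero, one per colormap entry
bounds = np.linspace(-vmax, vmax, cmap.N)
norm = matplotlib.colors.BoundaryNorm(bounds, cmap.N)
# Map a matrix element to one of the cmap.N discrete colours
color = cmap(norm(A[0, 1]))
print(color)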
def _plot_seasonality(self, alpha: float, plot_kwargs: dict):
    # Plot the median seasonal component with a (1 - alpha) credible band
    periods = list(set([float(i.split("_")[1]) for i in self.seasonality]))

    additive_ts, multiplicative_ts = self._fit_seasonality()

    all_seasonalities = [("additive", additive_ts)]
    if len(self.multiplicative_data):
        all_seasonalities.append(("multiplicative", multiplicative_ts))
    for sn, ts in all_seasonalities:
        if (sn == "multiplicative" and np.sum(ts) == 1) or (
                sn == "additive" and np.sum(ts) == 0):
            continue
        ddf = pd.DataFrame(
            np.vstack([
                np.percentile(ts[:, :, self.skip_first:], 50, axis=-1),
                np.percentile(ts[:, :, self.skip_first:], alpha / 2 * 100, axis=-1),
                np.percentile(ts[:, :, self.skip_first:], (1 - alpha / 2) * 100, axis=-1),
            ]).T,
            columns=[
                "%s_%s" % (p, l)
                for l in ["mid", "low", "high"]
                for p in periods[::-1]
            ],
        )
        ddf.loc[:, "ds"] = self.data["ds"]
        for period in periods:
            if int(period) == 0:
                # fractional period (< 1 day): derive the step from the
                # sampling interval of the data
                step = int(self.data["ds"].diff().mean().total_seconds() // float(period))
            else:
                step = int(period)
            graph = ddf.head(step)
            if period == 7:
                # weekly seasonality: average by day of week
                ddf.loc[:, "dow"] = [i for i in ddf["ds"].dt.weekday]
                graph = (ddf[[
                    "dow",
                    "%s_low" % period,
                    "%s_mid" % period,
                    "%s_high" % period,
                ]].groupby("dow").mean().sort_values("dow"))
                # pandas weekday is Monday=0, so the labels start on Monday
                graph.loc[:, "ds"] = [
                    ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][i]
                    for i in graph.index
                ]
                graph = graph.sort_index()
            plt.figure(**plot_kwargs)
            graph.plot(y="%s_mid" % period, x="ds", color="C0", legend=False,
                       ax=plt.gca())
            plt.grid()
            if period == 7:
                plt.xticks(range(7), graph["ds"].values)
                plt.fill_between(
                    np.arange(0, 7),
                    graph["%s_low" % period].values.astype(float),
                    graph["%s_high" % period].values.astype(float),
                    alpha=0.3,
                )
            else:
                plt.fill_between(
                    graph["ds"].values,
                    graph["%s_low" % period].values.astype(float),
                    graph["%s_high" % period].values.astype(float),
                    alpha=0.3,
                )
            plt.title("Model Seasonality (%s) for period: %s days" % (sn, period))
            # plt.axes() creates a new axes in recent matplotlib; use gca()
            plt.gca().xaxis.label.set_visible(False)
            plt.show()
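The credible bands above are plain column-wise percentiles over posterior draws; the construction in isolation, with synthetic random-walk draws standing in for a real trace:

import numpy as np
import matplotlib.pyplot as plt

draws = np.random.randn(500, 100).cumsum(axis=1)  # 500 fake posterior draws of a series
mid = np.percentile(draws, 50, axis=0)
low = np.percentile(draws, 2.5, axis=0)           # alpha = 0.05 two-sided band
high = np.percentile(draws, 97.5, axis=0)
xs = np.arange(draws.shape[1])
plt.plot(xs, mid, color="C0")
plt.fill_between(xs, low, high, alpha=0.3)
plt.show()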
def peerSensitivity():
    # assumes module-level imports: glob, os, pickle, random, numpy as np,
    # scipy.stats as sps, matplotlib.pylab as plt, ashash, and
    # collections.defaultdict / collections.OrderedDict
    space = 1
    af = 4
    allasCount = {}
    resultsFile = "../results/peerSensitivity/KLdiv_%s.pickle"
    collectorsDataFile = "../results/peerSensitivity/collectorData.pickle"
    allPeersFile = "../results/peerSensitivity/allPeersDist.pickle"

    collectorsDist = []
    results = defaultdict(list)

    if not os.path.exists(resultsFile % "Betweenness"):
        ribFiles = glob.glob("/data/routeviews/archive.routeviews.org/*/*/RIBS/rib.20160601.0000.bz2")
        ribFiles.extend(glob.glob("/data/routeviews/archive.routeviews.org/*/*/*/RIBS/rib.20160601.0000.bz2"))
        ribFiles.extend(glob.glob("/data/ris/*/*/bview.20160601.0000.gz"))
        ribFiles.append("/data/bgpmon/ribs/201606/ribs")

        for i, ribFile in enumerate(ribFiles):
            # derive a short collector label from the file path
            words = ribFile.split("/")
            if "routeviews" in ribFile:
                if words[-4] == "route-views3":
                    label = "rv3"
                elif words[-5] == "archive.routeviews.org" and words[-4] == "bgpdata":
                    label = "rv2"
                elif "." not in words[-5] and words[-5].startswith("route-views"):
                    label = "rv" + words[-5][-1]
                else:
                    label = words[-5].split(".")[-1]
            elif "ris" in ribFile:
                label = words[-3]
            else:
                label = "bgpmon"

            asCountFile = "../results/peerSensitivity/20160601.0000_asCount%s.pickle" % i
            if not os.path.exists(asCountFile):
                rtree, _ = ashash.readrib(ribFile, space, af)
                asCount = rtree.search_exact("0.0.0.0/0").data
                asHegemony, _, nbPeers = ashash.computeCentrality(asCount, space)
                pickle.dump((asCount, asHegemony, nbPeers), open(asCountFile, "wb"))
            else:
                asCount, asHegemony, nbPeers = pickle.load(open(asCountFile, "rb"))

            collectorsDist.append((label, nbPeers, asHegemony))
            print("%s: %s peers" % (label, len(asCount)))

            # keep only full-feed peers and deduplicate them across collectors
            for peer, count in asCount.items():
                if count["totalCount"] > 2000000000:
                    if peer not in allasCount:
                        allasCount[peer] = count
                    else:
                        print("Warning: peer %s is observed multiple times (%s)" % (peer, ribFile))

        asHegemonyRef, _, nbPeers = ashash.computeCentrality(allasCount, space)
        asBetweennessRef, _, _ = ashash.computeBetweenness(allasCount, space)
        pickle.dump((asHegemonyRef, asBetweennessRef, nbPeers), open(allPeersFile, "wb"))

        for metricLabel, ref, computeMetric in [
                ("Hegemony", asHegemonyRef, ashash.computeCentrality),
                ("Betweenness", asBetweennessRef, ashash.computeBetweenness)]:
            if not os.path.exists(resultsFile % metricLabel):
                # Remove ASes with a score of 0.0
                toremove = [asn for asn, score in ref.items() if score == 0.0]
                for asn in toremove:
                    del ref[asn]
                minVal = min(ref.values())

                nbPeersList = list(range(0, len(allasCount), 10))
                nbPeersList[0] = 1
                peerKeys = list(allasCount.keys())
                for nbPeers in nbPeersList:
                    tmp = []
                    for _ in range(10):
                        # Randomly select peers
                        peersIndex = random.sample(range(len(allasCount)), nbPeers)
                        asCount = {peerKeys[p]: allasCount[peerKeys[p]] for p in peersIndex}
                        asMetric, _, _ = computeMetric(asCount, space)

                        # Remove ASes with a score of 0.0
                        toremove = [asn for asn, score in asMetric.items() if score == 0.0]
                        for asn in toremove:
                            del asMetric[asn]

                        # Give both distributions the same support
                        missingAS = set(ref.keys()).difference(asMetric.keys())
                        for asn in missingAS:
                            asMetric[asn] = minVal

                        # Compute the KL-divergence against the all-peers reference
                        dist = [asMetric[asn] for asn in ref.keys()]
                        kldiv = sps.entropy(dist, list(ref.values()))
                        tmp.append(kldiv)

                    results[metricLabel].append(tmp)
                    print(tmp)

                # save final results
                pickle.dump((nbPeersList, results[metricLabel]),
                            open(resultsFile % metricLabel, "wb"))
                pickle.dump(collectorsDist, open(collectorsDataFile, "wb"))
            else:
                (nbPeersList, results[metricLabel]) = pickle.load(
                    open(resultsFile % metricLabel, "rb"))
                collectorsDist = pickle.load(open(collectorsDataFile, "rb"))
    else:
        for metricLabel in ["Hegemony", "Betweenness"]:
            nbPeersList, results[metricLabel] = pickle.load(open(resultsFile % metricLabel, "rb"))
        collectorsDist = pickle.load(open(collectorsDataFile, "rb"))
        asHegemonyRef, asBetweennessRef, allFullFeedPeers = pickle.load(open(allPeersFile, "rb"))

    def plotRef(references, legendFmt="%s", alpha=1.0):
        # Plot the mean KL-divergence with a min/max envelope
        for asDistRef, metricLabel, color in references:
            m = np.mean(results[metricLabel][1:], axis=1)
            mi = np.min(results[metricLabel][1:], axis=1)
            ma = np.max(results[metricLabel][1:], axis=1)
            x = nbPeersList[1:]
            plt.fill_between(x, mi, ma, facecolor=color, alpha=alpha * 0.5)
            try:
                plt.plot(x, m, "-o", ms=3, label=legendFmt % metricLabel,
                         color=color, alpha=alpha)
            except TypeError:
                plt.plot(x, m, "-o", ms=3, label=legendFmt,
                         color=color, alpha=alpha)
        plt.xlabel("Number of peers")
        plt.ylabel("KL divergence")
        plt.legend()
        plt.tight_layout()

    # Compare betweenness and hegemony
    plt.figure()
    plotRef([(asHegemonyRef, "Hegemony", "C0"),
             (asBetweennessRef, "Betweenness", "C5")])
    plt.ylim([0.00001, 1])
    plt.xscale("log")
    plt.savefig("../results/peerSensitivity/meanKL_%s.pdf" % metricLabel)

    # Compare collectors and random samples (hegemony):
    # Remove ASes with a score of 0.0
    toremove = [asn for asn, score in asHegemonyRef.items() if score == 0.0]
    for asn in toremove:
        del asHegemonyRef[asn]
    minVal = min(asHegemonyRef.values())

    plt.figure()
    plotRef([(asHegemonyRef, "Hegemony", "C0")], legendFmt="Random peers", alpha=0.5)
    plt.xlabel("Number of peers")
    plt.ylabel("KL divergence")
    for collectorLabel, nbPeers, asHegemony in collectorsDist:
        if asHegemony is None:
            print("warning: ignore collector %s" % collectorLabel)
            continue

        # Remove ASes with a score of 0.0
        toremove = [asn for asn, score in asHegemony.items() if score == 0.0]
        for asn in toremove:
            del asHegemony[asn]

        # Give both distributions the same support
        missingAS = set(asHegemonyRef.keys()).difference(asHegemony.keys())
        for asn in missingAS:
            asHegemony[asn] = minVal

        # Compute the KL-divergence
        dist = [asHegemony[asn] for asn in asHegemonyRef.keys()]
        kldiv = sps.entropy(dist, list(asHegemonyRef.values()))
        print("%s:\t %s peers \t %s " % (collectorLabel, nbPeers, kldiv))
        if kldiv > 0.4:
            continue
        if collectorLabel.startswith("rrc"):
            plt.plot(nbPeers, kldiv, "C1x", label="RIS")
            collectorLabel = collectorLabel.replace("rrc", "")
        elif collectorLabel == "bgpmon":
            plt.plot(nbPeers, kldiv, "C3^")
        else:
            plt.plot(nbPeers, kldiv, "C3+", label="Route Views")
        if kldiv < 1 or nbPeers > 10:
            plt.text(nbPeers + 0.005, kldiv + 0.005, collectorLabel, fontsize=8)

    # deduplicate legend entries
    handles, labels = plt.gca().get_legend_handles_labels()
    by_label = OrderedDict(zip(labels, handles))
    plt.legend(by_label.values(), by_label.keys())
    plt.ylim([0.00001, 0.4])
    plt.xlim([9, 50])
    plt.tight_layout()
    plt.savefig("../results/peerSensitivity/collectorDiversity.pdf")

    return (nbPeersList, results)
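The KL-divergence measurements above boil down to scipy's two-argument entropy, which normalises both inputs internally; a self-contained sketch with made-up hegemony scores:

import scipy.stats as sps

ref = {'AS1': 0.5, 'AS2': 0.3, 'AS3': 0.2}    # reference distribution (all peers)
est = {'AS1': 0.45, 'AS2': 0.35, 'AS3': 0.2}  # estimate from fewer peers
dist = [est[asn] for asn in ref]              # align the two supports
kldiv = sps.entropy(dist, list(ref.values())) # KL(dist || ref)
print(kldiv)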
def predict(
    self,
    forecasting_periods: int = 10,
    freq: str = "D",
    extra_data: pd.DataFrame = None,
    include_history: bool = True,
    alpha: float = 0.05,
    plot: bool = False,
):
    """Predict using the PMProphet model.

    Parameters
    ----------
    forecasting_periods : int, > 0
        Number of future points to forecast.
    freq : string, default: 'D'
        Frequency of the forecast timestamps (a pandas offset alias).
    extra_data : pd.DataFrame
        Extra regressor and holiday columns covering the forecast period.
    include_history : bool
        If True, predictions are concatenated to the data.
    alpha : float
        Width of the credible intervals.
    plot : bool
        Plot the predictions.

    Returns
    -------
    A pd.DataFrame with the forecast components.
    """
    if self.auto_changepoints:
        self._finalize_auto_changepoints(plot=False)
    last_date = self.data["ds"].max()
    dates = pd.date_range(
        start=last_date,
        periods=forecasting_periods + 1,  # An extra in case we include start
        freq=freq,
    )
    dates = dates[dates > last_date]  # Drop start if it equals last_date
    dates = dates[:forecasting_periods]  # Return the correct number of periods

    new_df = pd.DataFrame()
    if include_history:
        new_df["y"] = np.concatenate(
            [self.data["y"], np.full(forecasting_periods, np.nan)])
        new_df["ds"] = np.concatenate([self.data["ds"], dates])
    else:
        new_df["y"] = np.zeros(forecasting_periods)
        new_df["ds"] = dates

    for regressor in self.regressors:
        new_df[regressor] = self.data[regressor]

    if extra_data is not None:
        for column in extra_data.columns:
            if column not in ["y", "ds"]:
                new_df[column] = extra_data[column]

    # Build a throwaway model on the extended frame and reuse the fitted trace
    m = PMProphet(
        data=new_df,
        growth=self.growth,
        intercept=self.intercept,
        model=self.model,
        name=self.name,
    )
    m.changepoints = self.changepoints

    # Recover the seasonal periods and Fourier orders from the column names
    periods = {}
    for column in self.data.columns:
        if column.startswith("f_"):
            period, order = column[2:].split("_")
            periods.setdefault(period, [])
            periods[period].append(int(order))

    for period, orders in periods.items():
        m.add_seasonality(seasonality=float(period), fourier_order=max(orders) + 1)

    m.priors = self.priors
    m.priors_names = self.priors_names
    m.trace = self.trace
    m.multiplicative_data = self.multiplicative_data

    draws = max(self.trace[var].shape[-1] for var in self.trace.varnames
                if "hat_{}".format(self.name) not in var)

    if self.growth:
        # Start with the trend
        y_hat = m._fit_growth(prior=False)
    else:
        y_hat = np.zeros((len(m.data.ds.values), draws))

    multiplicative_seasonality = np.zeros((len(m.data.ds.values), draws))
    additive_seasonality = np.zeros((len(m.data.ds.values), draws))
    if self.seasonality:
        # Add seasonality
        additive_seasonality, multiplicative_seasonality = m._fit_seasonality(
            flatten_components=True)

    if self.intercept:
        # Add intercept
        y_hat += self.trace[self.priors_names["intercept"]]

    # Add regressors
    multiplicative_regressors = np.zeros((len(m.data.ds.values), draws))
    additive_regressors = np.zeros((len(m.data.ds.values), draws))
    for idx, regressor in enumerate(self.regressors):
        trace = m.trace[m.priors_names["regressors"]][:, idx]
        if regressor in self.multiplicative_data:
            multiplicative_regressors += trace * np.repeat(
                [m.data[regressor]], len(trace)).reshape(len(m.data), len(trace))
        else:
            additive_regressors += trace * np.repeat(
                [m.data[regressor]], len(trace)).reshape(len(m.data), len(trace))

    # Add holidays
    additive_holidays = np.zeros((len(m.data.ds.values), draws))
    multiplicative_holidays = np.zeros((len(m.data.ds.values), draws))
    for idx, holiday in enumerate(self.holidays):
        trace = m.trace[m.priors_names["holidays"]][:, idx]
        if holiday in self.multiplicative_data:
            multiplicative_holidays += trace * np.repeat(
                [m.data[holiday]], len(trace)).reshape(len(m.data), len(trace))
        else:
            additive_holidays += trace * np.repeat(
                [m.data[holiday]],
                len(trace)).reshape(len(m.data), len(trace))

    # A sum of exactly zero means there are no multiplicative components
    if np.sum(multiplicative_holidays + multiplicative_seasonality +
              multiplicative_regressors) == 0:
        multiplicative_term = 1
    else:
        multiplicative_term = (multiplicative_holidays +
                               multiplicative_seasonality +
                               multiplicative_regressors)

    y_hat *= multiplicative_term
    y_hat += additive_seasonality + additive_holidays + additive_regressors

    # Add observation noise drawn from the posterior of sigma
    y_hat_noised = np.random.normal(
        y_hat[:, self.skip_first:],
        self.data['y'].std() *
        self.trace[self.priors_names["sigma"]][self.skip_first:])

    ddf = pd.DataFrame([
        np.percentile(y_hat_noised, 50, axis=-1),
        np.percentile(y_hat_noised, alpha / 2 * 100, axis=-1),
        np.percentile(y_hat_noised, (1 - alpha / 2) * 100, axis=-1),
    ]).T
    ddf["ds"] = m.data["ds"]
    ddf.columns = ["y_hat", "y_low", "y_high", "ds"]

    if plot:
        plt.figure(figsize=(20, 10))
        ddf.plot("ds", "y_hat", ax=plt.gca())
        ddf["orig_y"] = self.data["y"]
        plt.fill_between(
            ddf["ds"].values,
            ddf["y_low"].values.astype(float),
            ddf["y_high"].values.astype(float),
            alpha=0.3,
        )
        ddf.plot("ds", "orig_y", style="k.", ax=plt.gca(), alpha=0.2)
        for idx, change_point in enumerate(self.changepoints):
            if self.auto_changepoints:
                plt.axvline(
                    change_point,
                    color="C2",
                    lw=1,
                    ls="dashed",
                    alpha=self.changepoint_weights[idx],
                )
            else:
                plt.axvline(change_point, color="C2", lw=1, ls="dashed")
        plt.axvline(pd.to_datetime(self.data.ds).max(), color="C3", lw=1, ls="dotted")
        plt.grid(axis="y")
        plt.show()

    return ddf
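A sketch of the workflow around predict: the 'ds'/'y' column names and the add_seasonality call follow the code above, while the constructor arguments shown and the fit() call with its defaults are assumptions about the surrounding class.

import pandas as pd
import numpy as np

df = pd.DataFrame({
    'ds': pd.date_range('2020-01-01', periods=100, freq='D'),
    'y': np.random.randn(100).cumsum(),
})
m = PMProphet(data=df, growth=True, intercept=True, name='demo')
m.add_seasonality(seasonality=7, fourier_order=3)
m.fit()  # assumed: samples the posterior with default settings
forecast = m.predict(30, alpha=0.05, include_history=True, plot=True)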
##########################################################
# plot energies
plt.subplot(3, 1, 2)
plt.plot(data2[:, 0], data2[:, 1], ':', label='potential energy', alpha=0.5)
plt.plot(data2[:, 0], data2[:, 2], ':', label='kinetic energy', alpha=0.5)
plt.plot(data2[:, 0], data2[:, 3], '-', label='total energy')

# labels
plt.xlabel(r'time $t$', fontsize=12)
plt.ylabel(r'total energies', fontsize=12)

# legend
plt.legend()
leg = plt.gca().get_legend()
ltext = leg.get_texts()
plt.setp(ltext, fontsize=12)

# tick fontsize
plt.xticks(fontsize=12)
plt.yticks(fontsize=12)

##########################################################
# plot energy modes
plt.subplot(3, 1, 3)
for i in range(1, data3.shape[1]):
    plt.plot(data3[:, 0], data3[:, i], '-', alpha=0.5, label='mode ' + str(i))