def serve_css(name, length, keys, values):
    """Render a word-frequency chart to a temporary PNG and serve it.

    :param name: base name for the generated image file
    :param length: number of words plotted (string or int)
    :param keys: UTF-8 byte string of space-separated words
    :param values: string of space-separated integer counts
    :return: a bottle ``static_file`` response for the generated PNG
    """
    from pylab import plt, mpl
    # SimHei so that CJK labels render; keep minus signs readable.
    mpl.rcParams['font.sans-serif'] = ['SimHei']
    mpl.rcParams['axes.unicode_minus'] = False
    from matplotlib.font_manager import FontProperties
    # font = FontProperties(fname="d:\Users\ll.tong\Desktop\msyh.ttf", size=12)
    font = FontProperties(fname="/usr/share/fonts/msyh.ttf", size=11)
    plt.xlabel(u'')
    plt.ylabel(u'出现次数', fontproperties=font)
    plt.title(u'词频统计', fontproperties=font)
    plt.grid()
    keys = keys.decode("utf-8").split(' ')
    values = values.split(' ')
    valuesInt = [int(value) for value in values]
    plt.xticks(range(int(length)), keys)
    plt.plot(range(int(length)), valuesInt)
    # NOTE(review): `defaultrotation` must be defined at module level — confirm.
    plt.xticks(rotation=defaultrotation, fontsize=9, fontproperties=font)
    plt.yticks(fontsize=10, fontproperties=font)
    # NOTE(review): .replace(':', '') is a no-op on a date string; a
    # time-based component was probably intended for uniqueness — confirm.
    name = name + str(datetime.now().date()).replace(':', '') + '.png'
    imgUrl = 'static/temp/' + name
    # fix: was matplotlib.pyplot.gcf(); only `plt` is imported in this scope.
    fig = plt.gcf()
    fig.set_size_inches(12.2, 2)
    # fix: `figsize` is not a savefig() kwarg (size is set above via
    # set_size_inches); newer matplotlib raises on unknown kwargs.
    plt.savefig(imgUrl, bbox_inches='tight', dpi=100)
    plt.close()
    tempfile = static_file(name, root='./static/temp/')
    #os.remove(imgUrl)
    return tempfile
def plot2(self, figNum, time1, data1, time2, data2, title='', units='', options=''):
    """Plot two time series on the same figure.

    :param figNum: matplotlib figure number
    :param time1, data1: first series
    :param time2, data2: second series
    :param title: optional title, cached on ``self.title``
    :param units: optional y-axis units, cached on ``self.units``
    :param options: matplotlib format string passed to both plots
    """
    plt.figure(figNum)
    # plt.hold(True);
    plt.grid(True)
    if title:
        self.title = title
    # fix: was `if not units: self.units = units`, which overwrote the
    # cached units with '' and never stored a provided value.
    if units:
        self.units = units
    # plt.cla()
    if self.preTitle:
        fig = plt.gcf()
        fig.canvas.set_window_title("Figure %d - %s" % (figNum, self.preTitle))
    plt.title("%s" % (self.title))
    plt.plot(time1, data1, options)
    plt.plot(time2, data2, options)
    plt.ylabel('(%s)' % (self.units))
    plt.xlabel('Time (s)')
    plt.margins(0.04)
def plot_inference_summary(inference_record):
    """Plot the log-likelihood trace over all inference steps — plus the
    log-prior and the weighted lp + ll traces when a prior is in use — and
    shade alternating accepted-sample spans, one span per step."""
    log_liks = []
    log_priors = []
    weighted_sums = []
    use_prior = inference_record.use_prior
    for step in inference_record.steps:
        # Index 1 onward: skip each step's initial guess.
        log_liks.extend(step.ll[1:])
        if use_prior:
            log_priors.extend(step.lp[1:])
            combined = step.wlp * np.asarray(step.lp[1:]) + np.asarray(step.ll[1:])
            weighted_sums.extend(list(combined.tolist()))
    plt.title('ll', fontsize=30)
    plt.plot(log_liks, lw=2)
    if use_prior:
        plt.plot(log_priors, lw=2)
        plt.plot(weighted_sums, lw=2)
    offset = 0
    for idx, step in enumerate(inference_record.steps):
        shade = ".2" if idx % 2 == 1 else ".5"
        plt.axvspan(xmin=offset, xmax=offset + step.nAccepted,
                    facecolor=shade, alpha=0.2)
        offset += step.nAccepted
def CHART_Running_Annual_Vol_with_Daily_Samples_on_Specific_Time_of_Day(frequency,sampling_time,window,trading_hours_per_day): Original_DAILY_Sample=data['Price'][sampling_time-1::trading_hours_per_day] #Grabs the Hourly Data and Converts it into Daily Data based on Sampling Time of Day AND Trading Hours per Day NEW_Sample=Original_DAILY_Sample[frequency-1::frequency] #Creates New Sampling list based on sampling frequency input Returns=np.log(NEW_Sample) - np.log(NEW_Sample.shift(1)) #Calculates Returns on New Sample Running_Variance=Returns.rolling(window).var() #Calculates daily running variance based on 'window size' input Running_Annual_Vol=np.sqrt(Running_Variance)*np.sqrt(252/frequency) #Place NEW Sampled data (prices) and Running Vols in DataFrame DF=pd.DataFrame(NEW_Sample) DF['Running_Vol']=Running_Annual_Vol #Create Plot DF.Price.plot() plt.legend() #data.Price.plot() plt.ylabel('Yield (%)') DF.Running_Vol.plot(secondary_y=True, style='g',rot=90) plt.xlabel('Date') plt.ylabel('Running Vol') plt.title('10 Year Bund Yield vs Annualized Running Vol ') plt.legend(bbox_to_anchor=(0.8, 1)) plt.text(0.8, 3.5, "Sampling Time={}. Window Size={}. Trading Hours per Day={}".format(sampling_time, window,trading_hours_per_day)) return plt
def subplotSingle2x(self, figNum, plotNum, numRows, numCols, time, data,
                    title='', units='', options=''):
    """Plot a series into subplot `plotNum` of figure `figNum`, sharing the
    x-axis with the first subplot created for that figure.

    :param figNum: matplotlib figure number
    :param plotNum: 1-based subplot index within the numRows x numCols grid
    :param time, data: series to plot
    :param title: optional title, cached on ``self.title``
    :param units: optional y-axis units, cached on ``self.units``
    :param options: matplotlib format string
    """
    print("subplotSingle2x")

    plt.figure(figNum)
    if title:
        self.title = title
    # fix: was `if not units: self.units = units`, which overwrote the
    # cached units with '' and never stored a provided value.
    if units:
        self.units = units
    if self.preTitle:
        fig = plt.gcf()
        # fix: was "%s" % (figNum, self.preTitle) — one placeholder fed a
        # 2-tuple, raising TypeError whenever preTitle was set.
        fig.canvas.set_window_title("Figure %d - %s" % (figNum, self.preTitle))
    # First subplot of this figure becomes the shared-x anchor.
    if figNum not in self.sharex:
        self.sharex[figNum] = plt.subplot(numRows, numCols, plotNum)
        plt.plot(time, data, options)

    plt.subplot(numRows, numCols, plotNum, sharex=self.sharex[figNum])
    # plt.hold(True);
    plt.grid(True)
    plt.title("%s" % (self.title))
    plt.plot(time, data, options)
    plt.ylabel('(%s)' % (self.units))
    plt.margins(0.04)
def cumulative_freq_plot(rast, band=0, mask=None, bins=100, xlim=None, nodata=-9999):
    '''
    Plots an empirical cumulative frequency curve for the input raster array
    in a given band.  NOTE: This currently only works for single-band arrays.
    Returns the (possibly masked/filtered) array that was plotted.
    '''
    if mask is not None:
        arr = binary_mask(rast, mask)
    else:
        arr = rast.copy()

    if nodata is not None:
        # NOTE(review): `nodata` itself is not forwarded to subarray();
        # presumably subarray() uses the same -9999 sentinel — confirm.
        arr = subarray(arr)

    values, base = np.histogram(arr, bins=bins)
    cumulative = np.cumsum(values)  # Evaluate the cumulative distribution
    plt.plot(base[:-1], cumulative, c='blue')  # Plot the cumulative function
    plt.title('Empirical Cumulative Distribution: Band %d' % band)

    if xlim is not None:
        axes = plt.gca()
        axes.set_xlim(xlim)

    plt.show()
    return arr
def CHART_Running_Annual_Vol_with_Hourly_Samples(frequency,window,trading_hours_per_day): Trading_Hours_in_Trading_Year=252*trading_hours_per_day #Calculates Trading Hours in a year Sample=data['Price'][frequency-1::frequency] #Creates New Sampling list based on frequency input Returns=np.log(Sample) - np.log(Sample.shift(1)) #Calculates Returns on New Sample Running_Variance=Returns.rolling(window).var() #Calculates hourly running variance based on 'window size' input Running_Annual_Vol=np.sqrt(Running_Variance)*np.sqrt(Trading_Hours_in_Trading_Year/frequency) #Place Running Vols and Time Series in DataFrame DF=pd.DataFrame(Sample) DF['Running_Vol']=Running_Annual_Vol #Create Plot DF.Price.plot() plt.legend() plt.ylabel('Yield (%)') DF.Running_Vol.plot(secondary_y=True, style='g',rot=90) plt.xlabel('Date') plt.ylabel('Running Vol') plt.title('10 Year Bund Yield vs Annualized Running Vol (Window Size=200)') plt.legend(bbox_to_anchor=(0.8, 1)) plt.text(0.8, 5.4, "Frequency={}. Window Size={}. Trading Hours per Day={}".format(frequency, window,trading_hours_per_day)) return plt
def plot_cumu_disp_decomposition(self, site, cmpt, loc=2, leg_fs=7, if_ylim=False):
    """Plot cumulative displacement at a site/component, decomposed into its
    modeled contributions (prediction, Rco, E cumulative slip, Raslip).

    :param site: site id passed to the per-contribution plot helpers
    :param cmpt: component name
    :param loc: matplotlib legend location code
    :param leg_fs: legend font size
    :param if_ylim: when True, set y-limits from the plotted series
    """
    self.plot_cumu_obs_linres(site, cmpt)
    # Each helper returns the series it drew; accumulate them for the y-limit.
    y = self.plot_cumu_disp_pred_added(site, cmpt, label='pred.')
    y += self.plot_R_co(site, cmpt, style='-^', label='Rco', color='orange')
    y += self.plot_E_cumu_slip(site, cmpt, color='green')
    y += self.plot_R_aslip(site, cmpt, color='black')
    plt.grid('on')
    if if_ylim:
        plt.ylim(calculate_lim(y))
    plt.ylabel(r'meter')
    plt.legend(loc=loc, prop={'size': leg_fs})
    plt.gcf().autofmt_xdate()
    plt.title('Cumulative Disp.: {site} - {cmpt}'.format(
        site=get_site_true_name(site_id=site), cmpt=cmpt))
def plot_post_disp_decomposition(
        self, site, cmpt, loc=2, leg_fs=7,
        marker_for_obs='x',
):
    """Plot postseismic displacement at a site/component, decomposed into
    observation, prediction and modeled contributions (Rco, Easlip, Raslip).

    :param site: site id passed to the per-contribution plot helpers
    :param cmpt: component name
    :param loc: matplotlib legend location code
    :param leg_fs: legend font size
    :param marker_for_obs: marker style for the observation series
    """
    # Each helper returns the series it drew; accumulate them.
    y = self.plot_post_obs_linres(site, cmpt, label='obs.', marker=marker_for_obs)
    y += self.plot_post_disp_pred_added(site, cmpt, label='pred.')
    y += self.plot_R_co(site, cmpt, style='-^', label='Rco', color='orange')
    y += self.plot_E_aslip(site, cmpt, color='green')
    y += self.plot_R_aslip(site, cmpt, color='black')
    plt.grid('on')
    plt.legend(loc=loc, prop={'size': leg_fs})
    plt.ylabel(r'meter')
    plt.gcf().autofmt_xdate()
    plt.title('Postseismic Disp. : {site} - {cmpt}'.format(
        site=get_site_true_name(site_id=site), cmpt=cmpt))
def plot(self):
    """Render the stability map over the (Vz, V1) voltage plane: shaded
    |Z|-1 image, the zero contour in thick white (stability boundary) and
    16 thin black context contours."""
    # Shift Z by -1 so the zero contour marks the stability boundary.
    Zdelta = []
    for aa in self.Z:
        Zdelta += [aa - 1]
    figure()
    imshow(Zdelta, interpolation='bilinear', origin='lower',
           cmap=cm.bone, extent=(self.Vzs, self.Vze, self.V1s, self.V1e))
    # Thick white contour at Zdelta == 0: the stability boundary.
    CS = contour(self.X, self.Y, Zdelta, [0], linewidths=4, colors='white')
    # Thin black contours: 16 levels for context.
    CS2 = contour(self.X, self.Y, Zdelta, 16, linewidths=1, colors='k')
    plt.clabel(CS2, fontsize=6, inline=1)
    plt.clabel(CS, fontsize=9, inline=1)
    if self.mode == 'stab':
        plt.title('Radial stability map %s V\n %s\n alpha=%s\n b=%s' %
                  (str(self.setup.ener), str(self.setup.V),
                   str(self.setup.alpha), str(self.setup.b)))
    else:
        plt.title(
            'Synchronization stability map %s V\n %s\n alpha=%s\n b=%s' %
            (str(self.setup.ener), str(self.setup.V), str(
                self.setup.alpha), str(self.setup.b)))
    xlabel('Vz (V)')
    ylabel('V1 (V)')
    show()
def draw(cls, t_max, agents_proportions, eco_idx, parameters):
    """Plot the proportion of indirect exchanges over time for the three
    agent types and save ../../figures/figure_<eco_idx>.pdf.

    NOTE(review): near-duplicate of the other `draw` classmethod in this
    file — consider consolidating.
    """
    color_set = ["green", "blue", "red"]
    for agent_type in range(3):
        plt.plot(np.arange(t_max), agents_proportions[:, agent_type],
                 color=color_set[agent_type], linewidth=2.0,
                 label="Type-{} agents".format(agent_type))
    plt.ylim([-0.1, 1.1])
    plt.xlabel("$t$")
    plt.ylabel("Proportion of indirect exchanges")
    # plt.suptitle('Direct choices proportion per type of agents', fontsize=14, fontweight='bold')
    plt.legend(loc='upper right', fontsize=12)
    print(parameters)  # debug trace of the run's parameters
    plt.title(
        "Workforce: {}, {}, {}; displacement area: {}; vision area: {}; alpha: {}; tau: {}\n"
        .format(
            parameters["x0"],
            parameters["x1"],
            parameters["x2"],
            parameters["movement_area"],
            parameters["vision_area"],
            parameters["alpha"],
            parameters["tau"]
        ), fontsize=12)
    if not path.exists("../../figures"):
        mkdir("../../figures")
    plt.savefig("../../figures/figure_{}.pdf".format(eco_idx))
    plt.show()
def geometric_brownian_motion_option_pricing(
        initial_val=100, num_samples=10000, riskless_rate=0.05,
        volatility_sigma=0.25, time_year=2.0,
        num_time_interval_discretization=50):
    """Simulate geometric Brownian motion paths under the risk-neutral
    measure, plot the terminal-value histogram and ten sample paths, and
    return the full path array.

    :return: ndarray of shape (num_time_interval_discretization + 1,
        num_samples), one column per simulated path
    """
    dt = time_year / num_time_interval_discretization
    samples = np.zeros((num_time_interval_discretization + 1, num_samples))
    samples[0] = initial_val
    for t in range(1, num_time_interval_discretization + 1):
        # Exact log-normal GBM step over dt.
        samples[t] = samples[t - 1] * np.exp(
            (riskless_rate - 0.5 * (volatility_sigma**2)) * dt
            + volatility_sigma * np.sqrt(dt)
            * npr.standard_normal(num_samples))

    plt.figure(figsize=(10, 6))
    # fix: was samples[50], which is only the final step when
    # num_time_interval_discretization == 50; use the last row instead.
    plt.hist(samples[-1], bins=50)
    plt.title("Geometric Brownian Motion")
    plt.xlabel('index level')
    plt.ylabel('frequency')
    plt.show()

    plt.figure(figsize=(10, 6))
    plt.plot(samples[:, :10], lw=1.5)
    plt.xlabel('time')
    plt.ylabel('index level')
    plt.title('Sample Path')
    plt.show()
    return samples
def square_root_diffusion_exact(initial_val=0.05, kappa=3.0, theta=0.02,
                                sigma=0.1, time_year=2, num_samples=10000,
                                num_time_interval_discretization=50):
    """Simulate a square-root (CIR) diffusion with the exact scheme based on
    the non-central chi-square transition density; plot the terminal
    histogram and ten sample paths, and return the full path array.

    :return: ndarray of shape (num_time_interval_discretization + 1,
        num_samples)
    """
    x = np.zeros((num_time_interval_discretization + 1, num_samples))
    x[0] = initial_val
    dt = time_year / num_time_interval_discretization
    for t in range(1, num_time_interval_discretization + 1):
        # Degrees of freedom, scale and non-centrality of the CIR
        # transition law over one step dt.
        df = 4 * theta * kappa / sigma**2
        c = (sigma**2 * (1 - np.exp(-kappa * dt))) / (4 * kappa)
        nc = np.exp(-kappa * dt) / c * x[t - 1]
        x[t] = c * npr.noncentral_chisquare(df, nc, size=num_samples)

    plt.figure(figsize=(10, 6))
    plt.hist(x[-1], bins=50)
    plt.title("Square root diffusion Exact")
    plt.xlabel('value')
    plt.ylabel('frequency')
    plt.show()

    plt.figure(figsize=(10, 6))
    plt.plot(x[:, :10], lw=1.5)
    plt.xlabel('time')
    plt.ylabel('index level')
    plt.title('Sample Path SRD Exact')
    plt.show()
    return x
def plot_profits(self, player, period):
    """Plot a trailing moving average of one player's profits over the last
    `period` rounds and save the figure.

    :param player: column index of the player in the profits array
    :param period: number of most recent rounds to include
    """
    profits = self.results["profits"][-period:]
    plt.title("Profits")
    time_window = 100  # moving-average window, in rounds
    x = np.arange(len(profits[:, player]))
    y = []
    for i in x:
        # Mean over at most the last `time_window` observations.
        if i < time_window:
            y_value = np.mean(profits[:i + 1, player])
        else:
            y_value = np.mean(profits[i - time_window:i + 1, player])
        y.append(y_value)
    plt.plot(x, y, color="black")
    # Theoretical maximum profit bounds the y-axis.
    maximum_profit = \
        self.parameters["n_positions"] * \
        self.parameters["n_prices"]
    plt.ylim(0, maximum_profit)
    plt.annotate("Time window: {}".format(time_window), xy=(0.8, 0.1),
                 xycoords='axes fraction', fontsize=6)
    # plt.annotate(self.string_parameters, xy=(-0.05, -0.1), xycoords='axes fraction', fontsize=6)
    plt.savefig(self.format_fig_name("profits_player{}".format(player)))
    plt.close()
def square_root_diffusion_euler(initial_val=0.05, kappa=3.0, theta=0.02,
                                sigma=0.1, time_year=2, num_samples=10000,
                                num_time_interval_discretization=50):
    """Simulate a CIR square-root diffusion with a (full-truncation) Euler
    scheme; plot the terminal histogram and ten sample paths, and return
    the truncated (non-negative) path array.

    :return: ndarray of shape (num_time_interval_discretization + 1,
        num_samples)
    """
    dt = time_year / num_time_interval_discretization
    xh = np.zeros((num_time_interval_discretization + 1, num_samples))
    x = np.zeros_like(xh)
    xh[0] = initial_val
    x[0] = initial_val
    for t in range(1, num_time_interval_discretization + 1):
        # np.maximum(.., 0) truncates negative excursions of the raw Euler
        # path inside both the drift and the diffusion terms.
        xh[t] = (xh[t - 1]
                 + kappa * (theta - np.maximum(xh[t - 1], 0)) * dt
                 + sigma * np.sqrt(np.maximum(xh[t - 1], 0)) * math.sqrt(dt)
                 * npr.standard_normal(num_samples))
    x = np.maximum(xh, 0)

    plt.figure(figsize=(10, 6))
    plt.hist(x[-1], bins=50)
    plt.xlabel('value')
    plt.ylabel('frequency')
    plt.title('Square root diffusion Approx Euler')
    plt.show()

    plt.figure(figsize=(10, 6))
    plt.plot(x[:, :10], lw=1.5)
    plt.xlabel('time')
    plt.ylabel('index level')
    plt.title('Sample Path SRD approx')
    plt.show()
    return x
def example_filterbank():
    """Demo: run an impulse through a GammatoneFilterbank and plot the
    per-band impulse responses plus the absolute band spectra.

    :return: the filterbank instance used for the demo
    """
    from pylab import plt
    import numpy as np

    x = _create_impulse(2000)
    gfb = GammatoneFilterbank(density=1)

    analyse = gfb.analyze(x)
    imax, slopes = gfb.estimate_max_indices_and_slopes()
    fig, axs = plt.subplots(len(gfb.centerfrequencies), 1)
    for (band, state), imx, ax in zip(analyse, imax, axs):
        ax.plot(np.real(band))
        ax.plot(np.imag(band))
        ax.plot(np.abs(band))
        ax.plot(imx, 0, 'o')  # mark the estimated envelope maximum
        ax.set_yticklabels([])
    [ax.set_xticklabels([]) for ax in axs[:-1]]
    axs[0].set_title('Impulse responses of gammatone bands')

    fig, ax = plt.subplots()

    def plotfun(x, y):
        ax.semilogx(x, 20 * np.log10(np.abs(y)**2))

    gfb.freqz(nfft=2 * 4096, plotfun=plotfun)
    plt.grid(True)
    plt.title('Absolute spectra of gammatone bands.')
    plt.xlabel('Normalized Frequency (log)')
    plt.ylabel('Attenuation /dB(FS)')
    # fix: plt.axis('Tight') is not a valid option string (case-sensitive)
    # and raises ValueError in current matplotlib; use 'tight'.
    plt.axis('tight')
    plt.ylim([-90, 1])
    plt.show()
    return gfb
def example_filterbank():
    """Demo: run an impulse through a GammatoneFilterbank and plot the
    per-band impulse responses plus the absolute band spectra.

    NOTE(review): duplicate of the other example_filterbank in this file.

    :return: the filterbank instance used for the demo
    """
    from pylab import plt
    import numpy as np

    x = _create_impulse(2000)
    gfb = GammatoneFilterbank(density=1)

    analyse = gfb.analyze(x)
    imax, slopes = gfb.estimate_max_indices_and_slopes()
    fig, axs = plt.subplots(len(gfb.centerfrequencies), 1)
    for (band, state), imx, ax in zip(analyse, imax, axs):
        ax.plot(np.real(band))
        ax.plot(np.imag(band))
        ax.plot(np.abs(band))
        ax.plot(imx, 0, 'o')  # mark the estimated envelope maximum
        ax.set_yticklabels([])
    [ax.set_xticklabels([]) for ax in axs[:-1]]
    axs[0].set_title('Impulse responses of gammatone bands')

    fig, ax = plt.subplots()

    def plotfun(x, y):
        ax.semilogx(x, 20 * np.log10(np.abs(y)**2))

    gfb.freqz(nfft=2 * 4096, plotfun=plotfun)
    plt.grid(True)
    plt.title('Absolute spectra of gammatone bands.')
    plt.xlabel('Normalized Frequency (log)')
    plt.ylabel('Attenuation /dB(FS)')
    # fix: plt.axis('Tight') is not a valid option string (case-sensitive)
    # and raises ValueError in current matplotlib; use 'tight'.
    plt.axis('tight')
    plt.ylim([-90, 1])
    plt.show()
    return gfb
def graph_error_mean_per_hour(dataset, pred, column):
    """Compare the absolute prediction error per hour with the absolute
    price change per hour, over the tail of `dataset` aligned with `pred`.

    :param dataset: DataFrame with at least Date and Close columns
    :param pred: 1-D predictions aligned to the last len(pred) rows
    :param column: if "Diff", `pred` holds predicted differences and is
        converted back to price levels before comparison
    """
    # NOTE(review): the assignments below write into a slice of `dataset`
    # (chained-assignment territory) — consider an explicit .copy().
    data = dataset.iloc[dataset.shape[0] - len(pred):]
    data["Pred"] = np.around(pred, 5)
    if column == "Diff":
        # Rebuild price levels: previous close + predicted diff.
        res = np.delete(data["Close"].to_numpy(), -1)
        res = np.insert(res, 0, 0)
        data["Pred"] = data["Pred"] + res
    # Absolute error per row.
    data["AEM"] = np.abs(np.around(data.Close - data.Pred, 5))
    data['Hour'] = data.Date.apply(lambda x: x.hour)
    data["Diff"] = data.Close.diff().apply(abs).fillna(0)

    # Error histogram restricted to daytime hours (6..19).
    plt.figure(figsize=(14, 6))
    plt.hist(data.where((data.Hour > 5) & (data.Hour < 20)).dropna()["AEM"],
             150, density=True, range=(0, 0.003))
    plt.show()

    diff_mean = data.groupby("Hour").mean().Diff
    x_ch = diff_mean.index
    y_ch = diff_mean
    diff_mean_aem = data.groupby("Hour").mean().AEM
    x = diff_mean_aem.index
    y = diff_mean_aem

    # Green bars: mean absolute price change; red bars: mean absolute error.
    plt.figure(figsize=(14, 6))
    plt.bar(x_ch, y_ch, color="green")
    plt.bar(x, y, color="r", alpha=0.7)
    plt.title(
        "Absolute mean of error in predictions per hour compared to absolute mean of price changes",
        fontsize=16)
    plt.show()
def plot_response(data, plate_name, save_folder = 'Figures/'):
    """Plot mean responses per block as horizontal bars with error bars,
    saving one figure per block into `save_folder`.

    Python 2 code (xrange; list-style dict.keys() sorting).

    :param data: mapping block -> {name: {'mean': [...], 'std': [...]}}
    :param plate_name: figure title
    :param save_folder: output directory, created if missing
    """
    if not os.path.isdir(save_folder):
        os.makedirs(save_folder)
    for block in data:
        # Group similarly-named entries so related bars share color/pattern.
        group = group_similar(data[block].keys())
        names = data[block].keys()
        names.sort()
        #
        plt.figure(figsize=(16, 4 + len(names)/8), dpi=300)
        #
        for i, name in enumerate(names):
            a, b, c = get_index(group, name)
            color, pattern = color_shade_pattern(a, b, c, group)
            mean = data[block][name]['mean'][0]
            std = data[block][name]['std'][0]
            plt.barh([i], [mean], height=1.0, color=color, hatch=pattern)
            plt.errorbar([mean], [i+0.5], xerr=[std], ecolor = [0,0,0], linestyle = '')
        plt.yticks([i+0.5 for i in xrange(len(names))], names, size = 8)
        plt.title(plate_name)
        plt.ylim(0, len(names))
        plt.xlabel('change')
        plt.tight_layout()
        # Blocks are numbered from 1 in the output file names.
        plt.savefig(save_folder + 'response_' + str(block + 1))
    #
    return None
def draw(cls, t_max, agents_proportions, eco_idx, parameters):
    """Plot the proportion of indirect exchanges over time for the three
    agent types and save ../../figures/figure_<eco_idx>.pdf.

    NOTE(review): near-duplicate of the other `draw` classmethod in this
    file — consider consolidating.
    """
    color_set = ["green", "blue", "red"]
    for agent_type in range(3):
        plt.plot(np.arange(t_max), agents_proportions[:, agent_type],
                 color=color_set[agent_type], linewidth=2.0,
                 label="Type-{} agents".format(agent_type))
    plt.ylim([-0.1, 1.1])
    plt.xlabel("$t$")
    plt.ylabel("Proportion of indirect exchanges")
    # plt.suptitle('Direct choices proportion per type of agents', fontsize=14, fontweight='bold')
    plt.legend(loc='upper right', fontsize=12)
    print(parameters)  # debug trace of the run's parameters
    plt.title(
        "Workforce: {}, {}, {}; displacement area: {}; vision area: {}; alpha: {}; tau: {}\n"
        .format(parameters["x0"], parameters["x1"], parameters["x2"],
                parameters["movement_area"], parameters["vision_area"],
                parameters["alpha"], parameters["tau"]), fontsize=12)
    if not path.exists("../../figures"):
        mkdir("../../figures")
    plt.savefig("../../figures/figure_{}.pdf".format(eco_idx))
    plt.show()
def plot_pre(fn):
    """Plot a GPS time-series file: raw and residual values (light colors),
    the linear sections on top (saturated colors), outliers circled in red,
    and jump epochs as dashed vertical lines.

    :param fn: path whose basename follows the '<site>.<component>...' convention
    """
    t = read_t(fn)
    y = read_y(fn)
    yres = read_yres(fn)
    # Full series in light colors as background.
    plt.plot_date(t+_adj_dates, y, 'x', color='lightblue')
    plt.plot_date(t+_adj_dates, yres, 'x', color='lightgreen')
    # Linear sections drawn on top in saturated colors.
    linsec = read_linsec(fn)
    ch = cut_ts(t, linsec)
    plt.plot_date(t[ch]+_adj_dates, y[ch], 'x', color='blue', label='original')
    plt.plot_date(t[ch]+_adj_dates, yres[ch], 'x', color='green', label='residual')
    # Outliers marked with red-edged circles.
    outliers = read_outlier(fn)
    idx = outlier_index(t, outliers)
    plt.plot_date(t[idx]+_adj_dates, y[idx], 'o', mec='red', mew=1, mfc='blue')
    plt.plot_date(t[idx]+_adj_dates, yres[idx], 'o', mec='red', mew=1, mfc='green')
    # Vertical dashed lines at jump epochs.
    for jump in read_jumps(fn):
        plt.axvline(jump + _adj_dates, color='red', ls='--')
    plt.grid('on')
    site = basename(fn).split('.')[0]
    cmpt = basename(fn).split('.')[1]
    plt.title('%s - %s'%(site, cmpt))
def test_screenstate_1(self):
    """GUI smoke test: open two sample images in gdesk and plot the column
    means of image 1 and the row means of image 2."""
    from gdesk import gui
    from pylab import plt
    from pathlib import Path

    gui.load_layout('console')

    samplePath = Path(r'./samples')

    gui.img.select(1)
    gui.img.open(samplePath / 'kodim05.png')
    gui.img.zoom_fit()

    # Mean over color channels, then over rows -> per-column means.
    plt.plot(gui.vs.mean(2).mean(1))
    plt.title('Column means of image 1')
    plt.xlabel('Column Number')
    plt.ylabel('Mean')
    plt.grid()
    plt.show()

    gui.img.select(2)
    gui.img.open(samplePath / 'kodim23.png')
    gui.img.zoom_full()

    plt.figure()
    # Mean over color channels, then over columns -> per-row means.
    plt.plot(gui.vs.mean(2).mean(0))
    plt.title('Row means of image 2')
    plt.xlabel('Row Number')
    plt.ylabel('Mean')
    plt.grid()
    plt.show()
def plot_inference_summary(inference_record):
    """Plot log-likelihood — and, when steps carry a prior, log-prior and
    weighted lp + ll — traces across inference steps, shading alternating
    accepted-sample spans.

    Variant that detects the presence of a prior via AttributeError instead
    of a use_prior flag (compare the other plot_inference_summary in this
    file).
    """
    ll = []
    lp = []
    wlp_plus_ll = []
    for step in inference_record.steps:
        ll += step.ll[1:]  # start from 1 and not 0: to skip the initial guess
        try:
            lp += step.lp[1:]
            wlp_plus_ll += list((step.wlp * np.asarray(step.lp[1:]) +
                                 np.asarray(step.ll[1:])).tolist())
        except AttributeError:
            # Steps without a prior simply lack .lp / .wlp.
            pass
    plt.title('ll', fontsize=30)
    plt.plot(ll, lw=2)
    plt.plot(lp, lw=2)
    plt.plot(wlp_plus_ll, lw=2)
    counter = 0
    for i, step in enumerate(inference_record.steps):
        # Alternate the shading per step.
        if i % 2 == 1:
            facecolor = ".2"
        else:
            facecolor = ".5"
        plt.axvspan(counter, counter+step.nAccepted,
                    facecolor=facecolor, alpha=0.2)
        counter += step.nAccepted
def plot_comfort(fingers_org=range(1, 6, 1), fingers_dst=range(1, 6, 1),
                 jumps=range(-12, 13, 1)):
    """3-D scatter of the difficulty of finger passages over (from-finger,
    to-finger, interval); saves ./figures/image.png and shows the figure.

    :param fingers_org: candidate starting fingers
    :param fingers_dst: candidate destination fingers
    :param jumps: candidate intervals in half steps
    """
    import seaborn
    from mpl_toolkits.mplot3d import Axes3D
    from pylab import plt

    xs, ys, zs, cs = calculate_comforts(fingers_org, fingers_dst, jumps)

    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(xs, ys, zs, c=cs)
    ax.set_zlabel("Interval (half steps)", fontsize=15)
    ax.set_zlim(jumps[0], jumps[-1])
    # ax.set_zticks(jumps)
    plt.xticks(fingers_org)
    plt.xlim(fingers_org[0], fingers_org[-1])
    plt.xlabel("From finger", fontsize=15)
    plt.yticks(fingers_dst)
    plt.ylim(fingers_dst[0], fingers_dst[-1])
    plt.ylabel("To finger", fontsize=15)
    plt.title("Difficulty of finger passages", fontsize=25)
    # fix: `figsize` is not a savefig() kwarg (it belongs to plt.figure());
    # newer matplotlib raises on unknown kwargs, older versions ignored it.
    plt.savefig('./figures/image.png', dpi=300)
    plt.show()
def plot_zipf(*freq):
    '''
    Plot word frequency against rank on a log-log scale for each language
    (Zipf's law).  Python 2 code (xrange); reads the module-level
    `bonus_part` flag to include the stemmed variants.

    :param freq: one (word, count) list per language, ordered by rank
    '''
    ranks, frequencies = [], []
    langs, colors = [], []
    langs = ["English", "German", "Finnish"]
    colors = ['#FF0000', '#00FF00', '#0000FF']
    if bonus_part:
        colors.extend(['#00FFFF', '#FF00FF', '#FFFF00'])
        langs.extend(["English (Stemmed)", "German (Stemmed)", "Finnish (Stemmed)"])

    plt.subplot(111)  # 1, 1, 1
    num = 6 if bonus_part else 3
    for i in xrange(num):
        ranks.append(range(1, len(freq[i]) + 1))
        frequencies.append([e[1] for e in freq[i]])
        # log x and y axes, both with base 10
        plt.loglog(ranks[i], frequencies[i], marker='',
                   basex=10, color=colors[i], label=langs[i])

    plt.legend()
    plt.grid(True)
    plt.title("Zipf's law!")
    plt.xlabel('Rank')
    plt.ylabel('Frequency')
    plt.show()
def visual_results(Image_data, preds, Labels=None, Top=0):
    """Show each image with predicted age/gender/smile/glasses in the title
    and the ground truth (when provided) on the x-label.  Python 2 code.

    :param Image_data: array of preprocessed images, first axis = sample
    :param preds: dict with 'age', 'gender', 'smile', 'glass' predictions
    :param Labels: optional dict of ground-truth labels, same keys
    :param Top: 0 = show all images, otherwise only the first `Top`
    """
    from pylab import plt
    pred_age_value = preds['age']
    pred_gender_value = preds['gender']
    pred_smile_value = preds['smile']
    pred_glass_value = preds['glass']
    Num = Image_data.shape[0] if Top == 0 else Top
    for k in xrange(Num):
        print k, Num
        plt.figure(1)
        plt.imshow(de_preprocess_image(Image_data[k]))
        title_str = 'Prediction: Age %0.1f, %s, %s, %s.' % (
            pred_age_value[k], gender_list[pred_gender_value[k]],
            glass_list[pred_glass_value[k]], smile_list[pred_smile_value[k]])
        x_label_str = 'GT: '
        # Ground-truth labels are optional and may be partial; fall through
        # silently when a field is missing.
        try:
            x_label_str = x_label_str + 'Age %0.1f' % Labels['age'][k]
        except:
            pass
        try:
            x_label_str = x_label_str + '%s, %s, %s' % (gender_list[int(
                Labels['gender'][k])], glass_list[int(
                    Labels['glass'][k])], smile_list[int(Labels['smile'][k])])
        except:
            pass
        plt.title(title_str)
        plt.xlabel(x_label_str)
        plt.show()
def generate_start_time_figures(self):
    """Histogram each patient's earliest pain-score recording time (minutes
    from end of surgery), in normal and log scale, then combine the two
    PDFs side by side via a generated standalone LaTeX document.

    Requires pdflatex on PATH; writes pain_score_start_times.pdf into
    self.output_directory, using self.tmp_directory for intermediates.
    """
    recording_time_grouped_by_patient = self.pain_data[["PatientID", "NRSTimeFromEndSurgery_mins"]].groupby("PatientID")
    # Earliest recording per patient.
    recording_start_minutes = recording_time_grouped_by_patient.min()

    fig1 = "fig1.pdf"
    fig2 = "fig2.pdf"

    plt.figure(figsize=[8,4])
    plt.title("Pain score recording start times", fontsize=14).set_y(1.05)
    plt.ylabel("Occurrences", fontsize=14)
    plt.xlabel("Recording Start Time (minutes)", fontsize=14)
    plt.hist(recording_start_minutes.values, bins=20, color="0.5")
    plt.savefig(os.path.join(self.tmp_directory, fig1), bbox_inches="tight")

    plt.figure(figsize=[8,4])
    plt.title("Pain score recording start times, log scale", fontsize=14).set_y(1.05)
    plt.ylabel("Occurrences", fontsize=14)
    plt.xlabel("Recording Start Time (minutes)", fontsize=14)
    plt.hist(recording_start_minutes.values, bins=20, log=True, color="0.5")
    plt.savefig(os.path.join(self.tmp_directory, fig2), bbox_inches="tight")

    #save the figures in panel format
    f = open(os.path.join(self.tmp_directory, "tmp.tex"), 'w')
    f.write(r"""
\documentclass[%
,float=false % this is the new default and can be left away.
,preview=true
,class=scrartcl
,fontsize=20pt
]{standalone}
\usepackage[active,tightpage]{preview}
\usepackage{varwidth}
\usepackage{graphicx}
\usepackage[justification=centering]{caption}
\usepackage{subcaption}
\usepackage[caption=false,font=footnotesize]{subfig}
\renewcommand{\thesubfigure}{\Alph{subfigure}}
\begin{document}
\begin{preview}
\begin{figure}[h]
\begin{subfigure}{0.5\textwidth}
\includegraphics[width=\textwidth]{""" + fig1 + r"""}
\caption{Normal scale}
\end{subfigure}\begin{subfigure}{0.5\textwidth}
\includegraphics[width=\textwidth]{""" + fig2 + r"""}
\caption{Log scale}
\end{subfigure}
\end{figure}
\end{preview}
\end{document}
""")
    f.close()
    subprocess.call(["pdflatex", "-halt-on-error", "-output-directory", self.tmp_directory, os.path.join(self.tmp_directory, "tmp.tex")])
    shutil.move(os.path.join(self.tmp_directory, "tmp.pdf"), os.path.join(self.output_directory, "pain_score_start_times.pdf"))
def plot_charts(self):
    """Histogram the final portfolio exit valuations and display the figure."""
    valuations = self.final_portfolio_valuation
    print(valuations)
    plt.figure(figsize=(10, 6))
    plt.hist(valuations, bins=100)
    chart_title = (
        "Final Exit Valuation complete Portfolio after {} year as Geometric Brownian Motion"
        .format(self.max_year))
    plt.title(chart_title)
    plt.xlabel('Exit Valuation')
    plt.ylabel('frequency')
    plt.show()
def plot_data(self):
    """Scatter parent incomes against student grades on a cleared figure."""
    # Start from a blank canvas so repeated calls don't overlay old points.
    plt.clf()
    plt.title("Parent incomes vs. student grade")
    plt.plot(self.incomes, self.grades,
             linestyle='', marker='o', color='orange')
def plot(self, new_plot=False, xlim=None, ylim=None, title=None, figsize=None,
         xlabel=None, ylabel=None, fontsize=None, show_legend=True, grid=True):
    """
    Plot data using matplotlib library. Use show() method for matplotlib
    to see result or ::

        %pylab inline

    in IPython to see plot as cell output.

    :param bool new_plot: create or not new figure
    :param xlim: x-axis range
    :param ylim: y-axis range
    :type xlim: None or tuple(x_min, x_max)
    :type ylim: None or tuple(y_min, y_max)
    :param title: title
    :type title: None or str
    :param figsize: figure size
    :type figsize: None or tuple(weight, height)
    :param xlabel: x-axis name
    :type xlabel: None or str
    :param ylabel: y-axis name
    :type ylabel: None or str
    :param fontsize: font size
    :type fontsize: None or int
    :param bool show_legend: show or not labels for plots
    :param bool grid: show grid or not
    """
    # Explicit arguments override the instance-level defaults.
    xlabel = self.xlabel if xlabel is None else xlabel
    ylabel = self.ylabel if ylabel is None else ylabel
    figsize = self.figsize if figsize is None else figsize
    fontsize = self.fontsize if fontsize is None else fontsize
    # Remember the effective settings for subclasses' _plot() hooks.
    self.fontsize_ = fontsize
    self.show_legend_ = show_legend
    title = self.title if title is None else title
    xlim = self.xlim if xlim is None else xlim
    ylim = self.ylim if ylim is None else ylim
    new_plot = self.new_plot or new_plot

    if new_plot:
        plt.figure(figsize=figsize)

    plt.xlabel(xlabel, fontsize=fontsize)
    plt.ylabel(ylabel, fontsize=fontsize)
    plt.title(title, fontsize=fontsize)
    plt.tick_params(axis='both', labelsize=fontsize)
    plt.grid(grid)

    if xlim is not None:
        plt.xlim(xlim)
    if ylim is not None:
        plt.ylim(ylim)

    # Subclass hook that draws the actual data.
    self._plot()

    if show_legend:
        plt.legend(loc='best', scatterpoints=1)
def plotSigmoidTanh(fname=None):
    """Plot sigmoidTanh(x, 0.9) over [-10, 10]; optionally save to `fname`
    before showing."""
    fig, axis = plt.subplots()
    sample_points = np.linspace(-10.0, 10.0, num=50, endpoint=True)
    curve = [sigmoidTanh(point, 0.9) for point in sample_points]
    axis.plot(sample_points, curve, 'black')
    plt.title("y=sigmoid(s)")
    plt.grid(True)
    if fname:
        plt.savefig(fname)
    plt.show()
def show_melspectrogram(conf, mels, title='Log-frequency power spectrogram'):
    """Display a mel spectrogram with a dB colorbar.

    :param conf: config object providing sampling_rate, hop_length, fmin, fmax
    :param mels: mel-spectrogram matrix as expected by librosa.display.specshow
    :param title: plot title
    """
    librosa.display.specshow(mels, x_axis='time', y_axis='mel',
                             sr=conf.sampling_rate, hop_length=conf.hop_length,
                             fmin=conf.fmin, fmax=conf.fmax)
    plt.colorbar(format='%+2.0f dB')
    plt.title(title)
    plt.show()
def loadSchemeDetail(request):
    """Django view: for one company, compute SMA crossover signals for each
    of its mutual-fund schemes, save a chart per scheme, and render the
    MutualFundAjax.html template with the aggregated details.

    Query parameter: Company_Name.
    """
    company = request.GET.get('Company_Name')
    print(company)
    schemaarry = []
    objschema = mfc.objects.filter(Company_Name=company).values_list(
        "Scheme_Code", "Scheme_Name")
    for schm in objschema:
        mfd = mutualfunddetail()
        mfd.Company_Name = company
        mfd.Schema_Code = schm[0]
        mfd.Scheme_Name = schm[1]
        schema = schm[0]
        objmutualfund = mf.objects.filter(Scheme_Code=int(schema)).values_list(
            'NAV', 'Date')
        data = pd.DataFrame(list(objmutualfund), columns=['NAV', 'Date'])
        data = data.set_index('Date')
        print("######## DATA BEFORE############")
        print(data)
        data = data.astype(float)
        data = data.fillna(data.mean())
        print("NP ARRAY##########")
        print(np.asarray(data['NAV']))
        # NOTE(review): window sizes do not match the column names —
        # 'SMA200' uses a 50-period window and 'SMA50' a 20-period window.
        # Confirm which pairing (200/50 vs 50/20) is intended.
        data['SMA200'] = talib.SMA(np.asarray(data['NAV']), 50)
        data['SMA50'] = talib.SMA(np.asarray(data['NAV']), 20)
        data = data.fillna(data.mean())
        print("######## DATA AFTER SMA############")
        print(data)
        data.head()
        data.tail()
        data[['NAV', 'SMA50', 'SMA200']].plot(figsize=(10, 6))
        # BUY signal: fast SMA above slow SMA -> 1, otherwise -1.
        data['BUY'] = np.where(data['SMA50'] > data['SMA200'], 1, -1)
        data.dropna(inplace=True)
        data.head()
        data[['NAV', 'SMA50', 'SMA200', 'BUY']].plot(figsize=(10, 6),
                                                     secondary_y='BUY')
        plt.title(company + " Mutual Fund")
        # Save under the server-side path, then switch to the static URL.
        imgsrc = root_path + "/Figures/" + schema + "_mutualfund.png"
        plt.savefig(imgsrc)
        imgsrc = "/static/Portfolio_Tracker/Figures/" + schema + "_mutualfund.png"
        data = data.fillna(data.mean())
        mfd.NAV = data['NAV'].mean()
        mfd.Day_50_Moving_Average = data['SMA50'].mean()
        mfd.Day_200_Moving_Average = data['SMA200'].mean()
        mfd.Buy = np.where(
            mfd.Day_50_Moving_Average > mfd.Day_200_Moving_Average, 1, -1)
        mfd.ImageUrl = imgsrc
        schemaarry.append(mfd)
    print("### objschema ##")
    print(objschema)
    return render(request, "MutualFundAjax.html", {
        "Company_Name": company,
        "scheme": schemaarry
    })
def detect(self, img): bboxes = None # pnet if not self.pnet: return None bboxes = self.detect_pnet(img) if bboxes is None: return None ## 可视化PNET的结果 if SHOW_FIGURE: plt.figure() tmp = img.copy() for i in bboxes: x0 = int(i[0]) y0 = int(i[1]) x1 = x0 + int(i[2]) y1 = y0 + int(i[3]) cv2.rectangle(tmp, (x0, y0), (x1, y1), (0, 0, 255), 2) plt.imshow(tmp[:, :, ::-1]) plt.title("pnet result") # rnet if not self.rnet: return bboxes bboxes = bboxes[:, 0:4].astype(np.int32) bboxes = self.detect_ronet(img, bboxes, 24) if bboxes is None: return None ## 可视化RNET的结果 if SHOW_FIGURE: plt.figure() tmp = img.copy() for i in bboxes: x0 = int(i[0]) y0 = int(i[1]) x1 = x0 + int(i[2]) y1 = y0 + int(i[3]) cv2.rectangle(tmp, (x0, y0), (x1, y1), (0, 0, 255), 2) plt.imshow(tmp[:, :, ::-1]) plt.title("rnet result") #onet if not self.onet: return bboxes bboxes = bboxes[:, 0:4].astype(np.int32) bboxes = self.detect_ronet(img, bboxes, 48) return bboxes
def plot_positions(self, player, period):
    """Scatter one player's positions over the last `period` rounds and
    save the figure to disk."""
    recent_positions = self.results["positions"][-period:]
    plt.title("Positions")
    plt.plot(recent_positions[:, player], "o", markersize=0.2, color="black")
    upper_bound = self.parameters["n_positions"] + 0.1
    plt.ylim(0.9, upper_bound)
    # plt.annotate(self.string_parameters, xy=(-0.05, -0.1), xycoords='axes fraction', fontsize=8)
    figure_name = self.format_fig_name("positions_player{}".format(player))
    plt.savefig(figure_name)
    plt.close()
def plot2piFft(self, func, Fs, L):
    '''
    Fs is the sampling freq. L is length of signal list.
    This plot is for a func that has period of 2pi.
    If you found the time domain wave is not very accurate,
    that is because you set too small Fs, which leads to too big step Ts.
    '''
    # The frequency axis is expressed in units of the base frequency;
    # here the base period is 2*pi rad/s.
    base_freq = 1.0/(2*np.pi)
    Ts = 1.0/Fs
    t = [el*Ts for el in range(0,L)]
    x = [func(el) for el in t]

    # https://www.ritchievink.com/blog/2017/04/23/understanding-the-fourier-transform-by-example/
    # Code contributed by Xiaoming:
    # sampleF = Fs
    # print('Xiaoming:')
    # for f, Y in zip(
    #     np.arange(0, len(x)*sampleF,1) * 1/len(x) * sampleF,
    #     np.log10(np.abs(np.fft.fft(x) / len(x)))
    # ):
    #     print('\t', f, Y)

    L_4pi = int(4*np.pi / Ts) +1  # plot only the first two periods (4*pi)

    self.fig_plot2piFft = plt.figure(7)
    plt.subplot(211)
    plt.plot(t[:L_4pi], x[:L_4pi])
    #title('Signal in Time Domain')
    #xlabel('Time / s')
    #ylabel('x(t)')
    plt.title('Winding Function')
    plt.xlabel('Angular location along air gap [mech. rad.]')
    plt.ylabel('Current Linkage by unit current [Ampere]')

    NFFT = 2**nextpow2(L)
    print('NFFT =', NFFT, '= 2^%g' % (nextpow2(L)), '>= L =', L)
    y = fft(x,NFFT)  # y is a COMPLEX array defined in numpy
    # /L for spectrum amplitude consistent with the actual signal.
    # 2* for single-sided. abs for amplitude.
    Y = [2 * el.__abs__() / L for el in y]
    f = Fs/2.0/base_freq*linspace(0,1,int(NFFT/2+1))  # unit is base_freq Hz
    #f = Fs/2.0*linspace(0,1,NFFT/2+1) # unit is Hz

    plt.subplot(212)
    plt.plot(f, Y[0:int(NFFT/2+1)])
    plt.title('Single-Sided Amplitude Spectrum of x(t)')
    plt.xlabel('Frequency divided by base_freq [base freq * Hz]')
    #plt.ylabel('|Y(f)|')
    plt.ylabel('Amplitude [1]')
    plt.xlim([0,50])
def plot_smoothed_alpha_comparison(self, rmsval, suffix=''):
    """Plot the raw vs. smoothed angle phi over frequency, annotate the RMS
    offset in the title, and either show the figure or save it under the
    case's plot path (depending on self.show).

    :param rmsval: RMS offset of the data from the smoothed curve
    :param suffix: extra suffix appended to the output file name
    """
    plt.plot(self.f, self.alpha, 'ko', label='data set')
    plt.plot(self.f, self.salpha, 'c-', lw=2, label='smoothed angle $\phi$')
    plt.xlabel('frequency in Hz')
    plt.ylabel('angle $\phi$ in coordinates of circle')
    plt.legend()
    # Relabel the y-axis in multiples of pi/2 without changing the limits.
    ylims = plt.axes().get_ylim()
    plt.yticks((arange(9)-4)*0.5*pi,
               ['$-2\pi$', '$-3\pi/2$', '$-\pi$', '$-\pi/2$', '$0$',
                '$\pi/2$', '$\pi$', '$3\pi/2$', '$2\pi$'])
    plt.ylim(ylims)
    plt.title('RMS offset from smooth curve: {:.4f}'.format(rmsval))
    if self.show:
        plt.show()
    else:
        plt.savefig(join(self.sdc.plotpath, 'salpha',
                         'c{}_salpha_on_{}_circle'.format(self.sdc.case, self.ZorY)
                         + self.sdc.suffix + self.sdc.outsuffix + suffix + '.png'),
                    dpi=240)
        plt.close()
def plot_cf(cf, color):
    """Plot a cf's observed series — full span in a light shade, the fitted
    span in the saturated color — plus the fitted function curve.

    NOTE(review): assumes 'light'+color is a valid matplotlib color name
    (e.g. color='blue' -> 'lightblue') — restrict callers accordingly.
    """
    t = cf.data.t
    y0 = cf.data.y0
    # Full observation span in the light shade.
    plt.plot_date(t+_adj_dates, y0, 'x', label=cf.SITE+cf.CMPT,
                  color='light'+color)
    # Span actually used by the fit, in the saturated color.
    plt.plot_date(cf.data._t+_adj_dates, cf.data._y0, 'x',
                  label=cf.SITE+cf.CMPT, color=color)
    t1 = min(t)
    t2 = max(t)
    ls = 200  # number of sample points for the fitted curve
    plot_func(cf.func, linspace(t1, t2, ls))
    plt.title(cf.SITE+'-'+cf.CMPT)
    plt.gcf().autofmt_xdate()
def main(args=sys.argv[1:]):
    """CLI entry point: plot one submachine of a TRiP98 .rst raster-scan
    file as a scatter plot (blob size and color proportional to particle
    count) and save it as a PNG."""
    # there are some cases when this script is run on systems without DISPLAY variable being set
    # in such case matplotlib backend has to be explicitly specified
    # we do it here and not in the top of the file, as interleaving imports with code lines is discouraged
    import matplotlib
    matplotlib.use('Agg')
    from pylab import plt, ylabel, grid, xlabel, array

    parser = argparse.ArgumentParser()
    parser.add_argument("rst_file", help="location of rst file in TRiP98 format", type=str)
    parser.add_argument("output_file", help="location of PNG file to save", type=str)
    parser.add_argument("-s", "--submachine", help="Select submachine to plot.", type=int, default=1)
    parser.add_argument("-f", "--factor", help="Factor for scaling the blobs. Default is 1000.", type=int, default=1000)
    parser.add_argument("-v", "--verbosity", action='count', help="increase output verbosity", default=0)
    parser.add_argument('-V', '--version', action='version', version=pt.__version__)
    args = parser.parse_args(args)

    file = args.rst_file
    sm = args.submachine
    fac = args.factor

    a = pt.Rst()
    a.read(file)

    # convert data in submachine to a nice array
    b = a.machines[sm]
    x = []
    y = []
    z = []
    for _x, _y, _z in b.raster_points:
        x.append(_x)
        y.append(_y)
        z.append(_z)

    title = "Submachine: {:d} / {:d} - Energy: {:.3f} MeV/u".format(sm, len(a.machines), b.energy)
    print(title)

    # Normalize the particle counts and scale to blob sizes.
    cc = array(z)
    cc = cc / cc.max() * fac

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(x, y, c=cc, s=cc, alpha=0.75)
    ylabel("mm")
    xlabel("mm")
    grid(True)
    plt.title(title)
    plt.savefig(args.output_file)
    plt.close()
def draw(x, y, x_text, y_text, title):
    """Plot y over x as a red line, annotate each point with its
    (x, rounded-y) value, save as a timestamped PDF and show the figure.

    x, y          -- sequences of equal length
    x_text/y_text -- axis labels
    title         -- figure title
    """
    plt.figure(figsize=(30, 5))
    plt.plot(x, y, color='red', label='data_check_result')
    # BUG FIX: the original loop started at index 1 and silently skipped the
    # first data point's annotation.
    for i in range(len(x)):
        plt.text(x[i], y[i], str((x[i], round(y[i], 4))))
    #plt.text(x,y,(x,y),color='red')
    plt.xlabel(x_text)
    plt.ylabel(y_text)
    plt.title(title)
    plt.grid(True)
    plt.legend()
    # BUG FIX: the original format was "%H_%S_%M" (hour_second_minute),
    # which produced misordered, non-sortable timestamps.
    pic = time.strftime("%Y-%m-%d_%H_%M_%S", time.localtime()) + ".pdf"
    plt.savefig(pic)
    plt.show()
def plot_overview(self,suffix=''):
    """Plot the measured impedance/admittance points together with the
    fitted circle, the characteristic-frequency rays and annotations.

    suffix -- extra string appended to the output file name.

    Shows the figure when ``self.show`` is true, otherwise saves it under
    ``self.sdc.plotpath`` and closes it.
    """
    # shorthands for the data points and the fitted circle parameters
    x=self.x; y=self.y; r=self.radius; cx,cy=self.center.real,self.center.imag
    ax=plt.axes()
    plt.scatter(x,y, marker='o', c='b', s=40)
    # coordinate axes through the origin, drawn behind the data
    plt.axhline(y=0,color='grey', zorder=-1)
    plt.axvline(x=0,color='grey', zorder=-2)
    # the fitted circle, sampled at 201 points, plus its center marker
    t=linspace(0,2*pi,201)
    circx=r*cos(t) + cx
    circy=r*sin(t) + cy
    plt.plot(circx,circy,'g-')
    plt.plot([cx],[cy],'gx',ms=12)
    # characteristic angles/frequencies differ between impedance (Z)
    # and admittance (Y) representations
    if self.ZorY == 'Z':
        philist,flist=[self.phi_a,self.phi_p,self.phi_n],[self.fa,self.fp,self.fn]
    elif self.ZorY == 'Y':
        philist,flist=[self.phi_m,self.phi_s,self.phi_r],[self.fm,self.fs,self.fr]
    # draw a ray from the origin to each characteristic point on the circle
    # and annotate it with its frequency (skipped when the fit gave None)
    for p,f in zip(philist,flist):
        if f is not None:
            xpos=cx+r*cos(p); ypos=cy+r*sin(p); xos=0.2*(xpos-cx); yos=0.2*(ypos-cy)
            plt.plot([0,xpos],[0,ypos],'co-')
            ax.annotate('{:.3f} Hz'.format(f), xy=(xpos,ypos),  xycoords='data',
                        xytext=(xpos+xos,ypos+yos), textcoords='data',
                        #textcoords='offset points',
                        arrowprops=dict(arrowstyle="->", shrinkA=0, shrinkB=10) )
    #plt.xlim(0,0.16)
    #plt.ylim(-0.1,0.1)
    plt.axis('equal')
    if self.ZorY == 'Z':
        plt.xlabel(r'resistance $R$ in Ohm'); plt.ylabel(r'reactance $X$ in Ohm')
    if self.ZorY == 'Y':
        plt.xlabel(r'conductance $G$ in Siemens'); plt.ylabel(r'susceptance $B$ in Siemens')
    plt.title("fitting the admittance circle with Powell's method")
    # text box summarizing the fit result (center, radius, residue)
    tx1='best fit (fmin_powell):\n'
    tx1+='center at G+iB = {:.5f} + i*{:.8f}\n'.format(cx,cy)
    tx1+='radius = {:.5f};  '.format(r)
    tx1+='residue: {:.2e}'.format(self.resid)
    txt1=plt.text(-r,cy-1.1*r,tx1,fontsize=8,ha='left',va='top')
    txt1.set_bbox(dict(facecolor='gray', alpha=0.25))
    # annotate a selection of raw data points with their frequencies;
    # offsets are complex numbers (real=x shift, imag=y shift)
    idxlist=self.to_be_annotated('triple')
    ofs=self.annotation_offsets(idxlist,factor=0.1,xshift=0.15)
    for i,j in enumerate(idxlist):
        xpos,ypos = x[j],y[j]; xos,yos = ofs[i].real,ofs[i].imag
        ax.annotate('{:.1f} Hz'.format(self.f[j]), xy=(xpos,ypos),  xycoords='data',
                    xytext=(xpos+xos,ypos+yos), textcoords='data',
                    #textcoords='offset points',
                    arrowprops=dict(arrowstyle="->", shrinkA=0, shrinkB=10) )
    if self.show:
        plt.show()
    else:
        plt.savefig(join(self.sdc.plotpath,'c{}_fitted_{}_circle'.format(self.sdc.case,self.ZorY)+suffix+'.png'), dpi=240)
        plt.close()
def test_dep(self):
    """Plot fault depth versus ground x and save the figure to outs_dir."""
    ground_x = arange(0, 425)
    depths = self.fm.get_dep(ground_x)
    plt.plot(ground_x, depths)
    axes = plt.gca()
    axes.set_yticks(self.fm.DEP)
    axes.set_xticks(self.fm.Y_PC)
    plt.grid('on')
    plt.title('Ground x versus depth')
    plt.xlabel('Ground X (km)')
    plt.ylabel('depth (km)')
    plt.axis('equal')
    # depth increases downwards
    axes.invert_yaxis()
    plt.savefig(join(self.outs_dir, '~Y_PC_vs_deps.png'))
    plt.close()
def plot(site):
    """Plot the east/north/up post-seismic offset series for one site and
    save the figure as <site>.png."""
    table = np.loadtxt('../post_offsets/%s.post'%site)
    # column 0 holds the epoch; shift to matplotlib plot_date numbers
    dates = dc.asmjd([row[0] for row in table]) + dc.adjust_mjd_for_plot_date
    east = [row[1] for row in table]
    north = [row[2] for row in table]
    up = [row[3] for row in table]
    plt.plot_date(dates, east, 'x-', label='eastings')
    plt.plot(dates, north, 'x-', label='northings')
    plt.plot(dates, up, 'x-', label='upings')
    plt.gcf().autofmt_xdate()
    plt.legend(loc=0)
    plt.title(site)
    plt.savefig('%s.png'%site)
    #plt.show()
    plt.close()
def plot_baseline(data, plate_name, save_folder = r'Figures/'):
    """Plot per-well baseline fluorescence readings, one row per well name.

    data        -- mapping: name -> dict with 'original_data' (iterable of
                   readings), 'mean' and 'std' (sequences; element 0 is used)
    plate_name  -- figure title
    save_folder -- directory prefix for the saved files

    Outliers (more than 2 std from the mean) are drawn as open circles;
    solid/dashed vertical bars mark the mean and mean +/- std per row.
    Saves both a PDF (via PdfPages) and a default-format figure.
    NOTE(review): Python 2 idioms (``xrange``, list-returning ``keys()``)
    — this module appears to target Python 2.
    """
    colors = ((0.2, 0.2, 0.2), (0.5, 0.5, 0.5), (0.7, 0.7, 0.7), (0.3, 0.3, 0.3))
    names = data.keys()
    names.sort()
    fig, axs = plt.subplots(figsize=(8,3))
    for index, name in enumerate(names):
        for value in data[name]['original_data']:
            # cycle through the grey palette per row
            plot_color = colors[index % len(colors)]
            if abs(value - data[name]['mean'][0]) > data[name]['std'][0] * 2.0:
                # outlier: open circle
                axs.plot([value], [index], 'ko', markerfacecolor = [1,1,1])
            else:
                axs.plot([value], [index], 'ko', color = plot_color)
        # vertical bars: solid for the mean, dashed for mean +/- std
        axs.plot([data[name]['mean'][0] for _ in xrange(2)],
                 [index-0.25, index+0.25], 'k-')
        axs.plot([data[name]['mean'][0] - data[name]['std'][0] for _ in xrange(2)],
                 [index-0.25, index+0.25], 'k--')
        axs.plot([data[name]['mean'][0] + data[name]['std'][0] for _ in xrange(2)],
                 [index-0.25, index+0.25], 'k--')
    plt.yticks([i for i in xrange(len(names))], names, size = 10)
    plt.title(plate_name)
    plt.ylim(-0.5,len(names)-0.5)
    plt.xlabel('Fluorescent intensity')
    plt.tight_layout()
    save_filename = save_folder + 'baseline_average'
    pdf = PdfPages(save_filename.split('.')[0] + '.pdf')
    pdf.savefig(fig)
    pdf.close()
    plt.savefig(save_filename)
    # return None
def plot(self):
    """Render the stability map: a bilinear image of Z-1 with the zero
    level set highlighted in white and 16 thin black contour levels."""
    # shift the map so that the stability boundary sits at level 0
    delta = [row - 1 for row in self.Z]
    figure()
    imshow(delta, interpolation='bilinear', origin='lower',
           cmap=cm.bone, extent=(self.Vzs, self.Vze, self.V1s, self.V1e))
    boundary = contour(self.X, self.Y, delta, [0], linewidths=4, colors='white')
    fine_levels = contour(self.X, self.Y, delta, 16, linewidths=1, colors='k')
    plt.clabel(fine_levels, fontsize=6, inline=1)
    plt.clabel(boundary, fontsize=9, inline=1)
    setup_params = (str(self.setup.ener), str(self.setup.V),
                    str(self.setup.alpha), str(self.setup.b))
    if self.mode == 'stab':
        plt.title('Radial stability map %s V\n %s\n alpha=%s\n b=%s' % setup_params)
    else:
        plt.title('Synchronization stability map %s V\n %s\n alpha=%s\n b=%s' % setup_params)
    xlabel('Vz (V)')
    ylabel('V1 (V)')
    show()
def convolve(arrays, melBank, genere, filter_idx):
    """Convolve every clip with the time-domain mel filter, plot each
    before/after pair, then show all filtered clips as one matrix image."""
    # the filter bank is given in the frequency domain; bring it to time
    filter_time = np.fft.ifft(melBank)
    filtered = []
    for clip in arrays:
        out = np.convolve(clip, filter_time)
        filtered.append(out)
        plotBeforeAfterFilter(clip, melBank, filter_time, out, genere, filter_idx)
    stacked = np.asmatrix(np.array(filtered))
    fig, ax = plt.subplots()
    # entries are complex, so only the real part is rendered
    ax.matshow(stacked.real)
    plt.axis('equal')
    plt.axis('tight')
    plt.title(genere)
    plt.tight_layout()
    # filename = "./figures/convolution/Convolution_"+"Filter"+str(filter_idx)+genere+".png"
    # plt.savefig(filename)
    plt.show()
def dynamic_img_show(img, title_str='', fig_size=(14, 8), hide_axes=True):
    '''Show image <img>. If called repeatedly within a cycle will dynamically redraw image.

    img       -- 2-D array to display
    title_str -- title drawn above the image
    fig_size  -- (width, height) in inches
    hide_axes -- suppress the tick marks (the original accepted this flag
                 but never consulted it)

    #DEMO
    import time
    for i in range(10):
        img = np.zeros([50,50])
        img[:i*5]=1
        dynamic_img_show(img,'iter=%s'%i)
        time.sleep(0.1)
    '''
    plt.clf()
    plt.title(title_str)
    plt.imshow(img)
    # BUG FIX: honor hide_axes (it was ignored before); default behavior
    # is unchanged.  Also: fig_size default is now an immutable tuple
    # instead of a mutable list.
    if hide_axes:
        plt.xticks([])
        plt.yticks([])
    plt.gcf().set_size_inches(fig_size)
    # IPython display: replace the previous frame in place
    display.display(plt.gcf())
    display.clear_output(wait=True)
def plotter(mode, Bc, Tc, Q):
    """Plot one Bc-vs-Tc curve per column (one per quantile) in shades of
    blue and save the figure as figures/bctc_<mode>.eps."""
    palette = ['#000080', '#0000FF', '#4169E1', '#6495ED', '#00BFFF', '#B0E0E6']
    plt.figure()
    ax = plt.subplot(111)
    for column in range(Bc.shape[1]):
        plt.plot(Tc[:, column], Bc[:, column], '-', color=palette[column])
    plt.xlabel('Tc [TW]')
    plt.ylabel('Bc normalised to total EU load')
    plt.title(str(mode) + ' flow')
    # Shrink current axis by 25% to make room for legend
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.75, box.height])
    plt.legend([str(q * 100) for q in Q],
               loc='center left', bbox_to_anchor=(1, 0.5), title='Quantiles')
    plt.savefig('figures/bctc_' + str(mode) + '.eps')
def plot_vel_decomposition(self, site, cmpt, loc=0, leg_fs=7,
                           if_ylim=False
                           ):
    """Plot the predicted velocity at a site/component decomposed into its
    contributions (total, Rco, E cumulative slip, R aslip).

    site    -- station identifier
    cmpt    -- component name (e.g. 'e'/'n'/'u')
    loc     -- legend location code
    leg_fs  -- legend font size
    if_ylim -- if true, clamp the y-range to the plotted data
    """
    # each helper plots one contribution and returns its y-values,
    # accumulated here only for the optional y-limit computation
    y = self.plot_pred_vel_added(site, cmpt, label='total')
    y += self.plot_vel_R_co(site, cmpt,
                            style='-^', label='Rco', color='orange')
    y += self.plot_vel_E_cumu_slip(site, cmpt, color='green')
    y += self.plot_vel_R_aslip(site, cmpt, color='black')

    plt.grid('on')
    if if_ylim:
        plt.ylim(calculate_lim(y))

    plt.ylabel(r'mm/yr')
    plt.legend(loc=loc, prop={'size':leg_fs})
    plt.gcf().autofmt_xdate()
    # NOTE(review): title says 'Cumulative Disp.' although this plots
    # velocity — looks copy-pasted from plot_cumu_disp_decomposition;
    # confirm intended wording before changing.
    plt.title('Cumulative Disp.: {site} - {cmpt}'.format(
        site = get_site_true_name(site_id=site),
        cmpt = cmpt
        ))
def freqz(sosmat, nsamples=44100, sample_rate=44100, plot=True):
    """Plot the frequency response of a second-order-section filter matrix.

    sosmat      -- SOS coefficients, forwarded to sosfilter_double_c
    nsamples    -- length of the excitation impulse
    sample_rate -- sampling frequency in Hz
    plot        -- draw the magnitude response when True

    Returns (x, y, f, Y): impulse, filtered impulse, frequency vector
    and the spectrum of the filtered impulse.
    """
    from pylab import np, plt, fft, fftfreq
    x = np.zeros(nsamples)
    # BUG FIX: use floor division — nsamples/2 is a float on Python 3
    # (and wrong for odd nsamples), which breaks indexing.
    x[nsamples//2] = 0.999
    y, states = sosfilter_double_c(x, sosmat)
    Y = fft(y)
    f = fftfreq(len(x), 1.0/sample_rate)
    if plot:
        plt.grid(True)
        plt.axis([0, sample_rate / 2, -100, 5])
        # magnitude in dB(FS); +1e-17 avoids log10(0)
        L = 20*np.log10(np.abs(Y[:len(x)//2]) + 1e-17)
        plt.semilogx(f[:len(x)//2], L, lw=0.5)
        # NOTE: plt.hold() was deprecated and removed in matplotlib >= 3.0;
        # successive plot calls draw on the same axes by default, so the old
        # hold(True)/hold(False) pair is dropped.
        plt.title('freqz sos filter')
        plt.xlabel('Frequency / Hz')
        plt.ylabel('Damping /dB(FS)')
        plt.xlim((10, sample_rate/2))
    return x, y, f, Y
def plot_post_disp_decomposition(self, site, cmpt, loc=2, leg_fs=7,
                                 added_label = None,
                                 marker_for_obs = 'x',
                                 ):
    """Plot postseismic displacement at a site/component: observations,
    prediction and its decomposition (Rco, E aslip).

    site           -- station identifier
    cmpt           -- component name (e.g. 'e'/'n'/'u')
    loc            -- legend location code
    leg_fs         -- legend font size
    added_label    -- label for the additional prediction curve (or None)
    marker_for_obs -- matplotlib marker used for the observations
    """
    # each helper plots one curve and returns its y-values
    y = self.plot_post_obs_linres(site,cmpt, label='obs.', marker=marker_for_obs)
    y += self.plot_post_disp_pred_from_result_file(site,cmpt, label='pred.')
    y += self.plot_R_co(site, cmpt,
                        style = '-^', label='Rco', color='orange')
    y += self.plot_E_aslip(site, cmpt, color='green')

    plt.grid('on')

    self.plot_post_disp_pred_added(site, cmpt, label=added_label)

    plt.legend(loc=loc, prop={'size':leg_fs})
    plt.ylabel(r'm')
    plt.gcf().autofmt_xdate()
    plt.title('Postseismic Disp. : {site} - {cmpt}'.format(
        site = get_site_true_name(site_id = site),
        cmpt = cmpt
        ))
def plot_cumu_disp_decomposition(self, site, cmpt, loc=2, leg_fs=7,
                                 if_ylim=False,
                                 added_label = None,
                                 ):
    """Plot cumulative displacement at a site/component: observations,
    prediction and its decomposition (Rco, E cumulative slip).

    site        -- station identifier
    cmpt        -- component name (e.g. 'e'/'n'/'u')
    loc         -- legend location code
    leg_fs      -- legend font size
    if_ylim     -- if true, clamp the y-range to the plotted data
    added_label -- label for the additional prediction curve (or None)
    """
    self.plot_cumu_obs_linres(site, cmpt)
    # each helper plots one curve and returns its y-values,
    # accumulated for the optional y-limit computation
    y = self.plot_cumu_disp_pred_from_result_file(site, cmpt, label='pred.')
    y += self.plot_R_co(site, cmpt,
                        style='-^', label='Rco', color='orange')
    y += self.plot_E_cumu_slip(site, cmpt, color='green')

    plt.grid('on')
    if if_ylim:
        plt.ylim(calculate_lim(y))

    self.plot_cumu_disp_pred_added(site, cmpt, label=added_label)

    plt.ylabel(r'm')
    plt.legend(loc=loc, prop={'size':leg_fs})
    plt.gcf().autofmt_xdate()
    plt.title('Cumulative Disp.: {site} - {cmpt}'.format(
        site = get_site_true_name(site_id=site),
        cmpt = cmpt
        ))
def load_mnist(path, filename='mnist.pkl.gz', plot=True): """ Loads the MNIST dataset. Downloads the data if it doesn't already exist. This code is adapted from the deeplearning.net tutorial on classifying MNIST data with Logistic Regression: http://deeplearning.net/tutorial/logreg.html#logreg :param path: (str) Path to where data lives or should be downloaded too :param filename: (str) name of mnist file to download or load :return: train_set, valid_set, test_set """ dataset = '{}/{}'.format(path, filename) data_dir, data_file = os.path.split(dataset) if data_dir == "" and not os.path.isfile(dataset): new_path = os.path.join(os.path.split(__file__)[0], "..", "data", dataset) if os.path.isfile(new_path) or data_file == 'mnist.pkl.gz': dataset = new_path if (not os.path.isfile(dataset)) and data_file == 'mnist.pkl.gz': import urllib origin = ('http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz') print 'Downloading data from {}'.format(origin) urllib.urlretrieve(origin, dataset) print '... loading data' f = gzip.open(dataset, 'rb') train_set, valid_set, test_set = cPickle.load(f) f.close() X_train = train_set[0] y_train = train_set[1] if plot: for k in range(25): plt.subplot(5,5,k) plt.imshow(np.reshape(X_train[k,:], (28,28))) plt.axis('off') plt.title(y_train[k]) return train_set, valid_set, test_set
def plot_variable(u, name, direc, cmap=cmaps.parula, scale='lin', numLvls=100,
                  umin=None, umax=None,
                  tp=False,
                  tpAlpha=1.0, show=False,
                  hide_ax_tick_labels=False, label_axes=True, title='',
                  use_colorbar=True, hide_axis=False, colorbar_loc='right'):
    """Filled-contour plot of a finite-element scalar field, saved as
    <direc>/<name>.eps.

    u       -- field to plot; must provide function_space() and
               compute_vertex_values() (FEniCS/dolfin-style Function —
               TODO confirm)
    name    -- basename of the saved figure
    direc   -- output directory (created if missing)
    cmap    -- colormap; e.g. gist_yarg (grey), gnuplot, hsv, gist_ncar,
               jet (typical colors)
    scale   -- 'lin', 'log' or 'bool' contour-level spacing
    numLvls -- number of contour levels
    umin/umax -- optional clipping bounds; default to data min/max
    tp      -- overlay the triangulation, with opacity tpAlpha
    show    -- display the figure on screen after saving
    """
    # extract vertex values and mesh geometry for tricontourf
    mesh = u.function_space().mesh()
    v    = u.compute_vertex_values(mesh)
    x    = mesh.coordinates()[:,0]
    y    = mesh.coordinates()[:,1]
    t    = mesh.cells()

    if not os.path.isdir( direc ):
        os.makedirs(direc)
    full_path = os.path.join(direc, name)

    # NOTE(review): prefer "is not None" over "!= None"
    if umin != None:
        vmin = umin
    else:
        vmin = v.min()
    if umax != None:
        vmax = umax
    else:
        vmax = v.max()

    # contour levels : clip data into (vmin, vmax) and build level/tick
    # arrays plus the matching tick formatter for the chosen scale
    if scale == 'log':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import LogFormatter
        levels         = np.logspace(np.log10(vmin), np.log10(vmax), numLvls)
        tick_numLvls   = min( numLvls, 8 )
        tick_levels    = np.logspace(np.log10(vmin), np.log10(vmax), tick_numLvls)
        formatter      = LogFormatter(10, labelOnlyBase=False)
        norm           = colors.LogNorm()
    elif scale == 'lin':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import ScalarFormatter
        levels         = np.linspace(vmin, vmax, numLvls)
        tick_numLvls   = min( numLvls, 8 )
        tick_levels    = np.linspace(vmin, vmax, tick_numLvls)
        formatter      = ScalarFormatter()
        norm           = None
    elif scale == 'bool':
        # two-value field; no colorbar ticks are prepared for this branch
        from matplotlib.ticker import ScalarFormatter
        levels    = [0, 1, 2]
        formatter = ScalarFormatter()
        norm      = None

    fig = plt.figure(figsize=(5,5))
    ax  = fig.add_subplot(111)
    c   = ax.tricontourf(x, y, t, v, levels=levels, norm=norm,
                         cmap=plt.get_cmap(cmap))
    plt.axis('equal')

    if tp == True:
        # overlay the mesh triangulation
        p = ax.triplot(x, y, t, '-', lw=0.2, alpha=tpAlpha)
    ax.set_xlim([x.min(), x.max()])
    ax.set_ylim([y.min(), y.max()])
    if label_axes:
        ax.set_xlabel(r'$x$')
        ax.set_ylabel(r'$y$')
    if hide_ax_tick_labels:
        ax.set_xticklabels([])
        ax.set_yticklabels([])
    if hide_axis:
        plt.axis('off')

    # include colorbar :
    if scale != 'bool' and use_colorbar:
        divider = make_axes_locatable(plt.gca())
        cax  = divider.append_axes(colorbar_loc, "5%", pad="3%")
        cbar = plt.colorbar(c, cax=cax, format=formatter,
                            ticks=tick_levels)
    tit = plt.title(title)
    if use_colorbar:
        plt.tight_layout(rect=[.03,.03,0.97,0.97])
    else:
        plt.tight_layout()
    plt.savefig( full_path + '.eps', dpi=300)
    if show:
        plt.show()
    plt.close(fig)
region_code='near') # prediction reader = vj.inv.DispResultReader(res_file) sites = reader.sites disp = reader.get_post_pred_at_epoch(epoch) mplt = vj.plots.MapPlotDisplacement(bm) mplt.plot_disp(disp.flatten(), sites, color='red', X = X, Y = Y, U=U, label=label_pred, scale = scale) # observation: reader = vj.EpochalFileReader(disp_obs_file) dips_obs = reader[epoch] - reader[0] sites_obs = [site.decode() for site in reader['sites']] mplt = vj.plots.MapPlotDisplacement(bm) mplt.plot_disp(dips_obs, sites_obs, color='black', X = X, Y = Y+0.1, U=U, label=label_obs, scale = scale) # trench: mplt = vj.plots.MapPlotSlab() mplt.plot_top() plt.title('Postseismic disp. (1100 days) pred. vs. obs.') plt.savefig('post_seismic_disp.pdf') plt.show()
ax2 = ax1.twinx() ax2.plot(np.asarray(epochs[1:])/365., vel*365,'r', label=r'vel ($yr^{-1}$)') #ax2.set_xlim([0,10]) ax2.set_ylabel(r'$yr^{-1}$') #ax2.set_ylim([-1e-6, 2e-6]) ax2.legend(loc=0) ax2.set_position(pos2) align_yaxis(ax1, 0, ax2, 0) plt.title('%s - %s'%(site, cmpt)) ##################33 ################### files = sorted(glob.glob('../outs/*'), key = lambda f : int(f.split('_')[-3])) sites = np.loadtxt('../stations.in','4a', usecols=(0,)) sites = [site.decode() for site in sites] def load(fn, site, cmpt): idx = sites.index(site) if cmpt=='e': c = 2 elif cmpt=='n':
def example(img=None,tess='I',eval_cell_idx=True,eval_v=True,show_downsampled_pts=True, valid_outside=True,base=[1,1], scale_spatial=.1, scale_value=100, permute_cell_idx_for_display=True, nLevels=3, vol_preserve=False, zero_v_across_bdry=[0,0], use_lims_when_plotting=True): show_downsampled_pts = bool(show_downsampled_pts) eval_cell_idx = bool(eval_cell_idx) eval_v = bool(eval_cell_idx) valid_outside = bool(valid_outside) permute_cell_idx_for_display = bool(permute_cell_idx_for_display) vol_preserve = bool(vol_preserve) if img is None: img = Img(get_std_test_img()) else: img=Img(img) img = img[:,:,::-1] # bgr2rgb tw = TransformWrapper(nRows=img.shape[0], nCols=img.shape[1], nLevels=nLevels, base=base, scale_spatial=scale_spatial, # controls the prior's smoothness scale_value=scale_value, # controls the prior's variance tess=tess, vol_preserve=vol_preserve, zero_v_across_bdry=zero_v_across_bdry, valid_outside=valid_outside) print tw # You probably want to do that: padding image border with zeros border_width=1 img[:border_width]=0 img[-border_width:]=0 img[:,:border_width]=0 img[:,-border_width:]=0 # The tw.calc_T_fwd (or tw.calc_T_inv) is always done in gpu. # After using it to compute new pts, # you may want to use remap (to warp an image accordingly). # If you will use tw.remap_fwd (or tw.remap_inv), which is done in gpu, # then the image type can be either float32 or float64. # But if you plan to use tw.tw.remap_fwd_opencv (or tw.remap_inv_opencv), # which is done in cpu (hence slightly lower) but supports better # interpolation methods, then the image type must be np.float32. 
# img_original = CpuGpuArray(img.copy().astype(np.float32)) img_original = CpuGpuArray(img.copy().astype(np.float64)) img_wrapped_fwd= CpuGpuArray.zeros_like(img_original) img_wrapped_bwd= CpuGpuArray.zeros_like(img_original) seed=0 np.random.seed(seed) ms_Avees=tw.get_zeros_PA_all_levels() ms_theta=tw.get_zeros_theta_all_levels() for level in range(tw.ms.nLevels): if level==0: tw.sample_gaussian(level,ms_Avees[level],ms_theta[level],mu=None)# zero mean else: tw.sample_from_the_ms_prior_coarse2fine_one_level(ms_Avees,ms_theta, level_fine=level) print('\nimg shape: {}\n'.format(img_original.shape)) # You don't have use these. You can use any 2d array # that has two columns (regardless of the number of rows). pts_src = tw.pts_src_dense # Create buffers for the output pts_fwd = CpuGpuArray.zeros_like(pts_src) pts_inv = CpuGpuArray.zeros_like(pts_src) for level in range(tw.ms.nLevels): ####################################################################### # instead of the tw.sample_from_the_ms_prior() above, # you may want to use one of the following. # 1) # tw.sample_gaussian(level,ms_Avees[level],ms_theta[level],mu=None)# zero mean # 2) # tw.sample_gaussian(level,ms_Avees[level],ms_theta[level],mu=some_user_specified_mu) # The following should be used only for level>0 : # 3) # tw.sample_normal_in_one_level_using_the_coarser_as_mean(Avees_coarse=ms_Avees[level-1], # Avees_fine=ms_Avees[level], # theta_fine=ms_theta[level], # level_fine=level) # ####################################################################### # You can also change the values this way: # cpa_space = tw.ms.L_cpa_space[level] # theta = cpa_space.get_zeros_theta() # theta[:] = some values # Avees = cpa_space.get_zeros_PA() # cpa_space.theta2Avees(theta,Avees) # cpa_space.update_pat(Avees) # This step is important and must be done # before are trying to "use" the new values of # the (vectorized) A's. tw.update_pat_from_Avees(ms_Avees[level],level) if eval_v: # Evaluating the velocity field. 
# You don't have to do it in unless you want to visualize v. # (when evaluting the treansformation, v will be internally # evaluated anyway -- but its result won't be stored) tw.calc_v(level=level) # optional, if you want to time it timer_gpu_T_fwd = GpuTimer() # Simply calling # tic = time.clock() # and then # tic = time.clock() # won't work. # In fact, most likely you will get that toc-tic is zero. # You need to use the GpuTimer object. When you do that, # one side effect is that suddenly the toc-tic from above will # give you a more realistic result. tic = time.clock() timer_gpu_T_fwd.tic() tw.calc_T_fwd(pts_src,pts_fwd,level=level) timer_gpu_T_fwd.toc() toc = time.clock() print 'Time, in sec, for computing T_fwd:' print timer_gpu_T_fwd.secs print toc-tic # likely to be 0, unless you also used the GpuTimer. # You can also time the inv of course. Results will be similar. tw.calc_T_inv(pts_src,pts_inv,level=level) if eval_cell_idx: # cell_idx is computed here just for display. cell_idx = CpuGpuArray.zeros(len(pts_src),dtype=np.int32) tw.calc_cell_idx(pts_src,cell_idx,level, permute_for_disp=permute_cell_idx_for_display) # If may also want ro to time the remap. # However, the remap is usually very fast (e.g, about 2 milisec). # timer_gpu_remap_fwd = GpuTimer() # tic = time.clock() # timer_gpu_remap_fwd.tic() # tw.remap_fwd(pts_inv=pts_inv,img=img_original,img_wrapped_fwd=img_wrapped_fwd) tw.remap_fwd(pts_inv=pts_inv,img=img_original,img_wrapped_fwd=img_wrapped_fwd) # timer_gpu_remap_fwd.toc() # toc = time.clock() # If the img type is np.float32, you may also use # tw.remap_fwd_opencv instead of tw.remap_fw. 
The differences between # the two methods are explained above tw.remap_inv(pts_fwd=pts_fwd,img=img_original,img_wrapped_inv=img_wrapped_bwd) # For display purposes, do gpu2cpu transfer print ("For display purposes, do gpu2cpu transfer") if eval_cell_idx: cell_idx.gpu2cpu() if eval_v: tw.v_dense.gpu2cpu() pts_fwd.gpu2cpu() pts_inv.gpu2cpu() img_wrapped_fwd.gpu2cpu() img_wrapped_bwd.gpu2cpu() figsize = (12,12) plt.figure(figsize=figsize) if eval_v: plt.subplot(332) tw.imshow_vx() plt.title('vx') plt.subplot(333) tw.imshow_vy() plt.title('vy') if eval_cell_idx: plt.subplot(331) cell_idx_disp = cell_idx.cpu.reshape(img.shape[0],-1) plt.imshow(cell_idx_disp) plt.title('tess (type {})'.format(tess)) if show_downsampled_pts: ds=20 pts_src_grid = pts_src.cpu.reshape(tw.nRows,-1,2) pts_src_ds=pts_src_grid[::ds,::ds].reshape(-1,2) pts_fwd_grid = pts_fwd.cpu.reshape(tw.nRows,-1,2) pts_fwd_ds=pts_fwd_grid[::ds,::ds].reshape(-1,2) pts_inv_grid = pts_inv.cpu.reshape(tw.nRows,-1,2) pts_inv_ds=pts_inv_grid[::ds,::ds].reshape(-1,2) use_lims=use_lims_when_plotting # return tw plt.subplot(334) plt.plot(pts_src_ds[:,0],pts_src_ds[:,1],'r.') plt.title('pts ds') tw.config_plt() plt.subplot(335) plt.plot(pts_fwd_ds[:,0],pts_fwd_ds[:,1],'g.') plt.title('fwd(pts)') tw.config_plt(axis_on_or_off='on',use_lims=use_lims) plt.subplot(336) plt.plot(pts_inv_ds[:,0],pts_inv_ds[:,1],'b.') plt.title('inv(pts)') tw.config_plt(axis_on_or_off='on',use_lims=use_lims) plt.subplot(337) plt.imshow(img_original.cpu.astype(np.uint8)) plt.title('img') # plt.axis('off') plt.subplot(338) plt.imshow(img_wrapped_fwd.cpu.astype(np.uint8)) # plt.axis('off') plt.title('fwd(img)') plt.subplot(339) plt.imshow(img_wrapped_bwd.cpu.astype(np.uint8)) # plt.axis('off') plt.title('inv(img)') return tw
range_start=y.min() range_end=y.max() # Add noise y += 0.4*np.random.standard_normal(y.shape) if 1: plt.figure(0) of.plt.set_figure_size_and_location(1000,0,1000,500) plt.clf() plt.subplot(121) plt.cla() plt.plot(x,y,'.',lw=3) plt.title('data') ax = plt.gca() ax.tick_params(axis='y', labelsize=50) ax.tick_params(axis='x', labelsize=30) nPtsDense = 10000 mr = MonotonicRegression(base=[12],nLevels=4) mr.set_dense(domain_start=-10,domain_end=10) mr.set_data(x=x,y=y,range_start=range_start,range_end=range_end) print mr