def make_plotI(self):
    # retrieve data
    D = self.D
    kmap = {}
    kmap['AV18']   = {'c': 'r', 'ls': '-'}
    kmap['CDBONN'] = {'c': 'g', 'ls': '--'}
    kmap['WJC1']   = {'c': 'k', 'ls': '-.'}
    kmap['WJC2']   = {'c': 'b', 'ls': ':'}
    ax = py.subplot(111)
    for k in ['AV18', 'CDBONN', 'WJC1', 'WJC2']:
        DF = D[k]
        DF = DF[DF.Q2 == 10]
        if k == 'CDBONN':
            label = 'CDBonn'
        else:
            label = k
        cls = kmap[k]['c'] + kmap[k]['ls']
        ax.plot(DF.X, DF.THEORY, cls, lw=2.0, label=tex(label))
    ax.set_xlabel('$x$', size=25)
    ax.set_ylabel(r'$F_2^d\, /\, F_2^N$', size=25)
    ax.set_ylim(0.97, 1.08)
    ax.axhline(1, color='k', ls='-', alpha=0.2)
    ax.legend(frameon=0, loc=2, fontsize=22)
    py.tick_params(axis='both', labelsize=22)
    py.tight_layout()
    py.savefig('gallery/F2d_F2_I.pdf')
    py.close()
def plotBar(data=None, color_id=None, figure_id=None, name=None, flag=False):
    ax = pl.subplot(figure_id)
    width = 0.8
    x = sp.arange(7)
    if not (name == "VaribenchSelected"):
        pl.bar(x - 0.4, data, width=width, color=color_t[color_id], hatch="/o/o/")
    else:
        pl.bar(x - 0.4, data, width=width, color=color_t[color_id], hatch="ooo")
    tmp = data.copy()
    tmp[1::] = 0
    pl.xticks(x, ['All', 'Pure', ']0.0,1.0[', '[0.1,0.9]', '[0.2,0.8]', '[0.3,0.7]', '[0.4,0.6]'],
              fontsize=font_size, rotation=90)
    ln = sp.log10(len(name))
    pl.text(3.5 - ln, 0.95, name)
    if flag:
        remove_border(left=False)
        pl.yticks([0.5, 0.6, 0.7, 0.8, 0.9, 1.0])
        pl.grid(axis='y')
        pl.tick_params(axis='y', which="both", labelleft='off', left='off')
    else:
        pl.ylabel("AUC")
        remove_border()
        pl.yticks([0.5, 0.6, 0.7, 0.8, 0.9, 1.0])
        pl.grid(axis='y')
    pl.ylim(0.5, 1)
    pl.xlim(-0.5, 7.5)
    return ax
def fixup_innerprod(current_data):
    import pylab
    size = 28
    addgauges(current_data)
    pylab.title('Inner Product', fontsize=size)
    pylab.xticks(fontsize=size)
    pylab.tick_params(axis='y', labelleft='off')
def make_plotII(self):
    # retrieve data
    D = self.D
    kmap = {}
    kmap['Q2 = 2']   = {'c': 'r', 'ls': '-'}
    kmap['Q2 = 5']   = {'c': 'g', 'ls': '--'}
    kmap['Q2 = 10']  = {'c': 'b', 'ls': '-.'}
    kmap['Q2 = 100'] = {'c': 'k', 'ls': ':'}
    ax = py.subplot(111)
    DF = D['AV18']
    for Q2 in [2, 5, 10, 100]:
        k = 'Q2 = %d' % Q2
        Q2 = float(k.split('=')[1])
        DF = D['AV18'][D['AV18'].Q2 == Q2]
        cls = kmap[k]['c'] + kmap[k]['ls']
        ax.plot(DF.X, DF.THEORY, cls, lw=2.0, label=r'$Q^2=%0.0f~{\rm GeV}^2$' % Q2)
    ax.set_xlabel('$x$', size=25)
    ax.set_ylabel(r'$F_2^d\, /\, F_2^N$', size=25)
    ax.set_ylim(0.97, 1.08)
    ax.axhline(1, color='k', ls='-', alpha=0.2)
    ax.legend(frameon=0, loc=2, fontsize=22)
    py.tick_params(axis='both', labelsize=22)
    py.tight_layout()
    py.savefig('gallery/F2d_F2_II.pdf')
def chart(idx, a, b, label, FILE):
    pylab.ioff()
    fig_width_pt = 350                        # Get this from LaTeX using \showthe\columnwidth
    inches_per_pt = 1.0 / 72.27               # Convert pt to inch
    golden_mean = ((5**0.5) - 1.0) / 2.0      # Aesthetic ratio
    fig_width = fig_width_pt * inches_per_pt  # width in inches
    fig_height = fig_width * golden_mean      # height in inches
    fig_size = [fig_width * 0.42, fig_height]
    params = {
        'backend': 'ps',
        'axes.labelsize': 10,
        'text.fontsize': 10,
        'legend.fontsize': 10,
        'xtick.labelsize': 8,
        'ytick.labelsize': 8,
        'text.usetex': True,
        'figure.figsize': fig_size
    }
    pylab.rcParams.update(params)
    home = '/home/nealbob'
    folder = '/Dropbox/Thesis/IMG/chapter3/'
    img_ext = '.pdf'
    pylab.figure()
    pylab.boxplot(idx, whis=100)
    pylab.ylim(a, b)
    #pylab.ylabel(label)
    pylab.tick_params(axis='x', which='both', labelbottom='off')
    pylab.savefig(home + folder + FILE + img_ext)
    pylab.show()
def plot_data(comp, c='b'):
    """utility function to make the Kantrowitz Limit Plot"""
    MN = []
    W_tube = []
    W_kant = []
    for m in np.arange(.1, 1.1, .1):
        comp.Mach_pod = m
        comp.run()
        #print comp.radius_tube, comp.Mach_pod, comp.W_tube, comp.W_kant, comp.W_excess
        MN.append(m)
        W_kant.append(comp.W_kant)
        W_tube.append(comp.W_tube)
    fig = p.plot(MN, W_tube, '-',
                 label="%3.1f Req." % (comp._tube_area / comp._inlet_area), lw=3, c=c)
    p.plot(MN, W_kant, '--',
           label="%3.1f Limit" % (comp._tube_area / comp._inlet_area), lw=3, c=c)
    #p.legend(loc="best")
    p.tick_params(axis='both', which='major', labelsize=15)
    p.xlabel('Pod Mach Number', fontsize=18)
    p.ylabel('Flow Rate (kg/sec)', fontsize=18)
    p.title('Tube Flow Limits for Three Area Ratios', fontsize=20)
    return fig
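# --- Hedged usage sketch (not from the original source) ---
# plot_data() only needs an object exposing Mach_pod, run(), W_tube, W_kant,
# _tube_area and _inlet_area, with `np` bound to numpy and `p` to pylab.
# The component name below is a made-up placeholder for whatever cycle model
# the surrounding code actually provides:
#
#   comp = KantrowitzLimitComp()              # hypothetical component instance
#   for ratio, color in [(1.5, 'b'), (2.0, 'g'), (2.5, 'r')]:
#       comp._tube_area = ratio * comp._inlet_area
#       plot_data(comp, c=color)              # one Req./Limit curve pair per area ratio
#   p.legend(loc='best')
#   p.show()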
def plot(self, ylog10scale=False, timescale="years", year=25):
    """ Generate figure and axis for the population structure.
        timescale: choose from "2N0", "4N0", "generation" or "years"
    """
    time = self.Time
    pop = self.pop
    for i in range(1, len(self.pop)):
        if type(pop[i]) == type(""):
            # ignore migration commands, and replace by (unchanged) pop size
            pop[i] = pop[i - 1]
    if time[0] != 0:
        time.insert(0, float(0))
        pop.insert(0, float(1))
    if timescale == "years":
        time = [ti * 4 * self.scaling_N0 * year for ti in time]
        pl.xlabel("Time (years, " + str(year) + " years per generation)", fontsize=20)
        #pl.xlabel("Years")
    elif timescale == "generation":
        time = [ti * 4 * self.scaling_N0 for ti in time]
        pl.xlabel("Generations")
    elif timescale == "4N0":
        time = [ti * 1 for ti in time]
        pl.xlabel("Time (4N generations)")
    elif timescale == "2N0":
        time = [ti * 2 for ti in time]
        pl.xlabel("Time (2N generations)")
    else:
        print("timescale must be one of \"4N0\", \"generation\", or \"years\"")
        return
    time[0] = time[1] / float(20)
    time.append(time[-1] * 2)
    yaxis_scaler = 10000
    pop = [popi * self.scaling_N0 / float(yaxis_scaler) for popi in pop]
    pop.insert(0, pop[0])
    pl.xscale('log', basex=10)
    #pl.xlim(min(time), max(time))
    pl.xlim(1e3, 1e7)
    if ylog10scale:
        pl.ylim(0.06, 10000)
        pl.yscale('log', basey=10)
    else:
        pl.ylim(0, max(pop) + 2)
        pl.ylim(0, 5)
    pl.tick_params(labelsize=20)
    #pl.step(time, pop, color="blue", linewidth=5.0)
    pl.step(time, pop, color="red", linewidth=5.0)
    pl.grid()
    #pl.step(time, pop, color="black", linewidth=5.0)
    #pl.title(self.case + " population structure")
    #pl.ylabel("Pop size ($*$ " + str(yaxis_scaler) + ")")
    pl.ylabel("Effective population size", fontsize=20)
def createBasisGraph(self, data):
    plt.figure(self.nFig)
    plt.suptitle('Basis')
    nBase = data.shape[1]
    # Number of subplot columns
    nSubCols = nBase // 10
    if nSubCols > 0:
        if nBase % 2 == 0:
            nSubRows = nBase // nSubCols
        else:
            nSubRows = nBase // nSubCols + 1
    else:
        nSubRows = nBase
        nSubCols = 1
    # freqList = np.fft.fftfreq(513, d = 1.0 / 44100)
    for i in range(nBase):
        nowFig = self.nFig + (i // nSubRows) + 1
        # Graph indices start at 1, so subplot i uses index i + 1.
        plt.subplot(nSubRows, nSubCols, i + 1)
        plt.tick_params(labelleft='off', labelbottom='off')
        # FIXME
        #plt.ylabel(self.st5[i % 12] + str(i // 12 + 1))
        plt.ylabel(str(i))
        plt.plot(data[:, i])
    # The label should only appear at the bottom, so xlabel is set after the loop.
    plt.tick_params(labelleft='off', labelbottom='on')
    plt.xlabel('frequency [Hz]')
    #self.nFig += nowFig
    self.nFig += 1
def GraphMaker(self, name, c, d, e):
    """Produces graphs based on the user entered equations in the Result Window"""
    model = name.rstrip("']")
    model = model.lstrip("'[")
    model = str(model)
    name = ''
    for i in xrange(len(model)):
        if model[i] == '_':
            name += ' '
        else:
            name += model[i]
    __location__ = os.path.dirname(sys.argv[0])
    loc = os.path.join(__location__, 'Data/')
    Params = ["w_0", "w_a", "w_p", "w_DE", "Omega_M", "Omega_DE", "x1", "x2",
              "y1", "y2", "c1", "c2", "u", "Lambda_1", "Lambda_2", "n"]
    for entry in xrange(len(Params)):
        exec(Params[entry] + " = np.genfromtxt(loc+str(model)+'.dat', usecols = "
             + str(entry) + ", skip_header = 1)")
    A = eval(c)
    B = eval(d)
    py.figure(int(e))
    py.plot(A, B, 'b.')
    py.xlabel(c, fontsize=55)
    py.ylabel(d, fontsize=55)
    py.title('Results from ' + str(name), fontsize=55)
    py.tick_params(labelsize=35, size=15, width=5, top=0, right=0)
    py.show()
def histplot(self, extradataA = [], extradataG = [], intensity = []): pylab.figure(figsize = (25,8)) cat = ['NT, 500ng/mL DOX', 'DLG siRNA, 500ng/mL DOX', 'NuMA siRNA, 500ng/mL DOX', 'NT, 1ug/mL DOX'] pops = [] for i in xrange(3): pylab.subplot(1,3,i+1) pop = self.angles[(self.categories == i)]# & (self.GFP > -np.log(12.5))]# & (intensity == 'r')] print "cat {0}, pop {1}, pop + GFP {2}".format(i, len(self.angles[self.categories == i]), len(pop)) pops.append(pop) hist, binedges = np.histogram(pop, bins = 18) pylab.tick_params(axis='both', which='major', labelsize=25) pylab.plot(binedges[:-1], np.cumsum(hist)/1./len(pop), data.colors[i], label = data.cat[i], linewidth = 4) if len(extradataA) > i: print extradataA[i] h, bins = np.histogram(extradataA[i], bins= 18) hbis = h/1./len(extradataA[i]) x, y = [], [] for index in xrange(len(hbis)): x.extend([bins[index], bins[index+1]]) y.extend([hbis[index], hbis[index]]) print x, y, len(x) pylab.tick_params(axis='both', which='major', labelsize=25) pylab.plot(bins[:-1], np.cumsum(h)/1./len(extradataA[i]), 'k', linewidth = 4) pylab.xlabel("Angle (degre)", fontsize = 25) #pylab.title(cat[i]) pylab.ylim([0., 1.2]) pylab.legend(loc = 2, prop = {'size' : 20}) for ip, p in enumerate(pops): for ip2, p2 in enumerate(pops): ksstat, kspval = scipy.stats.ks_2samp(p2, p) print "#### cat{0} & cat{3} : ks Stat {1}, pvalue {2}".format(ip, ksstat, kspval, ip2) pylab.show()
def plotAgainstGFP(self, extradataA = [], extradataG = [], intensity = [], seq = []): fig1 = pylab.figure(figsize = (25, 10)) print len(self.GFP) for i in xrange(min(len(data.cat), 3)): print len(self.GFP[self.categories == i]) vect = [] pylab.subplot(1,3,i+1) #pylab.hist(self.GFP[self.categories == i], bins = 20, color = data.colors[i]) pop = self.GFP[self.categories == i] pylab.plot(self.GFP[self.categories == i], self.angles[self.categories == i], data.colors[i]+'o', markersize = 8)#, label = data.cat[i]) print "cat", i, "n pop", len(self.GFP[(self.categories == i) & (self.GFP > -np.log(12.5))]) x = np.linspace(np.min(self.GFP[self.categories == i]), np.percentile(self.GFP[self.categories == i], 80),40) #fig1.canvas.mpl_connect('pick_event', onpick) for j in x: vect.append(np.median(self.angles[(self.GFP > j) & (self.categories == i)])) pylab.plot([-4.5, -0.5], [vect[0], vect[0]], data.colors[i], label = "mediane de la population entiere", linewidth = 5) print vect[0], vect[np.argmax(x > -np.log(12.5))] pylab.plot([-np.log(12.5), -0.5], [vect[np.argmax(x > -np.log(12.5))] for k in [0,1]], data.colors[i], label = "mediane de la population de droite", linewidth = 5, ls = '--') pylab.axvline(x = -np.log(12.5), color = 'm', ls = '--', linewidth = 3) pylab.xlim([-4.5, -0.5]) pylab.legend(loc = 2, prop = {'size':17}) pylab.title(data.cat[i].split(',')[0], fontsize = 24) pylab.xlabel('score GFP', fontsize = 20) pylab.ylabel('Angle (degre)', fontsize = 20) pylab.tick_params(axis='both', which='major', labelsize=20) pylab.ylim([-5, 105]) ##pylab.xscale('log') pylab.show()
def chart(SW, a, b, label, folder, FILE):
    pylab.ioff()
    fig_width_pt = 350                        # Get this from LaTeX using \showthe\columnwidth
    inches_per_pt = 1.0 / 72.27               # Convert pt to inch
    golden_mean = ((5**0.5) - 1.0) / 2.0      # Aesthetic ratio
    fig_width = fig_width_pt * inches_per_pt  # width in inches
    fig_height = fig_width * golden_mean      # height in inches
    fig_size = [fig_width, fig_height]
    params = {
        'backend': 'ps',
        'axes.labelsize': 10,
        'text.fontsize': 10,
        'legend.fontsize': 10,
        'xtick.labelsize': 8,
        'ytick.labelsize': 8,
        'text.usetex': True,
        'figure.figsize': fig_size
    }
    pylab.rcParams.update(params)
    home = '/home/nealbob'
    img_ext = '.pdf'
    pylab.figure()
    pylab.boxplot([SW['SWA'], SW['OA'], SW['NS']], whis=5)
    pylab.axhline(y=1.0, color='0.5', linewidth=0.5, alpha=0.75, linestyle=':')
    pylab.ylim(a, b)
    pylab.ylabel(label)
    pylab.tick_params(axis='x', which='both', labelbottom='off')
    pylab.figtext(0.225, 0.06, 'SWA', fontsize=10)
    pylab.figtext(0.495, 0.06, 'OA', fontsize=10)
    pylab.figtext(0.76, 0.06, 'NS', fontsize=10)
    pylab.savefig(home + folder + FILE + img_ext)
    pylab.show()
def createCoefGraph(data, nFig, lim, ymin):
    plt.figure(nFig)
    plt.suptitle('Coef')
    nBase = data.shape[0]
    nSubCols = nBase // 10
    if nSubCols > 0:
        nSubRows = nBase // nSubCols
    else:
        nSubRows = nBase
        nSubCols = 1
    # print data.shape
    # The formula below must be adapted to the sampling frequency and the shift size.
    timeLine = [i * 1024 / 8000.0 for i in range(data.shape[1])]
    # print len(timeLine)
    for i in range(nBase):
        plt.subplot(nSubRows, nSubCols, i + 1)
        plt.tick_params(labelleft='off', labelbottom='off')
        # FIXME: Arguments of X
        # plt.plot(timeLine, data[i,:])
        if lim:
            plt.ylim(ymin=ymin)
        plt.plot(timeLine, data[i, :])
    # The label should only appear at the bottom, so xlabel is set after the loop.
    plt.tick_params(labelleft='off', labelbottom="on")
    plt.xlabel('time [ms]')
def plot_data(p, c='b'):
    '''utility function to make the Kantrowitz Limit Plot'''
    Machs = []
    W_tube = []
    W_kant = []
    for Mach in np.arange(.2, 1.1, .1):
        p['comp.Mach'] = Mach
        p.run()
        Machs.append(Mach)
        W_kant.append(p['comp.W_kant'])
        W_tube.append(p['comp.W_tube'])
    print('Area in:', p['comp.inlet.area_out'])
    fig = pylab.plot(Machs, W_tube, '-',
                     label="%3.1f Req." % (p['comp.tube_area'] / p['comp.inlet.area_out']),
                     lw=3, c=c)
    pylab.plot(Machs, W_kant, '--',
               label="%3.1f Limit" % (p['comp.tube_area'] / p['comp.inlet.area_out']),
               lw=3, c=c)
    pylab.tick_params(axis='both', which='major', labelsize=15)
    pylab.xlabel('Pod Mach Number', fontsize=18)
    pylab.ylabel('Flow Rate (kg/sec)', fontsize=18)
    pylab.title('Tube Flow Limits for Three Area Ratios', fontsize=20)
    return fig
def plots_of_lp_event_exchanges():
    pylab.title('Remote Events Sent Between LPs')
    data = np.loadtxt("analysisData/eventsExchanged-remote.csv", dtype=np.float_,
                      delimiter=",", skiprows=2, usecols=(2, 3, 4, 5))
    outFile = outDir + 'countsOfLpToLpEventExchanges'
    pylab.plot(data[data[:, 0].argsort()][:, 0].astype(np.intc))
    # pylab.xlabel('Number of Events')
    pylab.tick_params(axis='x', labelbottom='off')
    pylab.ylabel('Number of Events Sent')
    display_graph(outFile)

    pylab.title('Timestamp Deltas of Remote Events')
    outFile = outDir + 'timeStampDeltasOfRemoteEvents'
    stride = max(int(max(len(data[:, 1]), len(data[:, 2]), len(data[:, 3])) / 20), 1)
    pylab.plot(data[data[:, 1].argsort()][:, 1], color=colors[0], label="Minimum",
               marker='o', markevery=stride)
    pylab.plot(data[data[:, 3].argsort()][:, 3], color=colors[1], label="Average",
               marker='x', markevery=stride)
    # pylab.plot(data[data[:, 2].argsort()][:, 2], color=colors[2], label="Maximum",
    #            marker='*', markevery=stride)
    pylab.tick_params(axis='x', labelbottom='off')
    pylab.ylabel('Timestamp Delta (ReceiveTime - SendTime)')
    pylab.ylim([-.1, np.amax(data[:, 3].astype(np.intc)) + 1])
    # pylab.yscale('log')
    pylab.legend(loc='best')
    display_graph(outFile)

    pylab.title('Histogram of Timestamp Deltas of Remote Events')
    outFile = outDir + 'timeStampDeltasOfRemoteEvents-hist'
    pylab.hist((data[:, 1], data[:, 3], data[:, 2]),
               label=('Minimum', 'Average', 'Maximum'),
               color=(colors[0], colors[1], colors[2]), bins=10)
    pylab.xlabel('Timestamp Delta (ReceiveTime - SendTime)')
    pylab.ylabel('Number of LPs')
    pylab.legend(loc='best')
    display_graph(outFile)
    return
def profile_of_local_events_exec_by_lp():
    pylab.title('Locally Generated Events')
    outFile = outDir + 'percentOfExecutedEventsThatAreLocal'
    data = np.loadtxt("analysisData/eventsExecutedByLP.csv", dtype=np.intc,
                      delimiter=",", skiprows=2, usecols=(1, 2, 3))
    x_index = np.arange(len(data))
    pylab.plot(x_index, sorted(percent_of_LP_events_that_are_local(data)))
    pylab.xlabel('LPs (sorted by percent local)')
    pylab.tick_params(axis='x', labelbottom='off')
    pylab.ylabel('Percent of Total Executed (Ave=%.2f%%)'
                 % np.mean(percent_of_LP_events_that_are_local(data)))
    pylab.ylim((0, 100))
    # fill the area below the line
    ax = pylab.gca()
    # ax.fill_between(x_index, sorted(percent_of_LP_events_that_are_local(data)), 0, facecolor=colors[0])
    ax.get_yaxis().set_major_formatter(mpl.ticker.FormatStrFormatter('%.1f%%'))
    display_graph(outFile)

    pylab.title('Locally Generated Events Executed')
    outFile = outDir + 'percentOfExecutedEventsThatAreLocal-histogram'
    pylab.hist(sorted(percent_of_LP_events_that_are_local(data)))
    ax = pylab.gca()
    ax.get_xaxis().set_major_formatter(mpl.ticker.FormatStrFormatter('%.1f%%'))
    pylab.xlabel('Percent of Local Events Executed')
    pylab.ylabel('Number of LPs (Total=%s)' % "{:,}".format(total_lps))
    display_graph(outFile)
    return
def plot_currents(self, plot_which):
    global figures
    if plot_which == "all":
        plot_which = [i for i in range(1, self.instance["no_feeders"] + 1)]
    time_scale = [datetime.datetime(2000, 1, 1, 0) + datetime.timedelta(minutes=i)
                  for i in range(1440)]
    plt.figure(figures)
    figures += 1
    lines_to_plot = len(plot_which)
    for i in range(lines_to_plot):
        CR = np.array(self.extract_from_csv(plot_which[i], self.instance["iteration"]))
        CR = np.reshape(CR, (-1, 3))
        plt.subplot(lines_to_plot, 1, 1 + i)
        if (i + 1) != lines_to_plot:
            plt.plot(time_scale, CR)
            plt.tick_params(axis='x', which='both', bottom='off', top='off', labelbottom='off')
        else:
            plt.plot(time_scale, CR)
    plt.subplot(lines_to_plot, 1, 1)
    plt.title("Current at the head of the Feeders")
def embedSystemtsne(embedding):
    pl.figure()
    pl.scatter(embedding[:, 0], embedding[:, 1], c=range(len(embedding[:, 0])), linewidths=0)
    pl.title('t-distributed stochastic neighbor embedding of observed system states')
    pl.tick_params(labelleft='off', labelbottom='off')
    pl.colorbar().set_label('Time (ms)')
    pl.show()
def embedIndividualstsne(embedding):
    pl.figure()
    pl.scatter(embedding[:, 0], embedding[:, 1], c=d, linewidths=0)
    pl.title('t-distributed stochastic neighbor embedding of all individual neurons')
    pl.tick_params(labelleft='off', labelbottom='off')
    pl.colorbar().set_label('Parameter d (after-spike increment value of recovery variable u)')
    pl.show()
def fixup_adjoint(current_data):
    import pylab
    size = 36
    addgauges(current_data)
    pylab.title('Adjoint Pressure', fontsize=size)
    pylab.xticks([-2, 0, 2, 4, 6], fontsize=size)
    pylab.tick_params(axis='y', labelleft='off')
def clusterHeatmap(df, title, row_label_map, col_label_map, colormap=my_cmap, cluster_rows=False, cluster_columns=False, cluster_data=None, row_dendrogram=False, column_dendrogram=False, width=30, height=20, vmin=-3, vmax=3, distmethod="correlation", colorbar=True, colorbar_shrink=0.2, label_values=False): cm = pylab.get_cmap(colormap) cm.set_bad("0.9") # do clustering if cluster_data is None: cluster_data = df # cluster the same data that we are plotting matplotlib.rcParams['figure.figsize'] = [width, height] # pylab.figsize(20, 10) pylab.title(title) # pylab.text(0,-5,str(datetime.date.today())) # ylabels = [genesym[geneid] for geneid in pt.axes[0][Z['leaves']]] # xlabels = pt.axes[1][cZ['leaves']] orderedVal = df if cluster_rows: distances = scipy.cluster.hierarchy.distance.pdist(cluster_data.values, distmethod) rowY = fastcluster.linkage(distances) rowZ = scipy.cluster.hierarchy.dendrogram(rowY, orientation='right', no_plot=True) orderedVal = df.reindex(index=df.axes[0][rowZ['leaves']]) if cluster_columns: coldist = scipy.cluster.hierarchy.distance.pdist(df.values.transpose(), distmethod) cY = scipy.cluster.hierarchy.linkage(coldist) cZ = scipy.cluster.hierarchy.dendrogram(cY, no_plot=True) orderedVal = orderedVal.reindex(columns=df.axes[1][cZ['leaves']]) # row labels if row_label_map is not None: pylab.yticks(range(0, len(orderedVal.index)), [row_label_map[i] for i in orderedVal.index]) else: pylab.yticks(range(0, len(orderedVal.index)), orderedVal.index) pylab.xticks(range(0, len(orderedVal.columns)), orderedVal.columns, rotation=90) if col_label_map is not None: pylab.xticks(range(0, len(orderedVal.columns)), [col_label_map[i] for i in orderedVal.columns]) if label_values: cmatrix = orderedVal.as_matrix() for x in range(cmatrix.shape[0]): for y in range(cmatrix.shape[1]): if cmatrix[x, y] >= 0: pylab.text(y, x, "%.1f" % cmatrix[x,y], horizontalalignment='center', verticalalignment='center') #orderedVal = orderedVal[:,] pylab.tick_params(direction="out") pylab.imshow(orderedVal, interpolation="nearest", cmap=cm, aspect='auto', norm=None, vmin=vmin, vmax=vmax) if colorbar: pylab.colorbar(shrink=colorbar_shrink)
def fixup_innerprod(current_data):
    import pylab
    size = 28
    addgauges(current_data)
    pylab.title("Inner Product", fontsize=size)
    pylab.xticks(fontsize=size)
    pylab.tick_params(axis="y", labelleft="off")
def aa_innerprod(current_data):
    from pylab import ticklabel_format, xticks, gca, cos, pi, yticks
    plotcc(current_data)
    title_innerproduct(current_data)
    ticklabel_format(format='plain', useOffset=False)
    xticks([180, 200, 220, 240], rotation=20, fontsize=28)
    pylab.tick_params(axis='y', labelleft='off')
    a = gca()
    a.set_aspect(1. / cos(41.75 * pi / 180.))
def plot(data_x,data_y,title = "", lim = True): "Helper function to plot results of koch curve" py.title(title) py.plot(data_x,data_y,'r',lw=2) py.tick_params(axis='both', which='both', bottom=0, top=0, left=0, right=0, labelbottom=0, labelleft =0) if lim: py.xlim([0,3]) py.ylim([0,1])
def _setup_grid_and_axes(label_x, label_y):
    grid(True)
    # Set axes labels
    xlabel(label_x, fontsize=AXIS_LABEL_SIZE)
    ylabel(label_y, fontsize=AXIS_LABEL_SIZE)
    # Set the axis ticks
    tick_params(axis='both', which='major', labelsize=TICKS_LABEL_SIZE)
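# --- Hedged usage sketch (not from the original source) ---
# _setup_grid_and_axes() relies on a star import (e.g. `from pylab import *`) and on
# module-level AXIS_LABEL_SIZE / TICKS_LABEL_SIZE constants; the values below are
# assumptions for illustration only:
#
#   AXIS_LABEL_SIZE = 14
#   TICKS_LABEL_SIZE = 10
#   _setup_grid_and_axes('time [s]', 'amplitude')
#   plot(t, y)       # actual plotting happens after the grid/axes setup
#   show()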
def include_png(in_file, title=None, figsize=(11.7, 8.3)):
    fig = plt.figure(figsize=figsize)
    img = plt.imread(in_file)
    plt.imshow(img)
    plt.grid(False)
    plt.tick_params(labelbottom='off', labeltop='off', labelleft='off', labelright='off')
    plt.title(title, fontsize='10')
    plt.close()
    return fig
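# --- Hedged usage sketch (not from the original source) ---
# include_png() returns an already-closed Figure wrapping the image, which makes it
# convenient for bundling several PNGs into one PDF report; the file names below are
# made-up examples:
#
#   from matplotlib.backends.backend_pdf import PdfPages
#   with PdfPages('report.pdf') as pdf:
#       pdf.savefig(include_png('qc_overview.png', title='QC overview'))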
def theme(ax=None, minorticks=False):
    """ update plot to make it nice and uniform """
    from matplotlib.ticker import AutoMinorLocator
    from pylab import rcParams, gca, tick_params
    if minorticks:
        if ax is None:
            ax = gca()
        ax.yaxis.set_minor_locator(AutoMinorLocator())
        ax.xaxis.set_minor_locator(AutoMinorLocator())
    tick_params(which='both', width=rcParams['lines.linewidth'])
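# --- Hedged usage sketch (not from the original source) ---
# theme() tweaks tick appearance on the current (or given) axes, so it is called
# after the plotting commands; the data below is made up:
#
#   import pylab
#   pylab.plot(range(10), [x ** 2 for x in range(10)])
#   theme(minorticks=True)
#   pylab.show()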
def embedSystem(embedding, explained_variance):
    pl.figure(figsize=(fw, fh))
    pl.scatter(embedding[:, 0], embedding[:, 1], c=spikes, linewidths=0)
    pl.title('Two-dimensional embedding explains '
             + str(round(explained_variance, 3) * 100)
             + '% of variance among observed system states')
    pl.xlabel('Principal component 1')
    pl.ylabel('Principal component 2')
    pl.tick_params(labelleft='off', labelbottom='off')
    pl.colorbar().set_label('Instantaneous spike rate (spikes/ms)')
    #pl.show()
    pl.savefig(str(N) + "--" + "2Dsystemembedding.pdf")
def embedIndividuals(embedding, explained_variance):
    pl.figure(figsize=(fw, fh))
    pl.scatter(embedding[:, 0], embedding[:, 1], c=d, linewidths=0)
    pl.title('Two-dimensional embedding explains '
             + str(round(explained_variance, 3) * 100)
             + '% of variance among individual neurons')
    pl.xlabel('Principal component 1')
    pl.ylabel('Principal component 2')
    pl.tick_params(labelleft='off', labelbottom='off')
    pl.colorbar().set_label('Parameter d (after-spike increment value of recovery variable u)')
    #pl.show()
    pl.savefig(str(N) + "--" + "2Dindividualembedding.pdf")
def plot_stc_time_point(stc, subject, limits=[5, 10, 15], time_index=0, surf='inflated', measure='dSPM', subjects_dir=None): """Plot a time instant from a SourceEstimate using matplotlib The same could be done with mayavi using proper 3D. Parameters ---------- stc : instance of SourceEstimate The SourceEstimate to plot. subject : string The subject name (only needed if surf is a string). time_index : int Time index to plot. surf : str, or instance of surfaces Surface to use (e.g., 'inflated' or 'white'), or pre-loaded surfaces. measure : str The label for the colorbar. None turns the colorbar off. subjects_dir : str, or None Path to the SUBJECTS_DIR. If None, the path is obtained by using the environment variable SUBJECTS_DIR. """ subjects_dir = get_subjects_dir(subjects_dir) pl.figure(facecolor='k', figsize=(8, 5)) hemis = ['lh', 'rh'] if isinstance(surf, str): surf = [read_surface(op.join(subjects_dir, subject, 'surf', '%s.%s' % (h, surf))) for h in hemis] my_cmap = mne_analyze_colormap(limits) for hi, h in enumerate(hemis): coords = surf[hi][0][stc.vertno[hi]] if hi == 0: vals = stc_all_cluster_vis.lh_data[:, time_index] else: vals = stc_all_cluster_vis.rh_data[:, time_index] ax = pl.subplot(1, 2, 1 - hi, axis_bgcolor='none') pl.tick_params(labelbottom='off', labelleft='off') flipper = -1 if hi == 1 else 1 sc = ax.scatter(flipper * coords[:, 1], coords[:, 2], c=vals, vmin=-limits[2], vmax=limits[2], cmap=my_cmap, edgecolors='none', s=5) ax.set_aspect('equal') pl.axis('off') try: pl.tight_layout(0) except: pass if measure is not None: cax = pl.axes([0.85, 0.15, 0.025, 0.15], axisbg='k') cb = pl.colorbar(sc, cax, ticks=[-limits[2], 0, limits[2]]) cb.set_label(measure, color='w') pl.setp(pl.getp(cb.ax, 'yticklabels'), color='w') pl.draw() pl.show()
def plot_held_units(rec_dirs, held_df, save_dir, rec_names=None): '''Plot waveforms of held units side-by-side Parameters ---------- rec_dirs : list of str full paths to recording directories held_df : pandas.DataFrame dataframe listing held units with columns matching the names of the recording directories or the given rec_names. Also colulmns: - unit : str, unit name - single_unit : bool - unit_type : str, unit_type - electrode : int - J3 : list of float, J3 values for the held unit save_dir : str, directory to save plots in rec_names : list of str (optional) abbreviated rec_names if any were used for held_df creation if not given, rec_names are assumed to be the basenames of rec_dirs ''' if rec_names is None: rec_names = [os.path.basename(x) for x in rec_dirs] rec_labels = {x: y for x, y in zip(rec_names, rec_dirs)} print('\n----------\nPlotting held units\n----------\n') for idx, row in held_df.iterrows(): n_subplots = 0 units = {} for rn in rec_names: if not pd.isna(row.get(rn)): n_subplots += 1 units[rn] = row.get(rn) if n_subplots == 0: continue single_unit = row['single_unit'] if single_unit: single_str = 'single-unit' else: single_str = 'multi-unit' unit_type = row['unit_type'] unit_name = row['unit'] electrode = row['electrode'] area = row['area'] J3_vals = row['J3'] J3_str = np.array2string(np.array(J3_vals), precision=3) print('Plotting Unit %s...' % unit_name) title_str = 'Unit %s\nElectrode %i: %s %s\nJ3: %s' % ( unit_name, electrode, unit_type, single_str, J3_str) fig, fig_ax = plt.subplots(ncols=n_subplots, figsize=(20, 10)) ylim = [0, 0] row_ax = [] for ax, unit_info in zip(fig_ax, units.items()): rl = unit_info[0] u = unit_info[1] rd = rec_labels.get(rl) params = dio.params.load_params('clustering_params', rd) if params is None: raise FileNotFoundError('No dataset pickle file for %s' % rd) #waves, descriptor, fs = get_unit_waveforms(rd, x[1]) waves, descriptor, fs = dio.h5io.get_raw_unit_waveforms(rd, u) waves = waves[:, ::10] fs = fs / 10 time = np.arange(0, waves.shape[1], 1) / (fs / 1000) snapshot = params['spike_snapshot'] t_shift = snapshot['Time before spike (ms)'] time = time - t_shift mean_wave = np.mean(waves, axis=0) std_wave = np.std(waves, axis=0) ax.plot(time, mean_wave, linewidth=5.0, color='black') ax.plot(time, mean_wave - std_wave, linewidth=2.0, color='black', alpha=0.5) ax.plot(time, mean_wave + std_wave, linewidth=2.0, color='black', alpha=0.5) ax.set_xlabel('Time (ms)', fontsize=35) ax.set_title('%s %s\ntotal waveforms = %i' % (rl, u, waves.shape[0]), fontsize=20) ax.autoscale(axis='x', tight=True) plt.tick_params(axis='both', which='major', labelsize=32) if np.min(mean_wave - std_wave) - 20 < ylim[0]: ylim[0] = np.min(mean_wave - std_wave) - 20 if np.max(mean_wave + std_wave) + 20 > ylim[1]: ylim[1] = np.max(mean_wave + std_wave) + 20 for ax in row_ax: ax.set_ylim(ylim) fig_ax[0].set_ylabel('Voltage (microvolts)', fontsize=35) plt.subplots_adjust(top=.75) plt.suptitle(title_str) fig.savefig(os.path.join(save_dir, 'Unit%s_waveforms.png' % unit_name), bbox_inches='tight') plt.close('all')
time_per_turn_seconds_nslice = computational_time_seconds_nslices / N_turns_done_nslices # Figure parameters fig_index = 0 fig_size = (12, 8) axis_font = {'fontname': 'Arial', 'size': '24'} axis_font_title = {'fontname': 'Arial', 'size': '20'} labelsize_choice = 24 labelsize_legend = 24 line_width = 3.5 fig_index = fig_index + 1 fig = pl.figure(fig_index, figsize=fig_size) pl.plot(n_slices_vect, time_per_turn_seconds_nslice, 'o-', linewidth=line_width) pl.ylim(bottom=0) pl.xlabel('Slices', **axis_font) pl.ylabel('Computational Time [s/Turn]', **axis_font) pl.title( 'ArcQuad %s Segments = %d MPs/Slice = %d e$^-$ MPs = %d' % (PyPICmode_tag, n_segments, macroparticles_per_slice, eMPs), **axis_font_title) pl.tick_params(labelsize=labelsize_choice) pl.grid(linestyle='dashed') pl.savefig(folder_work + 'computational_time_nSlices.png', dpi=300) pl.show()
def plot_held_units(rec_dirs, held_df, J3_df, save_dir): '''Plot waveforms of held units side-by-side Parameters ---------- rec_dirs : list of str full paths to recording directories held_df : pandas.DataFrame dataframe listing held units with columns matching the names of the recording directories and a unit column with the unit names J3_df : pandas.DataFrame dataframe with same rows and columns as held df except the values are lists fo inter_J3 values for units that were found to be held save_dir : str, directory to save plots in ''' print('\n----------\nPlotting held units\n----------\n') for idx, row in held_df.iterrows(): unit_name = row.pop('unit') electrode = row.pop('electrode') area = row.pop('area') n_subplots = row.notnull().sum() idx = np.where(row.notnull())[0] cols = row.keys()[idx] units = row.values[idx] fig = plt.figure(figsize=(18, 6)) ylim = [0, 0] row_ax = [] for i, x in enumerate(zip(cols, units)): J3_vals = J3_df[x[0]][J3_df['unit'] == unit_name].values[0] J3_str = np.array2string(np.array(J3_vals), precision=3) ax = plt.subplot(1, n_subplots, i+1) row_ax.append(ax) rd = [y for y in rec_dirs if x[0] in y][0] params = get_clustering_parameters(rd) if params is None: raise FileNotFoundError('No dataset pickle file for %s' % rd) #waves, descriptor, fs = get_unit_waveforms(rd, x[1]) waves, descriptor, fs = get_raw_unit_waveforms(rd, x[1]) waves = waves[:, ::10] fs = fs/10 time = np.arange(0, waves.shape[1], 1) / (fs/1000) snapshot = params['spike_snapshot'] t_shift = snapshot['Time before spike (ms)'] time = time - t_shift mean_wave = np.mean(waves, axis=0) std_wave = np.std(waves, axis=0) plt.plot(time, mean_wave, linewidth=5.0, color='black') plt.plot(time, mean_wave - std_wave, linewidth=2.0, color='black', alpha=0.5) plt.plot(time, mean_wave + std_wave, linewidth=2.0, color='black', alpha=0.5) plt.xlabel('Time (ms)', fontsize=35) if i==0: plt.ylabel('Voltage (microvolts)', fontsize=35) plt.title('%s %s\ntotal waveforms = %i, Electrode: %i\n' 'J3: %s, Single Unit: %i, RSU: %i, FS: %i' % (x[0], x[1], waves.shape[0], descriptor['electrode_number'], J3_str, descriptor['single_unit'], descriptor['regular_spiking'], descriptor['fast_spiking']), fontsize = 20) plt.tick_params(axis='both', which='major', labelsize=32) if np.min(mean_wave - std_wave) - 20 < ylim[0]: ylim[0] = np.min(mean_wave - std_wave) - 20 if np.max(mean_wave + std_wave) + 20 > ylim[1]: ylim[1] = np.max(mean_wave + std_wave) + 20 for ax in row_ax: ax.set_ylim(ylim) plt.subplots_adjust(top=.7) plt.suptitle('Unit %s' % unit_name) fig.savefig(os.path.join(save_dir, 'Unit%s_waveforms.png' % unit_name), bbox_inches='tight') plt.close('all')
def plot_hams(self): print("plotting normal hams") fig, ax = pylab.subplots(figsize=(self.fig_size, self.fig_size)) box_font = self.box_font_size # axes labels xlabel = r"$d$" ylabel = "f" if self.protein == "Kinase": start = 120 end = 230 x_tick_range = np.arange(start, end, 20) pylab.xlim(start, end) all_freqs = dict() for label, seqs_file in self.vis_seqs.items(): if label in self.skip: print("skipping ", label) continue if not self.which_models[label]: # model is 'false' in the which_models{}, then continue continue label = self.label_dict[label] print("computing hams for:\t", label) seqs = loadSeqs(self.msa_dir + "/" + seqs_file, names=self.ALPHA)[0][0:self.keep_hams] h = histsim(seqs).astype(float) h = h/np.sum(h) all_freqs[label] = h rev_h = h[::-1] if label == "Target": if "nat" in self.synth_nat: target_label = "Nat-Target" else: target_label = "Synth-Target" line_style = "dashed" my_dashes = (1, 1) ax.plot(rev_h, linestyle=line_style, linewidth=self.line_width, dashes=my_dashes, alpha=self.line_alpha, color=self.color_set[label], label=target_label, zorder=self.z_order[label]) else: line_style = "solid" ax.plot(rev_h, linestyle=line_style, linewidth=self.line_width, alpha=self.line_alpha, color=self.color_set[label], label=label, zorder=self.z_order[label]) tvds = dict() print("all_freqs") print(all_freqs.keys()) delete_key = '' save_value = '' for data_label, f in all_freqs.items(): if 'arget' in data_label: save_value = f delete_key = data_label del all_freqs[delete_key] all_freqs["Target"] = save_value for data_label, f in all_freqs.items(): if data_label != 'Target': tvds[data_label] = round(np.sum(np.abs(all_freqs['Target'] - f))/2, 4) print(tvds) y_tick_range = np.arange(0.0, 0.08, 0.02) pylab.ylabel(ylabel, fontsize=self.label_size) pylab.xlabel(xlabel, fontsize=self.label_size) pylab.xticks(x_tick_range, rotation=45) pylab.yticks(y_tick_range) pylab.tick_params(direction='in',axis='both', which='major', labelsize=self.tick_size, length=self.tick_length, width=self.tick_width) #my_title = "Hamming Distance Distributions\n" + self.parent_dir_name file_name = "ham_" + self.name + "_" + self.synth_nat + "_" + self.which_size + ".pdf" #pylab.title(self.which_size, fontsize=self.title_size) pylab.tight_layout() pylab.legend(fontsize=self.tick_size-3, loc="upper left", frameon=False) save_name = self.output_dir + "/" + file_name print(save_name) pylab.savefig(save_name, dpi=self.dpi, format='pdf') pylab.close()
def parse_and_plot_ref(runfile, spectrum_file): fields = [('wl', 'f8'), ('gf', 'f8'), ('z', 'i'), ('istg', 'i'), ('chi', 'f8')] ref = N.loadtxt("ref.dat", dtype=fields) model = N.loadtxt(spectrum_file) mylist = parse_runsynow(runfile) numref = mylist['parms']['numref'] an = [] ai = [] for x,y,z in zip(mylist['parms']['tau1'][:numref],\ mylist['parms']['an'][:numref],\ mylist['parms']['ai'][:numref]): if x > 0.: an.append(y) ai.append(z) ions_used = [z * 100 + istg for z, istg in zip(an, ai)] ref_ions = [] for i in xrange(N.size(ref['wl'])): ref_ions.append(ref['z'][i] * 100 + ref['istg'][i]) ref_index = [] for ion in ions_used: ref_index.append(ref_ions.index(ion)) pylab.interactive(True) # One can supply an argument to AutoMinorLocator to # specify a fixed number of minor intervals per major interval, e.g.: # minorLocator = AutoMinorLocator(2) # would lead to a single minor tick between major ticks. minorLocator = AutoMinorLocator() golden = (pylab.sqrt(5) + 1.) / 2. figprops = dict(figsize=(8., 8. / golden), dpi=128) # Figure properties for single and stacked plots # figprops = dict(figsize=(16., 8./golden), dpi=128) # Figure properties for side by sides adjustprops = dict(left=0.15, bottom=0.1, right=0.90, top=0.93, wspace=0.2, hspace=0.2) # Subp fig = pylab.figure(1, **figprops) # New figure fig.clf() fig.subplots_adjust(**adjustprops) # Tunes the subplot layout ax1 = fig.add_subplot(1, 1, 1) my_funcs.bold_labels(ax1) p1, = ax1.plot(model[:, 0], model[:, 1], linewidth=2.0) ax1.set_ylabel(r'$F_\lambda$', fontsize=14) ax1.set_xlabel(r'$\lambda\ (\AA)$', fontsize=14) # ax1.set_xlim([0.,60.]) # ax1.set_ylim([10.**41.4,10.**43.5]) # ax1.set_yscale('log') # ax1.legend([p1,p2,p3,p4],['Day 10','Day 15','Day 25','Day 50'],frameon=False) ax1.xaxis.set_minor_locator(minorLocator) pylab.tick_params(which='both', width=2) pylab.tick_params(which='major', length=7) pylab.tick_params(which='minor', length=4, color='r') #ax1.xaxis.grid(True,which='minor') ax1.xaxis.grid(True, which='both') wl_ref = [] f_ref = [] ymin, ymax = ax1.get_ybound() for i in ref_index: wl_ref.append([10. * ref['wl'][i], 10. * ref['wl'][i]]) ihelp = N.abs(model[:, 0] - 10. * ref['wl'][i]).argmin() yhelp = model[ihelp, 1] f_ref.append([ymin, yhelp]) for x, y in zip(wl_ref, f_ref): ax1.plot(x, y, lw=2) fields = [('Z','i'),('A','f8'),('Name','S13'),('sym','S4'),('MP','f8'),\ ("BP",'f8'),('rho','f8'),('crust','f8'),('year','i'),\ ('group','i'), ('config','S23'), ('chiion',"f8")] # labels = N.loadtxt("periodic_table.dat",skiprows=1,delimiter=',',dtype=fields) labels = N.genfromtxt("periodic_table.dat", skip_header=1, delimiter=',', dtype=None) syms = [] for x in labels['f3']: syms.append(x.replace(" ", "")) ref_Zs = [] for z in labels['f0']: ref_Zs.append(z) sym_indices = [] for z in an: sym_indices.append(ref_Zs.index(z)) spect_notation = [ "I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX", "X" ] text_labels = [] for i, j in enumerate(sym_indices): help = syms[j] + " " + spect_notation[ai[i]] text_labels.append(help) for x, y, l in zip(wl_ref, f_ref, text_labels): ax1.text(x[0], min(y[1] * 1.08, ymax), l, fontsize=8)
def PLOTdetrendPOSITIONfluxFULL(time, flux, position, TCK, **kwargs): """ Routine going with detrendPOSITIONflux to visualize the detrending of the photometry for the instrumental correction between position and flux of BRITE photometry. Returns: Figure with -hopefully- enough diagnostics to determine the strength / weaknessess of the detrending @param flux: flux measurements [adu] @type flux: numpy array of length N @param position: CCD position measurements along a position axis [pixel] @type position: numpy array of length N @param TCK: spline tck for the favored fit @type TCK: tuple """ # Calculating the corrections # --------------------------- fluxCORRECTION = scInterp.splev(position, TCK) # Setting up the figure window # ---------------------------- figPOScorr = pl.figure(figsize=(16, 16)) gsPOScorr = gridspec.GridSpec(2, 2, height_ratios=[1, 1], width_ratios=[3, 2]) axTIMEorig = figPOScorr.add_subplot(gsPOScorr[0, 0]) # Initial flux with time axTIMEcorr = figPOScorr.add_subplot( gsPOScorr[1, 0], sharex=axTIMEorig, sharey=axTIMEorig) # Corrected flux with time axPOSorig = figPOScorr.add_subplot( gsPOScorr[0, 1], sharey=axTIMEorig) # Initial flux with position axPOScorr = figPOScorr.add_subplot( gsPOScorr[1, 1], sharex=axPOSorig, sharey=axTIMEorig) # Corrected flux with position # Panels related to time # ---------------------- axTIMEorig.plot(time, flux, 'k.', ms=6, alpha=.4) pl.tick_params('both', length=10, width=2, which='major') pl.tick_params('both', length=10, width=1, which='minor') pyplot.locator_params(axis='x', nbins=5) pyplot.locator_params(axis='y', nbins=5) axTIMEorig.set_title('Original') axTIMEorig.set_ylabel('Flux [adu]') axTIMEcorr.plot(time, flux - fluxCORRECTION, 'k.', ms=6, alpha=.4) pl.tick_params('both', length=10, width=2, which='major') pl.tick_params('both', length=10, width=1, which='minor') pyplot.locator_params(axis='x', nbins=5) pyplot.locator_params(axis='y', nbins=5) axTIMEcorr.set_title('After correction') axTIMEcorr.set_ylabel('Flux [adu]') axTIMEcorr.set_ylabel('Time [d]') # Panels related to position # -------------------------------------- axPOSorig.plot(position, flux, 'k.', ms=6, alpha=.4) axPOSorig.plot(position, fluxCORRECTION, 'r.', ms=6, alpha=.8) pl.tick_params('both', length=10, width=2, which='major') pl.tick_params('both', length=10, width=1, which='minor') pyplot.locator_params(axis='x', nbins=5) pyplot.locator_params(axis='y', nbins=5) axPOSorig.set_title('Correction') axPOScorr.plot(position, flux - fluxCORRECTION, 'k.', ms=6, alpha=.4) pl.tick_params('both', length=10, width=2, which='major') pl.tick_params('both', length=10, width=1, which='minor') pyplot.locator_params(axis='x', nbins=5) pyplot.locator_params(axis='y', nbins=5) axPOScorr.set_title('Residuals correction') axPOScorr.set_ylabel('CCD position [pixel]') # Settings # -------- axTIMEorig.set_xlim([np.min(time), np.max(time)]) axTIMEorig.set_ylim([np.min(flux) * 1.2, np.max(flux) * 1.2]) axPOSorig.set_xlim([np.min(position) + 0.1, np.max(position) + 0.1]) axTIMEcorr.set_xlabel('Time [d]') axTIMEorig.set_ylabel('Flux [adu]') axTIMEcorr.set_ylabel('Flux [adu]') axPOScorr.set_xlabel('Position [pixel]') return
color=colours[k], label="%s (%d deg^2)" % (labels[k], sarea[k]), marker='.') #line[0].set_dashes(linestyle[k]) for i in range(kc.size): print "%03d -- %3.3e" % (i, kc[i]) # Title is k value of k bin P.title("k = %3.3e Mpc$^{-1}$" % kc[kbin]) P.legend(loc='upper right', prop={'size': 'medium'}, frameon=False) P.tick_params(axis='both', which='major', labelsize=20, size=8., width=1.5, pad=8.) P.tick_params(axis='both', which='minor', labelsize=20, size=5., width=1.5) P.xlabel(r"$z$", fontdict={'fontsize': 'x-large'}) P.ylabel(r"$\Delta P / P$", fontdict={'fontsize': 'x-large'}) P.ylim((5e-3, 5e-1)) P.yscale('log') P.gcf().set_size_inches(8., 6.) P.tight_layout() P.savefig("pk_redshift_k%3.3e.pdf" % kc[kbin], transparent=True) print "Output: pk_redshift_k%3.3e.pdf" % kc[kbin] #P.show()
#loss = K.mean(layer_output[:, filter_index, :, :]) ## compute the gradient of the input picture wrt this loss #grads = K.gradients(loss, input_img)[0] ## normalization trick: we normalize the gradient #grads /= (K.sqrt(K.mean(K.square(grads))) + 1e-5) ## this function returns the loss and grads given the input picture #iterate = K.function([input_img], [loss, grads]) ## we start from a gray image with some noise #input_img_data = np.random.random(((1,) + input_shape)) * 20 + 128. ## run gradient ascent for 20 steps #for i in range(20): # loss_value, grads_value = iterate([input_img_data]) # input_img_data += grads_value * step # Visualize weights W = DCNN_flatwindow.layers[0].W.get_value(borrow=True) W = np.squeeze(W) print("W shape : ", W.shape) pl.figure(figsize=(15, 15)) pl.title('conv1 weights') nice_imshow(pl.gca(), make_mosaic(W, 6, 3), cmap=cm.binary) pl.tick_params(axis='y', labelleft='off') pl.tight_layout() pl.savefig('C://DATA//koumura birds//Bird1//bird1_conv1_1_kernel_weights.eps')
def find_time_points(t=0, t_want=0, i_plot=0, i_test=0, i_warn=1, method='even'): """ Find time index for an array of time points on time array t """ if i_test == 1: print('ueven test data used') ind = numpy.linspace(-3, 2.2, 343) t = 10**ind t_want = numpy.array([9, 0.1, 0.05, 1.732, 12]) i_plot = 1 elif i_test == 2: t = numpy.linspace(0, 5, 50000) t_want = numpy.array([1.111, 3.222]) i_plot = 1 # convert a pure number t_want to a list with length and attribute if not (type(t_want) == list): t_want = [t_want] if numpy.min(t_want) < t[0] or numpy.max(t_want) > t[-1]: print('t[0]=', t[0]) print('t[len(t)-1]', t[len(t) - 1]) raise Exception('Error: t_want goes out the range of t') # sum up the difference of time difference to judge whether it is even. dt_sum = numpy.sum(numpy.diff(numpy.diff(t))) if dt_sum < 10**-10 or method == 'even': dt = (t[3] - t[0]) / 3.0 i_want = numpy.round((t_want - t[0]) / dt) # convert i_want to python style index that start from 0 i_want = i_want - 1 elif dt_sum > 10**-10 or method == 'uneven': if i_warn == 1 and method == 'even': print('Sum of ddt: ', dt_sum) print('Time array is not even, slow loop method used!') # i_want = numpy.ones(len(t_want))*-1 i_want = numpy.zeros(len(t_want)) for i in range(0, len(t_want)): for j in range(0, len(t)): if t_want[i] >= t[j] and t_want[i] < t[j + 1]: i_want[i] = j # convert index i_want to integers i_want = numpy.int_(i_want) if i_plot == 1: print('t_want: ', t_want) print('i_want: ', i_want) pylab.figure() x = numpy.arange(0, t.shape[0], 1) pylab.plot(x, t, '-o', color='blue', markersize=3) pylab.hold('on') pylab.plot(x[i_want], t[i_want], 'o', color='red') pylab.xlabel('index') pylab.ylabel('time (s)') pylab.grid('on') pylab.minorticks_on() pylab.tick_params(which='major', labelsize=10, width=2, length=10, color='black') pylab.tick_params(which='minor', width=1, length=5) return i_want
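# --- Hedged usage sketch (not from the original source) ---
# find_time_points() ships its own test modes (i_test=1 for an uneven log-spaced grid,
# i_test=2 for an even grid), so a quick smoke test needs no external data; a normal
# call passes the full time array and the wanted time points as numpy arrays:
#
#   i_want = find_time_points(i_test=1)
#   t = numpy.linspace(0.0, 5.0, 50000)
#   i_want = find_time_points(t, t_want=numpy.array([1.111, 3.222]))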
def uniqHostsVsJobs(df):
    '''
    roi_df = df[['totalCores', 'Thrashing']]
    roi_df = roi_df[(roi_df['Thrashing'] == True)]
    totalCores = (df['totalCores'].values.tolist())
    thrashingCores = roi_df['totalCores'].values.tolist()
    ziplist = []
    for coreNo in list(set(totalCores)):
        totFltCorePcnt = (float(thrashingCores.count(coreNo))/float(totalCores.count(coreNo)))*100
        totCorePcnt = (float(totalCores.count(coreNo))/float(len(totalCores)))*100
        pcnt = (totCorePcnt * totFltCorePcnt)/100.00
        ziplist.append((coreNo, totCorePcnt, pcnt, totCorePcnt-pcnt))
    coreNo, totalpcnt, bottompcnt, topPcnt = zip(*sorted(ziplist, key=lambda x: x[1], reverse=True))
    noOfBars = 30
    filename = "500TTotalCoresVSJobs_Percentages.png"
    fig, ax = mplt.subplots()
    indices = np.arange(0, 0.3*noOfBars, 0.3)
    bottomBar = ax.bar(indices, bottompcnt[:noOfBars], bar_width, color=blue,
                       linewidth=outlinewgt[-1], hatch='////')
    topBar = ax.bar(indices, topPcnt[:noOfBars], bar_width, bottom=bottompcnt[:noOfBars],
                    color=lightblue, linewidth=outlinewgt[-1], hatch="////")
    ax.set_xticks(indices + 0.5*(bar_width))
    ax.set_xticklabels(coreNo[:noOfBars])
    ax.set_xlabel('Total Cores', fontsize=labelFontSZ, fontweight=labelFontWT)
    ax.set_ylabel('% of jobs', fontsize=labelFontSZ, fontweight=labelFontWT)
    ax.text(ImgNoteX, ImgNoteY, 'Threshold: Peak major page fault > 500',
            horizontalalignment='center',
            verticalalignment='center',
            transform=ax.transAxes, fontsize=ticksFontSZ, fontweight=labelFontWT)
    mplt.xticks(fontsize=ticksFontSZ, rotation='vertical')
    mplt.yticks(fontsize=ticksFontSZ)
    fig = matplotlib.pyplot.gcf()
    fig.set_size_inches(ImgWidth, ImgHeight)
    mplt.legend((bottomBar[1], topBar[0]),
                ("% of Job > Threshold", "% of Jobs < Threshold"))
    mplt.savefig(sys.argv[2]+"/"+ filename, format=ImgFormat, dpi=ImgDPI, bbox_inches=ImgProp)
    '''
    # Second plot
    roi_df = df[['uniqHosts', 'Thrashing']]
    totalCores = (df['uniqHosts'].values.tolist())
    roi_df = roi_df[(roi_df['Thrashing'] == True)]
    thrashingCores = roi_df['uniqHosts'].values.tolist()
    ziplist = []
    for coreNo in list(set(totalCores)):
        totFltCoreCnt = thrashingCores.count(coreNo)
        totCoreCnt = totalCores.count(coreNo)
        ziplist.append(
            (coreNo, totCoreCnt, totFltCoreCnt, totCoreCnt - totFltCoreCnt))
    coreNo, totalCnt, bottomCnt, topCnt = zip(
        *sorted(ziplist, key=lambda x: x[1], reverse=True))
    noOfBars = 10
    filename = "500TUniqHostsVSJobs_RawNumbers.png"
    fig, ax = mplt.subplots()
    indices = np.arange(0, 0.3 * noOfBars, 0.3)
    topBar = ax.bar(indices, totalCnt[:noOfBars], bar_width,
                    color=yellow, linewidth=outlinewgt[-1])  # hatch="/"
    bottomBar = ax.bar(indices, bottomCnt[:noOfBars], 0.75 * bar_width,
                       color=red, linewidth=outlinewgt[-1])  # hatch='/'
    for i in range(2):
        labelBar.labelBar(ax, topBar[i], None,
                          str(int(float(totalCnt[i]) * 100.00 / len(totalCores))) + '%', 0.03, 0)
        labelBar.labelBar(ax, bottomBar[i], None,
                          str(int(float(bottomCnt[i]) * 100.00 / totalCnt[i])) + '%', 0.0, 0)
    ax.set_xticks(indices + 0.5 * (bar_width))
    ax.set_xticklabels(coreNo[:noOfBars])
    ax.set_xlabel('No. of nodes requested', fontsize=labelFontSZ, fontweight=labelFontWT)
    ax.set_ylabel('No of jobs', fontsize=labelFontSZ, fontweight=labelFontWT)
    plt.tick_params(
        axis='both',    # changes apply to both axes
        which='both',   # both major and minor ticks are affected
        bottom='off',   # ticks along the bottom edge are off
        top='off',      # ticks along the top edge are off
        right='off',    # ticks along the right edge are off
        left='off')     # ticks along the left edge are off
    #ax.text(ImgNoteX, ImgNoteY, 'Threshold: Peak major page fault > 500', \
    #        horizontalalignment='center', \
    #        verticalalignment='center', \
    #        transform=ax.transAxes, fontsize=ticksFontSZ, fontweight=labelFontWT)
    mplt.xticks(fontsize=ticksFontSZ)
    mplt.yticks(fontsize=ticksFontSZ)
    fig = matplotlib.pyplot.gcf()
    fig.set_size_inches(ImgWidth, ImgHeight)
    mplt.legend((bottomBar[0], topBar[0]),
                ("No of jobs > Threshold", "No of jobs"))
    mplt.savefig(sys.argv[2] + "/" + filename, format=ImgFormat,
                 dpi=ImgDPI, bbox_inches=ImgProp)
    '''
# Makes plots of S_lambda masked model
#print gc_result
print(gc_result_masked_sl)

for a in gc_result_masked_sl.median.keys():
    setattr(model, a, gc_result_masked_sl.median[a])
sl_w, sl_f = model()

#for a in gc_result.median.keys():
#    setattr(model, a, gc_result.median[a])
sl_unmasked_w, sl_unmasked_f = model()

plt.tick_params(axis='both', which='major', labelsize=11)
plt.plot(w / (unmasked_median_fits['vrad_2'] / 3e5 + 0.0), starspectrum35.flux.value, label="Data")
'''
plt.plot(w/(unmasked_median_fits['vrad_2']/3e5+1.0), f, label="Unmasked model with best fit unmasked values")
plt.plot(sl_w/(gc_result_masked_sl.median['vrad_2']/3e5+1.0), sl_f-0.5, label="Masked model with best fit masked values")
plt.plot(sl_w/(gc_result_masked_sl.median['vrad_2']/3e5+1.0), masked_data_sl.flux.value-sl_f, label='Masked Model-Masked Data Residuals')
'''
plt.plot(w / (unmasked_median_fits['vrad_2'] / 3e5 + 1.0), f, label="Best Fit Model (No mask)")
#plt.plot(sl_w/(gc_result_masked_sl.median['vrad_2']/3e5+1.0), sl_f-0.5, label="Masked model with best fit masked values")
#plt.plot(w/(gc_result_masked_sl.median['vrad_2']/3e5+1.0), starspectrum35.flux.value-f, label='Masked Model-Masked Data Residuals')
#--setting y-axis range pl.ylim(ylow, yhigh) pl.yticks(np.linspace(ylow, tick_high, num_ticks, endpoint=True)) margin = bar_width + (1 - len(labels) * bar_width) #-- setting x-axis pl.xlim(side_margin - margin, len(data) - side_margin) if len(xnote.strip()) != 0: pl.xlabel(xnote) #--adding minor ticks, one tick every 0.02 unit #ml = MultipleLocator(10) #pl.axes().yaxis.set_minor_locator(ml) #--drawing lines for major and minor ticks #pl.grid(True) pl.axes().yaxis.grid(b=True, which='major', color='k', linestyle='--') #pl.axes().yaxis.grid(b=True, which='minor', color=(0.5,0.5,0.5), linestyle=':') #--setting the tick marks pl.xticks(np.arange(len(data)) + bar_width * 0.5 * (NUM_BARS - 1), data[headers[0]], rotation=rotation_angle) pl.tick_params(top="off") pl.tick_params(bottom="off") #pl.show() #pl.savefig(datafile + ".eps", format='eps', dpi=1000, bbox_inches='tight') pl.savefig(datafile + "." + figure_f, format=figure_f, dpi=1000, bbox_inches='tight') pl.close()
U[:, -1, 0], label='Final Mass Density', color=tableau20[0], lw=3) plt.plot(xPoints, U[:, 0, 1], label='Initial Momentum Density', color=tableau20[3], lw=3) plt.plot(xPoints, U[:, -1, 1], label='Final Momentum Density', color=tableau20[2], lw=3) plt.plot(xPoints, U[:, 0, 2], label='Initial Energy Density', color=tableau20[5], lw=3) plt.plot(xPoints, U[:, -1, 2], label='Final Energy Density', color=tableau20[4], lw=3) plt.title('Evolution of the Sod Tube at t=' + str(tMax), fontsize=24) plt.xlabel('X Position', fontsize=18) plt.ylabel('Conserved Quantity', fontsize=18) plt.tick_params(labelsize=14) plt.legend(loc='upper right') plt.show()
def PLOTdetrendPOSITIONfluxDIAGinformCRIT(flux, position, AICtck, BICtck, **kwargs): """ Routine going with detrendPOSITIONflux to visualize diagnostics in case the AIC and BIC do not favor the same fit. Returns: Figure with -hopefully- enough diagnostics to determine the strength / weaknessess of the detrending @param flux: flux measurements [adu] @type flux: numpy array of length N @param position: CCD position measurements along a position axis [pixel] @type position: numpy array of length N @param AICtck: spline tck for the favored fit by the AIC @type AICtck: tuple @param BICtck: spline tck for the favored fit by the BIC @type BICtck: tuple """ # Calculating the corrections fluxCORRECTIONaic = scInterp.splev(position, AICtck) fluxCORRECTIONbic = scInterp.splev(position, BICtck) # Setting up the figure window # ---------------------------- figDIAGN = pl.figure(figsize=(16, 16)) axAIC = figDIAGN.add_subplot(221) axBIC = figDIAGN.add_subplot(222, sharey=axAIC, sharex=axAIC) axAICres = figDIAGN.add_subplot(223, sharey=axAIC, sharex=axAIC) axBICres = figDIAGN.add_subplot(224, sharey=axAIC, sharex=axAIC) # -- axAIC.plot(position, flux, 'k.', ms=6, alpha=.4) axAIC.plot(position, fluxCORRECTIONaic, 'r.', ms=8, alpha=.6) pl.tick_params('both', length=10, width=2, which='major') pl.tick_params('both', length=10, width=1, which='minor') pyplot.locator_params(axis='x', nbins=5) pyplot.locator_params(axis='y', nbins=5) axAIC.set_title('best AIC fit') axAIC.set_ylabel('Flux [adu]') # -- axAICres.plot(position, flux - fluxCORRECTIONaic, 'k.', ms=6, alpha=.4) pl.tick_params('both', length=10, width=2, which='major') pl.tick_params('both', length=10, width=1, which='minor') pyplot.locator_params(axis='x', nbins=5) pyplot.locator_params(axis='y', nbins=5) axAICres.set_ylabel('Res. Flux [adu]') # -- axBIC.plot(position, flux, 'k.', ms=6, alpha=.4) axBIC.plot(position, fluxCORRECTIONbic, 'r.', ms=8, alpha=.6) pl.tick_params('both', length=10, width=2, which='major') pl.tick_params('both', length=10, width=1, which='minor') pyplot.locator_params(axis='x', nbins=5) pyplot.locator_params(axis='y', nbins=5) axBIC.set_title('best BIC fit') # -- axBICres.plot(position, flux - fluxCORRECTIONbic, 'k.', ms=6, alpha=.4) pl.tick_params('both', length=10, width=2, which='major') pl.tick_params('both', length=10, width=1, which='minor') pyplot.locator_params(axis='x', nbins=5) pyplot.locator_params(axis='y', nbins=5) # Settings # -------- axAIC.set_xlim([np.min(position) - 0.1, np.max(position) + 0.1]) axAICres.set_xlabel('CCD position [pixel]') axBICres.set_xlabel('CCD position [pixel]') return
# plot the wave time = np.arange(0, nframes) * (1.0 / framerate) time2 = np.arange(0, len(volume11)) * (frameSize - overLap) * 1.0 / framerate pl.figure(figsize=(6, 2.5)) pl.plot(time, waveData) pl.ylabel("Amplitude", fontsize=11) pl.xlabel('Time(s)', fontsize=11) pl.ylim(-1, 1) pl.xticks([ 0.025, 0.100, 0.250, 0.365, 0.450, 0.540, 0.680, 0.939, 1.024, 1.150, 1.270, 1.350, 1.408, 1.507, 1.600 ], [ 0.025, '0.100', '0.250', 0.365, '0.450', '0.540', '0.680', 0.939, 1.024, '1.150', '1.270', '1.350', 1.408, 1.507, '1.600' ]) pl.tick_params(axis='x', rotation=50) pl.plot([0.025, 0.025], [-1, 1], linestyle='dashed', color='r') pl.plot([0.1, 0.1], [-1, 1], linestyle='dashed', color='r') pl.plot([0.25, 0.25], [-1, 1], linestyle='dashed', color='r') pl.plot([0.365, 0.365], [-1, 1], linestyle='dashed', color='r') pl.plot([0.45, 0.45], [-1, 1], linestyle='dashed', color='r') pl.plot([0.54, 0.54], [-1, 1], linestyle='dashed', color='r') pl.plot([0.68, 0.68], [-1, 1], linestyle='dashed', color='r') pl.plot([0.939, 0.939], [-1, 1], linestyle='dashed', color='r') pl.plot([1.024, 1.024], [-1, 1], linestyle='dashed', color='r') pl.plot([1.15, 1.15], [-1, 1], linestyle='dashed', color='r') pl.plot([1.27, 1.27], [-1, 1], linestyle='dashed', color='r') pl.plot([1.35, 1.35], [-1, 1], linestyle='dashed', color='r') pl.plot([1.408, 1.408], [-1, 1], linestyle='dashed', color='r') pl.plot([1.507, 1.507], [-1, 1], linestyle='dashed', color='r')
def imag_proc(file_name, num_of_tx, camera): BLACK = (0, 0, 0) WHITE = (255, 255, 255) BLUE = (255, 0, 0) GREEN = (0, 255, 0) RED = (0, 0, 255) YELLOW = (0, 255, 255) TEAL = (255, 255, 0) MAGENTA = (255, 0, 255) if 'PICS' in os.environ: debug = True else: debug = False if 'DEBUG' in os.environ and int(os.environ['DEBUG']) >= 3: logger.warn("DEBUG=3 doesn't save pictures any more") logger.warn( "I split saving pictures out to its own independent setting") logger.warn("Use PICS=1 to save intermediate images") if debug: global dbg_step dbg_step = 0 # Load image and convert to grayscale logger.start_op("Loading image") gray_image = cv2.imread(file_name, cv2.IMREAD_GRAYSCALE) logger.debug('gray_image.shape = {}'.format(gray_image.shape)) if debug: dbg_save('gray_image', gray_image) logger.end_op() # Handle orientation logger.start_op("Normalizing image rotation") if gray_image.shape[1] > gray_image.shape[0]: logger.debug("Rotated image") gray_image = numpy.rot90(gray_image, 3) else: logger.debug("No rotation") if debug: dbg_save('gray_image_rotated', gray_image) logger.debug('gray_image.shape = {}'.format(gray_image.shape)) logger.end_op() # Blur image logger.start_op("Applying blur") #m2 = cv2.GaussianBlur(gray_image, (31,31), 0) m2 = cv2.blur(gray_image, (50, 50)) # faster and good enough #m2 = cv2.blur(gray_image, (150,150)) # faster and good enough if debug: dbg_save('after_blur', m2) logger.debug('m2.shape = {}'.format(m2.shape)) logger.end_op() # Replace manual threshold with more efficient OTSU filter logger.start_op("Threshold image") #threshold, thresholded_img = cv2.threshold(m2, 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU) thresholded_img = cv2.adaptiveThreshold(m2, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 101, 2) if debug: dbg_save('thresholded_img', thresholded_img) logger.end_op() # Find and label disjoint sets of pixels (each transmitter blob) logger.start_op("Locate transmitters") # opencv3.0 has a connectedComponents API but that's sadly not released yet #ret, markers = cv2.connectedComponents(thresholded_img) # We solve this by drawing an outline ("contour") around each blob contours, heirarchy = cv2.findContours(thresholded_img, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE) if debug: # drawContours draws contours on the supplied image, need a copy contour_image = gray_image.copy() cv2.drawContours(contour_image, contours, -1, 255, 3) dbg_save('contours', contour_image) #contours_kept_image = gray_image.copy() contours_kept_image = cv2.imread(file_name, cv2.IMREAD_COLOR) # Draw the center point; useful for eyeballing calibration kept_center = (contours_kept_image.shape[1] / 2, contours_kept_image.shape[0] / 2) cv2.circle( contours_kept_image, kept_center, 5, # radius RED, # color -1 # fill circle ) cv2.circle( contours_kept_image, (kept_center[0], kept_center[1] + 20), 5, # radius RED, # color -1 # fill circle ) cv2.circle( contours_kept_image, (kept_center[0] + 20, kept_center[1]), 5, # radius RED, # color -1 # fill circle ) cv2.circle( contours_kept_image, (kept_center[0] + 40, kept_center[1]), 5, # radius RED, # color -1 # fill circle ) # And then fitting a circle to that contour centers = [] radii = [] for contour in contours: center, radius = cv2.minEnclosingCircle(contour) center = map(int, center) radius = int(radius) if radius <= 33: logger.debug( 'Skipping transmitter at {} with small radius ({} pixels)'. format(center, radius)) continue # For some reason minEnclosingCircle flips x and y? 
center = (center[1], center[0]) #assert thresholded_img[center[0], center[1]] == 1, 'Center of blob is not lit?' reject = False for pt in contour: # List of lists? Maybe some contour structure could have blob points? assert len(pt) == 1 pt = pt[0] # More x,y flip? if \ pt[1] < 10 or \ pt[0] < 10 or \ pt[1] > (thresholded_img.shape[0]-10) or \ pt[0] > (thresholded_img.shape[1]-10): reject = True logger.debug("Bad edge point: {}".format(pt)) break if reject: logger.debug('Rejecting edge contour at {}'.format(center)) continue contour_area = cv2.contourArea(contour) circle_area = math.pi * radius**2 logger.debug( 'Transmitter area {:0.1f}. Radius {} px. Contour area {}. %age {:0.1f}' .format(circle_area, radius, contour_area, (contour_area / circle_area) * 100)) if (contour_area / circle_area) < .5: logger.debug('Rejecting non-circular contour at {}'.format(center)) continue centers.append(center) radii.append(radius) if debug: cv2.drawContours(contours_kept_image, [ contour, ], -1, TEAL, 3) if debug: dbg_save('contours-kept', contours_kept_image) number_of_transmitters = len(centers) #assert number_of_transmitters >= 3, 'not enough transmitters' logger.end_op() # Compute transmitter frequencies logger.start_op("Computing transmitter frequencies") Fs = 1 / camera.rolling_shutter_r T = 1 / Fs # 2**14 good balance of speed / resolution [5~10 hz for small sample set] NFFT = 2**14 gain = 5 estimated_frequencies = [] window_size = 100 average_window = 40 avg_threshold = 20 light_circles = gray_image.copy() for i in xrange(number_of_transmitters): try: row_start = max(0, centers[i][0] - radii[i]) row_end = min(gray_image.shape[0] - 1, centers[i][0] + radii[i]) column_start = max(0, centers[i][1] - radii[i]) column_end = min(gray_image.shape[1] - 1, centers[i][1] + radii[i]) #Slice image around current center and sum across all rows image_slice = gray_image[row_start:row_end, column_start:column_end] image_slice_mean = numpy.mean(image_slice) image_row = numpy.sum(image_slice, axis=0) #Remove any DC component image_row = image_row - numpy.mean(image_row) #Apply window y = image_row * numpy.hamming(image_row.shape[0]) #Take FFT L = len(y) Y = numpy.fft.fft(y * gain, NFFT) / float(L) f = Fs / 2 * numpy.linspace(0, 1, NFFT / 2.0 + 1) Y_plot = 2 * abs(Y[0:NFFT / 2.0 + 1]) #TODO: Apply heuristic to determine SNR if debug: pylab.subplot(number_of_transmitters, 2, 2 * i + 1) pylab.title(str(centers[i]), size='xx-small') pylab.ylim([-13000, 13000]) pylab.yticks([-13000, 0, 13000]) pylab.tick_params(labelsize=4) pylab.plot(y) ##Improve center by thresholding image and obtaining minimum enclosing circle #_, image_slice_thresh = cv2.threshold(image_slice, image_slice_mean*1.5, 1, cv2.THRESH_BINARY) #image_slice_thresh_contours, _ = cv2.findContours(image_slice_thresh, cv2.RETR_LIST, # cv2.CHAIN_APPROX_SIMPLE) #image_slice_thresh_contours = numpy.vstack(image_slice_thresh_contours) #center, radius = cv2.minEnclosingCircle(image_slice_thresh_contours) #center = map(int, center) #radius = int(radius) #center = (center[1] + row_start, center[0] + column_start) #cv2.circle(light_circles, (center[1], center[0]), radius + 3, WHITE, 3) #centers[i] = center #radii[i] = radius #Find the best fit for the largest circle radius = int(image_slice.shape[0] / 2) first_time = True max_val = 0 circle_area = 0 max_loc = (0, 0) while radius > 0: last_radius = radius last_max_loc = max_loc last_max_val = max_val last_circle_area = circle_area circle_template = numpy.zeros((radius * 2 + 1, radius * 2 + 1), type(image_slice[0][0])) 
cv2.circle(circle_template, (radius, radius), radius, WHITE, -1) res = cv2.matchTemplate(image_slice, circle_template, cv2.TM_CCORR) min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res) circle_area = math.pi * math.pow(radius, 2) #Continue to decrease the circle size until more than 10% of the remaining pixels #print('{} {}'.format(max_val, last_max_val)) if first_time or max_val > last_max_val * ( (.4 * circle_area + .6 * last_circle_area) / last_circle_area): first_time = False radius = radius - 1 else: #print('GOT HERE') radii[i] = last_radius centers[i] = (row_start + last_max_loc[1] + last_radius + 1, column_start + last_max_loc[0] + last_radius + 1) cv2.circle(light_circles, (centers[i][1], centers[i][0]), radius + 5, WHITE, 3) break if radii[i] <= 33: raise NotImplementedError("hack") if debug: pylab.subplot(number_of_transmitters, 2, 2 * i + 2) pylab.plot(f, Y_plot) pylab.title(str(centers[i]), size='xx-small') #pylab.xlabel('Frequency (Hz)') pylab.xlim([0, 16000]) pylab.tick_params(labelsize=4) peaks = scipy.signal.argrelmax(Y_plot)[0] logger.debug2('peaks =\n{}'.format(peaks)) logger.debug2('f[peaks] =\n{}'.format(f[peaks])) logger.debug2('Y_plot[peaks] =\n{}'.format(Y_plot[peaks])) idx = numpy.argmax(Y_plot[peaks]) peak_freq = f[peaks[idx]] logger.debug('center {}\tradius {}\tpeak_freq = {}'.format( centers[i], radii[i], peak_freq)) if debug: cv2.circle(contours_kept_image, (centers[i][1], centers[i][0]), 5, GREEN, -1) cv2.circle(contours_kept_image, (centers[i][1], centers[i][0]), radius + 5, GREEN, 2) cv2.putText( contours_kept_image, "({} {}) {} Hz".format(centers[i][1], centers[i][0], int(peak_freq)), (centers[i][1] + 100, centers[i][0]), cv2.FONT_HERSHEY_TRIPLEX, 2, YELLOW) estimated_frequencies.append(peak_freq) except: logger.debug("Dropped failed center at {}".format(centers[i])) estimated_frequencies.append(10) if debug: dbg_plot_subplots('freq_fft_transmitters') dbg_save('contours-kept-labeled', contours_kept_image) dbg_save('circles', light_circles) logger.end_op() centers = numpy.array(centers) radii = numpy.array(radii) estimated_frequencies = numpy.array(estimated_frequencies) return (centers, radii, estimated_frequencies, gray_image.shape)
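The frequency-estimation loop above is dense. The sketch below isolates its core idea only: sum the pixel rows of a blob, remove the DC component, apply a Hamming window, take an FFT at the rolling-shutter sample rate, and keep the strongest local maximum. It is a condensed sketch, not the full pipeline (no gain factor, SNR heuristic, or circle refit), and the sample rate and synthetic input are assumptions for illustration only.

import numpy as np
import scipy.signal

def estimate_row_frequency(image_slice, fs, nfft=2**14):
    """Estimate the dominant flicker frequency along the columns of a blob.

    image_slice : 2-D array cropped around one transmitter blob
    fs          : effective sample rate set by the rolling shutter, in Hz
    """
    row = image_slice.sum(axis=0).astype(float)   # collapse rows -> per-column signal
    row -= row.mean()                             # remove DC component
    row *= np.hamming(row.size)                   # reduce spectral leakage
    spectrum = np.abs(np.fft.rfft(row, nfft))
    freqs = np.fft.rfftfreq(nfft, d=1.0 / fs)
    peaks = scipy.signal.argrelmax(spectrum)[0]
    if len(peaks) == 0:
        return 0.0
    return freqs[peaks[np.argmax(spectrum[peaks])]]

# Illustrative check with a synthetic 2 kHz stripe pattern sampled at 30 kHz
fs = 30000.0
cols = np.arange(400)
stripe = 128 + 100 * np.sin(2 * np.pi * 2000.0 * cols / fs)
blob = np.tile(stripe, (50, 1))                   # 50 identical rows, flicker along columns
print(estimate_row_frequency(blob, fs))           # ~2000 Hz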
def plot(self, dir1, dir2):
    read = LIGGGHTSER.read.Read()
    top = read.read_ave(dir1)
    ke = read.read_ave(dir2)
    plt.figure(figsize=(20, 15))
    font1 = {'weight': 'normal', 'size': 18}
    fontlabel = {'weight': 'normal', 'size': 18}

    # Friction ratio: lateral force over normal force on the top mesh
    ax411 = plt.subplot(411)
    x = np.array(top['v_xForce'])
    y = np.array(top['v_yForce'])
    friction = x / y
    l11 = plt.plot(top['TimeStep'], friction, linewidth=5.0, linestyle='-')
    # plt.xlim((3e7, 6e7))
    # plt.ylim((0.1, 0.51))
    # plt.xticks(np.arange(3e7, 6e7, 0.5e7))
    # plt.yticks(np.arange(0.1, 0.5, 0.1))
    plt.tick_params(labelsize=18, direction='in', pad=15)
    for spine in ax411.spines.values():
        spine.set_linewidth(3)
    plt.title('Friction', loc='right', fontsize=24, pad=10)
    # ax411.set_title('Friction', fontsize=18)
    # plt.xlabel('TimeStep')
    plt.ylabel('Friction ratio', fontlabel)

    ax412 = plt.subplot(412)
    l21 = plt.plot(top['TimeStep'], top['v_yPos'], linewidth=5.0, linestyle='-')
    # plt.xlim((3e7, 6e7))
    # plt.ylim((0.148, 0.150))
    # plt.xticks(np.arange(3e7, 6e7, 0.5e7))
    # plt.yticks(np.arange(1.48e-1, 1.5e-1, 5e-4))
    plt.tick_params(labelsize=18, direction='in', pad=15)
    for spine in ax412.spines.values():
        spine.set_linewidth(3)
    plt.title('Mesh Position', loc='right', fontsize=24, pad=10)
    # plt.xlabel('TimeStep')
    plt.ylabel('Topmesh Position', fontlabel)

    ax413 = plt.subplot(413)
    l31 = plt.plot(ke['TimeStep'], ke['c_2'], linewidth=5.0, linestyle='-', zorder=30)
    # plt.xlim((3e7, 6e7))
    # plt.ylim((0, 10000))
    # plt.xticks(np.arange(3e7, 6e7, 0.5e7))
    # plt.yticks(np.arange(0, 10000, 1000))
    plt.tick_params(labelsize=18, direction='in', pad=15)
    for spine in ax413.spines.values():
        spine.set_linewidth(3)
    # ax413.set_yscale("log")
    # plt.title('Energy', loc='right', fontsize=24, pad=10)
    # plt.xlabel('TimeStep')
    plt.ylabel('Kinetic Energy', fontlabel)

    plt.subplots_adjust(left=0.1, right=0.97, bottom=0.05, top=0.95,
                        wspace=0.1, hspace=0.4)
    plt.show()
times = evoked.times * 1000
sel = fiff.pick_types(evoked.info, meg=False, eeg=False, include=channelList)
print(sel)
data = evoked.data[sel] * 1e13
# root-mean-square amplitude across the selected channels
square = np.power(data, 2)
meanSquare = np.mean(square, 0)
rms = np.power(meanSquare, .5)
pl.plot(times, rms, color=colorList[c], linewidth=lWidth)
pl.ylim([ymin, ymax])
pl.xlim([xmin, xmax])
pl.box('off')  # turn off the box frame
pl.axhline(y=0, xmin=0, xmax=1, color='k', linewidth=2)  # thicker horizontal line at 0
pl.axvline(x=0, ymin=0, ymax=1, color='k', linewidth=2)  # vertical line at 0 spanning the full y-range
pl.tick_params(axis='both', right='off', top='off')  # turn off the tick marks on the right and top
pl.yticks(np.array([0., 4., 8., 12., 16.]))
pl.xticks(np.array([0, 200, 400, 600]))
# pl.title(hem + group)
# pl.show()
outFile = (results_path + args.prefix + '-' + str(args.set1) + '-' +
           str(args.set2) + '-' + group + '.png')
pl.savefig(outFile)
def PLOTdetrendORBITfluxFULL(time, flux, orbitalPHASE, TCK, **kwargs):
    """
    Routine going with detrendORBITflux to visualize the detrending of the
    photometry for the instrumental correction between the satellite's
    orbital phase and BRITE flux.

    Returns: Figure with -hopefully- enough diagnostics to determine the
    strengths / weaknesses of the detrending

    @param time: time measurements [d]
    @type time: numpy array of length N
    @param flux: flux measurements [adu]
    @type flux: numpy array of length N
    @param orbitalPHASE: orbital phase measurements []; ranges from 0 to 1
    @type orbitalPHASE: numpy array of length N
    @param TCK: spline tck for the favored fit
    @type TCK: tuple
    """
    # Calculating the corrections
    # ---------------------------
    fluxCORRECTION = scInterp.splev(orbitalPHASE, TCK)

    # Setting up the figure window
    # ----------------------------
    figORBITcorr = pl.figure(figsize=(16, 16))
    gsORBITcorr = gridspec.GridSpec(2, 2, height_ratios=[1, 1], width_ratios=[3, 2])
    axTIMEorig = figORBITcorr.add_subplot(gsORBITcorr[0, 0])   # Initial flux with time
    axTIMEcorr = figORBITcorr.add_subplot(gsORBITcorr[1, 0],
                                          sharex=axTIMEorig,
                                          sharey=axTIMEorig)   # Corrected flux with time
    axORBITorig = figORBITcorr.add_subplot(gsORBITcorr[0, 1],
                                           sharey=axTIMEorig)  # Initial flux with orbitalPHASE
    axORBITcorr = figORBITcorr.add_subplot(gsORBITcorr[1, 1],
                                           sharex=axORBITorig,
                                           sharey=axTIMEorig)  # Corrected flux with orbitalPHASE

    # Panels related to time
    # ----------------------
    axTIMEorig.plot(time, flux, 'k.', ms=6, alpha=.4)
    pl.tick_params('both', length=10, width=2, which='major')
    pl.tick_params('both', length=10, width=1, which='minor')
    pyplot.locator_params(axis='x', nbins=5)
    pyplot.locator_params(axis='y', nbins=5)
    axTIMEorig.set_title('Original')
    axTIMEorig.set_ylabel('Flux [adu]')

    axTIMEcorr.plot(time, flux - fluxCORRECTION, 'k.', ms=6, alpha=.4)
    pl.tick_params('both', length=10, width=2, which='major')
    pl.tick_params('both', length=10, width=1, which='minor')
    pyplot.locator_params(axis='x', nbins=5)
    pyplot.locator_params(axis='y', nbins=5)
    axTIMEcorr.set_title('After correction')
    axTIMEcorr.set_ylabel('Flux [adu]')

    # Panels related to orbitalPHASE
    # ------------------------------
    axORBITorig.plot(orbitalPHASE, flux, 'k.', ms=6, alpha=.4)
    axORBITorig.plot(orbitalPHASE + 1., flux, 'k.', ms=6, alpha=.4)
    axORBITorig.plot(orbitalPHASE, fluxCORRECTION, 'r.', ms=6, alpha=.8)
    axORBITorig.plot(orbitalPHASE + 1., fluxCORRECTION, 'r.', ms=6, alpha=.8)
    pl.tick_params('both', length=10, width=2, which='major')
    pl.tick_params('both', length=10, width=1, which='minor')
    pyplot.locator_params(axis='x', nbins=5)
    pyplot.locator_params(axis='y', nbins=5)
    axORBITorig.set_title('Correction')

    axORBITcorr.plot(orbitalPHASE, flux - fluxCORRECTION, 'k.', ms=6, alpha=.4)
    axORBITcorr.plot(orbitalPHASE + 1., flux - fluxCORRECTION, 'k.', ms=6, alpha=.4)
    pl.tick_params('both', length=10, width=2, which='major')
    pl.tick_params('both', length=10, width=1, which='minor')
    pyplot.locator_params(axis='x', nbins=5)
    pyplot.locator_params(axis='y', nbins=5)
    axORBITcorr.set_title('Residuals correction')
    axORBITcorr.set_ylabel('Flux [adu]')

    # Settings
    # --------
    axTIMEorig.set_xlim([np.min(time), np.max(time)])
    axTIMEorig.set_ylim([np.min(flux) * 1.2, np.max(flux) * 1.2])
    axORBITorig.set_xlim([np.min(orbitalPHASE) - 0.1, np.max(orbitalPHASE) + 1.1])
    axTIMEcorr.set_xlabel('Time [d]')
    axTIMEorig.set_ylabel('Flux [adu]')
    axTIMEcorr.set_ylabel('Flux [adu]')
    axORBITcorr.set_xlabel('Orbital phase')
    return
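The plotting routine above consumes a spline `TCK` produced by `detrendORBITflux`, which is not shown here. The following is a minimal sketch, under stated assumptions, of how such a tck tuple could be built with scipy's `splrep`; the knot spacing, spline order, and helper name are illustrative, not the original implementation.

import numpy as np
import scipy.interpolate as scInterp

def detrend_orbit_flux_sketch(orbitalPHASE, flux, knot_spacing=0.05, order=3):
    """Minimal sketch: fit a smoothing spline of flux versus orbital phase and
    return the (t, c, k) tuple expected by PLOTdetrendORBITfluxFULL.
    Knot spacing and spline order are illustrative choices only."""
    srt = np.argsort(orbitalPHASE)
    phase, f = orbitalPHASE[srt], flux[srt]
    knots = np.arange(knot_spacing, 1.0, knot_spacing)
    # interior knots must lie strictly inside the data range
    knots = knots[(knots > phase.min()) & (knots < phase.max())]
    TCK = scInterp.splrep(phase, f, t=knots, k=order)
    return TCK

# Usage with the plotting routine above (arrays are placeholders):
# TCK = detrend_orbit_flux_sketch(orbitalPHASE, flux)
# PLOTdetrendORBITfluxFULL(time, flux, orbitalPHASE, TCK)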
def plot_numpoints(targname, filt, exp_length, flashlvl, aperture, outloc, ylims): """ Plots fraction of non-ctecorr to ctecorr sources recovered vs fluxbin, for a subset of epochs. """ proposid_ls = ['12379', '12692', '13083', '13566', '14012'] color_ls = ['red', 'orange', 'lime', 'green', 'blue'] marker_ls = ['o', 'd', 's', '*', '^'] #color_ls = itertools.cycle(colors) epoch_ls = [] #proposid_ls = [] for proposid in proposid_ls: if exp_length == 'l': if '104' in targname: exptime = 348 elif '6791' in targname: exptime = 420 elif exp_length == 's': if '104' in targname: exptime = 30 elif '6791' in targname: exptime = 60 epochs = query_for_dateobss(targname, proposid, filt, exptime) epochs = list(set(epochs)) epoch_ls.append(epochs[-1]) #for epoch in epochs: # proposid_ls.append(proposid) print("epoch_ls: {}".format(epoch_ls)) # Set the figure. pylab.figure(figsize=(12.5,8.5)) for epoch, proposid, color, marker in zip(epoch_ls, proposid_ls, color_ls, marker_ls): # ctecorr True slopes_ctecorr, stderrs_ctecorr, mjds_ctecorr, fluxbins_ctecorr, numpoints_ctecorr = query_db_python( targname, filt, exp_length, flashlvl, ctecorr=True, aperture=aperture, epoch=epoch, proposid=proposid) mjds_ctecorr_cut = np.array(mjds_ctecorr[np.where(mjds_ctecorr == epoch)]) fluxbins_ctecorr_cut = np.array(fluxbins_ctecorr[np.where(mjds_ctecorr == epoch)]) numpoints_ctecorr_cut = np.array(numpoints_ctecorr[np.where(mjds_ctecorr == epoch)]) # ctecorr False slopes, stderrs, mjds, fluxbins, numpoints, = query_db_python( targname, filt, exp_length, flashlvl, ctecorr=False, aperture=aperture, epoch=epoch, proposid=proposid) mjds_cut = np.array(mjds[np.where(mjds == epoch)]) fluxbins_cut = np.array(fluxbins[np.where(mjds == epoch)]) numpoints_cut = np.array(numpoints[np.where(mjds == epoch)]) print("fluxbins_cut: {}".format(fluxbins_cut)) print("numpoints_cut: {}".format(numpoints_cut)) print("numpoints_ctecorr_cut: {}".format(numpoints_ctecorr_cut)) if len(numpoints_cut) == len(numpoints_ctecorr_cut) and len(numpoints_cut) != 0: fluxbins_log = [] for fluxbin in fluxbins_cut: fluxlo = float(fluxbin.split('-')[0]) fluxhi = float(fluxbin.split('-')[1]) flux_av = (fluxlo + fluxhi)/2.0 fluxbins_log.append(np.log10(flux_av)) # Calculate fraction recovered frac_recovered =(1.0 - (numpoints_ctecorr_cut.astype(float) - numpoints_cut.astype(float))/ numpoints_ctecorr_cut.astype(float))*100. print("frac_recovered: {}".format(frac_recovered)) # Set the next color in sequence. #color = next(color_ls) pylab.scatter(fluxbins_log, frac_recovered, s=120, marker=marker, color='grey', alpha=0.6, label='MJD={}'.format(epoch)) pylab.xlabel('LOG10 Flux [e-])', fontsize=22, weight='bold') pylab.ylabel('% Sources Recovered w/ CTEcorr', fontsize=22, weight='bold') pylab.axhline(y=100.0, linewidth=2, linestyle='--', color='grey') pylab.tick_params(axis='both', which='major', labelsize=20) title = "{} {} explen'{}' pf{} ap{}".format(targname, filt, exp_length, flashlvl, aperture) pylab.title(title, fontsize=16) pylab.xlim([2.5, 4.5]) pylab.ylim([40.,120]) pylab.legend(scatterpoints=1, loc='lower right') pylab.savefig(os.path.join(outloc, '{}_{}_{}_pf{}_r{}_fracrecoverd.png'.format( targname, filt, exp_length, flashlvl, aperture)), bbox_inches='tight')
value = 'Density'   # Choose quantity to plot: density, pressure, velocity
colormap = cm.jet   # Choose colormap to use
title = 'Colormap of ' + value + ' for Bessel Beam'
tickSize = 14
labelSize = 18
titleSize = 24

## Plot colormap at some time
fig = plt.figure()
ax = fig.gca()
if value == 'Density':
    data = rawData[:, :, 0, t]
plt.imshow(data, origin='lower', cmap=colormap)
plt.tick_params(labelsize=tickSize)
plt.title(title, fontsize=titleSize)
plt.xlabel('X Cell Number', fontsize=labelSize)
plt.ylabel('Y Cell Number', fontsize=labelSize)
plt.xlim([3, Nx - 2])
plt.ylim([3, Ny - 2])
m = cm.ScalarMappable(cmap=colormap)
m.set_array(data)
plt.colorbar(m)
dataNameShort = dataName[0:-4]
plt.savefig(dataNameShort + 't' + str(t) + value + '.png')
#plt.show()
def labelPlot(xlab, ylab, title, textsize):
    """Apply tick-label, axis-label, and title formatting with one font size."""
    plt.tick_params(labelsize=textsize)
    plt.xlabel(xlab, fontsize=textsize)
    plt.ylabel(ylab, fontsize=textsize)
    plt.title(title, fontsize=textsize)
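A short usage sketch of the helper above; the data and labels are made up for illustration.

import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0, 2 * np.pi, 200)
plt.plot(x, np.sin(x))
labelPlot('Time (s)', 'Amplitude', 'Example sine trace', textsize=14)
plt.show()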
def cnn_visualization(folder_name=None, h5name=None, num=None): """ NAME: cnn_visualization PURPOSE: To visualize CNN model INPUT: folder_name = parent folder name data = OUTPUT: plots HISTORY: 2017-Nov-02 Henry Leung """ # Set number of spectra for CNN visualization if num is None: num = 20 random.seed(3) data = h5name + '_train.h5' currentdir = os.getcwd() fullfolderpath = currentdir + '/' + folder_name vis_parent_path = os.path.join(fullfolderpath, 'cnn_visual') # load model modelname = '/model_{}.h5'.format(folder_name[-11:]) model = load_model(os.path.normpath(fullfolderpath + modelname)) layer_1 = K.function([model.layers[0].input, K.learning_phase()], [model.layers[1].output]) layer_2 = K.function([model.layers[0].input, K.learning_phase()], [model.layers[2].output]) target = np.load(fullfolderpath + '/targetname.npy') spec_meanstd = np.load(fullfolderpath + '/spectra_meanstd.npy') with h5py.File(data) as F: # ensure the file will be cleaned up i = 0 index_not9999 = [] for tg in target: temp = np.array(F['{}'.format(tg)]) temp_index = np.where(temp != -9999) if i == 0: index_not9999 = temp_index i += 1 else: index_not9999 = reduce(np.intersect1d, (index_not9999, temp_index)) spectra = np.array(F['spectra']) rel_index = np.array(F['index']) spectra = spectra[index_not9999] spectra -= spec_meanstd[0] spectra /= spec_meanstd[1] random_number = num ran = random.sample(range(0, spectra.shape[0], 1), random_number) spectra = spectra[ran] rel_index = rel_index[ran] num_label = spectra.shape[1] for i in range(random_number): temp_path = os.path.join(vis_parent_path, str(i)) if not os.path.exists(temp_path): os.makedirs(temp_path) reshaped = spectra[i].reshape((1, num_label, 1)) layer_1_output = layer_1([reshaped, 0])[0] layer_2_output = layer_2([reshaped, 0])[0] apogee_id = astroNN.NN.train_tools.apogee_id_fetch(relative_index=rel_index, dr=14) plt.figure(figsize=(30, 15), dpi=200) plt.rcParams['axes.grid'] = False plt.plot(spectra[i] * spec_meanstd[1] + spec_meanstd[0], alpha=0.8, linewidth=0.7, label='APOGEE Spectra') plt.xlabel('Pixel', fontsize=25) plt.ylabel('Flux ', fontsize=25) plt.title(apogee_id[i], fontsize=30) plt.xlim((0, num_label)) plt.ylim((0.5, 1.5)) plt.tick_params(labelsize=20, width=1, length=10) leg = plt.legend(loc='best', fontsize=20) for legobj in leg.legendHandles: legobj.set_linewidth(4.0) plt.tight_layout() plt.savefig(temp_path + '/spectra_{}.png'.format(apogee_id[i])) plt.close('all') plt.clf() plt.figure(figsize=(25, 20), dpi=200) plt.ylabel('Pixel', fontsize=35) plt.xlabel('CNN Filter number', fontsize=35) plt.title(apogee_id[i], fontsize=30) plt.imshow(layer_1_output[0, :, :], aspect='auto', norm=colors.PowerNorm(gamma=1. / 2.), cmap='gray') plt.tick_params(labelsize=25, width=1, length=10) cbar = plt.colorbar() cbar.ax.tick_params(labelsize=25, width=1, length=10) plt.tight_layout() plt.savefig(temp_path + '/cnn_layer1.png') plt.close('all') plt.clf() plt.figure(figsize=(25, 20), dpi=200) plt.ylabel('Pixel', fontsize=35) plt.xlabel('CNN Filter number', fontsize=35) plt.title(apogee_id[i], fontsize=30) plt.imshow(layer_2_output[0, :, :], aspect='auto', norm=colors.PowerNorm(gamma=1. / 2.), cmap='gray') plt.tick_params(labelsize=25, width=1, length=10) cbar = plt.colorbar() cbar.ax.tick_params(labelsize=25, width=1, length=10) plt.tight_layout() plt.savefig(temp_path + '/cnn_layer2.png') plt.close('all') plt.clf()
burn = 3500
print(r'# mean, st. dev., best $\chi^2$')
plottinghist('CC.chain', 'r', 900)
plottinghist('DC.chain', '--r', burn)
plottinghist('GC.chain', ':r', burn)
plottinghist('CD.chain', 'b', 700)
plottinghist('DD.chain', '--b', 700)
plottinghist('GD.chain', ':b', 900)
plottinghist('CG.chain', 'g', 8500)
plottinghist('DG.chain', '--g', burn)
plottinghist('GG.chain', ':g', 500)
#pylab.axvline(x=Rhalf(50.0,30.0,15.0,10.0),color='r',ls='-.',lw=2)
#pylab.axvline(x=50.0/2.0**0.5,color='b',ls='-.',lw=2)
#pylab.axvline(x=numpy.log(4.0)**0.5*50.0/3.0,color='g',ls='-.',lw=2)
pylab.legend(loc=9, fontsize=12)
pylab.xlim(25, 102)
#pylab.xlim(15,50)
pylab.xlabel(r'$\mathrm{R_{p}}$', fontsize=20)
pylab.ylabel(r'$\mathrm{Probability\ density\ function}$', fontsize=20)
pylab.ylim(ymax=1.0)
pylab.axvline(x=50, color='k', ls='-.', lw=2)
# set tick-label sizes before saving so they take effect in the output file
pylab.tick_params(axis='both', which='major', labelsize=20)
pylab.tick_params(axis='both', which='minor', labelsize=20)
pylab.savefig('Rp4all.eps')
pylab.show()
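The calls above rely on a `plottinghist` helper that is not part of this excerpt. Below is a minimal sketch of what such a helper might do; the column layout of the .chain files and the style-string convention (trailing character is the colour, the rest the line style) are assumptions, and the best-χ² value mentioned in the header is not reproduced here.

import numpy
import pylab

def plottinghist(chainfile, style, burn, column=0, bins=50):
    """Sketch: histogram one parameter of an MCMC chain after burn-in."""
    color = style[-1]
    ls = style[:-1] or '-'
    samples = numpy.loadtxt(chainfile)[burn:, column]
    print('%.2f %.2f' % (samples.mean(), samples.std()))
    pylab.hist(samples, bins=bins, histtype='step', density=True,
               color=color, linestyle=ls,
               label=chainfile.replace('.chain', ''))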
l11 = plt.plot(top1['TimeStep'], friction, linewidth=linewidth_set,
               linestyle='-', label='Unbreakable')
x = np.array(top2['v_xForce'])
y = np.array(top2['v_yForce'])
friction = x / y
l12 = plt.plot(top2['TimeStep'], friction, linewidth=linewidth_set,
               linestyle='-', label='breakable s=120MPa')
plt.tick_params(labelsize=18, direction='in', pad=15)
plt.xlim((xlow, xhigh))
plt.ylim((0.2, 0.51))
plt.yticks(np.arange(0.2, 0.51, 0.1))
for spine in ax411.spines.values():
    spine.set_linewidth(3)
plt.ylabel('Friction', fontlabel)
# plt.legend([l11, l12], labels=['Unbreakable', 'breakable s=120MPa'],
#            loc='lower right', prop=font1)

# second y-axis for the number of broken atoms
ax411y2 = ax411.twinx()
l13 = ax411y2.plot(timestep2, break_atom2, linewidth=linewidth_set,
                   color='blue', linestyle='-',
def embed(words, matrix, classes, usermodel, fname):
    # We set the perplexity to the square root of the number of words
    perplexity = int(len(words) ** 0.5)
    embedding = TSNE(n_components=2, perplexity=perplexity, metric="cosine",
                     n_iter=500, init="pca")
    y = embedding.fit_transform(matrix)
    print("2-d embedding finished", file=sys.stderr)

    class_set = [c for c in set(classes)]
    colors = plot.cm.rainbow(np.linspace(0, 1, len(class_set)))
    class2color = [colors[class_set.index(w)] for w in classes]

    xpositions = y[:, 0]
    ypositions = y[:, 1]
    seen = set()

    plot.clf()
    for color, word, class_label, x, y in zip(class2color, words, classes,
                                              xpositions, ypositions):
        plot.scatter(x, y, 20, marker=".", color=color,
                     label=class_label if class_label not in seen else "")
        seen.add(class_label)
        lemma = word.split("_")[0].replace("::", " ")
        mid = len(lemma) / 2
        mid *= 4  # TODO Should really think about how to adapt this variable to the real plot size
        plot.annotate(lemma, xy=(x - mid, y), size="x-large", weight="bold",
                      fontproperties=font, color=color)

    plot.tick_params(axis="x", which="both", bottom=False, top=False, labelbottom=False)
    plot.tick_params(axis="y", which="both", left=False, right=False, labelleft=False)
    plot.legend(loc="best")
    plot.savefig(root + "data/images/tsneplots/" + usermodel + "_" + fname + ".png",
                 dpi=150, bbox_inches="tight")
    plot.close()
    plot.clf()
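A hypothetical call to `embed` for reference; the lemmas, classes, and vectors are random placeholders, and the function additionally expects module-level `root` (output path prefix) and `font` (a FontProperties object) to be defined, with the directory root + "data/images/tsneplots/" already existing.

import numpy as np

# Illustrative inputs: six tagged lemmas in two classes, 50-d random vectors
words = ['cat_NOUN', 'dog_NOUN', 'horse_NOUN', 'run_VERB', 'jump_VERB', 'swim_VERB']
matrix = np.random.rand(len(words), 50)
classes = ['animal', 'animal', 'animal', 'action', 'action', 'action']

embed(words, matrix, classes, usermodel='demo_model', fname='demo')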
def BenchmarkSummaryFigure(models, variables, data, figname, vcolor=None, rel_only=False): """Creates a summary figure for the benchmark results contained in the data array. Parameters ---------- models : list a list of the model names variables : list a list of the variable names data : numpy.ndarray or numpy.ma.ndarray data scores whose shape is ( len(variables), len(models) ) figname : str the full path of the output file to write vcolor : list, optional an array parallel to the variables array containing background colors for the labels to be displayed on the y-axis. """ from mpl_toolkits.axes_grid1 import make_axes_locatable # data checks assert type(models) is type(list()) assert type(variables) is type(list()) assert (type(data) is type(np.empty(1)) or type(data) is type(np.ma.empty(1))) assert data.shape[0] == len(variables) assert data.shape[1] == len(models) assert type(figname) is type("") if vcolor is not None: assert type(vcolor) is type(list()) assert len(vcolor) == len(variables) # define some parameters nmodels = len(models) nvariables = len(variables) maxV = max([len(v) for v in variables]) maxM = max([len(m) for m in models]) wpchar = 0.1 wpcell = 0.19 hpcell = 0.25 w = maxV * wpchar + max(4, nmodels) * wpcell if not rel_only: w += (max(4, nmodels) + 1) * wpcell h = maxM * wpchar + nvariables * hpcell + 1.0 bad = 0.5 if "stoplight" not in plt.colormaps(): RegisterCustomColormaps() # plot the variable scores if rel_only: fig, ax = plt.subplots(figsize=(w, h), ncols=1, tight_layout=True) ax = [ax] else: fig, ax = plt.subplots(figsize=(w, h), ncols=2, tight_layout=True) # absolute score if not rel_only: cmap = plt.get_cmap('stoplight') cmap.set_bad('k', bad) qc = ax[0].pcolormesh(np.ma.masked_invalid(data[::-1, :]), cmap=cmap, vmin=0, vmax=1, linewidth=0) div = make_axes_locatable(ax[0]) fig.colorbar(qc, ticks=(0, 0.25, 0.5, 0.75, 1.0), format="%g", cax=div.append_axes("bottom", size="5%", pad=0.05), orientation="horizontal", label="Absolute Score") plt.tick_params(which='both', length=0) ax[0].xaxis.tick_top() ax[0].set_xticks(np.arange(nmodels) + 0.5) ax[0].set_xticklabels(models, rotation=90) ax[0].set_yticks(np.arange(nvariables) + 0.5) ax[0].set_yticklabels(variables[::-1]) ax[0].tick_params('both', length=0, width=0, which='major') ax[0].tick_params(axis='y', pad=10) if vcolor is not None: for i, t in enumerate(ax[0].yaxis.get_ticklabels()): t.set_backgroundcolor(vcolor[::-1][i]) # relative score i = 0 if rel_only else 1 np.seterr(invalid='ignore', under='ignore') data = np.ma.masked_invalid(data) data.data[data.mask] = 1. data = np.ma.masked_values(data, 1.) 
mean = data.mean(axis=1) std = data.std(axis=1).clip(0.02) np.seterr(invalid='ignore', under='ignore') Z = (data - mean[:, np.newaxis]) / std[:, np.newaxis] Z = np.ma.masked_invalid(Z) np.seterr(invalid='warn', under='raise') cmap = plt.get_cmap('RdGn') cmap.set_bad('k', bad) qc = ax[i].pcolormesh(Z[::-1], cmap=cmap, vmin=-2, vmax=2, linewidth=0) div = make_axes_locatable(ax[i]) fig.colorbar(qc, ticks=(-2, -1, 0, 1, 2), format="%+d", cax=div.append_axes("bottom", size="5%", pad=0.05), orientation="horizontal", label="Relative Score") plt.tick_params(which='both', length=0) ax[i].xaxis.tick_top() ax[i].set_xticks(np.arange(nmodels) + 0.5) ax[i].set_xticklabels(models, rotation=90) ax[i].tick_params('both', length=0, width=0, which='major') ax[i].set_yticks([]) ax[i].set_ylim(0, nvariables) if rel_only: ax[i].set_yticks(np.arange(nvariables) + 0.5) ax[i].set_yticklabels(variables[::-1]) if vcolor is not None: for i, t in enumerate(ax[i].yaxis.get_ticklabels()): t.set_backgroundcolor(vcolor[::-1][i]) # save figure fig.savefig(figname)
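A hypothetical call to `BenchmarkSummaryFigure` for reference; the model names, variable names, scores, and label colors below are placeholders, and the custom 'stoplight' and 'RdGn' colormaps must be available (the function registers them via RegisterCustomColormaps when 'stoplight' is missing).

import numpy as np

# Scores must have the shape the docstring requires: (len(variables), len(models))
models = ['ModelA', 'ModelB', 'ModelC']
variables = ['Gross Primary Productivity', 'Leaf Area Index', 'Precipitation']
scores = np.random.rand(len(variables), len(models))

BenchmarkSummaryFigure(models, variables, scores, 'benchmark_summary.png',
                       vcolor=['#ECFFE6', '#E6F9FF', '#FFECE6'])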
#             r' $\epsilon$ = ' + str(params['eps']),
#             fontsize=14, fontweight='bold')
## [GHO08a], [VUK13]
# fig.suptitle('FHN - Local Dynamics : ' + r'$\alpha$ = ' + str(params['alpha']) +
#              r' $ \gamma$ = ' + str(params['gamma']) + ' $ b$ = ' + str(params['b']) +
#              r' $\tau$ = ' + str(params['TAU']) + ' D = ' + str(params['D']),
#              fontsize=25)

pl.subplot(121)
pl.xlabel('t', fontsize=30)
pl.ylabel('x(t) , y(t)', fontsize=30)
pl.plot(t, x, 'r', label='$x(t)$')
pl.plot(t, y, 'b', label='$y(t)$')
pl.tick_params(labelsize=30)
#pl.plot(sol_adap['t'], sol_adap['x'], '.r', linewidth=0.2)
#pl.plot(sol_adap['t'], sol_adap['y'], '.b', linewidth=0.2)
pl.axis([0, tfinal, -3, 3])
lg = legend(prop={'size': 30})
lg.draw_frame(False)

pl.subplot(122)
pl.xlabel('x', fontsize=30)
pl.ylabel('y', fontsize=30)
pl.plot(x, y, 'k', label='$x(t),y(t)$')
pl.tick_params(labelsize=30)
pl.plot(x[0], y[0], '.r', markersize=25)
#pl.plot(x_, y_, 'b', label='$x_{nullcline}$')
#pl.plot(xx, yy, 'k', label='$y_{nullcline}$')
#pl.plot(-params['a'], (-params['a'] + pow(params['a'], 3) / 3), 'ok')
#pl.xlim(xmin=1)
pl.ylim(ymin=0)
if int(args.matrixsize) != 1:
    ax.legend((methodsReal), 'lower left', shadow=True, fancybox=True)
else:
    ax.legend((methodsReal), 'upper left', shadow=True, fancybox=True)

# take the real time of the sequential computation to figure out the
# granularity of the y-axis
tmp_ticks = ax.yaxis.get_majorticklocs()
granu = tmp_ticks[len(tmp_ticks) - 1] // (len(tmp_ticks) - 1) // 5
if granu == 0.0:
    granu = 1.0
ax.yaxis.set_minor_locator(MultipleLocator(granu))

pl.tick_params(axis='both', which='major', labelsize=6)
pl.tick_params(axis='both', which='minor', labelsize=6)
#pl.savefig('timings.pdf', format='pdf', papertype='a4', orientation='landscape')
pl.savefig(pp, format='pdf', papertype='a4', orientation='landscape')

fig = pl.figure()
ax = fig.add_subplot(111)
fig.suptitle('GFLOPS/sec: CALU fully-dynamic scheduling w/ MKL', fontsize=10)
if int(args.matrixsize) != 1:
    pl.title('Matrix dimensions: ' + dimensions[0] + ' x ' + dimensions[1], fontsize=8)
else:
    pl.title('Number of threads: ' + str(args.threads), fontsize=8)
if int(args.matrixsize) != 1:
       marker='D', label=r"WFIRST")

# Plot n(z) for Euclid for comparison
zmin, zmax, _a, nz_euclid, _b = np.genfromtxt("nz_euclid.dat").T
P.plot(0.5 * (zmin + zmax), nz_euclid * 0.67**3., 'm-', lw=1.5,
       marker='D', label=r"Euclid")

P.legend(loc='upper right', ncol=1, frameon=False, prop={'size': 'medium'})
P.xlim((0., 5.5))
P.ylim((1e-7, 1e-3))
P.tick_params(axis='both', which='major', labelsize=18, width=1.5, size=8., pad=10)
P.tick_params(axis='both', which='minor', labelsize=18, width=1.5, size=8.)
P.xlabel("z", fontsize='x-large')
P.ylabel("n(z) [Mpc$^{-3}$]", fontsize='x-large')
P.yscale('log')
P.tight_layout()
P.show()