def plot_fit(self, size=None, tol=0.1, axis_on=True):
    n, d = self.D.shape
    if size:
        nrows, ncols = size
    else:
        sq = np.ceil(np.sqrt(n))
        nrows = int(sq)
        ncols = int(sq)
    ymin = np.nanmin(self.D)
    ymax = np.nanmax(self.D)
    print('ymin: {0}, ymax: {1}'.format(ymin, ymax))
    numplots = np.min([n, nrows * ncols])
    plt.figure()
    for i in range(numplots):  # renamed from n to avoid shadowing the row count above
        plt.subplot(nrows, ncols, i + 1)
        plt.ylim((ymin - tol, ymax + tol))
        plt.plot(self.L[i, :] + self.S[i, :], 'r')  # low-rank + sparse fit
        plt.plot(self.L[i, :], 'b')                 # low-rank component alone
        if not axis_on:
            plt.axis('off')
def plot_scenarios(scenarios):
    nrows = len(scenarios)
    fig = plt.figure(figsize=(24, nrows))
    n_plot = nrows
    plt.axis('off')
    # plot fake samples: one row per scenario, one column per hour of the day
    for iplot in range(nrows):
        for jplot in range(24):
            ax = plt.subplot(n_plot, 24, iplot * 24 + jplot + 1)
            if iplot == 0:
                ax.annotate(f'{jplot:02d}:00', xy=(0.5, 1), xytext=(0, 5),
                            xycoords='axes fraction',
                            textcoords='offset points',
                            size='large', ha='center', va='baseline')
            # note: jplot - 1 wraps column 0 around to the last hour
            im = plt.imshow(scenarios[iplot, jplot - 1, :, :],
                            cmap=plt.cm.gist_earth_r,
                            norm=LogNorm(vmin=0.01, vmax=50))
            plt.axis('off')
    fig.subplots_adjust(right=0.93)
    cbar_ax = fig.add_axes([0.93, 0.15, 0.007, 0.7])
    cbar = fig.colorbar(im, cax=cbar_ax)
    cbar.set_label('fraction of daily precipitation', fontsize=16)
    cbar.ax.tick_params(labelsize=16)
    return fig
def plot_stat(rows, cache):
    "Use matplotlib to plot DAS statistics"
    if not PLOT_ALLOWED:
        raise Exception('Matplotlib is not available on the system')
    if cache in ['cache', 'merge']:  # cachein, cacheout, mergein, mergeout
        name_in = '%sin' % cache
        name_out = '%sout' % cache
    else:  # webip, webq, cliip, cliq
        name_in = '%sip' % cache
        name_out = '%sq' % cache

    def format_date(date):
        "Format given date"
        val = str(date)
        return '%s-%s-%s' % (val[:4], val[4:6], val[6:8])

    date_range = [r['date'] for r in rows]
    formatted_dates = [format_date(str(r['date'])) for r in rows]
    req_in = [r[name_in] for r in rows]
    req_out = [r[name_out] for r in rows]
    plt.plot(date_range, req_in, 'ro-',
             date_range, req_out, 'gv-')
    plt.grid(True)
    plt.axis([min(date_range), max(date_range),
              0, max([max(req_in), max(req_out)])])
    plt.xticks(date_range, tuple(formatted_dates), rotation=17)
    # plt.xlabel('dates [%s, %s]' % (date_range[0], date_range[-1]))
    plt.ylabel('DAS %s behavior' % cache)
    plt.savefig('das_%s.pdf' % cache, format='pdf', transparent=True)
    plt.close()
def plot_weightings():
    """Plots all weighting functions defined in :module: splweighting."""
    from scipy.signal import freqz
    from pylab import plt, np

    sample_rate = 48000
    num_samples = 2 * 4096
    fig, ax = plt.subplots()
    for name, weight_design in sorted(_weighting_coeff_design_funsd.items()):
        b, a = weight_design(sample_rate)
        w, H = freqz(b, a, worN=num_samples)
        freq = w * sample_rate / (2 * np.pi)
        ax.semilogx(freq, 20 * np.log10(np.abs(H) + 1e-20),
                    label='{}-Weighting'.format(name))
    plt.legend(loc='lower right')
    plt.xlabel('Frequency / Hz')
    plt.ylabel('Damping / dB')
    plt.grid(True)
    plt.axis([10, 20000, -80, 5])
    return fig, ax
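# A minimal usage sketch for plot_weightings() above. It assumes the function
# lives next to the splweighting module named in its docstring and that the
# module-level dict `_weighting_coeff_design_funsd` is already populated; both
# names come from the snippet itself, not from a verified public API.
if __name__ == '__main__':
    fig, ax = plot_weightings()
    fig.savefig('weighting_curves.png', dpi=150)  # hypothetical output path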
def create_image(csv_path):
    csv_data, _, _ = read_csv_data(csv_path)
    plt.figure()
    plt.plot(csv_data)
    plt.axis('off')
    plt.savefig('wind_data.png', bbox_inches='tight', dpi=500)
def plot_roc_curve(fpr, tpr, label=None):
    """
    Plots Receiver Operating Characteristic (ROC) curve from
    false_positive_rate (fpr) and true_positive_rate (tpr).
    Requires imports: from sklearn.metrics import roc_curve
    Returns: Nothing
    """
    import matplotlib as mpl
    import matplotlib.pyplot as plt
    import numpy as np
    plt.style.use('seaborn')
    mpl.rcParams['font.family'] = 'arial'
    np.random.seed(1000)
    np.set_printoptions(suppress=True, precision=4)
    plt.plot(fpr, tpr, linewidth=2, label=label)
    plt.plot([0, 1], [0, 1], 'k--')  # chance diagonal
    plt.axis([0, 1, 0, 1])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
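# A self-contained sketch of how plot_roc_curve() above might be driven; the
# labels and scores are synthetic, purely for illustration.
import numpy as np
from sklearn.metrics import roc_curve
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
y_true = rng.integers(0, 2, size=500)              # binary ground truth
y_score = y_true + rng.normal(0.0, 0.7, size=500)  # noisy classifier scores
fpr, tpr, _ = roc_curve(y_true, y_score)
plot_roc_curve(fpr, tpr, label='synthetic classifier')
plt.legend(loc='lower right')
plt.show()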
def example_filterbank():
    from pylab import plt
    import numpy as np

    x = _create_impulse(2000)
    gfb = GammatoneFilterbank(density=1)

    analyse = gfb.analyze(x)
    imax, slopes = gfb.estimate_max_indices_and_slopes()
    fig, axs = plt.subplots(len(gfb.centerfrequencies), 1)
    for (band, state), imx, ax in zip(analyse, imax, axs):
        ax.plot(np.real(band))
        ax.plot(np.imag(band))
        ax.plot(np.abs(band))
        ax.plot(imx, 0, 'o')
        ax.set_yticklabels([])
    [ax.set_xticklabels([]) for ax in axs[:-1]]
    axs[0].set_title('Impulse responses of gammatone bands')

    fig, ax = plt.subplots()

    def plotfun(x, y):
        ax.semilogx(x, 20 * np.log10(np.abs(y)**2))

    gfb.freqz(nfft=2 * 4096, plotfun=plotfun)
    plt.grid(True)
    plt.title('Absolute spectra of gammatone bands.')
    plt.xlabel('Normalized Frequency (log)')
    plt.ylabel('Attenuation /dB(FS)')
    plt.axis('tight')
    plt.ylim([-90, 1])
    plt.show()
    return gfb
def matrixMontage(spcomps, *args, **kwargs):
    numcomps, width, height = spcomps.shape
    rowcols = int(np.ceil(np.sqrt(numcomps)))
    for k, comp in enumerate(spcomps):
        plt.subplot(rowcols, rowcols, k + 1)
        plt.imshow(comp, *args, **kwargs)
        plt.axis('off')
def imshow(self, name):
    '''Display the grayscale image.'''
    img = self.buffer2img(name)
    plt.imshow(img, cmap='gray')
    plt.axis('off')
    plt.show()
def save_images(images, path):
    fig = plt.figure()
    for i, image in enumerate(images):
        fig.add_subplot(1, len(images), i + 1)
        plt.imshow(image)
        plt.axis('off')
    plt.savefig(path, bbox_inches='tight')
    plt.close()
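# Minimal usage sketch for save_images() above; the images are random RGB
# arrays and the output filename is hypothetical.
import numpy as np
demo_images = [np.random.rand(32, 32, 3) for _ in range(4)]
save_images(demo_images, 'demo_row.png')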
def draw_partitioned_graph(G, partition_obj, layout=None, labels=None,
                           layout_type='spring', node_size=70, node_alpha=0.7,
                           cmap=plt.get_cmap('jet'), node_text_size=12,
                           edge_color='blue', edge_alpha=0.5, edge_tickness=1,
                           edge_text_pos=0.3, text_font='sans-serif'):
    # if a premade layout hasn't been passed, create a new one
    if not layout:
        if layout_type == 'spring':
            layout = nx.spring_layout(G)
        elif layout_type == 'spectral':
            layout = nx.spectral_layout(G)
        elif layout_type == 'random':
            layout = nx.random_layout(G)
        else:
            layout = nx.shell_layout(G)

    # prepare the partition list of nodes and colors
    list_nodes, node_color = partition_to_draw(partition_obj)

    # draw graph
    nx.draw_networkx_nodes(G, layout, list_nodes, node_size=node_size,
                           alpha=node_alpha, node_color=node_color, cmap=cmap)
    nx.draw_networkx_edges(G, layout, width=edge_tickness,
                           alpha=edge_alpha, edge_color=edge_color)
    # nx.draw_networkx_labels(G, layout, font_size=node_text_size,
    #                         font_family=text_font)
    if labels is None:
        labels = range(len(G))
    edge_labels = dict(zip(G, labels))
    # nx.draw_networkx_edge_labels(G, layout, edge_labels=edge_labels,
    #                              label_pos=edge_text_pos)

    # show graph
    plt.axis('off')
    plt.xlim(0, 1)
    plt.ylim(0, 1)
def plot_mat(self, mat, fn):
    plt.matshow(asarray(mat.todense()))
    plt.axis('equal')
    sh = mat.shape
    plt.gca().set_yticks(range(0, sh[0]))
    plt.gca().set_xticks(range(0, sh[1]))
    plt.grid(True)
    plt.colorbar()
    plt.savefig(join(self.outs_dir, fn))
    plt.close()
def convert_all_to_png(vis_path, out_dir="maps_png", size=None):
    units = {
        'gas_density': 'Gas Density [g/cm$^3$]',
        'Tm': 'Temperature [K]',
        'Tew': 'Temperature [K]',
        'S': 'Entropy []',
        'dm': 'DM Density [g/cm$^3$]',
        'v': 'Velocity [km/s]'
    }
    log_list = ['gas_density']
    for vis_file in os.listdir(vis_path):
        if ".dat" not in vis_file:
            continue
        print("converting %s" % vis_file)
        map_type = re.search('sigma_(.*)_[xyz]', vis_file).group(1)
        (image, pixel_size, axis_values) = read_visualization_data(
            vis_path + "/" + vis_file, size)
        print("image width in Mpc/h: ", axis_values[-1] * 2.0)
        x, y = np.meshgrid(axis_values, axis_values)
        cmap_max = image.max()
        cmap_min = image.min()

        # plotting
        plt.figure(figsize=(5, 4))
        if map_type in log_list:
            plt.pcolor(x, y, image, norm=LogNorm(vmax=cmap_max, vmin=cmap_min))
        else:
            plt.pcolor(x, y, image, vmax=cmap_max, vmin=cmap_min)
        cbar = plt.colorbar()
        if map_type in units.keys():
            cbar.ax.set_ylabel(units[map_type])
        plt.axis([axis_values[0], axis_values[-1],
                  axis_values[0], axis_values[-1]])
        del image
        plt.xlabel(r"$Mpc/h$", fontsize=18)
        plt.ylabel(r"$Mpc/h$", fontsize=18)
        out_file = vis_file.replace("dat", "png")
        plt.savefig(out_dir + "/" + out_file, dpi=150)
        plt.close()
        plt.clf()
def draw_mock_graph():
    '''Draw the toe energy per meter graph.'''
    plt.plot([1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
             [1, 2, 4, 5, 6, 7, 10, 15, 15, 15], 'ro')
    plt.axis([0, 10, 0, 20])
    canvas = pylab.get_current_fig_manager().canvas
    canvas.draw()
    pil_image = Image.frombytes("RGB", canvas.get_width_height(),
                                canvas.tostring_rgb())
    pylab.close()
    return pil_image
def drawAdoptionNetworkMPL(G, fnum=1, show=False, writeFile=None):
    """Draws the network to matplotlib, coloring the nodes based on adoption.

    Looks for the node attribute 'adopted'. If the attribute is True, colors
    the node a different color, showing adoption visually. This function
    assumes that the node attributes have been pre-populated.

    :param networkx.Graph G: Any NetworkX Graph object.
    :param int fnum: The matplotlib figure number. Defaults to 1.
    :param bool show: Whether to show the figure interactively.
    :param str writeFile: A filename/path to save the figure image. If not
        specified, no output file is written.
    """
    Gclean = G.subgraph([n for n in G.nodes() if n not in nx.isolates(G)])
    plt.figure(num=fnum, figsize=(6, 6))
    # clear figure
    plt.clf()
    # Blue ('b') node color for adopters, red ('r') for non-adopters.
    nodecolors = ['b' if Gclean.node[n]['adopted'] else 'r'
                  for n in Gclean.nodes()]
    layout = nx.spring_layout(Gclean)
    nx.draw_networkx_nodes(Gclean, layout, node_size=80,
                           nodelist=Gclean.nodes(), node_color=nodecolors)
    nx.draw_networkx_edges(Gclean, layout, alpha=0.5)  # width=4
    # TODO: Draw labels of Ii values. Maybe vary size of node.
    # TODO: Color edges blue based on influences from neighbors
    influenceEdges = []
    for a in Gclean.nodes():
        for n in Gclean.node[a]['influence']:
            influenceEdges.append((a, n))
    if len(influenceEdges) > 0:
        nx.draw_networkx_edges(Gclean, layout, alpha=0.5, width=5,
                               edgelist=influenceEdges,
                               edge_color=['b'] * len(influenceEdges))
    # some extra space around figure
    plt.xlim(-0.05, 1.05)
    plt.ylim(-0.05, 1.05)
    plt.axis('off')
    if writeFile is not None:
        plt.savefig(writeFile)
    if show:
        plt.show()
def plot_fault_framework(fault_framework):
    fm = fault_framework
    plt.plot(fm.Y_PC, fm.DEP, '-o')
    plt.axis('equal')
    plt.axhline(0, color='black')
    plt.gca().set_yticks(fm.DEP)
    plt.gca().set_xticks(fm.Y_PC)
    plt.grid(True)
    plt.xlabel('From trench to continent (km)')
    plt.ylabel('Depth (km)')
    for xi, yi, dip in zip(fm.Y_PC, fm.DEP, fm.DIP_D):
        plt.text(xi, yi, 'dip = %.1f' % dip)
    plt.gca().invert_yaxis()
def plot_overview(self, suffix=''):
    x = self.x
    y = self.y
    r = self.radius
    cx, cy = self.center.real, self.center.imag
    ax = plt.axes()
    plt.scatter(x, y, marker='o', c='b', s=40)
    plt.axhline(y=0, color='grey', zorder=-1)
    plt.axvline(x=0, color='grey', zorder=-2)
    t = linspace(0, 2*pi, 201)
    circx = r*cos(t) + cx
    circy = r*sin(t) + cy
    plt.plot(circx, circy, 'g-')
    plt.plot([cx], [cy], 'gx', ms=12)
    if self.ZorY == 'Z':
        philist, flist = [self.phi_a, self.phi_p, self.phi_n], [self.fa, self.fp, self.fn]
    elif self.ZorY == 'Y':
        philist, flist = [self.phi_m, self.phi_s, self.phi_r], [self.fm, self.fs, self.fr]
    for p, f in zip(philist, flist):
        if f is not None:
            xpos = cx + r*cos(p)
            ypos = cy + r*sin(p)
            xos = 0.2*(xpos - cx)
            yos = 0.2*(ypos - cy)
            plt.plot([0, xpos], [0, ypos], 'co-')
            ax.annotate('{:.3f} Hz'.format(f), xy=(xpos, ypos), xycoords='data',
                        xytext=(xpos + xos, ypos + yos), textcoords='data',
                        # textcoords='offset points',
                        arrowprops=dict(arrowstyle="->", shrinkA=0, shrinkB=10))
    # plt.xlim(0, 0.16)
    # plt.ylim(-0.1, 0.1)
    plt.axis('equal')
    if self.ZorY == 'Z':
        plt.xlabel(r'resistance $R$ in Ohm')
        plt.ylabel(r'reactance $X$ in Ohm')
    if self.ZorY == 'Y':
        plt.xlabel(r'conductance $G$ in Siemens')
        plt.ylabel(r'susceptance $B$ in Siemens')
    plt.title("fitting the admittance circle with Powell's method")
    tx1 = 'best fit (fmin_powell):\n'
    tx1 += 'center at G+iB = {:.5f} + i*{:.8f}\n'.format(cx, cy)
    tx1 += 'radius = {:.5f}; '.format(r)
    tx1 += 'residue: {:.2e}'.format(self.resid)
    txt1 = plt.text(-r, cy - 1.1*r, tx1, fontsize=8, ha='left', va='top')
    txt1.set_bbox(dict(facecolor='gray', alpha=0.25))
    idxlist = self.to_be_annotated('triple')
    ofs = self.annotation_offsets(idxlist, factor=0.1, xshift=0.15)
    for i, j in enumerate(idxlist):
        xpos, ypos = x[j], y[j]
        xos, yos = ofs[i].real, ofs[i].imag
        ax.annotate('{:.1f} Hz'.format(self.f[j]), xy=(xpos, ypos), xycoords='data',
                    xytext=(xpos + xos, ypos + yos), textcoords='data',
                    # textcoords='offset points',
                    arrowprops=dict(arrowstyle="->", shrinkA=0, shrinkB=10))
    if self.show:
        plt.show()
    else:
        plt.savefig(join(self.sdc.plotpath,
                         'c{}_fitted_{}_circle'.format(self.sdc.case, self.ZorY)
                         + suffix + '.png'), dpi=240)
        plt.close()
def test_dep(self):
    xf = arange(0, 425)
    deps = self.fm.get_dep(xf)
    plt.plot(xf, deps)
    plt.gca().set_yticks(self.fm.DEP)
    plt.gca().set_xticks(self.fm.Y_PC)
    plt.grid(True)
    plt.title('Ground x versus depth')
    plt.xlabel('Ground X (km)')
    plt.ylabel('Depth (km)')
    plt.axis('equal')
    plt.gca().invert_yaxis()
    plt.savefig(join(self.outs_dir, '~Y_PC_vs_deps.png'))
    plt.close()
def convolve(arrays, melBank, genere, filter_idx):
    x = []
    melBank_time = np.fft.ifft(melBank)  # need to transform melBank to the time domain
    for eachClip in arrays:
        result = np.convolve(eachClip, melBank_time)
        x.append(result)
        plotBeforeAfterFilter(eachClip, melBank, melBank_time, result,
                              genere, filter_idx)
    m = np.asmatrix(np.array(x))
    fig, ax = plt.subplots()
    ax.matshow(m.real)  # each element has an imaginary part, so just plot the real part
    plt.axis('equal')
    plt.axis('tight')
    plt.title(genere)
    plt.tight_layout()
    # filename = "./figures/convolution/Convolution_" + "Filter" + str(filter_idx) + genere + ".png"
    # plt.savefig(filename)
    plt.show()
def freqz(sosmat, nsamples=44100, sample_rate=44100, plot=True):
    """Plots frequency response of sosmat."""
    from pylab import np, plt, fft, fftfreq
    x = np.zeros(nsamples)
    x[int(nsamples/2)] = 0.999
    y, states = sosfilter_double_c(x, sosmat)
    Y = fft(y)
    f = fftfreq(len(x), 1.0/sample_rate)
    if plot:
        plt.grid(True)
        plt.axis([0, sample_rate / 2, -100, 5])
        L = 20*np.log10(np.abs(Y[:int(len(x)/2)]) + 1e-17)
        plt.semilogx(f[:int(len(x)/2)], L, lw=0.5)
        plt.title('freqz sos filter')
        plt.xlabel('Frequency / Hz')
        plt.ylabel('Damping /dB(FS)')
        plt.xlim((10, sample_rate/2))
    return x, y, f, Y
def generate_word_cloud(text, no, name=None, show=True):
    '''
    Generates a word cloud bitmap given a text document (string).
    It uses the Term Frequency (TF) and Inverse Document Frequency (IDF)
    vectorization approach to derive the importance of a word --
    represented by the size of the word in the word cloud.

    Parameters
    ==========
    text: str
        text as the basis
    no: int
        number of words to be included
    name: str
        path to save the image
    show: bool
        whether to show the generated image or not
    '''
    tokens = tokenize(text)
    vec = TfidfVectorizer(min_df=2,
                          analyzer='word',
                          ngram_range=(1, 2),
                          stop_words='english')
    vec.fit_transform(tokens)
    wc = pd.DataFrame({'words': vec.get_feature_names(), 'tfidf': vec.idf_})
    words = ' '.join(wc.sort_values('tfidf', ascending=True)['words'].head(no))
    wordcloud = WordCloud(max_font_size=110,
                          background_color='white',
                          width=1024, height=768,
                          margin=10, max_words=150).generate(words)
    if show:
        plt.figure(figsize=(10, 10))
        plt.imshow(wordcloud, interpolation='bilinear')
        plt.axis('off')
        plt.show()
    if name is not None:
        wordcloud.to_file(name)
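# Hedged usage sketch: generate_word_cloud() above depends on a tokenize()
# helper plus TfidfVectorizer, pandas, WordCloud and plt imports defined
# elsewhere in its module, so this only illustrates the intended call pattern;
# 'report.txt' is a hypothetical input document.
sample_text = open('report.txt').read()
generate_word_cloud(sample_text, no=50, name='report_cloud.png', show=False)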
def load_mnist(path, filename='mnist.pkl.gz', plot=True):
    """
    Loads the MNIST dataset. Downloads the data if it doesn't already exist.
    This code is adapted from the deeplearning.net tutorial on classifying
    MNIST data with Logistic Regression:
    http://deeplearning.net/tutorial/logreg.html#logreg

    :param path: (str) Path to where data lives or should be downloaded to
    :param filename: (str) name of mnist file to download or load
    :return: train_set, valid_set, test_set
    """
    dataset = '{}/{}'.format(path, filename)
    data_dir, data_file = os.path.split(dataset)
    if data_dir == "" and not os.path.isfile(dataset):
        new_path = os.path.join(os.path.split(__file__)[0], "..", "data", dataset)
        if os.path.isfile(new_path) or data_file == 'mnist.pkl.gz':
            dataset = new_path

    if (not os.path.isfile(dataset)) and data_file == 'mnist.pkl.gz':
        from urllib.request import urlretrieve
        origin = ('http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz')
        print('Downloading data from {}'.format(origin))
        urlretrieve(origin, dataset)

    print('... loading data')

    f = gzip.open(dataset, 'rb')
    # assumes `import pickle` at module level; the pickle was written by
    # Python 2, hence the latin1 encoding
    train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
    f.close()

    X_train = train_set[0]
    y_train = train_set[1]

    if plot:
        for k in range(25):
            plt.subplot(5, 5, k + 1)  # subplot indices start at 1
            plt.imshow(np.reshape(X_train[k, :], (28, 28)))
            plt.axis('off')
            plt.title(y_train[k])

    return train_set, valid_set, test_set
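# Minimal usage sketch for load_mnist() above; './data' is an assumed location
# (the loader downloads mnist.pkl.gz there when it is missing).
train_set, valid_set, test_set = load_mnist('./data', plot=False)
X_train, y_train = train_set
print('training samples:', X_train.shape[0])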
def __call__(self, axis_on_or_off='off', use_lims=True, use_lims_ext=False):
    axis_on_or_off = axis_on_or_off.lower()
    if axis_on_or_off not in ['off', 'on']:
        raise ValueError(axis_on_or_off)
    if use_lims_ext:
        use_lims = False
    if use_lims_ext and use_lims:
        msg = "use_lims={0} AND ".format(use_lims)
        msg += "use_lims_ext={0} ".format(use_lims_ext)
        msg += "but at most one of these can be True."
        raise ValueError(msg)
    Nx = self.Nx
    Ny = self.Ny
    plt.axis(axis_on_or_off)
    plt.axis('scaled')
    if use_lims:
        plt.xlim([0, Nx])
        plt.ylim([0, Ny])
    if use_lims_ext:
        plt.xlim([0, Nx + 1])
        plt.ylim([0, Ny + 1])
    of.plt.axis_ij()
def show_chan_mpl(code, start_date, end_date, stock_days, resample,
                  show_mpl=True, least_init=3, chanK_flag=False, windows=20):
    def get_least_khl_num(resample, idx=0, init_num=3):
        # init = 3
        if init_num - idx > 0:
            initw = init_num - idx
        else:
            initw = 0
        return init_num if resample == 'd' else initw if resample == 'w' \
            else init_num - idx - 1 if init_num - idx - 1 > 0 else 0 \
            if resample == 'm' else 5

    stock_code = code  # stock code
    # stock_code = '002176'  # stock code
    # start_date = '2017-09-05'
    # start_date = None
    # end_date = '2017-10-12 15:00:00'  # date of the last generated candle
    # end_date = None
    # stock_days = 60  # how many days/minutes of candles to look back
    # resample = 'd'
    # resample = 'w'
    x_jizhun = 3  # window period: tick spacing shown on the x-axis. 5: daily, 40: 30-minute, 48: 5-minute
    least_khl_num = get_least_khl_num(resample, init_num=least_init)
    # stock_frequency = '5m'  # 1d: daily, 30m: 30-minute, 5m: 5-minute, 1m: 1-minute
    stock_frequency = resample  # 1d: daily, 30m: 30-minute, 5m: 5-minute, 1m: 1-minute, w: week
    # chanK_flag = chanK  # True: Chan-theory candles, False: raw candles
    # chanK_flag = True  # True: Chan-theory candles, False: raw candles
    show_mpl = show_mpl

    def con2Cxianduan(stock, k_data, chanK, frsBiType, biIdx, end_date,
                      cur_ji=1, recursion=False, dl=None, chanK_flag=False,
                      least_init=3):
        max_k_num = 4
        if cur_ji >= 6 or len(biIdx) == 0 or recursion:
            return biIdx
        idx = biIdx[len(biIdx) - 1]
        k_data_dts = list(k_data.index)
        st_data = chanK['enddate'][idx]
        if st_data not in k_data_dts:
            return biIdx

        # map the sub-level segment points back into this level's chanK
        def refactorXd(biIdx, xdIdxc, chanK, chanKc, cur_ji):
            new_biIdx = []
            biIdxB = biIdx[len(biIdx) - 1] if len(biIdx) > 0 else 0
            for xdIdxcn in xdIdxc:
                for chanKidx in range(len(chanK.index))[biIdxB:]:
                    if judge_day_bao(chanK, chanKidx, chanKc, xdIdxcn, cur_ji):
                        new_biIdx.append(chanKidx)
                        break
            return new_biIdx

        # check whether the sub-level date is contained in this level's candle
        def judge_day_bao(chanK, chanKidx, chanKc, xdIdxcn, cur_ji):
            _end_date = chanK['enddate'][chanKidx] + datetime.timedelta(
                hours=15) if cur_ji == 1 else chanK['enddate'][chanKidx]
            _start_date = chanK.index[chanKidx] if chanKidx == 0 \
                else chanK['enddate'][chanKidx - 1] + datetime.timedelta(minutes=1)
            return _start_date <= chanKc.index[xdIdxcn] <= _end_date

        # cur_ji = 1  # current level
        # requires more than 4 candles; levels: 1 daily, 2 30-minute, 3 5-minute, 4 1-minute
        if not recursion:
            resample = 'd' if cur_ji + 1 == 2 else '5m' if cur_ji + 1 == 3 else \
                'd' if cur_ji + 1 == 5 else 'w' if cur_ji + 1 == 6 else 'd'
            least_khl_num = get_least_khl_num(resample, 1, init_num=least_init)
        print("sub-level:%s st_data:%s k_data_dts:%s least_khl_num:%s" % (
            len(k_data_dts) - k_data_dts.index(st_data), str(st_data)[:10],
            len(k_data_dts), least_khl_num))
        if cur_ji + 1 != 2 and len(k_data_dts) - k_data_dts.index(
                st_data) >= least_khl_num + 1:
            frequency = '30m' if cur_ji + 1 == 2 else '5m' if cur_ji + 1 == 3 else '1m'
        # else:
        #     frequency = 'd' if cur_ji+1==2 else '5m' if cur_ji+1==3 else \
        #         'd' if cur_ji+1==5 else 'w' if cur_ji+1==6 else 'd'
        start_lastday = str(chanK.index[biIdx[-1]])[0:10]
        print("sub-level resample:%s cur_ji:%s %s" % (resample, cur_ji, start_lastday))
        # print [chanK.index[x] for x in biIdx]
        k_data_c, cname = get_quotes_tdx(stock, start=start_lastday,
                                         end=end_date, dl=dl, resample=resample)
        print(k_data_c.index[0], k_data_c.index[-1])
        chanKc = chan.parse2ChanK(k_data_c, k_data_c.values) if chanK_flag else k_data_c
        fenTypesc, fenIdxc = chan.parse2ChanFen(chanKc, recursion=True)
        if len(fenTypesc) == 0:
            return biIdx
        biIdxc, frsBiTypec = chan.parse2ChanBi(fenTypesc, fenIdxc, chanKc,
                                               least_khl_num=least_khl_num - 1)
        if len(biIdxc) == 0:
            return biIdx
        print("biIdxc:", [round(k_data_c.high[x], 2) for x in biIdxc],
              [str(k_data_c.index[x])[:10] for x in biIdxc])
        xdIdxc, xdTypec = chan.parse2Xianduan(
            biIdxc, chanKc, least_windows=1 if least_khl_num > 0 else 0)
        biIdxc = con2Cxianduan(stock, k_data_c, chanKc, frsBiTypec, biIdxc,
                               end_date, cur_ji + 1, recursion=True)
        print("xdIdxc:%s xdTypec:%s biIdxc:%s" % (xdIdxc, xdTypec, biIdxc))
        if len(xdIdxc) == 0:
            return biIdx
        # connect the sub-level segments as bi of the upper level
        lastBiType = frsBiType if len(biIdx) % 2 == 0 else -frsBiType
        if len(biIdx) == 0:
            return refactorXd(biIdx, xdIdxc, chanK, chanKc, cur_ji)
        lastbi = biIdx.pop()
        firstbic = xdIdxc.pop(0)
        # same-direction connection
        if lastBiType == xdTypec:
            biIdx = biIdx + refactorXd(biIdx, xdIdxc, chanK, chanKc, cur_ji)
        # opposite-direction connection
        else:
            # print 'starting opposite-direction connection'
            _mid = [lastbi] if (lastBiType == -1 and chanK['low'][lastbi] <= chanKc['low'][firstbic]) \
                or (lastBiType == 1 and chanK['high'][lastbi] >= chanKc['high'][firstbic]) else \
                [chanKidx for chanKidx in range(len(chanK.index))[biIdx[len(biIdx) - 1]:]
                 if judge_day_bao(chanK, chanKidx, chanKc, firstbic, cur_ji)]
            biIdx = biIdx + [_mid[0]] + refactorXd(biIdx, xdIdxc, chanK, chanKc, cur_ji)
        # print "sub-level:", len(biIdx), biIdx, [str(k_data_c.index[x])[:10] for x in biIdx]
        return biIdx

    def get_quotes_tdx(code, start=None, end=None, dl=120, resample='d', show_name=True):
        quotes = tdd.get_tdx_append_now_df_api(
            code=stock_code, start=start, end=end, dl=dl).sort_index(ascending=True)
        if not resample == 'd' and resample in tdd.resample_dtype:
            quotes = tdd.get_tdx_stock_period_to_type(quotes, period_day=resample)
        quotes.index = quotes.index.astype('datetime64')
        if show_name:
            if 'name' in quotes.columns:
                cname = quotes.name[0]
                # cname_g = cname
            else:
                dm = tdd.get_sina_data_df(code)
                if 'name' in dm.columns:
                    cname = dm.name[0]
                else:
                    cname = '-'
        else:
            cname = '-'
        if quotes is not None and len(quotes) > 0:
            quotes = quotes.loc[:, ['open', 'close', 'high', 'low', 'vol', 'amount']]
        else:
            # log.error("quotes is None check:%s" % (code))
            raise Exception("Code:%s error, df is None" % (code))
        return quotes, cname

    quotes, cname = get_quotes_tdx(stock_code, start_date, end_date,
                                   dl=stock_days, resample=resample,
                                   show_name=show_mpl)
    # quotes.rename(columns={'amount': 'money'}, inplace=True)
    # quotes.rename(columns={'vol': 'vol'}, inplace=True)
    # print quotes[-2:]
    # print quotes[:1]

    # Chan-theory candles
    #              open  close   high    low       vol     amount
    # 2017-05-03  15.69  15.66  15.73  15.53  10557743  165075887
    # 2017-05-04  15.66  15.63  15.70  15.52   8343270  130330396
    # 2017-05-05  15.56  15.65  15.68  15.41  18384031  285966842
    # 2017-05-08  15.62  15.75  15.76  15.54  12598891  197310688
    quotes = chan.parse2ChanK(quotes, quotes.values) if chanK_flag else quotes
    # print quotes[:1].index
    # print quotes[-1:].index
    quotes[quotes['vol'] == 0] = np.nan
    quotes = quotes.dropna()
    Close = quotes['close']
    Open = quotes['open']
    High = quotes['high']
    Low = quotes['low']
    T0 = quotes.index.values
    # T0 = mdates.date2num(T0)
    length = len(Close)
    initial_trend = "down"
    cur_ji = 1 if stock_frequency == 'd' else \
        2 if stock_frequency == '30m' else \
        3 if stock_frequency == '5m' else \
        4 if stock_frequency == 'w' else \
        5 if stock_frequency == 'm' else 6
    log.debug('====== decide whether the last, unfinished bi segment is formed '
              'by a sub-level trend ======:%s %s' % (stock_frequency, cur_ji))
    x_date_list = quotes.index.values.tolist()
    # for x_date in x_date_list:
    #     d = datetime.datetime.fromtimestamp(x_date / 1000000000)
    #     print d.strftime("%Y-%m-%d %H:%M:%S.%f")
    # print x_date_list
    k_data = quotes
    k_values = k_data.values
    # Chan-theory candles
    chanK = quotes if chanK_flag else chan.parse2ChanK(k_data, k_values, chan_kdf=chanK_flag)
    fenTypes, fenIdx = chan.parse2ChanFen(chanK)
    # log.debug("code:%s fenTypes:%s fenIdx:%s k_data:%s" % (stock_code, fenTypes, fenIdx, len(k_data)))
    biIdx, frsBiType = chan.parse2ChanBi(fenTypes, fenIdx, chanK, least_khl_num=least_khl_num)
    # log.debug("biIdx1:%s chanK:%s" % (biIdx, len(chanK)))
    print("biIdx1:%s %s chanK:%s" % (biIdx, str(chanK.index.values[biIdx[-1]])[:10], len(chanK)))
    biIdx = con2Cxianduan(stock_code, k_data, chanK, frsBiType, biIdx,
                          end_date, cur_ji, least_init=least_init)
    # log.debug("biIdx2:%s chanK:%s" % (biIdx, len(biIdx)))
    chanKIdx = [(chanK.index[x]) for x in biIdx]
    if len(biIdx) == 0 and len(chanKIdx) == 0:
        print("BiIdx is None and chanKidx is None:%s" % (code))
        return None
    log.debug("con2Cxianduan:%s chanK:%s %s" % (biIdx, len(chanK),
              chanKIdx[-1] if len(chanKIdx) > 0 else None))
    # print quotes['close'].apply(lambda x: round(x, 2))
    # print 'stock code', get_security_info(stock_code).display_name
    # print 'stock code', (stock_code), resample, least_khl_num

    # 3. Get the bi results and compute the coordinates for display
    def plot_fenbi_seq(biIdx, frsBiType, plt=None, color=None):
        x_fenbi_seq = []
        y_fenbi_seq = []
        for i in range(len(biIdx)):
            if biIdx[i] is not None:
                fenType = -frsBiType if i % 2 == 0 else frsBiType
                # dt = chanK['enddate'][biIdx[i]]
                # Chan-theory candles
                dt = chanK.index[biIdx[i]] if chanK_flag else chanK['enddate'][biIdx[i]]
                # print i, k_data['high'][dt], k_data['low'][dt]
                time_long = int(time.mktime(
                    (dt + datetime.timedelta(hours=8)).timetuple()) * 1000000000)
                # print x_date_list.index(time_long) if time_long in x_date_list else 0
                if fenType == 1:
                    if plt is not None:
                        if color is None:
                            plt.text(x_date_list.index(time_long),
                                     k_data['high'][dt], str(k_data['high'][dt]),
                                     ha='left', fontsize=12)
                        else:
                            col_v = color[0] if fenType > 0 else color[1]
                            plt.text(x_date_list.index(time_long),
                                     k_data['high'][dt], str(k_data['high'][dt]),
                                     ha='left', fontsize=12,
                                     bbox=dict(facecolor=col_v, alpha=0.5))
                    x_fenbi_seq.append(x_date_list.index(time_long))
                    y_fenbi_seq.append(k_data['high'][dt])
                if fenType == -1:
                    if plt is not None:
                        if color is None:
                            plt.text(x_date_list.index(time_long),
                                     k_data['low'][dt], str(k_data['low'][dt]),
                                     va='bottom', fontsize=12)
                        else:
                            col_v = color[0] if fenType > 0 else color[1]
                            plt.text(x_date_list.index(time_long),
                                     k_data['low'][dt], str(k_data['low'][dt]),
                                     va='bottom', fontsize=12,
                                     bbox=dict(facecolor=col_v, alpha=0.5))
                    x_fenbi_seq.append(x_date_list.index(time_long))
                    y_fenbi_seq.append(k_data['low'][dt])
                # bottom_time = None
                # for k_line_dto in m_line_dto.member_list[::-1]:
                #     if k_line_dto.low == m_line_dto.low:
                #         # the date returned by get_price defaults to 08:00:00
                #         bottom_time = k_line_dto.begin_time.strftime('%Y-%m-%d') + ' 08:00:00'
                #         break
                # x_fenbi_seq.append(x_date_list.index(long(time.mktime(datetime.strptime(bottom_time, "%Y-%m-%d %H:%M:%S").timetuple()) * 1000000000)))
                # y_fenbi_seq.append(m_line_dto.low)
        return x_fenbi_seq, y_fenbi_seq

    # print T0[-len(T0):].astype(dt.date)
    T1 = T0[-len(T0):].astype(datetime.date) / 1000000000
    Ti = []
    if len(T0) // x_jizhun > 12:
        x_jizhun = len(T0) // 12
    for i in range(len(T0) // x_jizhun):
        # print "len(T0)/x_jizhun:", len(T0)/x_jizhun
        a = i * x_jizhun
        d = datetime.date.fromtimestamp(T1[a])
        # print d
        T2 = d.strftime('$%Y-%m-%d$')
        Ti.append(T2)
        # print tab
    d1 = datetime.date.fromtimestamp(T1[len(T0) - 1])
    d2 = (d1 + datetime.timedelta(days=1)).strftime('$%Y-%m-%d$')
    Ti.append(d2)

    ll = Low.min() * 0.97
    hh = High.max() * 1.03

    # ht = HoverTool(tooltips=[
    #     ("date", "@date"),
    #     ("open", "@open"),
    #     ("close", "@close"),
    #     ("high", "@high"),
    #     ("low", "@low"),
    #     ("volume", "@volume"),
    #     ("money", "@money")])
    # TOOLS = [ht, WheelZoomTool(dimensions=['width']),
    #          ResizeTool(), ResetTool(),
    #          PanTool(dimensions=['width']), PreviewSaveTool()]

    if show_mpl:
        fig = plt.figure(figsize=(10, 6))
        ax1 = plt.subplot2grid((10, 1), (0, 0), rowspan=8, colspan=1)
        # ax1 = fig.add_subplot(2, 1, 1)
        # fig = plt.figure()
        # ax1 = plt.axes([0, 0, 3, 2])
        X = np.array(range(0, length))
        pad_nan = X + nan

        # compute the upper and lower shadows
        max_clop = Close.copy()
        max_clop[Close < Open] = Open[Close < Open]
        min_clop = Close.copy()
        min_clop[Close > Open] = Open[Close > Open]

        # upper shadow
        line_up = np.array([High, max_clop, pad_nan])
        line_up = np.ravel(line_up, 'F')
        # lower shadow
        line_down = np.array([Low, min_clop, pad_nan])
        line_down = np.ravel(line_down, 'F')

        # X coordinates matching the shadows
        pad_nan = nan + X
        pad_X = np.array([X, X, X])
        pad_X = np.ravel(pad_X, 'F')

        # draw the candle bodies, starting with candles that closed higher
        up_cl = Close.copy()
        up_cl[Close <= Open] = nan
        up_op = Open.copy()
        up_op[Close <= Open] = nan
        down_cl = Close.copy()
        down_cl[Open <= Close] = nan
        down_op = Open.copy()
        down_op[Open <= Close] = nan
        even = Close.copy()
        even[Close != Open] = nan

        # bodies of the up (red) candles
        pad_box_up = np.array([up_op, up_op, up_cl, up_cl, pad_nan])
        pad_box_up = np.ravel(pad_box_up, 'F')
        pad_box_down = np.array([down_cl, down_cl, down_op, down_op, pad_nan])
        pad_box_down = np.ravel(pad_box_down, 'F')
        pad_box_even = np.array([even, even, even, even, pad_nan])
        pad_box_even = np.ravel(pad_box_even, 'F')

        # the nan entries in X need not match y one-to-one
        X_left = X - 0.25
        X_right = X + 0.25
        box_X = np.array([X_left, X_right, X_right, X_left, pad_nan])
        # print box_X
        box_X = np.ravel(box_X, 'F')
        # print box_X
        # Close_handle = plt.plot(pad_X, line_up, color='k')
        vertices_up = np.array([box_X, pad_box_up]).T
        vertices_down = np.array([box_X, pad_box_down]).T
        vertices_even = np.array([box_X, pad_box_even]).T

        handle_box_up = mat.patches.Polygon(vertices_up, color='r', zorder=1)
        handle_box_down = mat.patches.Polygon(vertices_down, color='g', zorder=1)
        handle_box_even = mat.patches.Polygon(vertices_even, color='k', zorder=1)
        ax1.add_patch(handle_box_up)
        ax1.add_patch(handle_box_down)
        ax1.add_patch(handle_box_even)

        handle_line_up = mat.lines.Line2D(pad_X, line_up, color='k',
                                          linestyle='solid', zorder=0)
        handle_line_down = mat.lines.Line2D(pad_X, line_down, color='k',
                                            linestyle='solid', zorder=0)
        ax1.add_line(handle_line_up)
        ax1.add_line(handle_line_down)

        v = [0, length, Open.min() - 0.5, Open.max() + 0.5]
        plt.axis(v)
        ax1.set_xticks(np.linspace(-2, len(Close) + 2, len(Ti)))
        ax1.set_ylim(ll, hh)
        ax1.set_xticklabels(Ti)
        plt.grid(True)
        plt.setp(plt.gca().get_xticklabels(), rotation=30,
                 horizontalalignment='right')
        '''
        The code above is adapted from https://www.joinquant.com/post/1756,
        thanks to alpha-smart-dog. The candlestick chart is finished here.
        '''

    # print "biIdx:%s chankIdx:%s" % (biIdx, str(chanKIdx[-1])[:10])
    if show_mpl:
        x_fenbi_seq, y_fenbi_seq = plot_fenbi_seq(biIdx, frsBiType, plt)
        # plot_fenbi_seq(fenIdx, fenTypes[0], plt, color=['red', 'green'])
        plot_fenbi_seq(fenIdx, frsBiType, plt, color=['red', 'green'])
    else:
        x_fenbi_seq, y_fenbi_seq = plot_fenbi_seq(biIdx, frsBiType, plt=None)
        plot_fenbi_seq(fenIdx, frsBiType, plt=None, color=['red', 'green'])

    # add the blue bi lines on top of the chart
    inx_value = chanK.high.values
    inx_va = [round(inx_value[x], 2) for x in biIdx]
    log.debug("inx_va:%s count:%s" % (inx_va, len(quotes.high)))
    log.debug("yfenbi:%s count:%s" % ([round(y, 2) for y in y_fenbi_seq], len(chanK)))
    j_BiType = [-frsBiType if i % 2 == 0 else frsBiType for i in range(len(biIdx))]
    BiType_s = j_BiType[-1] if len(j_BiType) > 0 else -2
    # bi_price = [str(chanK.low[idx]) if i % 2 == 0 else str(chanK.high[idx]) for i, idx in enumerate(biIdx)]
    # print ("bi :%s %s" % (biIdx, bi_price))

    # fen_dt = [str(chanK.index[fenIdx[i]])[:10] if chanK_flag else str(chanK['enddate'][fenIdx[i]])[:10] for i in range(len(fenIdx))]
    fen_dt = [(chanK.index[fenIdx[i]]) if chanK_flag else (chanK['enddate'][fenIdx[i]])
              for i in range(len(fenIdx))]
    if len(fenTypes) > 0:
        if fenTypes[0] == -1:
            # fen_price = [str(k_data.low[idx]) if i % 2 == 0 else str(k_data.high[idx]) for i, idx in enumerate(fen_dt)]
            low_fen = [idx for i, idx in enumerate(fen_dt) if i % 2 == 0]
            high_fen = [idx for i, idx in enumerate(fen_dt) if i % 2 != 0]
        else:
            # fen_price = [str(k_data.high[idx]) if i % 2 == 0 else str(k_data.low[idx]) for i, idx in enumerate(fen_dt)]
            high_fen = [idx for i, idx in enumerate(fen_dt) if i % 2 == 0]
            low_fen = [idx for i, idx in enumerate(fen_dt) if i % 2 != 0]
        # fen_duration = [fenIdx[i] - fenIdx[i - 1] if i > 0 else 0 for i, idx in enumerate(fenIdx)]
    else:
        # fen_price = fenTypes
        # fen_duration = fenTypes
        low_fen = []
        high_fen = []
    # fen_dt = [str(k_data.index[idx])[:10] for i, idx in enumerate(fenIdx)]
    # print low_fen, high_fen

    def dataframe_mode_round(df):
        roundlist = [1, 0]
        df_mode = []
        # df.high.cummin().value_counts()
        for i in roundlist:
            df_mode = df.apply(lambda x: round(x, i)).mode()
            if len(df_mode) > 0:
                break
        return df_mode

    kdl = k_data.loc[low_fen].low
    kdl_mode = dataframe_mode_round(kdl)
    kdh = k_data.loc[high_fen].high
    kdh_mode = dataframe_mode_round(kdh)
    print("kdl:%s" % (kdl.values))
    print("kdh:%s" % (kdh.values))
    print("kdl_mode:%s kdh_mode%s chanKidx:%s" % (kdl_mode.values, kdh_mode.values,
                                                  str(chanKIdx[-1])[:10]))
    lastdf = k_data[k_data.index >= chanKIdx[-1]]
    if BiType_s == -1:
        keydf = lastdf[((lastdf.close >= kdl_mode.max()) & (lastdf.low >= kdl_mode.max()))]
    elif BiType_s == 1:
        keydf = lastdf[((lastdf.close >= kdh_mode.max()) & (lastdf.high >= kdh_mode.min()))]
    else:
        keydf = lastdf[((lastdf.close >= kdh_mode.max()) & (lastdf.high >= kdh_mode.min())) |
                       ((lastdf.close <= kdl_mode.min()) & (lastdf.low <= kdl_mode.min()))]
    print("BiType_s:%s keydf:%s key:%s" % (
        BiType_s, None if len(keydf) == 0 else str(keydf.index.values[0])[:10], len(keydf)))
    # return BiType_s, None if len(keydf) == 0 else str(keydf.index.values[0])[:10], len(keydf)
    # import ipdb; ipdb.set_trace()
    log.debug("Fentype:%s " % (fenTypes))
    log.debug("fenIdx:%s " % (fenIdx))
    # print ("fen_duration:%s " % (fen_duration))
    # print ("fen_price:%s " % (fen_price))
    # print ("fendt:%s " % (fen_dt))
    print("BiType :%s frsBiType:%s" % (j_BiType, frsBiType))
    if len(j_BiType) > 0:
        if j_BiType[0] == -1:
            tb_price = [str(quotes.low[idx]) if i % 2 == 0 else str(quotes.high[idx])
                        for i, idx in enumerate(x_fenbi_seq)]
        else:
            tb_price = [str(quotes.high[idx]) if i % 2 == 0 else str(quotes.low[idx])
                        for i, idx in enumerate(x_fenbi_seq)]
        tb_duration = [x_fenbi_seq[i] - x_fenbi_seq[i - 1] if i > 0 else 0
                       for i, idx in enumerate(x_fenbi_seq)]
    else:
        tb_price = j_BiType
        tb_duration = j_BiType
    print("chart bi :", x_fenbi_seq, tb_price)
    print("chart bi duration :", tb_duration)

    # draw the segments on top of the bi
    xdIdxs, xfenTypes = chan.parse2ChanXD(frsBiType, biIdx, chanK)
    print('segments', xdIdxs, xfenTypes)
    x_xd_seq = []
    y_xd_seq = []
    for i in range(len(xdIdxs)):
        if xdIdxs[i] is not None:
            fenType = xfenTypes[i]
            # dt = chanK['enddate'][biIdx[i]]
            # Chan-theory candles
            dt = chanK.index[xdIdxs[i]] if chanK_flag else chanK['enddate'][xdIdxs[i]]
            # print k_data['high'][dt], k_data['low'][dt]
            time_long = int(time.mktime(
                (dt + datetime.timedelta(hours=8)).timetuple()) * 1000000000)
            # print x_date_list.index(time_long) if time_long in x_date_list else 0
            if fenType == 1:
                x_xd_seq.append(x_date_list.index(time_long))
                y_xd_seq.append(k_data['high'][dt])
            if fenType == -1:
                x_xd_seq.append(x_date_list.index(time_long))
                y_xd_seq.append(k_data['low'][dt])
            # bottom_time = None
            # for k_line_dto in m_line_dto.member_list[::-1]:
            #     if k_line_dto.low == m_line_dto.low:
            #         # the date returned by get_price defaults to 08:00:00
            #         bottom_time = k_line_dto.begin_time.strftime('%Y-%m-%d') + ' 08:00:00'
            #         break
            # x_fenbi_seq.append(x_date_list.index(long(time.mktime(datetime.strptime(bottom_time, "%Y-%m-%d %H:%M:%S").timetuple()) * 1000000000)))
            # y_fenbi_seq.append(m_line_dto.low)

    # add the blue bi lines on top of the chart
    print("segments :%s" % (x_xd_seq))
    print("bi values :%s" % ([str(x) for x in (y_xd_seq)]))
    # Y_hat = X * b + a
    if show_mpl:
        plt.plot(x_fenbi_seq, y_fenbi_seq)
        plt.legend([stock_code, cname], loc=0)
        plt.title(stock_code + " | " + cname + " | " + str(quotes.index[-1])[:10], fontsize=14)
        plt.plot(x_xd_seq, y_xd_seq)
        if len(quotes) > windows:
            # pd.rolling_mean was removed from pandas; use the rolling API
            roll_mean = quotes.close.rolling(window=windows).mean()
            plt.plot(roll_mean, 'r')
        zp = zoompan.ZoomPan()
        figZoom = zp.zoom_factory(ax1, base_scale=1.1)
        figPan = zp.pan_factory(ax1)
        '''#subplot2 bar
        ax2 = plt.subplot2grid((10, 1), (8, 0), rowspan=2, colspan=1)
        # ax2.plot(quotes.vol)
        # ax2.set_xticks(np.linspace(-2, len(quotes) + 2, len(Ti)))
        ll = min(quotes.vol.values.tolist()) * 0.97
        hh = max(quotes.vol.values.tolist()) * 1.03
        ax2.set_ylim(ll, hh)
        # ax2.set_xticklabels(Ti)
        # plt.hist(quotes.vol, histtype='bar', rwidth=0.8)
        plt.bar(x_date_list, quotes.vol, label="Volume", color='b')
        '''
        '''# draw volume without tight_layout()
        pad = 0.25
        yl = ax1.get_ylim()
        ax1.set_ylim(yl[0] - (yl[1] - yl[0]) * pad, yl[1])
        ax2 = ax1.twinx()
        ax2.set_position(mat.transforms.Bbox([[0.125, 0.1], [0.9, 0.32]]))
        volume = np.asarray(quotes.amount)
        pos = quotes['open'] - quotes['close'] < 0
        neg = quotes['open'] - quotes['close'] >= 0
        idx = quotes.reset_index().index
        ax2.bar(idx[pos], volume[pos], color='red', width=1, align='center')
        ax2.bar(idx[neg], volume[neg], color='green', width=1, align='center')
        yticks = ax2.get_yticks()
        ax2.set_yticks(yticks[::3])
        '''
        # same sharex
        plt.subplots_adjust(left=0.05, bottom=0.08, right=0.95, top=0.95,
                            wspace=0.15, hspace=0.00)
        plt.setp(ax1.get_xticklabels(), visible=False)
        yl = ax1.get_ylim()
        # ax2 = plt.subplot(212, sharex=ax1)
        ax2 = plt.subplot2grid((10, 1), (8, 0), rowspan=2, colspan=1, sharex=ax1)
        # ax2.set_position(mat.transforms.Bbox([[0.125, 0.1], [0.9, 0.32]]))
        volume = np.asarray(quotes.amount)
        pos = quotes['open'] - quotes['close'] < 0
        neg = quotes['open'] - quotes['close'] >= 0
        idx = quotes.reset_index().index
        ax2.bar(idx[pos], volume[pos], color='red', width=1, align='center')
        ax2.bar(idx[neg], volume[neg], color='green', width=1, align='center')
        yticks = ax2.get_yticks()
        ax2.set_yticks(yticks[::3])
        # plt.tight_layout()
        # plt.subplots_adjust(hspace=0.00, bottom=0.08)
        plt.xticks(rotation=15, horizontalalignment='center')
        # plt.bar(x_date_list, quotes.vol, label="Volume", color='b')
        # quotes['vol'].plot(kind='bar', ax=ax2, color='g', alpha=0.1)
        # ax2.set_ylim([0, ax2.get_ylim()[1] * 2])
        # plt.gcf().subplots_adjust(bottom=0.15)
        # fig.subplots_adjust(left=0.05, bottom=0.08, right=0.95, top=0.95, wspace=0.15, hspace=0.25)
        # scale the x-axis tight
        # ax2.set_xlim(min(x_date_list), max(x_date_list))
        # the y-ticks for the bar were too dense, keep only every third one
        # plt.grid(True)
        # plt.xticks(rotation=30, horizontalalignment='center')
        # plt.setp(axs[1].xaxis.get_majorticklabels(), rotation=70)
        # plt.legend()
        # plt.tight_layout()
        # plt.draw()
        # plt.show()
        plt.show(block=False)
def bars(scheme, verbose=None, norm='load'):
    """
    Figure to compare link proportional and usage proportional for a single
    scheme and put them in ./sensitivity/figures/scheme/
    """
    # Load data and results
    F = abs(np.load('./results/' + scheme + '-flows.npy'))
    quantiles = np.load('./results/quantiles_' + scheme + '_' + str(lapse) + '.npy')
    nNodes = 30
    names = node_namer(N)  # array of node labels
    links = range(len(F))
    nodes = np.linspace(0.5, 2 * nNodes - 1.5, nNodes)
    nodes_shift = nodes + .5
    for direction in directions:
        N_usages = np.load('./results/Node_contrib_' + scheme + '_' + direction
                           + '_' + str(lapse) + '.npy')
        # Compare node transmission to mean load
        if verbose:
            print('Plotting node comparison - ' + scheme + ' - ' + direction)
        # sort node names for x-axis
        Total_usage = np.sum(N_usages, 1)
        node_ids = np.array(range(len(N))).reshape((len(N), 1))
        node_mean_load = [n.mean for n in N]

        # Vector for normalisation
        if norm == 'cap':
            normVec = np.ones(nNodes) * sum(quantiles)
        else:
            normVec = node_mean_load

        # Calculate node proportional
        EU_load = np.sum(node_mean_load)
        Total_caps = sum(quantiles)
        Node_proportional = node_mean_load / EU_load * Total_caps / normVec
        Node_proportional = np.reshape(Node_proportional, (len(Node_proportional), 1))

        # Calculate link proportional
        link_proportional = linkProportional(N, link_dic, quantiles)
        link_proportional = [link_proportional[i] / normVec[i] for i in range(nNodes)]

        # Calculate old usage proportional
        if direction == 'combined':
            old_usages = np.load('./linkcolouring/old_' + scheme
                                 + '_copper_link_mix_import_all_alpha=same.npy')
            old_usages += np.load('./linkcolouring/old_' + scheme
                                  + '_copper_link_mix_export_all_alpha=same.npy')
        else:
            old_usages = np.load('./linkcolouring/old_' + scheme
                                 + '_copper_link_mix_' + direction + '_all_alpha=same.npy')
        avg_node_usage = np.sum(np.sum(old_usages, axis=2), axis=0) / 70128.
        avg_EU_usage = np.sum(np.sum(np.sum(old_usages, axis=2), axis=0)) / 70128.
        avg_node_usage /= avg_EU_usage
        avg_node_usage /= normVec
        avg_node_usage *= 500000

        # Calculate usage and sort countries by mean load
        normed_usage = Total_usage / normVec
        normed_usage = np.reshape(normed_usage, (len(normed_usage), 1))
        node_mean_load = np.reshape(node_mean_load, (len(node_mean_load), 1))
        data = np.hstack([normed_usage, node_ids, node_mean_load,
                          link_proportional, Node_proportional])
        data_sort = data[data[:, 2].argsort()]
        names_sort = [names[int(i)] for i in data_sort[:, 1]]
        # flip order so largest is first
        names_sort = names_sort[::-1]
        link_proportional = data_sort[:, 3][::-1]
        Node_proportional = data_sort[:, 4][::-1]
        data_sort = data_sort[:, 0][::-1]

        plt.figure(figsize=(10, 4), facecolor='w', edgecolor='k')
        ax = plt.subplot(111)
        green = '#009900'
        blue = '#000099'

        # Plot node proportional
        plt.rc('lines', lw=2)
        plt.rc('lines', dash_capstyle='round')
        plt.plot(np.linspace(0, len(N) * 2 + 2, len(N)), Node_proportional, '--k')
        # Plot link proportional
        # plt.bar(nodes, link_proportional, width=1, color=green, edgecolor='none')
        # Plot old usage proportional
        plt.bar(nodes, avg_node_usage[loadOrder], width=1, color=green, edgecolor='none')
        # Plot usage proportional
        plt.bar(nodes_shift, data_sort, width=1, color=blue, edgecolor='none')

        # Magic with ticks and labels
        ax.set_xticks(np.linspace(2, len(N) * 2 + 2, len(N) + 1))
        ax.set_xticklabels(names_sort, rotation=60, ha="right", va="top", fontsize=10.5)
        ax.xaxis.grid(False)
        ax.xaxis.set_tick_params(width=0)
        if norm == 'cap':
            ax.set_ylabel(r'$M_n/ \mathcal{K}^T$')
        else:
            # ax.set_ylabel(r'Network usage [MW$_T$/MW$_L$]')
            ax.set_ylabel(r'$M_n/\left\langle L_n \right\rangle$')
        maxes = [max(avg_node_usage), max(data_sort)]
        plt.axis([0, nNodes * 2 + .5, 0, 1.15 * max(maxes)])

        # Legend
        artists = [plt.Line2D([0, 0], [0, 0], ls='dashed', lw=2.0, c='k'),
                   plt.Rectangle((0, 0), 0, 0, ec=green, fc=green),
                   plt.Rectangle((0, 0), 0, 0, ec=blue, fc=blue)]
        LABS = ['$M^1$', '$M^{3}_{old}$', '$M^{3}_{new}$']
        leg = plt.legend(artists, LABS, loc='upper left', ncol=len(artists),
                         columnspacing=0.6, borderpad=0.4, borderaxespad=0.0,
                         handletextpad=0.2, handleheight=1.2)
        leg.get_frame().set_alpha(0)
        leg.get_frame().set_edgecolor('white')
        ltext = leg.get_texts()
        plt.setp(ltext, fontsize=12)  # the legend text fontsize

        plt.savefig(figPath + scheme + '/network-usage-' + direction + '-'
                    + norm + '.png', bbox_inches='tight')
        if verbose:
            print('Saved figures to ./figures/compareUsage/' + scheme
                  + '/network-usage-' + direction + '-' + norm + '.png')
    yy = yy.astype(float)  # np.float was removed from numpy; plain float is equivalent
    dimx = float(dimx)
    dimy = float(dimy)
    nTimesInX = np.floor(xx / M).max() + 1
    seg_cpu = np.floor(yy / M) * nTimesInX + np.floor(xx / M)
    seg_cpu = seg_cpu.astype(np.int32)
    return seg_cpu


def random_permute_seg(seg):
    p = np.random.permutation(seg.max() + 1)
    seg2 = np.zeros_like(seg)
    for c in range(seg.max() + 1):
        seg2[seg == c] = p[c]
    return seg2.astype(np.int32)


if __name__ == "__main__":
    tic = time.perf_counter()  # time.clock was removed in Python 3.8
    seg = get_init_seg(500, 500, 17, True)
    # seg = get_init_seg(512, 512, 50, False)
    toc = time.perf_counter()
    print(toc - tic)
    print('k = ', seg.max() + 1)
    plt.figure(1)
    plt.clf()
    plt.imshow(seg, interpolation="Nearest")
    plt.axis('scaled')
def plot_variable(u, name, direc,
                  cmap=cmaps.parula,
                  scale='lin',
                  numLvls=100,
                  umin=None,
                  umax=None,
                  tp=False,
                  tpAlpha=1.0,
                  show=False,
                  hide_ax_tick_labels=False,
                  label_axes=True,
                  title='',
                  use_colorbar=True,
                  hide_axis=False,
                  colorbar_loc='right'):
    """
    show -- whether to show the plot on the screen
    tp   -- show triangle
    cmap -- colors:
        gist_yarg - grey
        gnuplot, hsv, gist_ncar
        jet - typical colors
    """
    mesh = u.function_space().mesh()
    v = u.compute_vertex_values(mesh)
    x = mesh.coordinates()[:, 0]
    y = mesh.coordinates()[:, 1]
    t = mesh.cells()

    if not os.path.isdir(direc):
        os.makedirs(direc)

    full_path = os.path.join(direc, name)

    if umin is not None:
        vmin = umin
    else:
        vmin = v.min()
    if umax is not None:
        vmax = umax
    else:
        vmax = v.max()

    # contour levels:
    if scale == 'log':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import LogFormatter
        levels = np.logspace(np.log10(vmin), np.log10(vmax), numLvls)
        tick_numLvls = min(numLvls, 8)
        tick_levels = np.logspace(np.log10(vmin), np.log10(vmax), tick_numLvls)
        formatter = LogFormatter(10, labelOnlyBase=False)
        norm = colors.LogNorm()
    elif scale == 'lin':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import ScalarFormatter
        levels = np.linspace(vmin, vmax, numLvls)
        tick_numLvls = min(numLvls, 8)
        tick_levels = np.linspace(vmin, vmax, tick_numLvls)
        formatter = ScalarFormatter()
        norm = None
    elif scale == 'bool':
        from matplotlib.ticker import ScalarFormatter
        levels = [0, 1, 2]
        formatter = ScalarFormatter()
        norm = None

    fig = plt.figure(figsize=(5, 5))
    ax = fig.add_subplot(111)
    c = ax.tricontourf(x, y, t, v, levels=levels, norm=norm,
                       cmap=plt.get_cmap(cmap))
    plt.axis('equal')

    if tp:
        p = ax.triplot(x, y, t, '-', lw=0.2, alpha=tpAlpha)
    ax.set_xlim([x.min(), x.max()])
    ax.set_ylim([y.min(), y.max()])
    if label_axes:
        ax.set_xlabel(r'$x$')
        ax.set_ylabel(r'$y$')
    if hide_ax_tick_labels:
        ax.set_xticklabels([])
        ax.set_yticklabels([])
    if hide_axis:
        plt.axis('off')

    # include colorbar:
    if scale != 'bool' and use_colorbar:
        divider = make_axes_locatable(plt.gca())
        cax = divider.append_axes(colorbar_loc, "5%", pad="3%")
        cbar = plt.colorbar(c, cax=cax, format=formatter, ticks=tick_levels)

    tit = plt.title(title)
    if use_colorbar:
        plt.tight_layout(rect=[.03, .03, 0.97, 0.97])
    else:
        plt.tight_layout()
    plt.savefig(full_path + '.eps', dpi=300)
    if show:
        plt.show()
    plt.close(fig)
stds = np.std(error, axis=1)
nodeMean = np.mean(means)
weightedNodeMean = np.mean(weightedMeans)
x = np.linspace(.5, 29.5, 30)
if mode == 'linear':
    title = 'localised'
if mode == 'square':
    title = 'synchronised'

plt.figure()
ax = plt.subplot()
plt.errorbar(x, means[loadOrder], yerr=stds * 0, marker='s', lw=0, elinewidth=1)
plt.plot([0, 30], [nodeMean, nodeMean], '--k', lw=2)
plt.title(title + ' ' + direction + ', sum of colors vs. total network usage')
plt.ylabel('Mean link deviation in %')
ax.set_xticks(np.linspace(1, 30, 30))
ax.set_xticklabels(loadNames, rotation=60, ha="right", va="top", fontsize=9)
plt.axis([0, 30, min(means) - (.1 * min(means)), max(means) + (.1 * max(means))])
plt.legend(('individual country', 'mean of countries'), loc=2, ncol=2)
plt.savefig(figPath + 'error/' + title + '_' + direction + '.pdf',
            bbox_inches='tight')

plt.figure()
ax = plt.subplot()
plt.errorbar(x, weightedMeans[loadOrder], yerr=stds * 0, marker='s', lw=0,
             elinewidth=1)
plt.plot([0, 30], [weightedNodeMean, weightedNodeMean], '--k', lw=2)
plt.title(title + ' ' + direction + ', sum of colors vs. total network usage')
plt.ylabel(r'Weighed mean link deviation in % normalised to '
           r'$\left\langle \mathcal{K}^T \right\rangle$')
ax.set_xticks(np.linspace(1, 30, 30))
ax.set_xticklabels(loadNames, rotation=60, ha="right", va="top", fontsize=9)
plt.axis([0, 30, min(weightedMeans) - (.1 * min(weightedMeans)),
          max(weightedMeans) + (.1 * max(weightedMeans))])
plt.legend(('individual country', 'mean of countries'), loc=2, ncol=2)
plt.savefig(figPath + 'error/' + 'weighted_' + title + '_' + direction + '.pdf',
            bbox_inches='tight')
plt.close()
def example(tess='I', base=[2, 2, 2], nLevels=1,
            zero_v_across_bdry=[True] * 3,
            vol_preserve=False,
            nRows=100, nCols=100, nSlices=100,
            use_mayavi=False,
            eval_v=False,
            eval_cell_idx=False):

    tw = TransformWrapper(nRows=nRows, nCols=nCols, nSlices=nSlices,
                          nLevels=nLevels, base=base,
                          zero_v_across_bdry=zero_v_across_bdry,
                          tess=tess,
                          valid_outside=False,
                          only_local=False,
                          vol_preserve=vol_preserve)
    print_iterable(tw.ms.L_cpa_space)
    print(tw)

    # Create some fake 3D image.
    img = np.zeros((nCols, nRows, nSlices), dtype=np.float64)
    # img[:] = np.random.random_integers(0, 255, img.shape)

    # Fill the image with the x coordinates as fake values
    img[:] = tw.pts_src_dense.cpu[:, 0].reshape(img.shape)

    img0 = CpuGpuArray(img.copy().astype(np.float64))
    img_wrapped_fwd = CpuGpuArray.zeros_like(img0)
    img_wrapped_inv = CpuGpuArray.zeros_like(img0)

    seed = 0
    np.random.seed(seed)

    ms_Avees = tw.get_zeros_PA_all_levels()
    ms_theta = tw.get_zeros_theta_all_levels()

    if tess == 'II':
        for level in range(tw.ms.nLevels):
            cpa_space = tw.ms.L_cpa_space[level]
            Avees = ms_Avees[level]
            if level == 0:
                tw.sample_gaussian(level, ms_Avees[level], ms_theta[level], mu=None)  # zero mean
                # ms_theta[level].fill(0)
                # ms_theta[level][-4] = 10
                cpa_space.theta2Avees(theta=ms_theta[level], Avees=Avees)
            else:
                tw.sample_from_the_ms_prior_coarse2fine_one_level(
                    ms_Avees, ms_theta, level_fine=level)
    else:
        # For tess='I' in 3D, I have yet to implement the coarse-to-fine sampling.
        for level in range(tw.ms.nLevels):
            cpa_space = tw.ms.L_cpa_space[level]
            velTess = cpa_space.zeros_velTess()
            ms_Avees[level].fill(0)
            Avees = ms_Avees[level]
            tw.sample_gaussian_velTess(level, Avees, velTess, mu=None)

    print('img shape:', img0.shape)

    # You don't have to use these. You can use any 2d array
    # that has 3 columns (regardless of the number of rows).
    pts_src = tw.pts_src_dense
    pts_src = CpuGpuArray(pts_src.cpu[::1].copy())

    # Create a buffer for the output
    pts_fwd = CpuGpuArray.zeros_like(pts_src)
    pts_inv = CpuGpuArray.zeros_like(pts_src)

    for level in range(tw.ms.nLevels):
        tw.update_pat_from_Avees(ms_Avees[level], level)

        if eval_v:
            # Evaluating the velocity field.
            # You don't have to do it unless you want to visualize v.
            # (When evaluating the transformation, v will be internally
            # evaluated anyway -- but its result won't be stored.)
            tw.calc_v(level=level)

        print('level', level)
        print()
        print('number of points:', len(pts_src))
        print('number of cells:', tw.ms.L_cpa_space[level].nC)

        # Optional, if you want to time it.
        timer_gpu_T_fwd = GpuTimer()
        # Simply calling
        #   tic = time.perf_counter()
        # and then
        #   toc = time.perf_counter()
        # won't work: most likely you will get that toc - tic is zero.
        # You need to use the GpuTimer object. When you do that,
        # one side effect is that suddenly the toc - tic from above will
        # give you a more realistic result.
        tic = time.perf_counter()
        timer_gpu_T_fwd.tic()
        tw.calc_T_fwd(pts_src, pts_fwd, level=level)
        timer_gpu_T_fwd.toc()
        toc = time.perf_counter()

        print('Time, in sec, for computing T_fwd:')
        print(timer_gpu_T_fwd.secs)
        print(toc - tic)  # likely to be 0, unless you also used the GpuTimer.

        # You can also time the inv of course. Results will be similar.
        tw.calc_T_inv(pts_src, pts_inv, level=level)

        if eval_cell_idx:
            # cell_idx is computed here just for display.
            cell_idx = CpuGpuArray.zeros(len(pts_src), dtype=np.int32)
            tw.calc_cell_idx(pts_src, cell_idx, level)

    tw.remap_fwd(pts_inv, img0, img_wrapped_fwd)
    tw.remap_inv(pts_fwd, img0, img_wrapped_inv)

    # For display purposes, do gpu2cpu transfer
    print("For display purposes, do gpu2cpu transfer")
    if eval_cell_idx:
        cell_idx.gpu2cpu()
    if eval_v:
        tw.v_dense.gpu2cpu()
    pts_fwd.gpu2cpu()
    pts_inv.gpu2cpu()
    img_wrapped_fwd.gpu2cpu()
    img_wrapped_inv.gpu2cpu()

    if use_mayavi:
        ds = 1  # downsampling factor
        i = 17
        pts_src_grid = pts_src.cpu.reshape(tw.nRows, tw.nCols, -1, 3)
        pts_src_ds = pts_src_grid[::ds, ::ds, i].reshape(-1, 3)
        pts_fwd_grid = pts_fwd.cpu.reshape(tw.nRows, tw.nCols, -1, 3)
        pts_fwd_ds = pts_fwd_grid[::ds, ::ds, i].reshape(-1, 3)
        pts_inv_grid = pts_inv.cpu.reshape(tw.nRows, tw.nCols, -1, 3)
        pts_inv_ds = pts_inv_grid[::ds, ::ds, i].reshape(-1, 3)

        from of.my_mayavi import *
        mayavi_mlab_close_all()
        mayavi_mlab_figure_bgwhite('src')
        x, y, z = pts_src_ds.T
        mayavi_mlab_plot3d(x, y, z)
        mayavi_mlab_figure_bgwhite('fwd')
        x, y, z = pts_fwd_ds.T
        mayavi_mlab_plot3d(x, y, z)

    figsize = (12, 12)
    plt.figure(figsize=figsize)
    i = 17  # some slice
    plt.subplot(131)
    plt.imshow(img0.cpu[:, :, i].astype(np.uint8), interpolation="Nearest")
    plt.title('slice from img')
    plt.subplot(132)
    plt.imshow(img_wrapped_fwd.cpu[:, :, i].astype(np.uint8), interpolation="Nearest")
    plt.axis('off')
    plt.title('slice from fwd(img)')
    plt.subplot(133)
    plt.imshow(img_wrapped_inv.cpu[:, :, i].astype(np.uint8), interpolation="Nearest")
    plt.axis('off')
    plt.title('slice from inv(img)')

    if 0:  # debug
        cpa_space = tw.ms.L_cpa_space[level]
        if eval_v:
            vx = tw.v_dense.cpu[:, 0].reshape(cpa_space.x_dense_grid_img.shape[1:])
            vy = tw.v_dense.cpu[:, 1].reshape(cpa_space.x_dense_grid_img.shape[1:])
            vz = tw.v_dense.cpu[:, 2].reshape(cpa_space.x_dense_grid_img.shape[1:])

            plt.figure()
            plt.imshow(vz[:, :, 17], interpolation="Nearest")
            plt.colorbar()
            plt.title('vz in some slice')

    return tw
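# A minimal usage sketch for the demo above (an assumption, not part of the
# original file): the names example/TransformWrapper come from the snippet
# itself; tess='II' exercises the coarse-to-fine sampling branch, and the
# smaller grid just keeps the demo cheap.
if __name__ == '__main__':
    tw = example(tess='II', nLevels=2, nRows=64, nCols=64, nSlices=64,
                 eval_v=True, eval_cell_idx=True)
    plt.show()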
def freqz(ofb, length_sec=6, ffilt=False, plot=True):
    """Computes the IR and FRF of a digital filter.

    Parameters
    ----------
    ofb : FractionalOctaveFilterbank object
    length_sec : scalar
        Length of the impulse response test signal.
    ffilt : bool
        Backward-forward filtering. The effective order is doubled then.
    plot : bool
        Create plots or not.

    Returns
    -------
    x : ndarray
        Impulse test signal.
    y : ndarray
        Impulse response signals of the filters.
    f : ndarray
        Frequency vector for the FRF.
    Y : Frequency response (FRF) of the summed filters.

    """
    from pylab import np, plt, fft, fftfreq

    x = np.zeros(int(length_sec * ofb.sample_rate))
    x[int(length_sec * ofb.sample_rate / 2)] = 0.9999

    if not ffilt:
        y, states = ofb.filter_mimo_c(x)
        y = y[:, :, 0]
    else:
        y, states = ofb.filter(x, ffilt=ffilt)

    s = np.zeros(len(x))
    len_x_2 = int(len(x) / 2)
    for i in range(y.shape[1]):
        s += y[:, i]
        X = fft(y[:, i])  # sampled frequency response
        f = fftfreq(len(x), 1.0 / ofb.sample_rate)
        if plot:
            fig = plt.figure('freqz filter bank')
            plt.grid(True)
            plt.axis([0, ofb.sample_rate / 2, -100, 5])
            L = 20 * np.log10(np.abs(X[:len_x_2]) + 1e-17)
            plt.semilogx(f[:len_x_2], L, lw=0.5)

    Y = fft(s)
    if plot:
        plt.title(u'freqz() Filter Bank')
        plt.xlabel('Frequency / Hz')
        plt.ylabel(u'Damping /dB(FS)')
        plt.xlim((10, ofb.sample_rate / 2))

        plt.figure('sum')
        L = 20 * np.log10(np.abs(Y[:len_x_2]) + 1e-17)
        plt.semilogx(f[:len_x_2], L, lw=0.5)
        level_input = 10 * np.log10(np.sum(x**2))
        level_output = 10 * np.log10(np.sum(s**2))
        plt.axis([5, ofb.sample_rate / 1.8, -50, 5])
        plt.grid(True)
        plt.title('Sum of filter bands')
        plt.xlabel('Frequency / Hz')
        plt.ylabel(u'Damping /dB(FS)')
        print('sum level', level_output, level_input)
    return x, y, f, Y
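# A minimal usage sketch for freqz() above. Assumptions not confirmed by this
# file: the ofb object is a pyfilterbank FractionalOctaveFilterbank, and the
# constructor arguments shown here are illustrative defaults.
from pyfilterbank import FractionalOctaveFilterbank

ofb = FractionalOctaveFilterbank(sample_rate=44100, order=4)
x, y, f, Y = freqz(ofb, length_sec=2, ffilt=False, plot=True)
plt.show()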
def usagePlotter(direction):
    """
    Scatter plots of nodes' import/export usages of links saved to ./figures/.
    """
    legendNames = ['diagonal', r'$99\%$ quantile', 'avg. usage', 'usage']
    modes = ['linear', 'square']
    modeNames = ['localised', 'synchronised']
    names = ['usageS', 'usageW']
    colors = ['#ffa500', '#0000aa']
    for mode in modes:
        N = EU_Nodes_usage(mode + '.npz')
        F = np.load('./results/' + mode + '-flows.npy')
        Fmax = np.max(np.abs(F), 1)
        nodes = len(N)
        links = F.shape[0]
        usageS = np.load(outPath + mode + '_' + direction + '_' + 'usageS.npy')
        usageW = np.load(outPath + mode + '_' + direction + '_' + 'usageW.npy')
        if mode == 'square':
            usageB = np.load(outPath + mode + '_' + direction + '_' + 'usageB.npy')
            names.append('usageB')
            colors.append('#874a2b')
        for node in range(nodes):
            nodeLabel = N[node].label
            nodePath = figPath + 'usage/' + nodeLabel.tostring()
            if not os.path.exists(nodePath):
                os.makedirs(nodePath)
            for link in range(links):
                linkLabel = link_label(link, N)
                linkflow = abs(F[link, :])
                qq = get_q(abs(F[link]), .99)
                plt.figure()
                ax = plt.subplot()
                nBins = 90
                totUsage = np.zeros((nBins))
                for i, color in enumerate(names):
                    usages = eval(color)
                    usages = usages[link, node, :] / linkflow
                    F_vert = np.reshape(linkflow, (len(linkflow), 1))
                    exp_vert = np.reshape(usages, (len(usages), 1))
                    F_matrix = np.hstack([F_vert, exp_vert])
                    # Sort rows by flow; the original computed this but
                    # discarded the result.
                    F_matrix = F_matrix[F_matrix[:, 0].argsort()]
                    H, bin_edges = binMaker(F_matrix, qq, lapse=70128, nbins=nBins)
                    plt.plot(bin_edges / qq, H[:, 1], '-', c=colors[i], lw=2)
                    totUsage += H[:, 1]
                plt.plot(bin_edges / qq, totUsage, '-', c="#aa0000", lw=2)
                plt.axis([0, 1, 0, 1])
                ax.set_xticks(np.linspace(0, 1, 11))
                plt.xlabel(r'$|F_l|/\mathcal{K}_l^T$')
                plt.ylabel(r'$\left\langle H_{ln} \right\rangle /|F_l|$')
                if mode == 'square':
                    modeName = modeNames[1]
                    plt.legend(('solar usage', 'wind usage', 'backup usage', 'total usage'), loc=1)
                else:
                    modeName = modeNames[0]
                    plt.legend(('solar usage', 'wind usage', 'total usage'), loc=1)
                plt.title(nodeLabel.tostring() + ' ' + modeName + ' ' + direction + ' flows on link ' + linkLabel)
                plt.savefig(nodePath + '/' + str(link) + '_' + modeName + '_' + direction + '.pdf', bbox_inches='tight')
                plt.close()
def link_level_hour(levels, usages, quantiles, scheme, direction, color, nnames, lnames, admat=None):
    """
    Make a color mesh of a node's average hourly usage of links at different levels.
    """
    if admat is None:  # 'if not admat' is ambiguous once admat is an array
        admat = np.genfromtxt('./settings/eadmat.txt')
    if color == 'solar':
        cmap = Oranges_cmap
    elif color == 'wind':
        cmap = Blues_cmap
    elif color == 'backup':
        cmap = 'Greys'
    links, nodes, lapse = usages.shape
    usages = np.reshape(usages, (links, nodes, lapse // 24, 24))
    totalHour = np.zeros((levels, 24))
    totalNormed = np.zeros((levels, 24))

    def _mesh_plot(data, path, title=None):
        # Shared plotting code for the four color meshes below; the original
        # repeated this block verbatim four times.
        plt.figure(figsize=(9, 3))
        ax = plt.subplot()
        plt.pcolormesh(data, cmap=cmap)
        plt.colorbar().set_label(label=r'$U_n^{(l)}$', size=11)
        ax.set_yticks(np.linspace(.5, levels - .5, levels))
        ax.set_yticklabels(range(1, levels + 1))
        ax.yaxis.set_tick_params(width=0)
        ax.xaxis.set_tick_params(width=0)
        ax.set_xticks(np.linspace(.5, 23.5, 24))
        ax.set_xticklabels(np.array(np.linspace(1, 24, 24), dtype='int'),
                           ha="center", va="top", fontsize=10)
        plt.ylabel('Link level')
        plt.axis([0, 24, 0, levels])
        if title:
            plt.title(title)
        plt.savefig(path, bbox_inches='tight')
        plt.close()

    for node in range(nodes):
        nl = neighbor_levels(node, levels, admat)
        hourSums = np.zeros((levels, 24))
        for lvl in range(levels):
            ll = link_level(nl, lvl, nnames, lnames)
            ll = np.array(ll, dtype='int')
            meanSum = np.sum(np.mean(usages[ll, node], axis=1), axis=0)
            linkSum = sum(quantiles[ll])
            hourSums[lvl] = meanSum / linkSum
        totalHour += hourSums

        _mesh_plot(hourSums,
                   figPath + '/hourly/' + str(scheme) + '/' + str(node) + '_' + color + '_' + direction + '.pdf',
                   title=nnames[node] + ' ' + direction + ' ' + color)

        hourSums = hourSums / np.sum(hourSums, axis=1)[:, None]
        totalNormed += hourSums
        _mesh_plot(hourSums,
                   figPath + '/hourly/' + str(scheme) + '/normed/' + str(node) + '_' + color + '_' + direction + '.pdf',
                   title=nnames[node] + ' ' + direction + ' ' + color)

    # Plot average hourly usage
    totalHour /= nodes
    _mesh_plot(totalHour,
               figPath + '/hourly/' + str(scheme) + '/total_' + color + '_' + direction + '.pdf')

    totalNormed /= nodes
    _mesh_plot(totalNormed,
               figPath + '/hourly/' + str(scheme) + '/normed/total_' + color + '_' + direction + '.pdf')
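# Hedged illustration of the hourly-averaging trick used above: reshaping a
# (links, nodes, lapse) array to (links, nodes, days, 24) lets an axis=2 mean
# average over days, leaving a 24-hour profile. Toy numbers, not project data.
import numpy as np

rng = np.random.default_rng(0)
toy = rng.random((2, 3, 48))                 # 2 links, 3 nodes, 2 days of hours
hourly = toy.reshape(2, 3, 48 // 24, 24)     # split the time axis into days x hours
profile = hourly.mean(axis=2)                # average over days -> (2, 3, 24)
assert profile.shape == (2, 3, 24)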
         linestyle='-', color='navy', label='Exp. - $\\phi_0 = 0.592$')
plt.plot(time_sim_dila_0, vel_sim_dila_0, marker='|', markersize=5,
         linestyle='-', linewidth=1.2, color='navy',
         label='SedFoam - $\\phi_0 = 0.592$')
plt.ylabel('$\\frac{v^s}{\\sqrt{gd}}$ [$-$]', fontsize=18)
plt.xlabel('$\\frac{t}{\\sqrt{d/g}}$ [$-$]', fontsize=18)
plt.axis([-1000, 160000, -0.00005, 0.03001])
plt.grid()
plt.tight_layout()
plt.savefig('Figures/velocityPlot2D_phi0592' + '.png', dpi=200)

# time - pressure plot including the experimental data (Pailha et al 2008)
# and the numerical results
plt.figure()
plt.plot(time_p_562, pressure_562, marker='o', markersize=0, linestyle='--',
         color='lightpink', label='Exp. - $\\phi_0 = 0.562$')
plt.plot(time_p_568,
# Build the meta graph
M = nx.Graph()
pos = {}
pos[1] = [0, 0]
pos[2] = [0, -dy]
pos[3] = [-dx, -2 * dy]
pos[4] = [dx, -2 * dy]
pos[5] = [-dx, -3 * dy]
pos[6] = [dx, -3 * dy]
M.add_edges_from([(1, 2), (2, 3), (2, 4), (4, 6), (3, 6), (3, 5)])

# draw_networkx_edges takes edge_color (not color); zorder isn't accepted by
# the draw functions, so it is set on the returned artists instead.
meta_edges = nx.draw_networkx_edges(M, pos, edge_color='r', style='--', width=3, alpha=.5)
meta_edges.set_zorder(-10)
meta_nodes = nx.draw_networkx_nodes(M, pos, node_color='white', node_size=4500, alpha=.3)
meta_nodes.set_zorder(10)

###################

for g, pos in zip(G, POS):
    pos_map = dict(zip(range(1, 5), pos))
    nx.draw_networkx_nodes(g, pos_map, **dargs)
    nx.draw_networkx_edges(g, pos_map, **dargs)

########

plt.axis('off')
plt.axis('equal')
plt.savefig("figures/example_4.png", bbox_inches='tight')  # the original passed bbox_inches twice
plt.show()
# print(k4)
# Define the generator model
generator_model = tf.keras.Model([z_gen, label], valid)
generator_model.compile(loss=wasserstein_loss, optimizer=optimizer)
print('finished building networks')

# plot a couple of real samples
plt.figure(figsize=(25, 25))
n_plot = 30
[X_real, cond_real] = next(generate_real_samples(n_plot))
for i in range(n_plot):
    plt.subplot(n_plot, 25, i * 25 + 1)
    plt.imshow(cond_real[i, :, :].squeeze(), cmap=plt.cm.gist_earth_r,
               norm=LogNorm(vmin=0.01, vmax=1))
    plt.axis('off')
    for j in range(1, 24):
        plt.subplot(n_plot, 25, i * 25 + j + 1)
        plt.imshow(X_real[i, j, :, :].squeeze(), vmin=0, vmax=1, cmap=plt.cm.hot_r)
        plt.axis('off')
plt.colorbar()
plt.savefig(f'{plotdir}/real_samples.{plot_format}')

hist = {'d_loss': [], 'g_loss': []}
print(f'start training on {n_samples} samples')
def showImage(self):
    self.imgObj = plt.imshow(self.img)
    plt.axis('off')
    plt.show()
def plot_variable(u, name, direc, cmap='gist_yarg', scale='lin', numLvls=12,
                  umin=None, umax=None, tp=False, tpAlpha=0.5, show=True,
                  hide_ax_tick_labels=False, label_axes=True, title='',
                  use_colorbar=True, hide_axis=False, colorbar_loc='right'):
    """
    Plot the scalar function `u` over its mesh as a filled tricontour plot
    and save it to direc/name.pdf.
    """
    mesh = u.function_space().mesh()
    v = u.compute_vertex_values(mesh)
    x = mesh.coordinates()[:, 0]
    y = mesh.coordinates()[:, 1]
    t = mesh.cells()

    d = os.path.dirname(direc)
    if not os.path.exists(d):
        os.makedirs(d)

    if umin is not None:
        vmin = umin
    else:
        vmin = v.min()
    if umax is not None:
        vmax = umax
    else:
        vmax = v.max()

    # contour levels :
    if scale == 'log':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import LogFormatter
        levels = np.logspace(np.log10(vmin), np.log10(vmax), numLvls)
        formatter = LogFormatter(10, labelOnlyBase=False)
        norm = colors.LogNorm()
    elif scale == 'lin':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import ScalarFormatter
        levels = np.linspace(vmin, vmax, numLvls)
        formatter = ScalarFormatter()
        norm = None
    elif scale == 'bool':
        from matplotlib.ticker import ScalarFormatter
        levels = [0, 1, 2]
        formatter = ScalarFormatter()
        norm = None

    fig = plt.figure(figsize=(8, 7))
    ax = fig.add_subplot(111)
    c = ax.tricontourf(x, y, t, v, levels=levels, norm=norm,
                       cmap=pl.get_cmap(cmap))
    plt.axis('equal')

    if tp:
        p = ax.triplot(x, y, t, 'k-', lw=0.25, alpha=tpAlpha)
    ax.set_xlim([x.min(), x.max()])
    ax.set_ylim([y.min(), y.max()])
    if label_axes:
        ax.set_xlabel(r'$x$')
        ax.set_ylabel(r'$y$')
    if hide_ax_tick_labels:
        ax.set_xticklabels([])
        ax.set_yticklabels([])
    if hide_axis:
        plt.axis('off')

    # include colorbar :
    if scale != 'bool' and use_colorbar:
        divider = make_axes_locatable(plt.gca())
        cax = divider.append_axes(colorbar_loc, "5%", pad="3%")
        cbar = plt.colorbar(c, cax=cax, format=formatter, ticks=levels)

    pl.mpl.rcParams['axes.titlesize'] = 'small'
    tit = plt.title(title)
    plt.tight_layout()
    plt.savefig(direc + name + '.pdf')
    if show:
        plt.show()
    plt.close(fig)
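# A self-contained sketch (illustrative only, toy data) of the scale='log'
# branch above: log-spaced contour levels plus a LogNorm spread the colors
# evenly across orders of magnitude, which linear levels would not.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors

xs, ys = np.random.rand(2, 200)          # random planar points; a Delaunay
zs = 10 ** (3 * xs)                      # triangulation is built implicitly
levels = np.logspace(np.log10(zs.min()), np.log10(zs.max()), 12)
plt.tricontourf(xs, ys, zs, levels=levels, norm=colors.LogNorm())
plt.colorbar()
plt.axis('equal')
plt.show()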
def train(n_epochs, _batch_size, start_epoch=0):
    """
    Train with fixed batch_size for the given number of epochs,
    make some example plots and save the model after each epoch.
    """
    global batch_size
    batch_size = _batch_size

    # Create a data queue with the keras facilities. This allows
    # the data to be prepared in parallel to the training.
    sample_dataqueue = GeneratorEnqueuer(generate_real_samples(batch_size),
                                         use_multiprocessing=True)
    sample_dataqueue.start(workers=2, max_queue_size=10)
    sample_gen = sample_dataqueue.get()

    gan_sample_dataqueue = GeneratorEnqueuer(
        generate_latent_points_as_generator(batch_size),
        use_multiprocessing=True)
    gan_sample_dataqueue.start(workers=2, max_queue_size=10)
    gan_sample_gen = gan_sample_dataqueue.get()

    # targets for the loss function
    valid = -np.ones((batch_size, 1))
    fake = np.ones((batch_size, 1))
    dummy = np.zeros((batch_size, 1))  # dummy gt for gradient penalty

    bat_per_epo = int(n_samples / batch_size)

    # We need to call the discriminator once in order
    # to initialize the input shapes.
    [X_real, cond_real] = next(sample_gen)
    latent = np.random.normal(size=(batch_size, latent_dim))
    critic_model.predict([X_real, cond_real, latent])
    for i in trange(n_epochs):
        epoch = 1 + i + start_epoch
        # enumerate batches over the training set
        for j in trange(bat_per_epo):
            for _ in range(n_disc):
                # fetch a batch from the queue
                [X_real, cond_real] = next(sample_gen)
                latent = np.random.normal(size=(batch_size, latent_dim))
                d_loss = critic_model.train_on_batch(
                    [X_real, cond_real, latent], [valid, fake, dummy])
                # We get four losses back here: average, valid, fake, and
                # gradient penalty. We want the average of valid and fake.
                d_loss = np.mean([d_loss[1], d_loss[2]])

            # train the generator:
            # prepare points in latent space as input for the generator
            [latent, cond] = next(gan_sample_gen)
            # update the generator via the discriminator's error
            g_loss = generator_model.train_on_batch([latent, cond], valid)
            # summarize the loss on this batch
            print(f'{epoch}, {j + 1}/{bat_per_epo}, d_loss {d_loss}' +
                  f' g:{g_loss} ')

            if np.isnan(g_loss) or np.isnan(d_loss):
                raise ValueError('encountered nan in g_loss and/or d_loss')

            hist['d_loss'].append(d_loss)
            hist['g_loss'].append(g_loss)

        # plot generated examples
        plt.figure(figsize=(25, 25))
        n_plot = 30
        X_fake, cond_fake = generate_fake_samples(n_plot)
        for iplot in range(n_plot):
            plt.subplot(n_plot, 25, iplot * 25 + 1)
            plt.imshow(cond_fake[iplot, :, :].squeeze(),
                       cmap=plt.cm.gist_earth_r, norm=LogNorm(vmin=0.01, vmax=1))
            plt.axis('off')
            for jplot in range(1, 24):
                plt.subplot(n_plot, 25, iplot * 25 + jplot + 1)
                plt.imshow(X_fake[iplot, jplot, :, :].squeeze(),
                           vmin=0, vmax=1, cmap=plt.cm.hot_r)
                plt.axis('off')
        plt.colorbar()
        plt.suptitle(f'epoch {epoch:04d}')
        plt.savefig(
            f'{plotdir}/fake_samples_{params}_{epoch:04d}_{j:06d}.{plot_format}')

        # plot the loss history
        plt.figure()
        plt.plot(hist['d_loss'], label='d_loss')
        plt.plot(hist['g_loss'], label='g_loss')
        plt.xlabel('batch')  # the batch index is on the x-axis; the original labeled the y-axis 'batch'
        plt.legend()
        plt.savefig(f'{plotdir}/training_loss_{params}.{plot_format}')
        pd.DataFrame(hist).to_csv('hist.csv')
        plt.close('all')

        generator.save(f'{outdir}/gen_{params}_{epoch:04d}.h5')
        critic.save(f'{outdir}/disc_{params}_{epoch:04d}.h5')
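# The models above compile with a `wasserstein_loss` that is not shown in this
# file. A common Keras definition consistent with the valid=-1 / fake=+1
# targets used here (an assumption, not the author's confirmed code) is:
import tensorflow.keras.backend as K

def wasserstein_loss(y_true, y_pred):
    # With valid=-1 and fake=+1, minimizing mean(y_true * y_pred) pushes the
    # critic score up for real samples and down for generated ones.
    return K.mean(y_true * y_pred)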
hx, hy = my_dict['history_x'], my_dict['history_y']
lines_shape = (18, 512)

# The initial points
lines_old_x = hx[0].reshape(lines_shape).copy()
lines_old_y = hy[0].reshape(lines_shape).copy()
# The final points
lines_new_x = hx[-1].reshape(lines_shape).copy()
lines_new_y = hy[-1].reshape(lines_shape).copy()

c = 'r'
fig = plt.figure()
plt.subplot(121)
for line_x, line_y in zip(lines_old_x, lines_old_y):
    plt.plot(line_x, line_y, c)
plt.axis('scaled')
q = 100
plt.xlim(0 - q, 512 + q)
plt.ylim(0 - q, 512 + q)
plt.gca().invert_yaxis()

c = 'b'
plt.subplot(122)
for line_x, line_y in zip(lines_new_x, lines_new_y):
    plt.plot(line_x, line_y, c)
plt.axis('scaled')
q = 500
plt.xlim(0 - q, 512 + q)
plt.ylim(0 - q, 512 + q)
plt.gca().invert_yaxis()

pylab.show()
# In[16]:

log_returns.std() * math.sqrt(M)

# In[17]:

plt.figure(figsize=(10, 6))
plt.hist(log_returns.flatten(), bins=70, density=True,
         label='frequency', color='b')
plt.xlabel('log_return')
plt.ylabel('frequency')
x = np.linspace(plt.axis()[0], plt.axis()[1])
plt.plot(x, scs.norm.pdf(x, loc=r / M, scale=sigma / np.sqrt(M)),
         'r', lw=2.0, label='pdf')
plt.legend()

# In[18]:

sm.qqplot(log_returns.flatten()[::500], line='s')
plt.xlabel('Theoretical Quantiles')
plt.ylabel('Sample Quantiles')

# In[19]:
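# Hedged aside on cell In[16]: with M intervals per year, i.i.d. per-interval
# log returns of standard deviation sigma/sqrt(M) annualize back to sigma via
# std * sqrt(M). Toy check with simulated normals, not the notebook's data:
import numpy as np

M_toy, sigma_toy = 50, 0.25
sim = np.random.standard_normal((M_toy, 100000)) * sigma_toy / np.sqrt(M_toy)
print(sim.std() * np.sqrt(M_toy))  # ~0.25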
plt.title('Easy as 1,2,3')  # add a title to subplot 211

'==========================================='

mu, sigma = 100, 15
x = mu + sigma * np.random.randn(10000)

# histogram of the data
n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)

plt.xlabel('Smarts')
plt.ylabel('Probability')
# add a title
plt.title('Histogram of IQ')
# add text
plt.text(60, .025, r'$\mu=100,\ \sigma=15$')
plt.axis([40, 160, 0, 0.03])
plt.grid(True)
plt.show()

'==========================================='

ax = plt.subplot(111)
t = np.arange(0.0, 5.0, 0.01)
s = np.cos(2 * np.pi * t)
line, = plt.plot(t, s, lw=2)
plt.annotate('local max', xy=(2, 1), xytext=(3, 1.5),