def plot_data(self):
    plt.clf()  # clear the figure
    plt.title("Parent incomes vs. student grade")
    plt.plot(self.incomes, self.grades, color='orange', marker='o', linestyle='')
def displayRetireWRate(month, rates, terms):
    plt.figure('retireRate')
    plt.clf()
    for rate in rates:
        xvals, yvals = retire(month, rate, terms)
        plt.plot(xvals, yvals,
                 label='monthly: ' + str(month) + ' rate of: ' + str(int(rate * 100)))
    plt.legend(loc='upper left')
def displayRetireWMonthlies(monthlies, rate, terms):
    plt.figure('retireMonth')
    plt.clf()
    for monthly in monthlies:
        # using base and savings list as x and y values
        xvals, yvals = retire(monthly, rate, terms)
        plt.plot(xvals, yvals, label='retire with monthly inst of ' + str(monthly))
    plt.legend()
import os
import re

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm


def convert_all_to_png(vis_path, out_dir="maps_png", size=None):
    # read_visualization_data is a project-specific helper (not shown here).
    units = {
        'gas_density': 'Gas Density [g/cm$^3$]',
        'Tm': 'Temperature [K]',
        'Tew': 'Temperature [K]',
        'S': 'Entropy []',
        'dm': 'DM Density [g/cm$^3$]',
        'v': 'Velocity [km/s]'
    }
    log_list = ['gas_density']

    for vis_file in os.listdir(vis_path):
        if ".dat" not in vis_file:
            continue
        print("converting %s" % vis_file)
        map_type = re.search('sigma_(.*)_[xyz]', vis_file).group(1)
        (image, pixel_size, axis_values) = read_visualization_data(
            vis_path + "/" + vis_file, size)
        print("image width in Mpc/h: ", axis_values[-1] * 2.0)
        x, y = np.meshgrid(axis_values, axis_values)

        cmap_max = image.max()
        cmap_min = image.min()

        # plotting
        plt.figure(figsize=(5, 4))
        if map_type in log_list:
            plt.pcolor(x, y, image, norm=LogNorm(vmax=cmap_max, vmin=cmap_min))
        else:
            plt.pcolor(x, y, image, vmax=cmap_max, vmin=cmap_min)
        cbar = plt.colorbar()
        if map_type in units.keys():
            cbar.ax.set_ylabel(units[map_type])
        plt.axis([axis_values[0], axis_values[-1], axis_values[0], axis_values[-1]])
        del image
        plt.xlabel(r"$Mpc/h$", fontsize=18)
        plt.ylabel(r"$Mpc/h$", fontsize=18)

        out_file = vis_file.replace("dat", "png")
        plt.savefig(out_dir + "/" + out_file, dpi=150)
        plt.close()
        plt.clf()
def displayRetireWMonthsandRates(monthlies, rates, terms):
    plt.figure('retire both')
    plt.clf()
    plt.xlim(30 * 12, 40 * 12)  # focusing only on the last 10 years of investment
    for monthly in monthlies:
        for rate in rates:
            xvals, yvals = retire(monthly, rate, terms)
            plt.plot(xvals, yvals,
                     label='retire with ' + str(monthly) + ":" + str(int(rate * 100)))
    plt.legend(loc='upper left')
def drawAdoptionNetworkMPL(G, fnum=1, show=False, writeFile=None):
    """Draws the network to matplotlib, coloring the nodes based on adoption.

    Looks for the node attribute 'adopted'. If the attribute is True, colors
    the node a different color, showing adoption visually. This function
    assumes that the node attributes have been pre-populated.

    :param networkx.Graph G: Any NetworkX Graph object.
    :param int fnum: The matplotlib figure number. Defaults to 1.
    :param bool show: If True, display the figure with plt.show().
    :param str writeFile: A filename/path to save the figure image. If not
        specified, no output file is written.
    """
    Gclean = G.subgraph([n for n in G.nodes() if n not in nx.isolates(G)])
    plt.figure(num=fnum, figsize=(6, 6))
    # clear figure
    plt.clf()
    # Blue ('b') node color for adopters, red ('r') for non-adopters.
    nodecolors = ['b' if Gclean.node[n]['adopted'] else 'r'
                  for n in Gclean.nodes()]
    layout = nx.spring_layout(Gclean)
    nx.draw_networkx_nodes(Gclean, layout, node_size=80,
                           nodelist=Gclean.nodes(), node_color=nodecolors)
    nx.draw_networkx_edges(Gclean, layout, alpha=0.5)  # width=4
    # TODO: Draw labels of Ii values. Maybe vary size of node.
    # TODO: Color edges blue based on influences from neighbors
    influenceEdges = []
    for a in Gclean.nodes():
        for n in Gclean.node[a]['influence']:
            influenceEdges.append((a, n))
    if len(influenceEdges) > 0:
        nx.draw_networkx_edges(Gclean, layout, alpha=0.5, width=5,
                               edgelist=influenceEdges,
                               edge_color=['b'] * len(influenceEdges))
    # some extra space around figure
    plt.xlim(-0.05, 1.05)
    plt.ylim(-0.05, 1.05)
    plt.axis('off')
    if writeFile is not None:
        plt.savefig(writeFile)
    if show:
        plt.show()
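# A minimal usage sketch for the function above. The graph and attribute values
# are made up for illustration, and it assumes an older NetworkX (1.x-style)
# API where `G.node[n]` is available, matching the function's own usage.
import networkx as nx

G = nx.karate_club_graph()
for n in G.nodes():
    G.node[n]['adopted'] = (n % 3 == 0)              # mark roughly a third as adopters
    G.node[n]['influence'] = [m for m in G.neighbors(n)
                              if m % 3 == 0][:1]     # at most one influencing neighbor
drawAdoptionNetworkMPL(G, fnum=1, show=True, writeFile=None)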
def dynamic_img_show(img, title_str='', fig_size=[14, 8], hide_axes=True):
    '''Show image <img>. If called repeatedly within a loop, it will
    dynamically redraw the image in place.

    #DEMO
    import time
    for i in range(10):
        img = np.zeros([50, 50])
        img[:i * 5] = 1
        dynamic_img_show(img, 'iter=%s' % i)
        time.sleep(0.1)
    '''
    plt.clf()
    plt.title(title_str)
    plt.imshow(img)
    if hide_axes:
        plt.xticks([])
        plt.yticks([])
    plt.gcf().set_size_inches(fig_size)
    display.display(plt.gcf())
    display.clear_output(wait=True)
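# Note: dynamic_img_show assumes a Jupyter/IPython session with
# `from IPython import display`, `import numpy as np`, and
# `import matplotlib.pyplot as plt` in scope; the #DEMO block in its docstring
# shows the intended call pattern (clearing and redrawing on every iteration).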
def displayRetireWMonthsandRates2(monthlies, rates, terms):
    plt.figure('retire better')
    plt.clf()
    plt.xlim(30 * 12, 40 * 12)
    monthLabels = ['r', 'b', 'g', 'k']
    rateLabels = ['-', 'o', '^']
    for i in range(len(monthlies)):
        monthly = monthlies[i]
        # using the remainder to pick a new color for each new month choice
        monthLabel = monthLabels[i % len(monthLabels)]
        for j in range(len(rates)):
            rate = rates[j]
            # if there are more rates than styles, cycle back to the beginning
            rateLabel = rateLabels[j % len(rateLabels)]
            xvals, yvals = retire(monthly, rate, terms)
            plt.plot(xvals, yvals, monthLabel + rateLabel,
                     label='retire: ' + str(monthly) + " : " + str(int(rate * 100)))
    plt.legend(loc="upper left")
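# The displayRetire* examples above all call a retire() helper that is not
# shown in this listing. A minimal sketch of what such a function might look
# like, assuming it returns parallel lists of month indices and accumulated
# savings for a fixed monthly contribution and a monthly interest rate (the
# signature and return convention are inferred from how it is called above):
def retire(monthly, rate, terms):
    """Hypothetical helper: simulate month-by-month growth of a savings account."""
    base = [0]       # month indices (x values)
    savings = [0]    # accumulated savings (y values)
    for month in range(terms):
        base.append(month + 1)
        savings.append(savings[-1] * (1 + rate) + monthly)
    return base, savings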
    # (tail of a segmentation helper, presumably get_init_seg, as called below)
    xx = xx.astype(np.float)
    yy = yy.astype(np.float)
    dimx = float(dimx)
    dimy = float(dimy)
    nTimesInX = np.floor(xx / M).max() + 1
    seg_cpu = np.floor(yy / M) * nTimesInX + np.floor(xx / M)
    seg_cpu = seg_cpu.astype(np.int32)
    return seg_cpu


def random_permute_seg(seg):
    p = np.random.permutation(seg.max() + 1)
    seg2 = np.zeros_like(seg)
    for c in range(seg.max() + 1):
        seg2[seg == c] = p[c]
    return seg2.astype(np.int32)


if __name__ == "__main__":
    tic = time.clock()
    seg = get_init_seg(500, 500, 17, True)
    # seg = get_init_seg(512, 512, 50, False)
    toc = time.clock()
    print(toc - tic)
    print('k = ', seg.max() + 1)

    plt.figure(1)
    plt.clf()
    plt.imshow(seg, interpolation="Nearest")
    plt.axis('scaled')
def fit(self, use_prior=False, proposal_scale=0.001, use_local=False):
    nLevels = self.nLevels
    tw = self.tw
    sigma_lm = self.sigma_lm
    self.use_local = use_local

    inference_record = Bunch()
    inference_record.nLevels = nLevels
    inference_record.tw_args = tw.args
    inference_record.steps = []
    inference_record.use_prior = use_prior
    inference_record.proposal_scale = proposal_scale
    inference_record.sigma_lm = sigma_lm

    try:
        run_lengths = self.run_lengths
    except AttributeError:
        self.set_run_lengths([500] * self.nLevels)
        run_lengths = self.run_lengths
        # raise Exception("self.set_run_lengths was not called yet")

    for i in range(nLevels):
        # for i in range(nLevels + 5):
        if i < nLevels:
            level = i
            if level == 0:
                theta = tw.ms.L_cpa_space[level].get_zeros_theta()
            else:
                theta_fine = tw.ms.L_cpa_space[level].get_zeros_theta()
                tw.ms.propogate_theta_coarse2fine(theta_coarse=theta,
                                                  theta_fine=theta_fine)
                theta = theta_fine
            _sigma_lm = sigma_lm
        else:
            _sigma_lm *= 0.9

        print('-' * 10, 'level', level, '-' * 10)
        cpa_space = tw.ms.L_cpa_space[level]
        print(cpa_space)

        data = {'src': self.src, 'dst': self.dst, 'transformed': self.transformed}

        if use_prior:
            lp_func = LP(ms=tw.ms, msp=tw.msp, level=level, SDLP=SDLP, required={})
        else:
            lp_func = None

        sampler = Metropolis(ll_func=LL(ms=tw.ms, level=level, SDLL=SDLL,
                                        data=data,
                                        required={'sigma_lm': _sigma_lm,
                                                  'params_flow_int': tw.params_flow_int_coarse}),
                             proposal=Proposal(ms=tw.ms, msp=tw.msp, level=level,
                                               scale=proposal_scale,
                                               use_local=use_local),
                             # proposal=ProposalSphericalGaussian(ms=tw.ms, level=level,
                             #                                    scale=0.1 / (1.2**level)),
                             lp_func=lp_func,
                             wlp=self.wlp)
        sampler.set_theta(theta)
        run_length = run_lengths[level]
        sampler.run(run_length)
        theta = sampler.theta_current  # prepare for next iteration
        inference_record.steps.append(sampler.get_record())

        if i >= nLevels - 1:
            plt.figure(i + 1)
            plt.clf()
            self.disp(sampler=sampler)

    inference_record.theta = theta.copy()
    steps = inference_record.steps
    nAccepted = [step.nAccepted for step in steps]
    run_lengths = [step.N for step in steps]
    times = [step.runs[0].time for step in steps]
    total_time = sum(times)
    print("run_lengths:", run_lengths)
    print("nAccepted:", nAccepted)
    print('times:')
    print_iterable(times)
    print('time total:', total_time)

    return theta.copy(), inference_record
def main(data, inference_params, dispOn):
    img1 = data.img1  # src
    img2 = data.img2  # dst

    if img1.shape != img2.shape:
        raise ValueError(img1.shape, img2.shape)
    nRows, nCols = img1.shape[:2]
    nPts = nRows * nCols

    reg = Register(nRows=nRows,
                   nCols=nCols,
                   base=inference_params.base,
                   nLevels=inference_params.nLevels,
                   tess='I',
                   zero_v_across_bdry=[False, False],
                   sigma_signal=inference_params.sigma_signal,
                   scale_spatial=inference_params.scale_spatial,
                   scale_value=inference_params.scale_value,
                   wlp=inference_params.wlp,
                   ll_type=inference_params.ll_type,
                   only_local=False,
                   valid_outside=inference_params.valid_outside)

    reg.set_dense(domain_start=0, domain_end=nPts)
    reg.set_data(x=reg.tw.pts_src_dense,
                 signal_src=img1, signal_dst=img2,
                 isbinary=inference_params.isbinary)

    print(reg)

    # reg.set_run_lengths([50000] * reg.nLevels)
    reg.set_run_lengths([inference_params.MCMCniters_per_level] * reg.nLevels)
    # if inside_spyder():
    #     reg.set_run_lengths([100] * reg.nLevels)
    # else:
    #     reg.set_run_lengths(run_lengths)

    theta_est, inference_record = reg.fit(
        use_prior=inference_params.use_prior,
        use_local=inference_params.use_local,
        dispOn=dispOn,
        # interp_type_for_ll=cv2.INTER_LANCZOS4,
        interp_type_for_ll='gpu_linear',
        interp_type_during_visualization=cv2.INTER_LANCZOS4)

    if dispOn:
        plt.figure(1000)
        plt.clf()
        reg.plot_inference_summary(inference_record)

    reg.transformed.gpu2cpu()
    reg.signal.transformed.gpu2cpu()

    inference_record.transformed = reg.transformed.cpu
    inference_record.src = reg.src.cpu
    inference_record.signal = Bunch()
    inference_record.signal.src = reg.signal.src.cpu
    inference_record.signal.dst = reg.signal.dst.cpu
    inference_record.signal.transformed = reg.signal.transformed.cpu

    # if TF.dump_results:
    #     Pkl.dump(results_filename, inference_record, create_dir_if_needed=1, override=1)

    return reg, inference_record, theta_est
def example(base=[5], scale_spatial=100, nLevels=2,
            zero_v_across_bdry=[1], use_local_basis=True):
    nPtsDense = 10000
    tw = TransformWrapper(nCols=100,
                          nLevels=nLevels,
                          base=base,
                          scale_spatial=scale_spatial,
                          nPtsDense=nPtsDense,
                          zero_v_across_bdry=zero_v_across_bdry)
    print_iterable(tw.ms.L_cpa_space)

    seed = 0
    np.random.seed(seed)

    for level in range(tw.ms.nLevels):
        cpa_space = tw.ms.L_cpa_space[level]
        Avees = cpa_space.Avees
        velTess = cpa_space.zeros_velTess()

        if use_local_basis:
            if 0:
                tw.sample_gaussian_velTess(level, Avees, velTess, mu=None)
                Avees *= 0.001
                velTess *= 0.001
            else:
                if not zero_v_across_bdry[0]:
                    velTess[:] = 10 * np.random.standard_normal(velTess.shape)
                else:
                    velTess[1:-1] = 10 * np.random.standard_normal(velTess[1:-1].shape)
                cpa_space.velTess2Avees(velTess=velTess, Avees=Avees)
        else:
            theta = cpa_space.get_zeros_theta()
            tw.sample_gaussian(level, Avees, theta, mu=None)
            # theta /= 10
            cpa_space.theta2Avees(theta=theta, Avees=Avees)

        # This step is important and must be done before we try to "use"
        # the new values of the (vectorized) A's.
        tw.update_pat_from_Avees(Avees, level)

        pts_src = tw.x_dense
        tw.calc_v(level=level, pts=pts_src, v=tw.v_dense)
        tw.v_dense.gpu2cpu()

        pts_fwd = CpuGpuArray.zeros_like(pts_src)  # buffer for the forward transform
        tw.calc_T_fwd(pts_src, pts_fwd, level=level)
        pts_fwd.gpu2cpu()

        pts_inv = CpuGpuArray.zeros_like(pts_src)  # buffer for the inverse transform
        tw.calc_T_inv(pts_src, pts_inv, level=level)
        pts_inv.gpu2cpu()

        plt.figure(level)
        plt.clf()
        interval = pts_src.cpu  # interval doesn't have to be pts_src.cpu
        Visualize.simple(tw.x_dense, tw.v_dense, interval, pts_src,
                         transformed_fwd=pts_fwd,
                         transformed_inv=pts_inv,
                         cpa_space=cpa_space)

    return tw
transformed.gpu2cpu()
v_dense.gpu2cpu()

if 0:
    plt.figure(17)
    for c, A in enumerate(As):
        _x = np.ones((2, 100))
        m = cpa_space.cells_verts[c, 0, 0]
        M = cpa_space.cells_verts[c, 1, 0]
        _x[0] = np.linspace(m, M, 100)
        _v = A.dot(_x).flatten()
        plt.plot(_x[0], _v)

if 1:
    # plt.figure()
    plt.figure(1)
    plt.clf()

    plt.subplot(231)
    plt.plot(interval, src.cpu)
    plt.title('src')

    plt.subplot(232)
    # plt.plot(interval[1:], np.diff(src) / (interval[1] - interval[0]))
    dx = interval[1] - interval[0]
    plt.plot(interval[1:], np.diff(src.cpu.ravel()) / dx)
    plt.title("d/dx src")
    plt.ylim(0, .5)

    plt.subplot(233)
    plt.plot(np.linspace(cpa_space.XMINS[0], cpa_space.XMAXS[0], interval.size),
             v_dense.cpu.ravel())
    plt.ylim(-1, 1)
    plt.title('velocity')

    plt.subplot(234)
def fit(self, use_prior=False, proposal_scale=0.001, use_local=True, dispOn=True,
        interp_type_for_ll=None,
        interp_type_during_visualization=None,
        scale_local_proposal=None):
    nLevels = self.nLevels
    tw = self.tw
    sigma_signal = self.sigma_signal
    self.use_local = use_local

    inference_record = Bunch()
    inference_record.tw_args = tw.args
    inference_record.steps = []
    inference_record.use_prior = use_prior
    inference_record.proposal_scale = proposal_scale
    inference_record.sigma_signal = sigma_signal

    try:
        run_lengths = self.run_lengths
    except AttributeError:
        self.set_run_lengths([500] * self.nLevels)
        run_lengths = self.run_lengths
        # raise Exception("self.set_run_lengths was not called yet")

    wlp = self.wlp
    for i in range(nLevels):
        # for i in range(nLevels + 5):
        if i < nLevels:
            level = i
            if level == 0:
                theta = tw.ms.L_cpa_space[level].get_zeros_theta()
            else:
                theta_fine = tw.ms.L_cpa_space[level].get_zeros_theta()
                tw.ms.propogate_theta_coarse2fine(theta_coarse=theta,
                                                  theta_fine=theta_fine)
                theta = theta_fine
            _sigma_signal = sigma_signal
        else:
            _sigma_signal *= 0.9

        print('-' * 10, 'level', level, '-' * 10)
        cpa_space = tw.ms.L_cpa_space[level]
        print(cpa_space)

        data = {'src': self.src, 'transformed': self.transformed,
                'signal': self.signal}

        if use_prior:
            lp_func = LP(ms=tw.ms, msp=tw.msp, level=level, SDLP=SDLP, required={})
        else:
            lp_func = None

        sampler = Metropolis(
            ll_func=LL(ms=tw.ms, level=level, SDLL=self.SDLL,
                       data=data,
                       required={'sigma_signal': _sigma_signal,
                                 'params_flow_int': tw.params_flow_int_coarse,
                                 'interp_type_for_ll': interp_type_for_ll}),
            proposal=Proposal(ms=tw.ms, msp=tw.msp, level=level,
                              scale=proposal_scale,
                              use_local=use_local,
                              scale_local_proposal=scale_local_proposal),
            # proposal=ProposalSphericalGaussian(ms=tw.ms, level=level,
            #                                    scale=0.1 / (1.2**level)),
            lp_func=lp_func,
            wlp=wlp)

        sampler.set_theta(theta)
        run_length = run_lengths[level]
        sampler.run(run_length)
        theta = sampler.theta_current  # prepare for next iteration
        inference_record.steps.append(sampler.get_record())

        if dispOn:
            if i >= nLevels - 1 or 1:
                plt.figure(i + 1)
                plt.clf()
                self.disp(sampler=sampler,
                          interp_type_during_visualization=interp_type_during_visualization)

    inference_record.theta = theta.copy()
    steps = inference_record.steps
    nAccepted = [step.nAccepted for step in steps]
    run_lengths = [step.N for step in steps]
    times = [step.runs[0].time for step in steps]
    total_time = sum(times)
    print("run_lengths:", run_lengths)
    print("nAccepted:", nAccepted)
    print('times:')
    print_iterable(times)
    print('time total:', total_time)

    return theta.copy(), inference_record
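# All of the examples in this listing share the same basic pattern: select (or
# reuse) a figure, wipe it with plt.clf(), and redraw into the now-empty figure.
# A minimal self-contained sketch of that pattern (the data here is made up
# purely for illustration):
import numpy as np
import matplotlib.pyplot as plt

plt.figure('clf demo')              # create, or switch back to, a named figure
x = np.linspace(0, 2 * np.pi, 200)
for k in range(5):
    plt.clf()                       # clear the previous frame instead of opening a new figure
    plt.plot(x, np.sin(x + k), label='shift = %d' % k)
    plt.legend(loc='upper left')
    plt.pause(0.2)                  # give the GUI event loop a chance to redraw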