def plot_collinearity(motifs, best_Z):
    """Plot the cooccurrences of motifs."""
    import numpy
    import pylab
    import scipy.cluster.hierarchy as hier
    # from scipy.stats import pearsonr
    M = len(motifs)
    cooccurrences = numpy.ones((M, M))
    for m1 in xrange(M):
        for m2 in xrange(M):
            # both = sum(numpy.logical_and(m1seqs, m2seqs))
            # cooccurrences[m1,m2] = both/float(sum(m2seqs))
            cooccurrences[m1, m2] = \
                numpy.sqrt(sum(best_Z[m1] * best_Z[m2])) \
                / numpy.linalg.norm(best_Z[m2])
            # rho, _ = pearsonr(best_Z[m1], best_Z[m2])
            # cooccurrences[m1, m2] = rho
    Y = hier.centroid(cooccurrences)
    index = hier.fcluster(Y, -1) - 1
    cooccurrences = cooccurrences[index, :]
    cooccurrences = cooccurrences[:, index]
    pylab.pcolor(cooccurrences)
    pylab.colorbar()
    ax = pylab.gca()
    ax.set_xticks([])
    # ax.set_xticks(.5 + numpy.arange(M))
    # ax.set_xticklabels(motifs)
    ax.set_yticks(.5 + numpy.arange(M))
    ax.set_yticklabels(numpy.asarray(motifs)[index])
    ax.set_xlim((0, M))
    ax.set_ylim((0, M))
    for line in ax.yaxis.get_ticklines():
        line.set_markersize(0)
    pylab.gcf().subplots_adjust(left=.27, bottom=.02, top=.98, right=.99)
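# Illustrative call for plot_collinearity (an assumption about shapes, not part
# of the original source): best_Z is taken to be an (M, n_sequences) matrix of
# nonnegative per-motif loadings, one row per entry of `motifs`.
import numpy
motifs = ['motif-A', 'motif-B', 'motif-C']
best_Z = numpy.random.rand(len(motifs), 50)
plot_collinearity(motifs, best_Z)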
def Xtest3(self):
    """
    Test from Kate Marvel
    As the following code snippet demonstrates, regridding a
    cdms2.tvariable.TransientVariable instance using regridTool='regrid2'
    results in a new array that is masked everywhere.  regridTool='esmf'
    and regridTool='libcf' both work as expected.

    This is similar to the original test, but we construct our own
    uniform grid.  This should pass.
    """
    import cdms2 as cdms
    import numpy as np
    filename = cdat_info.get_sampledata_path() + '/clt.nc'
    a = cdms.open(filename)
    data = a('clt')[0, ...]

    print data.mask  # verify this data is not masked

    GRID = cdms.grid.createUniformGrid(-90.0, 23, 8.0, -180.0, 36, 10.0,
                                       order="yx", mask=None)
    test_data = data.regrid(GRID, regridTool='regrid2')

    # check that the mask does not extend everywhere...
    self.assertNotEqual(test_data.mask.sum(), test_data.size)

    if PLOT:
        pylab.subplot(2, 1, 1)
        pylab.pcolor(data[...])
        pylab.title('data')
        pylab.subplot(2, 1, 2)
        pylab.pcolor(test_data[...])
        pylab.title('test_data (interpolated data)')
        pylab.show()
def plotAllWarmJumps():
    jumpAddrs = np.array(getAllWarmJumpsAddr()).reshape((8, 18))
    figure()
    pcolor(jumpAddrs)
    for (x, y), v in np.ndenumerate(jumpAddrs):
        text(y + 0.125, x + 0.5, "0x%03x" % v)
    show()
def heat_map(K, show, name):
    pl.pcolor(K)
    pl.colorbar()
    pl.title(name)
    pl.savefig(name)
    if show:
        pl.show()
def Xtest2(self):
    """
    Test from Kate Marvel
    As the following code snippet demonstrates, regridding a
    cdms2.tvariable.TransientVariable instance using regridTool='regrid2'
    results in a new array that is masked everywhere.  regridTool='esmf'
    and regridTool='libcf' both work as expected.

    This passes.
    """
    import cdms2 as cdms
    import numpy as np
    filename = cdat_info.get_sampledata_path() + '/clt.nc'
    a = cdms.open(filename)
    data = a('clt')[0, ...]

    print data.mask  # verify this data is not masked

    GRID = data.getGrid()  # input = output grid, passes
    test_data = data.regrid(GRID, regridTool='regrid2')

    # check that the mask does not extend everywhere...
    self.assertNotEqual(test_data.mask.sum(), test_data.size)

    if PLOT:
        pylab.subplot(2, 1, 1)
        pylab.pcolor(data[...])
        pylab.title('data')
        pylab.subplot(2, 1, 2)
        pylab.pcolor(test_data[...])
        pylab.title('test_data (interpolated data)')
        pylab.show()
def SOM(data, leninput, lentarget, alpha_som, omega_som):
    som = MiniSom(16, 16, leninput, sigma=omega_som, learning_rate=alpha_som)
    som.random_weights_init(data)
    print("Training...")
    som.train_batch(data, 20000)  # training with 20000 iterations
    print("\n...ready!")
    numpy.save('weight_som', som.weights)
    bone()
    pcolor(som.distance_map().T)  # distance map as background
    colorbar()
    t = zeros(lentarget, dtype=int)
    # use different colors and markers for each label
    markers = ['o', 's', 'D']
    colors = ['r', 'g', 'b']
    outfile = open('cluster-result.csv', 'w')
    for cnt, xx in enumerate(data):
        w = som.winner(xx)  # getting the winner
        for z in xx:
            outfile.write("%s " % str(z))
        outfile.write("%s-%s \n" % (str(w[0]), str(w[1])))
    outfile.close()
def correlation_matrix(data, size=8.0):
    """Calculates and shows the correlation matrix of the pandas data
    frame 'data' as a heat map.
    Only the correlations between numerical variables are calculated!
    """
    # calculate the correlation matrix
    corr = data.corr()
    #print corr
    lc = len(corr.columns)
    # set some settings for plotting
    pl.pcolor(corr, vmin=-1, vmax=1, edgecolor="black")
    pl.colorbar()
    pl.xlim([-5, lc])
    pl.ylim([0, lc + 5])
    pl.axis('off')
    # annotate the rows and columns with their corresponding variables
    ax = pl.gca()
    for i in range(0, lc):
        ax.annotate(corr.columns[i], (-0.5, i + 0.5),
                    size='large', horizontalalignment='right',
                    verticalalignment='center')
        ax.annotate(corr.columns[i], (i + 0.5, lc + 0.5),
                    size='large', rotation='vertical',
                    horizontalalignment='center', verticalalignment='right')
    # change the size of the image
    fig = pl.figure(num=1)
    fig.set_size_inches(size + (size / 4), size)
    pl.show()
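# Usage sketch for correlation_matrix (illustrative, not from the original
# source): any pandas DataFrame works, since DataFrame.corr() only uses the
# numerical columns.
import numpy as np
import pandas as pd
n = 200
df = pd.DataFrame({'a': np.random.randn(n)})
df['b'] = 2 * df['a'] + 0.1 * np.random.randn(n)  # strongly correlated with 'a'
df['c'] = np.random.randn(n)                      # independent of both
correlation_matrix(df, size=6.0)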
def plot_pairwise_contours(theta, nuvec, Cinv, lvls=(2.291, 6.158, 11.618)):
    """
    > theta is a (3,) vector of the model parameters (A, nu0, sigma)
    > nuvec and Cinv describe the data: the frequencies and the inverse
      covariance used to build the Fisher matrix
    > for each parameter pair, a 50-point grid spanning +/- 5 sigma (from
      the inverse Fisher matrix) is built and the quadratic form is
      contoured at the given levels
    """
    labels = ['A', 'nu0', 'sigma']
    fisher = fisher_matrix(theta, nuvec, Cinv)
    Finv = n.linalg.inv(fisher)
    thetavecs = n.zeros((50, theta.shape[0]))
    for ii in range(theta.shape[0]):
        thetavecs[:, ii] = n.linspace(theta[ii] - 5 * n.sqrt(Finv[ii, ii]),
                                      theta[ii] + 5 * n.sqrt(Finv[ii, ii]),
                                      num=50)
    print thetavecs
    for ii, jj in ((0, 1), (0, 2), (1, 2)):
        print ii, jj
        ts = thetavecs[:, [ii, jj]]
        print thetavecs.shape
        print ts.shape
        fs = fisher_select_pair(fisher, ii, jj)
        print fs.shape
        t0, t1 = n.meshgrid(ts[:, 0], ts[:, 1])
        print t0.shape
        Z = fs[0, 0] * (t0 - theta[ii]) * (t0 - theta[ii]) \
            + (fs[0, 1] + fs[1, 0]) * (t0 - theta[ii]) * (t1 - theta[jj]) \
            + fs[1, 1] * (t1 - theta[jj]) * (t1 - theta[jj])
        p.pcolor(t0, t1, Z)
        p.colorbar()
        CS = p.contour(t0, t1, Z, levels=lvls)
        p.clabel(CS, inline=1, fontsize=10)
        p.xlabel(labels[ii])
        p.ylabel(labels[jj])
        p.savefig('./figures/fisher/contours_{0}_{1}.pdf'.format(labels[ii], labels[jj]))
        p.clf()
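# fisher_select_pair is not defined in this snippet. A minimal sketch of what
# it plausibly does, judging from how fs is used above (hypothetical, not the
# original implementation): pull out the 2x2 sub-block of the Fisher matrix
# for the parameter pair (ii, jj).
def fisher_select_pair(fisher, ii, jj):
    return fisher[n.ix_([ii, jj], [ii, jj])]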
def updateColorTable(self, cItem): print "now viz!"+str(cItem.row())+","+str(cItem.column()) row = cItem.row() col = cItem.column() pl.clf() #pl.ion() x = pl.arange(self.dataDimen+1) y = pl.arange(self.dataDimen+1) X, Y = pl.meshgrid(x, y) pl.subplot(1,2,1) pl.pcolor(X, Y, self.mWx[row*self.dataMaxRange+col]) pl.gca().set_aspect('equal') pl.colorbar() pl.gray() pl.title("user 1") pl.subplot(1,2,2) pl.pcolor(X, Y, self.mWy[row*self.dataMaxRange+col]) pl.gca().set_aspect('equal') pl.colorbar() pl.gray() pl.title("user 2") #pl.tight_layout() pl.draw() #pl.show() pl.show(block=False)
def pylab_pcolor(self, mapping_method='one_to_one_greedy_mapping', normalize='gold'): assert normalize in ('total', 'gold', 'test') import pylab, numpy rows, gold_labels, test_labels = \ self.as_confusion_matrix(mapping_method=mapping_method) def normalize_row_by_total(row): total = self.total_points return [1 - (cell / total) for cell in row] def normalize_row_by_row(row): total = sum(row) return [1 - (cell / total) for cell in row] if normalize == 'total': normalize = normalize_row_by_total elif normalize == 'gold': normalize = normalize_row_by_row else: # test from AIMA import vector_add column_totals = reduce(vector_add, rows) def normalize(row): return [1 - (cell / total) for cell, total in zip(row, column_totals)] rows = [normalize(row) for gold_label, row in zip(gold_labels, rows)] rows = numpy.array(rows) pylab.pcolor(rows)
def __call__(self, n): if len(self.f.shape) == 3: # f = f[x,v,t], 2 dim in phase space ft = self.f[n,:,:] pylab.pcolor(self.X, self.V, ft.T, cmap='jet') pylab.colorbar() pylab.clim(0,0.38) # for Landau test case pylab.grid() pylab.axis([self.xmin, self.xmax, self.ymin, self.ymax]) pylab.xlabel('$x$', fontsize = 18) pylab.ylabel('$v$', fontsize = 18) pylab.title('$N_x$ = %d, $N_v$ = %d, $t$ = %2.1f' % (self.x.N, self.v.N, self.it*self.t.width)) pylab.savefig(self.path + self.filename) pylab.clf() return None if len(self.f.shape) == 2: # f = f[x], 1 dim in phase space ft = self.f[n,:] pylab.plot(self.x.gridvalues,ft,'ob') pylab.grid() pylab.axis([self.xmin, self.xmax, self.ymin, self.ymax]) pylab.xlabel('$x$', fontsize = 18) pylab.ylabel('$f(x)$', fontsize = 18) pylab.savefig(self.path + self.filename) return None
def pcolor(self, mode="mean", vmin=None, vmax=None): """ If you loaded the pickle data sets with only mean and sigma, the D and pvalue mode cannot be used. """ from pylab import clf, xticks, yticks, pcolor, colorbar, flipud, log10 if mode == "mean": data = self.get_mean() elif mode == "sigma": data = self.get_sigma() elif mode == "D": data = self.get_ks()[0] elif mode == "pvalue": data = self.get_ks()[1] clf(); if mode == "pvalue": pcolor(log10(flipud(data)), vmin=vmin, vmax=vmax); else: pcolor(flipud(data), vmin=vmin,vmax=vmax); colorbar() xticks([x+0.5 for x in range(0,8)], self.ligands, rotation=90) cellLines = self.cellLines[:] cellLines.reverse() yticks([x+0.5 for x in range(0,4)], cellLines, rotation=0)
def main(argv=None):
    if argv is None:
        argv = sys.argv
    from pylab import pcolor, show
    pcolor(FlowNetwork().fs()[0])
    show()
def plotAVTable(experiment): pylab.figure() pylab.gray() pylab.pcolor(experiment.agent.module.params.reshape(81,4).max(1).reshape(9,9), shading='faceted') pylab.title("Action-Value table, %s, Run %d" % (experiment.agent.learner.__class__.__name__, experiment.stepid))
def figurepoimsimple_small(poim, l, start, savefile, show):
    R = poim
    py.figure(figsize=(14, 12))
    motivelen = int(np.log(len(poim)) / np.log(4))
    ylabel = []
    for i in range(int(math.pow(4, motivelen))):
        label = []
        index = i
        for j in range(motivelen):
            label.append(index % 4)
            index = int(index / 4)
        label.reverse()
        ylabel.append(veclisttodna(label))
    py.pcolor(R[:, start:start + l])
    cb = py.colorbar()
    for t in cb.ax.get_yticklabels():
        t.set_fontsize(40)
    # tick positions every l/4 along the plotted window
    diff = int(l / 4)
    x_places = py.arange(0.5, l, diff)
    xa = np.arange(start + 1, start + 1 + l, diff)
    py.xlabel("Position", fontsize=46)
    py.ylabel("Motif", fontsize=46)
    py.yticks(np.arange(math.pow(4, motivelen)) + 0.5, (ylabel), fontsize=40)
    py.xticks(x_places, (xa.tolist()), fontsize=40)
    if savefile != "":
        py.savefig(savefile)
    print "the poim should show up here"
    if show:
        py.show()
def elo_corr_grid_search(data, run=False): prior_weights = np.arange(0.6, 1.2, 0.1) corr_place_weights = np.arange(0.8, 1.4, 0.1) results = pd.DataFrame(columns=prior_weights, index=corr_place_weights, dtype=float) for prior in prior_weights: for corr_place in corr_place_weights: model = EloCorrModel(prior_weight=prior, corr_place_weight=corr_place, min_corr=200) if run: Runner(data, model).run() report = Evaluator(data, model).evaluate() else: report = Evaluator(data, model).get_report() results[prior][corr_place] = report["rmse"] plt.figure() plt.title(data) cmap = plt.cm.get_cmap("gray") cmap.set_gamma(0.5) plt.pcolor(results, cmap=cmap) plt.yticks(np.arange(0.5, len(results.index), 1), results.index) plt.ylabel("corr_place_weights") plt.xticks(np.arange(0.5, len(results.columns), 1), results.columns) plt.xlabel("prior_weights") plt.colorbar()
def elo_grid_search(data, run=False): alphas = np.arange(0.4, 2, 0.2) betas = np.arange(0.02, 0.2, 0.02) results = pd.DataFrame(columns=alphas, index=betas, dtype=float) plt.figure() for alpha in alphas: for beta in betas: model = EloModel(alpha=alpha, beta=beta) # model = EloTreeModel(alpha=alpha, beta=beta, clusters=utils.get_maps("data/"), local_update_boost=0.5) if run: Runner(data, model).run() report = Evaluator(data, model).evaluate() else: report = Evaluator(data, model).get_report() # results[alpha][beta] = report["brier"]["reliability"] results[alpha][beta] = report["rmse"] plt.title(data) cmap = plt.cm.get_cmap("gray") cmap.set_gamma(0.5) plt.pcolor(results, cmap=cmap) plt.yticks(np.arange(0.5, len(results.index), 1), results.index) plt.ylabel("betas") plt.xticks(np.arange(0.5, len(results.columns), 1), results.columns) plt.xlabel("alphas") plt.colorbar()
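# A small follow-up sketch (not in the original source): with a grid-search
# DataFrame like `results` above (rows indexed by beta, columns by alpha),
# the best cell can be read off with stack()/idxmin().
import numpy as np
import pandas as pd
results = pd.DataFrame(np.random.rand(3, 4),
                       index=[0.02, 0.04, 0.06],      # betas
                       columns=[0.4, 0.6, 0.8, 1.0])  # alphas
print(results.stack().idxmin())  # -> (best_beta, best_alpha)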
def test_regression():
    from numpy.random import rand
    x = rand(40, 1)                   # explanatory variable
    y = x * x * x + rand(40, 1) / 5   # dependent variable

    from sklearn.linear_model import LinearRegression
    linreg = LinearRegression()
    linreg.fit(x, y)

    from numpy import linspace, matrix
    from pylab import plot, show
    xx = linspace(0, 1, 40)
    plot(x, y, 'o', xx, linreg.predict(matrix(xx).T), '--r')
    show()

    from sklearn.metrics import mean_squared_error
    print mean_squared_error(linreg.predict(x), y)

    # `data` is assumed from the surrounding context; the tick labels below
    # suggest the iris feature matrix, e.g.:
    # from sklearn.datasets import load_iris; data = load_iris().data
    from numpy import corrcoef
    corr = corrcoef(data.T)  # .T gives the transpose
    print corr

    from pylab import pcolor, colorbar, xticks, yticks
    from numpy import arange
    pcolor(corr)
    colorbar()
    # arranging the names of the variables on the axis
    xticks(arange(0.5, 4.5),
           ['sepal length', 'sepal width', 'petal length', 'petal width'],
           rotation=-20)
    yticks(arange(0.5, 4.5),
           ['sepal length', 'sepal width', 'petal length', 'petal width'],
           rotation=-20)
    show()
def plot_pairwise_corrcoef(data,ranklist=range(16,24),title="Correlation Coefficient"): array_byAttribRankRange=[] for attrib in attribList: array_byAttribRankRange.append(get_byAttribRankRange(data, attrib=attrib, ranklist=ranklist)) Narray = len(array_byAttribRankRange) array_corrcoef=np.zeros((Narray,Narray),dtype='float') for i,elemi in enumerate(array_byAttribRankRange[::-1]): for j,elemj in enumerate(array_byAttribRankRange[::-1]): if i>j: continue elif i==j: array_corrcoef[i,j]=1 else: array_corrcoef[i,j]=np.corrcoef(elemi,elemj)[0,1] P.pcolor(np.transpose(array_corrcoef), cmap=P.cm.RdBu, alpha=0.8) P.title(title) P.xlim([0,23]) P.ylim([0,23]) P.clim([-1,1]) P.xticks(range(len(attribList)), attribListAbrv[::-1],rotation='vertical') P.yticks(range(len(attribList)), attribListAbrv[::-1]) P.subplots_adjust(bottom=0.35) P.subplots_adjust(left=0.25) P.colorbar() return array_corrcoef
def plot2d_spectre(tags, freq, resistivity=False, added_resistivity=0,
                   filter_id=False, id_start=0, id_stop=0, add_name=''):
    curves = models.CurveDB.objects.filter_tag(tags).filter_param("center_freq", value=freq)
    additional_name = ''
    if resistivity:
        curves = curves.filter_param('Added Resistivity ', value=added_resistivity)
        additional_name = '_R' + str(added_resistivity) + 'Ohm'
    if filter_id:
        curves = curves.filter(id__gt=id_start - 1).filter(id__lt=id_stop + 1)
    a = []
    X = []
    Y = []
    for curve in curves:
        a.append(np.power(10, curve.data.values / 20) * float(curve.params["calibration"]))
        Y.append(float(curve.params["offset"]))
        X = array(curve.data.index, dtype=float)
    pairs = [(i, j) for i, j in zip(Y, a)]  # sort amplitudes by offset
    pairs.sort()
    Y = [val[0] for val in pairs]
    a = [val[1] for val in pairs]
    X = array(X)
    a = array(a)
    Y = array(Y)
    pylab.close()
    pylab.pcolor(X, Y, np.log(abs(a)))
    pylab.title("Mechanical_response_electrode" + tags.replace('/', '_'))
    pylab.ylabel('Offset (K)')
    pylab.xlabel('Frequency (Hz)')
    pylab.legend()
    pylab.savefig('thermal_frequency_shift' + str(freq) + tags.replace('/', '_') +
                  additional_name + add_name + '.png')
    b = np.asarray([a, X, Y])
    np.savetxt('thermal_frequency_shift' + str(freq) + "Hz_amplitude" + tags.replace('/', '_') +
               additional_name + add_name + ".csv", a, delimiter=",")
    np.savetxt('thermal_frequency_shift' + str(freq) + "Hz_frequency" + tags.replace('/', '_') +
               additional_name + add_name + ".csv", X, delimiter=",")
def rhoPlot(self, ccaMat, filename, winsize, framesize, dataDimen):
    dlen = len(ccaMat)
    #x = pl.arange(dlen)
    #y = pl.arange(dlen)
    print "dlen:" + str(dlen)
    order = dataDimen
    for i in range(order):
        pl.clf()
        #pl.ion()
        # the first component has been taken out
        cMat = ccaMat[:, :, i]
        Y, X = np.mgrid[slice(0, dlen, 1), slice(0, dlen, 1)]
        #print "len X:"+str(len(X))+", X:"+str(X)
        #X, Y = np.mgrid[0:dlen:complex(0, dlen), 0:dlen:complex(0, dlen)]
        pl.pcolor(X, Y, cMat)
        pl.xlim(0, dlen - 1)
        pl.ylim(0, dlen - 1)
        name = str(filename) + "_w-" + str(winsize) + "_f-" + str(framesize) + "_" + str(i)
        pl.title(name)
        pl.colorbar()
        pl.gray()
        pl.draw()
        outname = "cca-eig/" + name + ".png"
        pl.savefig(outname)
        print "eig order:", i, " save!"
def plot_covar(del_bl=8.,beam_sig=0.09,save_covar=True): covar,fqs = construct_covar(del_bl=del_bl,beam_sig=beam_sig) #print covar p.pcolor(fqs,fqs,covar) p.colorbar() p.savefig('{0}/mc_spec_figs/{1}_covar.pdf'.format(fig_loc,save_tag_base)) if save_covar: n.savez_compressed('{0}/monte_carlo/spectrum_data/{1}_covar'.format(data_loc,save_tag_base),covar=covar,fqs=fqs)
def VisualizeColorMaps(CombinedMat, NormalizedPrunedDepMat, PrunedSemanticSimMat,
                       OntologySimilarityMat, PrunedLabels):
    '''
    #plt.grid(True)
    #plt.subplots_adjust(bottom=0.50)
    plt.pcolor(NormalizedPrunedDepMat)
    plt.colorbar(use_gridspec=True)  # to resize to the tight layout format
    plt.yticks(numpy.arange(0.5, len(CombinedMat) + 0.5), PrunedLabels)
    plt.xticks(numpy.arange(0.5, len(CombinedMat) + 0.5), PrunedLabels, rotation=30, ha='right')
    # in prev line: ha = horizontal alignment - 'right' makes each label
    # terminate at the center of its grid cell
    plt.title("NormalizedPrunedDepMat", fontsize=20, verticalalignment='bottom')
    plt.tight_layout()  # to resize so that all labels are visible
    #plt.savefig('foo.pdf', figsize=(4,4), dpi=600)  # to save image as pdf; fig size may or may not be used
    plt.show()

    plt.pcolor(PrunedSemanticSimMat)
    plt.colorbar(use_gridspec=True)
    plt.yticks(numpy.arange(0.5, len(CombinedMat) + 0.5), PrunedLabels)
    plt.xticks(numpy.arange(0.5, len(CombinedMat) + 0.5), PrunedLabels, rotation=45, ha='right')
    plt.title("PrunedSemanticSimMat", fontsize=20, verticalalignment='bottom')
    plt.tight_layout()
    plt.show()
    '''
    plt.pcolor(OntologySimilarityMat)
    plt.colorbar(use_gridspec=True)
    plt.yticks(numpy.arange(0.5, len(CombinedMat) + 0.5), PrunedLabels)
    plt.xticks(numpy.arange(0.5, len(CombinedMat) + 0.5), PrunedLabels, rotation=45, ha='right')
    plt.title("OntologySimilarityMat", fontsize=20, verticalalignment='bottom')
    plt.xlabel('Packages')
    plt.ylabel('Packages')
    plt.tight_layout()
    plt.show()
def som_plot_mapping(distance_map):
    bone()
    pcolor(distance_map.T)  # plotting the distance map as background
    colorbar()
    #axis([0, som.weights.shape[0], 0, som.weights.shape[1]])
    ion()
    show()  # show the figure
def testeps(d):
    M.clf()
    M.pcolor(d)
    M.axis('tight')
    M.colorbar()
    M.gcf().set_size_inches((7.5, 6.))
    M.savefig('test.png', dpi=240)
def plot_time_course(self, data, mode='boolean', fontsize=16):
    # TODO sort columns alphabetically
    # FIXME: twiny labels are slightly shifted
    # TODO flip
    if mode == 'boolean':
        cm = pylab.get_cmap('gray')
        pylab.clf()
        data = pd.DataFrame(data).fillna(0.5)
        pylab.pcolor(data, cmap=cm, vmin=0, vmax=1, shading='faceted')
        pylab.colorbar()
        ax1 = pylab.gca()
        ax1.set_xticks([])
        Ndata = len(data.columns)
        ax1.set_xlim(0, Ndata)
        ax = pylab.twiny()
        ax.set_xticks(pylab.linspace(0.5, Ndata + 0.5, Ndata))
        ax.set_xticklabels(data.columns, fontsize=fontsize, rotation=90)
        times = list(data.index)
        Ntimes = len(times)
        ax1.set_yticks([x + 0.5 for x in times])
        ax1.set_yticklabels(times[::-1], fontsize=fontsize)
        pylab.sca(ax1)
    else:
        print('not implemented')
def show(self, maxIdx=None, indices=None):
    print('Exemplar projection')
    som = self.som
    if maxIdx is None:
        maxIdx = len(self.data)
    if indices is None:
        data = self.data[0:maxIdx]
        target = self.target
    else:
        data = self.data[indices]
        target = self.target[indices]
    bone()
    pcolor(som.distance_map().T)  # plotting the distance map as background
    colorbar()
    t = zeros(len(target), dtype=int)
    t[target == 'A'] = 0
    t[target == 'B'] = 1
    # use different colors and markers for each label
    markers = ['o', 's', 'D']
    colors = ['r', 'g', 'b']
    for cnt, xx in enumerate(data):
        w = som.winner(xx)  # getting the winner
        # place a marker on the winning position for the sample xx
        plot(w[0] + .5, w[1] + .5, markers[t[cnt]], markerfacecolor='None',
             markeredgecolor=colors[t[cnt]], markersize=12, markeredgewidth=2)
    axis([0, som.weights.shape[0], 0, som.weights.shape[1]])
    show()  # show the figure
def pcAddition(MOVIE=True):
    # BP S1
    out = []
    for vp in [1, 0]:
        path = inpath + 'vp%03d/E%d/' % (vp + 1, 97)
        coeff = np.load(path + 'X/coeff.npy')
        pc1 = _getPC(coeff, 0)
        if pc1.mean() >= 0.4:
            pc1 = 1 - pc1
        pc2 = _getPC(coeff, 1)
        if pc2.mean() >= 0.4:
            pc2 = 1 - pc2
        out.append([])
        out[-1].append(pc1)
        out[-1].append(pc2)
        out[-1].append((pc1 - pc2 + 1) / 2.)
        #out[-1].append((pc1+pc2)/2.)
    if False:
        out.append([])
        out[-1].append(pc1)
        out[-1].append(1 - pc2)
        out[-1].append((pc1 + pc2) / 2.)
    if MOVIE:
        plotGifGrid(out, fn=figpath + 'Pixel/pcAddition' + FMT, bcgclr=1,
                    snapshot=2, plottime=True,
                    text=[['A', 20, 12, -10], ['B', 20, 84, -10]])
    print out[0][0].shape
    cols = 5
    fs = np.linspace(0, out[0][0].shape[0] - 1, cols).astype(int)  # frame indices must be ints
    ps = np.arange(out[0][0].shape[1])
    for i in range(len(out)):
        for j in range(len(out[0])):
            for fi in range(cols):
                plt.subplot(3, cols, j * cols + fi + 1)
                plt.pcolor(ps, ps, out[i][j][fs[fi], :, :], cmap='gray')
                #plt.grid()
    plt.savefig(figpath + 'Pixel' + os.path.sep + 'pcAddition',
                dpi=DPI, bbox_inches='tight')
def plot(self):
    """
    .. plot::
        :include-source:
        :width: 80%

        from cellnopt.simulate import *
        from cellnopt.core import *
        pkn = cnodata("PKN-ToyPB.sif")
        midas = cnodata("MD-ToyPB.csv")
        s = boolean.BooleanSimulator(CNOGraph(pkn, midas))
        s.simulate(30)
        s.plot()
    """
    pylab.clf()
    data = numpy.array([self.data[x] for x in self.species if x in self.data])
    data = data.transpose()
    data = 1 - pylab.flipud(data)
    pylab.pcolor(data, vmin=0, vmax=1, edgecolors="k")
    pylab.xlabel("species")
    pylab.ylabel("Time (tick)")
    pylab.gray()
    pylab.xticks([0.5 + x for x in range(0, 30)], self.species, rotation=90)
    pylab.ylim([0, self.tick])
    pylab.xlim([0, len(self.species)])
def plotContour(X, Y, u, nx, ny, impsi, L, H):
    pl.pcolor(X, Y, np.transpose(u), cmap='RdBu')
    pl.colorbar()
    pl.title("u-velocity contour")
    pl.axis([X.min(), X.max(), Y.min(), Y.max()])
    pl.show()
# run again with numpy vectorized inner-implementation
t0 = time.time()
ar = view.apply_async(_solve, tstop, dt=0, verbose=True, final_test=final_test)
#, user_action=wave_saver)
if final_test:
    # this sum is performed element-wise as results finish
    s = sum(ar)
    # the L2 norm (RMS) of the result:
    norm = sqrt(s / num_cells)
else:
    norm = -1
t1 = time.time()
print 'vector inner-version, Wtime=%g, norm=%g' % (t1 - t0, norm)

# if ns.save is True, then u_hist stores the history of u as a list
# If the partition scheme is Nx1, then u can be reconstructed via 'gather':
if ns.save and partition[-1] == 1:
    import pylab
    view.execute('u_last=u_hist[-1]')
    # map mpi IDs to IPython IDs, which may not match
    ranks = view['my_id']
    targets = range(len(ranks))
    for idx in range(len(ranks)):
        targets[idx] = ranks.index(idx)
    u_last = rc[targets].gather('u_last', block=True)
    pylab.pcolor(u_last)
    pylab.show()
sc = MinMaxScaler(feature_range=(0, 1))
x = sc.fit_transform(x)

# Training the SOM
from minisom import MiniSom  # cloned from https://github.com/JustGlowing/minisom/
som = MiniSom(x=10, y=10, input_len=15, sigma=1.0, learning_rate=0.5)
# sigma = radius of the neighbourhood
som.random_weights_init(x)
som.train_random(data=x, num_iteration=100)

# Visualizing the results
# MID - Mean Interneuron Distance
# We have to find the winning node with the highest MID
from pylab import bone, pcolor, colorbar, plot, show
bone()
pcolor(som.distance_map().T)
colorbar()
# Two markers: red circle = didn't get approval, green square = got approval
markers = ['o', 's']
colors = ['r', 'g']
for i, X in enumerate(x):  # i = index of customer, X = that customer's vector
    w = som.winner(X)
    plot(w[0] + 0.5,  # x coordinate of the winning node; +0.5 centres it in the square
         w[1] + 0.5,  # y coordinate
         markers[y[i]],  # y[i] = value of the dependent variable for that customer
         markeredgecolor=colors[y[i]],
         markerfacecolor='None',  # completion of the truncated original, matching the sibling snippets below
         markersize=10,
         markeredgewidth=2)
show()
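# A possible next step (an assumption, not part of the original script): list
# the customers that landed on a suspicious high-MID node, say (8, 1), using
# MiniSom's win_map, and undo the feature scaling afterwards.
mappings = som.win_map(x)          # dict mapping (i, j) -> list of input vectors
frauds = mappings.get((8, 1), [])  # (8, 1) is a hypothetical bright node read off the map
if len(frauds) > 0:
    frauds = sc.inverse_transform(frauds)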
""" Plot the columns of the output files """ import sys import pylab data = pylab.loadtxt(sys.argv[1], unpack=True) shape = (int(sys.argv[2]), int(sys.argv[3])) lon = pylab.reshape(data[0], shape) lat = pylab.reshape(data[1], shape) for i, value in enumerate(data[3:]): value = pylab.reshape(value, shape) pylab.figure(figsize=(4, 3)) pylab.axis('scaled') pylab.title("Column %d" % (i + 4)) pylab.pcolor(lon, lat, value) pylab.colorbar() pylab.xlim(lon.min(), lon.max()) pylab.ylim(lat.min(), lat.max()) pylab.savefig('column%d.png' % (i + 4))
def allele_plot(filename, normalize=False, alleles=None, generations=None): """Plot the alleles from each generation from the individuals file. This function creates a plot of the individual allele values as they change through the generations. It creates three subplots, one for each of the best, median, and average individual. The best and median individuals are chosen using the fitness data for each generation. The average individual, on the other hand, is actually an individual created by averaging the alleles within a generation. This function requires the pylab library. .. note:: This function only works for single-objective problems. .. figure:: _static/allele_plot.png :alt: Example allele plot :align: center An example image saved from the ``allele_plot`` function. Arguments: - *filename* -- the name of the individuals file produced by the file_observer - *normalize* -- Boolean value stating whether allele values should be normalized before plotting (default False) - *alleles* -- a list of allele index values that should be plotted (default None) - *generations* -- a list of generation numbers that should be plotted (default None) If *alleles* is ``None``, then all alleles are plotted. Similarly, if *generations* is ``None``, then all generations are plotted. """ import pylab generation_data = [] reader = csv.reader(open(filename)) for row in reader: g = int(row[0]) row[3] = row[3].replace('[', '') row[-1] = row[-1].replace(']', '') individual = [float(r) for r in row[3:]] individual.append(float(row[2])) try: generation_data[g] except IndexError: generation_data.append([]) generation_data[g].append(individual) for gen in generation_data: gen.sort(key=lambda x: x[-1]) for j, g in enumerate(gen): gen[j] = g[:-1] best = [] median = [] average = [] for gen in generation_data: best.append(gen[0]) plen = len(gen) if plen % 2 == 1: med = gen[(plen - 1) // 2] else: med = [] for a, b in zip(gen[plen // 2 - 1], gen[plen // 2]): med.append(float(a + b) / 2) median.append(med) avg = [0] * len(gen[0]) for individual in gen: for i, allele in enumerate(individual): avg[i] += allele for i, a in enumerate(avg): avg[i] /= float(len(gen)) average.append(avg) for plot_num, (data, title) in enumerate( zip([best, median, average], ["Best", "Median", "Average"])): if alleles is None: alleles = list(range(len(data[0]))) if generations is None: generations = list(range(len(data))) if normalize: columns = list(zip(*data)) max_col = [max(c) for c in columns] min_col = [min(c) for c in columns] for dat in data: for i, d in enumerate(dat): dat[i] = (d - min_col[i]) / float(max_col[i] - min_col[i]) plot_data = [] for g in generations: plot_data.append([data[g][a] for a in alleles]) sub = pylab.subplot(3, 1, plot_num + 1) pylab.pcolor(pylab.array(plot_data)) pylab.colorbar() step_size = max(len(generations) // 7, 1) ytick_locs = list(range(step_size, len(generations), step_size)) ytick_labs = generations[step_size::step_size] pylab.yticks(ytick_locs, ytick_labs) pylab.ylabel('Generation') if plot_num == 2: xtick_locs = list(range(len(alleles))) xtick_labs = alleles pylab.xticks(xtick_locs, xtick_labs) pylab.xlabel('Allele') else: pylab.setp(sub.get_xticklabels(), visible=False) pylab.title(title) pylab.show()
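# Usage sketch for allele_plot above. 'individuals.csv' is a hypothetical name
# standing in for the individuals file written by the file_observer; only the
# first three alleles are drawn, for every generation.
allele_plot('individuals.csv', normalize=True, alleles=[0, 1, 2], generations=None)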
def run(cluster):
    ratio = []
    import pyfits
    import os, sys, bashreader, commands
    from config_bonn import appendix, tag, arc, filters, filter_root, appendix_root
    type = 'all'
    AP_TYPE = ''
    magtype = 'APER1'
    DETECT_FILTER = ''
    SPECTRA = 'CWWSB_capak.list'
    LIST = None
    if len(sys.argv) > 2:
        for s in sys.argv:
            if s == 'spec':
                type = 'spec'
            elif s == 'rand':
                type = 'rand'
            elif s == 'all':
                type = 'all'
            elif s == 'picks':
                type = 'picks'
            elif s == 'ISO':
                magtype = 'ISO'
            elif s == 'APER1':
                magtype = 'APER1'
            import string
            import re
            if string.find(s, 'detect') != -1:
                rs = re.split('=', s)
                DETECT_FILTER = '_' + rs[1]
            if string.find(s, 'aptype') != -1:
                rs = re.split('=', s)
                AP_TYPE = '_' + rs[1]
            if string.find(s, 'spectra') != -1:
                rs = re.split('=', s)
                SPECTRA = rs[1]
            if string.find(s, 'list') != -1:
                rs = re.split('=', s)
                LIST = rs[1]
    print DETECT_FILTER
    print cluster
    path = '/nfs/slac/g/ki/ki05/anja/SUBARU/%s/' % cluster
    filecommand = open('record.analysis', 'w')
    BASE = "coadd"
    image = BASE + '.fits'
    from glob import glob
    images = []
    filters.reverse()
    print filters
    ims = {}
    ims_seg = {}
    params = {'path': path,
              'filter_root': filter_root,
              'cluster': cluster,
              'appendix': appendix,
              'DETECT_FILTER': DETECT_FILTER,
              'AP_TYPE': AP_TYPE}
    params['SPECTRA'] = SPECTRA
    params['type'] = type
    params['magtype'] = magtype
    outputcat = '%(path)s/PHOTOMETRY%(DETECT_FILTER)s%(AP_TYPE)s/%(cluster)s.%(magtype)s.1.%(SPECTRA)s.%(type)s.bpz.tab' % params
    catalog = '%(path)s/PHOTOMETRY%(DETECT_FILTER)s%(AP_TYPE)s/%(cluster)s.slr.cat' % params
    starcatalog = '%(path)s/PHOTOMETRY%(DETECT_FILTER)s%(AP_TYPE)s/%(cluster)s.stars.calibrated.cat' % params
    import do_multiple_photoz
    filterlist = do_multiple_photoz.get_filters(catalog, 'OBJECTS')
    print filterlist
    filters = conv_filt(filterlist)
    y = {}
    for f in filters:
        y[f] = 'yes'
    filters = y.keys()
    filters.sort(sort_filters)
    print filters
    stars_dict = star_num(filterlist, catalog, starcatalog, cluster, 'APER1', name_suffix='')
    os.system('mkdir -p ' + os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/')
    if False:
        pagemain = open(os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/index.html', 'w')
        pagemain.write('<table align=left><tr><td colspan=5 class="dark"><h1>' + cluster +
                       '</h1></td></tr><tr><td colspan=5><a href=http://www.slac.stanford.edu/~pkelly/photoz/' +
                       cluster + '/stars.html>Stellar Color-Color Plots</a><td></tr>'
                       '<tr><td colspan=5><a href=redsequence.html>Red Sequence Redshifts</a><td></tr>'
                       '<tr><td><a href=objects.html>Photoz Plots</a><td></tr>'
                       '<tr><td><a href=thecorrections.html>Correction Plots</a><td></tr>'
                       '<tr><td><a href=zdistribution.html>Z Distribution</a><td></tr></table>\n')
        pagemain.close()
    import pyfits, pylab, scipy
    p = pyfits.open(outputcat)['STDTAB'].data
    pylab.clf()
    pylab.hist(p.field('BPZ_ODDS'), bins=scipy.arange(0, 1, 0.02))
    pylab.xlabel('ODDS')
    pylab.ylabel('Number of Galaxies')
    pylab.savefig(os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/odds.png')
    cut = 0.5
    p = p[p.field('BPZ_ODDS') > cut]
    pylab.clf()
    pylab.hist(p.field('BPZ_Z_B'), bins=scipy.arange(0, 4, 0.02))
    pylab.xlabel('Photometric Redshift')
    pylab.ylabel('Number of Galaxies (ODDS > ' + str(cut) + ')')
    pylab.savefig(os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/zdist.png')
    pagemain = open(os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/zdistribution.html', 'w')
    pagemain.write('<img src=zdist.png></a>')
    pagemain.write('<img src=odds.png></a>')
    pagemain.close()
    pylab.clf()
    import flux_comp
    corrections, plot_dict, title_dict = reload(flux_comp).calc_comp(
        cluster, DETECT_FILTER.replace('_', ''), AP_TYPE, SPECTRA, type='all', plot=True)
    print corrections
    pagemain = open(os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/thecorrections.html', 'w')

    def s(a, b):
        if corrections[a] < corrections[b]:
            return 1
        else:
            return -1

    keys2 = corrections.keys()
    keys2.sort(s)
    import scipy
    from scipy import stats
    kernel = scipy.array([scipy.stats.norm.pdf(i, 1.) for i in [4, 3, 2, 1, 0, 1, 2, 3, 4]])
    kernel = kernel / kernel.sum()
    pagemain.write('<br>CORRECTIONS<br><ul>')
    ims = ''
    nums = '<br><br>NUMBER OF GOOD STARS<br><ul>\n'
    for key in keys2:
        if key != 'ID' and key != 'Z_S':
            file = os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/' + key + '.png'
            pylab.clf()
            o = pylab.hist(plot_dict[key], bins=100, range=[0.5, 1.5])
            y_smooth = scipy.convolve(o[0], kernel, 'same')  # [5:-5]
            print o[1], y_smooth, len(o[1]), len(y_smooth)
            #pylab.linewidth = 4
            xs = o[1][:-1] + scipy.ones(len(o[1][:-1])) * (o[1][1] - o[1][0]) * 0.5
            ys = y_smooth
            pylab.plot(xs, ys, 'r', linewidth=2)
            pylab.suptitle(title_dict[key])
            print o
            pylab.savefig(file)
            ims += '<img src=' + key + '.png></a>\n'
            a = zip(y_smooth, xs)
            a.sort()
            max = a[-1][1]
            pagemain.write(key + ' median=' + str(corrections[key]) +
                           ' smoothed peak=' + str(max) + '<br>')
            nums += key + ' ' + str(stars_dict[key]) + '<br>\n'
    pagemain.write('</ul>' + nums + '</ul>' + ims)
    pagemain.close()
    os.system('mkdir -p ' + os.environ['sne'] + '/photoz/' + cluster)
    #mkcolorcolor(filterlist,catalog,starcatalog,cluster,'APER1',name_suffix='aperapercalib')
    #mkcolorcolor(filterlist,catalog,starcatalog,cluster,',name_suffix='')
    if False:
        mkcolorcolor(filterlist, catalog, starcatalog, cluster, 'APER1', name_suffix='')
    #import sys
    #sys.exit(0)
    print catalog
    #catalog = '/u/ki/dapple/subaru/MACS2243-09/PHOTOMETRY_W-J-V_iso/MACS2243-09.aper.slr.cat'
    #file = mkcolorcolor(filterlist,catalog,starcatalog,cluster,'ISO',name_suffix='isomags_apercalib')
    #catalog = '/u/ki/dapple/subaru/MACS2243-09/PHOTOMETRY_W-J-V_iso/MACS2243-09.slr.cat'
    #file = mkcolorcolor(filterlist,catalog,starcatalog,cluster,'ISO',name_suffix='isomags_isocalib')
    #catalog = '/u/ki/dapple/subaru/MACS2243-09/PHOTOMETRY_W-J-V_aper/MACS2243-09.slr.cat'
    #file = mkcolorcolor(filterlist,catalog,starcatalog,cluster,'APER1',name_suffix='apermags_apercalib')
    #print file
    ffile = os.environ['sne'] + '/photoz/' + cluster + '/all.tif'
    print filters
    if not glob(ffile):
        print filters[1:4]
        for filt in filters[1:4]:
            #filter = filter.replace('MEGAPRIME-0-1-','').replace('SUBARU-10_2-1-','').replace('SUBARU-10_2-2-','').replace('SUBARU-10_1-1-','').replace('SUBARU-10_1-2-','')  #.replace('SUBARU-8-1-','')
            params = {'path': path,
                      'filter_root': filter_root,
                      'cluster': cluster,
                      'filter': filt,
                      'DETECT_FILTER': DETECT_FILTER,
                      'AP_TYPE': AP_TYPE,
                      'appendix': appendix}
            print params
            # now run sextractor to determine the seeing:
            image = '%(path)s/%(filter)s/SCIENCE/coadd_%(cluster)s%(appendix)s/coadd.fits' % params
            images.append(image)
            print 'reading in ' + image
            seg_image = '/%(path)s/%(filter)s/PHOTOMETRY/coadd.apertures.fits' % params
            ims[filt] = pyfits.open(image)[0].data
            print 'read in ' + image
            #ims_seg[filter] = pyfits.open(seg_image)[0].data
        print images
        os.system('mkdir ' + os.environ['sne'] + '/photoz/' + cluster + '/')
        os.system('chmod o+rx ' + os.environ['sne'] + '/photoz/' + cluster + '/')
        from glob import glob
        os.system('stiff ' + reduce(lambda x, y: x + ' ' + y, images) +
                  ' -OUTFILE_NAME ' + ffile + ' -BINNING 1 -GAMMA_FAC 1.6')
        os.system('convert ' + os.environ['sne'] + '/photoz/' + cluster + '/all.tif ' +
                  os.environ['sne'] + '/photoz/' + cluster + '/fix.tif')
        print ffile
    import Image
    im = Image.open(os.environ['sne'] + '/photoz/' + cluster + '/fix.tif')
    print catalog
    p_cat = pyfits.open(catalog)[1].data
    #outputcat = '%(path)s/PHOTOMETRY/all_bpz1_' % params
    #spec = True
    picks = False
    #SPECTRA = 'CWWSB4_txitxo.list'
    #SPECTRA = 'CWWSB_capak.list'  # use Peter Capak's SEDs
    print SPECTRA
    outbase = os.environ['sne'] + '/photoz/' + cluster + '/'
    os.system('mkdir -p ' + outbase + '/' + SPECTRA + '/')
    SeqNr_file = open('SeqNr_file', 'w')
    if type == 'spec':
        probsout = '%(path)s/PHOTOMETRY%(DETECT_FILTER)s%(AP_TYPE)s/%(cluster)s.%(magtype)s.1.%(SPECTRA)s.%(type)s.probs' % params
        #outputcat = '%(path)s/PHOTOMETRY/%(cluster)s.1.all.bpz.tab' % params
        print probsout, outputcat.replace('.tab', '')
        f = open(probsout, 'r').readlines()
        fz_temp = open(outputcat.replace('.tab', ''), 'r').readlines()
        fz = []
        for l in fz_temp:
            if l[0] != '#':
                fz.append(l)
        import re
        res = re.split('\s+', f[0])
        print res[4]
        res2 = re.split('\,', res[4].replace('z=arange(', '').replace(')', ''))
        print res2
        import scipy
        increment = float(res2[2])
        #matrix = scipy.array(zs*
        print zip(f[1:], fz[:])[1]
        #zs
        end = int(1. / increment)
        zs = scipy.arange(float(res2[0]), 1. + increment, increment)
        prob_matrix = scipy.zeros((end, end))
        for l, zf in zip(f[1:], fz[:]):
            #print l, zf
            resz = re.split('\s+', zf[:-1])
            if resz[0] == '':
                resz = resz[1:]
            #print resz
            z_spec = float(resz[9])
            odds = float(resz[5])
            res = re.split('\s+', l[:-1])
            if res[-1] == '':
                res = res[:-1]
            #print res
            pdz = scipy.array([float(x) for x in res[1:]])
            from copy import copy
            zs_copy = copy(zs)
            #zs_copy = zs_copy[pdz != 0]
            #pdz = pdz[pdz != 0]
            #print zs_copy, pdz, resz[9]
            for i in range(len(zs_copy))[:end]:  # z, p in zip(zs_copy, pdz):
                if odds > 0.95 and z_spec < 1.:
                    prob_matrix[i, int(z_spec / increment)] += pdz[i]
            #print prob_matrix
        print 'done calculating'
        X, Y = pylab.meshgrid(zs_copy, zs_copy)
        print prob_matrix.shape, X.shape, Y.shape
        pylab.clf()
        print prob_matrix.max()
        prob_matrix[prob_matrix > 1] = 1.
        pylab.pcolor(X, Y, -1. * prob_matrix, cmap='gray', alpha=0.9,
                     shading='flat', edgecolors='None')
        pylab.plot([0, 1], [0, 1], color='red')
        pylab.xlabel('SpecZ')
        pylab.ylabel('PhotZ')
        pylab.savefig(outbase + '/' + SPECTRA + '/RedshiftPDZ01.png')
        #pylab.pcolor(X, Y, prob_matrix, cmap='gray', alpha=0.9, shading='flat', edgecolors='None')
        #pylab.colorbar()
        #doFormating(**formating)
        #histogram2d(zs_copy, zs_copy, prob_matrix)
        #for l in f[1:]:
        #    res = re.split('\s+', l)
        #    print res
    if True:  #glob(outputcat.replace('.tab','')):
        plot_res(outputcat.replace('.tab', ''), outbase, SPECTRA)
        print 'plot_res'
    print outputcat
    bpz_cat = pyfits.open(outputcat)[1].data
    print outputcat
    gals = []
    set = range(len(bpz_cat))
    if type == 'rand':
        set = range(100)
    f_nums = [e[:-1] for e in open('SeqNr_save', 'r').readlines()]
    for i in set:  #[0:25]:  #[75227, 45311, 53658, 52685, 64726]:
        print i
        line = bpz_cat[i]
        print line, outputcat
        text = ''
        #for x, y, side, index in [[4800,4900,200,1],[5500,5500,100,2],[4500,5500,300,2]]:
        text += '<tr>\n'
        SeqNr = int(line['SeqNr'])
        fileNumber = str(int(line['BPZ_NUMBER']))
        params['fileNumber'] = str(fileNumber)
        base = '%(path)s/PHOTOMETRY%(DETECT_FILTER)s%(AP_TYPE)s/%(cluster)s.%(magtype)s.1.%(SPECTRA)s.%(type)s' % params
        probs = '%(path)s/PHOTOMETRY%(DETECT_FILTER)s%(AP_TYPE)s/%(cluster)s.%(magtype)s.1.%(SPECTRA)s.%(type)s.probs' % params
        print probs
        print base, probs
        resid = line['BPZ_Z_B'] - line['BPZ_Z_S']
        z = line['BPZ_Z_B']
        ODDS = line['BPZ_ODDS']
        if True:
            # alternative selection cuts (disabled):
            #   line['BPZ_ODDS'] > 0.5 and 0.6 < line['BPZ_Z_B'] < 0.8 and 0.2 < line['BPZ_Z_S'] < 0.4
            #   abs(resid) > 0.2;  abs(resid) > 0.04 and 0.4 < line['BPZ_Z_S'] < 0.6
            #   SeqNr == 24778;  SeqNr == 4441 or SeqNr == 1285;  abs(resid) > 0.1
            #   len(filter(lambda x: int(line['SeqNr']) == int(x), f_nums))
            probs_f = open(probs, 'r').readlines()
            print line['SeqNr']
            file = '/nfs/slac/g/ki/ki04/pkelly/photoz/' + cluster + '/' + SPECTRA + '/' + str(SeqNr) + 'spec.png'
            #print 'SAVING', outdir + outimg
            sys.argv = ['', base, str(SeqNr), file]
            import sedplot
            print base, 'base'
            filt_info = sedplot.run()
            print filt_info
            import math
            if 1:  #filt_info[0]['flux'] > 0:
                ratio.append(filt_info[0]['expectedflux'] / filt_info[0]['flux'])
            #print 'ratio', scipy.median(scipy.array(ratio))
            #a, b, varp = pylab.hist(ratio, bins=arange(0, 5, 0.1))
            #pylab.show()
            print line['BPZ_Z_B']
            SeqNr_file.write(str(SeqNr) + '\n')
            mask = p_cat.field('SeqNr') == SeqNr
            temp = p_cat[mask]
            x = int(temp.field('Xpos')[0])
            y = 10000 - int(temp.field('Ypos')[0])
            x_fits = int(temp.field('Xpos')[0])
            y_fits = int(temp.field('Ypos')[0])
            import pyraf
            from pyraf import iraf
            if line['BPZ_Z_S'] != 0:
                resid = line['BPZ_Z_B'] - line['BPZ_Z_S']
                if resid > 0:
                    color = 'green'
                    resid_str = ' <font color=' + color + '>+' + str(resid) + '</font> '
                else:
                    color = 'red'
                    resid_str = ' <font color=' + color + '>' + str(resid) + '</font> '
            else:
                resid = 'no spec z'
                color = 'black'
                resid_str = ' <font color=' + color + '>' + str(resid) + '</font> '
            t = ['BPZ_Z_B', 'BPZ_ODDS', 'BPZ_CHI-SQUARED', 'BPZ_Z_B_MIN', 'BPZ_Z_B_MAX']
            text += ('<td colspan=10>' + str(SeqNr) + ' z=' + str(line['BPZ_Z_B']) +
                     ' MIN=' + str(line['BPZ_Z_B_MIN']) + ' MAX=' + str(line['BPZ_Z_B_MAX']) +
                     resid_str + ' ODDS=' + str(line['BPZ_ODDS']) +
                     ' TYPE=' + str(line['BPZ_T_B']) + ' CHISQ=' + str(line['BPZ_CHI-SQUARED']) +
                     ' x=' + str(x) + ' y=' + str(y) + '</td></tr><tr>\n')
            print x, y
            index = SeqNr
            outfile = os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/' + str(index) + '.png'
            plot_bpz_probs(index, probs_f, outfile)
            #file = 'Id%09.f.spec' % index
            #outfile = os.environ['sne'] + '/photoz/' + str(index) + '.png'
            #plot(file, outfile)
            text += '<td align=left><img height=400px src=' + str(index) + '.png></img>\n'
            text += '<img height=400px src=' + str(index) + 'spec.png></img>\n'
            images = []
            file = '/nfs/slac/g/ki/ki04/pkelly/photoz/' + cluster + '/' + str(SeqNr) + 'spec.png'
            command = ('python ' + os.environ['BPZPATH'] + '/plots/sedplot.py ' +
                       base + ' ' + str(SeqNr) + ' ' + file)
            print command
            import sedplot
            #sys.argv = ['', base, str(SeqNr), file]
            #filt_info = sedplot.run()
            #os.system(command)
            side = 100
            #x = temp.field('Xpos')[0]
            if 1:  #try:
                p = im.crop([x - side, y - side, x + side, y + side])
                image = os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/' + str(index) + '.jpg'
                os.system('rm ' + image)
                p.save(image, 'JPEG')
            #except: print 'fail'
            text += '<img src=' + str(index) + '.jpg></img></td><td colspan=20></td>\n'
            text += '</tr>\n'
            keys = ['name', 'wavelength', 'observed', 'flux', 'fluxerror', 'expectedflux', 'chioffset']
            text += ('<tr><td colspan=10><table><tr>' +
                     reduce(lambda x, y: x + y, ['<td>' + n + '</td>' for n in keys]) + '</tr>')
            for f in filt_info:
                text += '<tr>' + reduce(lambda x, y: x + y,
                                        ['<td>' + str(f[n]) + '</td>' for n in keys]) + '</tr>'
            text += '</table></td></tr>'
            side = 100
            #if x_fits - 100 < 0: x_fits = 101
            #if y_fits - 100 < 0: y_fits = 101
            #if x_fits + 100 > 9999: x_fits = 9899
            #if y_fits + 100 > 9999: y_fits = 9899
            xlow = int(x_fits - side)
            xhigh = int(x_fits + side)
            ylow = int(y_fits - side)
            yhigh = int(y_fits + side)
            if xlow < 0: xlow = ''
            if ylow < 0: ylow = ''
            if xhigh > 9999: xhigh = ''
            if yhigh > 9999: yhigh = ''
            bounds = '[' + str(xlow) + ':' + str(xhigh) + ',' + str(ylow) + ':' + str(yhigh) + ']'
            text += '<td colspan=20><table>\n'
            textim = ''
            textlabel = ''
            import string
            filters_b = conv_filt(filt_info)
            for filt in []:  #filters_b:  #[1:]:
                fitsfile = ('/nfs/slac/g/ki/ki04/pkelly/photoz/' + cluster + '/' + SPECTRA + '/' +
                            str(SeqNr) + 'cutout' + filt + '.fits')
                jpg = ('/nfs/slac/g/ki/ki04/pkelly/photoz/' + cluster + '/' + SPECTRA + '/' +
                       str(SeqNr) + 'cutout' + filt + '.jpg')
                os.system('rm ' + fitsfile)
                os.system('rm ' + jpg)
                bigfile = ('/a/wain023/g.ki.ki05/anja/SUBARU/' + cluster + '/' + filt +
                           '/SCIENCE/coadd_' + cluster + '_all/coadd.fits')
                iraf.imcopy(bigfile + bounds, fitsfile)
                import commands
                seeing = commands.getoutput('gethead ' + bigfile + ' SEEING')
                print 'seeing', seeing
                #os.system("ds9 " + fitsfile + " -view info no -view panner no -view magnifier no -view buttons no -view colorbar yes -view horzgraph no -view wcs no -view detector no -view amplifier no -view physical no -zoom to fit -minmax -histequ -invert -zoom to fit -saveas jpeg " + jpg)
                #os.system("xpaset -p ds9 " + fitsfile + " -zoom to fit -view colorbar no -minmax -histequ -invert -zoom to fit -saveas jpeg " + jpg)
                print 'bounds', bigfile + bounds
                com = ['file ' + fitsfile, 'zoom to fit', 'view colorbar no',
                       'minmax', 'scale histequ', 'saveimage jpeg ' + jpg]
                for c in com:
                    z = 'xpaset -p ds9 ' + c
                    print z
                    os.system(z)
                print jpg
                text += '<td><img height=200px src=' + str(SeqNr) + 'cutout' + filt + '.jpg></img></td>\n'
                textlabel += '<td>' + filt + ' seeing ' + seeing + '</td>\n'
            text += '<tr>' + textim + '</tr><tr>' + textlabel + '</tr></table></td></tr><tr>'
            #os.system('stiff ' + image_seg + ' -OUTFILE_NAME ' + image_seg.replace('fits', 'tif'))
            gals.append([line['BPZ_T_B'], text])
            if len(gals) > 100:  # stop after 100 objects
                break
    from datetime import datetime
    t2 = datetime.now()
    file = os.environ['sne'] + '/photoz/' + cluster + '/' + SPECTRA + '/objects.html'
    print file
    page = open(file, 'w')
    t = '<head><link href="http://www.slac.stanford.edu/~pkelly/photoz/table.css" rel="stylesheet" type="text/css"></head>'
    t += ('<table align=left><tr><td colspan=5 class="dark"><h1>' + cluster +
          '</h1></td></tr><tr><td colspan=5><h4>created ' + t2.strftime("%Y-%m-%d %H:%M:%S") +
          '</h4></td></tr><tr><td colspan=5><a href=stars.html>Stellar Color-Color Plots</a><td></tr>\n')
    if type == 'spec':
        t += '<tr><td colspan=5 class="dark"><h2>Spectroscopic Sample</h2></td></tr>\n'
        t += '<tr><td colspan=10 align=left><img height=400px src=RedshiftErrors.png></img>\n'
        t += '<img height=400px src=RedshiftScatter04.png></img>\n'
        t += '<img height=400px src=RedshiftScatter01.png></img>\n'
        t += '<img height=400px src=RedshiftPDZ01.png></img></td></tr>\n'
    if type == 'rand':
        t += '<tr><td colspan=5 class="dark"><h2>Random Sample of 100 Galaxies</h2></td></tr>\n'
    if type == 'picks':
        t += '<tr><td colspan=5 class="dark"><h2>Pick</h2></td></tr>\n'
    page.write(t)
    gals.sort()
    for gal in gals:
        page.write(gal[1])
    page.write('<table>\n')
    page.close()
    print 'CLOSED!!'
    ''' make SED plots '''
M9 = n.fromfile('1024_1200Mpc_lc_z7.1.dens', dtype='<f8', count=N**2) I9.shape = M9.shape = (N, N) C = lambda z: 26 * n.sqrt((1 + z) / 10) X, R = n.mgrid[:N, :N] R = DM(7.1) + R * Dx / N Z = z(R) T = (1 + M9) * (1 - I9) * C(Z) X *= Dx / N p.figure(facecolor='k', edgecolor='k') Theta = n.zeros((N, N)) for i in range(N): for j in range(N): Theta[i, j] = r2theta(X[i, j], Z[i, j]) * 180 / n.pi #compute theta in degrees ax = p.axes(axisbg='k') for line in ax.yaxis.get_ticklines(): # line is a matplotlib.lines.Line2D instance line.set_color('w') # line.set_markersize(25) # line.set_markeredgewidth(3) for line in ax.xaxis.get_ticklines(): line.set_color('w') p.pcolor(Theta, Z, T, cmap='gist_heat', vmin=0) p.colorbar(orientation='horizontal') p.xlabel('degrees') p.ylabel('z') p.show()
P.rc('font', size=9) P.figure(figsize=(6,8)) P.subplots_adjust(hspace=.5, bottom=.03, top=.96) for im in xrange(nmode): P.subplot(nmode,2,im*2+1) P.plot(eof[im].filled()) P.title('EOF %i'%(im+1)) P.subplot(nmode,2,im*2+2) P.plot(pc[im].filled()) P.title('PC %i'%(im+1)) P.savefig(__file__[:-2]+'out.png') P.figure(figsize=(6,8)) P.subplots_adjust() P.subplot(211) P.pcolor(var.filled()-var.filled().mean(0)) P.colorbar() P.title('Original') P.subplot(212) P.pcolor(rec.filled()) P.colorbar() P.title('Rec%i'%nmode) P.savefig(__file__[:-2]+'outrec.png') #P.show()
# We will be using the minisom library here
from minisom import MiniSom
# Our dataset is small, so we will just create a 10 by 10 grid: x = 10, y = 10
# input_len = number of features in our dataset, 14+1=15
# sigma: radius of the different neighborhoods in the grid
minisom = MiniSom(x=10, y=10, input_len=15, sigma=1.0, learning_rate=0.5)
minisom.random_weights_init(X)
minisom.train_random(data=X, num_iteration=200)

# Visualize the SOM
from pylab import bone, pcolor, colorbar, plot, show
bone()
pcolor(minisom.distance_map().T)
colorbar()  # colors close to white are frauds
markers = ["o", "s"]  # s = square
colors = ["r", "g"]  # red = didn't get approval, green = got approval
for i, x in enumerate(X):
    winning_node = minisom.winner(x)
    plot(winning_node[0] + 0.5,  # +0.5 places the marker at the centre of the cell
         winning_node[1] + 0.5,
         markers[y[i]],
         markeredgecolor=colors[y[i]],
         markerfacecolor="None",
         markersize=10,
         markeredgewidth=2)
show()
# - populate relevant fields with actual data from the dictionaries max_size = max([max(d.keys()) for d in count_dicts]) count_arrays = [[0] * (max_size + 1) for i in range(num_files)] for i in range(0, num_files): for key in count_dicts[i]: count_arrays[i][key] = count_dicts[i][key] # And now print it all out print num_files, "rows,", max_size, "columns." # for i in range(0, num_files): # for j in range(0, max_size): # print str(count_arrays[i][j]), # print # Now we graph it! # We actually graph the transpose of the log of the matrix + 1 flame_graphable = (np.log(np.add(count_arrays, 1))).conj().transpose() pl.hot() flame_graph = pl.pcolor(flame_graphable) pl.ylim((0, max_size)) pl.xlim((0, num_files)) pl.savefig(outfilename) if options.showgraph: pl.show()
pos = util.get_realdata(True) neg = util.get_realdata(False) traindatList[i] = concatenate((pos, neg), axis=1) trainfeatList[i] = util.get_realfeatures(pos, neg) trainlabsList[i] = util.get_labels(True) trainlabList[i] = util.get_labels() kernelList[i] = GaussianKernel(trainfeatList[i], trainfeatList[i], width) svmList[i] = LibSVM(10, kernelList[i], trainlabList[i]) for i in range(num_svms): print "Training svm nr. %d" % (i) currentSVM = svmList[i] currentSVM.train() print currentSVM.get_num_support_vectors() print "Done." x, y, z = util.compute_output_plot_isolines(currentSVM, kernelList[i], trainfeatList[i]) subplot(num_svms / 2, 2, i + 1) pcolor(x, y, z, shading='interp') contour(x, y, z, linewidths=1, colors='black', hold=True) scatter(traindatList[i][0, :], traindatList[i][1, :], s=20, marker='o', c=trainlabsList[i], hold=True) axis('tight') connect('key_press_event', util.quit) show()
sc = MinMaxScaler(feature_range=(0, 1))
X = sc.fit_transform(X)

# Training the SOM
from minisom import MiniSom
som = MiniSom(x=10, y=10, input_len=15, sigma=1.0, learning_rate=0.5)
# x, y give the dimensions of the SOM; input_len is the number of features
# (15 here, because the ID is included so frauds can be traced back); sigma is
# the radius; learning_rate controls how much the weights move per iteration
# (a higher learning rate converges faster)
som.random_weights_init(X)  # random initial weights
som.train_random(data=X, num_iteration=100)

# Visualizing the results
from pylab import bone, pcolor, colorbar, plot, show
bone()  # get a blank window
pcolor(som.distance_map().T)  # transpose of the distance matrix
colorbar()  # legend: the largest distances map to 1
markers = ['o', 's']
colors = ['r', 'g']  # red circle = not approved
for i, x in enumerate(X):  # i is 0, 1, 2, ...; x is the customer's vector of values
    w = som.winner(x)
    plot(w[0] + 0.5,
         w[1] + 0.5,  # centre of the cell
         markers[y[i]],  # marks approved or not
         markeredgecolor=colors[y[i]],
         markerfacecolor='None',  # inside color
         markersize=10,
         markeredgewidth=2)
show()
def prettyplot(f): xx, yy = pylab.meshgrid(pylab.linspace(-range_,range_, 100), pylab.linspace(-range_,range_, 100)) zz = pylab.zeros(xx.shape) for i in range(xx.shape[0]): for j in range(xx.shape[1]): zz[i,j] = f(np.array([xx[i,j], yy[i,j]])) pylab.pcolor(xx,yy,zz, cmap='RdBu', vmin=-range_, vmax=range_) pylab.colorbar() plt.xlim(-1, 1) plt.ylim(-1, 1) plt.xlabel(r'$x_1$') plt.ylabel(r'$x_2$') plt.title('Training data') plt.savefig('figs/orig_data.eps') plt.savefig('figs/orig_data.pdf') plt.savefig('figs/orig_data.png') trainx, trainy = pylab.meshgrid(pylab.linspace(-1,1, 25), pylab.linspace(-1,1, 25)) modelParams = {'model':'dgp', 'maxiter': 300, 'minibatch_size': 300, 'layer_types': ['gp', 'noisy', 'gp', 'noisy'], 'layer_nodes': [2, 1, 2, 1], 'early_stopping': False} training_data = [] training_targets = [] for i in range(trainx.shape[0]): for j in range(trainx.shape[1]): training_data.append([trainx[i, j], trainy[i, j]]) training_targets.append(f(np.array([trainx[i,j], trainy[i,j]])).flatten()) model = GPNetwork(np.array(training_data), np.array(training_targets), modelParams) zz = pylab.zeros(xx.shape) data = [] for i in range(xx.shape[0]): for j in range(xx.shape[1]): data.append([xx[i,j], yy[i,j]]) data = np.array(data) model.session.run(model.set_for_training.assign(0.0)) fd = {model.data_placeholder: data} pred = model.session.run((model.output_mean), feed_dict=fd) k = 0 for i in range(xx.shape[0]): for j in range(xx.shape[1]): zz[i, j] = pred[k, 0] k += 1 pylab.figure() pylab.pcolor(xx,yy,zz, cmap='RdBu', vmin=-range_, vmax=range_) pylab.colorbar() plt.xlim(-1, 1) plt.ylim(-1, 1) plt.title('DGP model') plt.xlabel(r'$x_1$') plt.ylabel(r'$x_2$') plt.savefig(r'figs/dgp_model.eps') plt.savefig(r'figs/dgp_model.pdf') plt.savefig(r'figs/dgp_model.png') for i, k in [(1, 0), (1, 1), (3, 0)]: modelplot(model, model.layers[i].nodes[k], xx, yy, 'layer{0}node{1}'.format(i, k))
os.getcwd() os.chdir('/disk/plasma2/jaa/CB8WAVES/CB8waves_04/Current_along_box') #Plots of the curls ################################################################################################ for i in range(10): nn= int(i) fig, ax0 = plt.subplots() im = plt.pcolor(J_sheet_2_Uri[:,:,nn]) #plot in the 65 (middle) fig.colorbar(im, ax=ax0) ax0.set_title('Current_sheet') fig.savefig("current"+ str(nn)+".png", bbox_inches='tight') ############################################################################### pylab.pcolor(divB[:,:,15], cmap='bwr') pylab.colorbar() pylab.title('Div B') #pylab.savefig('120_curlBzVi.png') pylab.show() pylab.pcolor(divB2[:,:,15], cmap='bwr') pylab.colorbar() pylab.title('Div B') #pylab.savefig('120_curlBzVi.png') pylab.show() pylab.pcolor(divBx[:,:,100], cmap='bwr') pylab.colorbar() pylab.title('Div B') #pylab.savefig('120_curlBzVi.png')
a.append(-1.0) metrics.append(a) fig1 = pylab.figure(1) ax = fig1.add_subplot(1, 1, 1) rect = ax.patch rect.set_facecolor('lightgray') colors = [('black')] + [(pylab.cm.jet(i)) for i in xrange(1, 255)] + [('white')] new_map = matplotlib.colors.LinearSegmentedColormap.from_list('new_map', colors, N=256) mdata = pylab.array(metrics) pylab.pcolor(mdata, cmap=new_map) cbar = pylab.colorbar() #cbar.set_label(cm) pylab.xlabel('P/A') labels = get_labels(xs) pylab.xticks(labels[0], labels[1]) pylab.ylabel('Variance') labels = get_labels(ys) pylab.yticks(labels[0], labels[1]) #pylab.title('%s: %s\n%s'%(cm, cn, desc)) fig = pylab.gcf() #fig.canvas.set_window_title('%s - %s (%s)'%(cn, desc, cm))
def r(d1, d2, boxsize=500., bin2fact=1. / 16., filename='', getnuminbin=False,
      overwrite=False, getnumbin=False, special='', plotty=False):
    ngrid = len(d1[:, 0, 0])
    if os.path.isfile(filename) and (overwrite == False):
        # use the cached result
        p = N.loadtxt(filename)
        kmean = p[:, 0]
        pk = p[:, 1]
        numinbin = p[:, 2]
        if getnumbin:
            return kmean, pk, numinbin
        return kmean, pk
    if ngrid != len(d2[:, 0, 0]):
        print 'arrays not the same size; quit'
        return
    d1k = N.fft.rfftn(d1)
    d2k = N.fft.rfftn(d2)
    s = d1.shape
    sk = d1k.shape
    #d1k = d1k.flatten()
    #d2k = d2k.flatten()
    if special == '':
        dk2 = 0.5 * (d1k * N.conjugate(d2k) + N.conjugate(d1k) * d2k) / \
            N.sqrt(d1k * N.conjugate(d1k) * d2k * N.conjugate(d2k))
    elif special == 'propagator':
        # d1 must be initial conditions!
        dk2 = 0.5 * N.real((d1k * N.conjugate(d2k) + N.conjugate(d1k) * d2k) /
                           (d1k * N.conjugate(d1k)))
    elif special == 'amplitude':
        dk2 = 0.5 * (d1k * N.conjugate(d2k) + N.conjugate(d1k) * d2k) / \
            (d1k * N.conjugate(d1k))
    if plotty:
        M.clf()
        M.pcolor(N.real(dk2[:, 0, :]))
        M.colorbar()
    dk2 = dk2.flatten()
    a = N.fromfunction(lambda x, y, z: x, sk)
    a[N.where(a > s[0] / 2)] -= s[0]
    b = N.fromfunction(lambda x, y, z: y, sk)
    b[N.where(b > s[1] / 2)] -= s[1]
    c = N.fromfunction(lambda x, y, z: z, sk)
    c[N.where(c > s[2] / 2)] -= s[2]
    kmin = 2. * N.pi / boxsize
    k = kmin * N.sqrt((a ** 2 + b ** 2 + c ** 2).flatten())
    index = N.argsort(k)
    k = k[index]
    dk2 = dk2[index]
    c0 = 0. * c.flatten() + 1.
    c0[N.where(c.flatten() == 0.)] -= 0.5
    c0 = c0[index]  # half-count cells on the z-axis
    log2 = N.log(2.)
    binedges = kmin * 2. ** N.arange(-bin2fact / 2., N.log(k[-1] / kmin) / log2, bin2fact)
    cuts = N.searchsorted(k, binedges)
    numinbin = 0. * binedges
    pk = 0. * binedges
    kmean = 0. * binedges
    nbins = len(binedges)
    for i in N.arange(0, nbins - 1):
        if cuts[i + 1] > cuts[i]:
            numinbin[i] = N.sum(c0[cuts[i]:cuts[i + 1]])
            pk[i] = N.sum(c0[cuts[i]:cuts[i + 1]] * dk2[cuts[i]:cuts[i + 1]])
            kmean[i] = N.sum(c0[cuts[i]:cuts[i + 1]] * k[cuts[i]:cuts[i + 1]])
    wn0 = N.where(numinbin > 0.)[0]
    pk = pk[wn0]
    kmean = kmean[wn0]
    numinbin = numinbin[wn0]
    pk /= numinbin
    kmean /= numinbin
    #pk *= boxsize**3/N.prod(N.array(s).astype(float))**2
    if filename != '':
        N.savetxt(filename, N.transpose([kmean, pk, numinbin]))
    if getnumbin:
        return kmean, pk, numinbin
    else:
        return kmean, pk
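# Quick self-check sketch for r() (illustrative only): the cross-correlation
# of a random field with a noisy copy of itself should sit close to 1 in the
# well-sampled bins.
import numpy as N
d1 = N.random.normal(size=(32, 32, 32))
d2 = d1 + 0.25 * N.random.normal(size=(32, 32, 32))
kmean, xc = r(d1, d2, boxsize=500.)
print kmean[0], xc[0]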
def MirrorPressurecontour_N_Pressureslice(current_data):
    from pylab import linspace, plot, subplot, pcolor, contour, contourf, annotate, text, cm, colorbar, show, xlabel, ylabel, xticks, yticks
    from pylab import subplots, ylim, xlim, setp, annotate, text, get, subplot2grid, axes, gca, title
    import numpy as np
    import matplotlib.ticker as ticker
    from clawpack.visclaw import colormaps as cmaps
    xx = current_data.x
    yy = current_data.y
    dy = abs(yy[1, 1] - yy[1, 2])
    q = current_data.q  # solution when this function is called
    aux = current_data.aux
    gamma = aux[0, :, :]
    gamma1 = aux[0, :, :] - 1.0
    pinf = aux[1, :, :]
    omega = aux[2, :, :]
    rho = q[0, :, :]   # density
    momx = q[1, :, :]  # momentum
    momy = q[2, :, :]
    ene = q[3, :, :]   # energy
    P = gamma1 * (ene - 0.5 * (momx * momx + momy * momy) / rho)  # /(1.0 - omega*rho)
    P = P - gamma * pinf
    # Convert to PSI
    P = P * 0.000145038 - 14.6959488
    x = [-0.0085, -0.0085, 0.0085, 0.0085]
    x = [zz - 0.0 for zz in x]
    y = [0.0, 0.0085, 0.0085, 0.0]
    y2 = [-zz for zz in y]
    s1 = subplot2grid((5, 16), (0, 0), colspan=14, rowspan=3)  # subplot(211)
    plot(x, y, 'k', linewidth=2.0)
    plot(x, y2, 'k', linewidth=2.0)
    locator = ticker.MaxNLocator(20)  # if you want no more than 20 contours
    locator.create_dummy_axis()
    # For Pa: locator.set_bounds(90000, 300000)
    # For PSI:
    locator.set_bounds(-20, 30)
    levs = locator()
    #f, (ax1, ax2) = subplots(2, sharex=True, sharey=True, subplot_kw=dict(adjustable='datalim', aspect='equal'))
    # OTHER colormap: cmap = cmaps.schlieren_grays
    cont1 = contourf(xx, yy - 0.5 * dy, P, levs, alpha=.75, cmap=cm.Blues)
    cont2 = contourf(xx, -(yy - 0.5 * dy), P, levs, alpha=.75, cmap=cm.Blues)
    # for PSI or Pascals
    pcolor(xx, yy - 0.5 * dy, P, cmap=cm.Blues, vmin=-20, vmax=30)
    pcolor(xx, -(yy - 0.5 * dy), P, cmap=cm.Blues, vmin=-20, vmax=30)
    s1.set_xlim([-0.03, 0.03])
    s1.set_ylim([-0.02, 0.02])
    # Convert axis to cm
    xxticks = np.arange(xx.min(), xx.max(), 0.01)
    labelsx = range(0)  # range(xxticks.size)
    labelsx[:] = [x - 3 for x in labelsx]
    xticks(xxticks, labelsx)
    yyticks = np.arange(-yy.max(), yy.max(), 0.01)
    labelsy = range(yyticks.size)
    labelsy[:] = [x - 2 for x in labelsy]
    yticks(yyticks, labelsy)
    #xlabel('cm', fontsize='13', fontweight='bold')
    ylabel('cm', fontsize='13', fontweight='bold')
    title('Pressure contours (2D) & pressure slice (1D)', fontweight='bold')
    #pcolor(xx, yy - 0.5 * dy, P, cmap=cm.Blues, vmin=90000, vmax=300000)
    #pcolor(xx, -(yy - 0.5 * dy), P, cmap=cm.Blues, vmin=90000, vmax=300000)
    #colorbar()
    contour(xx, yy - 0.5 * dy, P, levs, colors='black', linewidth=0.5)
    contour(xx, -(yy - 0.5 * dy), P, levs, colors='black', linewidth=0.5)
    s2 = subplot2grid((5, 16), (3, 0), colspan=14, rowspan=2)  # subplot(212)
    x_slice, P_slice = xsec(current_data)
    plot(x_slice, P_slice, 'k-')
    s2.set_xlim([-3, 3])
    s2.set_ylim([-20, 30])
    #ax1.ylim(-20, 30)
    #gcs = 2.0/200.0
    x = [-0.85, -0.85, 0.85, 0.85]
    y = [-100, 100, 100, -100]
    #y[:] = [xx - gcs for xx in y]
    plot(x, y, 'k', linewidth=2.0)
    xlabel('cm', fontsize='13', fontweight='bold')
    ylabel('psi', fontsize='13', fontweight='bold')
    #title('Pressure slice')
    xcav = [-3.0, 3.0]
    ycav = [-14.334351113, -14.334351113]  # water vapour pressure for cavitation at room temp (1 atm = 0 reference system)
    plot(xcav, ycav, 'b--')
    #plot(-8.0, 180000, 'vk', markersize=10)
    #plot(-2.0, 180000, 'vk', markersize=10)
    #plot(0.0, 180000, 'vk', markersize=10)
    #plot(2.0, 180000, 'vk', markersize=10)
    text(-0.75, 25, 'Water', fontweight='bold', fontsize=13)
    #text(-0.8, 285000, 'PS', fontweight='bold', fontsize=20)
    text(-2.9, 25, 'Air', fontweight='bold', fontsize=13)
    text(0.95, 25, 'Air', fontweight='bold', fontsize=13)
    text(1.2, -13, 'Vapor pressure', fontweight='bold', fontsize=13, color='blue')
    s3 = subplot2grid((5, 16), (0, 15), rowspan=5)
    from mpl_toolkits.axes_grid1 import make_axes_locatable
    divider = make_axes_locatable(gca())
    cax = divider.append_axes("right", "5%", pad="3%")
    cax.axis('off')
    cbar = colorbar(cont1, cax=s3)  # orientation='horizontal', shrink=0.99, location='eastoutside'
    cbar.ax.set_xlabel('psi', fontsize='13', fontweight='bold', rotation='horizontal')
def draw():
    # redraw the current grid state (assumes module-level `grid` and `time`
    # that are updated by the simulation step function)
    PL.cla()
    PL.pcolor(grid, vmin=0, vmax=1, cmap=PL.cm.binary)
    PL.axis('image')
    PL.title('t = ' + str(time))
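# Hedged usage sketch: draw() matches the pycxsimulator callback convention
# (Sayama's PyCX), where the GUI is started with init/draw/step callbacks.
# The init() and step() names are assumptions, shown only to make the wiring
# explicit; define them alongside draw() before uncommenting.
# import pycxsimulator
# pycxsimulator.GUI().start(func=[init, draw, step])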
reglon = reshape(glon, (Mx, My))

# Plot areas where thickness and PISM mask are appropriate,
# and filter out RIGGS points that are outside the model domain,
# and use location of calving front from pismaccur flag, which is
# interpolated by PISM from original ross.nc file "accur" flag.
cbar_masked = ma.array(cbar,
                       mask=(mask != 3) + (H < 20.0) +
                       ((pismaccur < 0.01) & (reglat < -11.0) & (reglon < 1.0)))

# show computed speed as color
figure(1, figsize=(9, 8))
clf()
hold(True)
pcolor(gridlon, gridlatext, cbar_masked)
colorbar()

# compute grid lat and lon of RIGGS points (in deg,min,sec in .dat file)
RIGGSlat = -(RIGGS[:, 3] + RIGGS[:, 4] / 60 + RIGGS[:, 5] / (60 * 60))
RIGGSlon = RIGGS[:, 6] + RIGGS[:, 7] / 60 + RIGGS[:, 8] / (60 * 60)
RIGGSlon = -RIGGSlon * RIGGS[:, 9]  # RIGGS[:,9] is +1 if W, -1 if E

# throw out the ones which are not in model domain; 132 (131?) remain
cbar_masked = cbar_masked.filled(-20)

# triangulate data
tri = Triangulation(glon, glat)
cRIGGS = tri.nn_interpolator(cbar_masked.flat)(RIGGSlon, RIGGSlat)
rig = RIGGS[cRIGGS > 0]
data = np.zeros((width, height))

# torch coordinates
XX, YY = np.meshgrid(np.arange(width) + 1.5, np.arange(height) + 1.5)

# note that the torch indices are one-based, so we subtract one here
baseIndex = firstIndices[photonIndex] - 1

for i in range(numRecHits[photonIndex]):
    xc = xcoords[baseIndex + i] - 1
    yc = ycoords[baseIndex + i] - 1
    assert xc >= 0
    assert xc < width
    assert yc >= 0
    assert yc < height
    data[yc, xc] = energies[baseIndex + i]

# plot the matrix
pylab.figure()
pylab.pcolor(XX, YY, data, cmap=pylab.cm.Blues)
pylab.title("index " + str(photonIndex) +
            " (label=" + str(labels[photonIndex]) + ")")
pylab.grid()
pylab.show()
# standard exploration is e-greedy, but a different type can be chosen as well
# learner.explorer = BoltzmannExplorer()

# create agent
agent = LearningAgent(table, learner)

# create experiment
experiment = Experiment(task, agent)

# prepare plotting
pylab.gray()
pylab.ion()

#for i in range(100):
while True:
    # interact with the environment (here in batch mode)
    experiment.doInteractions(matrix_size)
    agent.learn()
    agent.reset()

    # and draw the table
    print table.params.reshape(matrix_size, 2)
    #print table.params.reshape(matrix_size, matrix_size)
    pylab.pcolor(table.params.reshape(matrix_size, 2).max(1).reshape(matrix_size, 1))
    #pylab.pcolor(table.params.reshape(matrix_size, matrix_size).max(1).reshape(matrix_size, 1))
    pylab.draw()
    pylab.ion()
    pylab.show()

print "training complete"
def plot_flc_haplotypes(snps, accessions=None, positions=None,
                        dist_measure="identity", correctScale=False,
                        haplotypeFile=None, treeFile=None, acc_250k=None,
                        perlegen_acc=None, flc_250K_positions=None):
    """
    080609 created.

    dist_measure: type of distance metric used for clustering.
    """
    import numpy as np
    import scipy as sp
    import scipy.cluster.hierarchy as hc
    import pylab

    if dist_measure == "identity":
        a = identity_metric(snps)
    elif dist_measure == "":
        pass
    Z = hc.average(a)  # perform clustering using the distance matrix
    dend_dict = hc.dendrogram(Z, labels=accessions)
    new_acc_order = dend_dict['ivl']
    if treeFile:
        pylab.savefig(treeFile, format='pdf')

    # Reorder the SNPs to match the dendrogram leaf order.
    acc_mapping = []
    for acc in accessions:
        i = new_acc_order.index(acc)
        acc_mapping.append(i)
    new_snps = []
    for snp in snps:
        new_snp = [0] * len(snp)
        for (nt, i) in zip(snp, acc_mapping):
            new_snp[i] = nt
        new_snps.append(new_snp)
    # apply the clustering order (as in plot_haplotypes below); the original
    # version computed new_snps but then discarded it
    snps = new_snps
    accessions = new_acc_order

    snps = sp.array(snps)

    class _ntDict_(dict):
        def __missing__(self, key):
            return 0.0

    # map nucleotides to numeric codes for pcolor
    nt_map = _ntDict_()
    d = {"N": 0.0, "NA": 0.0, "-": 5.0, "A": 1.0, "C": 2.0, "G": 3.0, "T": 4.0}
    for key in d:
        nt_map[key] = d[key]
    for i, nt in enumerate(snps.flat):
        snps.flat[i] = nt_map[nt]
    snps = snps.astype(float)

    for i in range(0, len(snps), 500):
        pylab.clf()
        fig = pylab.figure(figsize=(16, 5))
        ax1 = pylab.axes([0.06, 0.05, 1.0, 0.9])
        new_snps = snps[i:i + 500]
        if positions:
            # Prepare SNP cell edges for pcolor
            x_s = np.zeros((len(new_snps) + 1, len(new_snps[0, ]) + 1))
            start_pos = positions[0] - (0.5 * (positions[1] - positions[0]))
            print len(x_s), len(x_s[0, ])
            for j in range(0, len(x_s[0, ])):
                x_s[0, j] = start_pos
            for j in range(1, len(x_s) - 1):  # number of SNPs
                x = positions[j - 1] + 0.5 * (positions[j] - positions[j - 1])
                for k in range(0, len(x_s[j, ])):  # number of NTs
                    x_s[j, k] = x
            for j in range(0, len(x_s[0, ])):
                x_s[-1, j] = positions[-1] + (0.5 * (positions[-1] - positions[-2]))
            y_s = np.zeros((len(new_snps) + 1, len(new_snps[0, ]) + 1))
            for j in range(0, len(y_s)):  # number of SNPs
                for k in range(0, len(y_s[j, ])):  # number of NTs
                    y_s[j, k] = k - 0.5
            pylab.pcolor(x_s, y_s, new_snps)
        else:
            pylab.pcolor(new_snps.transpose())
        yticks, labels = pylab.yticks(range(0, len(accessions)), accessions,
                                      size="medium")
        for acc_i in range(0, len(accessions)):
            if acc_i in acc_250k:
                yticks[acc_i].label1.set_color("blue")
        for pos in flc_250K_positions:
            pylab.plot([pos, pos], [-2, -1], color="black")
        x_range = (x_s[-1, 0] - x_s[0, 0])
        y_range = len(accessions)
        pylab.axis((x_s[0, 0] - 0.05 * x_range, x_s[-1, 0] + 0.05 * x_range,
                    -2 - 0.05 * y_range, len(accessions) + 0.05 * y_range))
        cbar = pylab.colorbar(ticks=[0, 1, 2, 3, 4, 5])
        cbar.ax.set_yticklabels(['NA', 'A', 'C', 'G', 'T', '-'])
        if haplotypeFile:
            pylab.savefig(haplotypeFile + "_" + str(i) + ".pdf", format='pdf')
        else:
            pylab.show()
        pylab.clf()
# Training the SOM.
# We will use another developer's implementation for the moment (minisom.py file).
from minisom import MiniSom

# the dimensions of the map (10*10) and the input length (the number of features)
som = MiniSom(x=10, y=10, input_len=15, sigma=1.0, learning_rate=0.5)
som.random_weights_init(X)
som.train_random(data=X, num_iteration=100)

# visualizing the results
from pylab import bone, pcolor, colorbar, plot, show
bone()
pcolor(som.distance_map().T)  # pcolor takes the transpose
colorbar()
markers = ['o', 's']  # 'o' means circle and 's' means square
colors = ['r', 'g']   # red and green
for i, x in enumerate(X):  # i is the customer index, x is the feature vector of row i
    w = som.winner(x)  # get the winning node of the customer with vector x
    plot(w[0] + 0.5,
         w[1] + 0.5,
         markers[y[i]],
         markeredgecolor=colors[y[i]],
         markerfacecolor='None',
         markersize=10,
         markeredgewidth=2)
show()
def plot_local_haplotypes(filename, marker_data, focal_start, focal_end,
                          error_tolerance=0, phenotypeData=None, phen_id=None):
    """
    Plots certain types of haplotype plots...
    """
    haplotype_ids = range(1000, 0, -1)

    # Fill the color matrix up..
    start_i = 0
    cur_pos = 0
    while start_i < len(marker_data.positions) and cur_pos < focal_start:
        cur_pos = marker_data.positions[start_i]
        start_i += 1
    if start_i == len(marker_data.positions):
        raise Exception("Region is not covered by markers.")
    end_i = start_i
    while end_i < len(marker_data.positions) and cur_pos < focal_end:
        cur_pos = marker_data.positions[end_i]
        end_i += 1

    center_haplotypes = []
    for a_i in range(len(marker_data.accessions)):
        haplotype = []
        for snp in marker_data.snps[start_i:end_i]:
            haplotype.append(snp[a_i])
        center_haplotypes.append(haplotype)
    haplotype_dict = {}
    hap_pos = (marker_data.positions[end_i - 1] + marker_data.positions[start_i]) / 2
    for a_i, c_h in enumerate(center_haplotypes):
        ch = tuple(c_h)
        if not ch in haplotype_dict:
            # [haplotype id, haplotype frequency count,
            #  list of accession indices, position]
            haplotype_dict[ch] = [0, 0, [a_i], hap_pos]
        else:
            haplotype_dict[ch][2].append(a_i)
    freq_hapl_list = []
    for ch in haplotype_dict:
        hi = haplotype_dict[ch]
        haplotype_dict[ch][1] = len(hi[2])
        freq_hapl_list.append((len(hi[2]), ch))
    freq_hapl_list.sort(reverse=True)
    for (hc, haplotype) in freq_hapl_list:
        if hc == 1:
            haplotype_dict[haplotype][0] = 0
        else:
            haplotype_dict[haplotype][0] = haplotype_ids.pop()
    center_haplotype_dict = haplotype_dict
    left_haplotypes = [center_haplotype_dict]
    right_haplotypes = []
    left_positions = [hap_pos]
    right_positions = []

    # Starting with the haplotype structure to the left!
    i = start_i - 1
    old_hap_dict = center_haplotype_dict
    while old_hap_dict and i >= 0:
        haplotype_dict = {}
        hap_pos = marker_data.positions[i]
        left_positions.append(hap_pos)
        for hap in old_hap_dict:
            (h_id, h_count, acc_indices, pos) = old_hap_dict[hap]  # old haplotype info
            temp_hap_dict = {}
            for a_i in acc_indices:
                new_hap = tuple([marker_data.snps[i][a_i]] + list(hap))
                if not new_hap in temp_hap_dict:
                    temp_hap_dict[new_hap] = [0, 0, [a_i], hap_pos]
                else:
                    temp_hap_dict[new_hap][2].append(a_i)
            freq_hapl_list = []
            for h in temp_hap_dict:
                hi = temp_hap_dict[h]
                temp_hap_dict[h][1] = len(hi[2])
                freq_hapl_list.append((len(hi[2]), h))
            freq_hapl_list.sort()
            # the most frequent haplotype gets colored like the previous one
            (hc, h) = freq_hapl_list.pop()
            if hc == 1:
                del temp_hap_dict[h]
            else:
                temp_hap_dict[h][0] = h_id
            freq_hapl_list.reverse()
            for (hc, h) in freq_hapl_list:
                if hc == 1:
                    del temp_hap_dict[h]
                else:
                    temp_hap_dict[h][0] = haplotype_ids.pop()
            for h in temp_hap_dict:
                haplotype_dict[h] = temp_hap_dict[h]
        if haplotype_dict:
            left_haplotypes.append(haplotype_dict)
        old_hap_dict = haplotype_dict
        i -= 1

    # Now the same with the haplotype structure to the right!
    i = end_i
    old_hap_dict = center_haplotype_dict
    while old_hap_dict and i < len(marker_data.snps):
        haplotype_dict = {}
        hap_pos = marker_data.positions[i]
        right_positions.append(hap_pos)
        for hap in old_hap_dict:
            (h_id, h_count, acc_indices, pos) = old_hap_dict[hap]
            temp_hap_dict = {}
            for a_i in acc_indices:
                nt = marker_data.snps[i][a_i]
                new_hap = list(hap)
                new_hap.append(nt)
                new_hap = tuple(new_hap)
                if not new_hap in temp_hap_dict:
                    temp_hap_dict[new_hap] = [0, 0, [a_i], hap_pos]
                else:
                    temp_hap_dict[new_hap][2].append(a_i)
            freq_hapl_list = []
            for h in temp_hap_dict:
                hi = temp_hap_dict[h]
                temp_hap_dict[h][1] = len(hi[2])
                freq_hapl_list.append((len(hi[2]), h))
            freq_hapl_list.sort()
            (hc, h) = freq_hapl_list.pop()  # most frequent keeps the old color
            if hc == 1:
                del temp_hap_dict[h]
            else:
                temp_hap_dict[h][0] = h_id
            freq_hapl_list.reverse()
            for (hc, h) in freq_hapl_list:
                if hc == 1:
                    del temp_hap_dict[h]
                else:
                    temp_hap_dict[h][0] = haplotype_ids.pop()
            for h in temp_hap_dict:
                haplotype_dict[h] = temp_hap_dict[h]
        if haplotype_dict:
            right_haplotypes.append(haplotype_dict)
        old_hap_dict = haplotype_dict
        i += 1

    # Clustering...
    dm = calc_local_dist(marker_data, focal_start, focal_end,
                         error_tolerance=error_tolerance)
    print dm
    import scipy as sp
    import scipy.cluster.hierarchy as hc
    Z = hc.average(dm)  # perform clustering using the distance matrix
    print Z
    import pylab
    dend_dict = hc.dendrogram(Z, labels=marker_data.accessions)
    new_acc_order = dend_dict['ivl']
    print new_acc_order
    ai_map = [new_acc_order.index(acc) for acc in marker_data.accessions]

    import numpy as np
    # Updating the positions in the figure.
    left_positions.reverse()
    positions = left_positions + right_positions
    x_s = np.zeros((len(positions) + 1, len(marker_data.accessions) + 1))
    start_pos = positions[0] - (0.5 * (positions[1] - positions[0]))
    print len(x_s), len(x_s[0, ])
    for j in range(0, len(x_s[0, ])):
        x_s[0, j] = start_pos
    for j in range(1, len(x_s) - 1):  # number of SNPs
        x = positions[j - 1] + 0.5 * (positions[j] - positions[j - 1])
        for k in range(0, len(x_s[j, ])):  # number of NTs
            x_s[j, k] = x
    for j in range(0, len(x_s[0, ])):
        x_s[-1, j] = positions[-1] + (0.5 * (positions[-1] - positions[-2]))
    y_s = np.zeros((len(positions) + 1, len(marker_data.accessions) + 1))
    for j in range(0, len(y_s)):  # number of SNPs
        for k in range(0, len(y_s[j, ])):  # number of NTs
            y_s[j, k] = k - 0.5

    # Updating the colors in the figure.
    color_matrix = np.ones((len(positions), len(marker_data.accessions)))
    left_haplotypes.reverse()
    haplotypes = left_haplotypes + right_haplotypes
    max_color = float(haplotype_ids.pop())
    for i, hap_dict in enumerate(haplotypes):
        for h in hap_dict:
            (h_id, h_count, acc_indices, pos) = hap_dict[h]
            for a_i in acc_indices:
                m_ai = ai_map[a_i]
                if h_id == 0:
                    color_matrix[i, m_ai] = 1.0
                else:
                    color_matrix[i, m_ai] = h_id / max_color

    import phenotypeData as pd
    e_dict = pd._getEcotypeIdInfoDict_()
    accessions = [unicode(e_dict[int(e)][0], 'iso-8859-1') for e in new_acc_order]

    # Plot figure..
    pylab.figure(figsize=(18, 8))
    pylab.axes([0.08, 0.06, 0.9, 0.88])
    pylab.pcolor(x_s, y_s, color_matrix, cmap=pylab.cm.hot)

    # Dealing with the phenotype data
    phenotypeData.removeAccessionsNotInSNPsData(marker_data)
    et_mapping = []
    for i, et in enumerate(new_acc_order):
        et_mapping.append((marker_data.accessions.index(et), i))
    phenotypeData.orderAccessions(et_mapping)
    phen_vals = phenotypeData.getPhenVals(phen_id, noNAs=False)
    acc_strings = [accessions[i] + ", " + str(phen_vals[i])
                   for i in range(len(accessions))]
    pylab.yticks(range(0, len(marker_data.accessions)), acc_strings, size="small")
    x_range = (x_s[-1, 0] - x_s[0, 0])

    # Retrieving and drawing the genes
    import regionPlotter as rp
    import gwaResults as gr
    genes = gr.get_gene_list(start_pos=x_s[0, 0], end_pos=x_s[-1, 0], chr=5)
    rp.drawGenes(genes, y_shift=-3, rangeVal=40)
    pylab.axis((x_s[0, 0] - 0.05 * x_range, x_s[-1, 0] + 0.05 * x_range,
                -0.1 * len(marker_data.accessions) - 1,
                1.02 * len(marker_data.accessions)))
    pylab.savefig(filename, format='pdf')
winning nodes will be colored in such a way that the larger the MID is,
the closer to white the color will be.
"""
# ------------------------------------------------------------------------------------
"""
We actually identified the outliers (frauds) above. To get the explicit list
of the customers, we just need the inverse mapping of these winning nodes to
see which customers are associated with each winning node. We can also add
some markers to spot them easily in the map!
"""
# Visualizing the results
from pylab import bone, pcolor, colorbar, plot, show
bone()  # this is the window that will contain the map
pcolor(som.distance_map().T)  # all the different colors corresponding to the MIDs
colorbar()  # white colors are the outliers (frauds)
markers = ['o', 's']  # red circles (r, o): the customers who didn't get approval
colors = ['r', 'g']   # green squares (g, s): the customers who got approval
for i, j in enumerate(X):  # i: index, j: the feature vector of customer i
    w = som.winner(j)  # winning node
    plot(w[0] + 0.5,  # put the marker at the center of the square
         w[1] + 0.5,
         markers[y[i]],
         markeredgecolor=colors[y[i]],
         markerfacecolor='None',
         markersize=10,
         markeredgewidth=2)
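# Hedged sketch of the inverse mapping described above (not from the original
# snippet): MiniSom's win_map() returns a dict of {winning node -> list of
# samples}, so the customers mapped to the outlier nodes can be pulled out
# directly. The node coordinates (x1, y1) and (x2, y2) are hypothetical; read
# the real ones off the white cells of the distance map. `sc` is assumed to
# be the fitted MinMaxScaler that was used to scale X.
import numpy as np
mappings = som.win_map(X)
frauds = np.concatenate((mappings[(x1, y1)], mappings[(x2, y2)]), axis=0)
frauds = sc.inverse_transform(frauds)  # back to the original feature units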
if video == 1:
    if not os.path.exists('movie'):
        os.mkdir('movie')
    k = 0
    while t <= ND:
        [Grid, X, Y, timestep] = diff_eqs(Grid, timestep)
        t += timestep
        T.append(t)
        k += 1
        pl.clf()
        pl.subplot(211)
        pl.pcolor(Grid, cmap=pl.cm.jet)
        pl.title('Forest-Fire Model')
        pl.subplot(413)
        pl.plot(T, X, color='g')
        pl.ylabel('Susceptible')
        pl.subplot(414)
        pl.plot(T, Y, color='r')
        pl.ylabel('Infected')
        pl.xlabel('Time (days)')
        pl.savefig("movie/frame_%04d.png" % k)
        print(k)

## You will need mencoder (from mplayer) to turn the frames into a movie.
## On Windows you have to modify the path.
## On Linux, if you have mencoder installed, it should usually work.
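## A sketch of the mencoder invocation implied above (an assumption, not from
## the original script; adjust fps, codec options, and the output name):
##
##   mencoder "mf://movie/frame_*.png" -mf type=png:fps=10 \
##            -ovc lavc -lavcopts vcodec=mpeg4 -o forest_fire.avi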
def plot_haplotypes(snps, accessions=None, positions=None,
                    dist_measure="identity", correctScale=False,
                    haplotypeFile=None, treeFile=None, with_genes=False,
                    chr=None):
    """
    080109 created.

    dist_measure: type of distance metric used for clustering.
    """
    import numpy as np
    import scipy as sp
    import scipy.cluster.hierarchy as hc
    import pylab

    if dist_measure == "identity":
        a = identity_metric(snps)
    Z = hc.average(a)  # perform clustering using the distance matrix
    print Z
    dend_dict = hc.dendrogram(Z, labels=accessions)
    new_acc_order = dend_dict['ivl']
    print new_acc_order
    if treeFile:
        pylab.savefig(treeFile, format='pdf')

    # Reorder the SNPs to match the dendrogram leaf order.
    acc_mapping = []
    for acc in accessions:
        i = new_acc_order.index(acc)
        acc_mapping.append(i)
    new_snps = []
    for snp in snps:
        new_snp = [0] * len(snp)
        for (nt, i) in zip(snp, acc_mapping):
            new_snp[i] = nt
        new_snps.append(new_snp)
    snps = new_snps
    accessions = new_acc_order

    snps = sp.array(snps)

    class _ntDict_(dict):
        def __missing__(self, key):
            return 0.0

    # map nucleotides to numeric codes for pcolor
    nt_map = _ntDict_()
    d = {"N": 0.0, "NA": 0.0, "-": 5.0, "A": 1.0, "C": 2.0, "G": 3.0, "T": 4.0}
    for key in d:
        nt_map[key] = d[key]
    for i, nt in enumerate(snps.flat):
        snps.flat[i] = nt_map[nt]
    snps = snps.astype(float)

    # Load genes..
    if with_genes:
        import gwaResults as gr
        genes = gr.get_gene_list(chr=chr, start_pos=min(positions),
                                 end_pos=max(positions))

    for i in range(0, len(snps), 500):
        pylab.clf()
        if with_genes:
            pylab.figure(figsize=(20, 7))
            pylab.axes([0.04, 0.09, 1.0, 0.87])
        else:
            pylab.figure(figsize=(16, 5))
            pylab.axes([0.06, 0.06, 1.0, 0.9])
        new_snps = snps[i:i + 500]
        if positions:
            # Prepare SNP cell edges for pcolor
            x_s = np.zeros((len(new_snps) + 1, len(new_snps[0, ]) + 1))
            start_pos = positions[0] - (0.5 * (positions[1] - positions[0]))
            print len(x_s), len(x_s[0, ])
            for j in range(0, len(x_s[0, ])):
                x_s[0, j] = start_pos
            for j in range(1, len(x_s) - 1):  # number of SNPs
                x = positions[j - 1] + 0.5 * (positions[j] - positions[j - 1])
                for k in range(0, len(x_s[j, ])):  # number of NTs
                    x_s[j, k] = x
            for j in range(0, len(x_s[0, ])):
                x_s[-1, j] = positions[-1] + (0.5 * (positions[-1] - positions[-2]))
            y_s = np.zeros((len(new_snps) + 1, len(new_snps[0, ]) + 1))
            for j in range(0, len(y_s)):  # number of SNPs
                for k in range(0, len(y_s[j, ])):  # number of NTs
                    y_s[j, k] = k - 0.5
            pylab.pcolor(x_s, y_s, new_snps)
            x_range = (x_s[-1, 0] - x_s[0, 0])
            pylab.axis((x_s[0, 0] - 0.05 * x_range, x_s[-1, 0] + 0.05 * x_range,
                        -0.1 * len(accessions) - 1, 1.1 * len(accessions)))
        else:
            pylab.pcolor(new_snps.transpose())
        if with_genes:
            import regionPlotter as rp
            rp.drawGenes(genes, y_shift=-1.5)
        pylab.yticks(range(0, len(accessions)), accessions, size="medium")
        cbar = pylab.colorbar(ticks=[0, 1, 2, 3, 4, 5])
        cbar.ax.set_yticklabels(['NA', 'A', 'C', 'G', 'T', '-'])
        if haplotypeFile:
            pylab.savefig(haplotypeFile + "_" + str(i) + ".pdf", format='pdf')
        else:
            pylab.show()
        pylab.clf()
    '/global/cscratch1/sd/petercal/ACME_simulations/edison.A_WCYCL1950.1950_aero4.ne30_oECv3_ICG/run/edison.A_WCYCL1950.1950_aero4.ne30_oECv3_ICG.cam.h0.0001-01-01-00000-rgr.nc'
)  # (the opening of this call was truncated in the source)

x_old = f_orig.variables['SOLIN']
x_new = f_new['SOLIN']
lat = f_orig.variables['lat'][:]
lon = f_orig.variables['lon'][:]
LON, LAT = pl.meshgrid(lon, lat)
if x_old.shape[0] != x_new.shape[0]:
    raise Exception('time dim of variables differs between runs')

pl.figure(1)
pl.subplot(2, 1, 1)
pl.pcolor(LON, LAT, x_old[-1].squeeze())
pl.colorbar()
pl.title('old')
pl.subplot(2, 1, 2)
pl.pcolor(LON, LAT, x_new[-1].squeeze())
pl.colorbar()
pl.title('new')
pl.savefig('SOLIN-1950-CMIP6.png')

# COMPARE MODEL INPUT:
# ====================
fo = Dataset(
    '/project/projectdirs/acme/inputdata/atm/cam/solar/Solar_1850control_input4MIPS_c20171101.nc'
)
lda.set_labels(labels)
lda.train(features)

# compute output plot iso-lines
xs = np.array(np.concatenate([x_pos, x_neg]))
ys = np.array(np.concatenate([y_pos, y_neg]))
x1_max = max(1.2 * xs)
x1_min = min(1.2 * xs)
x2_max = max(1.2 * ys)
x2_min = min(1.2 * ys)
x1 = np.linspace(x1_min, x1_max, size)
x2 = np.linspace(x2_min, x2_max, size)
x, y = np.meshgrid(x1, x2)

# classify a dense grid of points and reshape into an image
dense = RealFeatures(np.array((np.ravel(x), np.ravel(y))))
dense_labels = lda.apply(dense).get_labels()
z = dense_labels.reshape((size, size))

pcolor(x, y, z)
contour(x, y, z, linewidths=1, colors='black', hold=True)
axis([x1_min, x1_max, x2_min, x2_max])
connect('key_press_event', util.quit)
show()
# prepare plotting
# pylab.gray()
pylab.ion()

imax = 1000
for i in range(1, imax + 1):
    ## interact with the environment (here in batch mode)
    # experiment.doInteractions(100)
    for j in range(100):
        experiment.doInteractions(1)
        # print( env )
    agent.learn()
    agent.reset()

    # and draw the table
    if i % 10 == 0:
        field = table.params.reshape(81, 4).max(1).reshape(9, 9)
        pylab.pcolor(field)
        pylab.draw()
        pylab.pause(0.000001)
    if i % 100 == 0:
        print(i, "/", imax)

print("Done")
pylab.ioff()
pylab.show()
# got features 6 and 13 - they seem to be linearly correlated

# one more time
yind1 = random.randint(1, 21)
yind2 = random.randint(1, 21)
print "feature1= f" + str(yind1)
print "feature2= f" + str(yind2)
plt.figure()
plt.scatter(df.loc[:, "f" + str(yind1)], df.loc[:, "f" + str(yind2)], c='rg')
plt.xlabel('f' + str(yind1))
plt.ylabel('f' + str(yind2))
plt.title('f' + str(yind1) + ' vs f' + str(yind2))
plt.savefig('features_4.png')
plt.clf()
# got features 20 and 21 - tightly balled

# OK, so we see that some features are correlated (strongly, perhaps?) and
# might be redundant, while others aren't correlated at all. Also interesting...

# print out the correlation matrix
print "Correlation matrix: "
print df.corr()
# features 4, 5, 6, 7, 11, 12, 13 are correlated (> 0.4) with the class data...
# It might be wise to use the least correlated features when classifying;
# a sketch of that pruning step follows the heat map below.

# Let's look at this in a heat map:
R = df.corr()
from pylab import pcolor, show, colorbar, xticks, yticks
from numpy import arange
pcolor(R)
colorbar()
yticks(arange(0.5, 22.5), range(0, 22))
xticks(arange(0.5, 22.5), range(0, 22))
show()
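# A hedged follow-up sketch (not in the original analysis): drop one feature
# out of every pair whose absolute pairwise correlation exceeds a chosen
# threshold (0.9 here, an arbitrary assumption), keeping the rest for
# classification.
import numpy as np
corr = df.corr().abs()
# keep only the upper triangle so each pair is considered once
upper = corr.where(np.triu(np.ones(corr.shape), k=1).astype(bool))
to_drop = [col for col in upper.columns if (upper[col] > 0.9).any()]
df_reduced = df.drop(to_drop, axis=1)
print "dropping:", to_drop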