Code example #1
def plot_missing_data():
    d = open("attr_counter", "rb")
    profile_counter = pickle.load(d)
    attr_counter = pickle.load(d)
    missing = {}
    for key in attr_counter:
        missing[key] = float(2985414 - attr_counter[key]) / 2985414
    from collections import OrderedDict
    from pylab import arange

    ordered_missing = OrderedDict(sorted(missing.items(), key=lambda k: k[1]))
    plt.bar(range(len(ordered_missing.values())), sorted(missing.values()))
    plt.xticks(arange(len(missing)) + 0.4, ordered_missing.keys(), rotation="vertical")
    plt.plot(missing)
    plt.savefig("missing_data.png")
    profile_missing = {}
    for i in profile_counter.keys():
        profile_missing[16 - i] = profile_counter[i]
    plt.bar(profile_missing.keys(), profile_missing.values())
    plt.xticks(arange(len(profile_missing)) + 0.4, profile_missing.keys())

    m_dump = open("missing_data1", "rb")
    p_dump = open("person_missing_data1", "rb")
    missing_data = pickle.load(m_dump)
    person_missing_data = pickle.load(p_dump)
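The function above depends on pickled counter files that are not included here. As a hedged, self-contained sketch of the same plotting pattern (sorted missing-value fractions as a bar chart with rotated tick labels), using made-up data and hypothetical attribute names:

import matplotlib.pyplot as plt
from pylab import arange

# Made-up missing-value fractions; the real code derives these from pickled counters.
missing = {"age": 0.12, "email": 0.55, "phone": 0.80, "name": 0.01}
ordered = sorted(missing.items(), key=lambda kv: kv[1])
plt.bar(range(len(ordered)), [v for _, v in ordered])
plt.xticks(arange(len(ordered)) + 0.4, [k for k, _ in ordered], rotation="vertical")
plt.savefig("missing_data.png")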
Code example #2
def DirectSearch(Range1, Range2, f, N=10.0, tol=.00001, output=False):
    PrevBest = None
    for i in range(100):
        if output: print(Range1, Range2)
        Width1 = Range1[1] - Range1[0]
        xs = m.arange(Range1[0], Range1[1] + Width1/N, Width1 / N)
        Width2 = Range2[1] - Range2[0]
        ys = m.arange(Range2[0], Range2[1] + Width2/N, Width2 / N)
        
        if abs(Width1) < tol or abs(Width2) < tol: return PrevBest
        
        Best, BestVal = (xs[0], ys[0]), f(xs[0], ys[0])
        for x in xs:
            for y in ys:
                val = f(x,y)
                if val < BestVal:
                    BestVal = val
                    Best = (x, y)
                    
        if Best == PrevBest or PrevBest!=None and abs(Best[0]-PrevBest[0])+abs(Best[1]-PrevBest[1])<.000001:
            Range1 = (Best[0] - Width1 / 4, Best[0] + Width1 / 4)
            Range2 = (Best[1] - Width2 / 4, Best[1] + Width2 / 4)
        else:
            Range1 = (Best[0] - Width1 / 2, Best[0] + Width1 / 2)
            Range2 = (Best[1] - Width2 / 2, Best[1] + Width2 / 2)
        PrevBest = Best
        
        if output: print(BestVal, Best)
    
    return Best
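A minimal usage sketch (not from the original source), assuming it runs in the same module as DirectSearch, where the alias m used for arange is NumPy:

import numpy as m  # assumed alias; DirectSearch calls m.arange

def sphere(x, y):
    # simple test function with its minimum at (1, -2)
    return (x - 1.0)**2 + (y + 2.0)**2

best = DirectSearch((-10.0, 10.0), (-10.0, 10.0), sphere, N=10.0, tol=1e-5)
print(best)  # should be close to (1.0, -2.0)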
Code example #3
def plot_multiple_roc(rocList,title='',labels=None, include_baseline=False, equal_aspect=True):
	""" Plots multiple ROC curves on the same chart. 
		Parameters:
			rocList: the list of ROCData objects
			title: The title of the chart
			labels: The labels of each ROC curve
			include_baseline: if True, include the random baseline
			equal_aspect: keep equal aspect for all roc curves
	"""
	pylab.clf()
	pylab.ylim((0,1))
	pylab.xlim((0,1))
	pylab.xticks(pylab.arange(0,1.1,.1))
	pylab.yticks(pylab.arange(0,1.1,.1))
	pylab.grid(True)
	if equal_aspect:
		cax = pylab.gca()
		cax.set_aspect('equal')
	pylab.xlabel("1 - Specificity")
	pylab.ylabel("Sensitivity")
	pylab.title(title)
	if not labels:
		labels = [ '' for x in rocList]
	_remove_duplicate_styles(rocList)
	for ix, r in enumerate(rocList):
		pylab.plot([x[0] for x in r.derived_points], [y[1] for y in r.derived_points], r.linestyle, linewidth=1, label=labels[ix])
	if include_baseline:
		pylab.plot([0.0,1.0], [0.0, 1.0], 'k-', label= 'random')
	if labels:
		pylab.legend(loc='lower right')
		
	pylab.show()
Code example #4
File: hplot.py Project: citterio/physplit
def InvokeMap(coastfile='/media/sda4/map-data/aust-coast-noaa-2000000-1.dat',
		    lllon=80,
		    urlon=166,
		    lllat=-47,
		    urlat=-9,
		    draw_map=True):
    global PYLIB_PATH

    map = Basemap(projection='cyl',
			llcrnrlon=lllon,
			urcrnrlon=urlon,
			llcrnrlat=lllat,
			urcrnrlat=urlat,
			#lat_ts=-35,
			lat_0=-35,
			lon_0=120,
			resolution='l',
			area_thresh=1000.)


    try:
        coast = p.load(coastfile)
        coast_x, coast_y = map(coast[:, 0], coast[:, 1])
        p.plot(coast_x, coast_y, color='black')
    except IOError:
        map.drawcoastlines()

    map.drawmapboundary()
    map.drawmeridians(p.arange(0,360,10),labels=[0,0,1,0])
    map.drawparallels(p.arange(-90,0,10),labels=[1,0,0,0])

    return map
Code example #5
	def plot(self,title='',include_baseline=False,equal_aspect=True):
		""" Method that generates a plot of the ROC curve 
			Parameters:
				title: Title of the chart
				include_baseline: Add the baseline plot line if it's True
				equal_aspect: keep an equal aspect ratio for the plot
		"""
		
		pylab.clf()
		pylab.plot([x[0] for x in self.derived_points], [y[1] for y in self.derived_points], self.linestyle)
		if include_baseline:
			pylab.plot([0.0,1.0], [0.0,1.0],'k-.')
		pylab.ylim((0,1))
		pylab.xlim((0,1))
		pylab.xticks(pylab.arange(0,1.1,.1))
		pylab.yticks(pylab.arange(0,1.1,.1))
		pylab.grid(True)
		if equal_aspect:
			cax = pylab.gca()
			cax.set_aspect('equal')
		pylab.xlabel('1 - Specificity')
		pylab.ylabel('Sensitivity')
		pylab.title(title)
		
		pylab.show()
Code example #6
File: misc.py Project: MMaus/mutils
def get_minmax(data, deg=2):
    """
    returns the position (in fractions of frames) and value of the interpolated extremum

    :args:
        data (iterable): data to be fitted with a <deg> degree polynomial
        deg (int): degree of polynomial to fit

    :returns:
        pos, val: a tuple of floats giving the extremum position (in frames) and its value
    """

    x = arange(len(data))
    p = polyfit(x, data, deg)
    d = (arange(len(p))[::-1] * p)[:-1]
    r = roots(d)
    cnt = 0
    idx = None
    for nr, rx in enumerate(r):
        if isreal(rx):
            idx = nr
            cnt +=1
            if cnt > 1:
                raise ValueError("Too many real roots found." + 
                        "Reduce order of polynomial!")
    x0 = r[idx].real
    return x0, polyval(p, x0)
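A hedged usage sketch, assuming the pylab-style names used above (arange, polyfit, roots, isreal, polyval) are in scope, e.g. via from pylab import *:

# Noiseless parabola with its peak between frames 3 and 4.
data = [-(t - 3.2)**2 for t in range(8)]
pos, val = get_minmax(data, deg=2)
print(pos, val)  # pos should be close to 3.2, val close to 0.0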
Code example #7
File: misc.py Project: MMaus/mutils
def fBM_nd(dims, H, return_mat = False, use_eig_ev = True):
    """
    creates fractional Brownian motion
    parameters: dims is a tuple of the shape of the sample path (nxd); 
                H: Hurst exponent
    this is the slow version of fBM. It might, however, be more precise than
    fBM: sometimes the matrix square root has numerical problems, which can
    introduce inaccuracy.
    use_eig_ev: use eigenvalue decomposition for matrix square root computation
    (faster)
    """
    n = dims[0]
    d = dims[1]
    Gamma = zeros((n,n))
    print ('building ...\n')
    for t in arange(n):
        for s in arange(n):
            Gamma[t,s] = .5*((s+1)**(2.*H) + (t+1)**(2.*H) - abs(t-s)**(2.*H))
    print('rooting ...\n')    
    if use_eig_ev:
        ev,ew = eig(Gamma.real)
        Sigma = dot(ew, dot(diag(sqrt(ev)),ew.T) )
    else:
        Sigma = sqrtm(Gamma)
    if return_mat:
        return Sigma
    v = randn(n,d)
    return dot(Sigma,v)
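A hedged usage sketch, assuming the names used above (zeros, arange, eig, diag, sqrt, dot, randn) come from pylab/NumPy and sqrtm from scipy.linalg:

# Sample a 2-D fractional Brownian path with Hurst exponent 0.75 and plot it.
import pylab as pl
path = fBM_nd((300, 2), 0.75)
pl.plot(path[:, 0], path[:, 1])
pl.show()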
Code example #8
File: cca_interface2.py Project: cvpapero/rqt_cca
    def updateColorTable(self, cItem):
        print "now viz!"+str(cItem.row())+","+str(cItem.column())

        row = cItem.row()
        col = cItem.column()

        pl.clf()
        #pl.ion()
        x = pl.arange(self.dataDimen+1)
        y = pl.arange(self.dataDimen+1)
        X, Y = pl.meshgrid(x, y)
        pl.subplot(1,2,1)
        pl.pcolor(X, Y, self.mWx[row*self.dataMaxRange+col])
        pl.gca().set_aspect('equal')
        pl.colorbar()
        pl.gray()
        pl.title("user 1")

        pl.subplot(1,2,2)
        pl.pcolor(X, Y, self.mWy[row*self.dataMaxRange+col])
        pl.gca().set_aspect('equal')
        pl.colorbar()
        pl.gray()
        pl.title("user 2")
        #pl.tight_layout()

        pl.draw()
        #pl.show()
        pl.show(block=False) 
Code example #9
def makecoords(expr, BL,UR,gridspacing=.1):
    from pylab import arange, array
    X=arange(BL[0],UR[0],gridspacing)
    Y=arange(BL[1],UR[1],gridspacing)
    fn=deriv(expr)
    Z=array([[fn(x=x,y=y) for x in X] for y in Y])
    return X,Y,Z
Code example #10
File: Elements.py Project: clemrom/pyoptic
 def surface(self) :
     x = pl.arange(-self.dimension[0],self.dimension[0]+1e-8,self.dimension[0]/5)
     y = pl.arange(-self.dimension[1],self.dimension[1]+1e-8,self.dimension[1]/5)
     xx,yy = pl.meshgrid(x,y)
     zz = 1/self.placement.orientation[2]*(pl.linalg.dot(self.placement.orientation,self.placement.location)-self.placement.orientation[0]*xx-self.placement.orientation[1]*yy)
     
     return [xx,yy,zz]        
Code example #11
def visualize_labeled_z():
	x_batch, label_batch = sample_x_and_label_from_data_distribution(len(dataset), sequential=True)
	z_batch = gen(x_batch, test=True)
	z_batch = z_batch.data
	# if z_batch[0].shape[0] != 2:
	# 	raise Exception("Latent code vector dimension must be 2.")

	fig = pylab.gcf()
	fig.set_size_inches(20.0, 16.0)
	pylab.clf()
	colors = ["#2103c8", "#0e960e", "#e40402","#05aaa8","#ac02ab","#aba808","#151515","#94a169", "#bec9cd", "#6a6551"]
	for n in range(z_batch.shape[0]):
		result = pylab.scatter(z_batch[n, 0], z_batch[n, 1], c=colors[label_batch[n]], s=40, marker="o", edgecolors='none')

	classes = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
	recs = []
	for i in range(0, len(colors)):
	    recs.append(mpatches.Rectangle((0, 0), 1, 1, fc=colors[i]))

	ax = pylab.subplot(111)
	box = ax.get_position()
	ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
	ax.legend(recs, classes, loc="center left", bbox_to_anchor=(1.1, 0.5))
	pylab.xticks(pylab.arange(-4, 5))
	pylab.yticks(pylab.arange(-4, 5))
	pylab.xlabel("z1")
	pylab.ylabel("z2")
	pylab.savefig("%s/labeled_z.png" % args.visualization_dir)
Code example #12
File: analysis.py Project: adamar/wikidump
def length_stats_chart(path, prefixes, sortby=1):
  stats = []
  for prefix in prefixes:
    med, m,s = length_stats(prefix)
    stats.append((prefix,med,m,s))

  stats.sort(key=operator.itemgetter(sortby))
  prefixes, med_list, mean_list, std_list = zip(*stats)

  blockSize = 8 
  ind = p.arange(0, blockSize*len(prefixes), blockSize) # y location for groups
  height = 3 # bar height 

  p3 = p.barh(ind, std_list, 2   * height, color = 'b', linewidth = 0)
  p2 = p.barh(ind, med_list, height, color = 'g', linewidth = 0)
  p1 = p.barh(ind+height, mean_list, height, color = 'r', linewidth = 0)
  
  p.ylim(-height, len(prefixes) * blockSize)
  yfontprop = FontProperties(size=4)
  xfontprop = FontProperties(size='smaller')
  p.xlabel('Unicode Codepoints')
  p.ylabel('Language Code')
  p.title('Descriptive Statistics for Document Lengths')
  p.gca().yaxis.tick_left()
  p.yticks(ind+height, prefixes, fontproperties = yfontprop)
  xmin, xmax = p.xlim()
  p.xticks( p.arange(xmin,xmax,1000),fontproperties = xfontprop)
  p.gca().xaxis.grid(linestyle = '-', linewidth=0.15)
  p.legend((p1[0], p2[0], p3[0]), ('Mean','Median','Standard Deviation'), prop = xfontprop, loc = 'lower right' )

  p.savefig(path, dpi=300)
  p.close()
  p.clf()
Code example #13
File: example.py Project: astrofanlee/project_TL
def redshift():
    """
    Evolution with redshift of matter power spectrum
    """
    zs = M.arange(0.,5.,2.)

    for z in zs:
        print(z)
        c = pt.Camb(hubble = 70., ombh2 = 0.05*(0.7)**2, omch2 = 0.25*(0.7)**2,transfer_redshift = [z])
        c.run()
        ps = pt.PowerSpectrum(c.cp)
        c.kextend(-10,60) #To ensure accurate sigma(r) -- if it doesn't, a warning will ensue
        pt.normalizePk(c,0.8*ps.d1(z)/ps.d1(0.)) #sigma_8 at redshift z
        
        #Sheth-Tormen
        h = halo.HaloModel(c,st_big_a = 0., st_little_a=0.707, stq = 0.3, k = 10**M.arange(-2,2.01,0.2),massdivsperdex=5)

        h.pmm = halo.getHaloPknl(c,h)
        M.loglog(h.k, h.pmm, label='z='+str(z))
        M.loglog(h.k, h.pk,'k:',label='linear')

        cp_halofit = c.cp
        cp_halofit['do_nonlinear'] = 1 # Halofit (Smith et al) fit
        chalofit = pt.Camb(cambParam=cp_halofit)
        chalofit.run()
        wheretoplot = N.where(chalofit.k > 1e-2)[0]
        M.loglog(chalofit.k[wheretoplot[::10]],chalofit.pk[wheretoplot[::10]],'--',label='halofit')

    M.legend()
    M.show()
Code example #14
 def _pvoc(self, X_hat, Phi_hat=None, R=None):
     """
     ::
       a phase vocoder - time-stretch
       inputs:
         X_hat - estimate of signal magnitude
         [Phi_hat] - estimate of signal phase
         [R] - resynthesis hop ratio
       output:
         updates self.X_hat with modified complex spectrum
     """
     N = self.nfft
     W = self.wfft
     H = self.nhop
     R = 1.0 if R is None else R
     dphi = (2*P.pi * H * P.arange(N/2+1)) / N
     print "Phase Vocoder Resynthesis...", N, W, H, R
     A = P.angle(self.STFT) if Phi_hat is None else Phi_hat
     phs = A[:,0]
     self.X_hat = []
     n_cols = X_hat.shape[1]
     t = 0
     while P.floor(t) < n_cols:
         tf = t - P.floor(t)            
         idx = P.arange(2)+int(P.floor(t))
         idx[1] = n_cols-1 if t >= n_cols-1 else idx[1]
         Xh = X_hat[:,idx]
         Xh = (1-tf)*Xh[:,0] + tf*Xh[:,1]
         self.X_hat.append(Xh*P.exp( 1j * phs))
         U = A[:,idx[1]] - A[:,idx[0]] - dphi
         U = U - P.np.round(U/(2*P.pi))*2*P.pi
         phs += (U + dphi)
         t += P.randn()*P.sqrt(PVOC_VAR*R) + R # 10% variance
     self.X_hat = P.np.array(self.X_hat).T
Code example #15
 def _pvoc2(self, X_hat, Phi_hat=None, R=None):
     """
     ::
       alternate (batch) implementation of phase vocoder - time-stretch
       inputs:
         X_hat - estimate of signal magnitude
         [Phi_hat] - estimate of signal phase
         [R] - resynthesis hop ratio
       output:
         updates self.X_hat with modified complex spectrum
     """
     N, W, H = self.nfft, self.wfft, self.nhop
     R = 1.0 if R is None else R
     dphi = P.atleast_2d((2*P.pi * H * P.arange(N/2+1)) / N).T
     print "Phase Vocoder Resynthesis...", N, W, H, R
     A = P.angle(self.STFT) if Phi_hat is None else Phi_hat
     U = P.diff(A,1) - dphi
     U = U - P.np.round(U/(2*P.pi))*2*P.pi
     n_cols = X_hat.shape[1]  # n_cols was undefined here; defined as in _pvoc
     t = P.arange(0,n_cols,R)
     tf = t - P.floor(t)
     phs = P.c_[A[:,0], U] 
     phs += U[:,idx[1]] + dphi # Problem, what is idx ?
     Xh = (1-tf)*Xh[:-1] + tf*Xh[1:]
     Xh *= P.exp( 1j * phs)
     self.X_hat = Xh
Code example #16
def makeGridPlots( histos, filename, ext="png" ) :
    # old code: doesn't work
    nplot = len( histos.keys() )
    sh = sqrt( nplot )
    fl = floor( sh )
    ce = ceil( sh )
    if sh - fl > 0.5 :
        fl = ce

    f = r2m.RootFile(filename)
    hists = [ f.get(hist) for hist in sorted(histos.keys()) ]
    opts  = [ histos[key] for key  in sorted(histos.keys()) ]

    fig = plt.figure(figsize=[ce*(sml_plot_size[0]+fl),fl*(sml_plot_size[1]+fl)])
    #fig.subplots_adjust(left=1, right=2, top=2, bottom=1)


    ax_list = []
    for h, (hist,opt) in enumerate(zip(hists,opts)) :
        ax_list.append( fig.add_subplot(ce, fl, h + 1) )
        ax_list[-1].set_xlabel( hist.xlabel )
        ax_list[-1].set_ylabel( hist.ylabel )
        xmin, xmax = hist.xedges[0], hist.xedges[-1]
        ymin, ymax = hist.yedges[0], hist.yedges[-1]
        plt.axis([xmin, xmax, ymin, ymax])
        hist.contour( levels=opt["contours"], colors = opt["colors"], linewidths=2 )
        hist.colz()
        plt.axis([xmin, xmax, ymin, ymax])
        plt.clim(opt["zrange"][0],opt["zrange"][1])
        pylab.yticks(pylab.arange(ymin, ymax, opt["yticks"]))
        pylab.xticks(pylab.arange(xmin, xmax, opt["xticks"]))
        ax_list[-1].set_title( opt["title"] )

    #plt.show()
    plt.savefig( grid_name( filename ) + ".%s" % ext )
Code example #17
    def _make_log_freq_map(self):
        """
        ::

            For the given ncoef (bands-per-octave) and nfft, calculate the center frequencies
            and bandwidths of linear and log-scaled frequency axes for a constant-Q transform.
        """
        fp = self.feature_params
        bpo = float(self.nbpo) # Bands per octave
        self._fftN = float(self.nfft)
        hi_edge = float( self.hi )
        lo_edge = float( self.lo )
        f_ratio = 2.0**( 1.0 / bpo ) # Constant-Q bandwidth
        self._cqtN = float( P.floor(P.log(hi_edge/lo_edge)/P.log(f_ratio)) )
        self._dctN = self._cqtN
        self._outN = float(self.nfft/2+1)
        if self._cqtN<1: print "warning: cqtN not positive definite"
        mxnorm = P.empty(self._cqtN) # Normalization coefficients        
        fftfrqs = self._fftfrqs #P.array([i * self.sample_rate / float(self._fftN) for i in P.arange(self._outN)])
        logfrqs=P.array([lo_edge * P.exp(P.log(2.0)*i/bpo) for i in P.arange(self._cqtN)])
        logfbws=P.array([max(logfrqs[i] * (f_ratio - 1.0), self.sample_rate / float(self._fftN)) 
                         for i in P.arange(self._cqtN)])
        #self._fftfrqs = fftfrqs
        self._logfrqs = logfrqs
        self._logfbws = logfbws
        self._make_cqt()
Code example #18
def initialise_axes(axes,hists,options,filename=None):
    # hists is a list with potentially only one element
#    try: hists[0]
#    except:TypeError , hists=[hists]
    xmins  =    [ hist.xedges[0]  for hist in hists ] 
    ymins  =    [ hist.yedges[0]  for hist in hists ]
    xmaxs  =    [ hist.xedges[-1] for hist in hists ]
    ymaxs  =    [ hist.yedges[-1] for hist in hists ]
    xmin, xmax, ymin, ymax = min(xmins), min(xmaxs), min(ymins), max(ymaxs)
    axes.set_xlabel( hists[0].xlabel )
    axes.set_ylabel( hists[0].ylabel )
    plt.axis( [xmin, xmax, ymin, ymax] )
    if options.get("xlog") :
        axes.set_xscale('log')
    if options.get("ylog") :
        axes.set_yscale('log')
    if options.get('yticks') : 
        pylab.yticks(pylab.arange( ymin, ymax*1.001, options["yticks"] ) )
    if options.get('xticks') : 
        pylab.xticks(pylab.arange( xmin, xmax*1.001, options["xticks"] ) )
    if options.get('title') :    
        if filename:
           # axes.set_title( file_dict.get(filename,{}).get('title'))
            title=file_dict.get(filename,{}).get('title')
            plt.text(0.5, 1.05, title, ha='center', fontsize=30,transform=axes.transAxes)
Code example #19
def observation_locns(spacestep,estimation_field_width,Delta_s):
	'''Define the center of sensors along x and y
		
		Arguments:
		----------
			spacestep: the spatial step in the simulated field
			estimation_field_width: the width of the observed field
			Delta_s: distance between sensors in mm

		Returns
		-------
			observation_locns_mm:
				the observation location along x or y directions in mm'''
		

	steps_in_field = (2*estimation_field_width)/spacestep + 1;
	inv_spacestep = 1./spacestep;						
	Nspacestep_in_observed_field = inv_spacestep*estimation_field_width+1	

	observation_offest = estimation_field_width/2;     # mm
	observation_offset_units = observation_offest / spacestep -1;
	field_space=pb.arange(-estimation_field_width,estimation_field_width+spacestep,spacestep)
	spatial_location_num=(len(field_space))**2
	Delta_s_units = Delta_s/spacestep	
	nonsymmetric_obs_location_units = pb.arange(1,Nspacestep_in_observed_field,Delta_s_units)
	offset = ((Nspacestep_in_observed_field - nonsymmetric_obs_location_units[-1])/2.)
	symmetricobslocation_units = nonsymmetric_obs_location_units + offset + observation_offset_units

	observation_locs_mm = symmetricobslocation_units*spacestep - estimation_field_width
	return observation_locs_mm
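A hedged usage sketch with round numbers (sensors every 1.5 mm on a 0.5 mm simulation grid over a 20 mm estimation field), assuming import pylab as pb as required by the function body:

centres = observation_locns(spacestep=0.5, estimation_field_width=20, Delta_s=1.5)
print(centres)  # sensor centres in mm along one axis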
Code example #20
File: ifit.py Project: duuucccan23/fermiqcd
    def plot2d(self,key1='a',key2='b',scale1=0.1,scale2=0.1):
        """
        under development
        plot the least_squares around the minimum
        assuming a quadratic approximation given by the hessian
        """
        import pylab
        key1, key2 = key2, key1
        keys = sorted(self.last_variables.keys())
        nv = len(self.last_variables)
        for k in range(len(keys)):
            if keys[k] == key1: i, vi = k, self.last_variables[key1]
            if keys[k] == key2: j, vj = k, self.last_variables[key2]
        ax = pylab.arange(vi*(1.0-10*scale1), vi*(1.0+12*scale1), vi*scale1)[:21]
        ay = pylab.arange(vj*(1.0-10*scale2), vj*(1.0+12*scale2), vj*scale2)[:21]

        dv = [0.0]*nv
        z = [[0 for ik in ax] for jk in ay]
        i0 = 0
        for x in ax:
            j0 = 0
            for y in ay:
                dv[i], dv[j] = x-vi, y-vj
                least_squares = 0.0
                for ik in range(nv):
                    for jk in range(nv):
                        least_squares += 0.5*dv[ik]*dv[jk]*self.last_hessian[ik][jk]
                z[i0][j0] = least_squares - self.last_least_squares
                j0 += 1
            i0 += 1
        pylab.contour(z, extent=(min(ay), max(ay), min(ax), max(ax)))
        pylab.show()
Code example #21
File: view.py Project: mcvidomi/poim2motif
def figurepoimsimple_small(poim, l, start, savefile, show):
    R = poim

    py.figure(figsize=(14, 12))
    motivelen = int(np.log(len(poim)) / np.log(4))
    ylabel = []
    for i in range(int(math.pow(4, motivelen))):
        label = []
        index = i
        for j in range(motivelen):
            label.append(index % 4)
            index = int(index / 4)
        label.reverse()
        ylabel.append(veclisttodna(label))
    py.pcolor(R[:, start:start + l])
    cb=py.colorbar()
    for t in cb.ax.get_yticklabels():
     t.set_fontsize(40)   
    diff = int((l / 5)) - 1
    x_places = py.arange(0.5, l, diff)
    xa = np.arange(start, start + l, diff)
    diff = int((l / 4)) 
    x_places = py.arange(0.5, l , diff)
    xa = np.arange(start + 1, start + 1 + l, diff)
    py.xlabel("Position", fontsize=46)
    py.ylabel("Motif", fontsize=46)
    py.yticks(np.arange(math.pow(4, motivelen)) + 0.5, (ylabel),fontsize=40)   
    py.xticks(x_places, (xa.tolist()),fontsize=40)
    if savefile != "":
        py.savefig(savefile)  
    print "the poim should show up here"
    if show:
        py.show()
Code example #22
File: PyRoc.py Project: alex-attinger/fc_attributes
def plot_multiple_roc(rocList,title='',labels=None, include_baseline=False, equal_aspect=True,plot_average =True):
    pylab.figure()
    pylab.clf()
    pylab.ylim((0,1))
    pylab.xlim((0,1))
    pylab.xticks(pylab.arange(0,1.1,.1))
    pylab.yticks(pylab.arange(0,1.1,.1))
    pylab.grid(True)
    if equal_aspect:
        cax = pylab.gca()
        cax.set_aspect('equal')
    pylab.xlabel("1 - Specificity")
    pylab.ylabel("Sensitivity")
    pylab.title(title)
    if not labels:
        labels = [ '' for x in rocList]
    _remove_duplicate_styles(rocList)
    for ix, r in enumerate(rocList):
        pylab.plot([x[0] for x in r.derived_points], [y[1] for y in r.derived_points], r.linestyle, linewidth=1, label=labels[ix])
    if include_baseline:
        pylab.plot([0.0,1.0], [0.0, 1.0], 'k-', label= 'random')
    if labels:
        pylab.legend(loc='lower right')
    if plot_average:
        stepsize=.1
        av = multiple_roc_average(rocList,binstep=stepsize)
        x=np.arange(stepsize/2,1,stepsize)
        pylab.plot(x,[np.average(a) for a in av])
Code example #23
def adjuster(file_name, threshold=0.9):
    record = collections.OrderedDict()
    total = 0
    for data in open(file_name).readlines():
        curr = data.strip().split('\t')
        record[curr[0]] = int(curr[1])
        total += int(curr[1])

    curr_count = 0
    curr_list = list()
    ratio_list = list()
    for item in record.items():
        curr_count += item[1]
        curr_list.append(item[0])
        ratio_list.append(curr_count / float(total))
        if curr_count / float(total) >= threshold:
            break

    x = pylab.arange(1, len(ratio_list) + 1, 1)
    plt.plot(x, ratio_list)
    plt.show()

    dx = 1
    dy = diff(ratio_list) / dx
    print(len(dy))
    print(len(x))
    x = pylab.arange(1, len(ratio_list), 1)
    plt.plot(x, dy)
    plt.show()

    print(dy)


    return curr_list
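A hedged usage sketch: write a tiny tab-separated "name<TAB>count" file and ask for the items covering 90% of the total. Assumes the module-level imports the function relies on (collections, pylab, matplotlib.pyplot as plt, and diff from pylab) are in place; the file name is hypothetical.

with open("counts.tsv", "w") as fh:  # hypothetical file name
    for name, count in [("a", 50), ("b", 30), ("c", 15), ("d", 5)]:
        fh.write("%s\t%d\n" % (name, count))

kept = adjuster("counts.tsv", threshold=0.9)
print(kept)  # expected: ['a', 'b', 'c']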
Code example #24
def test_expert_model_level_value():
    d = data.ModelData()
    ages=pl.arange(101)

    # create model with no priors
    vars = {}
    vars.update(age_pattern.age_pattern('test', ages, knots=pl.arange(0,101,5), smoothing=.01))
    vars.update(expert_prior_model.level_constraints('test', {}, vars['mu_age'], ages))

    # fit model
    m = mc.MCMC(vars)
    m.sample(3)


    # create model with expert priors
    parameters = {}
    parameters['level_value'] = dict(value=.1, age_below=15, age_above=95)
    parameters['level_bound'] = dict(upper=.01, lower=.001)
    vars = {}
    vars.update(age_pattern.age_pattern('test', ages, knots=pl.arange(0,101,5), smoothing=.01))
    vars.update(expert_prior_model.level_constraints('test', parameters, vars['mu_age'], ages))

    # fit model
    m = mc.MCMC(vars)
    m.sample(3)
Code example #25
File: bn2roc.py Project: jhonatanoliveira/aGrUM_iSep
def drawROC(points,zeTitle,zeFilename,visible,show_fig,save_fig=True,
            special_point=None,special_value=None,special_label=None):
    AUC=computeAUC(points)
    import pylab

    pylab.clf()
    pylab.grid(color='#aaaaaa', linestyle='-', linewidth=1,alpha=0.5)

    pylab.plot([x[0] for x in points], [y[1] for y in points], '-', linewidth=3,color="#000088",zorder=3)
    pylab.fill_between([x[0] for x in points], [y[1] for y in points],0,color='0.9')
    pylab.plot([0.0,1.0], [0.0, 1.0], '-',color="#AAAAAA")

    pylab.ylim((-0.01,1.01))
    pylab.xlim((-0.01,1.01))
    pylab.xticks(pylab.arange(0,1.1,.1))
    pylab.yticks(pylab.arange(0,1.1,.1))
    pylab.grid(True)

    ax=pylab.gca()
    r = pylab.Rectangle((0,0), 1, 1, edgecolor='#444444', facecolor='none',zorder=1)
    ax.add_patch(r)
    [spine.set_visible(False) for spine in ax.spines.values()]

    if len(points)<10:
      for i in range(1,len(points)-1):
        pylab.plot(points[i][0],points[i][1],'o',color="#000066",zorder=6)

    pylab.xlabel('False positive rate')
    pylab.ylabel('True positive rate')

    if special_point is not None:
        pylab.plot(special_point[0],special_point[1],'o',color="#DD9999",zorder=6)
        if special_value is not None:
            pylab.text(special_point[0]+0.01,special_point[1]-0.01, special_value,
                       {'color' : '#DD5555', 'fontsize' : 10},
                       horizontalalignment = 'left',
                       verticalalignment = 'top',
                       rotation = 0,
                       clip_on = False)
    if special_label is not None:
        if special_label!="":
            labels=[special_label]
            colors=['#DD9999']
            circles=[pylab.Circle((0, 0), 1, fc=colors[0])]
            legend_location = 'lower right'
            pylab.legend(circles, labels, loc=legend_location)

    pylab.text(0.5, 0.3,'AUC=%f'%AUC,
     horizontalalignment='center',
     verticalalignment='center',
     fontsize=18)

    pylab.title(zeTitle)

    if save_fig:
        pylab.savefig(zeFilename,dpi=300)
        print("\n result in "+zeFilename)

    if show_fig:
        pylab.show()
Code example #26
def multi_plot_search_on_eval_metrics(roc_search_em, score_thresholds, labels, metrics, line_styles, query_id='Query'):
    import pylab
    
    pylab.clf()
    pylab.xlim((0, 1))
    pylab.xticks(pylab.arange(0,1.1,.1))
    pylab.ylim((-0.5, 1))
    pylab.yticks(pylab.arange(0,1.1,.1))
    pylab.grid(True)
    pylab.xlabel('Score Thresholds')
    
    for iix, score_key in enumerate(metrics): 
        for ix, eval_dict in enumerate(roc_search_em):
            pylab.plot(score_thresholds, eval_dict[score_key], 
                       linewidth=2, label=labels[ix] + '(%s)' % score_key, 
                       color=METRIC_COLORS[ix], linestyle=line_styles[iix])
        
    pylab.ylabel(' '.join(METRICS_DICT[score_key] for score_key in metrics))
    pylab.title(' '.join(METRICS_DICT[score_key] for score_key in metrics))
    
    if labels: pylab.legend(loc='lower left', prop={'size':9})

    eval_file_name = '%s_%s.png' % (query_id, '_'.join(METRICS_DICT[score_key] for score_key in metrics))
    pylab.savefig(eval_file_name, dpi=300, bbox_inches='tight', pad_inches=0.1)

    print('Saved figure: ', eval_file_name)

    print('----------------------------------------------------------------------------------')
Code example #27
File: cpwing.py Project: goriccardo/bem2dlib
def plotcp(nelem, chord, thick, u):
    # Geometry
    xnode = geomwing(nelem, chord, thick)
    TEat1 = 1
    dt = 1.0
    # Boundary conditions
    chisrf = bcondvel(xnode, u)
    # Integral equation solution
    B, C = srfmatbc(xnode)
    phisrf = solvephi(B, C, chisrf)
    cpoint = collocation(xnode)
    # Pressure figure
    spl = subplot(111)
    spl.set_aspect("equal", "box")
    plotgeom(xnode)
    # Pressure calculation and plotting
    U = u.reshape((1, 2))
    cp = calccp(xnode, TEat1, dt, U, phisrf, chisrf)
    cl = calchalfcl(xnode, TEat1, dt, U, phisrf, chisrf)
    print(cl)
    plot(cpoint[: nelem // 2 + 1, 0], cp[: nelem // 2 + 1, 0])
    title(r"Stationary pressure coefficient, $c_L = %9.3f$" % cl[0])
    ylabel(r"$c_p$", size=18)
    xlabel(r"$x/c$", size=18)
    xticks(arange(-0.2, 1.3, 0.2))
    yticks(arange(-0.8, 1.3, 0.2))
Code example #28
File: plot.py Project: jorjuato/IORstats
def bars(data, figname, fig = None):
    if fig == None:
        P.ioff()
        fig = P.figure()
        ax  = fig.add_subplot(111)
        save = True
    else:
        ax = fig
        fig = ax.get_figure()
        save = False

    ind = P.arange(len(data[0][0]))  # the x locations for the groups
    width = 0.20       # the width of the bars
    #colors = ['r','b','g','y']

    bar_groups = [  ax.bar( ind+width*i, grp[0], width,color=colors[i],\
                            yerr=grp[1], ecolor='k')\
                    for i,grp in enumerate(data)]
    barsLegend = tuple([grp[0] for grp in bar_groups])
    etiquetas = [str(i) for i in ctoa_list]
    P.legend( barsLegend, ctoa_list, shadow=True)
    #ax.set_title('Incremento de RT frente a CTD en distintos CTOAS', font, fontsize=12)
    #ax.set_xlabel('CTD distance (cm)', font)
    #ax.set_ylabel('RT increment (ms)', font)
    ax.set_xticks(ind+width)
    ax.set_xticklabels( ctd_names[:-1] )
    ax.set_yticks(P.arange(-50,50,5))
    ax.xaxis.set_ticks_position("bottom")
    ax.yaxis.set_ticks_position("left")
    ax.set_xlim(-width,len(ind))
    ax.set_ylim(-60,60)
    if save == True:
        fig.savefig(figname+'bar'+graphext,dpi=dpi)
        P.close(fig)
Code example #29
File: Intensity.py Project: clemrom/pyoptic
    def fresnelConvolutionTransform(self,d) :
        # make intensity distribution
        i2 = Intensity2D(self.nx,self.startx,self.endx,
                         self.ny,self.starty,self.endy,
                         self.wl)       

        # FT on inital distribution 
        u1ft = pl.fft2(self.i)

        # 2d convolution kernel
        k = 2*pl.pi/i2.wl
        
        # make spatial frequency matrix
        maxsfx = 2*pl.pi/self.dx
        maxsfy = 2*pl.pi/self.dy
        
        dsfx = 2*maxsfx/(self.nx)
        dsfy = 2*maxsfy/(self.ny)
        
        self.sfx = pl.arange(-maxsfx/2,maxsfx/2+1e-15,dsfx/2)
        self.sfy = pl.arange(-maxsfy/2,maxsfy/2+1e-15,dsfy/2)

        [self.sfxgrid, self.sfygrid] = pl.fftshift(pl.meshgrid(self.sfx,self.sfy))
                
        # make convolution kernel 
        kern = pl.exp(1j*d*(self.sfxgrid**2+self.sfygrid**2)/(2*k))
        
        # apply convolution kernel and invert
        i2.i = pl.ifft2(kern*u1ft) 

        return i2
Code example #30
File: exercise_1.py Project: latencie/Beeldbewerken
def main():
    # Create the grid
    x = arange(-100, 101)
    y = arange(-100, 101)

    # Create the meshgrid
    Y, X = meshgrid(x, y)
    A = 1
    B = 2
    V = 6*pi / 201
    W = 4*pi / 201
    F = A*sin(V*X) + B*cos(W*Y)
    Fx = V*A*cos(V*X)
    Fy = W*B*-sin(W*Y)

    # Show the images
    show_image(F)
    show_image(Fx)
    show_image(Fy)

    # Create the grid for the quivers
    xs = arange(-100, 101, 10)
    ys = arange(-100, 101, 10)

    # Here we determine the direction of the quivers
    Ys, Xs = meshgrid(ys, xs)
    FFx = V*A*cos(V*Xs)
    FFy = W*B*-sin(W*Ys)

    # Draw the quivers and the image
    clf()
    imshow(F, cmap=cm.gray, extent=(-100, 100, -100, 100))
    quiver(ys, xs, -FFy, FFx, color='red')
    show()
Code example #31
    def show_iq(self, out, pr, q=None):
        """
            Display computed I(q)
        """
        qtemp = pr.x
        if q is not None:
            qtemp = q

        # Make a plot
        maxq = -1
        for q_i in qtemp:
            if q_i > maxq:
                maxq = q_i

        minq = 0.001

        # Check for user min/max
        if pr.q_min is not None:
            minq = pr.q_min
        if pr.q_max is not None:
            maxq = pr.q_max

        x = pylab.arange(minq, maxq, maxq / 301.0)
        y = np.zeros(len(x))
        err = np.zeros(len(x))
        for i in range(len(x)):
            value = pr.iq(out, x[i])
            y[i] = value
            try:
                err[i] = math.sqrt(math.fabs(value))
            except:
                err[i] = 1.0
                print("Error getting error", value, x[i])

        new_plot = Data1D(x, y)
        new_plot.symbol = GUIFRAME_ID.CURVE_SYMBOL_NUM
        new_plot.name = IQ_FIT_LABEL
        new_plot.xaxis("\\rm{Q}", 'A^{-1}')
        new_plot.yaxis("\\rm{Intensity} ", "cm^{-1}")
        title = "I(q)"
        new_plot.title = title

        # If we have a group ID, use it
        if 'plot_group_id' in pr.info:
            new_plot.group_id = pr.info["plot_group_id"]
        new_plot.id = IQ_FIT_LABEL
        self.parent.update_theory(data_id=self.data_id, theory=new_plot)

        wx.PostEvent(self.parent, NewPlotEvent(plot=new_plot, title=title))

        # If we have used slit smearing, plot the smeared I(q) too
        if pr.slit_width > 0 or pr.slit_height > 0:
            x = pylab.arange(minq, maxq, maxq / 301.0)
            y = np.zeros(len(x))
            err = np.zeros(len(x))
            for i in range(len(x)):
                value = pr.iq_smeared(out, x[i])
                y[i] = value
                try:
                    err[i] = math.sqrt(math.fabs(value))
                except:
                    err[i] = 1.0
                    print("Error getting error", value, x[i])

            new_plot = Data1D(x, y)
            new_plot.symbol = GUIFRAME_ID.CURVE_SYMBOL_NUM
            new_plot.name = IQ_SMEARED_LABEL
            new_plot.xaxis("\\rm{Q}", 'A^{-1}')
            new_plot.yaxis("\\rm{Intensity} ", "cm^{-1}")
            # If we have a group ID, use it
            if 'plot_group_id' in pr.info:
                new_plot.group_id = pr.info["plot_group_id"]
            new_plot.id = IQ_SMEARED_LABEL
            new_plot.title = title
            self.parent.update_theory(data_id=self.data_id, theory=new_plot)
            wx.PostEvent(self.parent, NewPlotEvent(plot=new_plot, title=title))
Code example #32
"""
Demonstrates the"reshape with non-integer intevals" and the optimization of
the reshape interval by minimizing the standard deviation of the overlap.
"""

import pylab as p
from segment_psp_trace import segment, optimize_segment

if __name__ == '__main__':
    data = p.np.load("traces/spiketrace_1.npz")["arr_0"][:2400000]

    dt = 1.
    p.figure()
    m = lambda interval: p.sum(p.var(segment(data, dt, interval), axis=0))

    shift_values = p.arange(3841, 3842, .01)
    p.plot(shift_values, [m(s) for s in shift_values], 'x')
    p.show()

    r = optimize_segment(data, 1., 3840)
    print(r)

    p.figure()
    seg = segment(data, 1., r)
    print(len(seg))
    mean = p.mean(seg, axis=0)
    std = p.std(seg, axis=0)
    dt = 1.
    time = p.arange(len(std)) * dt
    p.plot(time, mean, "r-")
    p.fill_between(time, mean - std, mean + std, alpha=.2)
Code example #33
m = Basemap(resolution='c', projection='sinu', lon_0=0)
ax = fig.add_axes([0.1, 0.1, 0.7, 0.7])
# make a filled contour plot.
x, y = m(lons, lats)
CS = m.contour(x, y, hgt, 15, linewidths=0.5, colors='k')
CS = m.contourf(x, y, hgt, 15, cmap=cm.jet)
l, b, w, h = ax.get_position()
cax = axes([l + w + 0.075, b, 0.05, h])  # setup colorbar axes
colorbar(drawedges=True, cax=cax)  # draw colorbar
axes(ax)  # make the original axes current again
# draw coastlines and political boundaries.
m.drawcoastlines()
m.drawmapboundary()
m.fillcontinents()
# draw parallels and meridians.
parallels = arange(-60., 90, 30.)
m.drawparallels(parallels, labels=[1, 0, 0, 0])
meridians = arange(-360., 360., 30.)
m.drawmeridians(meridians)
title('Sinusoidal Filled Contour Demo')
print('plotting with sinusoidal basemap ...')

# create new figure
fig = figure()
# setup of mollweide basemap
m = Basemap(resolution='c', projection='moll', lon_0=0)
ax = fig.add_axes([0.1, 0.1, 0.7, 0.7])
# make a filled contour plot.
x, y = m(lons, lats)
CS = m.contour(x, y, hgt, 15, linewidths=0.5, colors='k')
CS = m.contourf(x, y, hgt, 15, cmap=cm.jet)
Code example #34
 out(a1,a2)
endin
schedule(1,0,1,0dbfs*.9,60)
'''

cs = csound.Csound()
cs.setOption('-n')
cs.compileOrc(code)
cs.start()
spout = cs.spout()
sr = cs.sr()
N = int(sr) // 10
sig = pl.zeros(N)
n = 0
for i in range(0, int(len(sig) / (2 * cs.ksmps()))):
    cs.performKsmps()
    for i in spout:
        sig[n] = i / cs.get0dBFS()
        n += 1

pl.figure(figsize=(8, 3))
pl.title('PWM')
t = pl.arange(0, N // 2) / float(sr)
pl.xlim(0, max(t))
pl.ylim(-1.1, 1.1)
pl.xlabel("time (s)")
pl.plot(t, sig[1::2], 'k--', linewidth=2)
pl.plot(t, sig[0::2])

pl.show()
Code example #35
from scipy.stats import binom
import pylab as plt
n, p = 5, 0.4
x = plt.arange(6)
y = binom.pmf(x, n, p)
plt.subplot(121)
plt.plot(x, y, 'ro')
plt.vlines(x, 0, y, 'k', lw=3, alpha=0.5)
plt.subplot(122)
plt.stem(x, y, use_line_collection=True)
plt.show()
Code example #36
#Py Matplotlib Test 3

from pylab import show,arange,sin, plot, pi
t = arange(0,2,0.01)
s = sin(2*pi*t)
plot(t,s)

show()
Code example #37
    def plot(self, to_plot=None, do_save=None, fig_path=None, fig_args=None, plot_args=None,
             scatter_args=None, axis_args=None, legend_args=None, as_dates=True, dateformat=None,
             interval=None, n_cols=1, font_size=18, font_family=None, use_grid=True, use_commaticks=True,
             log_scale=False, do_show=True, verbose=None):
        '''
        Plot the results -- can supply arguments for both the figure and the plots.

        Args:
            to_plot (dict): Nested dict of results to plot; see default_sim_plots for structure
            do_save (bool or str): Whether or not to save the figure. If a string, save to that filename.
            fig_path (str): Path to save the figure
            fig_args (dict): Dictionary of kwargs to be passed to pl.figure()
            plot_args (dict): Dictionary of kwargs to be passed to pl.plot()
            scatter_args (dict): Dictionary of kwargs to be passed to pl.scatter()
            axis_args (dict): Dictionary of kwargs to be passed to pl.subplots_adjust()
            legend_args (dict): Dictionary of kwargs to be passed to pl.legend()
            as_dates (bool): Whether to plot the x-axis as dates or time points
            dateformat (str): Date string format, e.g. '%B %d'
            interval (int): Interval between tick marks
            n_cols (int): Number of columns of subpanels to use for subplot
            font_size (int): Size of the font
            font_family (str): Font face
            use_grid (bool): Whether or not to plot gridlines
            use_commaticks (bool): Plot y-axis with commas rather than scientific notation
            log_scale (bool or list): Whether or not to plot the y-axis with a log scale; if a list, panels to show as log
            do_show (bool): Whether or not to show the figure
            verbose (bool): Display a bit of extra information

        Returns:
            fig: Figure handle
        '''

        if verbose is None:
            verbose = self['verbose']
        sc.printv('Plotting...', 1, verbose)

        if to_plot is None:
            to_plot = cvd.default_sim_plots
        to_plot = sc.odict(to_plot) # In case it's supplied as a dict

        # Handle input arguments -- merge user input with defaults
        fig_args     = sc.mergedicts({'figsize':(16,14)}, fig_args)
        plot_args    = sc.mergedicts({'lw':3, 'alpha':0.7}, plot_args)
        scatter_args = sc.mergedicts({'s':70, 'marker':'s'}, scatter_args)
        axis_args    = sc.mergedicts({'left':0.1, 'bottom':0.05, 'right':0.9, 'top':0.97, 'wspace':0.2, 'hspace':0.25}, axis_args)
        legend_args  = sc.mergedicts({'loc': 'best'}, legend_args)

        fig = pl.figure(**fig_args)
        pl.subplots_adjust(**axis_args)
        pl.rcParams['font.size'] = font_size
        if font_family:
            pl.rcParams['font.family'] = font_family

        res = self.results # Shorten since heavily used

        # Plot everything
        n_rows = np.ceil(len(to_plot)/n_cols) # Number of subplot rows to have
        for p,title,keylabels in to_plot.enumitems():
            if p == 0:
                ax = pl.subplot(n_rows, n_cols, p+1)
            else:
                ax = pl.subplot(n_rows, n_cols, p + 1, sharex=ax)
            if log_scale:
                if isinstance(log_scale, list):
                    if title in log_scale:
                        ax.set_yscale('log')
                else:
                    ax.set_yscale('log')
            for key in keylabels:
                label = res[key].name
                this_color = res[key].color
                y = res[key].values
                pl.plot(res['t'], y, label=label, **plot_args, c=this_color)
                if self.data is not None and key in self.data:
                    data_t = (self.data.index-self['start_day'])/np.timedelta64(1,'D') # Convert from data date to model output index based on model start date
                    pl.scatter(data_t, self.data[key], c=[this_color], **scatter_args)
            if self.data is not None and len(self.data):
                pl.scatter(pl.nan, pl.nan, c=[(0,0,0)], label='Data', **scatter_args)

            pl.legend(**legend_args)
            pl.grid(use_grid)
            sc.setylim()
            if use_commaticks:
                sc.commaticks()
            pl.title(title)

            # Optionally reset tick marks (useful for e.g. plotting weeks/months)
            if interval:
                xmin,xmax = ax.get_xlim()
                ax.set_xticks(pl.arange(xmin, xmax+1, interval))

            # Set xticks as dates
            if as_dates:
                @ticker.FuncFormatter
                def date_formatter(x, pos):
                    return (self['start_day'] + dt.timedelta(days=x)).strftime('%b-%d')
                ax.xaxis.set_major_formatter(date_formatter)
                if not interval:
                    ax.xaxis.set_major_locator(ticker.MaxNLocator(integer=True))

            # Plot interventions
            for intervention in self['interventions']:
                intervention.plot(self, ax)

        # Ensure the figure actually renders or saves
        if do_save:
            if fig_path is None: # No figpath provided - see whether do_save is a figpath
                if isinstance(do_save, str) :
                    fig_path = do_save # It's a string, assume it's a filename
                else:
                    fig_path = 'covasim.png' # Just give it a default name
            fig_path = sc.makefilepath(fig_path) # Ensure it's valid, including creating the folder
            pl.savefig(fig_path)

        if do_show:
            pl.show()
        else:
            pl.close(fig)

        return fig
Code example #38
def fitnoise(pool):
    allIntensities = 10 - pool['allIntensities']
    allResponses = pool['allResponses']  # reverse the response
    ntrial = pool['ntrial']

    # curve fitting and plotting for each condition
    res = {}
    # print('label: ' + 'centre, ' + 'std, ' + 'ssq')
    fig, axes = plt.subplots(1, len(pool['allIntensities']), figsize=(16, 6))
    fig.suptitle(str(ntrial) + 'trials')
    for idx, label in enumerate(allResponses.index):
        res[label] = {}
        res[label]['intensities'] = allIntensities[label]
        res[label]['responses'] = allResponses[label]

        # plt.scatter(res[label]['intensities'], res[label]['responses'])

        res[label]['combinedInten'], res[label]['combinedResp'], res[label]['combinedN'] = \
            data.functionFromStaircase(res[label]['intensities'],
                                       res[label]['responses'],
                                       bins=10)  # bin data and fit to PF

        # plt.scatter(res[label]['combinedInten'], res[label]['combinedResp'])

        res[label]['sems'] = [
            1.0 / (n / sum(res[label]['combinedN']))
            for n in res[label]['combinedN']
        ]  # sems is defined as 1/weight in Psychopy

        guess = [5, 0.5]
        # if label == 'hue_1p' or label == 'hue_2p':
        #     print('detected')
        #     guess = [8, 0.5]
        res[label]['fit'] = FitCumNormal(
            res[label]['combinedInten'],
            res[label]['combinedResp'],
            sems=res[label]['sems'],
            guess=None,
            expectedMin=0.5,
            lapse=0.01)  # customized cumulative Gaussian

        # print(label + ':' + str(res[label]['fit'].params) + ', ' + str(
        #     res[label]['fit'].ssq))  # a list with [centre, sd] for the Gaussian distribution forming the cumulative

        res[label]['thresh'] = res[label]['fit'].inverse(0.75)  # threshold

        this_res = res[label]

        # print(label + ':' + str(this_res['fit'].params) + ', ' + str(
        #         this_res['fit'].ssq))  # a list with [centre, sd] for the Gaussian distribution forming the cumulative

        ax = axes.flatten()[idx]
        fontsize = 8
        for inten, resp, se in zip(
                this_res['combinedInten'], this_res['combinedResp'],
                this_res['sems']):  # plot combined data points
            ax.plot(inten, resp, '.', alpha=0.5, markersize=100 / se)

        smoothResp = pylab.arange(0.01, 0.96, .02)
        smoothInt = this_res['fit'].inverse(smoothResp)
        # smoothInt = pylab.arange(0, 6.0, 0.05)
        # smoothResp = this_res['fit'].eval(smoothInt)

        ax.plot(smoothInt, smoothResp, '-')  # plot fitted curve
        ax.plot([this_res['thresh'], this_res['thresh']], [0, 0.75],
                '--',
                color='grey')
        ax.plot([0, this_res['thresh']], [0.75, 0.75], '--', color='grey')

        ssq = np.round(this_res['fit'].ssq, decimals=3)  # sum-squared error
        ax.text(3.5, 0.55, 'ssq = ' + str(ssq), fontsize=fontsize)
        ax.set_title(label + ' ' + 'threshold = %0.3f' % this_res['thresh'],
                     fontsize=fontsize)
        ax.set_ylim([0.5, 1])
        # ax.set_xlim([0, 6])
        ax.tick_params(axis='both', which='major', labelsize=fontsize - 2)
        ax.set_xlabel('level of consistence (10 - std)')
        ax.set_ylabel('correctness')

        # plt.setp(axes[:], xlabel='hue angle')
        # plt.setp(axes[:, 0], ylabel='correctness')
    plt.show()
Code example #39
def pyprops(datafile,
            fluxfile,
            assignfile,
            root,
            assignfile2=None,
            montecarlo=0,
            doplots=False):

    import sys
    gitpaths = ['/Users/remy/local/github/pyprops/']
    for gitpath in gitpaths:
        if not gitpath in sys.path:
            sys.path.insert(0, gitpath)

    print("importing modules and data")
    import astropy.io.fits as fits
    from astropy.table import Table

    from measure_moments import measure_moments
    from extrap import extrap
    from deconvolve_gauss import deconvolve_gauss
    from ellfit import ellfit
    from add_noise_to_cube import add_noise_to_cube

    import pylab as pl
    import numpy as np
    pl.ion()

    datacube = fits.getdata(datafile)
    s = datacube.shape
    if len(s) == 4:
        if s[0] == 1:
            #  cube has the trailing stokes axis which turns into a leading degenerate axis in python
            datacube = datacube[0]

    fluxmap = fits.getdata(fluxfile)

    hdr = fits.getheader(datafile)
    bmaj = hdr['bmaj']  # degrees
    bmin = hdr['bmin']  # degrees
    bpa = hdr['bpa'] * np.pi / 180  # ->rad

    from astropy import wcs
    w = wcs.WCS(hdr)
    #    pix=pl.sqrt(-pl.det(w.celestial.pixel_scale_matrix)) # degrees
    pix = np.absolute(w.wcs.get_cdelt()[0])  # degrees

    bmaj_pix = bmaj / pix
    bmin_pix = bmin / pix
    bm_pix = [bmaj_pix, bmin_pix, bpa]

    import time
    x = time.localtime()
    datestr = str(x.tm_year) + ("%02i" % x.tm_mon) + ("%02i" % x.tm_mday)
    import pickle

    from cube_to_moments import cube_to_moments

    assigncube = fits.getdata(assignfile)
    moments, mcmoments = cube_to_moments(datacube,
                                         assigncube,
                                         montecarlo=montecarlo,
                                         bm_pix=bm_pix,
                                         fluxmap=fluxmap)

    moments['posang'] = moments['posang'] % np.pi
    moments['de_posang'] = moments['de_posang'] % np.pi

    pickle.dump([moments, bm_pix, mcmoments], open(root + ".pyprops.pkl",
                                                   "wb"))

    if assignfile2:
        assigncube2 = fits.getdata(assignfile2)
        moments2, mcmoments2 = cube_to_moments(datacube,
                                               assigncube2,
                                               montecarlo=montecarlo,
                                               bm_pix=bm_pix,
                                               fluxmap=fluxmap)

        moments2['posang'] = moments2['posang'] % np.pi
        moments2['de_posang'] = moments2['de_posang'] % np.pi

        pickle.dump([moments2, bm_pix, mcmoments2],
                    open(root2 + "pyprops.pkl", "wb"))

    #######################################################################
    # diagnostic plots

    if doplots:
        pl.clf()
        ellrad = np.sqrt(moments['halfmax_ell_maj'] *
                         moments['halfmax_ell_min'])
        pl.plot(ellrad, moments['mom2v'], '.')

        if assignfile2:
            ellrad2 = np.sqrt(moments2['halfmax_ell_maj'] *
                              moments2['halfmax_ell_min'])
            pl.plot(ellrad2, moments2['mom2v'], '.')

        pl.xlabel("size")
        pl.ylabel("linewidth")

        pl.xscale("log")
        pl.yscale("log")

        pl.savefig(root + ".pyprops.sizeline.png")

    if doplots:
        pl.clf()
        u = np.argsort(moments['flux'])[::-1]
        pl.plot((moments['flux'][u]), pl.arange(len(u)))
        if assignfile2:
            u2 = np.argsort(moments2['flux'])[::-1]
            pl.plot((moments2['flux'][u2]), np.arange(len(u2)))
        pl.yscale("log")
        pl.xscale("log")
        pl.xlabel("flux")
        pl.xlim(pl.xlim()[::-1])
        pl.ylabel("cumulative number")

        pl.savefig(root + ".pyprops.cumfluxdist.png")

    if doplots and montecarlo > 0:
        pl.clf()
        fnu = 0.5 * (moments['fnu_maxintchan'] + moments['fnu_maxchan'])
        delfnu = np.absolute(moments['fnu_maxintchan'] -
                             moments['fnu_maxchan'])
        dfnu = 0.5 * np.sqrt(moments['dfnu_maxchan']**2 +
                             moments['dfnu_maxintchan']**2)
        z = np.where(delfnu > dfnu)[0]
        if len(z) > 0:
            dfnu[z] = delfnu[z]

        pl.errorbar(fnu,
                    moments['flux'],
                    xerr=dfnu,
                    yerr=moments['dflux'],
                    fmt='.')
        pl.xlabel("I [Bunit * vpix]")
        pl.ylabel("F [Bunit * pix^2 * vpix")
        pl.xscale("log")
        pl.yscale("log")
        pl.xlim(.1, 100)
        pl.ylim(.5, 500)
        pl.plot(pl.xlim(), pl.array(pl.xlim()) * 3)
        pl.savefig(root + ".pyprops.I_F.png")

    # size comparisons
    if doplots and montecarlo > 0:
        pl.clf()
        bmarea = bmaj_pix * bmin_pix * np.pi / 4  # not a beam "volume"
        area1 = moments['de_mom2maj'] * moments[
            'de_mom2min'] * 2.354**2 * np.pi / 4 / bmarea
        darea1 = area1 * np.sqrt(
            (moments['dde_mom2maj'] / moments['de_mom2maj'])**2 +
            (moments['dde_mom2min'] / moments['de_mom2min'])**2)
        area2 = moments['de_halfmax_ell_maj'] * moments[
            'de_halfmax_ell_min'] * np.pi / 4 / bmarea
        darea2 = area2 * np.sqrt((moments['dde_halfmax_ell_maj'] /
                                  moments['de_halfmax_ell_maj'])**2 +
                                 (moments['dde_halfmax_ell_min'] /
                                  moments['de_halfmax_ell_min'])**2)

        pl.errorbar(moments['flux'],
                    area1,
                    xerr=moments['dflux'],
                    yerr=darea1,
                    fmt='.',
                    label="mom2")
        pl.errorbar(moments['flux'],
                    area2,
                    xerr=moments['dflux'],
                    yerr=darea2,
                    fmt='.',
                    label="ellfit")
        pl.ylabel("area [beams]")
        pl.xlabel("F [Bunit * pix^2 * vpix]")
        pl.legend(loc="best", prop={"size": 10}, numpoints=1)
        pl.xscale("log")
        pl.savefig(root + ".pyprops.F_area.png")

        bmarea = bmaj_pix * bmin_pix * np.pi / 4  # not a beam "volume"
        area1 = moments['mom2maj'] * moments[
            'mom2min'] * 2.354**2 * np.pi / 4 / bmarea
        darea1 = area1 * np.sqrt(
            (moments['dmom2maj'] / moments['mom2maj'])**2 +
            (moments['dmom2min'] / moments['mom2min'])**2)
        de_area1 = moments['de_mom2maj'] * moments[
            'de_mom2min'] * 2.354**2 * np.pi / 4 / bmarea
        dde_area1 = de_area1 * np.sqrt(
            (moments['dde_mom2maj'] / moments['de_mom2maj'])**2 +
            (moments['dde_mom2min'] / moments['de_mom2min'])**2)
        area2 = moments['halfmax_ell_maj'] * moments[
            'halfmax_ell_min'] * np.pi / 4 / bmarea
        darea2 = area2 * np.sqrt(
            (moments['dhalfmax_ell_maj'] / moments['halfmax_ell_maj'])**2 +
            (moments['dhalfmax_ell_min'] / moments['halfmax_ell_min'])**2)
        de_area2 = moments['de_halfmax_ell_maj'] * moments[
            'de_halfmax_ell_min'] * np.pi / 4 / bmarea
        dde_area2 = de_area2 * np.sqrt((moments['dde_halfmax_ell_maj'] /
                                        moments['de_halfmax_ell_maj'])**2 +
                                       (moments['dde_halfmax_ell_min'] /
                                        moments['de_halfmax_ell_min'])**2)

        pl.clf()
        pl.subplot(211)
        pl.errorbar(area1,
                    de_area1,
                    xerr=darea1,
                    yerr=dde_area1,
                    fmt='.',
                    label="success")
        z = np.where(np.isnan(de_area1))[0]
        pl.errorbar(area1[z],
                    area1[z],
                    xerr=darea1[z],
                    fmt='.',
                    label="failed")
        z = np.where(de_area1 == 0)[0]
        pl.errorbar(area1[z], area1[z], xerr=darea1[z], fmt='.', label="ptsrc")
        pl.ylabel("area [beams, deconv]")
        pl.xlabel("area [beams, meas]")
        pl.legend(loc="best", prop={"size": 10}, numpoints=1)

        pl.subplot(212)
        pl.errorbar(area2,
                    de_area2,
                    xerr=darea2,
                    yerr=dde_area2,
                    fmt='.',
                    label="success")
        z = np.where(np.isnan(de_area2))[0]
        pl.errorbar(area2[z],
                    area2[z],
                    xerr=darea2[z],
                    fmt='.',
                    label="failed")
        z = np.where(de_area2 == 0)[0]
        pl.errorbar(area2[z], area2[z], xerr=darea2[z], fmt='.', label="ptsrc")
        pl.ylabel("area [beams, deconv]")
        pl.xlabel("area [beams, meas]")
        pl.legend(loc="best", prop={"size": 10}, numpoints=1)

        #pl.xscale("log")
        pl.savefig(root + ".pyprops.area_dearea.png")

        raterr = area2 / area1
        draterr = raterr * np.sqrt((darea2 / area2)**2 + (darea1 / area1)**2)
        pl.clf()
        pl.errorbar(area2, raterr, xerr=darea2, yerr=draterr, fmt='.')
        pl.xlabel("ell area")
        pl.ylabel("ell area / mom2 area")

        pl.savefig(root + ".pyprops.ellarea_momarea.png")

    # fluxes and integrated spectra:
    if doplots and montecarlo > 0:
        ratiofnu = moments['fnu_maxintchan'] / moments['fnu_maxchan']
        dratio = ratiofnu * np.sqrt(
            (moments['dfnu_maxchan'] / moments['fnu_maxchan'])**2 +
            (moments['dfnu_maxintchan'] / moments['fnu_maxintchan'])**2)
        difffnu = moments['fnu_maxintchan'] - moments['fnu_maxchan']
        ddiff = np.sqrt(moments['dfnu_maxchan']**2 +
                        moments['dfnu_maxintchan']**2)

        fnu = 0.5 * (moments['fnu_maxintchan'] + moments['fnu_maxchan'])

        pl.clf()
        z = np.where(difffnu > 0)[0]
        pl.errorbar(moments['fnu_maxchan'][z], (difffnu / fnu)[z],
                    xerr=moments['dfnu_maxchan'][z],
                    yerr=(ddiff / fnu)[z],
                    fmt='.')
        pl.xlabel("Fnu @max    [Bunit * pix^2]")
        pl.ylabel("(Fnu @maxint - Fnu @max)/Fnu")
        pl.xscale("log")
        pl.xlim(.3, 40)
        pl.ylim(-.3, 1.5)
        pl.savefig("pyprops." + datestr + ".F_maxint_v_max.png")

    # convergence of errors with MC iterations:
    if doplots and montecarlo > 100:
        pl.clf()
        k = 'max'
        k = 'de_mom2min'
        if montecarlo > 100:
            pl.plot(moments[k], moments['d10' + k], '.', label='10')
            pl.plot(moments[k], moments['d30' + k], '.', label='30')
            pl.plot(moments[k], moments['d100' + k], '.', label='100')
        pl.plot(moments[k], moments['d' + k], '.', label=str(montecarlo))
        pl.legend(loc="best", prop={"size": 10}, numpoints=1)
        pl.xlabel(k)
        pl.ylabel("d" + k)

        if k == 'max':
            pl.plot(pl.xlim(), [0.003, 0.003], 'k')
            pl.plot(pl.xlim(), [0.005, 0.005], 'k', linestyle="dashed")
            pl.xlim(0, 0.35)

        if montecarlo > 100:
            pl.savefig(root + ".pyprops." + k + ".mciters.png")
        else:
            pl.savefig(root + ".pyprops." + k + ".png")

    if doplots:
        # previously, we did
        # if maj<bmaj*1.1: set de_maj,de_min to bmaj/2, bmin/2
        # elif min<bmin*1.1: de_maj=pl.sqrt(maj**2-bmaj**2), de_min=bmin/2

        pl.clf()
        pl.plot(moments['halfmax_ell_maj'], moments['halfmax_ell_min'], '.')
        pl.xlabel("measured major ell @halfmax")
        pl.ylabel("measured minor ell @halfmax")
        pl.plot([bm_pix[0], pl.xlim()[1]], [bm_pix[1], bm_pix[1]],
                'k',
                linestyle='dotted')
        pl.plot([bm_pix[0], bm_pix[0]], [bm_pix[1], pl.ylim()[1]],
                'k',
                linestyle='dotted')
        # if its something half the beamsize, convolved with the beam,
        # it'll end up with size=pl.sqrt(1+.5**2)=1.12*bm
        pl.plot([bm_pix[0] * 1.12, pl.xlim()[1]],
                [bm_pix[1] * 1.12, bm_pix[1] * 1.12],
                'k',
                linestyle='dotted')
        pl.plot([bm_pix[0] * 1.12, bm_pix[0] * 1.12],
                [bm_pix[1] * 1.12, pl.ylim()[1]],
                'k',
                linestyle='dotted')

        z = np.where(np.isnan(moments['de_halfmax_ell_maj']))[0]
        pl.plot(moments['halfmax_ell_maj'][z],
                moments['halfmax_ell_min'][z],
                's',
                label='dec=nan')
        z = np.where((moments['de_halfmax_ell_min'] == 0) *
                     (moments['de_halfmax_ell_maj'] > 0))[0]
        pl.plot(moments['halfmax_ell_maj'][z],
                moments['halfmax_ell_min'][z],
                'cd',
                label='dec min=0')
        z = np.where((moments['de_halfmax_ell_min'] == 0) *
                     (moments['de_halfmax_ell_maj'] == 0))[0]
        pl.plot(moments['halfmax_ell_maj'][z],
                moments['halfmax_ell_min'][z],
                'r*',
                label='dec both=0')
        pl.legend(loc="best", prop={"size": 10}, numpoints=1)

        pl.savefig(root + ".pyprops.measured_ellipses.png")

        pl.clf()
        pl.plot(moments['posang'], moments['halfmax_ell_min'], '.')
        pl.xlabel("posang")
        pl.ylabel("measured minor ell @halfmax")
        pl.plot([bm_pix[2], bm_pix[2]], pl.ylim(), 'k', linestyle='dashed')
        pl.plot(pl.xlim(), [bm_pix[1], bm_pix[1]], 'k', linestyle='dotted')
        pl.plot(pl.xlim(), [bm_pix[1] * 1.12, bm_pix[1] * 1.12],
                'k',
                linestyle='dotted')

        z = np.where(np.isnan(moments['de_halfmax_ell_maj']))[0]
        pl.plot(moments['posang'][z],
                moments['halfmax_ell_min'][z],
                's',
                label='dec=nan')
        z = np.where((moments['de_halfmax_ell_min'] == 0) *
                     (moments['de_halfmax_ell_maj'] > 0))[0]
        pl.plot(moments['posang'][z],
                moments['halfmax_ell_min'][z],
                'cd',
                label='dec min=0')
        z = np.where((moments['de_halfmax_ell_min'] == 0) *
                     (moments['de_halfmax_ell_maj'] == 0))[0]
        pl.plot(moments['posang'][z],
                moments['halfmax_ell_min'][z],
                'r*',
                label='dec both=0')
        pl.legend(loc="best", prop={"size": 10}, numpoints=1)

        pl.savefig(root + ".pyprops.measured_ellipses_angles.png")

        p = moments['posang'].copy()
        dp = moments['de_posang'].copy()
        p = (p - bm_pix[2] + np.pi / 2) % np.pi + bm_pix[2] - np.pi / 2
        dp = (dp - bm_pix[2] + np.pi / 2) % np.pi + bm_pix[2] - np.pi / 2

        pl.clf()
        pl.plot(p, dp, '.')
        pl.plot([bm_pix[2], bm_pix[2]], pl.ylim(), 'k')
        pl.plot(pl.xlim(), [bm_pix[2], bm_pix[2]], 'k')
        pl.xlabel("meas posang")
        pl.ylabel("deconv posang")

        pl.plot(pl.xlim(), [bm_pix[2] + np.pi / 2, bm_pix[2] + np.pi / 2],
                'k',
                linestyle='dotted')

        pl.savefig(root + ".pyprops.dec_ellipses_angles.png")
Code Example #40
p.rc('ytick', labelsize=TickSize)

genMeans = p.genfromtxt("GenotypeEnvelMean.dat")
genSTD = p.genfromtxt("GenotypeEnvelSTD.dat")
GenerData = p.genfromtxt("GeneralData.dat")
l = re.split(" ", ln.getline("ModelParams.dat", 7))
xResolution = float(l[6])
l = re.split(" ", ln.getline("ModelParams.dat", 6))
T = float(l[6])
print "Turbulence level = ", T
xSize = 2.0 / xResolution
if (genMeans.shape[1] != xSize):
    print "ERROR: Env space size does not match genMeans space size!",
    print "Check them."
    exit()
x = p.arange(-1.0, 1.0, xResolution)

# -- trimmig rows
fullPlot = GenerData[:, 0:2]
lastRow = genMeans[-1, :]
genMeans = genMeans[::everyOtherRow]
genMeans = p.vstack([genMeans, lastRow])
lastRow = genSTD[-1, :]
genSTD = genSTD[::everyOtherRow]
genSTD = p.vstack([genSTD, lastRow])
lastRow = GenerData[-1, :]
GenerData = GenerData[::everyOtherRow]
GenerData = p.vstack([GenerData, lastRow])
# -- trimmed

fig = p.figure(1, figsize=(15, 9))
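
# The three "keep every N-th row, then re-append the last row" blocks above could be
# factored into a single helper; a minimal sketch (not part of the original script),
# assuming `everyOtherRow` keeps its meaning from earlier in the file:
def thin_rows(arr, step):
    """Keep every `step`-th row of a 2-D array and always re-append the final row."""
    last_row = arr[-1, :]
    return p.vstack([arr[::step], last_row])

# e.g.  genMeans = thin_rows(p.genfromtxt("GenotypeEnvelMean.dat"), everyOtherRow)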
Code Example #41
File: plot_results.py  Project: agravier/pynn
sizes = pylab.array([vm.size for vm in vm_data.values()])
if not all(sizes == sizes[0]):
    #if not pcsim_data.shape == nest_data.shape == neuron_data.shape:
    errmsg = "Data has different lengths. " + ", ".join([
        "%s: %s" % (simulator, vm.shape) for simulator, vm in vm_data.items()
    ])
    errmsg += "Trimming to the length of the shortest."
    warnings.warn(errmsg)
    new_length = min([vm.shape[0] for vm in vm_data.values()])
    for simulator in vm_data:
        vm_data[simulator] = vm_data[simulator][:new_length, :]
    for simulator in gsyn_data:
        gsyn_data[simulator] = gsyn_data[simulator][:new_length, :]

t = dt * pylab.arange(vm_data[vm_data.keys()[0]].shape[0])

for label, vm in vm_data.items():
    pylab.plot(t, vm[:, 0], label=label)
pylab.legend()
pylab.title(example)
pylab.xlabel("Time (ms)")
pylab.ylabel("Vm (mV)")

pylab.savefig("Results/%s.png" % example)

if gsyn_data:
    pylab.figure(2)
    for label, gsyn in gsyn_data.items():
        pylab.plot(t, gsyn[:, 0], label="%s (exc)" % label)
        pylab.plot(t, gsyn[:, 1], label="%s (inh)" % label)
Code Example #42
File: VAbenchmark_graphs.py  Project: claint76/83319
        # Get info about dataset from header of .v file
        exec(
            get_header("Results/VAbenchmark_%s_exc_%s_np%d.v" %
                       (benchmark, simulator, num_nodes)))

        # Plot membrane potential trace
        allvdata = pylab.load("Results/VAbenchmark_%s_exc_%s_np%d.v" %
                              (benchmark, simulator, num_nodes),
                              comments='#')
        cell_ids = allvdata[:, 1].astype(int)
        allvdata = allvdata[:, 0]
        sortmap = pylab.argsort(cell_ids, kind='mergesort')
        cell_ids = pylab.take(cell_ids, sortmap)
        allvdata = pylab.take(allvdata, sortmap)
        for i in 0, 1:
            tdata = pylab.arange(0, (n + 1) * dt, dt)
            vdata = allvdata.compress(cell_ids == i)
            vdata = pylab.where(vdata >= v_thresh - 0.05, 0.0,
                                vdata)  # add fake APs for plotting
            if len(tdata) > len(vdata):
                print "Warning. Shortening tdata from %d to %d elements (%s)" % (
                    len(tdata), len(vdata), simulator)
                tdata = tdata[0:len(vdata)]
            assert len(tdata) == len(
                vdata), "%d != %d (%s)" % (len(tdata), len(vdata), simulator)
            subplot.plot(tdata, vdata)

        # Plot spike rasters
        subplot = figure.add_axes([x, y0 + 2 * dy, w, h])
        exc_spikedata = signals.loadSpikeList(
            "Results/VAbenchmark_%s_exc_%s_np%d.ras" %
Code Example #43
def solve(param):
    jsp = JSP(param['data'])
    lib = param['solver']
    verb = param['verbose']

    model = JSP_Model(jsp)

    if lib == 'Mistral':
        solver = model.load(lib, model.sequence)
    else:
        solver = model.load(lib)

    solver.setHeuristic('Scheduling', 'Promise')
    solver.setVerbosity(param['verbose'] - 1)

    (lb, ub) = (jsp.lower_bound() - 1, jsp.upper_bound())

    (lb, ub, best) = dichotomic_search(model, solver, lb, ub, verb,
                                       param['tcutoff'])
    if verb > 0:
        print('start branch & bound in [' + str(lb) + '..' + str(ub) + ']')
    if lb + 1 < ub:
        (lb, ub, best) = branch_and_bound(model, lib, lb, ub, verb, best)

    ## finalize the solution (tasks)
    solver.reset()
    if lib == 'Mistral':
        for disjunct in model.sequence:
            solver.post(disjunct == best[disjunct])
        solver.propagate()
        for task in model.tasks:
            solver.post(task == task.get_min())
            solver.propagate()
    best = solver.get_solution()

    schedule = [[-1] * ub for job in jsp.job]
    index = 0
    for machine in jsp.machine:
        index += 1
        for m in machine:
            start = model.Jobs[m].get_value()
            for i in range(model.Jobs[m].duration):
                schedule[m[0]][start + i] = index

    out = ''
    if solver.is_sat():
        out = str(schedule)
    out += ('\nNodes: ' + str(solver.getNodes()))

    if param['print'] == 'yes':
        ###############################################
        ############# Output (Matplotlib) #############
        ###############################################
        print('\n display schedule')

        width = 60
        print_schedule = []
        for row in schedule:
            print_schedule.extend([row] * width)

        import pylab
        pylab.yticks(
            pylab.arange(int(width / 2), width * (len(jsp.job) + 1), width),
            ['job' + str(i + 1) for i in range(len(jsp.job))])
        cmap = pylab.cm.get_cmap('jet', len(jsp.machine) + 1)
        cmap.set_under(color='w')
        pylab.imshow(print_schedule,
                     cmap=cmap,
                     interpolation='nearest',
                     vmin=0)
        #pylab.colorbar()
        pylab.show()

    return out
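
# A minimal sketch of the parameter dictionary solve() expects, inferred only from the
# keys read above; the concrete values below are placeholders, not from the original.
example_param = {
    'data': 'data/jsp/la01.txt',   # instance file handed to JSP() -- hypothetical path
    'solver': 'Mistral',           # backend library name
    'verbose': 1,
    'tcutoff': 30,                 # time cutoff used by dichotomic_search
    'print': 'yes',                # if 'yes', draw the schedule with pylab
}
# print(solve(example_param))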
Code Example #44
import pylab as pl

sr = 44100.
f1 = 3000
f2 = 200.
N = 10

t = pl.arange(0, sr)
w = 2 * pl.pi * f1 * t / sr
o = 2 * pl.pi * f2 * t / sr
a = 0.5

sinw = pl.sin(w)
cosmo = pl.cos((N + 1) * o)
cosno = pl.cos(N * o)
den = 1. - 2 * a * pl.cos(o) + a * a
scal = pl.sqrt(1. - a * a / (1 + a * a * -2 * a**(2 * N + 2)))
s = sinw * (1 - a * a - (2 * a**(N + 1)) * (cosmo - a * cosno)) / den
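
# Sanity check (not in the original file): the closed form above should equal the
# explicit sum of 2N+1 partials with amplitudes a**|k|, centred on f1 and spaced by f2:
#     s[n] = sum_{k=-N..N} a**|k| * sin(w[n] + k*o[n])
s_direct = pl.zeros(len(t))
for k in range(-N, N + 1):
    s_direct += a**abs(k) * pl.sin(w + k * o)
assert pl.allclose(s, s_direct)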
s *= scal

pl.figure(figsize=(8, 5))

pl.subplot(211)
pl.plot(t[0:440] / sr, s[0:440] / max(abs(s)), 'k-')
pl.xlabel("time (s)")

sig = s
N = 32768
start = 0
x = pl.arange(0, N / 2)
bins = x * sr / N
Code Example #45
def cf(d):
    return pylab.arange(1.0, float(len(d)) + 1.0) / float(len(d))
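
# Minimal usage sketch (not part of the original module): cf() returns the
# empirical-CDF y-values 1/n, 2/n, ..., 1, meant to be plotted against the sorted sample.
import pylab
sample = pylab.randn(1000)
pylab.plot(pylab.sort(sample), cf(sample))
pylab.show()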
Code Example #46
GRC_norm_model = pandas.read_csv('/home/j/Project/dismod/gbd/data/applications-fruit_GRC_norm_model.csv')

ISL_model = pandas.read_csv('/home/j/Project/dismod/gbd/data/applications-fruit_ISL_model.csv')
ISL_log_model = pandas.read_csv('/home/j/Project/dismod/gbd/data/applications-fruit_ISL_log_model.csv')
ISL_norm_model = pandas.read_csv('/home/j/Project/dismod/gbd/data/applications-fruit_ISL_norm_model.csv')

GRC_data = pandas.DataFrame(pl.zeros((len(GRC_model.columns),3)), columns=['0','1','2'])
ISL_data = pandas.DataFrame(pl.zeros((len(ISL_model.columns),3)), columns=['0','1','2'])

# age standardizing
import scikits.statsmodels.lib.io as pd
aw_file = pandas.DataFrame(pd.genfromdta('/home/j/Project/COD/envelope/data/age_weights_final.dta'))
for a in range(len(aw_file['age'])): aw_file['age'][a] = round(aw_file['age'][a],3)
aw_file = pandas.DataFrame(aw_file['weight'], index=list(aw_file['age']))

age_weights = pl.vstack((pl.arange(101), pl.zeros(101))).transpose()
for a in range(101):
    if a == 0: age_weights[a,1] = aw_file.ix[0.0] + aw_file.ix[0.01] + aw_file.ix[0.1]
    elif (a>=1) & (a<5): age_weights[a,1] = aw_file.ix[1.0]/4
    elif (a>=5) & (a<10): age_weights[a,1] = aw_file.ix[5.0]/5
    elif (a>=10) & (a<15): age_weights[a,1] = aw_file.ix[10.0]/5
    elif (a>=15) & (a<20): age_weights[a,1] = aw_file.ix[15.0]/5
    elif (a>=20) & (a<25): age_weights[a,1] = aw_file.ix[20.0]/5
    elif (a>=25) & (a<30): age_weights[a,1] = aw_file.ix[25.0]/5
    elif (a>=30) & (a<35): age_weights[a,1] = aw_file.ix[30.0]/5
    elif (a>=35) & (a<40): age_weights[a,1] = aw_file.ix[35.0]/5
    elif (a>=40) & (a<45): age_weights[a,1] = aw_file.ix[40.0]/5
    elif (a>=45) & (a<50): age_weights[a,1] = aw_file.ix[45.0]/5
    elif (a>=50) & (a<55): age_weights[a,1] = aw_file.ix[50.0]/5
    elif (a>=55) & (a<60): age_weights[a,1] = aw_file.ix[55.0]/5
    elif (a>=60) & (a<65): age_weights[a,1] = aw_file.ix[60.0]/5
Code Example #47
File: win.py  Project: vlazzarini/instruments
import pylab as pl

pi = pl.pi
T = 1000
t = pl.arange(0, T)
x = 0.51 - 0.49 * pl.cos(2 * pi * t / T)
y = pl.sin(2 * pi * 10 * t / T)
X = pl.rfft(y * x)
pl.plot(pl.irfft(X))
pl.show()
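
# A small companion sketch (not in the original file): the magnitude spectrum of the
# windowed sine should peak near bin 10, since the sine completes 10 cycles per window.
mag = abs(X)
pl.plot(range(len(mag)), mag, 'k-')
pl.xlim(0, 30)
pl.xlabel("bin (cycles per window)")
pl.ylabel("|X[k]|")
pl.show()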
Code Example #48
def main():

    args = parseCMD()
    fileNames = args.fileName
    Order = args.order
    Smoothness = args.smoothness

    matplotlib.rcParams['text.usetex'] = True
    linestyles = ['-', '--']
    colors = [(1, 0, 0), (0, 1, 0), (1, 1, 0), (1, 0, 1), (0, 0, 0), (0, 0, 1),
              (1, 0.5, 0.5), (0.5, 0.5, 0.5), (0.5, 0, 0), (1, 0.5, 0)]

    # Set up plots with shared axes
    fig1 = pl.figure(1, figsize=(12, 12))
    pl.connect('key_press_event', kevent.press)

    ax1 = pl.subplot(311)
    pl.setp(ax1.get_xticklabels(), visible=False)
    pl.ylabel(r'$\mathrm{E\ [K]}$', fontsize=20)
    ax1.tick_params(axis='both', which='major', labelsize=20)
    ax1.tick_params(axis='both', which='minor', labelsize=20)
    pl.grid()

    ax2 = pl.subplot(312, sharex=ax1)
    pl.setp(ax2.get_xticklabels(), visible=False)
    pl.ylabel(r'$\mathrm{C_V}$', fontsize=20)
    ax2.tick_params(axis='both', which='major', labelsize=20)
    ax2.tick_params(axis='both', which='minor', labelsize=20)
    pl.grid()

    ax3 = pl.subplot(313, sharex=ax1)
    pl.xlabel(r'$\mathrm{T\ [K]}$', fontsize=20)
    pl.ylabel(r'${\mathrm{Entropy}}$', fontsize=20)
    ax3.tick_params(axis='both', which='major', labelsize=20)
    ax3.tick_params(axis='both', which='minor', labelsize=20)
    pl.grid()

    # do spline fitting for each data file
    for k, fileName in enumerate(fileNames):
        data = np.loadtxt(fileName)
        ATemps = data[:, 0]
        #ATemps = data[:,0] # Bohdan's test data
        AEners = data[:, 1]
        #AEners = data[:,5]
        AEnerE = data[:, 2]
        #AEnerE = data[:,6]
        weights = 1 / AEnerE

        ax1.errorbar(ATemps,
                     AEners,
                     AEnerE,
                     linestyle='None',
                     linewidth=2,
                     marker='D',
                     markeredgewidth=0.8,
                     color=colors[k],
                     markeredgecolor=colors[k],
                     markerfacecolor='white',
                     markersize=6,
                     capsize=3)

        lowTRegL = 0
        lowTRegH = len(ATemps[ATemps < args.LT * 1.0001]) - 1
        highTRegL = lowTRegH + 1
        highTRegH = len(ATemps) - 1

        # Spline fit to T>Tc
        SEners, SdEners, HSpline = SplineFitToData(ATemps[highTRegL:highTRegH],
                                                   AEners[highTRegL:highTRegH],
                                                   AEnerE[highTRegL:highTRegH],
                                                   Smoothness)

        # Spline fit to T<Tc
        #SLE,SLcv,LSpline = SplineFitToData(ATemps[lowTRegL:lowTRegH],
        #        AEners[lowTRegL:lowTRegH],AEnerE[lowTRegL:lowTRegH],Smoothness)

        # Polynomial fit to T<Tc
        SegRange = range(lowTRegL, lowTRegH)
        FitCoefs, DerCoefs = PolyFitToData(ATemps[SegRange], AEners[SegRange],
                                           AEnerE[SegRange], [Order])

        polyf = np.poly1d(FitCoefs[0])
        Dpolyf = np.poly1d(DerCoefs[0])

        interT = IntersectionPoint(HSpline, Dpolyf, ATemps[lowTRegH],
                                   ATemps[highTRegL])
        #interT2S = IntersectionPoint2S(LSpline,HSpline,ATemps[lowTRegH],
        #        ATemps[highTRegL])

        stepT = 0.01
        newLTs = pl.arange(ATemps[0], interT, stepT)
        highLTs = pl.arange(interT, ATemps[highTRegH], stepT)
        lowLTs = pl.arange(ATemps[0], interT, stepT)

        ax1.plot(newLTs,
                 polyf(newLTs),
                 linestyle=linestyles[0],
                 linewidth=0.5,
                 marker='None',
                 color=colors[k],
                 markerfacecolor=colors[k],
                 label='Pol. %s order fit for T: %s - %s K' %
                 (Order, ATemps[0], ATemps[lowTRegH]))
        ax1.plot(highLTs,
                 interpolate.splev(highLTs, HSpline, der=0),
                 linestyle=':',
                 linewidth=1.5,
                 color=colors[k],
                 markerfacecolor='white',
                 label='Spline fit for T: %s - %s K; smoothness: %s' %
                 (ATemps[highTRegL], ATemps[highTRegH], Smoothness))
        #ax1.plot(lowLTs, interpolate.splev(lowLTs,LSpline,der=0),
        #        linestyle=linestyles[0],linewidth=0.5,
        #        marker='None',color=colors[k],markerfacecolor=colors[k],
        #                label='Spline fit' )

        ax2.plot(newLTs,
                 Dpolyf(newLTs),
                 linestyle=linestyles[0],
                 linewidth=2,
                 marker='None',
                 color=colors[k],
                 markerfacecolor=colors[k])
        ax2.plot(highLTs,
                 interpolate.splev(highLTs, HSpline, der=1),
                 linestyle='-',
                 linewidth=2,
                 color=colors[k],
                 markerfacecolor='white')
        #ax2.plot(lowLTs, interpolate.splev(lowLTs,LSpline,der=1),
        #        linestyle=':', linewidth=1.5,
        #        color=colors[k],markerfacecolor='white')
        ticks = ax2.xaxis.get_major_ticks()

        np.insert(ticks, 3, 2)

        zeroT = ZeroPoint(Dpolyf, interT)
        ax2.xaxis.set_ticks(
            np.insert(ax2.xaxis.get_majorticklocs(), 0, round(zeroT, 2)))

        ax2.text(interT,
                 interpolate.splev(interT, HSpline, der=1),
                 r'${\mathrm{T_c = %s}}  $' % (round(interT, 3)),
                 fontsize=18)
        ax2.plot([interT, interT],
                 [0, interpolate.splev(interT, HSpline, der=1)],
                 color='black',
                 linestyle='--',
                 linewidth=0.3)

        ax2.plot([zeroT, zeroT], [
            ax2.yaxis.get_majorticklocs()[0],
            ax2.yaxis.get_majorticklocs()[-1]
        ],
                 color='black',
                 linewidth=0.3,
                 linestyle='--')
        ax2.plot([
            ax2.xaxis.get_majorticklocs()[0],
            ax2.xaxis.get_majorticklocs()[-1]
        ], [0, 0],
                 color='black',
                 linewidth=0.3,
                 linestyle='--')
        #if zeroT < 1:
        #    newETs = pl.arange(1,ATemps[-1], stepT)
        #    Entrops= [Entropy(T,interT,1,Dpolyf,HSpline) for T in newETs]
        #else:
        newETs = pl.arange(zeroT, ATemps[highTRegH], stepT)
        print 'Zero     T:', zeroT
        print 'Critical T:', interT
        Entrops = [Entropy(T, interT, zeroT, Dpolyf, HSpline) for T in newETs]

        ax3.plot(newETs, Entrops, linestyle='-', linewidth=2, color=colors[k])

    lgd1 = ax1.legend(loc='best')
    lgd3 = ax3.legend(loc='best')

    lgd1.draggable(state=True)
    pl.tight_layout()
    pl.savefig('Helium_Critical_E_Cv_S.pdf', format='pdf', bbox_inches='tight')
    pl.show()
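
# Not part of the original script: Entropy(T, interT, zeroT, Dpolyf, HSpline) above is
# presumably the thermodynamic integral S(T) = int_{T0}^{T} Cv(T')/T' dT', with Cv taken
# from the polynomial derivative below Tc and the spline derivative above it.  A crude
# trapezoid-rule version under that assumption:
def entropy_trapz(T, Tc, T0, Dpolyf, HSpline, n=2000):
    Ts = np.linspace(T0, T, n)
    Cv = np.where(Ts < Tc, Dpolyf(Ts), interpolate.splev(Ts, HSpline, der=1))
    return np.trapz(Cv / Ts, Ts)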
Code Example #49
dt2 = 0.0005  # time step for the synthetics
dt3 = 0.0005  # time step for the synthetics
dt_ref = 0.0002  # time step for the DWT
x0 = 11000.0  # position of the first receiver
dx = 25.0  # spacing between two receivers
NL = 12  # number of levels for DWT

fv0 = numpy.fromfile(dirname0 + '/' + filename0, 'f4')
fv1 = numpy.fromfile(dirname1 + '/OUTPUT_FILES/' + filename1, 'f4')
fv2 = numpy.fromfile(dirname2 + '/OUTPUT_FILES/' + filename2, 'f4')
fv3 = numpy.fromfile(dirname3 + '/OUTPUT_FILES/' + filename3, 'f4')
v0 = numpy.reshape(fv0, (nr, nt0))
v1 = numpy.reshape(fv1, (nr, nt1))
v2 = numpy.reshape(fv2, (nr, nt2))
v3 = numpy.reshape(fv3, (nr, nt3))
t0 = pylab.arange(0.0, nt0 * dt0, dt0)
t1 = pylab.arange(0.0, nt1 * dt1, dt1)
t2 = pylab.arange(0.0, nt2 * dt2, dt2)
t3 = pylab.arange(0.0, nt3 * dt3, dt3)
t_ref = pylab.arange(0.0, nt_ref * dt_ref, dt_ref)
f_ref = pylab.arange(0.0, (0.5 * (nt_ref - 1) + 1) / ((nt_ref - 1) * dt_ref),
                     1.0 / ((nt_ref - 1) * dt_ref))

# Loop on receivers
for i in range(0, nr):
    seism0 = v0[i, :]
    seism1 = -50.0 * v1[i, :]
    seism2 = -50.0 * v2[i, :]
    seism3 = -50.0 * v3[i, :]
    f0 = interpolate.interp1d(t0, seism0)
    f1 = interpolate.interp1d(t1, seism1)
Code Example #50
    def feature_plot(self,
                     feature=None,
                     normalize=False,
                     dbscale=False,
                     norm=False,
                     interp='nearest',
                     labels=True,
                     nofig=False,
                     **kwargs):
        """
        ::

          Plot the given feature, default is self.feature, 
           returns an error if feature not extracted

          Inputs:
           feature   - the feature to plot [self.feature]
                        features are extracted in the following hierarchy:
                           stft->cqft->mfcc->[lcqft,hcqft]->chroma,
                        if a later feature was extracted, then an earlier feature can be plotted
           normalize - column-wise normalization [False]
           dbscale   - transform linear power to decibels: 20*log10(X) [False]
           norm      - make columns unit norm [False]
           interp    - how to interpolate values in the plot ['nearest']
           labels    - whether to draw axis labels and titles [True]
           nofig     - if True, do not open a new figure [False]
           **kwargs  - keyword arguments to imshow or plot
        """
        if feature is None:
            feature = self._check_feature_params()['feature']
        # check plots
        if feature == 'stft':
            if not self._have_stft:
                print("Error: must extract STFT first")
            else:
                feature_plot(P.absolute(self.STFT),
                             normalize,
                             dbscale,
                             norm,
                             title_string="STFT",
                             interp=interp,
                             nofig=nofig,
                             **kwargs)
                if labels:
                    self._feature_plot_xticks(
                        float(self.nhop) / float(self.sample_rate))
                    self._feature_plot_yticks(
                        float(self.sample_rate) / (self.nfft))
                    P.xlabel('Time (secs)')
                    P.ylabel('Frequency (Hz)')
        elif feature == 'power':
            if not self._have_power:
                print("Error: must extract POWER first")
            else:
                if not nofig:
                    P.figure()
                P.plot(feature_scale(self.POWER, normalize, dbscale) / 20.0)
                if labels:
                    self._feature_plot_xticks(
                        float(self.nhop) / float(self.sample_rate))
                    P.title("Power")
                    P.xlabel("Time (s)")
                    P.ylabel("Power (dB)")
        elif feature == 'cqft':
            if not self._have_cqft:
                print("Error: must extract CQFT first")
            else:
                feature_plot(self.CQFT,
                             normalize,
                             dbscale,
                             norm,
                             title_string="CQFT",
                             interp=interp,
                             nofig=nofig,
                             **kwargs)
                if labels:
                    self._feature_plot_xticks(
                        float(self.nhop) / float(self.sample_rate))
                    # self._feature_plot_yticks(1.)
                    P.yticks(P.arange(0, self._cqtN, self.nbpo),
                             (self.lo *
                              2**(P.arange(0, self._cqtN, self.nbpo) /
                                  self.nbpo)).round(1))
                    P.xlabel('Time (secs)')
                    P.ylabel('Frequency (Hz)')
        elif feature == 'mfcc':
            if not self._have_mfcc:
                print("Error: must extract MFCC first")
            else:
                fp = self._check_feature_params()
                X = self.MFCC[self.lcoef:self.lcoef + self.ncoef, :]
                feature_plot(X,
                             normalize,
                             dbscale,
                             norm,
                             title_string="MFCC",
                             interp=interp,
                             nofig=nofig,
                             **kwargs)
                if labels:
                    self._feature_plot_xticks(
                        float(self.nhop) / float(self.sample_rate))
                    P.xlabel('Time (secs)')
                    P.ylabel('Cepstral coeffient')
        elif feature == 'lcqft':
            if not self._have_lcqft:
                print("Error: must extract LCQFT first")
            else:
                feature_plot(self.LCQFT,
                             normalize,
                             dbscale,
                             norm,
                             title_string="LCQFT",
                             interp=interp,
                             nofig=nofig,
                             **kwargs)
                if labels:
                    self._feature_plot_xticks(
                        float(self.nhop) / float(self.sample_rate))
                    P.yticks(P.arange(0, self._cqtN, self.nbpo),
                             (self.lo *
                              2**(P.arange(0, self._cqtN, self.nbpo) /
                                  self.nbpo)).round(1))
        elif feature == 'hcqft':
            if not self._have_hcqft:
                print("Error: must extract HCQFT first")
            else:
                feature_plot(self.HCQFT,
                             normalize,
                             dbscale,
                             norm,
                             title_string="HCQFT",
                             interp=interp,
                             nofig=nofig,
                             **kwargs)
                if labels:
                    self._feature_plot_xticks(
                        float(self.nhop) / float(self.sample_rate))
                    P.yticks(P.arange(0, self._cqtN, self.nbpo),
                             (self.lo *
                              2**(P.arange(0, self._cqtN, self.nbpo) /
                                  self.nbpo)).round(1))
                    P.xlabel('Time (secs)')
                    P.ylabel('Frequency (Hz)')
        elif feature == 'chroma' or feature == 'hchroma':
            if not self._have_chroma:
                print("Error: must extract CHROMA first")
            else:
                feature_plot(self.CHROMA,
                             normalize,
                             dbscale,
                             norm,
                             title_string="CHROMA",
                             interp=interp,
                             nofig=nofig,
                             **kwargs)
                if labels:
                    self._feature_plot_xticks(
                        float(self.nhop) / float(self.sample_rate))
                    P.yticks(P.arange(0, self.nbpo, self.nbpo / 12.), [
                        'C', 'C#', 'D', 'Eb', 'E', 'F', 'F#', 'G', 'G#', 'A',
                        'Bb', 'B'
                    ])
                    P.xlabel('Time (secs)')
                    P.ylabel('Pitch Class')
        else:
            print("Unrecognized feature, skipping plot: ", feature)
Code Example #51
File: train.py  Project: gombru/instaEmotions
def do_solve(maxIter, solver, display, test_interval, test_iters):

    # SET PLOTS DATA
    train_loss_C = zeros(maxIter / display)
    train_top1 = zeros(maxIter / display)
    # train_top5 = zeros(maxIter/display)

    val_loss_C = zeros(maxIter / test_interval)
    val_top1 = zeros(maxIter / test_interval)
    # val_top5 = zeros(maxIter/test_interval)

    it_axes = (arange(maxIter) * display) + display
    it_val_axes = (arange(maxIter) * test_interval) + test_interval

    _, ax1 = subplots()
    ax2 = ax1.twinx()
    ax1.set_xlabel('iteration')
    ax1.set_ylabel('train loss C (r), val loss C (y)')
    ax2.set_ylabel(
        'train TOP1 (b), val TOP1 (g)')  # train TOP-5 (c), val TOP-5 (k)')
    ax2.set_autoscaley_on(False)
    ax2.set_ylim([0, 1])

    lossC = np.zeros(maxIter)
    acc1 = np.zeros(maxIter)
    acc5 = np.zeros(maxIter)

    #RUN TRAINING
    for it in range(maxIter):
        #st = time.time()
        solver.step(1)  # run a single SGD step in Caffepy()
        #en = time.time()
        #print "Time step: " + str((en-st))

        #PLOT
        if it % display == 0 or it + 1 == maxIter:
            lossC[it] = solver.net.blobs['loss3/loss3'].data.copy()
            acc1[it] = solver.net.blobs['loss3/top-1'].data.copy()
            # acc5[it] = solver.net.blobs['loss3/top-5'].data.copy()

            loss_disp = 'loss3C= ' + str(lossC[it]) + '  top-1= ' + str(
                acc1[it])

            print '%3d) %s' % (it, loss_disp)

            train_loss_C[it / display] = lossC[it]
            train_top1[it / display] = acc1[it]
            # train_top5[it / display] = acc5[it]

            ax1.plot(it_axes[0:it / display], train_loss_C[0:it / display],
                     'r')
            ax2.plot(it_axes[0:it / display], train_top1[0:it / display], 'b')
            # ax2.plot(it_axes[0:it / display], train_top5[0:it / display], 'c')

            #ax1.set_ylim([0, 10])
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)

        #VALIDATE
        if it % test_interval == 0 and it > 0:
            loss_val_C = 0
            top1_val = 0
            # top5_val = 0
            for i in range(test_iters):
                solver.test_nets[0].forward()
                loss_val_C += solver.test_nets[0].blobs['loss3/loss3'].data
                top1_val += solver.test_nets[0].blobs['loss3/top-1'].data
                # top5_val += solver.test_nets[0].blobs['loss3/top-5'].data

            loss_val_C /= test_iters
            top1_val /= test_iters
            # top5_val /= test_iters

            print("Val loss C: {:.3f}".format(loss_val_C))

            val_loss_C[it / test_interval - 1] = loss_val_C
            val_top1[it / test_interval - 1] = top1_val
            # val_top5[it / test_interval - 1] = top5_val

            ax1.plot(it_val_axes[0:it / test_interval],
                     val_loss_C[0:it / test_interval], 'y')
            ax2.plot(it_val_axes[0:it / test_interval],
                     val_top1[0:it / test_interval], 'g')
            # ax2.plot(it_val_axes[0:it / test_interval], val_top5[0:it / test_interval], 'k')

            #ax1.set_ylim([0, 10])
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            title = '../../../datasets/EmotionDataset/models/training/' + training_id + str(
                it) + '.png'  # Save graph to disk
            savefig(title, bbox_inches='tight')

    return
Code Example #52
             3,
             4,
         ], [1, 2, 3, 4]),
         axis=[60, 104, .3, 4.5]),
]

for i, params in enumerate(param_list):
    ax = pl.subplot(2, 2, i + 1)
    if params['type'] == 'pf':
        dismod3.graphics.plot_data_bars(best_model.get_data('csmr'),
                                        color='grey')
    else:
        dismod3.graphics.plot_data_bars(best_model.get_data(params['type']),
                                        color='grey')

    pl.plot(pl.arange(101),
            pl.array(output['model_' + params['type']]),
            'k-',
            linewidth=2,
            label='Posterior Mean')
    pl.plot(pl.arange(101),
            pl.array(output['model_' + params['type'] + 'l']),
            'k-',
            linewidth=1,
            label='95% HPD interval')
    pl.plot(pl.arange(101),
            pl.array(output['model_' + params['type'] + 'u']),
            'k-',
            linewidth=1)

    pl.xlabel('Age (years)')
Code Example #53
    exit    

if __name__=="__main__":
    """
    var and es granularity adjustment as a function of the correlation 
    """
    #percentile = 0.99 
    #num_of_credits = 40
    #default_probability = 0.01
    #loss_given_default = 1.0
    if len(sys.argv) != 5 and len(sys.argv) != 8:
        usage() 
    else:
        if len(sys.argv) == 5:
            percentile, num_of_credits, pd, lgd = sys.argv[1:5]
            x_min, x_max, x_step = (0.01, 1.00, 0.01)
        else:
            percentile, num_of_credits, pd, lgd, x_min, x_max, x_step = sys.argv[1:8]
        import pylab
        x_list = pylab.arange(float(x_min), float(x_max), float(x_step))
        var_values, es_values = main(x_list, float(percentile), int(num_of_credits), 
                                     float(pd), float(lgd))
        pylab.suptitle('First Order Granularity Adjustment', fontsize=12)
        pylab.xlabel("correlation")
        pylab.ylabel("function values")
        pylab.plot(x_list, var_values, 'b--', label='var adjustment')
        pylab.plot(x_list, es_values, 'g-' , label='es adjustment')
        pylab.legend()
        pylab.show()
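
# Hypothetical invocation sketch (this script's file name is not shown in the excerpt):
#   python granularity_adjustment.py 0.99 40 0.01 1.0
#   python granularity_adjustment.py 0.99 40 0.01 1.0 0.01 1.00 0.01
# i.e. percentile, number of credits, default probability, loss given default, and
# optionally x_min, x_max, x_step for the correlation grid passed to pylab.arange.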
    
Code Example #54
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 14 18:16:40 2020

@author: Marie
"""

from pylab import arange, sqrt

x = arange(1, 11)
y = sqrt(x)
print(x)
print(y)


def kvadrer(x):
    return x**2


z = kvadrer(x)
print(z)

for y_verdi in y:
    print(f"{y_verdi:.2f}")

print(y[0])
Code Example #55
def plotContig(contig, tracks, options, plot_legend=False,
               extra_features=None):
    """plot data for contig."""

    if extra_features and "figure" in extra_features:
        figure = pylab.figure(**enterParams(extra_features['figure'],
                                            (("figsize", lambda x: list(map(int, x.split(",")))),
                                             ("dpi", int),
                                             "facecolor",
                                             "edgecolor")))
    else:
        figure = pylab.figure()

    if plot_legend:
        if extra_features and "legend" in extra_features:
            legend = pylab.figure(**enterParams(extra_features['legend'],
                                                (("figsize", lambda x: list(map(int, x.split(",")))),
                                                 ("dpi", int),
                                                 "facecolor",
                                                 "edgecolor")))
        else:
            legend = pylab.figure()
        lx = legend.add_axes((0.1, 0.1, 0.9, 0.9))
        lx.set_title("Legend")
        lx.set_axis_off()
    else:
        legend = None

    axes = []

    ywidth = 0.8 / float(len(tracks))
    yoffset = 0.1

    axprops = {}
    ayprops = {}

    min_x, max_x = 1000000000, 0

    for track in tracks:
        if track.mData:
            min_x = min(min_x, min([(x[0]) for x in track.mData[contig]]))
            max_x = max(max_x, max([(x[1]) for x in track.mData[contig]]))

    # make sure that we use the same view for all axes
    axprops['xlim'] = (min_x, max_x)

    nplotted = 0
    for track in tracks:

        labels, plots = [], []

        ax = figure.add_axes(
            (0.1, track.mYOffset, 0.8, track.mYWidth), **axprops)

        if 'sharex' not in axprops:
            ax.xaxis.set_major_formatter(
                matplotlib.ticker.FuncFormatter(formatGenomicCoordinate))
            ax.set_xlabel("Genomic position / Mb")
            axprops['sharex'] = ax
        else:
            pylab.setp(ax.get_xticklabels(), visible=False)

        ax.set_ylabel(track.mTitle, **ayprops)

        if track.mSubTracks:

            # compute maximum extent of y-axis in all of subtracks
            first = True
            for tt in track.mSubTracks:
                if first:
                    min_y = min([x[2] for x in tt.mData[contig]])
                    max_y = max([x[2] for x in tt.mData[contig]])
                    first = False
                else:
                    min_y = min(
                        min_y, min([x[2] for x in tt.mData[contig]]))
                    max_y = max(
                        max_y, max([x[2] for x in tt.mData[contig]]))

            nsubplotted = 0
            for tt in track.mSubTracks:
                plot = addPlot(ax, tt, contig, nplotted,
                               nsubplotted, len(track.mSubTracks),
                               min_y, max_y)
                nsubplotted += 1
                plots.append(plot)
                if hasattr(tt, "legend"):
                    labels.append(tt.legend)
                else:
                    labels.append(tt.mTitle)

        else:
            min_y = min([x[2] for x in track.mData[contig]])
            max_y = max([x[2] for x in track.mData[contig]])

            if options.global_colours:
                n_for_colour = nplotted
            else:
                n_for_colour = 0

            plot = addPlot(ax, track, contig, n_for_colour)
            plots.append(plot)
            if hasattr(track, "legend"):
                labels.append(track.legend)
            else:
                labels.append(track.mTitle)

        # reduce number of ticks by 2
        old_ticks = ax.get_yticks()
        step_size = (old_ticks[1] - old_ticks[0]) * 2
        new_ticks = list(pylab.arange(old_ticks[0], old_ticks[-1], step_size))
        ax.set_yticks(new_ticks)

        if nplotted % 2 == 0:
            ax.yaxis.set_ticks_position("right")
            ax.yaxis.set_label_position("right")
        else:
            ax.yaxis.set_ticks_position("left")
            ax.yaxis.set_label_position("left")

        # deal with extra_features
        if extra_features:
            for key, config in list(extra_features.items()):
                if key == "vlines":
                    if contig not in config.mData:
                        continue
                    lines = []
                    for start, end, value in config.mData[contig]:
                        lines.append(start)
                        lines.append(end)
                    ax.vlines(
                        lines, min_y, max_y, **enterParams(config, ("colour", "linewidth")))

        nplotted += 1

        if legend:
            lx = legend.add_axes((0.1, track.mYOffset, 0.8, track.mYWidth))
            pylab.setp(lx.get_xticklabels(), visible=False)
            lx.set_xticks([])
            lx.set_yticks([])
            lx.text(0.4, 0.5, track.mTitle)
            lx.legend(plots, labels, 'center left')
            if hasattr(track, "text"):
                lx.text(0.6, 0.2, track.text, size="smaller",
                        clip_on=True)

    ax.set_title(contig)
    # has to be set at the end, otherwise re-scaled?
    ax.set_xlim(min_x, max_x)

    return figure, legend
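
# The hand-rolled "reduce number of ticks by 2" step above can also be done with
# matplotlib's tick locators; a minimal sketch (not from the original module):
from matplotlib.ticker import MaxNLocator

def halve_yticks(ax):
    """Ask matplotlib for roughly half as many major y-ticks as currently shown."""
    ax.yaxis.set_major_locator(MaxNLocator(nbins=max(len(ax.get_yticks()) // 2, 2)))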
Code Example #56
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from supreme.lib import pywt
import time
import pylab

#r = pywfdb.Record('d:/mitdb/101')
#data = r.read(0, 5050, 1024)

data1 = pylab.array(range(1, 400) + range(398, 600) + range(601, 1024)) / 1024.
data2 = pylab.arange(612 - 80, 20, -0.5) / 250.
data2 = pylab.sin(40 * pylab.log(data2)) * pylab.sign((pylab.log(data2)))
from sample_data import ecg as data3

mode = pywt.MODES.sp1
DWT = 1


def plot(data, w, title):
    w = pywt.Wavelet(w)
    a = data
    ca = []
    cd = []
    if DWT:
        for i in xrange(5):
            (a, d) = pywt.dwt(a, w, mode)
            ca.append(a)
            cd.append(d)
    else:
        for a, d in pywt.swt(data, w, 5):
Code Example #57
File: conncon_targets.py  Project: bitSlayer29090/CSL
    'kernel': 0.5,
    'weights': {
        'uniform': {
            'min': 0.5,
            'max': 2.0
        }
    },
    'delays': 1.0
}
topo.ConnectLayers(a, b, conndict)

# first, clear existing figure, get current figure
pylab.clf()
fig = pylab.gcf()

# plot targets of two source neurons into same figure, with mask
for src_pos in [[15, 15], [0, 0]]:
    # obtain node id for center
    src = topo.GetElement(a, src_pos)
    topo.PlotTargets(src, b, mask=conndict['mask'], fig=fig)

# beautify
pylab.axes().set_xticks(pylab.arange(-1.5, 1.55, 0.5))
pylab.axes().set_yticks(pylab.arange(-1.5, 1.55, 0.5))
pylab.grid(True)
pylab.axis([-2.0, 2.0, -2.0, 2.0])
pylab.axes().set_aspect('equal', 'box')
pylab.title('Connection targets')

# pylab.savefig('conncon_targets.pdf')
Code Example #58
center_plot = 0, 0

for i in range(0, 100):
    points_plot.append([0, 0])

# Get default camera window size
ret, frame = cap.read()
Height, Width = frame.shape[:2]
frame_count = 0

# Initialize time varaible
then = time.time()

#####################################Plot Start#######################################
# Initialize plot parameters
xAchse = pylab.arange(0, 100, 1)
yAchse = pylab.array([0] * 100)

fig = pylab.figure(1)
ax = fig.add_subplot(121)
ay = fig.add_subplot(122)

ax.grid(True)
ay.grid(True)
ax.set_title("X vs Time")
ay.set_title("Y vs Time")
ax.set_xlabel("Time")
ax.set_ylabel("X Value")
ay.set_xlabel("Time")
ay.set_ylabel("Y Value")
ax.axis([0, 100, -1000, 1000])
Code Example #59
#Cs = m.arange(18.5, 19.15, .005)
#C2s = m.arange(-100.0, -103, -.2)
#def Fit(C, beta, C2):
#return C * m.exp(beta * t) + C2
#BEST 1430.84646506 (18.644999999999971, 6.8319999999999856, -102.0)

# Blowup model
#Cs = m.arange(193, 199, .125)
#betas = m.arange(5.5, 5.95, .0125)
#C2s = m.arange(1.32, 1.38, .00125)
#def Fit(C, beta, C2):
#return C / (C2 - t)**beta
#BEST 78624.1696635 (198.625, 5.8875000000000055, 1.3637499999999991)

# Exponential model
betas = m.arange(7.2, 8, .001)
Cs = m.arange(9, 11, .0025)
C2s = [0]


def Fit(C, beta, C2):
    return C * m.exp(beta * t)


#BEST 16703.0662184 (10.000000000000313, 7.6779999999999031, 0)


def Error(C, beta, C2):
    fit = Fit(C, beta, C2)  #beta / (C - t)
    error = (fit - sup)
    return m.dot(error, error)
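
# Brute-force scan sketch (not from the original file) of the grids defined above; it
# assumes `t` and `sup` (the series being fitted) are defined elsewhere in this script,
# and that the "#BEST ..." comments above record the output of a scan like this one.
best_err, (best_C, best_beta, best_C2) = min(
    (Error(C, beta, C2), (C, beta, C2))
    for C in Cs for beta in betas for C2 in C2s)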
Code Example #60
    def createKerdenSOMPlots(self):
        apFile.removeFilePattern(
            os.path.join(self.params['rundir'],
                         self.spectraTemporalFilesMask + ".png"))

        #logging.debug('Inside createKerdenSOMPlots')
        apDisplay.printMsg("Create Plots")
        codeVectorFileName = os.path.join(self.params['rundir'],
                                          self.timestamp + '.cod')
        f1 = open(codeVectorFileName, 'r')
        #Read first line, I need number of harmonic plus size
        line = f1.readline()
        splitline = line.split()
        numberHarmonic = int(splitline[0])
        xx = int(splitline[2])
        yy = int(splitline[3])
        numberCodevectors = xx * yy

        xmin = int(self.params['spectralowharmonic'])
        xmax = int(self.params['spectrahighharmonic'])
        #array with x and y values
        xvalues = []
        #fill x array with harmonic number
        for colNo in pylab.arange(xmin, xmax + 1):
            xvalues.append(colNo)

        #figure size in inches
        pylab.rcParams['figure.figsize'] = 1, 1
        pylab.rc("lines", linewidth=1.5)
        pylab.rc(('xtick', 'ytick', 'axes'), labelsize=4.0)  #fontsize

        #read code vector
        #compute y maximum
        ymax = 0.
        ymin = 150.
        for rowNo in range(numberCodevectors):
            line = f1.readline()
            splitLine = line.split()
            for colNo in pylab.arange(numberHarmonic):
                yval = float(splitLine[colNo])
                if ymax < yval:
                    ymax = yval
                if ymin > yval:
                    ymin = yval
        f1.close()
        ymax = math.ceil(ymax) + 1
        print "ymax ", ymax
        ymin = max(math.floor(ymin) - 1, 0)
        print "ymin ", ymin

        f1 = open(codeVectorFileName, 'r')
        #skip first line
        line = f1.readline()
        for rowNo in range(numberCodevectors):
            line = f1.readline()
            splitLine = line.split()
            #print line
            data = []
            for colNo in pylab.arange(numberHarmonic):
                data.append(float(splitLine[colNo]))
            print xvalues
            print data
            #clear previous plot
            pylab.clf()
            lines = pylab.plot(xvalues, data)
            pylab.ylim(ymin, ymax)
            pylab.xlim(xmin, xmax)
            pylab.xlabel('fold symmetry')
            pylab.ylabel('likelihood')
            pylab.xticks(xvalues)
            basefilename = os.path.join(
                self.params['rundir'],
                self.spectraTemporalFiles % (int(rowNo / yy), rowNo % xx))
            pylab.savefig(basefilename + ".png", dpi=256, format='png')