Example #1
def plot(data):
    for line in data:
        P.semilogy(line[4], line[5], label = line[0] + ' %i' % line[2], lw = 2)
    P.xlabel('Wavelength [AA]')
    P.ylabel('Sensitivity [(count/s/pixel) / (erg/s/cm**2/angstrom)]')
    P.legend()
    P.savefig('sensitivity.pdf')
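A minimal usage sketch for the snippet above; the data layout, sample values, and the pylab alias P are assumptions, not taken from the original project:

import numpy as np
import pylab as P

# Hypothetical data: each row is (name, ..., order, ..., wavelengths, sensitivities),
# i.e. indices 0, 2, 4 and 5 are what plot() reads.
wave = np.linspace(1000.0, 10000.0, 50)                  # wavelength grid [AA]
sens = 1e16 * np.exp(-((wave - 5000.0) / 2000.0) ** 2)   # made-up sensitivity curve
data = [('G140L', None, 1, None, wave, sens)]
plot(data)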
Example #2
def generateKineticsModel(reaction, tunneling='', plot=False):
    
    logging.info('Calculating rate coefficient for {0}...'.format(reaction))
    
    if len(reaction.reactants) == 1:
        kunits = 's^-1'
    elif len(reaction.reactants) == 2:
        kunits = 'm^3/(mol*s)'
    elif len(reaction.reactants) == 3:
        kunits = 'm^6/(mol^2*s)'
    else:
        kunits = ''
    
    Tlist = 1000.0/numpy.arange(0.4, 3.35, 0.05)
    klist = reaction.calculateTSTRateCoefficients(Tlist, tunneling)
    arrhenius = Arrhenius().fitToData(Tlist, klist, kunits)
    klist2 = arrhenius.getRateCoefficients(Tlist)
    
    reaction.kinetics = arrhenius
    
    if plot:
        logging.info('Plotting kinetics model for {0}...'.format(reaction))
        import pylab
        pylab.semilogy(1000.0 / Tlist, klist  * reaction.degeneracy, 'ok')
        pylab.semilogy(1000.0 / Tlist, klist2 * reaction.degeneracy, '-k')
        pylab.xlabel('1000 / Temperature (1000/K)')
        pylab.ylabel('Rate coefficient (SI units)')
        pylab.show()
Example #3
def psfplots():
	tpsf = wise.get_psf_model(1, pixpsf=True)
	
	psfp = tpsf.getPointSourcePatch(0, 0)
	psf = psfp.patch
	
	psf /= psf.sum()
	
	plt.clf()
	plt.imshow(np.log10(np.maximum(1e-5, psf)), interpolation='nearest', origin='lower')
	plt.colorbar()
	ps.savefig()
	
	h,w = psf.shape
	cx,cy = w/2, h/2
	
	X,Y = np.meshgrid(np.arange(w), np.arange(h))
	R = np.sqrt((X - cx)**2 + (Y - cy)**2)
	plt.clf()
	plt.semilogy(R.ravel(), psf.ravel(), 'b.')
	plt.xlabel('Radius (pixels)')
	plt.ylabel('PSF value')
	plt.ylim(1e-8, 1.)
	ps.savefig()
	
	plt.clf()
	plt.loglog(R.ravel(), psf.ravel(), 'b.')
	plt.xlabel('Radius (pixels)')
	plt.ylabel('PSF value')
	plt.ylim(1e-8, 1.)
	ps.savefig()
	
	print('PSF norm:', np.sqrt(np.sum(np.maximum(0, psf)**2)))
	print('PSF max:', psf.max())
Example #4
def plotHousing(impression):
    f=open('midWestHousingPrices.txt','r')
    labels,prices=[],[]
    for line in f:
        year,quarter,price=line.split(' ')
        label=year[2:4]+'\n'+quarter[:]
        labels.append(label)
        prices.append(float(price)/1000)
    quarters=pylab.arange(len(labels))
    width=0.8
    if impression=='flat':
        pylab.semilogy()
    pylab.bar(quarters,prices,width)
    pylab.xticks(quarters+width/2.0,labels)
    pylab.title('Housing Prices in U.S. Midwest')
    pylab.xlabel('Quarter')
    pylab.ylabel('Average Price ($1000\'s)')
    if impression=='flat':
        pylab.ylim(10,10**3)
    elif impression=='volatile':
        pylab.ylim(180,220)
    elif impression=='fair':
        pylab.ylim(150,250)
    else:
        raise ValueError
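The contents of midWestHousingPrices.txt are not shown; a hedged sketch of the assumed one-record-per-line "year quarter price" format and a call:

# midWestHousingPrices.txt (hypothetical contents):
# 2008 Q2 212000
# 2008 Q3 209000
plotHousing('fair')
pylab.show()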
Example #5
def plotEventFlop(library, num, eventNames, sizes, times, events, filename = None):
  from pylab import legend, plot, savefig, semilogy, show, title, xlabel, ylabel
  import numpy as np

  arches = list(sizes.keys())
  bs     = list(events[arches[0]].keys())[0]
  data   = []
  names  = []
  for event, color in zip(eventNames, ['b', 'g', 'r', 'y']):
    for arch, style in zip(arches, ['-', ':']):
      if event in events[arch][bs]:
        names.append(arch+'-'+str(bs)+' '+event)
        data.append(sizes[arch][bs])
        data.append(1e-3*np.array(events[arch][bs][event])[:,1])
        data.append(color+style)
      else:
        print('Could not find %s in %s-%d events' % (event, arch, bs))
  semilogy(*data)
  title('Performance on '+library+' Example '+str(num))
  xlabel('Number of Dof')
  ylabel('Computation Rate (GF/s)')
  legend(names, loc='upper left', shadow = True)
  if filename is None:
    show()
  else:
    savefig(filename)
  return
Example #6
def flipPlot(minExp, maxExp, numTrials):
    meanRatios = []
    meanDiffs = []
    ratiosSDs =  []
    diffsSDs =  []
    xAxis = []
    for exp in range(minExp, maxExp + 1):
        xAxis.append(2**exp)
    for numFlips in xAxis:
        ratios = []
        diffs = []
        for t in range(numTrials):
            numHeads, numTails = runTrial(numFlips)
            ratios.append(numHeads/float(numTails))
            diffs.append(abs(numHeads - numTails))
        meanRatios.append(sum(ratios)/numTrials)
        meanDiffs.append(sum(diffs)/numTrials)
        ratiosSDs.append(stdDev(ratios))
        diffsSDs.append(stdDev(diffs))
    pylab.plot(xAxis, meanRatios, 'bo')
    pylab.title('Mean Heads/Tails Ratios ('
                + str(numTrials) + ' Trials)')
    pylab.xlabel('Number of Flips')
    pylab.ylabel('Mean Heads/Tails')
    pylab.semilogx()
    pylab.figure()
    pylab.plot(xAxis, ratiosSDs, 'bo')
    pylab.title('SD Heads/Tails Ratios ('
                + str(numTrials) + ' Trials)')
    pylab.xlabel('Number of Flips')
    pylab.ylabel('Standard Deviation')
    pylab.semilogx()
    pylab.semilogy()
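flipPlot above relies on runTrial and stdDev helpers that are not shown in this snippet; a minimal sketch of what they are assumed to do:

import random

def runTrial(numFlips):
    """Assumed helper: flip a fair coin numFlips times, return (numHeads, numTails)."""
    numHeads = sum(1 for _ in range(numFlips) if random.random() < 0.5)
    return numHeads, numFlips - numHeads

def stdDev(X):
    """Assumed helper: population standard deviation of the values in X."""
    mean = sum(X) / len(X)
    return (sum((x - mean) ** 2 for x in X) / len(X)) ** 0.5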
Example #7
    def plot(self, outputDirectory):
        """
        Plot both the raw kinetics data and the Arrhenius fit versus 
        temperature. The plot is saved to the file ``kinetics.pdf`` in the
        output directory. The plot is not generated if ``matplotlib`` is not
        installed.
        """
        # Skip this step if matplotlib is not installed
        try:
            import pylab
        except ImportError:
            return

        Tlist = 1000.0/numpy.arange(0.4, 3.35, 0.05)
        klist = numpy.zeros_like(Tlist)
        klist2 = numpy.zeros_like(Tlist)
        for i in range(Tlist.shape[0]):
            klist[i] = self.reaction.calculateTSTRateCoefficient(Tlist[i])
            klist2[i] = self.reaction.kinetics.getRateCoefficient(Tlist[i])

        order = len(self.reaction.reactants)
        klist *= 1e6 ** (order-1)
        klist2 *= 1e6 ** (order-1)

        pylab.semilogy(1000.0 / Tlist, klist, 'ok')
        pylab.semilogy(1000.0 / Tlist, klist2, '-k')
        pylab.xlabel('1000 / Temperature (1000/K)')
        pylab.ylabel('Rate coefficient ({0})'.format(self.kunits))
        pylab.savefig(os.path.join(outputDirectory, 'kinetics.pdf'))
        pylab.close()
Example #8
def plot_ups(col):
    ''' Plot the hydrogen collision strength as a function of temperature
    '''
    global iplot
    t = col.t_list
    ups = col.ups_list
    ref = col.ref
    #Name of the output plot file
    ofname = format(iplot, '0>4')+'_ups_h.pdf'    
    #Figure
    pl.figure(dpi=100, facecolor='w', edgecolor='k')
    #Title of the figure
    ll = col.lower
    ul = col.upper
    title = (SPE + ' ' + DEG + ': ' + str(ll.cfg) + ' ' + str(ll.term) + ' [g=' + str(ll.g) + '] '
             + str(ul.cfg) + ' ' + str(ul.term) + ' [g=' + str(ul.g) + '], '
             + r'$\lambda =$' + format(col.lbd, '8.1f') + r' $\AA$')
    #Axes
    pl.xlabel('T [K]')
    pl.ylabel(r'$\Upsilon_H$')
    pl.semilogy()
    pl.ylim([1e-6, 1e5])
    pl.title(title)
    #Plot
    pl.plot(t, ups, 'k', label=col.ref+' ('+col.type.lower()+')')
    pl.legend()
    pl.savefig(ofname, dpi=100, format='pdf', orientation='landscape', papertype='a4')
    pl.close()
def flipPlot(minExp,maxExp):
    '''Assumes minExp and maxExp are positive integers with minExp < maxExp;
       plots the results of 2**minExp to 2**maxExp coin flips.'''
    ratios=[]
    diffs=[]
    xAxis=[]
    for exp in range(minExp,maxExp+1):
        xAxis.append(2**exp)
    for numFlips in xAxis:
        numHeads=0
        for n in range(numFlips):
            if random.random()<0.5:
                numHeads+=1
        numTails=numFlips-numHeads
        ratios.append(numHeads/float(numTails))
        diffs.append(abs(numHeads-numTails))
    pylab.title('Difference Between Heads and Tails')
    pylab.xlabel('Number of Flips')
    pylab.semilogx()
    pylab.semilogy()
    pylab.ylabel('Abs(#Heads-#Tails)')
    pylab.plot(xAxis,diffs,'bo')
    pylab.figure()
    pylab.title('Heads/Tails Ratios')
    pylab.xlabel('Number of Flips')
    pylab.semilogx()
    pylab.ylabel('#Heads/#Tails')
    pylab.plot(xAxis,ratios,'bo')
Example #10
 def plot_step_size(self):
     P.semilogy(self.simulated_time,N.diff([0.0]+self.simulated_time),drawstyle="steps-pre")
     P.ylabel("Step-size")
     P.xlabel("Time [s]")
     P.title("Step-size history")
     P.grid()
     P.show()
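A self-contained sketch of the same step-size plot, assuming the N/P aliases (numpy/pylab) used by the method above and a made-up list of accepted time points:

import numpy as N
import pylab as P

simulated_time = [0.0, 0.1, 0.25, 0.45, 0.7, 1.0]   # hypothetical solver time points
P.semilogy(simulated_time, N.diff([0.0] + simulated_time), drawstyle="steps-pre")
P.ylabel("Step-size")
P.xlabel("Time [s]")
P.title("Step-size history")
P.grid()
P.show()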
Example #11
def flipPlot(minExp, maxExp):
    """Assumes minExp and maxExp positive integers; minExp < maxExp
    Plots results of 2**minExp to 2**maxExp coin flips"""
    ratios = []
    diffs = []
    xAxis = []
    for exp in range(minExp, maxExp + 1):
        xAxis.append(2 ** exp)
    for numFlips in xAxis:
        numHeads = 0
        for n in range(numFlips):
            if random.random() < 0.5:
                numHeads += 1
        numTails = numFlips - numHeads
        ratios.append(numHeads / float(numTails))
        diffs.append(abs(numHeads - numTails))
            
    pylab.title('Difference Between Heads and Tails')
    pylab.xlabel('Number of Flips')
    pylab.ylabel('Abs(#Heads - #Tails)')
    pylab.rcParams['lines.markersize'] = 10
    pylab.semilogx()
    pylab.semilogy()
    pylab.plot(xAxis, diffs, 'bo')
    pylab.figure()
    pylab.title('Heads/Tails Ratios')
    pylab.xlabel('Number of Flips')
    pylab.ylabel('Heads/Tails')
    pylab.plot(xAxis, ratios)
Example #12
	def plotHist(self):
		p.figure()
		p.semilogy(list(self.counts.keys()), list(self.counts.values()), '.')
		p.xlabel('Log-return')
		p.ylabel('Count')
		p.title(self.symbol)
		p.show()
Example #13
def qdisk_plot(root):

	# some labels
	ylabels = ["Heating", r"$N_{\mathrm{hit}}$", r"$N_{\mathrm{hit}}/N_{\mathrm{tot}}$", 
	r"$T_{\mathrm{heat}}$", r"$T_{\mathrm{irrad}}$", r"$W_{\mathrm{irrad}}$"]

	log_lin = [1,0,0,1,1,1]


	p.figure(figsize=(9,10))

	disk_diag = "diag_%s/%s.disk.diag" % (root, root)

	# read the disk_diag file
	a = ascii.read(disk_diag)

	# cycle through the physical quantities and plot for each annulus
	for j, name in enumerate(a.colnames[3:]):
		
		p.subplot(3,2,j+1)
		p.plot(a[name], ls="steps", c="k", linewidth=2)
		p.ylabel(ylabels[j])
		p.xlabel("Annulus")

		if log_lin[j]:
			p.semilogy()

	p.savefig("qdisk_%s.png" % root, dpi=300)
Example #14
def standard_plot(data, rate):
    sample_length = len(data)
    k = arange(sample_length)
    period = sample_length / rate
    freqs = (k / period)[range(sample_length // 2)] #right-side frequency range
    Y = (fft(data) / sample_length)[range(sample_length // 2)]
    semilogy(freqs, abs(Y)) # plotting the spectrum
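A quick usage sketch; the explicit imports below are assumptions standing in for the module's own (probably star) imports:

from numpy import arange, sin, pi, abs
from numpy.fft import fft
from pylab import semilogy, show

rate = 1000.0                                            # made-up sample rate [Hz]
t = arange(0, 1, 1 / rate)
signal = sin(2 * pi * 50 * t) + 0.5 * sin(2 * pi * 120 * t)
standard_plot(signal, rate)                              # spectrum shows peaks near 50 and 120 Hz
show()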
Example #15
def flipPlot(minExp,maxExp):
	ratios = []
	diffs = []
	xAxis = []

	for exp in range(minExp,maxExp+1):
		xAxis.append(2 ** exp)
	print "xAxis: ", xAxis
			
	for numFlips in xAxis:
		numHeads = 0
		for n in range(numFlips):
			if random.random() < 0.5:
				numHeads += 1
		numTails = numFlips - numHeads
		ratios.append(numHeads/float(numTails))
		diffs.append(abs(numHeads - numTails))

	pylab.figure()
	pylab.title('Difference Between Heads and Tails')
	pylab.xlabel('Number of Flips')
	pylab.ylabel('Abs(#Heads - #Tails)')
	pylab.plot(xAxis, diffs, 'bo') #do not connect, show dot
	pylab.semilogx()
	pylab.semilogy()
	pylab.figure()
	pylab.plot(xAxis, ratios, 'bo') #do not connect, show dot
	pylab.title('Heads/Tails Ratios')
	pylab.xlabel('Number of Flips')
	pylab.ylabel('Heads/Tails')
	pylab.semilogx()
def plot():
    plots = []
    xs = []
    for filename, label, style in lines_to_plot:
        x = []
        y = []
        f = open('parser_results/' + filename)
        for line in f:
            data = line.split()
            time = data[0]
            num_gp = data[1]
            #x.append(time_min)
            #y.append(num_gp)
            x.append(int(time))
            y.append(int(num_gp))
        pylab.semilogy(x, y, style, label=label)
        
        #pylab.axis([0, 1500, 0, 10])


    pylab.legend(loc='upper left')
    pylab.savefig(output_filename)
#    pylab.close()
    

    print('Output saved to:', output_filename)
Example #17
def hanning_standard_plot(data, rate):
    sample_length = len(data)
    k = arange(sample_length)
    period = sample_length / rate
    freqs = (k / period)[range(sample_length // 2)] #right-side frequency range
    Y = (fft(data * np.hanning(sample_length)) / sample_length)[range(sample_length // 2)]
    semilogy(freqs, abs(Y))
Example #18
def lp_sensitivity():
    """
    Read in a number of GC L' data sets and plot the 
    SNR vs. mag with number of frames plotted.
    """
    rootDir = "/u/jlu/doc/proposals/keck/uc/10B/orion/"
    files = [rootDir + "mag04jul_lp_rms.lis", rootDir + "mag05jullgs_lp_rms.lis", rootDir + "mag06jullgs_lp_rms.lis"]
    legends = ["04jul", "05jullgs", "06jullgs"]

    py.clf()

    magStep = 1.0
    magBins = np.arange(6, 18, magStep)
    snrAvg = np.zeros(len(magBins))
    for ff in range(len(files)):
        tab = asciidata.open(files[ff])

        mag = tab[1].tonumpy()
        snr = tab[7].tonumpy()
        cnt = tab[9].tonumpy()

        for mm in range(len(magBins) - 1):
            magLo = magBins[mm] - magStep / 2.0
            magHi = magBins[mm] + magStep / 2.0
            idx = np.where((mag > magLo) & (mag <= magHi))[0]

            snrAvg[mm] = snr[idx].mean()

        py.semilogy(magBins, snrAvg)

        legends[ff] += ": N = %d" % cnt[0]
    py.legend(legends)
    py.show()
Example #19
def plotHousing(impression):
    """假设impression是一个字符串,必须是‘flat’, ‘volatile’或者是‘fair’
       生成房价随时间变化的图表"""
    f = open("midWestHousingPrices.txt", "r")
    # Each line of the file is: year quarter price
    # The data cover the U.S. Midwest region
    labels, prices = ([], [])
    for line in f:
        year, quarter, price = line.split(" ")
        label = year[2:4] + "\n Q" + quarter[1]
        labels.append(label)
        prices.append(float(price) / 1000)
    quarters = pylab.arange(len(labels))
    width = 0.8
    if impression == "flat":
        pylab.semilogy()
    pylab.bar(quarters, prices, width)
    pylab.xticks(quarters + width / 2.0, labels)
    pylab.title("Housing Prices in U.S. Midwest")
    pylab.xlabel("Quarter")
    pylab.ylabel("Average Price($1,000's)")
    if impression == "flat":
        pylab.ylim(10, 10 ** 3)
    elif impression == "volatile":
        pylab.ylim(180, 220)
    elif impression == "fair":
        pylab.ylim(150, 250)
    else:
        raise ValueError
Example #20
def testPlot2(trials=51, maxsteps=5000):
    f = FunctionWrapper(trials, OptimumJumper(StochQuad(noiseLevel=10, curvature=1), jumptime=1000, jumpdist_std=1))
    for aclass, aparams in [#(SGD, {'learning_rate':0.1}),
                            #(SGD, {'learning_rate':0.01}),
                            #(AveragingSGD, {'learning_rate':0.01}),
                            #(AveragingSGD, {'learning_rate':0.01, 'fixedDecay':0.1}),
                            #(AveragingSGD, {'learning_rate':0.01, 'fixedDecay':0.1}),
                            #(AveragingSGD, {'learning_rate':0.1}),
                            #(AveragingSGD, {'learning_rate':1.0}),
                            (AveragingOracle, {}),
                            (AveragingOracle, {"fixedDecay":0.1}),
                            #(AveragingOracle, {"fixedDecay":0.01}),
                            (AdaptivelyAveragingOracle, {}),
                            #(AdaGrad, {'init_lr':0.3}),
                            #(Amari, {'init_lr':0.1, 'time_const':100}),
                            #(RMSProp, {'init_lr':0.1}),
                            (OracleSGD, {}),
                            #(vSGD, {'verbose':False}),
                            #(vSGDfd, {}),
                            ]:
        ls = lossTraces(fwrap=f, aclass=aclass, dim=trials,
                        maxsteps=maxsteps, algoparams=aparams)
        plotWithPercentiles(ls, algo_colors[aclass], aclass.__name__)
    pylab.semilogy()
    pylab.xlim(0, maxsteps)
    pylab.legend()
    pylab.show()
Example #21
def testPlot1(trials=20):
    f = FunctionWrapper(trials, StochQuad(noiseLevel=0.2))
    ls = lossTraces(fwrap=f, aclass=SGD, dim=trials, maxsteps=100, algoparams={'learning_rate':0.2})
    pylab.plot(ls, 'b:')
    pylab.plot(pylab.mean(ls, axis=1), 'r-')
    pylab.semilogy()
    pylab.show()
Example #22
def flipPlot(minExp, maxExp):
    """minExpとmaxExpは minExp < maxExp を満たす正の整数とする
    2**minExp から 2**maxExp 回のコイン投げの結果をプロットする"""
    ratios = []
    diffs = []
    xAxis = []
    for exp in range(minExp, maxExp + 1):
        xAxis.append(2**exp)
    for numFlips in xAxis:
        numHeads = 0
        for n in range(numFlips):
            if random.random() < 0.5:
                numHeads += 1
        numTails = numFlips - numHeads
        ratios.append(numHeads / float(numTails))
        diffs.append(abs(numHeads - numTails))
    pylab.title('Difference Between Heads and Tails ')
    pylab.xlabel('Number of Flips')
    pylab.ylabel('Abs(#Heads - #Tails)')
    pylab.semilogx()
    pylab.semilogy()
    pylab.plot(xAxis, diffs, 'bo')
    pylab.figure()
    pylab.title('Heads/Tails Ratios')
    pylab.xlabel('Number of Flips')
    pylab.ylabel('#Heads/#Tails')
    pylab.semilogx()
    pylab.semilogy()
    pylab.plot(xAxis, ratios, 'bo')
Example #23
def make_plots():
    work_dir = "/u/cmutnik/work/upperSco_copy/finished/"

    # Read in data
    files = glob.glob(work_dir + "*.fits")

    specs = []

    for ff in range(len(files)):
        spec = fits.getdata(files[ff])

        if ff == 0:
            tot0 = spec[1].sum()

        spec[1] *= tot0 / spec[1].sum()

        specs.append(spec)

    # Plot
    plt.clf()
    for ff in range(len(files)):
        legend = files[ff].split("/")[-1]
        plt.semilogy(specs[ff][0], specs[ff][1], label=legend)

    plt.legend(loc="lower left")
    plt.xlim(0.7, 2.55)

    return
Example #24
def plot_track_props(tracks, nx, ny, len_cutoff=20):
    pl.ioff()
    wdist = wraparound_dist(nx, ny)
    val_fig = pl.figure()
    area_fig = pl.figure()
    psn_fig = pl.figure()
    delta_vals = []
    delta_dists = []
    for tr in tracks:
        if len(tr) < len_cutoff:
            continue
        idxs, regs = zip(*tr)
        delta_vals.extend([abs(regs[idx].val - regs[idx + 1].val) for idx in range(len(regs) - 1)])
        dists = [wdist(regs[i].loc, regs[i + 1].loc) for i in range(len(regs) - 1)]
        delta_dists.extend([abs(dists[idx] - dists[idx + 1]) for idx in range(len(dists) - 1)])
        pl.figure(val_fig.number)
        pl.plot(idxs, [reg.val for reg in regs], "s-")
        pl.figure(area_fig.number)
        pl.semilogy(idxs, [reg.area for reg in regs], "s-")
        pl.figure(psn_fig.number)
        pl.plot(idxs[:-1], dists, "s-")
    pl.figure(val_fig.number)
    pl.savefig("val_v_time.pdf")
    pl.figure(area_fig.number)
    pl.savefig("area_v_time.pdf")
    pl.figure(psn_fig.number)
    pl.savefig("psn_v_time.pdf")
    pl.figure()
    pl.hist(delta_vals, bins=int(pl.sqrt(len(delta_vals))))
    pl.savefig("delta_vals.pdf")
    pl.figure()
    pl.hist(delta_dists, bins=int(pl.sqrt(len(delta_dists))))
    pl.savefig("delta_dists.pdf")
    pl.close("all")
Example #25
def demo():
    import pylab

    # The module normalize is not part of the osrefl code base.
    from reflectometry.reduction import normalize

    from .examples import ng7 as dataset
    spec = dataset.spec()[0]
    water = WaterIntensity(D2O=20,probe=spec.probe)
    spec.apply(normalize())
    theory = water.model(spec.Qz,spec.detector.wavelength)

    pylab.subplot(211)
    pylab.title('Data normalized to water scattering (%g%% D2O)'%water.D2O)
    pylab.xlabel('Qz (inv Ang)')
    pylab.ylabel('Reflectivity')
    pylab.semilogy(spec.Qz,theory,'-',label='expected')
    scale = theory[0]/spec.R[0]
    pylab.errorbar(spec.Qz,scale*spec.R,scale*spec.dR,fmt='.',label='measured')

    spec.apply(water)
    pylab.subplot(212)
    #pylab.title('Intensity correction factor')
    pylab.xlabel('Slit 1 opening (mm)')
    pylab.ylabel('Incident intensity')
    pylab.yscale('log')
    pylab.errorbar(spec.slit1.x,spec.R,spec.dR,fmt='.',label='correction')

    pylab.show()
Example #26
def showGrowth(lower, upper):
    log = []
    linear = []
    quadratic = []
    logLinear = []
    exponential = []
    for n in range(lower, upper+1):
        log.append(math.log(n, 2))
        linear.append(n)
        logLinear.append(n*math.log(n, 2))
        quadratic.append(n**2)
        exponential.append(2**n)
    pylab.plot(log, label = 'log')
    pylab.plot(linear, label = 'linear')
    pylab.legend(loc = 'upper left')
    pylab.figure()
    pylab.plot(linear, label = 'linear')
    pylab.plot(logLinear, label = 'log linear')
    pylab.legend(loc = 'upper left')
    pylab.figure()
    pylab.plot(logLinear, label = 'log linear')
    pylab.plot(quadratic, label = 'quadratic')
    pylab.legend(loc = 'upper left')
    pylab.figure()
    pylab.plot(quadratic, label = 'quadratic')
    pylab.plot(exponential, label = 'exponential')
    pylab.legend(loc = 'upper left')
    pylab.figure()
    pylab.plot(quadratic, label = 'quadratic')
    pylab.plot(exponential, label = 'exponential')
    pylab.semilogy()
    pylab.legend(loc = 'upper left')
    return
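A minimal driver for showGrowth, assuming the usual math/pylab imports at module level:

import math
import pylab

showGrowth(1, 10)   # compare growth rates for n = 1..10 (opens several figures)
pylab.show()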
def new_draw_parcel_trace(Tb, PLCL, Press):

	# Convert Pressures to log scale
	Pfact = np.multiply(skewness,np.log10(np.divide(1000., Press)))

	parcelT = []
	flag = 1

	for p in range(len(Press)):
		if Press[p] >= PLCL:
			newTB = ((Tb + 273.) * (Press[p]/Press[0]) ** (287.04/1004.)) - 273.
			parcelT.append(newTB)
		else:
			if flag:
				if p == 0:
					moists = draw_moist_adiabats(0, 1, Tb, 0)
				else:
					moists = draw_moist_adiabats(0,1,parcelT[p-1], (p - 1 + len(press_levels) - len(Press)))
				for m in moists:
					parcelT.append(m)
				flag = 0


	minlen = min(len(parcelT), len(Pfact))
	
	dry_parcel_trace = np.add(parcelT[:minlen], Pfact[:minlen])



	pylab.semilogy(dry_parcel_trace,Press[:minlen],\
		basey=10, color = 'brown', linestyle = 'dotted',\
		linewidth = 1.5)
Example #28
def test_calcpow():
    N1 = 128
    N2 = 128
    t1 = numpy.arange(N1)
    t2 = numpy.arange(N2)
    y1 = numpy.sin(t1*16.*numpy.pi/N1) + numpy.cos(t1*64.*numpy.pi/N1)
    y2 = numpy.sin(t2*16.*numpy.pi/N2) + numpy.sin(t2*32.*numpy.pi/N1)
    
    x = y1[:,None]*y2[None,:]
    x += 0.1*numpy.random.normal(size=(N1,N2))
    
    dt = 2.0

    ell,Pl = calcpow(x,dt,Nl=100)

    pylab.figure()
    pylab.imshow(x)
    pylab.colorbar()

    pylab.figure()
    pylab.semilogy(ell,Pl)
    
    i = numpy.argmax(Pl)
    print "scale of Pmax: %.3g arcmin" % (180.*60./ell[i])
    
    pylab.show()
def demo_perfidious(n):
    plt.figure()

    r = (np.arange(n)+1)/float(n+1)

    bases = [(PowerBasis(), "Power"),
             (ChebyshevBasis(interval=(1./(n+1),n/float(n+1))),"Chebyshev"), 
             (LagrangeBasis(interval=(1./(n+1),n/float(n+1))),"Lagrange"), 
             (LagrangeBasis(r),"Specialized Lagrange")]

    xs = np.linspace(0,1,50*n)
    
    for (i,(b,l)) in enumerate(bases):
        p = b.from_roots(r)
        plt.subplot(len(bases),1,i+1)
        plt.semilogy(xs,np.abs(p(xs)),label=l)
        plt.xlim(0,1)
        plt.ylim(bottom=1)
        
        for j in range(n):
            plt.axvline((j+1)/float(n+1),linestyle=":",color="black")
        plt.legend(loc="best")
    print(b.points)
    print(p.coefficients)
    plt.subplot(len(bases),1,1)
    plt.title('The "perfidious polynomial" for n=%d' % n)
def plotDependencyEPS():

    """Plot thoretical dependency between n_components and eps"""
    
    # range of admissible distortions
    eps_range = np.linspace(0.01, 0.99, 100)

    # range of number of samples to embed
    n_samples_range = np.logspace(2, 6, 5)
    colors = pl.cm.Blues(np.linspace(0.3, 1.0, len(n_samples_range)))

    pl.figure()

    for n_samples, color in zip(n_samples_range, colors):
        min_n_components = johnson_lindenstrauss_min_dim(n_samples, \
                                                         eps=eps_range)
        pl.semilogy(eps_range, min_n_components, color=color)

    pl.legend(["n_samples = %d" % n for n in n_samples_range], \
              loc="upper right")

    pl.xlabel("Distortion eps")
    pl.ylabel("Minimum number of dimensions")
    pl.title("Johnson-Lindenstrauss bounds:\nn_components vs eps")
    pl.show()
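The snippet above assumes pl is pylab and that johnson_lindenstrauss_min_dim is already in scope; under that assumption, the missing setup would look roughly like:

import numpy as np
import pylab as pl
from sklearn.random_projection import johnson_lindenstrauss_min_dim

plotDependencyEPS()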
Example #31
print("Calculating CONVERGENCE\n\n")
cvg = []
ncvg = []

for i in range(max(idx)):
    if i != max(idx):
        cvg.append((n[i + 1] - n[i]) / np.float64(NUMNEWPOINTS))
        ncvg.append(i + 1)
        print(('Acceptance while making bank %d is %f' %
               (ncvg[i], 100. * cvg[i])) + ' %')

print("\nPlotting the ACCEPTANCE RATIO vs THE BANK BEING MADE\n\n")

pb.figure(pi)
pi += 1
pb.semilogy(ncvg, cvg, 'o-')
pb.xlabel('Index of the bank being made')
pb.ylabel('Acceptance Ratio (per $%d$ steps)' % NUMNEWPOINTS)
pb.savefig('plots/ConVsNumBank.png', dpi=300, format='png')

print("\nPlotting the banks themselves\n\n")

for idx in range(NUMBANKSMADE):
    fname = 'banks/bank_%d.xml' % idx
    print("Reading bank %s" % fname)

    bdoc = ligolw_utils.load_filename(fname)
    btab = table.get_table(bdoc, lsctables.SimInspiralTable.tableName)

    m1 = []
    m2 = []
Example #32
    def plot_quantity(self,
                      field_label,
                      y_field_index,
                      y_field_axis_label="",
                      x_field_index='Cycle',
                      x_field_axis_label="Cycle Number",
                      filename="performance.png",
                      repeated_field="",
                      log_y_axis="Auto",
                      smooth_len=0,
                      bounds="Off",
                      fractional=False,
                      xlim=[],
                      ylim=[]):
        """
        Produce a plot for the given quantity(s) from the performance data.
    
        Parameters
        ----------
        field_label : string or array_like of strings
            The label of the field you wish to plot.  If you wish to plot
            multiple fields, enumerate them in an array or tuple. Ex: "Level 0"
        y_field_index : string
            The index of the field you wish to plot on the y axis. 
            Ex: "Cycle", "Mean Time", "Stddev Time", "Min Time", "Max Time",
            "Cell Updates", "Num Grids", "Updates/processor/sec".
            If you have a single value for many field_labels, it is assumed
            that such a value will index all of them.  If you have an array_like
            structure of strings, each one will index the corresponding key in 
            field_label.
        y_field_axis_label : string, optional
            The y axis label on the resulting plot. Default = the y_field_index
            of the recarray.
        x_field_index : string, optional
            The index of the field you wish to plot on the x axis.
            Default = "Cycle"
        x_field_axis_label : string, optional
            The x axis label on the resulting plot. Default = "Cycle Number"
        filename : string, optional
            The filename where I will store your plotted data.
        repeated_field : string, optional
            If you have a regularly named set of fields you wish to plot 
            against each other (e.g. "Level 0", "Level 1", "Level 2"), then
            include the string here and they will all be included automatically
            and in order (e.g. "Level").  There are two special cases to this
            parameter.  "Non-Level" includes all fields without "Level" in the 
            name (or "Total"), and "All" includes all fields.
        log_y_axis : string, optional
            This controls whether the plot will use logarithmic units for the
            y axis.  Valid settings are "Auto", "On", and "Off".  When "Auto" is
            used, the code automatically recognizes when you have a maximum 
            y value more than 3 orders of magnitude greater than your minimum y
            value (for non-zero values) at which point it plots the y axis in 
            log units.
        smooth_len : int, optional
            This value controls the amount by which smoothing occurs over
            N consecutive cycles of data.  Default = 0 (i.e. None). 
            Must be an odd number (recommended 5-11)
        bounds : string, optional
            This controls whether to overplot additional bounding data over
            the existing plotted quantities.  Valid values of this variable
            are "minmax", "sigma" and "Off".  "minmax" overplots the minima and
            maxima bounds, whereas "sigma" plots the mean +/- 1 sigma bounds.
        fractional : bool, optional
            When set to true, the plotted values are shown as fractions of the
            equivalent field in "Total".
        xlim, ylim : array_like, optional
            Set these variables to 2-element lists/arrays in order to
            explicitly set your plot limits
    
        See Also
        --------
        plot_stack, plot_maxmin
    
        Examples
        --------
        To produce a simple plot of the mean time taken over the course of 
        the simulation to run the RebuildHierarchy section of code.
        Save this plot to performance.png:
    
        >>> plot_quantity("RebuildHierarchy", "Mean Time")
    
        To produce a plot comparing the RebuildHierarchy and SolveHydroEquations
        maximum time taken over the course of the simulation and save it 
        to file "test.png":
    
        >>> plot_quantity(["RebuildHierarchy", "SolveHydroEquations"],
        "Max Time", "Maximum Time (sec)", filename="test.png")
    
        To produce a plot comparing the maximum time from RebuildHierarchy and
        the minimum time from SolveHydroEquations taken over the course of the 
        simulation and save it to file "test.png":
    
        >>> plot_quantity(["RebuildHierarchy", "SolveHydroEquations"],
        ["Max Time", "Min Time"], "Time (sec)", filename="test.png")
    
        To produce a plot comparing the mean time taken by all of the different
        levels over the course of the simulation and save it to file "test.png": 
        >>> plot_quantity([], "Mean Time", "Mean Time (sec)", 
        filename="test.png", repeated_field="Level")
        """
        ax = pl.subplot(111)
        data = self.data
        extrema = np.zeros(5)
        min_bound_extrema = np.zeros(5)
        max_bound_extrema = np.zeros(5)

        ### Convert plots of single quantities to list format for homogeneous
        ### processing.
        if not is_listlike(field_label):
            field_label = [field_label]

        ### If there is a repeated_field, figure out how many fields
        ### there are including any that were defined in the original
        ### field_label argument.
        if repeated_field:
            key_list = list(data.keys())
            if repeated_field == "All":
                field_label = key_list

            elif repeated_field == "Non-Level":
                for key in key_list:
                    if not key.startswith("Level") and \
                       not key.startswith("Total"):
                        field_label.append(key)
            else:
                for key in key_list:
                    if key.startswith(repeated_field):
                        field_label.append(key)
        num_fields = len(field_label)
        field_label.sort()

        ### If y_field_index is a single index, then replace it with a list of
        ### identical indices
        if not is_listlike(y_field_index):
            y_field_index = num_fields * [y_field_index]

        ### Total number of cycles in data.
        num_cycles = len(data[field_label[0]][x_field_index])

        ### Create a normalization vector to use on each vector
        ### before plotting.  In non-fractional case, this vector is 1.
        if fractional:
            norm = data['Total']
        else:
            records = [('Cycle', 'float'), ('Mean Time', 'float'),
                       ('Stddev Time', 'float'), ('Min Time', 'float'),
                       ('Max Time', 'float'), ('Cell Updates', 'float'),
                       ('Num Grids', 'float'),
                       ('Updates/processor/sec', 'float')]
            norm = np.ones(data['Total'].shape, dtype=records)

        ### Loop through the y datasets to figure out the extrema
        for i in range(len(field_label)):
            xdata = data[field_label[i]][x_field_index]
            ydata = data[field_label[i]][y_field_index[i]] / \
                    norm[y_field_index[i]]
            if smooth_len:
                ydata = smooth(ydata, smooth_len)
            extrema = preserve_extrema(extrema, xdata, ydata)

        ### If there's only one cycle, create an artificial xdata
        if num_cycles == 1:
            xdata = np.tile(xdata, 3) + [-0.1, 0.0, 0.1]

        if log_y_axis == "Auto":
            if extrema[3] / extrema[2] > 1e3:
                log_y_axis = "On"
            else:
                log_y_axis = "Off"

        ### Now for the actual plotting
        for i in range(len(field_label)):
            color = cm.jet(1. * i / num_fields)
            ydata = data[field_label[i]][y_field_index[i]] / \
                    norm[y_field_index[i]]

            ### If there's only one cycle, tile ydata to match
            ### artificial xdata
            if num_cycles == 1:
                ydata = np.tile(ydata, 3)

            if smooth_len:
                ydata = smooth(ydata, smooth_len)
            if log_y_axis == "On":
                pl.semilogy(
                    xdata, ydata, color=color,
                    label=field_label[i])  #, marker='s', ms=5+i, alpha=0.7)
            else:
                pl.plot(
                    xdata, ydata, color=color,
                    label=field_label[i])  #, marker='s', ms=5+i, alpha=0.7)
            if not bounds == "Off":
                zerodata = np.zeros(len(ydata))
                if bounds == "minmax":
                    min_bound = data[field_label[i]]["Min Time"] / \
                    norm["Min Time"]
                    max_bound = data[field_label[i]]["Max Time"] / \
                    norm["Max Time"]
                else:
                    min_bound = ydata - data[field_label[i]]["Stddev Time"] / \
                    norm["Stddev Time"]
                    max_bound = ydata + data[field_label[i]]["Stddev Time"] / \
                    norm["Stddev Time"]
                if smooth_len:
                    min_bound = smooth(min_bound, smooth_len)
                    max_bound = smooth(max_bound, smooth_len)

                ### also preserve min/max_bound extrema for proper
                ### yaxis plot range
                min_bound_extrema = preserve_extrema(min_bound_extrema, xdata,
                                                     min_bound)
                max_bound_extrema = preserve_extrema(max_bound_extrema, xdata,
                                                     max_bound)

                ### If there's only one cycle, tile min/max_bound to
                ### match artificial xdata
                if num_cycles == 1:
                    min_bound = np.tile(min_bound, 3)
                    max_bound = np.tile(max_bound, 3)

                fillin = pl.fill_between(xdata,
                                         min_bound,
                                         max_bound,
                                         facecolor=color)
                fillin.set_alpha(0.5)

        ### Correct y-extrema to reflect extrema of min/max_bound
        if not bounds == "Off":
            extrema[2] = min_bound_extrema[2]
            extrema[3] = max_bound_extrema[3]

        ### If xlim and ylim are set explicitly.  If not, use smart defaults
        ### using extrema
        if xlim:
            pl.xlim(xlim)
        else:
            ### If there's only one cycle, force the xlim to go from
            ### cycle-1 to cycle+1, and fix number of xticks to 3.
            if num_cycles == 1:
                pl.xlim(extrema[0] - 1.0, extrema[0] + 1.0)
                pl.xticks((extrema[0] - 1.0, extrema[0], extrema[0] + 1.0))
            else:
                pl.xlim(extrema[0:2])
        if ylim:
            pl.ylim(ylim)
        else:
            if log_y_axis == "On":
                y_log_range = 1.2 * np.log10(extrema[3] / extrema[2])
                ### To assure there is a labeled tick mark on the y-axis
                if y_log_range < 1.:
                    y_log_range = 1.
                pl.ylim([extrema[2] * 0.9, extrema[2] * 10**y_log_range])
            else:
                pl.ylim([0., 1.2 * extrema[3]])

        ### Make a legend
        ### Shink current plot by 20% to make room for external legend
        box = ax.get_position()
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])

        ### Put a legend to the right of the current axis
        ### Reverse the order of the entries, so colors match order plotted
        handles, labels = ax.get_legend_handles_labels()
        legend = ax.legend(handles[::-1],
                           labels[::-1],
                           loc='center left',
                           bbox_to_anchor=(1, 0.5),
                           numpoints=1)

        ### Make the legend small
        ltext = legend.get_texts()
        pl.setp(ltext, fontsize='xx-small')

        ### Set the axis labels and save
        pl.xlabel(x_field_axis_label)
        if not y_field_axis_label:
            y_field_axis_label = y_field_index[0]
        if fractional:
            pl.ylabel(y_field_axis_label + ", as Fraction of Total")
        else:
            pl.ylabel(y_field_axis_label)
        pl.suptitle("Non-Stacked Quantities for " + self.filename)
        pl.savefig(filename)
        pl.clf()
Example #33
    if (time_index == 0):
        print(n.shape)
        mass_initial = np.mean(n)
        momentum_x_initial = np.mean(px)
        momentum_y_initial = np.mean(py)
        kinetic_energy_initial = np.mean(E)

    mass = np.max(n)
    mass2 = np.max(n2)
    momentum_x = np.mean(px)
    momentum_y = np.mean(py)
    kinetic_energy = np.mean(E)

    mass_error[time_index] = (mass - 0 * mass_initial)
    momentum_x_error[time_index] = (mass2 - 0 * momentum_x -
                                    0 * momentum_x_initial)
    momentum_y_error[time_index] = (momentum_y - momentum_y_initial)
    kinetic_energy_error[time_index] = (kinetic_energy -
                                        kinetic_energy_initial)

pl.semilogy(time_array, abs(mass_error), label=r'$N=32$', alpha=1)
pl.semilogy(time_array, abs(momentum_x_error), label=r'$N=64$', alpha=1)
# pl.semilogy(time_array, abs(momentum_y_error), label = 'y-Momentum', alpha = 0.5)
# pl.semilogy(time_array, abs(kinetic_energy_error), label = 'Kinetic Energy', alpha = 0.5)
# pl.ylim([1e-17, 1e-11])
pl.ylabel(r'MAX($n$)')
pl.legend(framealpha=0, fontsize=25)
pl.xlabel(r'Time')
pl.savefig('plot.png', bbox_inches='tight')
Example #34
def plot_1d_residual(lm, oqb, nrms, rqh, rq, m1, m2, **extra):
    import pylab as plt
    plt.figure()
    plt.semilogy(lm,
                 rqh[:, m1, m2] * lm * (lm + 1.) / 2. / nm.pi,
                 label="data")
    plt.semilogy(lm, rq[:, m1, m2] * lm * (lm + 1.) / 2. / nm.pi, label="cmb")
    if extra.get("abs", False):
        plt.semilogy(lm,
                     -rqh[:, m1, m2] * lm * (lm + 1.) / 2. / nm.pi,
                     label="-data")
        plt.semilogy(lm,
                     -rq[:, m1, m2] * lm * (lm + 1.) / 2. / nm.pi,
                     label="-cmb")
    for oq, nn in zip(oqb, nrms):
        plt.semilogy(lm, oq[:, m1, m2] * lm * (lm + 1.) / 2. / nm.pi, label=nn)
    plt.semilogy(lm,
                 nm.abs(rqh[:, m1, m2] -
                        (rq[:, m1, m2] + nm.sum(oqb[:, :, m1, m2], 0))) * lm *
                 (lm + 1.) / 2. / nm.pi,
                 label="|data-model|")
    plt.legend(loc="upper right",
               frameon=False,
               ncol=3,
               prop=plt.matplotlib.font_manager.FontProperties(size="x-small"))
    if extra.get("title", ""):
        plt.suptitle(extra["title"])
    plt.ylabel(extra.get("ylabel", "$D_{\\ell}\\ \\mu K^2$"))
    plt.xlabel(extra.get("xlabel", "$\\ell$"))
def plot(*curves):
    '''
    Plots Curve objects. plot(...) takes any number of Curve objects
    as arguments, and plots all of them on a single graph.  If multiple
    Curve objects are passed as arguments, the plot appearance data
    (axis options, title, axis labels, etc) are taken from the first
    Curve object. The Curve objects need not all have the same data length.
    This is convenient for plotting data from multiple sources, with varying
    resolution. 
    '''
    cMaster = curves[0]  #Appearance options are taken from this Curve

    fig = pl.figure()  #Always start a new figure
    #**ToDo: Make sure log-axis options work consistently
    #        with Ngl implementation when x/y axes are switched.
    #        (Looks OK, but is that implementation the logical way?)
    #r.trXLog = c.XlogAxis
    #r.trYLog = c.YlogAxis
    if cMaster.XlogAxis:
        pl.semilogx()
    if cMaster.YlogAxis:
        pl.semilogy()
    if cMaster.XlogAxis & cMaster.YlogAxis:
        pl.loglog()
    #
    #Line styles (Not needed in MPL, handled automatically)
    #r.xyLineColors = lineColors
    #r.xyLineThicknesses = lineThickness
    #Plot title
    #r.tiMainString = c.PlotTitle
    pl.title(cMaster.PlotTitle)
    #Axis labels (ToDo: add defaults)
    #X and Y axis labels
    #r.tiXAxisString = c.Xlabel
    #r.tiYAxisString = c.Ylabel
    if cMaster.switchXY:
        pl.ylabel(cMaster.Xlabel)
        pl.xlabel(cMaster.Ylabel)
    else:
        pl.xlabel(cMaster.Xlabel)
        pl.ylabel(cMaster.Ylabel)

    #  Legends, for multicurve plot
    legends = []
    for c in curves:
        for id in c.listVariables():
            if not id == c.Xid:
                if len(c.label[id]) > 0:
                    legends.append(c.label[id])
                else:
                    legends.append(id)
    #ToDo: Add option to skip legends
    #
    #Suppress line drawing and just plot symbol for scatter plot curves
    #**ToDo: Implement for MatPlotLib
    #r.xyMarkers = plotSymbols
    #r.xyMarkerColors = lineColors
    #r.xyMarkerSizeF = .01
    #r.xyMarkLineModes = []
    formatList = []
    count = 0
    for c in curves:
        for id in c.listVariables():
            if not id == c.Xid:
                if c.scatter[id]:
                    #r.xyMarkLineModes.append('Markers')
                    color = lineColors[count % len(lineColors)]
                    symbol = plotSymbols[count % len(plotSymbols)]
                    formatList.append(color + symbol)
                else:
                    color = lineColors[count % len(lineColors)]
                    style = lineStyles[count % len(lineStyles)]
                    formatList.append(color + style)
                count += 1
    #
    plotList = [
    ]  #Mainly so we can add legends. Could be done with label = ...
    #Note that pl.plot returns a list of lines, even if there is only
    #     one line in the plot, so for legends to work correctly,
    #     we need to extract just the first element.
    countAll = 0
    for c in curves:
        if cMaster.switchXY:
            count = 0
            for data in c.Y():
                plotList.append(pl.plot(data, c.X(), formatList[countAll])[0])
                count += 1
                countAll += 1
        else:
            count = 0
            for data in c.Y():
                plotList.append(pl.plot(c.X(), data, formatList[countAll])[0])
                count += 1
                countAll += 1
    #Do the legends
    #11/12/2013: Added Jonah's trick to put legends on the side
    ax = pl.subplot(111)
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    pl.legend(plotList, legends, loc='center left', bbox_to_anchor=(1, 0.5))
    #
    #Do the axis reversal
    #We do it here, since we don't know the axis limits until
    #plotting is done
    #r.trYReverse = c.reverseY
    #r.trXReverse = c.reverseX
    axes = pl.gca()  #Gets current axis
    if cMaster.reverseX:
        axes.set_xlim(axes.get_xlim()[::-1])  #::-1 reverses the array
    if cMaster.reverseY:
        axes.set_ylim(axes.get_ylim()[::-1])
    #Now re-draw the plot
    pl.draw()
    #(Insert commands needed to show plot, if necessary)
    return plotObj(
        None,
        fig)  #Eventually we will use this to make subplots and do save option
Example #36
    mn, mx = [np.percentile(X, p) for p in [1, 99]]
    print('min,max SNR', mn, mx)
    mx = 50.
    plt.hist(X, 50, range=(mn, mx))
    plt.xlabel('flux_auto SNR')
    plt.title('SE2')
    ps.savefig()

    plt.clf()
    plt.loglog(M.chi2_psf, M.chi2_model, 'b.')
    plt.xlabel('chi2_psf')
    plt.ylabel('chi2_model')
    ps.savefig()

    plt.clf()
    plt.semilogy(M.class_star, M.chi2_psf / M.chi2_model, 'b.')
    plt.xlabel('class_star')
    plt.ylabel('chi2_psf / chi2_model')
    ps.savefig()

    for col in [
            'x2_world', 'y2_world', 'xy_world', 'a_world', 'b_world',
            'theta_world', 'class_star'
    ]:
        plt.clf()
        X = S.get(col)
        mn, mx = [np.percentile(X, p) for p in [1, 99]]
        plt.hist(X, 100, range=(mn, mx))
        plt.xlabel(col)
        ps.savefig()
Example #37
# background spectrum
background_spectrum = np.median(background_spectra, 0)
background_spectrum[0] = 0

# alpha spectrum
alpha_spectrum = np.zeros_like(background_spectrum)
alpha_mask = interval_mask(freq, ALPHA_BAND)
alpha_flankers_mask = interval_flankers_mask(freq, ALPHA_BAND, FLANKER_WIDTH)
alpha_spectrum[alpha_mask] = np.median(alpha_peaks, 0)

# viz background spectrum
plt.plot(freq, background_spectrum, 'r')
plt.plot(freq[alpha_mask], (background_spectrum+alpha_spectrum)[alpha_mask])
plt.xlabel('Freq, Hz')
plt.semilogy()
plt.ylim(1e-8, 1e-5)
plt.xlim(0, FS/2)
plt.show()


# simulate background eeg
n_seconds_to_sim = (N_SAMPLES_TRAIN + N_SAMPLES_TEST)//FS + 20
def sim_from_spec(n_seconds, freq, spectrum):
    n_samples = FS * n_seconds + 1

    # frequencies
    freq_full = np.fft.fftfreq(n_samples, 1 / FS)

    # dft coefficients amplitudes
    amplitudes = np.interp(np.abs(freq_full), freq, spectrum)
    #py.plot(np.array(Bk_S_kr), np.exp(-np.array(Bk_S_kr)**2*sigma_tmp**2))
    #py.semilogx()
    #py.errorbar(Bk_N_kr, Bk_N_mean/max(Bk_N_mean), Bk_N_std/max(Bk_N_mean),fmt='o')
    #py.semilogx()
    py.errorbar(np.array(Bk_SN_kr),
                Bk_SN_mean / max(Bk_SN_mean),
                Bk_SN_std / max(Bk_SN_mean),
                fmt='.')
    py.errorbar(np.array(Bk_N_kr),
                Bk_N_mean / max(Bk_SN_mean),
                Bk_N_std / max(Bk_SN_mean),
                fmt='.')
    py.text(200, 0.1, 'noise floor= %02.2e' % (noise_floor))
    py.text(200, 0.03, '$D$ = %02.2d mm' % (Dapt_mm))
    py.text(200, 0.01, '$\\nu$ = %02.2d GHz' % (nu_obs * 1e-9))
    py.semilogy()
    py.ylim([1e-6, 1.1])
    py.xlim([0, 1000])
    py.xlabel('$|k|$')
    py.ylabel('$B_k$')
    py.title('Beam in Fourier space')

    #++++++++
    py.subplot(236)
    py.errorbar(np.array(Bk_S_kr),
                Bk_S_mean / max(Bk_S_mean),
                Bk_S_std / max(Bk_S_mean),
                fmt='o')
    py.errorbar(np.array(Bk_SN_kr),
                Bk_SN_mean / max(Bk_SN_mean),
                Bk_SN_std / max(Bk_SN_mean),
Example #39
    t = time.time()
    prox_list, global_indices = form_prox_list(socp_vars, partition_list)
    split_time = time.time() - t

    result = solve(prox_list,
                   global_indices,
                   parallel=True,
                   max_iters=1000,
                   rho=2)

    pri = result['res_pri']
    dual = result['res_dual']

    if args.show_plot:
        pylab.semilogy(range(len(pri)), pri, range(len(dual)), dual)
        pylab.legend(['primal', 'dual'])
        pylab.show()

    print('split time: ', split_time)
    print('solve time: ', result['solve_time'])

    print('subsystem times')
    for x in result['subsystem_stats']:
        print('subsystem: ', x)

    print()
    admm_objval = objective(result['sol'])
    print('admm objective   ', admm_objval)
    print('optimal objective', objval)
Example #40
    fname = "/data/id11/nanoscope/Commissioning/2017Feb/gold/Au6_s0_048_b/Au6_s0_048_b0432.edf.gz"
    bname = "/data/id11/nanoscope/Commissioning/2017Feb/gold/pks/bkg.edf"
    im = fabio.open(fname).data.astype(np.float32)
    im = im - fabio.open(bname).data.astype(np.float32)
    h, b = np.histogram(im, bins=np.arange(-128, 128))
    bc = (b[1:] + b[:-1]) / 2
    pl.plot(bc, h, "-")
    hh = np.zeros(256, np.int32)
    low = -50
    step = 1

sc.histogram_image(im.ravel(), len(im.ravel()), low, step, hh, len(hh))
# Notes:
#
#  The cutoff in the iterative approach depends if we come from below or above
#  ... the process was not converging properly for the testoverlaps image
#
#  Alternative idea:
#    Compute the histogram of the image
#    Use the histogram to determine the cutoff (e.g. fit low side of "peak")
#  TODO : use the histogram...

pl.plot(np.arange(low, low + len(hh) * step, step), hh, "-")
pl.semilogy()
pl.show()
mask = np.zeros(im.shape, np.int8)
start = time.time()
sc.threshold_image(im.ravel(), len(im.ravel()), 4., mask.ravel())
end = time.time()
print(end - start)
    pylab.colorbar(im)
    plot_no += 1

    # plot the 1D SFS
    pylab.subplot(len(time_pair_idxs[0]), 2, plot_no)
    pylab.xlabel('Minor allele frequency')
    pylab.ylabel('SFS')
    pylab.xlim([0, 1])
    pylab.ylim([3e-06, 3e-02])
    colNo = 0
    for idx in [idx1, idx2]:
        if sfss[idx].sum() != 0:
            normalized_sfs = sfss[idx]
            pylab.semilogy(bin_locations + normal(0, 1) *
                           (bin_locations[1] - bin_locations[0]) * 0.1,
                           normalized_sfs,
                           '.-',
                           alpha=0.5,
                           color=color[colNo])
        colNo += 1
    pylab.legend(['first time pt', 'second time pt'],
                 loc='upper right',
                 prop={'size': 6})
    plot_no += 1

pylab.savefig(
    '%s/%s_within_person_2D_sfs_time_pair_polarized_%s.png' %
    (parse_midas_data.analysis_directory, species_name, fig_annotation),
    bbox_inches='tight')

#######################
# repeat for high pis #
Example #42
def plot_vs_choi(l, cl, error, mc_std, Db, std, plot_dir, combin, spec):

    str = "%s_%s_cross.png" % (spec, combin)

    plt.figure(figsize=(12, 12))
    if spec == "TT":
        plt.semilogy()
    plt.errorbar(l, cl, error, fmt=".", label="steve %s" % combin)
    plt.errorbar(l, Db[spec], fmt=".", label="thibaut")
    plt.legend()
    plt.title(r"$D^{%s}_{\ell}$" % (spec), fontsize=20)
    plt.xlabel(r"$\ell$", fontsize=20)
    plt.savefig("%s/%s" % (plot_dir, str), bbox_inches="tight")
    plt.clf()
    plt.close()

    plt.figure(figsize=(12, 12))
    plt.semilogy()
    plt.errorbar(l, std, label="master %s" % combin, color="blue")
    plt.errorbar(l, mc_std, fmt=".", label="montecarlo", color="red")
    plt.errorbar(l, error, label="Knox", color="lightblue")
    plt.legend()
    plt.title(r"$\sigma^{%s}_{\ell}$" % (spec), fontsize=20)
    plt.xlabel(r"$\ell$", fontsize=20)
    plt.savefig("%s/error_%s" % (plot_dir, str), bbox_inches="tight")
    plt.clf()
    plt.close()

    plt.figure(figsize=(12, 12))
    plt.plot(l, l * 0 + 1, color="grey")
    if std is not None:
        plt.errorbar(l[2:],
                     std[2:] / mc_std[2:],
                     label="master %s" % combin,
                     color="blue")
    plt.errorbar(l[2:],
                 error[2:] / mc_std[2:],
                 label="Knox",
                 color="lightblue")
    plt.legend()
    plt.title(r"$\sigma^{ %s}_{\ell}/\sigma^{MC, %s}_{\ell} $" % (spec, spec),
              fontsize=20)
    plt.xlabel(r"$\ell$", fontsize=20)
    plt.savefig("%s/error_divided_%s" % (plot_dir, str), bbox_inches="tight")
    plt.clf()
    plt.close()

    plt.figure(figsize=(12, 12))
    plt.plot(l, (cl - Db[spec]) / mc_std, ".")
    plt.title(r"$\Delta D^{%s}_{\ell}/\sigma^{MC}_{\ell}$" % (spec),
              fontsize=20)
    plt.xlabel(r"$\ell$", fontsize=20)
    plt.savefig("%s/frac_error_%s" % (plot_dir, str), bbox_inches="tight")
    plt.clf()
    plt.close()

    plt.figure(figsize=(12, 12))
    plt.plot(l, (cl - Db[spec]) / cl)
    plt.title(r"$\Delta D^{%s}_{\ell}/ D^{%s}_{\ell}$" % (spec, spec),
              fontsize=20)
    plt.xlabel(r"$\ell$", fontsize=20)
    plt.savefig("%s/frac_%s" % (plot_dir, str), bbox_inches="tight")
    plt.clf()
    plt.close()
Example #43
            covariance = m.covar

        if 1:
            area = max(N.abs(p1[-3::]))
            sigma = p1[-6:-3] / 2.354
            amax = max(area / N.sqrt(2 * pi * sigma**2))

            chimin = (cost_func(p1, x, y, yerr)**2).sum()
            rangex = max(x) - min(x)
            prob = calc_prob(npeaks, amax, covariance, chimin, rangex)
            print('prob', prob)
            nlist.append(npeaks)
            plist.append(prob)
            #sys.exit()

    pylab.semilogy(nlist, plist, 's')
    pylab.show()
    if 0:
        pylab.plot(x, y, 's')
        pylab.axis([0, 2, 0, 1.4e4])

        for i in range(npeaks):
            pylab.axvline(x=results['xpeaks'][i])
            xcen = results['xpeaks'][i]
            half_height = y[results['indices'][i]] / 2
            pylab.plot([(xcen - fwhm[i] / 2), (xcen + fwhm[i] / 2)],
                       [half_height, half_height])
            ycalc = gen_function(p1, x)
            pylab.plot(x, ycalc)

        pylab.axis([0, 2, 0, 1.4e4])
Example #44
File: test.py  Project: nkern/capo
 for cnt1,k1 in enumerate(days):
     for k2 in days[cnt1:]:
         if k2 not in Q_Iz: Q_Iz[k2] = {}
         if k2 not in Q_Cz: Q_Cz[k2] = {}
         for bl1 in _Cz[k1]:
             for bl2 in _Cz[k2]:
                 #if k1 == k2 and bl1 == bl2: continue # this results in a significant bias
                 if k1 == k2 or bl1 == bl2: continue
                 #if k1 == k2: continue
                 #if bl1 == bl2: continue # also a significant noise bias
                 print(k1, k2, bl1, bl2)
                 if PLOT and False:
                     p.subplot(231); capo.arp.waterfall(C[m], drng=3)
                     p.subplot(232); capo.arp.waterfall(_C[m], drng=3)
                     p.subplot(233); capo.arp.waterfall(n.dot(C[m],_C[m]), drng=3)
                     p.subplot(234); p.semilogy(S)
                     p.subplot(236); capo.arp.waterfall(V, drng=3)
                     p.show()
                     p.subplot(311); capo.arp.waterfall(x[m], mode='real', mx=5, drng=10); p.colorbar(shrink=.5)
                     p.subplot(312); capo.arp.waterfall(_Cx, mode='real'); p.colorbar(shrink=.5)
                     p.subplot(313); capo.arp.waterfall(_Ix, mode='real'); p.colorbar(shrink=.5)
                     p.show()
                 if False: # use ffts to do q estimation fast
                     qI += n.conj(_Iz[k1][bl1]) * _Iz[k2][bl2]
                     qC += n.conj(_Cz[k1][bl1]) * _Cz[k2][bl2]
                 else: # brute force with Q to ensure normalization
                     #_qI = n.array([_Iz[k1][bl1].conj() * n.dot(Q[i], _Iz[k2][bl2]) for i in xrange(nchan)])
                     #_qC = n.array([_Cz[k1][bl1].conj() * n.dot(Q[i], _Cz[k2][bl2]) for i in xrange(nchan)])
                     if bl2 not in Q_Iz[k2]: Q_Iz[k2][bl2] = [n.dot(Q[i], _Iz[k2][bl2]) for i in range(nchan)]
                     if bl2 not in Q_Cz[k2]: Q_Cz[k2][bl2] = [n.dot(Q[i], _Cz[k2][bl2]) for i in range(nchan)]
                     _qI = n.array([_Iz[k1][bl1].conj() * Q_Iz[k2][bl2][i] for i in range(nchan)])
Example #45
def plotKinetics(kineticsList, Tlist, filename=None, bestKinetics=None):
    """
    Plot the set of loaded kinetics `kineticsList` at the array of temperatures
    `Tlist` in K. Different symbols denote the various reference types, while
    different linespecs denote individual kinetics within each reference type. 
    If given, the `bestKinetics` will also be plotted, using a thicker line to
    make it stand out.
    """

    sm = pylab.cm.ScalarMappable(
        norm=matplotlib.colors.Normalize(vmin=0, vmax=len(kineticsList) - 1),
        cmap=pylab.get_cmap('jet'),
    )

    if len(forwardReaction.reactants) == 2:
        kfactor = 1e6
        kunits = '$cm^3/mol*s$'
    else:
        kfactor = 1
        kunits = '$s^{-1}$'

    fig = pylab.figure(figsize=(8, 6))
    legend = []
    lines = []
    for index, kinetics in enumerate(kineticsList):
        klist = numpy.zeros_like(Tlist)
        for n in range(len(Tlist)):
            if kinetics.isTemperatureValid(Tlist[n]):
                klist[n] = kinetics.getRateCoefficient(Tlist[n])
        try:
            if kinetics.comment[0] == 'R':
                linespec = '-'
            elif kinetics.comment[0] == 'E':
                linespec = ':'
            elif kinetics.comment[0] == 'T':
                linespec = '--'
            else:
                linespec = '-.'
        except IndexError:
            continue
        color = sm.to_rgba(index)
        if numpy.any(klist):
            lines.append(
                pylab.semilogy(1000. / Tlist,
                               klist * kfactor,
                               linespec,
                               color=color,
                               picker=5)[0])
            legend.append(kinetics.comment)

    if bestKinetics:
        klist = numpy.zeros_like(Tlist)
        for n in range(len(Tlist)):
            if bestKinetics.isTemperatureValid(Tlist[n]):
                klist[n] = bestKinetics.getRateCoefficient(Tlist[n])
        if numpy.any(klist):
            pylab.semilogy(1000. / Tlist, klist * kfactor, '-k', linewidth=3)
            legend.append(bestKinetics.comment)

    pylab.xlabel('1000 / (Temperature (K))')
    pylab.ylabel('Rate coefficient ({0})'.format(kunits))
    pylab.xlim(0.0, 4.0)
    pylab.legend(legend, loc=1)

    pylab.title('{0} $\\rightarrow$ {1}'.format(
        ' + '.join([spec.label for spec in forwardReaction.reactants]),
        ' + '.join([spec.label for spec in forwardReaction.products]),
    ))

    if filename:
        pylab.savefig(filename)

    def onpick(event):
        index = lines.index(event.artist)
        kinetics = kineticsList[index]
        print(kinetics.comment)
        print(kinetics)

    connection_id = fig.canvas.mpl_connect('pick_event', onpick)

    pylab.show()
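
A hedged usage sketch for plotKinetics (not part of the original project): the stand-in kinetics class and the module-level forwardReaction below only mimic the interface the function actually touches (isTemperatureValid, getRateCoefficient, comment, and species labels); the real project presumably supplies RMG-style objects instead.

# Illustrative driver only -- SimpleArrhenius and the SimpleNamespace species
# are assumptions, not the project's real classes.
import numpy
from types import SimpleNamespace

class SimpleArrhenius(object):
    """Toy k(T) = A*exp(-Ea/(R*T)) exposing the interface plotKinetics uses."""
    def __init__(self, A, Ea, comment):
        self.A, self.Ea, self.comment = A, Ea, comment
    def isTemperatureValid(self, T):
        return 300.0 <= T <= 2500.0
    def getRateCoefficient(self, T):
        return self.A * numpy.exp(-self.Ea / (8.314 * T))

# plotKinetics reads `forwardReaction` from module scope, so define it first.
forwardReaction = SimpleNamespace(
    reactants=[SimpleNamespace(label='CH4'), SimpleNamespace(label='OH')],
    products=[SimpleNamespace(label='CH3'), SimpleNamespace(label='H2O')],
)

Tlist = numpy.linspace(300.0, 2000.0, 50)
kineticsList = [
    SimpleArrhenius(1e8, 30e3, 'RMG rate rule'),     # comment[0] == 'R' -> solid line
    SimpleArrhenius(5e7, 25e3, 'Experimental fit'),  # comment[0] == 'E' -> dotted line
]
plotKinetics(kineticsList, Tlist, filename='kinetics_comparison.pdf')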
Exemplo n.º 46
0
def driver(x, y):
    kernel = 31
    if 1:
        kern, DW = optimize_DW(y)  #choose the right window size
        #pylab.plot(kern,DW,'s')
        #pylab.show()
        kernel = min(N.abs(kern - 2))

    npeaks = 2
    nlist = []
    plist = []
    for npeaks in range(1, 10):
        print 'npeaks', npeaks
        results = findpeak(x, y, npeaks, kernel=31)
        fwhm = findwidths(x, y, npeaks, results['xpeaks'], results['indices'])
        print 'res', results['xpeaks']
        print 'fwhm', fwhm
        print 'heights', results['heights']
        p0 = [0, 0]

        #results['heights']=[1000,500,1000]
        #fwhm=[.1,.2,.4]
        sigma = fwhm / 2.354
        pb = N.concatenate((results['xpeaks'], fwhm,
                            results['heights'] * N.sqrt(2 * pi * sigma**2)))
        pb = N.array(pb).flatten()
        p0 = N.concatenate((p0, pb)).flatten()
        print 'p0', p0
        ycalc = gen_function(p0, x)
        if 1:
            results = scipy.optimize.leastsq(cost_func,
                                             p0,
                                             args=(x, y, yerr),
                                             full_output=1)
            p1 = results[0]
            covariance = results[1]
            print 'p1', p1

        if 0:
            parbase = {
                'value': 0.,
                'fixed': 0,
                'limited': [0, 0],
                'limits': [0., 0.]
            }
            parinfo = []
            for i in range(len(p0)):
                parinfo.append(copy.deepcopy(parbase))
            for i in range(len(p0)):
                parinfo[i]['value'] = p0[i]
            fa = {'x': x, 'y': y, 'err': yerr}
            m = mpfit(myfunctlin, p0, parinfo=parinfo, functkw=fa)
            print 'status = ', m.status
            print 'params = ', m.params
            p1 = m.params
            covariance = m.covar

        if 1:
            area = max(N.abs(p1[-3::]))
            sigma = p1[-6:-3] / 2.354
            amax = max(area / N.sqrt(2 * pi * sigma**2))

            chimin = (cost_func(p1, x, y, yerr)**2).sum()
            rangex = max(x) - min(x)
            prob = calc_prob(npeaks, amax, covariance, chimin, rangex)
            print 'prob', prob
            nlist.append(npeaks)
            plist.append(prob)
            #sys.exit()

    pylab.semilogy(nlist, plist, 's')
    pylab.show()
    if 0:
        pylab.plot(x, y, 's')
        pylab.axis([0, 2, 0, 1.4e4])

        for i in range(npeaks):
            pylab.axvline(x=results['xpeaks'][i])
            xcen = results['xpeaks'][i]
            half_height = y[results['indices'][i]] / 2
            pylab.plot([(xcen - fwhm[i] / 2), (xcen + fwhm[i] / 2)],
                       [half_height, half_height])
            ycalc = gen_function(p1, x)
            pylab.plot(x, ycalc)

        pylab.axis([0, 2, 0, 1.4e4])
        pylab.show()
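
The helpers used by driver (optimize_DW, findpeak, findwidths, gen_function, cost_func, calc_prob) are defined elsewhere in the original file. As a hedged illustration of the parameter layout implied by how p0 is assembled above -- a two-term linear background followed by peak centers, FWHMs, and integrated areas -- gen_function and cost_func might look roughly like this (an assumption, not the project's actual code):

import numpy as N

def gen_function(p, x):
    # Assumed layout: p = [b0, b1, c_1..c_n, fwhm_1..fwhm_n, area_1..area_n]
    npk = (len(p) - 2) // 3
    background = p[0] + p[1] * x
    centers = N.asarray(p[2:2 + npk])
    sigmas = N.asarray(p[2 + npk:2 + 2 * npk]) / 2.354
    areas = N.asarray(p[2 + 2 * npk:2 + 3 * npk])
    y = background
    for c, s, a in zip(centers, sigmas, areas):
        y = y + a / N.sqrt(2 * N.pi * s**2) * N.exp(-0.5 * ((x - c) / s)**2)
    return y

def cost_func(p, x, y, yerr):
    # Weighted residual vector, as expected by scipy.optimize.leastsq.
    return (y - gen_function(p, x)) / yerr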
Exemplo n.º 47
0
 ok = w > thresh
 g0[ant], w0[ant] = n.where(ok, d / w, 0), n.where(ok, 1, 0)
 p.subplot(121)
 p.plot(fqs, g0[ant], colors[i], label=str(ant), alpha=.5)
 window = a.dsp.gen_window(d.size, 'blackman-harris')
 dw, ww = d * window, w * window
 #dw,ww = g0[ant] * window, w0[ant] * window
 _dw, _ww = n.fft.ifft(dw), n.fft.ifft(ww)
 gain = a.img.beam_gain(_ww)
 mdl, info = a.deconv.clean(_dw, _ww, tol=tol, stop_if_div=div)
 mdl += info['res'] / gain
 mdl /= mdl[0]
 p.subplot(122)
 p.semilogy(tau[:tau.size / 2],
            n.abs(mdl[:tau.size / 2]),
            colors[i],
            label=str(ant),
            alpha=.5)
 sync = (fqs / .150)**-4.5
 w2[ant] = w0[ant]
 g2[ant] = n.where(w2[ant] > 0, g0[ant] / sync, 0)
 p.subplot(121)
 p.plot(fqs, g2[ant], colors[i], label='%d/S' % (ant))
 dw, ww = g2[ant] * window, w2[ant] * window
 _dw, _ww = n.fft.ifft(dw), n.fft.ifft(ww)
 gain = a.img.beam_gain(_ww)
 mdl, info = a.deconv.clean(_dw, _ww, tol=tol, stop_if_div=div)
 mdl += info['res'] / gain
 mdl /= mdl[0]
 p.subplot(122)
 p.semilogy(tau[:tau.size / 2],
Exemplo n.º 48
0
lmax = 3 * nside - 1

(Cls, alms) = hp.sphtfunc.anafast(imap,
                                  lmax=lmax,
                                  iter=3,
                                  alm=True,
                                  pol=False,
                                  use_weights=False,
                                  datapath=None,
                                  gal_cut=0)

ells = np.arange(len(Cls))

print("l_max:  %d" % lmax)

pl.semilogy(ells, Cls)

pl.xlim(0, 40)
pl.ylim(10**-5., 5.)

pl.ylabel(
    r"$C(\ell) \ = \ \sum_{m=-\ell}^{\ell} \ |a_{lm}|^2 \ / \ (2 \ell + 1)$")
pl.xlabel(r"$\ell$")

## pl.title(r"Spherical transform of DESI footprint")

pl.savefig("desi_Cls.pdf", bbox_inches="tight")

## a_lm's plot
pl.clf()
Exemplo n.º 49
0
                outfile.write(' '.join([str(d) for d in data])+'\n')
elif iUp > 0:
        for data in zip(tUp,Ux,Uy,Uz,p):
                outfile.write(' '.join([str(d) for d in data])+'\n')

outfile.close()

# prepare plot
import pylab
pylab.xlabel('iteration')
pylab.ylabel('residual')
pylab.grid(True)

# plot in log scale
if iUp > 0:
        pylab.semilogy(tUp,Ux,'-',label="Ux")
        pylab.semilogy(tUp,Uy,'-',label="Uy")
        pylab.semilogy(tUp,Uz,'-',label="Uz")
        pylab.semilogy(tUp,p,'-',label="p")

if ik > 0:
        pylab.semilogy(tk,k,'-',label="k")

if iomega > 0:
        pylab.semilogy(tomega,omega,'-',label="omega")

if iepsilon > 0:
        pylab.semilogy(tepsilon,epsilon,'-',label="epsilon")

pylab.legend()
pylab.show()
Exemplo n.º 50
0
print(l.learn(), 'in', l.numEvaluations, 'evaluations.')

# b) desiredValue
l = algo(f, desiredEvaluation=10)
print(l.learn(), ': fitness below 10 (we minimize the function).')

# c) maximal number of learning steps
l = algo(f, maxLearningSteps=25)
l.learn()
print(l.learn(), 'in', l.numLearningSteps, 'learning steps.')

# it is possible to continue learning from where we left off, for a
# specific number of additional learning steps:
print(l.learn(75), 'in', l.numLearningSteps, 'total learning steps.')

# Finally you can set storage settings and then access all evaluations made
# during learning, e.g. for plotting:
l = algo(f,
         x0,
         storeAllEvaluations=True,
         storeAllEvaluated=True,
         maxEvaluations=150)
l.learn()
try:
    import pylab
    pylab.plot(list(map(abs, l._allEvaluations)))
    pylab.semilogy()
    pylab.show()
except ImportError as e:
    print('No plotting:', e)
Exemplo n.º 51
0
#####################################
# Example of analysis of data
#####################################

import numpy as np
import pylab as plt
from boutdata.collect import collect

path = './data/'
var = collect('P', path=path)
#print var

dcvar = np.mean(var, axis=3)
rmsvar = np.sqrt(np.mean(var**2, axis=3) - dcvar**2)

plt.figure()
plt.plot(rmsvar[:, 34, 32])
plt.show(block=False)
fvar = np.fft.rfft(var, axis=3)

plt.figure()
plt.plot(abs(fvar[:, 34, 32, 1:10]))
plt.show(block=False)

plt.figure()
plt.semilogy(abs(fvar[:, 34, 32, 1:7]))
plt.show(block=False)

plt.show()
Exemplo n.º 52
0
import numpy as np
import pylab as P

def plot_power_law(X):
    # Rank-ordered magnitudes of X on a log y-axis.
    xabs = np.abs(X).flat[:]
    xabs.sort()
    P.semilogy(xabs[::-1])
    P.show()
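
A short usage example for the helper above (the Pareto draw is illustrative; any heavy-tailed array works):

X = np.random.pareto(a=2.0, size=10000)  # heavy-tailed sample
plot_power_law(X)  # magnitudes in descending rank order on a log y-axis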
Exemplo n.º 53
0
reference_xs, reference_survivals = stats_utils.calculate_unnormalized_survival_from_vector(reference_prevalences, min_x=0, max_x=1)

metaphlan2_xs, metaphlan2_survivals = stats_utils.calculate_unnormalized_survival_from_vector(metaphlan2_prevalences, min_x=0, max_x=1)

marker_xs, marker_survivals = stats_utils.calculate_unnormalized_survival_from_vector(marker_prevalences, min_x=0, max_x=1)

pylab.figure(1,figsize=(3.42,4))
pylab.title(species_name)

#pylab.step(pangenome_xs, pangenome_survivals/pangenome_survivals[0],label='Pan-genome')
#pylab.step(reference_xs, reference_survivals/reference_survivals[0],label='Reference')
#pylab.step(metaphlan2_xs, metaphlan2_survivals/metaphlan2_survivals[0],label='Metaphlan2')
#pylab.step(marker_xs, marker_survivals/marker_survivals[0],label='MIDAS Marker')
#pylab.ylim([1e-02,1])

pylab.step(pangenome_xs, pangenome_survivals, label='Pangenome')
pylab.step(reference_xs, reference_survivals,label='Reference')
pylab.step(metaphlan2_xs, metaphlan2_survivals,label='Metaphlan2')
pylab.step(marker_xs, marker_survivals,label='MIDAS Marker')
pylab.ylim([10,1e04])

pylab.semilogy([1],[1])

pylab.legend(frameon=False,loc='lower left', fontsize=7)
pylab.xlabel('Prevalence fraction, $p$ (from genes/)')
pylab.ylabel('Fraction of genes >= $p$')

pylab.savefig('%s/%s_prevalence_distribution.pdf' % (parse_midas_data.analysis_directory, species_name), bbox_inches='tight', transparent=True)
pylab.savefig('%s/%s_prevalence_distribution.png' % (parse_midas_data.analysis_directory, species_name), bbox_inches='tight', dpi=300)
Exemplo n.º 54
0
    def plot(self,
             y_list=None,
             x_list=None,
             labels=None,
             ax=None,
             ylabel="",
             xlabel="",
             yscale=False):
        fig = self.fig

        if y_list is None and x_list is None:
            y_list = self.y_list
            x_list = self.x_list

        if yscale == "log":
            # Make sure everything is non-negative
            # for yi in y_list:
            #     assert np.all(yi >= 0)

            # Set zeros to eps
            for i in range(len(y_list)):
                y_list[i] = np.maximum(y_list[i], np.finfo(float).eps)

            # Truncate each curve at the first value that was clipped to eps
            for i in range(len(y_list)):

                opt_ind = np.where(y_list[i] == np.finfo(float).eps)[0]
                if opt_ind.size > 0:
                    opt_ind = opt_ind[0]

                    y_list[i] = y_list[i][:opt_ind + 1]
                    x_list[i] = x_list[i][:opt_ind + 1]

        n_labels = len(y_list)

        if ax is None:
            ax = self.fig.add_subplot(self.nrows, self.ncols, self.pIndex)

        ax.set_facecolor('white')
        ax.set_yscale("log", nonposy='clip')
        if labels is None and self.labels is None:
            labels = list(map(str, np.arange(n_labels)))
        elif labels is None:
            labels = self.labels

        ref_points = []
        for i in range(len(self.converged)):
            if self.converged[i] is not None:

                ref_points += [[
                    self.converged[i]["X"], self.converged[i]["Y"]
                ]]

        label_positions, label_indices = get_labelPositions(
            y_list,
            x_list,
            self.ylim,
            labels=labels,
            ref_points=np.array(ref_points))

        ls_markers = markers

        if not self.lim_set:
            y_min, y_max = get_min_max(y_list)
            x_min, x_max = get_min_max(x_list)
            #y_min = max(y_min, 1e-8)
            #ax.set_ylim([y_min, y_max])

            prev = 10**int(np.log10(y_min) - 1)
            latter = 10**(int(np.log10(y_max)) + 1)
            #latter=10**(int(np.log10(y_max)))

            ax.set_ylim(prev, latter)
            ax.set_xlim([x_min, x_max])

        for i in range(n_labels):
            color = colors[i]
            ls, marker = ls_markers[i]

            y_vals = y_list[i]
            x_vals = x_list[i]

            n_points = len(y_vals)

            label = labels[i]

            markerFreq = n_points / (int(np.log(n_points)) + 1)

            ## SCATTER PLOT OPTIMAL
            # ind_opt = np.where(y_vals == np.finfo(float).eps)[0]

            # if ind_opt.size > 0:
            #     x_opt = x_vals[np.where(y_vals == np.finfo(float).eps)[0][0]]
            #     y_opt = np.finfo(float).eps

            if self.converged[i] is not None:
                ax.scatter(self.converged[i]["X"],
                           self.converged[i]["Y"],
                           s=300,
                           marker="*",
                           color=color,
                           clip_on=False,
                           zorder=100)
            ##
            line, = ax.plot(x_vals,
                            y_vals,
                            markevery=int(markerFreq),
                            markersize=int(self.markersize),
                            color=color,
                            lw=self.line_width,
                            alpha=1.0,
                            label=label,
                            ls=ls,
                            marker=marker)

            if self.legend_type == "line":
                x_point, y_point = label_positions[i]
                angle = get_label_angle(x_vals,
                                        y_vals,
                                        label_indices[i],
                                        ax,
                                        color='0.5',
                                        size=12)

                box = dict(
                    facecolor="white",
                    edgecolor=color,
                    linestyle=ls,
                    #hatch=marker,
                    linewidth=int(2 * self.ratio),
                    boxstyle="round")

                ax.text(x_point,
                        y_point,
                        label,
                        va='center',
                        ha='center',
                        rotation=angle,
                        color=color,
                        bbox=box,
                        fontsize=self.fontsize)

            else:
                plt.legend(loc="best")

        if self.shareRowLabel and (((self.pIndex - 1) % (self.ncols)) == 0):
            ax.set_ylabel(ylabel, fontsize=self.axFontSize)

        if not self.shareRowLabel:
            ax.set_ylabel(ylabel, fontsize=self.axFontSize)

        ax.set_xlabel(xlabel, fontsize=self.axFontSize)

        ax.tick_params(labelsize=self.axTickSize)
        ax.tick_params(axis='y', labelsize=int(self.axTickSize * 1.5))
        self.y_list = []
        self.x_list = []
        self.labels = []
        self.converged = []

        self.pIndex += 1
        self.axList += [ax]

        ax.minorticks_off()
        #vals = np.logspace(np.log10(y_min),np.log10(y_max), 5)
        #vals=np.linspace(y_min-y_min%10,10*(y_max-y_max%10),5)

        vals = np.logspace(np.log10(prev), np.log10(latter),
                           int(np.log10(latter / prev)) + 1)
        #vals=map(lambda x:'%e'%x,vals)
        ax.set_yticks(vals)

        ax.yaxis.set_major_formatter(FuncFormatter(myticks))
        plt.semilogy()

        return fig, ax
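
A self-contained sketch of the decade-tick setup this plot() method relies on, using plain matplotlib; decade_ticks below is only a stand-in for the project's myticks formatter, and the data are made up.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter

def decade_ticks(value, pos):
    # Stand-in formatter: label each decade tick as a power of ten.
    return r'$10^{%d}$' % int(np.round(np.log10(value)))

y = np.logspace(-6, 2, 200) * (1 + 0.3 * np.random.rand(200))
x = np.arange(y.size)

fig, ax = plt.subplots()
ax.set_yscale('log')
ax.plot(x, y)

# Bracket the data by whole decades, mirroring the prev/latter logic above.
lo = 10 ** int(np.log10(y.min()) - 1)
hi = 10 ** (int(np.log10(y.max())) + 1)
ax.set_ylim(lo, hi)
ax.set_yticks(np.logspace(np.log10(lo), np.log10(hi), int(np.log10(hi / lo)) + 1))
ax.yaxis.set_major_formatter(FuncFormatter(decade_ticks))
ax.minorticks_off()
plt.show()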
Exemplo n.º 55
0
time_grid = np.linspace(0, time[-1], len(Ex_max))
pl.plot(time_grid, Ex_max, label=r'$\mathrm{PIC}$')
pl.plot(time_LT, Ex_max_LT, '--', lw=3, label='$\mathrm{LT}$')
pl.plot(time_CK, Ex_max_CK, label='$\mathrm{Cheng\;Knorr}$')
# print('(abs(Ex_amplitude[0])) is ',(abs(Ex_amplitude[0])))
# print('(abs(Ex_max[0])) is ',(abs(Ex_max[0])))
pl.xlabel('$t$')
pl.ylabel('$\mathrm{MAX}(|E_{x}|)$')
pl.legend()
pl.show()
pl.savefig('MaxE_LT.png')
pl.clf()

# In[ ]:

pl.semilogy(time_grid, Ex_max, label=r'$\mathrm{PIC}$')
pl.semilogy(time_LT, Ex_max_LT, '--', lw=3, label='$\mathrm{LT}$')
pl.semilogy(time_CK, Ex_max_CK, label='$\mathrm{Cheng\;Knorr}$')
pl.legend()
pl.xlabel('$t$')
pl.ylabel('$\mathrm{MAX}(|E_{x}|)$')
pl.show()
pl.savefig('MaxE_semilogy_LT.png')
pl.clf()

# In[ ]:

pl.loglog(time_grid, Ex_max, label=r'$\mathrm{PIC}$')
pl.loglog(time_LT, Ex_max_LT, '--', lw=3, label='$\mathrm{LT}$')
pl.semilogy(time_CK, Ex_max_CK, label='$\mathrm{Cheng\;Knorr}$')
pl.legend()
Exemplo n.º 56
0
def plot_arches():
    catalog_file = '/u/jlu/work/arches/mwhosek/osiris_5_14/catalog_key1_Aks0.0.fits'

    cat = atpy.Table(catalog_file)

    scale = 120.0
    nexposures = 21.0

    # Repair the positional uncertainties so that they are the error on the mean rather than
    # the standard deviation.
    xe = scale * cat['xe_2010_f153m'] / math.sqrt(nexposures - 1.0)
    ye = scale * cat['ye_2010_f153m'] / math.sqrt(nexposures - 1.0)
    m = cat['m_2010_f153m']

    pe = (xe + ye) / 2.0

    at_m = 18
    m_min = at_m - 0.3
    m_max = at_m + 0.3
    idx = np.where((m > m_min) & (m < m_max))[0]
    print len(idx)

    pe_at_m = pe[idx].mean()

    ve = (cat['fit_vxe'] + cat['fit_vye']) / 2.0
    ve_at_m = ve[idx].mean()

    t = np.array([2010.6043, 2010.615, 2010.615, 2011.6829, 2012.6156])
    # t = np.array([2010.6043, 2011.6829, 2012.6156])
    ve_predict = predict_pm_err(t, pe_at_m)

    print 'Median Positional Error at F153M  = {0:d} mag: {1:.2f} mas'.format(
        at_m, pe_at_m)
    print 'Median Velocity Error at F153M    = {0:d} mag: {1:.2f} mas/yr'.format(
        at_m, ve_at_m)
    print 'Predicted velocity error at F153M = {0:d} mag: {1:.2f} mas/yr'.format(
        at_m, ve_predict)

    py.close(1)
    py.figure(1)
    py.clf()
    py.semilogy(m, pe, 'k.', ms=2)
    py.axhline(pe_at_m, linestyle='--', color='blue', linewidth=2)
    py.text(11,
            pe_at_m * 1.1,
            'Median at \nF153M={0:d} mag'.format(at_m),
            color='blue')
    py.plot(at_m, pe_at_m, 'rs', ms=15, color='blue')
    py.xlabel('WFC3-IR F153M Magnitude')
    py.ylabel('Positional Error (mas)')
    py.ylim(0.0025, 25)
    py.ylim(0.01, 10)
    py.xlim(10, 21)
    py.savefig('/u/jlu/doc/papers/proceed_2014_spie/wfc3ir_arches_pos_err.png')

    py.close(2)
    py.figure(2)
    py.clf()
    py.semilogy(m, ve, 'k.', ms=2)
    py.axhline(ve_predict, linestyle='--', color='blue', linewidth=2)
    py.text(11,
            ve_predict * 1.1,
            'Predicted based\non Pos. Err. at\nF153M={0:d} mag'.format(at_m),
            color='blue')
    py.plot(at_m, ve_at_m, 'rs', ms=15, color='yellow')
    py.xlabel('WFC3-IR F153M Magnitude')
    py.ylabel('Proper Motion Error (mas/yr)')
    #py.ylim(0.00, 1.0)
    py.ylim(0.01, 1.0)
    py.xlim(10, 21)
    py.savefig('/u/jlu/doc/papers/proceed_2014_spie/wfc3ir_arches_pm_err.png')
Exemplo n.º 57
0
         t2 * eta + t3 * (eta**2))
    afin = 1 / (1 + q)**2 * math.sqrt(a1**2 + (a2**2) * (q**4) + 2 * a1 * a2 *
                                      (q**2) * cosa + 2 *
                                      (a1 * cosb + a2 *
                                       (q**2) * cosc) * l * q + (l**2) *
                                      (q**2))
    return afin


#Missed-Found plots
fignum = 0
for instruments in summ.set_instruments_to_calculate():
    found, missed = summ.get_injections(instruments)
    fignum = fignum + 1
    pylab.figure(fignum)
    pylab.semilogy([f.mchirp for f in found], [f.eff_dist_l for f in found],
                   '.')
    pylab.semilogy([m.mchirp for m in missed], [m.eff_dist_l for m in missed],
                   'k.')
    pylab.xlabel('Chirp Mass ($M_\odot$)')
    pylab.ylabel('Effective Distance (Mpc): L1')
    pylab.title('Missed-Found:L1 Effective Distance vs Chirp Mass')
    pylab.savefig('L1effdist_chirpmass.png')

    fignum = fignum + 1
    pylab.figure(fignum)
    pylab.semilogy([f.mass1 + f.mass2 for f in found],
                   [f.eff_dist_l for f in found], '.')
    pylab.semilogy([m.mass1 + m.mass2 for m in missed],
                   [m.eff_dist_l for m in missed], 'k.')
    pylab.xlabel('Total Mass ($M_\odot$)')
    pylab.ylabel('Effective Distance (Mpc): L1')
Exemplo n.º 58
0
        np.sqrt(np.pi / 2.0) *
        (1.0 + sgn * erf(sgn *
                         (rho - a) / np.sqrt(2.0))) / np.power(rho, 4.0) -
        4.0 * np.exp(-0.5 * a * a + a * rho - 0.5 * rho * rho) /
        np.power(rho, 5.0))
    evidence1 = factor * np.exp(((0.5 * rho * rho)))

    # Low snr approximation
    evidence1 = evidence1 + 0.3333333333 * np.exp((-0.5 * a * a + rho * a))

    #
    # Plot the evidence and the approximation
    #
    pl.figure(1)
    mylabel = "$\hat{\sigma} = %0.4f$" % a
    pl.semilogy(rho, evidence, '-', label=mylabel)
    pl.semilogy(rho, evidence1, 'o')

    #
    # Plot the fractional error as a function of evidence
    #
    pl.figure(2)
    ratio = evidence / evidence1.ravel()
    pl.semilogx(evidence, np.abs(ratio), label=mylabel)

    print("This is a visual pacifier")

pl.figure(1)
pl.legend(loc='upper left')
pl.xlabel(r'$\rho$', fontsize=16)
pl.ylabel(r'$I/r_{\mathrm{max}^3}$', fontsize=16)
Exemplo n.º 59
0
        E_ls_tau_1e_minus_3,
        '--',
        color='C5',
        label=r'$\tau=0.001$')
pl.plot(time_array_7, E_nls_tau_0, color='C6', label=r'$\tau=0$')
pl.plot(time_array_7, E_ls_tau_0, '--', color='C6', label=r'$\tau=0$')
pl.ylabel(r'SUM($|E|^2$)')
pl.xlabel('Time')
pl.legend()
pl.savefig('linearplot.png')
pl.clf()

#pl.semilogy(time_array_1, E_nls_tau_inf, color = 'C0', label=r'$\tau=\infty$')
#pl.semilogy(time_array_1, E_ls_tau_inf, '--', color = 'C0', label = r'$\tau=\inf$')
#pl.semilogy(time_array_2, E_nls_tau_100, color = 'C1', label=r'$\tau=100$')
pl.semilogy(time_array_2, E_ls_tau_100, '--', color='C1', label=r'$\tau=100$')
#pl.semilogy(time_array_3, E_nls_tau_10, color = 'C2', label=r'$\tau=10$')
pl.semilogy(time_array_3, E_ls_tau_10, '--', color='C2', label=r'$\tau=10$')
#pl.semilogy(time_array_4, E_nls_tau_1, color = 'C3', label=r'$\tau=1$')
pl.semilogy(time_array_4, E_ls_tau_1, '--', color='C3', label=r'$\tau=1$')
#pl.semilogy(time_array_5, E_nls_tau_1e_minus_2, color = 'C4', label=r'$\tau=0.01$')
#pl.semilogy(time_array_5, E_ls_tau_1e_minus_2, '--', color = 'C4', label = r'$\tau=0.01$')
#pl.semilogy(time_array_6, E_nls_tau_1e_minus_3, color = 'C5', label=r'$\tau=0.001$')
#pl.semilogy(time_array_6, E_ls_tau_1e_minus_3, '--', color = 'C5', label = r'$\tau=0.001$')
#pl.semilogy(time_array_7, E_nls_tau_0, color = 'C6', label=r'$\tau=0$')
#pl.semilogy(time_array_7, E_ls_tau_0, '--', color = 'C6', label = r'$\tau=0$')
pl.ylabel(r'SUM($|E|^2$)')
pl.xlabel('Time')
pl.legend()
pl.savefig('linearplot.png')
pl.clf()
Exemplo n.º 60
0
ds = []
os = []
for bl in data.keys():
    x, y = aa.get_baseline(*bl)[:2]
    #    phs = np.unwrap(np.angle(data[bl][pol]))[integration:integration+3,:] # try plane fitting for a single time.
    phs = np.unwrap(np.angle(data[bl][pol]))
    dlys, offsets = vector_fit_line_to_phase(phs, fqs, valid)
    if opts.plot:
        fq_plot = np.c_[[fqs for i in range(phs.shape[0])]].T
        p.subplot(211)
        p.plot(fqs, 2 * np.pi * fq_plot * dlys + offsets, lw=4)
        p.plot(fqs, 2 * np.pi * fq_plot * dlys, lw=4)
        p.plot(fqs, phs.flatten(), 'k')
        p.plot(fqs, np.angle(data[bl][pol][integration]))
        p.subplot(212)
        p.semilogy(fqs, np.abs(data[bl][pol][integration]))
        p.show()
    bs.append(bl)
    xs.append(x)
    ys.append(y)
    ds.append(dlys)
    os.append(offsets)

#Now fit it the data points, xs,ys,ds

C = fit_plane(ds, xs, ys).T
O = fit_plane(os, xs, ys).T

import IPython
IPython.embed()
#apply fits