Example #1
def pore_size_distribution(network, fig=None):
    r"""
    Plot the pore and throat size distribution which is the accumulated
    volume vs. the diameter in a semilog plot

    Parameters
    ----------
    network : OpenPNM Network object

    """
    if fig is None:
        fig = _plt.figure()
    dp = network['pore.diameter']
    Vp = network['pore.volume']
    dt = network['throat.diameter']
    Vt = network['throat.volume']
    dmax = max(max(dp), max(dt))
    steps = _sp.linspace(0, dmax, 100, endpoint=True)
    vals = _sp.zeros_like(steps)
    for i in range(0, len(steps)-1):
        temp1 = dp > steps[i]
        temp2 = dt > steps[i]
        vals[i] = sum(Vp[temp1]) + sum(Vt[temp2])
    yaxis = vals
    xaxis = steps
    _plt.semilogx(xaxis, yaxis, 'b.-')
    _plt.xlabel('Pore & Throat Diameter (m)')
    _plt.ylabel('Cumulative Volume (m^3)')
    return fig
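A minimal way to exercise this function without a full OpenPNM setup (a sketch, not the library's own usage): it only needs dict-style access to four arrays, so a plain dict of synthetic data is enough, assuming the module-level aliases `_plt` (matplotlib.pyplot) and `_sp` (numpy/scipy) used above are available.

import numpy as np
import matplotlib.pyplot as plt

# synthetic pore/throat geometry (sizes in metres, volumes in m^3)
network = {
    'pore.diameter':   np.random.lognormal(mean=-11.0, sigma=0.4, size=500),
    'pore.volume':     np.random.rand(500) * 1e-15,
    'throat.diameter': np.random.lognormal(mean=-12.0, sigma=0.4, size=800),
    'throat.volume':   np.random.rand(800) * 1e-16,
}
fig = pore_size_distribution(network)
plt.show()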
Example #2
	def PSTH(self):
	
			
		TimeRes = np.array([0.1,0.25,0.5,1,2.5,5.0,10.0,25.0,50.0,100.0])

		Projection_PSTH = np.zeros((2,len(TimeRes)))
		for i in range(0,len(TimeRes)):
			Data_Hist,STA_Hist,Model_Hist,B = Hist(TimeRes[i])
			data = Data_Hist/np.linalg.norm(Data_Hist)
			sta = STA_Hist/np.linalg.norm(STA_Hist)
			model = Model_Hist/np.linalg.norm(Model_Hist)
			Projection_PSTH[0,i] = np.dot(data,sta)
			Projection_PSTH[1,i] = np.dot(data,model)
			
		import matplotlib.font_manager as fm
		
		plt.figure()
		plt.semilogx(TimeRes,Projection_PSTH[0,:],'gray',TimeRes,Projection_PSTH[1,:],'k',
			     linewidth=3, marker='o', markersize = 12)
		plt.xlabel('Time Resolution, ms',fontsize=25)
		plt.xticks(fontsize=25)
		#plt.axis["right"].set_visible(False)
		plt.ylabel('Projection onto PSTH',fontsize=25)
		plt.yticks(fontsize=25)
		prop = fm.FontProperties(size=20)
		plt.legend(('1D model','2D model'),loc='upper left',prop=prop)
		plt.tight_layout()
		plt.show()
Example #3
    def check_models(self):
        plt.figure('Bandgap narrowing')
        Na = np.logspace(12, 20)
        Nd = 0.
        dn = 1e14
        temp = 300.

        for author in self.available_models():
            BGN = self.update(Na=Na, Nd=Nd, nxc=dn,
                              author=author,
                              temp=temp)

            if not np.all(BGN == 0):
                plt.plot(Na, BGN, label=author)

        test_file = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            'Si', 'check data', 'Bgn.csv')

        data = np.genfromtxt(test_file, delimiter=',', names=True)

        for name in data.dtype.names[1:]:
            plt.plot(
                data['N'], data[name], 'r--',
                label='PV-lighthouse\'s: ' + name)

        plt.semilogx()
        plt.xlabel('Doping (cm$^{-3}$)')
        plt.ylabel('Bandgap narrowing (K)')

        plt.legend(loc=0)
Example #4
def analyze(title, x, y, func, func_title):
    print('-' * 80)
    print(title)
    print('x: %s:%s %s' % (list(x.shape), x.dtype, [x.min(), x.max()]))
    print('y: %s:%s %s' % (list(y.shape), y.dtype, [y.min(), y.max()]))

    popt, pcov = curve_fit(func, x, y)
    print('popt=%s' % popt)
    print('pcov=\n%s' % pcov)

    a, b = popt
    print('a=%e' % a)
    print('b=%e' % b)
    print(func_title(a, b))
    xf = np.linspace(x.min(), x.max(), 100)
    yf = func(xf, a, b)
    print('xf: %s:%s %s' % (list(xf.shape), xf.dtype, [xf.min(), xf.max()]))
    print('yf: %s:%s %s' % (list(yf.shape), yf.dtype, [yf.min(), yf.max()]))

    plt.title(func_title(a, b))
    # plt.xlim(0, x.max())
    # plt.ylim(0, y.max())
    plt.semilogx(x, y, label='data')
    plt.semilogx(xf, yf, label='fit')
    plt.legend(loc='best')
    plt.savefig('%s.png' % title)
    plt.close()
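A hedged usage sketch for the helper above: fit a two-parameter power law to synthetic data (`power_law` is made up here for illustration; `analyze` itself expects `np`, `plt` and `curve_fit` to already be imported in its module).

import numpy as np

def power_law(x, a, b):
    # simple two-parameter model y = a * x**b
    return a * np.power(x, b)

x = np.logspace(0, 3, 50)
y = power_law(x, 2.0, 0.5) * (1.0 + 0.05 * np.random.randn(x.size))  # add 5% noise
analyze('power_law_fit', x, y, power_law,
        lambda a, b: 'y = %.2f * x^%.2f' % (a, b))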
Example #5
def experiment_plot( ctr, trials, success ):
	"""
	Pass in the ctr, trials and success returned
	by the `experiment` function and plot
	the Cumulative Number of Turns For Each Arm and
	the CTR's Convergence Plot side by side
	"""
	T, K = trials.shape
	n = np.arange(T) + 1
	fig = plt.figure( figsize = ( 14, 7 ) )

	plt.subplot(121)	
	for i in range(K):
		plt.loglog( n, trials[ :, i ], label = "arm {}".format(i + 1) )

	plt.legend( loc = "upper left" )
	plt.xlabel("Number of turns")
	plt.ylabel("Number of turns/arm")
	plt.title("Cumulative Number of Turns For Each Arm")

	plt.subplot(122)
	for i in range(K):
		plt.semilogx( n, np.zeros(T) + ctr[i], label = "arm {}'s CTR".format( i + 1 ) )

	plt.semilogx( n, ( success[ :, 0 ] + success[ :, 1 ] ) / n, label = "CTR at turn t" )

	plt.axis([ 0, T, 0, 1 ] )
	plt.legend( loc = "upper left" )
	plt.xlabel("Number of turns")
	plt.ylabel("CTR")
	plt.title("CTR's Convergence Plot")

	return fig
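A hedged sketch of calling the function above with synthetic bandit data in the shapes the docstring implies: `trials` and `success` are cumulative (T, K) count arrays and `ctr` holds the K true click-through rates. The real inputs come from the `experiment` function, which is not shown here.

import numpy as np
import matplotlib.pyplot as plt

T, K = 2000, 2
ctr = np.array([0.03, 0.05])
pulls = np.random.randint(0, K, size=T)               # which arm was played each turn
rewards = np.random.rand(T) < ctr[pulls]              # Bernoulli clicks
trials = np.cumsum(np.eye(K)[pulls], axis=0)          # cumulative pulls per arm
success = np.cumsum(np.eye(K)[pulls] * rewards[:, None], axis=0)  # cumulative clicks per arm
fig = experiment_plot(ctr, trials, success)
plt.show()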
Example #6
def make_corr1d_fig(dosave=False):
    corr = make_corr_both_hemi()
    lw=2; fs=16
    pl.figure(1)#, figsize=(8, 7))
    pl.clf()
    pl.xlim(4,300)
    pl.ylim(-400,+500)    
    lambda_titles = [r'$20 < \lambda < 30$',
                     r'$30 < \lambda < 40$',
                     r'$\lambda > 40$']
    colors = ['blue','green','red']
    for i in range(3):
        corr1d, rcen = corr_1d_from_2d(corr[i])
        ipdb.set_trace()
        pl.semilogx(rcen, corr1d*rcen**2, lw=lw, color=colors[i])
        #pl.semilogx(rcen, corr1d*rcen**2, 'o', lw=lw, color=colors[i])
    pl.xlabel(r'$s (Mpc)$',fontsize=fs)
    pl.ylabel(r'$s^2 \xi_0(s)$', fontsize=fs)    
    pl.legend(lambda_titles, loc='lower left', fontsize=fs+3)
    pl.plot([.1,10000],[0,0],'k--')
    s_bao = 149.28
    pl.plot([s_bao, s_bao],[-9e9,+9e9],'k--')
    pl.text(s_bao*1.03, 420, 'BAO scale')
    pl.text(s_bao*1.03, 370, '%0.1f Mpc'%s_bao)
    if dosave: pl.savefig('xi1d_3bin.pdf')
Example #7
def plot(x, y, semilogx = False):
	if semilogx:
		pylab.semilogx(x, y)
	else:
		pylab.plot(x, y)
		
	pylab.grid(True)
	pylab.show()
Example #8
def deg_coll_plots(dirlist=None, **kwargs):
    if kwargs.get("perzeta",False)==True:
        zetalist = []
        for dirnames in dirlist:
          zetalist.append(zetaper(w_dir=dirnames,nrad=kwargs.get('nrad',638)))
          #f1 = plt.figure()
        plt.xlabel(r'Distance Along Field Line : s [AU]',labelpad=6)
        plt.ylabel(r'$\Delta \zeta [\%]$',labelpad=10)
        plt.plot(zetalist[0][0],zetalist[0][1],'k-.')
        plt.plot(zetalist[1][0],zetalist[1][1],'k--')
        plt.plot(zetalist[2][0],zetalist[2][1],'k')
        plt.axis([0.0,150.0,0.0,40.0])
        plt.minorticks_on()
    else:
        magalf = np.linspace(19.67,19.67,100)
        magfast = np.linspace(12.53,12.53,100)
        
        Msarr = [20.0,25.0,30.0,45.0,50.0,60.0]
        alfMs = [20.44,23.27,26.05, 26.84, 28.86,31.45]
        fastMs = [15.57,19.94,23.40,24.35,26.03,29.25]
        asymMs=[]
        Alparr = [0.55,0.60,0.65]
        alfAlp =[31.35,23.73,20.93]
        fastAlp=[29.25,21.48,17.28]
        asymAlp =[]
        Denarr =[3.0e-14,5.0e-14,1.0e-13,5.0e-13]
        alfDen =[30.30,26.05,22.91,20.42]
        fastDen =[27.79,23.40,19.47,15.03]
        asymDen=[]
        Betarr=[1.0,3.0,5.0]
        alfBet =[21.98,25.63,26.05]
        fastBet=[16.45,20.68,23.40]
        asymBet=[]

        #nice_plots()
        #f1 = plt.figure()
        #ax1 = f1.add_subplot(111)
        
        plt.ylabel(r"Opening Angle : $\phi^\circ$",labelpad=10)
        plt.minorticks_on()
        if kwargs.get('cplot',False)==True:
            dum1 = np.linspace(1.0e-14,1.0e-12,100)
            ms1 = plt.semilogx(Denarr,alfDen,'r*')
            ms2 = plt.semilogx(Denarr,fastDen,'bo')
            plt.xlabel(r"Base Density : $\rho_0$ [g/cm$^{3}$]")
            plt.semilogx(dum1,magalf,'r--',dum1,magfast,'b--')
            plt.axis([1.0e-14,1.0e-12,10.0,35.0])
           # plt.legend([ms1,ms2],[r'Alfven point',r'Fast point'],loc='lower left')
        else:
            dum1 = np.linspace(10.0,70.0,100)
            ms1 = plt.plot(Msarr,alfMs,'k*')
            ms2 = plt.plot(Msarr,fastMs,'ko')
            plt.xlabel(r"Stellar Mass : $M_{*}$ [$M_{\odot}$]",labelpad=6)
            plt.plot(dum1,magalf,'k--',dum1,magfast,'k')
            plt.axis([10.0,70.0,10.0,35.0])
Example #9
def makePlot(xVals, yVals, title, xLabel, yLabel, style, logX=False, logY=False):
    """Plot xVals against yVals with the given title and axis labels."""
    plt.figure()
    plt.title(title)
    plt.xlabel(xLabel)
    plt.ylabel(yLabel)
    plt.plot(xVals, yVals, style)
    if logX:
        plt.semilogx()
    if logY:
        plt.semilogy()
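A quick usage sketch of the wrapper above (assumes `plt` and `np` are imported as in the surrounding code):

import numpy as np

xs = np.logspace(0, 3, 50)
makePlot(xs, xs**2, 'Quadratic growth', 'x', 'y = x^2', 'bo-', logX=True)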
Example #10
def plot_ac_to_compare(filter_data_from_spice, coef_b, coef_a, FS, visualise=False):
    if visualise:
        # get and plot data from spice
        w1, h1 = parse_data(filter_data_from_spice)
        # get and plot data from the filter coefficients
        w, h = signal.freqz(coef_b, coef_a)
        plt.figure()
        plt.plot(w1, h1)
        plt.semilogx(w / max(w) * FS / 2, 20 * np.log10(abs(h)))
        plt.xlabel("Frequency [ Hz ]")
        plt.ylabel("Amplitude [dB]")
        plt.margins(0, 0.1)
        plt.grid(which="both", axis="both")
        plt.show()
Example #11
def showErrorBars(minExp, maxExp, numTrials):
    """Plot the mean fraction of heads with 95% confidence error bars."""
    means, sds = [], []
    xVals = []
    for exp in range(minExp, maxExp+1):
        xVals.append(2**exp)
    for numFlips in xVals:
        fracHeads, mean, sd = flipSim(numFlips, numTrials)
        means.append(mean)
        sds.append(sd)
    plt.errorbar(xVals, means, yerr=2*np.array(sds))
    plt.semilogx()
    plt.title('Mean Fraction of Heads (' + str(numTrials) + ' Trials)')
    plt.xlabel('Number of flips per trial')
    plt.ylabel("Fraction of heads & 95% confidence")
Example #12
def plot_result(fname='results/experiment_run.npy'):
    """ Plot beta/density overview
    """
    data = np.load(fname)

    plt.semilogx(*zip(*data), marker='o', ls='')

    plt.title(r'Influence of $\beta$ on spiral-tip density')
    plt.xlabel(r'$\beta$')
    plt.ylabel(r'spiral-tip density')

    img_dir = 'images'
    if not os.path.isdir(img_dir):
        os.makedirs(img_dir)
    plt.savefig(
        os.path.join(img_dir, 'beta_overview.png'),
        bbox_inches='tight', dpi=300)
Example #13
def check_klaassen():
    '''compares to values taken from www.PVlighthouse.com.au'''
    a = Mobility('Si')
    a.change_model('klaassen1992')

    print('''The model disagrees at low temperature owing to dopant\
           ionisation.\
           I am unsure if mobility should take ionised dopants or\
           non-ionised;\
           most likely it should take both, currently it only takes one''')

    dn = np.logspace(10, 20)
    # dn = np.array([1e14])
    Nd = 1e14
    Na = 0

    folder = os.path.join(
        os.path.dirname(__file__), 'Si', 'test_mobility_files')
    fnames = [r'Klassen_1e14_dopants.dat',
              r'Klassen_1e14_temp-450.dat']
    print(os.path.isdir(folder))

    for temp, f_name in zip([300, 450], fnames):

        plt.figure('Mobility - Klaassen: Deltan at ' + str(temp))

        plt.plot(dn, a.hole_mobility(dn, Na, Nd, temp=temp),
                 'r-',
                 label='hole-here')
        plt.plot(dn, a.electron_mobility(dn, Na, Nd, temp=temp),
                 'b-',
                 label='electron-here')

        print(f_name)
        data = np.genfromtxt(os.path.join(folder, f_name), names=True)

        plt.plot(data['deltan'], data['uh'], 'b--',
                 label='hole - PV-lighthouse')
        plt.plot(data['deltan'], data['ue'], 'r--',
                 label='electron - PV-lighthouse')
        plt.legend(loc=0, title='Mobility from')

        plt.semilogx()
        plt.xlabel(r'$\Delta$n (cm$^{-3}$)')
        plt.ylabel(r'Mobility  (cm$^2$V$^{-1}$s$^{-1}$)')
Example #14
def cdf(x,color='b',label=" ",lw=1,xlabel="x",ylabel="CDF",logx=False):
    """ plot the cumulative density function of x

    Parameters
    ----------

    x :  np.array  (N)
    color : string
        color symbol
    label : string
        label
    lw: float
        linewidth
    xlabel : string
        xlabel
    ylabel : string
        ylabel

    Examples
    --------

    .. plot::
        :include-source:

        >>> from matplotlib.pyplot import *
        >>> import pylayers.util.pyutil as pyu
        >>> from scipy import *
        >>> import matplotlib.pylab as plt
        >>> x = randn(100)
        >>> pyu.cdf(x)

    """
    x  = np.sort(x)
    n  = len(x)
    x2 = np.repeat(x, 2)
    y2 = np.hstack([0.0, np.repeat(np.arange(1,n) / float(n), 2), 1.0])
    if logx:
        plt.semilogx(x2,y2,color=color,label=label,linewidth=lw)
    else:
        plt.plot(x2,y2,color=color,label=label,linewidth=lw)
    plt.legend()
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
Example #15
    def PL(self, itx):
        """ plot Path Loss

        itx
        """
        td = []
        tEa = []
        tEo = []
        for irx in self.dcir[itx].keys():
            d = self.delay(itx, irx) * 0.3
            cira, ciro = self.loadcir(itx, irx)
            # Ea and Eo (energies of the two loaded CIRs) are assumed to be
            # computed from cira / ciro; they are not defined in this snippet.
            td.append(d)
            tEa.append(Ea)
            tEo.append(Eo)

        plt.semilogx(td, 10 * np.log10(tEa), 'xr')
        plt.semilogx(td, 10 * np.log10(tEo), 'xb')
        plt.show()
        return td, tEa, tEo
Example #16
def check_dorkel():
    '''compares to values taken from www.PVlighthouse.com.au'''

    a = Mobility('Si')
    a.change_model(author='dorkel1981')

    dn = np.logspace(10, 20)
    # dn = np.array([1e14])
    Nd = 1e14
    Na = 0

    folder = os.path.join(
        os.path.dirname(__file__), 'Si', 'test_mobility_files')
    # file name and temp its at
    compare = [
        ['dorkel_1e14_carriers.dat', 300],
        ['dorkel_1e14_temp-450.dat', 450],
    ]

    for comp in compare:

        plt.figure('Mobility - Dorkel: Deltan at ' + str(comp[1]))

        plt.plot(dn, a.hole_mobility(dn, Na, Nd, temp=comp[1]),
                 'r-',
                 label='hole-here')
        plt.plot(dn, a.electron_mobility(dn, Na, Nd, temp=comp[1]),
                 'b-',
                 label='electron-here')

        data = np.genfromtxt(os.path.join(folder, comp[0]), names=True)

        plt.plot(data['deltan'], data['uh'], 'b--',
                 label='hole - PV-lighthouse')
        plt.plot(data['deltan'], data['ue'], 'r--',
                 label='electron - PV-lighthouse')
        plt.legend(loc=0, title='Mobility from')

        plt.semilogx()
        plt.xlabel(r'$\Delta$n (cm$^{-3}$)')
        plt.ylabel(r'Mobility  (cm$^2$V$^{-1}$s$^{-1}$)')
Example #17
    def check_models(self):
        '''
        Plots a check of the modeled data against Digitised data from either
        papers or from other implementations of the model.
        '''
        plt.figure('Ionised impurities')

        iN_imp = N_imp = np.logspace(15, 20)

        dn = 1e10
        temp = 300

        for impurity in ['phosphorous', 'arsenic']:

            iN_imp = self.update_dopant_ionisation(N_imp,
                                                   dn,
                                                   impurity,
                                                   temp, author=None)

            if not np.all(iN_imp == 0):
                plt.plot(
                    N_imp, iN_imp / N_imp * 100, label='Altermatt: ' + impurity)

        test_file = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            'Si', 'check data', 'donors.csv')

        data = np.genfromtxt(test_file, delimiter=',', skip_header=1)

        for i in range(0, (data.shape[1] + 2) // 2, 2):
            # print i
            plt.plot(
                data[:, i], data[:, i + 1] * 100, 'r.',
                label='Digitised data')

        plt.semilogx()
        plt.xlabel('Impurity (cm$^{-3}$)')
        plt.ylabel('Fraction of Ionised impurities (%)')

        plt.legend(loc=0)
Example #18
def expt1():
	"""
	Experiment 1: Chooses the result files and generates figures
	"""
	# filename = sys.argv[1]
	result_file = "./expt1.txt"
	input_threads, input_sizes, throughputs, resp_times \
		= parse_output(result_file) 

	throughputs_MiB = [tp/2**20 for tp in throughputs]

	fig1 = pl.figure()
	fig1.set_tight_layout(True)
	fig1.add_subplot(221)
	
	pl.semilogx(input_sizes, throughputs_MiB, 
				'bo-', ms=MARKER_SIZE, mew=0, mec='b')
	pl.xlabel("fixed file size (Bytes)")
	pl.ylabel("throughput (MiB/sec)")
	pl.text(2E3, 27, "(A)")

	fig1.add_subplot(222)
	pl.loglog(input_sizes, resp_times, 
			  'mo-', ms=MARKER_SIZE, mew=0, mec='m')
	pl.xlabel("fixed file size (Bytes)")
	pl.ylabel("response time (sec)")
	pl.text(2E3, 500, "(B)")

	fig1.add_subplot(223)
	pl.semilogx(resp_times, throughputs_MiB, 
				'go-', ms=MARKER_SIZE, mew=0, mec='g')
	pl.xlabel("response time(sec)")
	pl.ylabel("throughput (MiB/sec)")
	pl.text(0.2, 27, "(C)")

	pl.tight_layout()
	pl.savefig("./figures/%s" % result_file.replace(".txt", ".pdf"))
Example #19
def bode(G,f=np.arange(.01,100,.01)):
    plt.figure()
    jw = 2*np.pi*f*1j
    y = np.polyval(G.num, jw) / np.polyval(G.den, jw)
    mag = 20.0*np.log10(abs(y))
    phase = np.arctan2(y.imag, y.real)*180.0/np.pi % 360

    plt.subplot(211)
    #plt.semilogx(jw.imag, mag)
    plt.semilogx(f,mag)
    plt.grid()
    plt.gca().xaxis.grid(True, which='minor')

    plt.ylabel(r'Magnitude (db)')

    plt.subplot(212)
    #plt.semilogx(jw.imag, phase)
    plt.semilogx(f,phase)
    plt.grid()
    plt.gca().xaxis.grid(True, which='minor')
    plt.ylabel(r'Phase (deg)')
    plt.yticks(np.arange(0, phase.min()-30, -30))

    return mag, phase
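A hedged usage sketch: any object exposing `num` and `den` polynomial coefficients works here, e.g. `scipy.signal.TransferFunction` (assumes `np` and `plt` are imported as above).

import numpy as np
import matplotlib.pyplot as plt
from scipy import signal

G = signal.TransferFunction([10.0], [1.0, 2.0, 10.0])   # 10 / (s^2 + 2s + 10)
mag, phase = bode(G, f=np.logspace(-1, 2, 500))
plt.show()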
Example #20
def createAndSaveFig(xData, yData, figFileRoot, xLabel="", yLabel="",
                     fileExt='.png', xMin=-1, xMax=-1, yMin=-10, yMax=-1,
                     log2Y=0, log2X=0, plotType='bo', axisFontSize=20,
                     tickFontSize=16, svgFlag=0):

    figFileName = figFileRoot + fileExt
    xData = convert_list_to_array(xData)
    yData = convert_list_to_array(yData)
    if log2Y == 1:
        tempPlot = plt.semilogy(xData, yData, plotType, basey=2)
    else:
        if log2X == 0:
            tempPlot = plt.plot(
                xData, yData, plotType, alpha=0.5)
        else:
            tempPlot = plt.semilogx(
                xData, yData, plotType, basex=2)
    plt.xlabel(xLabel, fontsize=axisFontSize)
    plt.ylabel(yLabel, fontsize=axisFontSize)
    ax = plt.gca()
    for tick in ax.xaxis.get_major_ticks():
        tick.label1.set_fontsize(tickFontSize)
    for tick in ax.yaxis.get_major_ticks():
        tick.label1.set_fontsize(tickFontSize)
    if xMin == -1:
        xMin = min(xData.tolist())
    if xMax == -1:
        xMax = max(xData.tolist())
    if yMin == -10:
        yMin = min(yData.tolist())
        if isnan(yMin):
            yMin = 0
    if yMax == -1:
        yMax = 0
        yDataList = yData.tolist()
        for item in yDataList:
            if not isnan(item):
                if item > yMax:
                    yMax = item
    plt.xlim(xMin, xMax)
    plt.ylim(yMin, yMax)
    plt.savefig(figFileName, dpi=150)
    if svgFlag == 1:
        figFileName = figFileRoot + '.svg'
        plt.savefig(figFileName, dpi=150)
    plt.clf()
Example #21
impulse = np.zeros(N); impulse[N//2] = 1.
yf_imp_fir, zf = lfilter(b_fir, a_fir, impulse, zi=np.zeros(max(len(a_fir),len(b_fir))-1))
yf_imp_iir, zf = lfilter(b_iir, a_iir, impulse, zi=np.zeros(max(len(a_iir),len(b_iir))-1))

plt.figure()

plt.subplot(411)
plt.title("original and filtered signals")
plt.plot(x, y)
plt.plot(x, yf_fir)
plt.plot(x, yf_iir)

plt.subplot(412)
plt.title("original and filtered frequency distributions")
freqScale = fftshift(fftfreq(N,1./fs))[N//2:]
plt.semilogx(freqScale, fftshift(20. * np.log10(abs(fft(y*window))))[N//2:])
plt.semilogx(freqScale, fftshift(20. * np.log10(abs(fft(yf_fir*window))))[N//2:])
plt.semilogx(freqScale, fftshift(20. * np.log10(abs(fft(yf_iir*window))))[N//2:])

plt.subplot(413)
plt.title("filter impulse magnitude response")
plt.semilogx(freqScale, fftshift(20. * np.log10(abs(fft(impulse*window))))[N//2:], label="signal")
plt.semilogx(freqScale, fftshift(20. * np.log10(abs(fft(yf_imp_fir*window))))[N//2:], label="FIR filt.")
plt.semilogx(freqScale, fftshift(20. * np.log10(abs(fft(yf_imp_iir*window))))[N//2:], label="IIR filt.")
plt.legend(loc='lower left')

plt.subplot(414)
plt.title("filter impulse phase response")
plt.semilogx(freqScale, fftshift(np.angle(fft(impulse*window)))[N//2:], label="signal")
plt.semilogx(freqScale, fftshift(np.angle(fft(yf_imp_fir*window)))[N//2:], label="FIR filt.")
plt.semilogx(freqScale, fftshift(np.angle(fft(yf_imp_iir*window)))[N//2:], label="IIR filt.")
Example #22
    def OnDarkT(self, event):
        visFr = self.visFr
        pipeline = visFr.pipeline
        mdh = pipeline.mdh

        NTMIN = 5
        maxPts = 1e4
        t = pipeline['t']
        if len(t) > maxPts:
            Warn(
                None,
                'aborting darktime analysis: too many events, current max is %d'
                % maxPts)
            return
        x = pipeline['x']
        y = pipeline['y']

        # determine darktime from gaps and reject zeros (no real gaps)
        dts = t[1:] - t[0:-1] - 1
        dtg = dts[dts > 0]
        nts = dtg.shape[0]

        if nts > NTMIN:
            # now make a cumulative histogram from these
            cumux = np.sort(
                dtg + 0.01 * np.random.random(nts)
            )  # hack: adding random noise helps us ensure uniqueness of x values
            cumuy = (1.0 + np.arange(nts)) / float(nts)
            bbx = (x.min(), x.max())
            bby = (y.min(), y.max())
            voxx, voxy, _ = mdh.voxelsize_nm

            bbszx = bbx[1] - bbx[0]
            bbszy = bby[1] - bby[0]
            maxtd = dtg.max()

            # generate histograms 2nd way
            binedges = 0.5 + np.arange(0, maxtd)
            binctrs = 0.5 * (binedges[0:-1] + binedges[1:])
            h, be2 = np.histogram(dtg, bins=binedges)
            hc = np.cumsum(h)
            hcg = hc[h > 0] / float(nts)  # only nonzero bins and normalise
            binctrsg = binctrs[h > 0]

        # fit theoretical distributions
        popth, pcovh, popt, pcov = (None, None, None, None)
        if nts > NTMIN:
            popth, pcovh, infodicth, errmsgh, ierrh = curve_fit(
                cumuexpfit, binctrsg, hcg, p0=(300.0), full_output=True)
            chisqredh = (
                (hcg - infodicth['fvec'])**2).sum() / (hcg.shape[0] - 1)
            popt, pcov, infodict, errmsg, ierr = curve_fit(cumuexpfit,
                                                           cumux,
                                                           cumuy,
                                                           p0=(300.0),
                                                           full_output=True)
            chisqred = ((cumuy - infodict['fvec'])**2).sum() / (nts - 1)

            # plot data and fitted curves
            plt.figure()
            plt.subplot(211)
            plt.plot(cumux, cumuy, 'o')
            plt.plot(cumux, cumuexpfit(cumux, popt[0]))
            plt.plot(binctrs, hc / float(nts), 'o')
            plt.plot(binctrs, cumuexpfit(binctrs, popth[0]))
            plt.ylim(-0.2, 1.2)
            plt.subplot(212)
            plt.semilogx(cumux, cumuy, 'o')
            plt.semilogx(cumux, cumuexpfit(cumux, popt[0]))
            plt.semilogx(binctrs, hc / float(nts), 'o')
            plt.semilogx(binctrs, cumuexpfit(binctrs, popth[0]))
            plt.ylim(-0.2, 1.2)
            plt.show()

            outstr = StringIO()

            analysis = {
                'Nevents': t.shape[0],
                'Ndarktimes': nts,
                'filterKeys': pipeline.filterKeys.copy(),
                'darktimes': (popt[0], popth[0]),
                'darktimeErrors': (np.sqrt(pcov[0][0]), np.sqrt(pcovh[0][0]))
            }

            if not hasattr(self.visFr, 'analysisrecord'):
                self.visFr.analysisrecord = []
            self.visFr.analysisrecord.append(analysis)

            outstr.write(u"events: %d \n" % t.shape[0])
            outstr.write(u"dark times: %d \n" % nts)
            outstr.write(u"region: %d x %d nm (%d x %d pixel) \n" %
                         (bbszx, bbszy, bbszx / voxx, bbszy / voxy))
            outstr.write(
                u"centered at %d,%d (%d,%d pixels) \n" %
                (x.mean(), y.mean(), x.mean() / voxx, y.mean() / voxy))
            outstr.write(
                u"darktime: %.1f (%.1f) frames - chisqr %.2f (%.2f) \n" %
                (popt[0], popth[0], chisqred, chisqredh))
            outstr.write(
                u"qunits: %.2f (%.2f), eunits: %.2f \n" %
                (100.0 / popt[0], 100.0 / popth[0], t.shape[0] / 500.0))

            labelstr = str(outstr.getvalue())
            plt.annotate(labelstr,
                         xy=(0.5, 0.1),
                         xycoords='axes fraction',
                         fontsize=10)
        else:
            Warn(None, 'not enough data points (<%d)' % NTMIN, 'Error')
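The handler above fits both cumulative dark-time distributions with a `cumuexpfit` helper that is not shown. A plausible definition, consistent with the single starting parameter `p0=(300.0)` frames, could be the following (an assumption, not code from the original project):

import numpy as np

def cumuexpfit(t, tau):
    # cumulative distribution of exponentially distributed dark times with mean tau (frames)
    return 1.0 - np.exp(-t / tau)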
Example #23
    def perform_selection(self,
                          delta_values,
                          strategy,
                          plots_fn=None,
                          results_fn=None):
        """Perform delta selection for kernel ridge regression

        delta_values : array-like, shape = [n_steps_delta]
            Array of delta values to test

        strategy : {'full_cv','insample_cv'}
            Strategy to perform feature selection:
            - 'full_cv' perform cross-validation over delta
            - 'insample_cv' estimates delta in sample using maximum likelihood.

        plots_fn    : str, optional, default=None
            File name for generated plot. if not specified, the plot is not saved

        results_fn  : str, optional, default=None
            file name for saving cross-validation results. if not specified, nothing is saved
        Returns
        -------
        best_delta : float
            best regularization parameter delta for ridge regression

        """
        import matplotlib
        matplotlib.use(
            'Agg'
        )  #This lets it work even on machines without graphics displays
        import matplotlib.pylab as PLT

        # use precomputed data if available
        if self.K is None:
            self.setup_kernel()

        print('run selection strategy %s' % strategy)

        model = fastlmm.lmm()
        nInds = self.K.shape[0]

        if strategy == 'insample':
            # take delta with largest likelihood
            model.setK(self.K)
            model.sety(self.y)
            model.setX(self.X)
            best_delta = None
            best_nLL = SP.inf

            # evaluate negative log-likelihood for different values of alpha
            nLLs = SP.zeros(len(delta_values))
            for delta_idx, delta in enumerate(delta_values):
                res = model.nLLeval(delta=delta, REML=True)
                if res["nLL"] < best_nLL:
                    best_delta = delta
                    best_nLL = res["nLL"]

                nLLs[delta_idx] = res['nLL']

            fig = PLT.figure()
            fig.add_subplot(111)
            PLT.semilogx(delta_values, nLLs, color='g', linestyle='-')
            PLT.axvline(best_delta, color='r', linestyle='--')
            PLT.xlabel('logdelta')
            PLT.ylabel('nLL')
            PLT.title('Best delta: %f' % best_delta)
            PLT.grid(True)
            if plots_fn != None:
                PLT.savefig(plots_fn)
            if results_fn != None:
                SP.savetxt(results_fn,
                           SP.vstack((delta_values, nLLs)).T,
                           delimiter='\t',
                           header='delta\tnLLs')

        if strategy == 'cv':
            # run cross-validation for determining best delta
            kfoldIter = SKCV.KFold(nInds,
                                   n_folds=self.num_folds,
                                   shuffle=True,
                                   random_state=self.random_state)
            Ypred = SP.zeros((len(delta_values), nInds))
            for Itrain, Itest in kfoldIter:
                model.setK(self.K[Itrain][:, Itrain])
                model.sety(self.y[Itrain])
                model.setX(self.X[Itrain])

                model.setTestData(Xstar=self.X[Itest],
                                  K0star=self.K[Itest][:, Itrain])

                for delta_idx, delta in enumerate(delta_values):
                    res = model.nLLeval(delta=delta, REML=True)
                    beta = res['beta']
                    Ypred[delta_idx, Itest] = model.predictMean(beta=beta,
                                                                delta=delta)

            MSE = SP.zeros(len(delta_values))
            for i in range(len(delta_values)):
                MSE[i] = SKM.mean_squared_error(self.y, Ypred[i])
            idx_bestdelta = SP.argmin(MSE)
            best_delta = delta_values[idx_bestdelta]

            fig = PLT.figure()
            fig.add_subplot(111)
            PLT.semilogx(delta_values, MSE, color='g', linestyle='-')
            PLT.axvline(best_delta, color='r', linestyle='--')
            PLT.xlabel('logdelta')
            PLT.ylabel('MSE')
            PLT.grid(True)
            PLT.title('Best delta: %f' % best_delta)
            if plots_fn != None:
                PLT.savefig(plots_fn)
            if results_fn != None:
                SP.savetxt(results_fn,
                           SP.vstack((delta_values, MSE)).T,
                           delimiter='\t',
                           header='delta\tnLLs')

        return best_delta
Example #24
    w_noreg[:, k] = np.linalg.solve(XtX, Xty).squeeze()
    # Compute mean squared error without regularization
    Error_train[k] = np.square(y_train - X_train @ w_noreg[:, k]).sum(
        axis=0) / y_train.shape[0]
    Error_test[k] = np.square(y_test - X_test @ w_noreg[:, k]).sum(
        axis=0) / y_test.shape[0]
    # OR ALTERNATIVELY: you can use sklearn.linear_model module for linear regression:
    #m = lm.LinearRegression().fit(X_train, y_train)
    #Error_train[k] = np.square(y_train-m.predict(X_train)).sum()/y_train.shape[0]
    #Error_test[k] = np.square(y_test-m.predict(X_test)).sum()/y_test.shape[0]

    # Display the results for the last cross-validation fold
    if k == K - 1:
        figure(k, figsize=(12, 8))
        subplot(1, 2, 1)
        semilogx(lambdas, mean_w_vs_lambda.T[:, 1:],
                 '.-')  # Don't plot the bias term
        xlabel('Regularization factor')
        ylabel('Mean Coefficient Values')
        grid()
        # You can choose to display the legend, but it's omitted for a cleaner
        # plot, since there are many attributes
        #legend(attributeNames[1:], loc='best')

        subplot(1, 2, 2)
        title('Optimal lambda: 1e{0}'.format(np.log10(opt_lambda)))
        loglog(lambdas, train_err_vs_lambda.T, 'b.-', lambdas,
               test_err_vs_lambda.T, 'r.-')
        xlabel('Regularization factor')
        ylabel('Squared error (crossvalidation)')
        legend(['Train error', 'Validation error'])
        grid()
Example #25
    def perform_selection(self,delta_values,strategy,plots_fn=None,results_fn=None):
        """Perform delta selection for kernel ridge regression

        delta_values : array-like, shape = [n_steps_delta]
            Array of delta values to test

        strategy : {'full_cv','insample_cv'}
            Strategy to perform feature selection:
            - 'full_cv' perform cross-validation over delta
            - 'insample_cv' estimates delta in sample using maximum likelihood.

        plots_fn    : str, optional, default=None
            File name for generated plot. if not specified, the plot is not saved

        results_fn  : str, optional, default=None
            file name for saving cross-validation results. if not specified, nothing is saved
        Returns
        -------
        best_delta : float
            best regularization parameter delta for ridge regression

        """
        import matplotlib
        matplotlib.use('Agg') #This lets it work even on machines without graphics displays
        import matplotlib.pylab as PLT 


        # use precomputed data if available
        if self.K is None:
            self.setup_kernel()

        print('run selection strategy %s' % strategy)

        model = fastlmm.lmm()
        nInds = self.K.shape[0]
   
        if strategy=='insample':
            # take delta with largest likelihood
            model.setK(self.K)
            model.sety(self.y)
            model.setX(self.X)
            best_delta = None
            best_nLL = SP.inf

            # evaluate negative log-likelihood for different values of alpha
            nLLs = SP.zeros(len(delta_values))
            for delta_idx, delta in enumerate(delta_values):
                res = model.nLLeval(delta=delta,REML=True)
                if res["nLL"] < best_nLL:
                    best_delta = delta
                    best_nLL = res["nLL"]

                nLLs[delta_idx] = res['nLL']

            fig = PLT.figure()
            fig.add_subplot(111)
            PLT.semilogx(delta_values,nLLs,color='g',linestyle='-')
            PLT.axvline(best_delta,color='r',linestyle='--')
            PLT.xlabel('logdelta')
            PLT.ylabel('nLL')
            PLT.title('Best delta: %f'%best_delta)
            PLT.grid(True)
            if plots_fn!=None:
                PLT.savefig(plots_fn)
            if results_fn!=None:
                SP.savetxt(results_fn, SP.vstack((delta_values,nLLs)).T,delimiter='\t',header='delta\tnLLs')
            
        if strategy=='cv':
            # run cross-validation for determining best delta
            kfoldIter = SKCV.KFold(nInds,n_folds=self.num_folds,shuffle=True,random_state=self.random_state)
            Ypred = SP.zeros((len(delta_values),nInds))
            for Itrain,Itest in kfoldIter:
                model.setK(self.K[Itrain][:,Itrain])
                model.sety(self.y[Itrain])
                model.setX(self.X[Itrain])

                model.setTestData(Xstar=self.X[Itest],K0star=self.K[Itest][:,Itrain])
                
                for delta_idx,delta in enumerate(delta_values):
                    res = model.nLLeval(delta=delta,REML=True)
                    beta = res['beta']
                    Ypred[delta_idx,Itest] = model.predictMean(beta=beta,delta=delta)

            MSE = SP.zeros(len(delta_values))
            for i in range(len(delta_values)):
                MSE[i] = SKM.mean_squared_error(self.y,Ypred[i])
            idx_bestdelta = SP.argmin(MSE)
            best_delta = delta_values[idx_bestdelta]

            fig = PLT.figure()
            fig.add_subplot(111)
            PLT.semilogx(delta_values,MSE,color='g',linestyle='-')
            PLT.axvline(best_delta,color='r',linestyle='--')
            PLT.xlabel('logdelta')
            PLT.ylabel('MSE')
            PLT.grid(True)
            PLT.title('Best delta: %f'%best_delta)
            if plots_fn!=None:
                PLT.savefig(plots_fn)
            if results_fn!=None:
                SP.savetxt(results_fn, SP.vstack((delta_values,MSE)).T,delimiter='\t',header='delta\tnLLs')

        return best_delta
Example #26
    # plt.plot(gamxz[0], tauxz[0], "o")
    
    plt.figure(2)
    plt.plot(evol*100, p, label = lab,  linewidth=2)

    plt.figure(3)
    plt.plot(p, q, label = lab,  linewidth=2)




    plt.figure(4)
    g_gmax = sp.diff(tauxz)/sp.diff(gamxz)
    gamxz_av = (gamxz[0:-1] + gamxz[1:])/2

    plt.semilogx(gamxz_av[:Nsteps-1], g_gmax[:Nsteps-1]/g_gmax[0],  label =lab, linewidth=2)
# maxtau = 400
plt.figure(1)
plt.xlim([-maxgam, maxgam])
plt.ylim([-1.1*maxtau, 1.1*maxtau])
plt.grid()
plt.legend(loc="upper left", framealpha=0)
plt.xlabel("Shear Strain, $\gamma_{xz}$ [%]")
plt.ylabel("Shear Stress, $\\tau_{xz}$ [KPa]")


plt.figure(2)
plt.legend(loc="upper right", framealpha=0)
plt.grid()
plt.ylabel("Mean Stress, $p$ [KPa]")
plt.xlabel("Volumetric Strain, $\epsilon_{vol}$ [%]")
Example #27
# In order to plot the GLS and BLS together 'run -i' this script:
magic = get_ipython().magic

magic('per obs')  # to calculate GLS

from OPEN.periodograms import bls
default.per2 = bls(default)  # calculate BLS and store it in the system

# for normalization
a1 = default.per2.power.max()
a2 = default.per.power.max()

from matplotlib.pylab import semilogx, legend, show
semilogx(1./default.per.freq, default.per.power, 'b-', label='gls')
semilogx(1./default.per2.freq, default.per2.power/a1*a2, 'r-', label='bayesian', lw=2.5)
legend()
show()
Example #28
if __name__ == "__main__":
    filename = ourgui.openFile("log")
    data = np.loadtxt(filename, delimiter=",", comments="#")

    f = data[:, 0]
    amp = np.abs(data[:, 1])
    ampr = amp / amp[0]
    ampdb = 20 * np.log10(ampr)

    phase = data[:, 3]
    phase = np.array([p if p < 0 else p-np.pi for p in phase])/np.pi * 180
    phase = phase - phase[0]
    
    pl.subplot(211)
    pl.semilogx(f, ampdb, 'k-')
    pl.semilogx(f, np.zeros(len(f))-3, 'r--', label='-3dB line')
    pl.xlabel("Frequency (Hz)")
    pl.ylabel("Amplitude (dB)")
    pl.title("PZT transfer function")
    pl.legend(loc='best')

    pl.subplot(212)
    pl.semilogx(f, phase, 'k-')
    pl.semilogx(f, np.zeros(len(f))-90, 'r--', label='-90deg line')
    pl.ylabel("Phase (deg)")
    pl.xlabel("Frequency (Hz)")
    pl.ylim([np.min(phase)-20, np.max(phase)+20])
    pl.legend(loc='best')

    pl.show()
Example #29
#sd.stop()

from scripts.DefaultFigures import Time, SpecMag, SpecPh
plt.figure()
Time(t, data)
plt.figure()
SpecMag(F, DATA)

#Tertsband test
#Small Tesst audio

N = int(4096)

[fs, data] = wavfile.read("../09 Sample 15sec.wav")  # ,dtype=float)
t = np.arange(0, N/fs, 1/fs)
data = data[2048:2048+N:]
data = np.reshape(np.delete(data, 0, 1), len(data))
[F, DATA] = Transform.FFT(data, fs)
DATA = abs(DATA[len(DATA)//2::])
F = F[len(F)//2::]

(tertsF, tertsA) = OctaveBands.Octave3(DATA, F)

# Show curves for narrow bands and 1/3 octave bands.
plt.figure()
plt.semilogx(F, DATA, 'k-')  # ,'linewidth',2)
plt.semilogx(tertsF, tertsA, 'ro', markersize=10)
plt.xlabel('Frequency (Hz)')
plt.ylabel('Sound absorption coefficient')
plt.legend(['Narrow bands', '1/3 octave bands'], loc=4)
Example #30
def make_stf(dt=0.10, nt=5000, fmin=1.0/100.0, fmax=1.0/8.0, filename='../INPUT/stf_new', plot=True):

	"""
	Generate a source time function for ses3d by applying a bandpass filter to a Heaviside function.

	make_stf(dt=0.13, nt=4000, fmin=1.0/100.0, fmax=1.0/8.0, filename='../INPUT/stf_new', plot=True)

	dt: Length of the time step. Must equal dt in the event_* file.
	nt: Number of time steps. Must equal to or greater than nt in the event_* file.
	fmin: Minimum frequency of the bandpass.
	fmax: Maximum frequency of the bandpass.
	filename: Output filename.

	"""

	#- Make time axis and original Heaviside function. --------------------------------------------

	t = np.arange(0.0,float(nt+1)*dt,dt)
	h = np.ones(len(t))

	#- Apply filters. -----------------------------------------------------------------------------

	h = flt.highpass(h, fmin, 1.0/dt, 3, zerophase=False)
	h = flt.lowpass(h, fmax, 1.0/dt, 5, zerophase=False)

	#- Plot output. -------------------------------------------------------------------------------

	if plot == True:

		#- Time domain.

		plt.plot(t,h,'k')
		plt.xlim(0.0,float(nt)*dt)
		plt.xlabel('time [s]')
		plt.title('source time function (time domain)')

		plt.show()

		#- Frequency domain.

		hf = np.fft.fft(h)
		f = np.fft.fftfreq(len(hf), dt)

		plt.semilogx(f,np.abs(hf),'k')
		plt.plot([fmin,fmin],[0.0, np.max(np.abs(hf))],'r--')
		plt.text(1.1*fmin, 0.5*np.max(np.abs(hf)), 'fmin')
		plt.plot([fmax,fmax],[0.0, np.max(np.abs(hf))],'r--')
		plt.text(1.1*fmax, 0.5*np.max(np.abs(hf)), 'fmax')
		plt.xlim(0.1*fmin,10.0*fmax)
		plt.xlabel('frequency [Hz]')
		plt.title('source time function (frequency domain)')

		plt.show()

	#- Write to file. -----------------------------------------------------------------------------

	f = open(filename, 'w')

	#- Header.

	f.write('source time function, ses3d version 4.1\n')
	f.write('nt= '+str(nt)+', dt='+str(dt)+'\n')
	f.write('filtered Heaviside, highpass(fmin='+str(fmin)+', corners=3, zerophase=False), lowpass(fmax='+str(fmax)+', corners=5, zerophase=False)\n')
	f.write('-- samples --\n')

	for k in range(len(h)):
		f.write(str(h[k])+'\n')

	f.close()
Example #31
    print device.ask("SSTR ? 0")
    print device.ask("SSTP ? 0")

    device.write("SRPT 2,0")

    device.ask("DSPS?")
    device.write("STRT")
    dataa, datab = False, False
    while not dataa or not datab:
        res = device.display_status_word(int(device.ask("DSPS ?")))
        print(res)
        codes = res[1]
        if 'SSA' in codes:
            dataa = True
        if 'SSB' in codes:
            datab = True
        # if dataa and datab:
        #     break
        sleep(1)

    # print device.ask("ACTD ?")
    # print device.ask("DTRD ? 2,0")
    # device.write("NOTE 0,1,0,50,50,Hello")
    # print device.ask("DUMP")
    data = [float(num) for num in device.ask("DSPY ? 0").split(',')]
    data = data[0:-1]
    pts = len(data)
    f = np.logspace(np.log10(start), np.log10(stop), pts) # this is incorrect
    pl.semilogx(f, data)
    pl.show()
Example #32
# Plot data from ex22_data_single.txt and ex22_data_double.txt

from file_interact import Read
import matplotlib.pylab as plt

inst = Read("ex22_data.txt") 
#inst_sing.matrix is data_points[:,i] --> i=N,up,down 

N = inst.array[:,0]
upsum_sing = inst.array[:,1]
downsum_sing = inst.array[:,2]
upsum_doub = inst.array[:,3]
downsum_doub = inst.array[:,4]


plt.semilogx(N,upsum_sing,'b-',label="single precision")
plt.semilogx(N,downsum_sing,'r-',label="single precision")
plt.semilogx(N,upsum_doub,'b--',label="double precision")
plt.semilogx(N,downsum_doub,'r--',label="double precision")
plt.xlabel("N iterations log-scale")
plt.ylabel("sum-value")
plt.title("sum up is blue; sum down is red")
plt.hlines(y=2,xmin=N[0],xmax=N[-1])#,'k-',label="Analytical sol.") 
plt.legend(numpoints=1,bbox_to_anchor=[1,1.5])
plt.show()
Example #33
  #yy1 = interp1d(z1,y1,axis=0)
  #yy2 = interp1d(z2,y2,axis=0)
  #f.Y = w0*y0+w1*yy1(z0)+w2*yy2(z0)
  f.regrid() ; f.solve_steady()

  tt = interp1d(f.z,f.T,kind="cubic")
  Tst = np.append(Tst,tt(f.z_ref))
  Xst = np.append(Xst,f.chi_ref)
  G   = np.append(G,np.trapz(f.d*f.G,f.z))
  S   = np.append(S,np.trapz(f.d*f.S,f.z))
  y2 = y1 ; y1 = y0 ; y0 = f.Y
  z2 = z1 ; z1 = z0 ; z0 = f.z

  pl.subplot(3,1,1) ; pl.plot(f.z,f.T)
  pl.subplot(3,1,2) ; pl.plot(f.z,f.Y)
  pl.subplot(3,1,3) ; pl.plot(f.z,f.s)
  pl.pause(.1) ; pl.clf()

  if f.chi_ref > 204: #####HACK!!!
    break
  
for x,t,g,s in zip(Xst,Tst,G,S):
  print "%e, %f, %e, %e" % (x,t,g,s)

print "%g %g %g" % (Teq,Geq,Seq)
print "%g %g %g" % (Tmix,Gmix,Smix)

pl.close(); pl.semilogx(Xst,Tst,'.-') ; pl.show()
pl.close(); pl.plot(G,Tst,'.-') ; pl.show()
pl.close(); pl.plot(S,Tst,'.-') ; pl.show()
Example #34
font = {
    'family': 'serif',
    'color': 'black',
    'weight': 'normal',
    'size': 16,
}

plt.subplots_adjust(left=0.1,
                    bottom=0.1,
                    right=0.98,
                    top=0.95,
                    wspace=0,
                    hspace=0)

plt.semilogx()
plt.grid(False)
plt.xlim(5, 30000)
plt.ylim(30, 130)

plt.errorbar(x1,
             y1,
             yerr=erry1,
             fmt='wo',
             ecolor='k',
             capthick=0.5,
             label=r"$\bar{p}p$")
plt.errorbar(x2,
             y2,
             yerr=erry2,
             fmt='ko',
Example #35
	curren_accu=[]
	accu = test_accu[i]
	accu = accu.split("\t")
	print(accu)
	t = test_time[i]
	t = t.split("\t")
	for j in range(len(line)):
		index = int(line[j]) - 1
		if (accu[index] > best_accuracy):
			best_accuracy = accu[index]
			curren_accu.append(best_accuracy)
		else:
			curren_accu.append(best_accuracy)
		sum_time = float(sum_time) + float(t[index])
		time_.append(sum_time)
	plt.semilogx(time_, curren_accu, 'b',marker='x', label = "Cost-Aware-GP-UCB")

	best_accuracy = 0
	curren_accu=[]
	sum_time=0
	time_=[]


	for j in range(len(gp_tmp)):
		index = int(gp_tmp[j]) - 1
		if (accu[index] > best_accuracy):
			best_accuracy = accu[index]
			curren_accu.append(best_accuracy)
		else:
			curren_accu.append(best_accuracy)
		sum_time = float(sum_time) + float(t[index])
Example #36
# compute the spectrum
aa = espec.espec1(df.mare, nfft, fs)

# meteorological tide spectrum
aamm = espec.espec1(dfp.mm, nfft, fs)
aap = espec.espec1(dfp.prev, nfft, fs)

# find the meteorological tide wave
dfp['Mareonda'] = dfp.mm.rolling(1080).mean()
onda = dfp.Mareonda - np.mean(dfp.Mareonda)

pl.figure()
pl.plot(df.index, df.mare)

pl.figure()
pl.semilogx(aa[:, 0], aa[:, 1], 'b')
pl.semilogx(aamm[:, 0], aamm[:, 1], 'r')
pl.semilogx(aap[:, 0], aap[:, 1], 'k')
pl.grid()
pl.legend(['nivel_medido', 'prev'])
pl.xlabel('Freq. (cpd)')
pl.ylabel('m2/cpd')

pl.figure()
pl.subplot(211)
pl.plot(df.index, df.mare - np.mean(df.mare), 'b', df.index,
        dfp.prev - np.mean(dfp.prev), 'r')
pl.legend(['Mare_Medido', 'Mare_Harminicos'])
pl.subplot(212)
pl.plot(dfp.index, dfp.mm - np.mean(dfp.mm))
pl.plot([dfp.index[0], dfp.index[-1]], [0, 0], 'r--')
Example #37

Densities = np.ones(hnums) / hnums * mcitrs  # Densities G(E)
EngTics = (htics[:-1] + htics[1:]) / 2.  # energy-axis values E (histogram bin centers)

plt.figure()

for it in range(100):
    plt.plot(EngTics, Densities)
    Partitions = getPartition(Densities, EngTics, mcbetas)
    Densities = getDensity(Hbetas, Partitions, EngTics, mcbetas, mcitrs)

plt.show()

plt.figure()

Dall = Densities.sum()
Densities /= Dall

plt.semilogx(Densities, EngTics)
plt.ylabel('Energy(negative CVscore)')
plt.xlabel('Density')
plt.title('Density of States')

eshist = np.histogram(truees[1:] / 13., bins=htics)[0]
eshistall = eshist.sum()
eshist = eshist / eshistall
plt.semilogx(eshist, EngTics)

plt.show()
Example #38
File: MHD.py  Project: wathen/PhD
    # # # if (ShowResultPlots == 'yes'):

    # plot(u_k)
    # # plot(interpolate(ue,Velocity))

    # plot(p_k)
    # # pe = interpolate(pe,Pressure)
    # # pe.vector()[:] -= np.max(pe.vector().array() )/2
    # # plot(interpolate(pe,Pressure))

    # plot(b_k)
    # # plot(interpolate(be,Magnetic))

    # plot(r_k)
    # # plot(interpolate(re,Lagrange))

    # # # interactive()

plt.semilogx(InnerTolerances,TotalWork)
plt.xlabel("Inner tolerance")
plt.ylabel("Inner x Outer iterations")
plt.show()






    # interactive()