Code example #1
File: utilsplot.py Project: Mvuyiso/Skogestad-Python
def condtn_nm_plot(G, w_start=-2, w_end=2, axlim=None, points=1000):
    """
    Plot of the condition number, the maximum over the minimum singular value

    Parameters
    ----------
    G : numpy matrix
        Plant model.

    Returns
    -------
    Plot : matplotlib figure

    Note
    ----
    A condition number over 10 may indicate sensitivity to uncertainty and
    control problems

    With a small condition number, Gamma =1, the system is insensitive to
    input uncertainty, irrespective of controller (p248).
    """

    s, w, axlim = df.frequency_plot_setup(axlim, w_start, w_end, points)

    def cndtn_nm(G):
        return utils.sigmas(G)[0]/utils.sigmas(G)[-1]

    freqresp = [G(si) for si in s]

    plt.loglog(w, [cndtn_nm(Gfr) for Gfr in freqresp], label=r'$\gamma (G)$')
    plt.axis(axlim)
    plt.ylabel(r'$\gamma (G)$')
    plt.xlabel('Frequency [rad/unit time]')
    plt.axhline(10., color='red', ls=':', label=r'"Large" $\gamma (G) = 10$')
    plt.legend()
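A minimal usage sketch (not from the source): it assumes the Skogestad-Python utilsplot module above is on the path and passes in a hypothetical 2x2 plant G(s).

import numpy as np
import matplotlib.pyplot as plt
import utilsplot

def G(s):
    # hypothetical 2x2 plant model (an illustrative assumption)
    return np.matrix([[1 / (s + 1), 2 / (s + 2)],
                      [1 / (s + 3), 1 / (s + 4)]])

utilsplot.condtn_nm_plot(G, w_start=-2, w_end=2)
plt.show()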
Code example #2
def plot_mpl_fig(): 
    rootdir = '/Users/catherinefielder/Documents/Research_Halos/HaloDetail'
    cs = []
    pops = []
    for subdir, dirs, files in os.walk(rootdir):
        head,tail = os.path.split(subdir)
        haloname = tail
        for file in files:
            if file.endswith('_columnsadded'):
                values = ascii.read(os.path.join(subdir, file), format = 'commented_header') #Get full path and access file
                host_c = values[1]['host_c']  
                cs = np.append(cs, host_c)                     
                pop = len(values['mvir(10)'])
                pops = np.append(pops, pop)
                print(pop)
                plt.loglog(host_c, pop, alpha=0.8,label = haloname)
        print "%s done. On to the next." %haloname
    #plt.xscale('log')
    #plt.yscale('log')
    plt.xlabel('Host Concentration')
    plt.ylabel('Nsat')
    plt.title('Abundance vs. Host Concentration', ha='center')
    #plt.legend(loc='best')
    spearman = scipy.stats.spearmanr(cs, pops)
    print(spearman)
Code example #3
File: plotting.py Project: ArcticSnow/dempy
def plotPeriodogram(fvec, pvec,  axes=None, thinning=None):
    '''
    **plotPeriodogram(fvec, pvec,  axes=None, thinning=None)**
    Function that plots a periodogram
    
    Parameters
    ----------
    **fvec** : vector containing  the frequencies resulting from fftdem() \n
    **pvec** : vector containing  the power values resulting from fftdem() \n
    **axes** : string indicating what type of axes to use. Possible options are: \n\t
        - "loglog" \n\t
        - "semilogx"\n\t
        - "semilogy" \n\t
        - None (default option)\n
    **thinning** : parameter to thin the data to plot, as the vectors can be very large. It will plot only the number of dots indicated by thinning '''
    # Wvec=1/fvec
    if thinning is None:
        thinning = thinningCheckup(fvec)
    plt.figure()
    if axes == "loglog":
        plt.loglog(fvec[range(0,fvec.size,thinning)],pvec[range(0,pvec.size,thinning)])
    elif axes == "semilogx":
        plt.semilogx(fvec[range(0,fvec.size,thinning)],pvec[range(0,pvec.size,thinning)])
    elif axes == "semilogy":
        plt.semilogy(fvec[range(0,fvec.size,thinning)],pvec[range(0,pvec.size,thinning)])
    else:
        plt.plot(fvec[range(0,fvec.size,thinning)],pvec[range(0,pvec.size,thinning)])
    plt.title("Periodogram")
    plt.ylabel("DFT mean square amplitude")
    plt.xlabel("Frequency (1/m)")
    plt.show()
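A minimal usage sketch with synthetic data (an assumption, not from the source); thinning is passed explicitly so the thinningCheckup() helper is not required.

import numpy as np

n = 1024
signal = np.random.randn(n)                 # synthetic elevation profile
fvec = np.fft.rfftfreq(n, d=1.0)            # frequencies (1/m)
pvec = np.abs(np.fft.rfft(signal))**2       # power values
plotPeriodogram(fvec[1:], pvec[1:], axes="loglog", thinning=1)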
Code example #4
File: RGA.py Project: Terrance88/Skogestad-Python
def RGA_w(w_start, w_end, x, y):
    """ w_start is the start of logspace 
    w_end is the ending of the logspace 
    x and y is refer to the indices of the RGA matrix that needs to be plotted
    
    this is to calculate the RGA at different freqeuncies
    this give more conclusive values of which pairing would give fast responses
    under dynamic situations"""

    w = np.logspace(w_start, w_end, 1000)
    store = np.zeros([len(x), len(w)])

    count = 0
    for w_i in w:
        A = G(w_i)
        RGA_w = np.multiply(A, np.transpose(np.linalg.pinv(A)))
        store[:, count] = RGA_w[x, y]
        count = count + 1

    for i in range(len(x)):
        plt.loglog(w, store[i, :])

    plt.title("RGA over Freq")
    plt.xlabel("w")
    plt.ylabel("|RGA values| gevin x ,y ")
    plt.show()
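A minimal usage sketch (an assumption, not from the source): RGA_w relies on a module-level transfer-function matrix G, so a hypothetical 2x2 example is defined here, with x and y selecting the diagonal RGA elements.

import numpy as np
import matplotlib.pyplot as plt

def G(w):
    # hypothetical 2x2 plant evaluated at frequency w
    s = 1j * w
    return np.array([[1 / (s + 1), 2 / (s + 2)],
                     [1 / (s + 3), 1 / (s + 4)]])

RGA_w(-2, 2, [0, 1], [0, 1])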
Code example #5
File: plot1.py Project: taoyiliang/unc-quant
def addPlot(title,lbl,ref=None):
  inFile = open(title, 'r')

  entries=[]
  for line in inFile:
    if line=='\n' or line.startswith('Moments') or line.startswith('N,'):
      continue
    entries.append([])
    vals=line.strip().split(',')
    entries[-1].append(int(vals[0]))
    entries[-1].append(float(vals[1]))
    entries[-1].append(float(vals[2]))

  entries=np.array(entries)
  entries=entries[entries[:,0].argsort()]
  if ref is None:
    errs=np.zeros([len(entries)-1,3])
    errs[:,0] = entries[:-1,0]
    errs[:,1] = abs(entries[:-1,1]-entries[-1,1])/entries[-1,1]
    errs[:,2] = abs(entries[:-1,2]-entries[-1,2])/entries[-1,2]
  else:
    errs=np.zeros([len(entries),3])
    errs[:,0] = entries[:,0]
    errs[:,1] = abs(entries[:,1]-ref[1])/ref[1]
    errs[:,2] = abs(entries[:,2]-ref[2])/ref[2]

  #for e in errs:
  #  print e
  errs=errs[errs[:,0].argsort()]
#  print '\n\n'
#  for e in errs:
#    print e
  errs=list(zip(*errs))
  plt.loglog(errs[0],errs[1],'-o',label=lbl)
Code example #6
File: test_gaussian.py Project: plazar/Ratings2.0
def fit_and_plot(cand):
    data = cand.profile
    n = len(data)
    xs = np.linspace(0.0, 1.0, n, endpoint=False)
    G = gauss._compute_data(cand)
    print "k: %g, log(k): %g" % (G.k, np.log10(G.k))
    test_ks = np.logspace(np.log10(G.k)-2, np.log10(G.k)+1, 1e3)
    #test_ks = np.exp(np.linspace(np.log(1e-1),np.log(1e3),1e3))
    
    plt.figure(1)
    resids = [gauss._rms_residual(k,data) for k in test_ks]
    plt.loglog(test_ks,resids,color="green", label="_nolabel_")
    #plt.axvline(true_k,color="red", label="true k")
    best_k = test_ks[np.argmin(resids)]
    plt.axvline(best_k,color="green", label="best k")
    plt.axvline(G.k,color="cyan", label="k from fit")
    plt.ylabel("RMS of residuals")
    plt.xlabel("Value of k used (i.e. held fixed) when fitting")
    plt.legend(loc="best")

    plt.figure(2)
    mue, ae, be = gauss._fit_all_but_k(best_k, data)
    #plt.plot(xs, true_prof, color="red", label="true")
    plt.plot(xs, data, color="black", label="data")
    plt.plot(xs, (ae*utils.vonmises_histogram(best_k,mue,n)+be), color="green", label="exhaustive best fit")
    plt.plot(xs, G.histogram(n), color="cyan", label="best fit")

    plt.legend(loc="best")
    plt.show()
Code example #7
	def __call__(self,u,v,w,bx,by,bz):
		q = 4

		d = (self.sim.zmx, self.sim.ymx, self.sim.xmx)
		vspec = spec(u,v,w,dims=d)
		bspec = spec(bx,by,bz,dims=d)

		plt.subplot(221)
		plt.plot(vspec)
		plt.semilogy()
		plt.title('v-spec')
		plt.axis("tight")
		plt.ylim([1e-12,1e-2])

		plt.subplot(222)
		plt.plot(vspec)
		plt.loglog()
		plt.title('v-spec')
		plt.axis("tight")
		plt.ylim([1e-12,1e-2])

		plt.subplot(223)
		plt.plot(bspec)
		plt.semilogy()
		plt.title('b-spec')
		plt.axis("tight")
		plt.ylim([1e-12,1e-2])

		plt.subplot(224)
		plt.plot(bspec)
		plt.loglog()
		plt.title('b-spec')
		plt.axis("tight")
		plt.ylim([1e-12,1e-2])
Code example #8
def disk_spec(file):
    #Construct table
    table=construct_table('tmpd')
    disk_params=get_params(file)
    specs=params_to_spec(disk_params, table)

    r=disk_params[:, 0:2]
    Teff=disk_params[:, 2]

    #Finding the total flux
    totf=sum_spec(r, specs, Teff)
    totf1=totf[1]
    totf2=totf[0]

    #max1=np.max(totf1[1])
    #max2=np.max(totf2[1])
    #peak=np.max(max1, max2)

    plt.loglog()
    #plt.figsize(20, 8)

    plt.xlabel("nu [hz]")
    plt.ylabel("nu L_nu [ergs/s]")

    plt.axis([10.**14, 2*10.**18, 10.**38, 10.**44]) 

    plt.plot(totf1[0], totf1[0]*totf1[1])
    plt.plot(totf2[0], totf2[0]*totf2[1])
    plt.show()
Code example #9
File: PlotMatplotlib.py Project: yeus/PySimulator
def plotBode2(zpk, n=200, f_range=None, f_logspace=True):
    """
    Bode plot of ZerosAndPoles object using matplotlib
    """
    (f, y) = zpk.frequencyResponse(n=n, f_range=f_range, f_logspace=f_logspace)

    y_A = numpy.abs(y)
    y_phi = Misc.to_deg(Misc.continuousAngle(y))

    plt.figure()
    plt.subplot(211)
    if f_logspace:
        plt.loglog(f, y_A)
    else:
        plt.plot(f, y_A)
    plt.grid(True, which="both")
    plt.ylabel("Amplitude")

    plt.subplot(212)
    if f_logspace:
        plt.semilogx(f, y_phi)
    else:
        plt.plot(f, y_phi)
    plt.grid(True, which="both")
    plt.xlabel("Frequency [Hz]")
    plt.ylabel("Phase [deg]")

    plt.show()
Code example #10
    def plot_area_vs_energy(self, filename=None, show_save_energy=True):
        """
        Plot effective area vs. energy.
        """
        import matplotlib.pyplot as plt

        energy_hi = self.energy_hi.value
        effective_area = self.effective_area.value
        plt.plot(energy_hi, effective_area)
        if show_save_energy:
            plt.vlines(self.energy_thresh_hi.value, 1E3, 1E7, 'k', linestyles='--')
            plt.text(self.energy_thresh_hi.value - 1, 3E6,
                     'Safe energy threshold: {0:3.2f}'.format(
                         self.energy_thresh_hi),
                     ha='right')
            plt.vlines(self.energy_thresh_lo.value, 1E3, 1E7, 'k', linestyles='--')
            plt.text(self.energy_thresh_lo.value + 0.1, 3E3,
                     'Safe energy threshold: {0:3.2f}'.format(self.energy_thresh_lo))
        plt.xlim(0.1, 100)
        plt.ylim(1E3, 1E7)
        plt.loglog()
        plt.xlabel('Energy [TeV]')
        plt.ylabel('Effective Area [m^2]')
        if filename is not None:
            plt.savefig(filename)
            log.info('Wrote {0}'.format(filename))
Code example #11
File: construction.py Project: NatalieP-J/Summer2014
def makegood(prereqs,func,r,size,grid,smallrexp,largerexp,plotting):
    """
    prereqs - array containing model class instance as first element
    func - function to be evaluated
    r - independent variable array
    size - size of generated independent variable array with format 
    	   [log10(max),log10(min),stepsize]
    grid - choice of grid generator function
    smallrexp - log slope at small r or large E
    largerexp - log slope at large r or small E
    plotting - if False, do not plot. 
               if not False, must be array with ['<xlabel>','<ylabel>']
    
    Returns an interpolated object version of the function based 
    computed values.
    """
    model = prereqs[0]
    #generate independent array grid
    rarray,rchange,rstart = grid([model],size[0],size[1],size[2])
    #compute value of function for grid points
    tab,problems = func(rarray,prereqs)
    frac = float(len(problems))/float(len(tab))
    #report the fraction of problem points to console and file
    print('fraction reporting a message: {0}'.format(frac))
    model.statfile.write('\nmesg frac = {0}\n'.format(frac))
    #check for problem points not caught in integration process
    gcheck = goodcheck(tab)
    #interpolate in log10 space
    inter = interp1d(log10(rarray),log10(tab))
    #generate array to further extremes using powerlaw behaviour
    m = piecewise(r,inter,tab[0],tab[len(rarray)-1],rstart,rchange,smallrexp,largerexp)
    #interpolate extended array in log10 space
    inter2 = interp1d(log10(r),log10(m))
    #save values used to interpolate to file (NOT in log10 space)
    saver = column_stack((r,m))
    funcname = str(func).split(' ')[1][4:]
    pklwrite('{0}/{1}.pkl'.format(model.directory,funcname),saver)
    #if plotting is possible and the array doesn't consist entirely of problems
    #add plot to pdf and return interpolate functional form
    if plotting != False and gcheck == True:
        xaxis,yaxis = plotting
        plt.figure()
        plt.loglog(r[1:-1],m[1:-1],'c',linewidth = 5)
        plt.loglog(rarray,tab,'.',color = 'DarkOrange')
        plt.ylabel(r'{0}'.format(yaxis))
        plt.xlabel('{0}'.format(xaxis))
        plt.xlim(min(r[1:-1]),max(r[1:-1]))
        plt.ylim(min(m[1:-1]),max(m[1:-1]))
        plt.title(model.name)
        model.pdfdump.savefig()
        plt.close()
        return inter2
    #if plotting isn't possible but array doesn't consist entirely of problems
    #return interpolated functional form
    elif plotting == False and gcheck == True:
        return inter2
    #if computation failed, return 0
    #this signals the rest of the program that computation failed here
    elif gcheck == False:
        return 0
Code example #12
File: slbw.py Project: cjosey/PINSPEC
def compareXS(isotope, type_xs='capture', dir='.'):

    # Get fake XS from the info given
    El, A = isotope.split('-', 1)
    # Find the proper filename for the fake XS
    if type_xs == 'scatter':
        type_xs = 'elastic'
    path = str(pinspec.getXSLibDirectory()) + '/' + El + '-' + A + '-' + type_xs + '.txt'
    # Read in array for fictitious XS at 300
    EnT = numpy.array([])
    barnsT = numpy.array([])
    invEnT = numpy.array([])
    with open(path) as resT:
        # Parse out string containing the temperature
        Junk, temp = resT.readline().split('=', 1)
        for line in resT:
            EnTt, barnsTt = line.split(',', 1)
            EnTt = float(EnTt)
            barnsTt = float(barnsTt)
            invEnTt = 1 / EnTt
            EnT = numpy.append(EnT, EnTt)
            barnsT = numpy.append(barnsT, barnsTt)
            invEnT = numpy.append(invEnT, invEnTt)

    py_printf('INFO', 'Read in Doppler Broadened XS correctly')

    # Read in array for ENDF7 XS at 300
    npath = str(pinspec.getXSLibDirectory()) + '/BackupXS/' + El + '-' + A + '-' + \
        type_xs + '.txt'
    EndfE300 = numpy.array([])
    barnsEndF300 = numpy.array([])
    invEndfE300 = numpy.array([])
    with open(npath) as Endf300:
        # Parse out string containing the data source
        Junk, xssource = Endf300.readline().split(' ', 1)
        xssource = xssource.strip()
        for line in Endf300:
            EndfE300temp, barnsEndF300temp = line.split(',', 1)
            EndfE300temp = float(EndfE300temp)
            barnsEndF300temp = float(barnsEndF300temp)
            invEndfE300temp = 1 / EndfE300temp
            EndfE300 = numpy.append(EndfE300, EndfE300temp)
            barnsEndF300 = numpy.append(barnsEndF300, barnsEndF300temp)
            invEndfE300 = numpy.append(invEndfE300, invEndfE300temp)

    log_printf(INFO, 'Read in ENDF/B-VII XS correctly')

    # Plot values on top of each other
    fig = plt.figure()
    plt.loglog(EnT, barnsT)
    plt.loglog(EndfE300, barnsEndF300)
    CXStype = type_xs.title()
    plt.legend(['Doppler broadened ' + El + '-' + A + ' ' + CXStype + ' XS at temp=' +
                temp, xssource + ' ' + El + '-' + A + ' ' + CXStype + ' XS at temp=300K'],
               loc='lower left', prop={'size': 10})
    plt.grid()
    plt.title(CXStype + ' Cross Section Comparison')
    plt.xlabel('E [eV]')
    plt.ylabel('XS [barns]')
    plt.savefig(dir + '/' + CXStype + '_XS_Comparison.png')
Code example #13
    def plotting(self, msdtype="ensemble", particlemsdtime=0,error=0, showlegend=None,scale="loglog"):
        """
        :param error: Number of standard deviations from mean, which is shown in the figure


        :return: A figure with plotting of the Ensemble MSD
        """
        if msdtype=="ensemble":
            msd,std=self.msd_ensemble()
        if msdtype=="time":
            msd,std=self.msd_time(particlemsdtime)


        colors=['r','b','g','k','c','w','b','r','g','b','k','c','w','b','r','g','b','k','c','w','bo','ro','go','bo','ko','co','wo','bo']
        #fig=plt.plot(range(msd_ensemble.size), msd_ensemble ,colors[2], label="ensemble msd")
        if scale == "lin":
            plt.plot(self.t*self.dt,self.msdanalyt(),":",color=colors[1], label="analytic D=%f,particles=%d,length=%d,alpha=%f" %(self.D,self.particles,self.n,self.alpha))
        if scale == "loglog":
            plt.loglog(self.t*self.dt,self.msdanalyt(),":",color=colors[1], label="analytic D=%f,particles=%d,length=%d,alpha=%f" %(self.D,self.particles,self.n,self.alpha))
        fig=plt.errorbar(self.t*self.dt, msd, yerr=error*std,label="Spectral method with D=%f,particles=%d, length=%d ,alpha=%f, Std=%f" %(self.D,self.particles,self.n,self.alpha,error))
        if showlegend is not None:
            plt.legend(loc=2)
        plt.xlabel('Steps', fontsize=14)
        plt.ylabel('MSD', fontsize=14)
        return fig
Code example #14
def plot_clustering_spectrum (graph, path):
    """Plot the clusttering spectrum of the graph and save the figure
       at the given path. On X-axis we have degrees and on Y-axis we have
       average clustering coefficients of the nodes that have that degree"""

    node_to_degree = dict(graph.degree())
    node_to_clustering = nx.clustering(graph)
    degree_to_clustering = {}

    # calculate average clustering coefficients for nodes with certain degree
    for node in node_to_degree:
        deg = node_to_degree[node]
        tmp = degree_to_clustering.get(deg, [])
        tmp.append(node_to_clustering[node])
        degree_to_clustering[deg] = tmp

    for degree in degree_to_clustering:
        tmp = degree_to_clustering[degree]
        degree_to_clustering[degree] = float(sum(tmp)) / len(tmp)

    x = sorted(degree_to_clustering.keys(), reverse = True)
    y = [degree_to_clustering[i] for i in x]

    plt.loglog(x, y, 'b-', marker = '.')
    plt.title("Clustering Spectrum")
    plt.ylabel("Average clustering coefficient")
    plt.xlabel("Degree")
    plt.axis('tight')
    plt.savefig(path)
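A minimal usage sketch on a synthetic scale-free graph (an assumption, not from the source); nx and plt are assumed imported as in the source module, and the figure is written to the given path.

import networkx as nx

g = nx.barabasi_albert_graph(500, 3)
plot_clustering_spectrum(g, "clustering_spectrum.png")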
Code example #15
def plot_shortest_path_spectrum (graph, path, paths_data):
    """Plot distribution of shortest paths of the graph and save the figure
       at the given path. On X-axis we have distance values and on Y-axis we
       have percentage of node pairs that have that distance value"""

    diameter = graph_diameter(paths_data)
    pairs = graph.order() * (graph.order()-1) * 0.5

    distances_count = [0 for i in range(diameter + 1)]
    for i in range(8):
        with open('%s_%d' % (paths_data, i), 'r') as in_file:
            for line in in_file:
                tokens = line.split()
                distances_count[int(tokens[2])] += 1

    for i in range(diameter + 1):
        distances_count[i] *= (100.0 / pairs)

    y = distances_count
    plt.loglog(y, 'b-', marker = '.')
    plt.title("Shortest Paths Spectrum")
    plt.ylabel("Percent of pairs")
    plt.xlabel("Distance")
    plt.axis('tight')
    plt.savefig(path)
Code example #16
def plot_closeness_dist (graph, path):
    """Plot distribution of closeness centrality of the graph and save the figure
       at the given path. On X-axis we have closeness centrality values and on
       Y-axis we have percentage of the nodes that have that closeness value"""

    N = float(graph.order())
    node_to_closeness = nx.closeness_centrality(graph)
    closeness_to_percent = {}

    # calculate percentages of nodes with certain closeness value
    for node in node_to_closeness:
        closeness_to_percent[node_to_closeness[node]] = 1 + \
                closeness_to_percent.get(node_to_closeness[node], 0)
    for c in closeness_to_percent:
        closeness_to_percent[c] = closeness_to_percent[c] / N * 100

    x = sorted(closeness_to_percent.keys(), reverse = True)
    y = [closeness_to_percent[i] for i in x]

    plt.loglog(x, y, 'b-', marker = '.')
    plt.title("Closeness Centrality Distribution")
    plt.ylabel("Percentage")
    plt.xlabel("Closeness value")
    plt.axis('tight')
    plt.savefig(path)
Code example #17
def plot_betweenness_dist (graph, path):
    """Plot distribution of betweenness centrality of the graph and save the figure
       at the given path. On X-axis we have betweenness centrality values and on
       Y-axis we have percentage of the nodes that have that betweenness value.
       k is the number of samples for estimating the betweenness centrality."""

    N = float(graph.order())
    node_to_betweenness = nx.betweenness_centrality(graph)
    betweenness_to_percent = {}

    # calculate percentages of nodes with certain betweenness value
    for node in node_to_betweenness:
        betweenness_to_percent[node_to_betweenness[node]] = 1 + \
                betweenness_to_percent.get(node_to_betweenness[node], 0)
    for c in betweenness_to_percent:
        betweenness_to_percent[c] = betweenness_to_percent[c] / N * 100

    x = sorted(betweenness_to_percent.keys(), reverse = True)
    y = [betweenness_to_percent[i] for i in x]

    plt.loglog(x, y, 'b-', marker = '.')
    plt.title("Betweenness Centrality Distribution")
    plt.ylabel("Percentage")
    plt.xlabel("Betweenness value")
    plt.axis('tight')
    plt.savefig(path)
Code example #18
def plot(dic):
    items = sorted(dic.items())
    x = [item[0] for item in items]
    y = [item[1] for item in items]
    pyplot.loglog(x, y, 'bo')
    pyplot.show()
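A minimal usage sketch with a hypothetical value-to-count mapping (pyplot is assumed imported as in the source module).

plot({1: 1200, 2: 430, 4: 97, 8: 21, 16: 5})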
Code example #19
File: Salpeter.py Project: jilkoval/amuse
def main(N, m, M, ximf):
    numpy.random.seed(31415)
    x_label = "M$_\odot$"
    y_label = "N"
    fig, ax = figure_frame(x_label, y_label, xsize=12, ysize=8)
    cols = get_distinct(2)

    masses = new_salpeter_mass_distribution(N, m, M, ximf)
    masses = new_salpeter_mass_distribution(N, m, M, ximf)
    lm = math.log10(0.5*m.value_in(units.MSun))
    lM = math.log10(1.5*M.value_in(units.MSun))
    bins = 10**numpy.linspace(lm, lM, 51)
    Nbin, bin_edges= numpy.histogram(masses.value_in(units.MSun), bins=bins)
    bin_sizes = bin_edges[1:] - bin_edges[:-1]
    y = Nbin / bin_sizes
    x = (bin_edges[1:] + bin_edges[:-1]) / 2.0
    for i in range(len(y)):
        y[i] = max(y[i], 1.e-10)
    pyplot.scatter(x, y, s=100, c=cols[0], lw=0)
    
    c = ((M.value_in(units.MSun)**(ximf+1)) - (m.value_in(units.MSun)**(ximf+1))) / (ximf+1)
    pyplot.plot(x, N/ c * (x**ximf), c=cols[1])
    pyplot.loglog()
    pyplot.xlabel('$M [M_\odot]$')
    pyplot.ylabel('N')
    #pyplot.xlim(-3, 3)
#    pyplot.show()
    pyplot.savefig("salpeter")
Code example #20
File: measurement.py Project: odebeir/ivctrack
def hurst_curv_exponent(xy,verbose=False):
    """computes the Hurst coefficient which qualify how the trajectory is persistent in time
    it is related to the fractal dimension of the trajectory, here the denominator is the curvilinear distance
    """
    #compute all distances
    d = squareform(pdist(xy, 'euclidean'))

    #max number of successive positions
    N = 10
    data = npy.zeros((N,2))

    for k in range(N):
        kd = npy.diag(d,k+1)
        c_length = k+1
        data[k,:] = (c_length,npy.mean(kd))

    #linear fit in log-log
    x = npy.log(data[:,0])
    y = npy.log(data[:,1])
    A = npy.vstack([x, npy.ones(len(x))]).T
    m, c = npy.linalg.lstsq(A, y)[0]

    if verbose:
        import matplotlib.pyplot as plt

        fig = plt.figure()
        ax = fig.add_subplot(111, aspect='equal')
        plt.loglog(data[:,0],data[:,1])
        plt.legend()
        plt.show()
    return m
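A minimal usage sketch (an assumption, not from the source): a 2-D random walk stands in for a cell trajectory; npy, pdist and squareform are assumed imported as in the source module.

import numpy as npy

xy = npy.cumsum(npy.random.randn(500, 2), axis=0)   # synthetic random-walk trajectory
print(hurst_curv_exponent(xy, verbose=False))        # ~0.5 for an uncorrelated walk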
Code example #21
def plot():
    hmasses = []
    lmasses = []
    hvels = []
    lvels = []
    rootdir = r'C:\Users\Cat\Documents\Research_Halos\HaloDetail'
    for subdir, dirs, files in os.walk(rootdir):
        head,tail = os.path.split(subdir)
        haloname = tail
        for file in files:
            if file.endswith('.list'):
                hostvalues = ascii.read(os.path.join(subdir, file), format = 'commented_header')
                highmass=max(hostvalues['mvir(10)'])
                hi = np.where(highmass>=1.0e+12)                                                  #Index of host with highmass
                low = np.where(highmass<1.0e+12)
                hi_id = hostvalues[hi]['id(1)']                                                   #Id of the highmass host
                low_id = hostvalues[low]['id(1)']
                whhi = np.where(hostvalues['pid(5)']==hi_id)                                        #Indices of subhalos corresponding to the highmass host
                whlow = np.where(hostvalues['pid(5)']==low_id)
                hmass = hostvalues[whhi]['mvir(10)']
                print(len(hmass))
                lmass = hostvalues[whlow]['mvir(10)']
                print(len(lmass))
                hvel = hostvalues[whhi]['vmax(16)']
                lvel = hostvalues[whlow]['vmax(16)'] 
                hmasses = np.append(hmasses, hmass)
                lmasses = np.append(lmasses, lmass)
                hvels = np.append(hvels, hvel)
                lvels = np.append(lvels, lvel)              
            print "%s done. On to the next." %haloname
    plt.loglog(hmasses, hvels, lmasses, lvels, alpha=0.8)
Code example #22
def GpPlot(row, col, figNum):
    n = 4
    GpsAdd = np.zeros((n**3,1000), dtype=complex)
    GpsMult = np.zeros((n**3,1000), dtype=complex)
    w = np.logspace(-3,1,1000)
    for i in range(1000):
        GpsAdd[:,i], GpsMult[:,i] = Gp(w[i]*1j, row, col, n)
    plt.figure(figNum)
#    plt.clf()
    plt.subplot(211)
    for i in range(n**3):
        plt.loglog(w, np.abs(GpsAdd[i, :]), '-', color=([row*0.3, col*0.3, 1]), alpha=0.2)
        plt.grid(True)
        plt.ylabel('|Additive Uncertainty|')
        plt.xlabel('Frequency [rad/s]')
    plt.subplot(212)
    for i in range(n**3):
        plt.loglog(w, np.abs(GpsMult[i, :]), '-', color=([row*0.3, col*0.3, 1]), alpha=0.2)
        plt.grid(True)
        plt.ylabel('|Multiplicative Uncertainty|')
        plt.xlabel('Frequency [rad/s]')
    fig = plt.gcf()
    BG = fig.patch
    BG.set_facecolor('white')
    fig.subplots_adjust(bottom=0.2) 
    fig.subplots_adjust(top=0.9) 
    fig.subplots_adjust(left=0.2) 
    fig.subplots_adjust(right=0.9)
Code example #23
def Total_mass_plot(rbin, mTbin):
	plt.loglog( rbin, mTbin)
	#plt.axhline(chosen_ratio_number*particleMass, color = 'g')
	plt.ylim(1e32, 1e37)
	plt.xlim(3e-3, 3e0)
	plt.ylabel(r'$M$ $({\rm g})$', fontsize=25)
	plt.xlabel(r'$r$ $({\rm pc})$', fontsize=25)
Code example #24
def plot_powerlaw_fit(xdata, ydata, amp, index, yerr=None, fignum=None):
    '''
        Plot a powerlaw with some associated datapoints
    '''

    plt.figure(fignum)
    plt.subplot(2, 1, 1)
    plt.plot(xdata, utils_math.powerlaw(xdata, amp, index))     # Fit

    if yerr is None:
        plt.plot(xdata, ydata, 'k.')  # Data
    else:
        plt.errorbar(xdata, ydata, yerr=yerr, fmt='k.')

    plt.title('Best Fit Power Law')
    plt.xlabel('X')
    plt.ylabel('Y')
    plt.xlim((xdata.min()*0.9, xdata.max()*1.1))

    plt.subplot(2, 1, 2)
    plt.loglog(xdata, utils_math.powerlaw(xdata, amp, index))

    if yerr is None:
        plt.plot(xdata, ydata, 'k.')  # Data
    else:
        plt.errorbar(xdata, ydata, yerr=yerr, fmt='k.')

    plt.xlabel('X (log scale)')
    plt.ylabel('Y (log scale)')
    plt.xlim((xdata.min()*0.9, xdata.max()*1.1))
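A minimal usage sketch with synthetic data (assumptions: utils_math.powerlaw(x, amp, index) evaluates amp * x**index, and plt is matplotlib.pyplot as in the source module).

import numpy as np

xdata = np.linspace(1.0, 10.0, 50)
ydata = 2.5 * xdata**-1.3 * (1 + 0.05 * np.random.randn(xdata.size))
plot_powerlaw_fit(xdata, ydata, amp=2.5, index=-1.3)
plt.show()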
Code example #25
File: plot.py Project: philchang/pittman-paper
def total_mass_plot(rbin, mTbin):
	pl.clf()
	pl.loglog( rbin, mTbin)
	pl.ylim(1e32, 1e37)
	pl.xlim(1e-3, 3e0)
	pl.ylabel('Total Mass ($g$)')
	pl.xlabel('Radius ($pc$)')
Code example #26
def Magnetic_vs_radius(rbin, Btotbin):
	plt.loglog( rbin, Btotbin*Btotbin/(8.*np.pi), 'b', label='$B_{tot}^2 / 8 \pi$', lw = 2)  
	plt.legend(loc=0, fontsize=22, frameon=False)
#	plt.ylim(1e-1, 3e0)
#	plt.xlim(3e-3, 3e0)
	plt.ylabel(' $B$ $({\\rm gauss })$', fontsize=25)
	plt.xlabel('$r$ $(\\rm pc)$', fontsize=25)
Code example #27
def main():

    citation_graph = load_graph(CITATION_URL)

    print(compute_in_degrees(citation_graph))


    start_time = timeit.default_timer()

    dist = in_degree_distribution(citation_graph)
    print('dist =', dist)
    total = sum(dist.values())
    normalized = {key: float(value)/total for key, value in dist.items()}
    print(timeit.default_timer() - start_time)

    x = normalized.keys()
    y = normalized.values()
    print(len(y))
    plt.loglog(x, y, 'ro')

    plt.yscale('log')
    plt.xscale('log')
    plt.minorticks_off()

    plt.xlabel('In-degree distribution')
    plt.ylabel('Normalized In-degree distribution')
    plt.title('Graph of Citations')
    plt.grid(True)
    plt.savefig('citations-q1.png')
    plt.show()
Code example #28
def plot_resolution_cddf(snap=3, maxfac=1.):
    """Plot the effect of changing resolution on the CDDF."""
    base_large = myname.get_name(7, box=25)
    base_small = myname.get_name(7, box=7.5)
    ahalo_large = CIVPlottingSpectra(snap, base_large, None, None, savefile="rand_civ_spectra.hdf5", spec_res=5.,load_halo=True)
    ahalo_small = CIVPlottingSpectra(snap, base_small, None, None, savefile="rand_civ_spectra.hdf5", spec_res=5.,load_halo=True)
    maxmass = np.max(ahalo_small.sub_mass)/maxfac
    print("Max mass=",maxmass/1e10," was ",np.max(ahalo_large.sub_mass)/1e10)
    print("Small box has ",np.size(np.where(ahalo_small.sub_mass > maxmass))," larger halos")
    print("Larger box has ",np.size(np.where(ahalo_large.sub_mass > maxmass))," larger halos")
    ahalo_large.get_col_density("C",4)
    ahalo_small.get_col_density("C",4)
    (halos_large,_) = ahalo_large.find_nearest_halo("C",4, thresh=50)
    (halos_small,_) = ahalo_small.find_nearest_halo("C",4, thresh=50)
    ind_large = np.where((ahalo_large.sub_mass[halos_large] < maxmass)*(halos_large > 0))
    ind_small = np.where((ahalo_small.sub_mass[halos_small] < maxmass)*(halos_small > 0))
    print("Now ",np.size(ind_large),np.size(ind_small)," spectra")
    #Editing the private data like this is perilous
    ahalo_large.colden[("C",4)] = ahalo_large.colden[("C",4)][ind_large]
    ahalo_small.colden[("C",4)] = ahalo_small.colden[("C",4)][ind_small]
    (NHI_large, cddf_large) = ahalo_large.column_density_function("C", 4, minN=11.5,maxN=16.5, line=False, close=50.)
    plt.loglog(NHI_large,cddf_large,color="blue", label="25 Mpc Box", ls="-")
    (NHI_small, cddf_small) = ahalo_small.column_density_function("C", 4, minN=11.5,maxN=16.5, line=False, close=50.)
    plt.loglog(NHI_small,cddf_small,color="red", label="7.5 Mpc Box", ls="--")
    ax=plt.gca()
    ax.set_xlabel(r"$N_\mathrm{CIV} (\mathrm{cm}^{-2})$")
    ax.set_ylabel(r"$f(N) (\mathrm{cm}^2)$")
    plt.xlim(10**12, 10**15)
    plt.legend(loc=0)
    ax=plt.gca()
    ax.set_xlabel(r"$N_\mathrm{CIV} (\mathrm{cm}^{-2})$")
    ax.set_ylabel(r"$f(N) (\mathrm{cm}^2)$")
Code example #29
    def plot_P(self, kmin, kmax, step, units_k = 'default', units_P = 'default'):
        stepsize = (kmax - kmin) / float(step)
        
        x = [kmin + float(i) * stepsize for i in range(0, step)]
        y1 = [self.P_delta(k, units_k, units_P) for k in x]
        plot1 = plt.loglog(x, y1, basex = 10, basey = 10, label = r'P(k)')
        
        if units_k == 'default':
            plt.xlabel(r'$k$ $(h \, Mpc^{-1})$')
        elif units_k == 'mpc-1' or units_k == 'Mpc-1':
            plt.xlabel(r'$k$ $(Mpc^{-1})$')
        
        if units_P == 'default':
            plt.ylabel(r'$P(k)$ $(h^{-3} Mpc^3)$')
        elif units_P == 'mpc3' or units_P == 'Mpc3':
            plt.ylabel(r'$P(k)$ $(Mpc^3)$')


        if units_k == 'default' and units_P == 'default':
            xcomp, ycomp = np.loadtxt('compare.dat', unpack = True)
            plot2 = plt.loglog(xcomp, ycomp, basex = 10, basey = 10, label = r'P(k) from iCosmo')
            xcomp2, ycomp2 = np.loadtxt('test_matterpower.dat', unpack = True)
            plot3 = plt.loglog(xcomp2, ycomp2, basex = 10, basey = 10, label = r'P(k) from CAMB')

        plt.legend(loc = 'upper right')
        plt.title(r'Matter Power Spectrum' )
        plt.grid(True)
        plt.xlim([10**(-3), 10])
        plt.ylim([1, 100000])

        
        plt.savefig('powerspectrum.png')
        plt.show()
Code example #30
File: cs1_functions.py Project: pdwan/COMP-47270.NW
def calc_degree_sequence(g, dest_file):
    """
    calc_degree_sequence(g)
    Calculate & plot the degree sequence of the graph g & writes data to the created data output file
    :param g:   graph as source
    :return:    --
    """
    func_intro = "\n\nDegree Sequence ... "
    logging.info(cs_ref, func_intro)
    print(func_intro)
    with open(dest_file, "a") as dat_file:
        dat_file.write(func_intro)

    degree_sequence = sorted(dict(nx.degree(g)).values(), reverse=True)
    with open(dest_file, "a") as dat_file:
        dat_file.write("\n\tDegree Sequence = \t" + str(degree_sequence))

    plt.loglog(degree_sequence, 'g-', marker='o')
    plt.title("Degree Rank/Sequence" +src_file)
    plt.ylabel("degree")
    plt.xlabel("rank")
    gcc = g.subgraph(max(nx.connected_components(g), key=len))
    pos = nx.spring_layout(gcc)
    plt.axes([0.45, 0.45, 0.45, 0.45])
    plt.axis('off')
    nx.draw_networkx_nodes(gcc, pos, node_size=10)
    nx.draw_networkx_nodes(gcc, pos, alpha=0.4)
    plt.figure(1)
    plt.savefig("plots/cs1_degree_histogram.png")
    plt.show()
Code example #31
timer.add_callback(close_ventana)

# example 5, custom plots with logs
''' legend(), axis(), xlabel(), ylabel() and savefig()'''

# one option; the other is logarithmically spaced
#x=np.linspace(0,10,20)
x = np.logspace(-1, 1, 40)  #0.1, 10, 40
y1 = x**2.0
y2 = x**1.5
'''
with both plot calls in a row, both curves are drawn
'''
plt.loglog(x,
           y1,
           "bo-",
           linewidth=2,
           markersize=12,
           label='$\sum_{i=0}^\infty x_i$')
plt.loglog(x, y2, "gs-", linewidth=2, markersize=12, label="second verde")
'''Adjust the axes with
	 plt.axis([xmin, xmax, ymin, ymax]) '''
'''LaTeX can be used inside the labels!'''
plt.xlabel(r"$\frac{x}{y}$")
plt.ylabel("$y$")
plt.title(r'$\alpha > \beta$', fontsize=16, color='r')
'''show what is in each label; with loc="parameter" we say where we want the
legend entries for the corresponding y1 and y2 curves to go'''
plt.legend(loc="upper left")

# example 2
#x=np.logspace(0,1,10)
Code example #32
y_init = [0, 0]  # initial conditions

list_dic = integrate()

list_theta = list_dic['theta']  # dump psi values here
list_d_theta = list_dic['d_theta']  # dump dpsi values here
t = list_dic['xi']  # create array of values. Equidistant integration steps.

plt.xlim([0.1, 20])
plt.ylim([0.001, 1.0])

plt.plot(t, np.exp(list_theta), 'o', t,
         np.power(1 + np.power(t / 2.88, 2), -1.47))

plt.loglog(t, np.exp(list_theta), 'o', t,
           np.power(1 + np.power(t / 2.88, 2), -1.47))

for i in range(0, len(t) - 190000):
    print(t[i], np.exp(list_theta[i]))

#
# # plt.plot(x,y1,'o', x_new, y1_new)
# plt.show()
# list_for_fit_theta =[]
# list_for_fit_d_theta =[]
#
# for i in range(0,len(t)):
#
#    list_for_fit_theta.append((t[i], np.exp(psi[i])))
#    list_for_fit_d_theta.append((t[i], dpsi[i]))
#
Code example #33
def guo(halo_catalog,
        clear=False,
        compare=True,
        baryfrac=False,
        filename=False,
        **kwargs):
    '''Stellar Mass vs. Halo Mass

    Takes a halo catalogue and plots the member stellar masses as a
    function of halo mass.

    Usage:

    >>> import pynbody.plot as pp
    >>> h = s.halos()
    >>> pp.guo(h,marker='+',markerfacecolor='k')

    **Options:**

    *compare* (True): Should comparison line be plotted?
         If compare = 'guo', Guo+ (2010) plotted instead of Behroozi+ (2013)

    *baryfrac* (False):  Should line be drawn for cosmic baryon fraction?

    *filename* (None): name of file to which to save output
    '''

    # if 'marker' not in kwargs :
    #    kwargs['marker']='o'

    starmasshalos = []
    totmasshalos = []

    halo_catalog._halos[1]['mass'].convert_units('Msol')

    for i in np.arange(len(halo_catalog._halos)) + 1:
        halo = halo_catalog[i]
        halostarmass = np.sum(halo.star['mass'])
        if halostarmass:
            starmasshalos.append(halostarmass)
            totmasshalos.append(np.sum(halo['mass']))

    if clear:
        plt.clf()

    plt.loglog(totmasshalos, starmasshalos, 'o', **kwargs)
    plt.xlabel('Total Halo Mass')
    plt.ylabel('Halo Stellar Mass')

    if compare:
        xmasses = np.logspace(np.log10(min(totmasshalos)),
                              1 + np.log10(max(totmasshalos)), 20)
        if compare == 'guo':
            # from Sawala et al (2011) + Guo et al (2009)
            ystarmasses = xmasses * 0.129 * ((xmasses / 2.5e11)**-0.926 +
                                             (xmasses / 2.5e11)**0.261)**-2.44
        else:
            ystarmasses, errors = behroozi(
                xmasses, halo_catalog._halos[1].properties['z'])
        plt.fill_between(xmasses,
                         np.array(ystarmasses) / np.array(errors),
                         y2=np.array(ystarmasses) * np.array(errors),
                         facecolor='#BBBBBB',
                         color='#BBBBBB')
        plt.loglog(xmasses, ystarmasses, label='Behroozi et al (2013)')

    if baryfrac:
        xmasses = np.logspace(np.log10(min(totmasshalos)),
                              1 + np.log10(max(totmasshalos)), 20)
        ystarmasses = xmasses * 0.04 / 0.24
        plt.loglog(xmasses,
                   ystarmasses,
                   linestyle='dotted',
                   label='f_b = 0.16')

    plt.axis([
        0.8 * min(totmasshalos), 1.2 * max(totmasshalos),
        0.8 * min(starmasshalos), 1.2 * max(starmasshalos)
    ])

    if (filename):
        logger.info("Saving %s", filename)
        plt.savefig(filename)
Code example #34
             color='C0')  #, normed=True)
    #     plt.axis([1e-2, 73, 0, 11])
    plt.axis([1e-2, 73, 0, 220])
    plt.gca().set_xscale('log')
    plt.gca().set_xticklabels([])
    plt.gca().set_yticklabels([])
    plt.title('{} GHz'.format(band_labels[band]))

    if jband == 0:
        plt.ylabel('$1/f$ knees of\nnoise stares')

# ax = plt.subplot(2,3,4)
plt.subplot2grid((4, 3), (1, 0), colspan=1, rowspan=3)
asd_diff = np.array(d["AverageASDDiff"]['90.0_w204'])
asd_sum = np.array(d["AverageASDSum"]['90.0_w204'])
plt.loglog(freq[freq < f_hi], asd_diff[freq < f_hi] / np.sqrt(2.))
plt.loglog(freq[freq < f_hi], asd_sum[freq < f_hi] / np.sqrt(2.))
print(np.mean(asd_diff[(freq > 1) & (freq < 5)]) / np.sqrt(2.))
plt.grid()
plt.axis([1e-2, 73, 200, 50000])
plt.ylabel('NET [$\mu$K $\sqrt{s}$]')

# plt.subplot(2,3,5)
plt.subplot2grid((4, 3), (1, 1), colspan=1, rowspan=3)
asd_diff = np.array(d["AverageASDDiff"]['150.0_w204'])
asd_sum = np.array(d["AverageASDSum"]['150.0_w204'])
plt.loglog(freq[freq < f_hi], asd_diff[freq < f_hi] / np.sqrt(2.))
plt.loglog(freq[freq < f_hi], asd_sum[freq < f_hi] / np.sqrt(2.))
print(np.mean(asd_diff[(freq > 1) & (freq < 5)]) / np.sqrt(2.))
plt.grid()
plt.axis([1e-2, 73, 200, 50000])
Code example #35
#model lomb
model_periods,model_mag,model_ph,model_fr,model_fi = modules.take_lomb(model_time,model_var,ofac,1./24)

obs_time = np.array(obs_time)
obs_var = np.array(obs_var)

#set plotting area & background to white
fig=plt.figure(figsize=(20,12))
fig.patch.set_facecolor('white')
ax = fig.add_subplot(1,1,1)

obs_periods,obs_mag, obs_breakpoint = modules.find_breakpoint(obs_periods,obs_mag)
model_periods,model_mag, model_breakpoint = modules.find_breakpoint(model_periods,model_mag)

plt.loglog(obs_periods,obs_mag, color='black', label = 'Obs.')
plt.axvline(x=obs_breakpoint, color = 'blue', linestyle = '--')
plt.loglog(model_periods,model_mag, color='red', label = 'GEOS %s'%(model_version))
plt.axvline(x=model_breakpoint, color = 'green', linestyle = '--')

def form2(x, pos):
	""" This function returns a string with 3 decimal places, given the input x"""
	return '%.2f' % x	

def form5(x, pos):
	""" This function returns a string with 3 decimal places, given the input x"""
	return '%.5f' % x

xformatter = FuncFormatter(form2)
yformatter = FuncFormatter(form5)
Code example #36
    model = lstm_for_dynamics(cf_trunc, deployment_mode)
    output_state_lstm, state_tracker_lstm = evaluate_rom_deployment_lstm(
        model, cf_trunc, tsteps)
    np.save('Burgulence_LSTM_Coefficients.npy', state_tracker_lstm)

    #Visualization - Spectra
    u_true = sm_mean + (np.matmul(phi_trunc, perfect_output))[:]
    u_gp = sm_mean + (np.matmul(phi_trunc, output_state_gp))[:]
    u_lstm = sm_mean + (np.matmul(phi_trunc, output_state_lstm[:, 0]))[:]

    plt.figure()
    kx_plot = np.array([float(i) for i in list(range(0, nx // 2))])
    espec1 = spectra_calculation(u_true)
    espec2 = spectra_calculation(u_gp)
    espec3 = spectra_calculation(u_lstm)
    plt.loglog(kx_plot, espec1, label='Truth')
    plt.loglog(kx_plot, espec2, label='GP')
    plt.loglog(kx_plot, espec3, label='LSTM')
    plt.legend()
    plt.show()

    # Spectra residuals
    plt.figure()
    kx_plot = np.array([float(i) for i in list(range(0, nx // 2))])
    plt.loglog(kx_plot, np.abs(espec2 - espec1), label='GP-Residual')
    plt.loglog(kx_plot, np.abs(espec3 - espec1), label='LSTM-Residual')
    plt.legend()
    plt.show()

    plt.figure()
    plt.plot(x[:], u_true[:], label='Truth')
Code example #37
File: PSD_k_cmp_itp.py Project: HansInM36/ppcode
tSeq_11, xSeq_11, ySeq_11, zSeq_11, uSeq_11, coors_11 = getData_sowfa(
    ppDir, 'prbg11', ((0, 0, 0), 30.0), 'U', 0)
ky_seq_11, psdy_seq_11 = PSD_ky_sowfa(tSeq_11, ySeq_11, xSeq_11, 1, 5, uSeq_11,
                                      2000 // 5)

tSeq_12, xSeq_12, ySeq_12, zSeq_12, uSeq_12, coors_12 = getData_sowfa(
    ppDir, 'prbg12', ((0, 0, 0), 30.0), 'U', 0)
ky_seq_12, psdy_seq_12 = PSD_ky_sowfa(tSeq_12, ySeq_12, xSeq_12, 1, 5, uSeq_12,
                                      2000 // 5)
""" check PSD_kx """
fig, ax = plt.subplots(figsize=(6, 4))
dn = 1
plt.loglog(kx_seq_3[0::dn],
           psdx_seq_3[0::dn],
           label='cell',
           linewidth=1.0,
           linestyle='-',
           color='r')
plt.loglog(kx_seq_7[0::dn],
           psdx_seq_7[0::dn],
           label='cellPoint',
           linewidth=1.0,
           linestyle='-',
           color='b')
plt.loglog(kx_seq_8[0::dn],
           psdx_seq_8[0::dn],
           label='cellPointFace',
           linewidth=1.0,
           linestyle='-',
           color='g')
plt.loglog(kx_seq_9[0::dn],
Code example #38
def plot_dQ_Q(Q: pd.Series):

    dQ = dQdt(Q)
    plt.loglog(Q[dQ < 0], -dQ[dQ < 0], '.')
    plt.xlabel('Discharge ($Q, mm day^{-1}$)')
    plt.ylabel(r'$-\frac{dQ}{dt} mm day^{-1}$')
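plot_dQ_Q relies on a dQdt() helper that is not shown in the snippet; a minimal sketch of one plausible implementation (an assumption, not the project's actual code):

import pandas as pd

def dQdt(Q: pd.Series) -> pd.Series:
    # day-to-day change in discharge, aligned with Q's index
    return Q.diff()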
Code example #39
File: comic.py Project: XilunWu/courses
print(numOfCh)
if (count in countDict):
    countDict[count] += 1
else:
    countDict[count] = 1

countList.sort()
countList.reverse()
cumulativeCountDict = {}  #ECCDF P[X > x]
bte = 0
bt = 0
for count in countList:
    bte += countDict[count]
    bt = bte - countDict[count]
    cumulativeCountDict[count] = bt * 1.0 / numOfCh

#print cumulativeCountDict
input.close()
output.close()

import numpy as np
import matplotlib.pyplot as plt
import math
x = cumulativeCountDict.keys()
y = cumulativeCountDict.values()

plt.xlim([min(x), max(x)])
#plt.plot(x,y,'ro')
plt.loglog(x, y, 'ro')
plt.show()
Code example #40
File: plot.py Project: hyungyukang/MPAS-Model
                ncfile.variables[tracer + 'Tend'][0, :, iz], [ny, nx])
            dif = abs(var[1:ny - 1, 1:nx - 1] - sol[1:ny - 1, 1:nx - 1])
            err = abs((var[1:ny - 1, 1:nx - 1] - sol[1:ny - \
                      1, 1:nx - 1]) / sol[1:ny - 1, 1:nx - 1])
            difL2[i, j, k] = np.sqrt(np.mean(dif[:]**2))
            errL2[i, j, k] = np.sqrt(np.mean(err[:]**2))
            #errL2[i,j,k] = np.max(err[:])
        ncfileIC.close()
        ncfile.close()

for i in range(ntests):
    test = tests[i]
    plt.subplot(ntests, 3, 3 * i + 1)
    for k in range(len(tracers)):
        tracer = tracers[k]
        plt.loglog(dx, difL2[i, :, k], '-x', label=tracer)

    plt.ylabel('diff: rms(exact[:] - calc[:])')
    plt.legend()
    plt.grid()
plt.xlabel('cell width, km')

for i in range(ntests):
    test = tests[i]
    plt.subplot(ntests, 3, 3 * i + 2)
    for k in range(len(tracers)):
        tracer = tracers[k]
        plt.loglog(dx, errL2[i, :, k], '-x', label=tracer)

    plt.title('Error in Redi tendency term, ' + test)
    plt.ylabel('error: rms((exact[:] - calc[:])/exact[:])')
Code example #41
File: templ.py Project: gbrammer/eazy-photoz
def fit_gm():
    import numpy as np
    import matplotlib.pyplot as plt
    from scipy.optimize import nnls

    lnorm = 20

    nwave = 800

    # files = glob.glob('SED_J13/*RES')
    # files = glob.glob('SED_C11/*RES')
    # #files = glob.glob('SED_DL07/*RES')
    #
    # du = np.zeros((nwave, len(files)))
    #
    # for i, file in enumerate(files):
    #     dust = np.loadtxt(file, skiprows=8)
    #     w, f = dust[:,0], dust[:,1]
    #     print(file, len(w))
    #     plt.plot(w, f/np.interp(lnorm, w, f), label=file, alpha=0.5)
    #     du[:,i] = f/np.interp(lnorm, w, f)
    #     du_wave = w

    # Magdis
    mag = np.loadtxt('ms.txt')
    mag_wave = mag[:, 0]
    mag_flux = mag[:, 1:]
    for i in range(10):
        mag_flux[:, i] /= np.interp(lnorm, mag_wave, mag_flux[:, i])
        #plt.plot(mag_wave, mag_flux[:,i], color='k', alpha=0.8)

    ### By hand, blackbodies following da Cunha + Magphys
    from astropy.modeling.physical_models import BlackBody
    import astropy.units as u
    from astropy.constants import c

    # Eazy for wavelength grid
    ez = np.loadtxt('templates/fsps_full/fsps_QSF_12_v3_001.dat')
    wave_grid = ez[:, 0] / 1.e4
    nwave = len(wave_grid)

    # PAH template.  C11 dies down quickly
    file = 'SED_C11/SED_C11_100.RES'
    #file = 'SED_DL07/SED_DL07_100.RES'
    dust = np.loadtxt(file, skiprows=8)
    du_wave, du_pah = dust[:, 0], dust[:, 1]

    comps = [np.interp(wave_grid, du_wave, du_pah)]

    nu = (c / (wave_grid * u.um)).to(u.Hz)

    # equilibrium components, da Cunha
    # modified black-bodies, extra factor of 1 turns from Fnu to nu Fnu
    # cold
    for t in np.arange(20, 40):
        comps.append(
            BlackBody(temperature=t * u.K)(wave_grid * u.um) * nu**(1 + 2.0))

    #warm
    for t in np.arange(30, 80):
        comps.append(
            BlackBody(temperature=t * u.K)(wave_grid * u.um) * nu**(1 + 1.5))

    # Hot
    for t in [130, 250]:
        comps.append(
            BlackBody(temperature=t * u.K)(wave_grid * u.um) * nu**(1 + 1))

    _A = np.array(comps).T

    nc = _A.shape[1]
    for i in range(nc):
        _A[:, i] /= np.interp(lnorm, wave_grid, _A[:, i])

    #######
    mag_int = np.zeros((nwave, 10))
    clip = (wave_grid > 4.) & (wave_grid < 5000)

    models_flam = mag_int * 0.

    for i in range(10):
        mag_int[:, i] = np.interp(wave_grid, mag_wave, mag_flux[:, i])

        _a = nnls(_A[clip, :], mag_int[clip, i])
        model = _A.dot(_a[0])

        norm = np.trapz(model / wave_grid, wave_grid)

        pl = plt.plot(wave_grid, mag_int[:, i] / norm, linewidth=4, alpha=0.2)
        plt.plot(wave_grid[clip],
                 mag_int[clip, i] / norm,
                 linewidth=4,
                 alpha=0.8,
                 color=pl[0].get_color())

        plt.plot(wave_grid, model / norm, linewidth=3, color='w', alpha=0.8)
        plt.plot(wave_grid,
                 model / norm,
                 linewidth=1,
                 color=pl[0].get_color(),
                 alpha=0.8)

        mflam = model / wave_grid
        mflam /= np.trapz(mflam, wave_grid)

        models_flam[:, i] = mflam

        fp = open(f'templates/magdis/magdis_{i+1:02d}.txt', 'w')
        np.savetxt(
            fp,
            np.array([wave_grid * 1.e4, mflam]).T,
            header=
            'wave flam\n wave: A, flux: flam \nnormalized to unit energy',
            fmt='%.5e')
        fp.close()

        # components
        # plt.plot(wave_grid, (_A*_a[0]), linewidth=1, color='k', alpha=0.2)

    plt.loglog()
    plt.xlim(0.2, 5000)
    plt.ylim(1.e-6, 10)
    plt.grid()
    plt.savefig('templates/magdis/magdis_fit.png')
Code example #42
n_runs = len(rho_err)
slope_1_ref = [2e-6 / 2**i
               for i in range(n_runs)]  # adjust initial error as necessary
slope_2_ref = [1e-7 / 4**i
               for i in range(n_runs)]  # adjust initial error as necessary

plt.figure(figsize=(8, 6))
plt.rc('text', usetex=True)
plt.rc('font', family='sans-serif')
ax = plt.subplot(1, 1, 1)
ax.get_yaxis().get_major_formatter().set_useOffset(False)
plt.xlabel("Mesh size, $h$")
plt.ylabel("Error, $\\|e\\|_1$")
plt.loglog(dx,
           slope_1_ref,
           linestyle='-',
           marker='',
           color='black',
           label='$m=1$')
plt.loglog(dx,
           slope_2_ref,
           linestyle='-',
           marker='',
           color='gray',
           label='$m=2$')
plt.loglog(dx,
           rho_err,
           linestyle='--',
           marker='x',
           color='indianred',
           label='$\\|\\rho - \\rho_h\\|_1$')
plt.loglog(dx,
Code example #43
def prob4():
    """Compare the build and search speeds of the SinglyLinkedList, BST, and
    AVL classes. For search times, use iterative_search(), BST.find(), and
    AVL.find() to search for 5 random elements in each structure. Plot the
    number of elements in the structure versus the build and search times.
    Use log scales if appropriate.
    """
    N = 11

    # Initialize lists to hold results
    lls_build, lls_search = [], []
    bst_build, bst_search = [], []
    avl_build, avl_search = [], []

    with open("english.txt", 'r') as infile:
        data = infile.readlines()
    domain = 2**np.arange(3,N+1)

    for n in domain:

        # Initialize the data subset and the data structures.
        subset = np.random.choice(data, size=n, replace=False)
        bst = BST()
        avl = AVL()
        lls = SinglyLinkedList()

        # Time the SinglyLinkedList build.
        start = time()
        for item in subset:
            lls.append(item)
        lls_build.append(time() - start)

        # Time the BST build.
        start = time()
        for item in subset:
            bst.insert(item)
        bst_build.append(time() - start)

        # Time the AVL Tree build.
        start = time()
        for item in subset:
            avl.insert(item)
        avl_build.append(time() - start)

        random_subset = np.random.choice(subset, size=5, replace=False)

        # Time the SinglyLinkedList search (using iterative_search()).
        start = time()
        for target in random_subset:
            iterative_search(lls, target)
        lls_search.append(time() - start)

        # Time the BST search.
        start = time()
        for target in random_subset:
            bst.find(target)
        bst_search.append(time() - start)

        # Time the AVL Tree search.
        start = time()
        for target in random_subset:
            avl.find(target)
        avl_search.append(time() - start)

    # Plot the data.
    plt.subplot(121)
    plt.title("Build Times")
    plt.loglog(domain, lls_build, 'b.-', lw=2, ms=10, basex=2, basey=2,
                                                    label='Singly Linked List')
    plt.loglog(domain, bst_build, 'g.-', lw=2, ms=10, basex=2, basey=2,
                                                    label='Binary Search Tree')
    plt.loglog(domain, avl_build, 'r.-', lw=2, ms=10, basex=2, basey=2,
                                                    label='AVL Tree')
    plt.xlabel("n")
    plt.ylabel("Seconds")
    plt.legend(loc='upper left')

    plt.subplot(122)
    plt.title("Search Times")
    plt.loglog(domain, lls_search, 'b.-', lw=2, ms=10, basex=2, basey=2,
                                                    label='Singly Linked List')
    plt.loglog(domain, bst_search, 'g.-', lw=2, ms=10, basex=2, basey=2,
                                                    label='Binary Search Tree')
    plt.loglog(domain, avl_search, 'r.-', lw=2, ms=10, basex=2, basey=2,
                                                    label='AVL Tree')
    plt.xlabel("n")
    plt.legend(loc='upper left')

    plt.suptitle("Problem 4 Solution")
    plt.show()
Code example #44
"""compute for additive parametric uncertainty"""
"""The condition for robust stability is derived & gives: RS <==> K*S < 1/W_A
    with Lp = Gp*K = k*(G + W_A*delta_I)"""

def W_A(Gn, G):     # We take W_A = l_A , Additive error (l_A = G'- G)
    return  np.abs(Gn - G)

def S(G, K):
    return 1/(1 + G*K)       
     
w = np.logspace(-3, 1, 300)
s = 1j*w

plt.figure(0)
plt.loglog(w, l(Gnom(s), G(s)), 'r', label='Relative Error')
plt.loglog(w, np.abs(Wi(s)), 'k', label='$W_I$')
plt.title(r'Figure 7.12')
plt.xlabel(r'Frequency [rad/s]', fontsize=14)
plt.ylabel(r'Magnitude', fontsize=15)
plt.legend()

#Plotting with multiplicative uncertainty 
plt.figure(1)
plt.loglog(w, np.abs(T(G(s), K(s, 1.13))), label='$T_1$ (not RS)')
plt.loglog(w, np.abs(T(G(s), K(s, 0.31))), label='$T_2$')
line = plt.loglog(w, 1/np.abs(Wi(s)), label='$1/W_I$')
plt.title(r'Figure 7.13')
plt.xlabel(r'Frequency [rad/s]', fontsize=14)
plt.ylabel(r'Magnitude', fontsize=15)
plt.legend()
Code example #45
File: emweak.py Project: calum-strange/SDE-Higham
T = 1
M = 50000

Xem = np.zeros((5, 1))
for p in range(1, 6):
    Dt = 2**(p - 10)
    L = float(T) / Dt
    Xtemp = Xzero * np.ones((M, 1))
    for j in range(1, int(L) + 1):
        Winc = np.sqrt(Dt) * np.random.randn(M)
        Xtemp += Dt * gamma * Xtemp + mu * np.multiply(Xtemp.T, Winc).T
    Xem[p - 1] = np.mean(Xtemp, 0)
Xerr = np.abs(Xem - np.exp(gamma))

Dtvals = np.power(float(2), [x - 10 for x in range(1, 6)])
plt.loglog(Dtvals, Xerr, 'b*-')
plt.loglog(Dtvals, Dtvals, 'r--')
plt.axis([1e-3, 1e-1, 1e-4, 1])
plt.xlabel('$\Delta t$')
plt.ylabel('| $E(X(T))$ - Sample average of $X_L$ |')
plt.title('emweak.py', fontsize=16)

### Least squares fit of error = C * Dt^q ###
A = np.column_stack((np.ones((p, 1)), np.log(Dtvals)))
rhs = np.log(Xerr)
sol = np.linalg.lstsq(A, rhs)[0]
q = sol[1][0]
resid = np.linalg.norm(np.dot(A, sol) - rhs)
#print 'q = ', q
#print 'residual = ', resid
Code example #46
w = np.logspace(-3, 3, 1000)
dim = G(0).shape[0]
Sv_G = np.zeros((len(w), dim))
Sv_G_min = np.zeros((len(w), 1))
Sv_G_max = np.zeros((len(w), 1))
wB_index = 0
for i in range(len(w)):
    _, Sv_G[i, :], _ = np.linalg.svd(G(1j * w[i]))
    Sv_G_min[i] = np.min(Sv_G[i, :])
    if w[i] > 0.05 and wB_index == 0:
        wB_index = i

figure = plt.figure()
plt.loglog(w, Sv_G_min, label='$\sigma_{min}(G)$')
plt.loglog([w[wB_index], w[wB_index]], [plt.ylim()[0], plt.ylim()[1]], '--')
plt.legend()
plt.xlabel('Frequency  [rad/s]')
plt.ylabel('Magnitude')

# Note that minimum singular value of G(iw) where w < wB* is located at steady state (w=0)

u, _, _ = np.linalg.svd(G(0))

# Most difficult output direction
u_min = u[:, 1]

# Unsure of how to form gd with given information, cannot determine gd = y/d

# TODO Complete section b
Code example #47
TDGC = (dm_mass_cands == 0) & (star_mass_cands > 5e-3)
rich = (dm_mass_cands / star_mass_cands >= 1) & (star_mass_cands < 0.1) & (
    star_mass_cands > 5e-3) & (dm_mass_cands > 0)
poor = (dm_mass_cands / star_mass_cands < 1) & (star_mass_cands < 0.1) & (
    star_mass_cands > 5e-3) & (dm_mass_cands > 0)

print('TDGCs: ', np.sum(TDGC), '\nDM-rich: ', np.sum(rich), '\nDM-poor: ',
      np.sum(poor))

# In[13]:

base_path = './haslbauer_subhalomasstype'
pathlib.Path(base_path).mkdir(parents=True, exist_ok=True)

plt.loglog(hst_shmr[hst_shmr != 0], s[hst_shmr != 0], '.')
x = np.linspace(10**(-2), 10**4, 100)

plt.loglog(x, 2 * x, '--b', label='s/shmr = 2')
plt.loglog(x, 5 * x, '--k', label='s/shmr = 5')
plt.loglog(x, 10 * x, '--r', label='s/shmr = 10')
plt.loglog(x, 100 * x, '--g', label='s/shmr = 100')

plt.legend(fontsize='x-large')
plt.xlabel('host stellar half mass', fontsize=20)
plt.ylabel('Distance to host', fontsize=20)
plt.savefig(base_path + '/dist_crit.png')

# In[16]:

plt.hist2d(10 + np.log10(star_mass[dmc]),
Code example #48
with open('zipf.txt', 'r') as zipf_file:
    zipf_wrank, zipf_normedcumsum = getFileCDF(zipf_file)
zipf_file.close()
print("zipf CDF calculation done")

with open('uniform.txt', 'r') as uniform_file:
    uniform_wrank, uniform_normedcumsum = getFileCDF(uniform_file)
uniform_file.close()
print("uniform CDF calculation done")

#here i calculate the maximum point wise distance for both 2 generated corpuses
print("calculating the maximum point wise distance for zipf")
print("zipf max point= " +
      str(max(list(map(operator.sub, normedcumsum,
                       zipf_normedcumsum)))))  #0.411679964745
print("calculating the maximum point wise distance for uniform")
print("uniform max point= " +
      str(max(list(map(operator.sub, normedcumsum,
                       uniform_normedcumsum)))))  #0.436318188857

print("plotting now")

plt.title("Exercise 2")
plt.xlabel('Word Rank')
plt.ylabel('CDF')
plt.loglog(list(range(len(wrank))), normedcumsum)
plt.loglog(list(range(len(zipf_wrank))), zipf_normedcumsum)
plt.loglog(list(range(len(uniform_wrank))), uniform_normedcumsum)
plt.show()
Code example #49
            f[i] = peaks(x)
            nfe += 1

        # find m best parents, truncation selection
        ix = np.argsort(f)[:m]
        Q = P[ix, :]  # parents

        # keep track of best here
        if f_best is None or f[ix[0]] < f_best:
            f_best = f[ix[0]]
            x_best = Q[0, :]

        # then mutate: each parent generates l/m children (integer division)
        child = 0
        for x in Q:
            for _ in range(int(l / m)):
                P[child, :] = mutate(x, lb, ub, s)  # new population members
                child += 1

        ft[seed, int(nfe / l) - 1] = f_best

    # for each trial print the result (but the traces are saved in ft)
    print(x_best)
    print(f_best)

nfe = range(l, max_NFE + 1, l)
plt.loglog(nfe, ft.T, color='steelblue', linewidth=1)
plt.xlabel('NFE')
plt.ylabel('Objective Value')
plt.show()
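
# The mutate() and peaks() helpers are defined elsewhere in the source file. Purely as an
# illustration of the (mu, lambda) truncation-selection loop above, a Gaussian mutation
# with bound clipping might look like the sketch below (the name, the scaling by s*(ub-lb)
# and the clipping are assumptions, not the author's implementation):
def mutate_sketch(x, lb, ub, s):
    """Perturb each coordinate with Gaussian noise of relative step size s, then clip to bounds."""
    child = x + s * (ub - lb) * np.random.randn(len(x))
    return np.clip(child, lb, ub)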
コード例 #50
0
ファイル: 39.py プロジェクト: chenshuyi0408/nlp100
with open("neko.txt.mecab") as f:
    surface = []
    for i, line in enumerate(f):
        split_line = line.rstrip("\r\n").split("\t")
        if len(split_line) > 1:
            surface.append(split_line[0])
import collections

# Count how often each surface form occurs
surfaces = collections.Counter(surface)

# Count how many distinct words share each frequency (frequency of frequencies)
fres = collections.Counter(surfaces.values())

# Sort the "number of words" values in descending order and pair them with ranks
pairs = sorted(zip(fres.values(), fres.keys()), reverse=True)

ka = [i[0] for i in pairs]
va = range(1, len(ka) + 1)

import matplotlib.pyplot as plt
plt.loglog(va, ka)

plt.show()
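
# For comparison (an added illustration, not part of the original script), the classic
# Zipf plot of rank against raw word frequency can be drawn directly from the counter:
plt.figure()
counts = sorted(surfaces.values(), reverse=True)
ranks = range(1, len(counts) + 1)
plt.loglog(ranks, counts)
plt.xlabel('rank')
plt.ylabel('frequency')
plt.show()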
コード例 #51
0
ファイル: magcamb.py プロジェクト: brycem1/CMB
neg15_1_45nG_vecCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/neg15_1_45nG_vecCls.dat')[:,3]
neg10_1_45nG_vecCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/neg10_1_45nG_vecCls.dat')[:,3]
pos0_1_45nG_vecCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/pos0_1_45nG_vecCls.dat')[:,3]
pos10_1_45nG_vecCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/pos10_1_45nG_vecCls.dat')[:,3]

neg29_2_45nG_tensCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/neg29_2_45nG_tensCls.dat')[:,3]
neg25_2_45nG_tensCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/neg25_2_45nG_tensCls.dat')[:,3]
neg20_2_45nG_tensCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/neg20_2_45nG_tensCls.dat')[:,3]
neg15_2_45nG_tensCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/neg15_2_45nG_tensCls.dat')[:,3]
neg10_2_45nG_tensCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/neg10_2_45nG_tensCls.dat')[:,3]
pos0_2_45nG_tensCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/pos0_2_45nG_tensCls.dat')[:,3]
pos10_2_45nG_tensCls = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/pos10_2_45nG_tensCls.dat')[:,3]

ells = np.loadtxt('/home/student.unimelb.edu.au/brycem1/MagCAMB/bryce/research17/pos10_2_45nG_tensCls.dat')[:,0]

plt.loglog(ells,neg29_1_1nG_vecCls,label = r'$n_B = -2.9$')
plt.loglog(ells,neg25_1_1nG_vecCls,label = r'$n_B = -2.5$')
plt.loglog(ells,neg20_1_1nG_vecCls,label = r'$n_B = -2.0$')
plt.loglog(ells,neg15_1_1nG_vecCls,label = r'$n_B = -1.5$')
plt.loglog(ells,neg10_1_1nG_vecCls,label = r'$n_B = -1.0$')
plt.loglog(ells,pos0_1_1nG_vecCls,label = r'$n_B = 0.0$')
plt.loglog(ells,pos10_1_1nG_vecCls,label = r'$n_B = 1.0$')

plt.loglog(ells,neg29_1_45nG_vecCls,label = r'$n_B = -2.9$')
plt.loglog(ells,neg25_1_45nG_vecCls,label = r'$n_B = -2.5$')
plt.loglog(ells,neg20_1_45nG_vecCls,label = r'$n_B = -2.0$')
plt.loglog(ells,neg15_1_45nG_vecCls,label = r'$n_B = -1.5$')
plt.loglog(ells,neg10_1_45nG_vecCls,label = r'$n_B = -1.0$')
plt.loglog(ells,pos0_1_45nG_vecCls,label = r'$n_B = 0.0$')
plt.loglog(ells,pos10_1_45nG_vecCls,label = r'$n_B = 1.0$')
コード例 #52
0
    'C0o',  # Input jitter
    'C9o',  # Output jitter
    'C4o',  # Scatter
    'C2o',  # Intensity
    'C6o',  # Frequency
    'C7o',  # Dark
    'ko',  # OMC Length
    'C5o',  # PUM DAC
    'C8o',  # Stray Fields
])

for label, ASD, style in zip(labels, ASDs, styles):
    # ASD_logbinned = np.dot(logbin_matrix, ASD)
    lin_log_ff, lin_log_ASD = linear_log_ASD(fflog, ff, ASD)
    if label == 'Measured noise (O3)':
        plt.loglog(lin_log_ff, lin_log_ASD, style, label=label, zorder=3)
    else:
        plt.loglog(lin_log_ff, lin_log_ASD, style, label=label)

#lin_log_ff_O1, lin_log_ASD_O1 = linear_log_ASD(fflog, ff_O1, arm_length*spectra['o1'][:,1])
#lin_log_ff_O2, lin_log_ASD_O2 = linear_log_ASD(fflog, ff_O2, arm_length*spectra['o2'][:,1])

#plt.loglog(lin_log_ff_O1, lin_log_ASD_O1, 'C1-', label='O1', alpha=0.5)
#plt.loglog(lin_log_ff_O2, lin_log_ASD_O2, 'C7-', label='O2', alpha=0.5)

plt.xlabel('Frequency [Hz]')
plt.ylabel(r'DARM [$\mathrm{m}/\sqrt{\mathrm{Hz}}$]')
plt.grid()
plt.grid(which='minor', ls='--', alpha=0.7)
plt.legend(ncol=2, markerscale=3, loc='upper right')
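
# linear_log_ASD() is defined elsewhere in the source; the calls above suggest it resamples
# a linearly spaced ASD onto the log-spaced grid fflog. A rough sketch of such a helper
# (an assumption about its behaviour, not the actual implementation) could be:
import numpy as np

def linear_log_ASD_sketch(fflog, ff, ASD):
    """Interpolate an ASD defined on a linear frequency grid ff onto a log-spaced grid fflog."""
    keep = (fflog >= ff[0]) & (fflog <= ff[-1])  # stay inside the measured band
    return fflog[keep], np.interp(fflog[keep], ff, ASD)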
コード例 #53
0
    pows = np.logspace(2, 4, 300)
    mass_mot_burton = mass_motor_electric(pows, method="burton")
    mass_mot_hobbyking = mass_motor_electric(pows, method="hobbyking")
    mass_mot_astroflight = mass_motor_electric(pows, method="astroflight")

    import matplotlib.pyplot as plt
    import seaborn as sns

    sns.set(font_scale=1)

    # loglog puts both axes on log scale; the following plot calls share those axes
    plt.loglog(pows, np.array(mass_mot_burton), label="Burton Model")
    plt.plot(pows, np.array(mass_mot_hobbyking), label="Hobbyking Model")
    plt.plot(pows, np.array(mass_mot_astroflight), label="Astroflight Model")
    plt.xlabel("Motor Power [W]")
    plt.ylabel("Motor Mass [kg]")
    plt.title("Motor Mass Models")
    plt.tight_layout()
    plt.legend()
    plt.show()

    print(
        mass_wires(wire_length=1,
                   max_current=100,
                   allowable_voltage_drop=1,
                   material="aluminum"))
コード例 #54
0
        uu = Function(V, u)
        err[xx - 1] = errornorm(ue,
                                Function(V, u),
                                norm_type="L2",
                                degree_rise=3,
                                mesh=mesh)
        print(err[xx - 1])
    # uE = interpolate(ue,V)
    # ue = uE.vector().array()
    # u = u.vector().array()

    # print scipy.linalg.norm(u-ue)
# # Plot solution
# plot(u)
# plot(interpolate(ue,V))

# interactive()
# print N,err

# print '\n\n'
# print (err[0:m-2]/err[1:m-1])
# print '\n\n'
if Saving == 'yes':
    MO.SaveEpertaMatrix(AA.down_cast().mat(), "A2d")
else:
    plt.loglog(N, err)
    plt.title('Error plot for P2 elements - L2 convergence = %f' %
              np.log2(np.average((err[0:m - 2] / err[1:m - 1]))))
    plt.xlabel('N')
    plt.ylabel('L2 error')
    plt.show()
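
# The title above reports the observed L2 rate as log2 of the mean error ratio, which assumes
# the mesh is refined by a factor of two between runs. An added sketch of a closely related
# estimate (averaging log2 of the ratios instead, and assuming err is a 1-D numpy array):
rates = np.log2(err[:-1] / err[1:])
print('observed L2 convergence rate ~', rates.mean())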
コード例 #55
0
ファイル: dEdx_weak.py プロジェクト: Jean1995/Masterarbeit
    # =========================================================
    # 	Plot
    # =========================================================

    plt.rcParams.update(conf.params)
    plt.figure(figsize=(conf.width,3.5))

    labels = [r'$e$ pair production', 'Bremsstrahlung', 'Photonuclear', 'Ionization', r'$\mu$ pair production']
    colors = ['C0', 'C1', 'C2', 'C3', 'C4']

    for dEdx, param, _label, color in zip(dEdx_photo, params, labels, colors):
        plt.loglog(
            energy,
            dEdx,
            linestyle='-',
            label=_label,
            c = color
        )

    plt.loglog(energy, energy * sigma_decay(energy, medium.mass_density), linestyle='-', label='Decay', c = 'C5')    

    plt.loglog(energy, dEdx_weak(energy), linestyle='-', label='Weak interaction', c = 'C6')

    plt.xlabel(r'$E \,/\, \mathrm{MeV} $')
    plt.ylabel(r'$\left\langle\frac{\mathrm{d}E}{\mathrm{d}X}\right\rangle \,\left/\, \left( \rm{MeV} \cdot \rm{g}^{-1} \rm{cm}^2 \right) \right. $')
    plt.grid(conf.grid_conf)
    plt.legend(loc='best')

    plt.xlim(1e5, 1e12)
コード例 #56
0
    # Log the progress made
    print('Objective value is %f at iteration %d' % (phi, i + 1))

# In[17]:

#NBVAL_IGNORE_OUTPUT

# Plot inverted velocity model
plot_velocity(model0)

# In[18]:

#NBVAL_SKIP
import matplotlib.pyplot as plt

# Plot objective function decrease
plt.figure()
plt.loglog(history)
plt.xlabel('Iteration number')
plt.ylabel('Misfit value Phi')
plt.title('Convergence')
plt.show()

# ## References
#
# [1] _Virieux, J. and Operto, S.: An overview of full-waveform inversion in exploration geophysics, GEOPHYSICS, 74, WCC1–WCC26, doi:10.1190/1.3238367, http://library.seg.org/doi/abs/10.1190/1.3238367, 2009._
#
# [2] _Haber, E., Chung, M., and Herrmann, F. J.: An effective method for parameter estimation with PDE constraints with multiple right hand sides, SIAM Journal on Optimization, 22, http://dx.doi.org/10.1137/11081126X, 2012._

# <sup>This notebook is part of the tutorial "Optimised Symbolic Finite Difference Computation with Devito" presented at the Intel® HPC Developer Conference 2017.</sup>
コード例 #57
0
for language in os.listdir(book_dir):
    for author in os.listdir(book_dir + "/" + language):
        for title in os.listdir(book_dir + "/" + language + "/" + author):
            inputfile = book_dir + "/" + language + "/" + author + "/" + title
            print(inputfile)
            text = read_book(inputfile)
            (num_unique, counts)  = word_stats(count_words(text))
            stats.loc[title_num] = language, author.capitalize(), title.replace(".txt", ""), sum(counts), num_unique
            title_num += 1

stats.length
stats.unique

import matplotlib.pyplot as plt
plt.plot(stats.length, stats.unique, "bo")
plt.loglog(stats.length, stats.unique, "bo")

stats[stats.language == "English"]
stats[stats.language == "French"]

plt.figure(figsize = (10, 10))
subset = stats[stats.language == "English"]
plt.loglog(subset.length, subset.unique, "o", label = "English", color = "crimson")

subset = stats[stats.language == "French"]
plt.loglog(subset.length, subset.unique, "o", label = "French", color = "forestgreen")

subset = stats[stats.language == "German"]
plt.loglog(subset.length, subset.unique, "o", label = "German", color = "orange")

subset = stats[stats.language == "Portuguese"]
コード例 #58
0
#qdata_bkd=[]
#Idata_bkd=[]
#for i in range(5,30):
#    qdata_bkd.append(qdata[i])
#    Idata_bkd.append(Idata[i])
#for i in range(200,300):
#    qdata_bkd.append(qdata[i])
#    Idata_bkd.append(Idata[i])
#qdata_bkd=np.array(qdata_bkd)
#Idata_bkd=np.array(Idata_bkd)
#-----------------------------------------------------------------------------------------------------
plt.figure(figsize=(7, 5), dpi=300)
#now graph the rest of the data
plt.loglog(qdata,
           Idata,
           'o',
           color='k',
           label='(5.0-2.0) (Mg$^{2+}$) diblock',
           markersize=4)  # this graphs the data
# Identify the region to fit for the Leibler model, i.e. "just the hump"
qdata_leib = np.array(qdata[1:70])
Isub_leib = np.array(Idata[1:70])
#print(len(qdata_leib),len(Isub_leib))
#------------------------------------------------------------
#identify the initial guesses for x0
x0 = [26, .18, .04, .0294, 2.79]
bds = ([0, 0.01, .001, .02,