Example No. 1
def plotMassDensity(mbincnt=50, dbincnt=50,
                    showContour=True, outfile='massDensity',
                    file=root+'07_05_18/mc_zero3e4.log'):
    """
    Plot a 2D histogram of mass and density values output by an efit monte
    carlo. The density is calculated from the mass and the periapse passage 
    of the star's orbit found in the log file (presumably S0-2).
    Saves the plot to an EPS file. You must pass in the file name
    containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=massDensity)
    file:     Name of the file containing the efit output. By default
              this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/07_05_18/mc_zero3e4.log
              as an example file.
    mbincnt:  Number of bins across the mass axis in the histogram (def=50)
    dbincnt: Number of bins across the density axis in the histogram (def=50)
    showContour:  Set to True (default value) to draw contours.
    """
    cc = objects.Constants()

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table = asciidata.open(file)

    # Make things into arrays of floats, etc.
    r0 = table[0].tonumpy()     # in pc
    x0 = table[1].tonumpy()     # in pix
    y0 = table[2].tonumpy()     # in pix
    amas = table[3].tonumpy()   # in mas
    p = table[4].tonumpy()      # in yrs
    e = table[5].tonumpy()

    # convert semi-major axis into AU
    a = amas * r0 / 1000.0

    # convert semi-major axis and period into mass
    m = a**3 / p**2

    # calculate periapse distance (in pc)
    pdist = a * (1.0 - e) / cc.au_in_pc

    # determine density (solar masses per pc^3)
    density = m / ((4.0/3.0) * math.pi * pdist**3)

    ##########
    #
    # Make 2D histogram
    #
    ##########
    # Let's put everything on a log scale first
    mlog = log10(m)
    dlog = log10(density)

    (hist, mbins, dbins) = h2d.histogram2d(mlog, dlog, bins=(mbincnt, dbincnt))

    # Need to convert the 2d histogram into floats
    probDist = array(hist, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist /= float(len(m))

    if (showContour == True):
        levels = getContourLevels(probDist)

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()

    # Display the 2D histogram
    imshow(probDist, cmap=cm.hot_r, origin='lower', aspect='auto',
           extent=[mbins[0], mbins[-1], dbins[0], dbins[-1]])

    # Make a colorbar
    #colorbar()

    # Draw contours
    if (showContour == True):
        contour(probDist, levels, origin=None, colors='black',
                extent=[mbins[0], mbins[-1], dbins[0], dbins[-1]])

    # Stretch axes
    axis('tight')

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'$\log_{10} M$ (M$_\odot$)', font)
    ylabel(r'$\log_{10} \rho$ (M$_\odot$/pc$^3$)', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp( thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp( thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
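The helper getContourLevels() used above is not included in this listing. A minimal sketch of what such a helper might do, assuming it returns the histogram levels that enclose fixed fractions (for example 68%, 95% and 99.7%) of the total probability; the function name matches the calls above but the body is a guess:

import numpy as np

def getContourLevels(probDist, fractions=(0.6827, 0.9545, 0.9973)):
    """Hypothetical sketch: contour levels enclosing the given fractions
    of the total probability in a normalized 2D histogram."""
    # Sort the bin probabilities from largest to smallest and accumulate.
    flat = np.sort(probDist.ravel())[::-1]
    cum = np.cumsum(flat)

    levels = []
    for frac in fractions:
        # First bin at which the enclosed probability reaches the fraction.
        idx = min(np.searchsorted(cum, frac * cum[-1]), len(flat) - 1)
        levels.append(flat[idx])

    # contour() expects levels in increasing order.
    return sorted(levels)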
Example No. 2
def plotSgraXY(bincnt=50, outfile='bhPosition', showContour=True,
               file=root+'07_09_05/MC04/mc_zero.log'):
    """
    Plot a 2D histogram of x0 and y0 values output by an efit monte
    carlo. Saves the plot to an EPS file. You must pass in the file name
    containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=bhPosition)
    file:     Name of the file containing the efit output. By default
              this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/07_09_05/MC04/mc_zero.log
              as an example file.
    bincnt:  Number of bins across the axes in the histogram (def=50)
    showContour:  Set to True (default value) to draw contours.
    """

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table = asciidata.open(file)

    # Make things into arrays of floats, etc.
    x0 = table[1].tonumpy()  # in pix
    y0 = table[2].tonumpy()  # in pix

    ##########
    #
    # Make 2D histogram
    #
    ##########
    (hist, x0bins, y0bins) = h2d.histogram2d(x0, y0, bins=(bincnt, bincnt))

    # Need to convert the 2d histogram into floats
    probDist = array(hist, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist /= float(len(x0))

    # Determine levels for contours
    levels = getContourLevels(probDist)

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()
    subplots_adjust(left=0.16, bottom=0.12, right=0.95, top=0.92)

    # Display the 2D histogram
    imshow(probDist, cmap=cm.hot_r, origin='lower', aspect='auto',
           extent=[x0bins[0], x0bins[-1], y0bins[0], y0bins[-1]])

    # Make a colorbar
    #colorbar()

    # Draw contours
    if (showContour == True):
        contour(probDist, levels, origin=None, colors='black',
                extent=[x0bins[0], x0bins[-1], y0bins[0], y0bins[-1]])

    # Stretch axes
    axis('equal')

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'$\Delta$RA Offset (")', font)
    ylabel(r'$\Delta$Dec. Offset (")', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp( thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp( thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
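These plotting examples all assume several module-level names that the snippets themselves do not define: root (the analysis directory prefix quoted in the docstrings), asciidata, the h2d histogram module, objects.Constants, and the pylab plotting namespace (clf, imshow, contour, rc, setp, savefig, cm, ...). A hedged sketch of that setup and of a sample call, for illustration only:

import math
import asciidata               # ASCII table reader used via asciidata.open(file)
import histogram2d as h2d      # assumed helper providing h2d.histogram2d(x, y, bins=...)
import objects                 # assumed helper providing Constants() (e.g. au_in_pc)
from pylab import *            # clf, imshow, contour, rc, setp, savefig, cm, ...

# Directory prefix reconstructed from the docstrings above.
root = '/net/uni/Groups/ghez/ghez/analysis/Ro/'

# Example call: histogram the fitted Sgr A* position with 100 bins per axis.
plotSgraXY(bincnt=100, outfile='bhPosition_100bins',
           file=root + '07_09_05/MC04/mc_zero.log')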
Example No. 3
def plotMassRo(mbincnt=50, r0bincnt=50, showContour=True, outfile='massRo',
               file=root+'06_10_20/MC3/mc_zero1e4d.log'):
    """
    Plot a 2D histogram of mass and Ro values output by an efit monte
    carlo. Saves the plot to EPS and PNG files. You must pass in the file
    name containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=massRo)
    file:     Name of the file containing the efit output. By default
              this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/06_10_20/MC3/mc_zero1e4d.log
              as an example file.
    mbincnt:  Number of bins across the mass axis in the histogram (def=50)
    r0bincnt: Number of bins across the Ro axis in the histogram (def=50)
    showContour:  Set to True (default value) to draw contours.
    """

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table = asciidata.open(file)

    # Make things into arrays of floats, etc.
    r0 = table[0].tonumpy()  # in pc
    x0 = table[1].tonumpy()  # in pix
    y0 = table[2].tonumpy()  # in pix
    a = table[3].tonumpy()   # in mas
    p = table[4].tonumpy()   # in yrs

    # convert semi-major axis and period into mass
    m = (a * r0 / 1000.0)**3 / p**2

    ##########
    #
    # Make 2D histogram
    #
    ##########
    (hist, r0bins, mbins) = h2d.histogram2d(r0, m, bins=(r0bincnt, mbincnt))

    # Need to convert the 2d histogram into floats
    probDist = array(hist, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist /= float(len(m))

    if (showContour == True):
        levels = getContourLevels(probDist)

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()

    # Display the 2D histogram
    # Convert bins into x10^6 Msun and kpc
    mbins /= 1.0e6
    r0bins /= 1.0e3
    imshow(probDist, cmap=cm.hot_r, origin='lower', aspect='auto',
           extent=[r0bins[0], r0bins[-1], mbins[0], mbins[-1]])

    # Make a colorbar
    #colorbar()

    # Draw contours
    if (showContour == True):
        contour(probDist, levels, origin=None, colors='black',
                extent=[r0bins[0], r0bins[-1], mbins[0], mbins[-1]])

    # Stretch axes
    axis('tight')

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'R$_o$ (kpc)', font)
    ylabel(r'Mass ($\times$10$^6$ M$_\odot$)', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp( thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp( thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
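The mass conversion in plotMassRo (and in plotMassDensity above) is Kepler's third law in convenient units: the angular semi-major axis in mas times the distance in pc, divided by 1000, gives the semi-major axis in AU, and then M[Msun] = a[AU]^3 / P[yr]^2. A small worked sketch with purely illustrative numbers (not fit results):

amas = 125.0    # angular semi-major axis in mas (illustrative)
r0 = 8000.0     # distance in pc (illustrative)
p = 15.8        # orbital period in years (illustrative)

a_au = amas * r0 / 1000.0    # 125 mas at 8000 pc -> 1000 AU
mass = a_au**3 / p**2        # Kepler's third law -> about 4.0e6 Msun
print('a = %.0f AU, M = %.2e Msun' % (a_au, mass))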
Example No. 4
def plotRoVz(vzbincnt=50, r0bincnt=50, showContour=True, outfile='RoVz',
               file=root+'08_02_16/MCdir/bias_cent_vel3d/mc_zero.new.1e2.log'):
    """
    Plot a 2D histogram of Ro and Vz values output by an efit monte
    carlo. Saves the plot to EPS and PNG files. You must pass in the file
    name containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=RoVz)
    file:     Name of the file containing the efit output. By default
              this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/08_02_16/MCdir/bias_cent_vel3d/mc_zero.new.1e2.log
              as an example file.
    vzbincnt:  Number of bins across the Vz axis in the histogram (def=50)
    r0bincnt: Number of bins across the Ro axis in the histogram (def=50)
    showContour:  Set to True (default value) to draw contours.
    """

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table = asciidata.open(file)

    # Make things into arrays of floats, etc.
    r0 = table[0].tonumpy()  # in pc
    x0 = table[1].tonumpy()  # in pix
    y0 = table[2].tonumpy()  # in pix
    a = table[3].tonumpy()   # in mas
    p = table[4].tonumpy()   # in yrs
    vz = table[12].tonumpy()   # in km/sec
    
    ##########
    #
    # Make 2D histogram
    #
    ##########
    (hist, vzbins, r0bins) = h2d.histogram2d(vz, r0, bins=(vzbincnt, r0bincnt))

    # Need to convert the 2d histogram into floats
    probDist = array(hist, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist /= float(len(r0))

    if (showContour == True):
        levels = getContourLevels(probDist)

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()

    # Display the 2D histogram
    # Convert r0bins into kpc
    r0bins /= 1.0e3
    imshow(probDist, cmap=cm.hot_r, origin='lower', aspect='auto',
           extent=[vzbins[0], vzbins[-1], r0bins[0], r0bins[-1]])

    # Make a colorbar
    #colorbar()

    # Draw contours
    if (showContour == True):
        contour(probDist, levels, origin=None, colors='black',
                extent=[vzbins[0], vzbins[-1], r0bins[0], r0bins[-1]])

    # Stretch axes
    axis('tight')
    #axis([-120, 80, 6, 11])

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'V$_z$ (km/sec)', font)
    ylabel(r'R$_o$ (kpc)', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp( thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp( thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
Example No. 5
def process_files(fn):
    drive_files = {}                         # index of drives
    all_parts  = []
    all_files = []
    files_by_md5 = {}           # a dictionary of sets of fiobject, indexed by md5
    extension_len_histogram = histogram2d()
    extension_fragments_histogram = histogram2d()
    partition_histogram = histogram2d()

    def cb(fi):
        # add the md5 to the set
        if fi.is_file() and fi.filesize():
            # setdefault stores the new set in the dict; get() would discard it
            files_by_md5.setdefault(fi.md5,set()).add(fi)
            ext = fi.ext()
            if not ext: print(fi.meta_type(),fi)
            extension_len_histogram.add(ext,fi.filesize())
            extension_fragments_histogram.add(ext,fi.fragments())
            partition_histogram.add(fi.partition(),fi.filesize())

    if fn.endswith('xml'):
        fiwalk.fiwalk_using_sax(xmlfile=open(fn),callback=cb)
    else:
        fiwalk.fiwalk_using_sax(imagefile=open(fn),callback=cb)


    #
    # Typeset the information
    #

    tab = ttable()
    tab.header     = "File extension popularity and average size (suppressing 0-len files)"
    tab.col_headings = [['Ext','Count','Average Size','Max','Std Dev']]
    tab.omit_row = [[0,'']]
    extension_len_histogram.statcol = ['iaverage','maxx','istddev']
    print(extension_len_histogram.typeset(tab=tab))

    #
    # Information about fragmentation patterns
    #
    tab = ttable()
    tab.header="Fragmentation pattern by file system and file type:"
    tab.col_headings = [['Ext','Count','Average Size','Max','Std Dev']]
    tab.omit_row = [[0,'']]
    extension_fragments_histogram.statcol = ['iaverage','maxx','istddev']
    print(extension_fragments_histogram.typeset(tab=tab))
    exit(0)

    for fstype in fstypes:
        for ftype in ['jpg','pdf','doc','txt']:
            len1stats = statbag()
            len2stats = statbag()
            delta_hist = histogram()
            delta_re = re.compile("(\d+)\-?(\d+)? ?(\d+)\-?(\d+)?")
            for i in filter((lambda f: f.ext()==ftype and f.fragments==2),all_files):
                runs = False
                if(hasattr(i,'block_runs')): runs = i.block_runs
                if(hasattr(i,'sector_runs')): runs = i.sector_runs
                if not runs: continue
                m = delta_re.search(runs)
                r = []
                for j in range(1,5):
                    try:
                        r.append(int(m.group(j)))
                    except TypeError:
                        r.append(int(m.group(j-1)))

                len1 = r[1] - r[0] + 1
                len2 = r[3] - r[2] + 1
                delta = r[2]-r[1]

                len1stats.addx(len1)
                len2stats.addx(len2)
                delta_hist.add(delta)

            if len1stats.count()>0:
                print("\n\n")
                print("fstype:",fstype,"  ftype:",ftype)
                print("len1 average: %f stddev: %f" % (len1stats.average(),len1stats.stddev()))
                print("len2 average: %f stddev: %f" % (len2stats.average(),len2stats.stddev()))
                print("delta average: %f" % delta_hist.average())
                print("delta histogram:")
                delta_hist.print_top(10)


    exit(0)


    print("Partition histogram:")
    partition_histogram.print_top(n=100)
    print("Counts by extension:")
    extension_len_histogram.print_top(n=100)
    print("Fragments by extension:")
    extension_fragments_histogram.print_top(n=100)

    exit(0)
    for fstype in fstypes:
        if fstype=='(unrecognized)': continue
        print(fstype,"Partitions:")

        def isfstype(x): return x.fstype==fstype
        these_parts = filter(isfstype,all_parts)
        these_files = []
        for part in these_parts:
            these_files.extend(part.files)
        print(fragmentation_table(these_files))


    exit(0)

    sys.exit(0)


    #
    # Typeset information about file extensions
    #
    hist_exts = histogram2d()
    hist_exts.topn = 20
    for i in all_files:
        if i.size>0 and i.fragments>0: hist_exts.add(i.ext(),i.size)
    tab = table()
    tab.header     = "File extension popularity and average size (suppressing 0-len files)"
    tab.col_headings = ['Ext','Count','Average Size','Max','Std Dev']
    tab.omit_row = [[0,'']]
    hist_exts.statcol = ['iaverage','maxx','istddev']
    print(hist_exts.typeset(t=tab))

    hist_exts = histogram2d()
    hist_exts.topn = 20
    for i in all_files:
        if i.fragments>0: hist_exts.add(i.ext(),i.fragments)
    tab = table()
    tab.header     = "Fragmentation by file extension (suppressing files with 0 fragments)"
    tab.col_headings = ['Ext','Count','Avg Fragments','Max','Std Dev']
    tab.omit_row = [[0,'']]
    hist_exts.statcol = ['average','maxx','stddev']
    print(hist_exts.typeset(t=tab))

    print("===========================")


    #
    # Typeset the File Systems on Drives table
    #

    tab = table()
    tab.header     = "File Systems on Drives"
    tab.col_headings = ["FS Type","Drives","MBytes"]
    tab.col_totals = [1,2]
    fstypeh.statcol = 'sumx'
    print(fstypeh.typeset(t=tab))

    #
    # Typeset overall fragmentation stats
    #

    print(fragmentation_table(all_files))
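Note that histogram2d in this example is not the 2D binning routine of the plotting examples above: it is used as a tallying object with add(key, value), a statcol attribute, typeset(tab=...) and print_top(n), and the early exit(0) means only the first two tables are ever printed. A rough stand-in for the assumed interface (the class name and behavior are inferred, not taken from the source):

from collections import defaultdict

class TallySketch:
    """Hypothetical stand-in for the histogram2d tally used above:
    one list of observed values per key (e.g. per file extension)."""

    def __init__(self):
        self.values = defaultdict(list)

    def add(self, key, value):
        self.values[key].append(value)

    def print_top(self, n=10):
        # Rank keys by how many values were recorded for each one.
        ranked = sorted(self.values.items(),
                        key=lambda kv: len(kv[1]), reverse=True)
        for key, vals in ranked[:n]:
            print(key, len(vals), sum(vals) / float(len(vals)))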
Example No. 6
def ImageIntegrate(image,data,masks,blkSize=128,dlg=None,returnN=False):
    'Needs a doc string'    #for q, log(q) bins need data['binType']
    import histogram2d as h2d
    print 'Begin image integration'
    LUtth = np.array(data['IOtth'])
    LRazm = np.array(data['LRazimuth'],dtype=np.float64)
    numAzms = data['outAzimuths']
    numChans = data['outChannels']
    azmOff = data['azmthOff']
    Dazm = (LRazm[1]-LRazm[0])/numAzms
    if 'log(q)' in data['binType']:
        lutth = np.log(4.*np.pi*npsind(LUtth/2.)/data['wavelength'])
    elif 'q' == data['binType']:
        lutth = 4.*np.pi*npsind(LUtth/2.)/data['wavelength']
    elif '2-theta' in data['binType']:
        lutth = LUtth                
    dtth = (lutth[1]-lutth[0])/numChans
    muT = data['SampleAbs'][0]
    if 'SASD' in data['type']:
        muT = -np.log(muT)/2.       #Transmission to 1/2 thickness muT
    NST = np.zeros(shape=(numAzms,numChans),order='F',dtype=np.float32)
    H0 = np.zeros(shape=(numAzms,numChans),order='F',dtype=np.float32)
    imageN = len(image)
    Nx,Ny = data['size']
    nXBlks = (Nx-1)/blkSize+1
    nYBlks = (Ny-1)/blkSize+1
    Nup = nXBlks*nYBlks*3+3
    tbeg = time.time()
    Nup = 0
    if dlg:
        dlg.Update(Nup)
    times = [0,0,0,0,0]
    for iBlk in range(nYBlks):
        iBeg = iBlk*blkSize
        iFin = min(iBeg+blkSize,Ny)
        for jBlk in range(nXBlks):
            jBeg = jBlk*blkSize
            jFin = min(jBeg+blkSize,Nx)
            # next is most expensive step!
            TA,tam = Make2ThetaAzimuthMap(data,masks,(iBeg,iFin),(jBeg,jFin),times)           #2-theta & azimuth arrays & create position mask
            Nup += 1
            if dlg:
                dlg.Update(Nup)
            Block = image[iBeg:iFin,jBeg:jFin]
            t0 = time.time()
            tax,tay,taz,tad,tabs = Fill2ThetaAzimuthMap(masks,TA,tam,Block)    #and apply masks
            times[2] += time.time()-t0
            Nup += 1
            if dlg:
                dlg.Update(Nup)
            tax = np.where(tax > LRazm[1],tax-360.,tax)                 #put azm inside limits if possible
            tax = np.where(tax < LRazm[0],tax+360.,tax)
            if data['SampleAbs'][1]:
                if 'PWDR' in data['type']:
                    muR = muT*(1.+npsind(tax)**2/2.)/(npcosd(tay))
                    tabs = G2pwd.Absorb(data['SampleShape'],muR,tay)
                elif 'SASD' in data['type']:    #assumes flat plate sample normal to beam
                    tabs = G2pwd.Absorb('Fixed',muT,tay)
            if 'log(q)' in data['binType']:
                tay = np.log(4.*np.pi*npsind(tay/2.)/data['wavelength'])
            elif 'q' == data['binType']:
                tay = 4.*np.pi*npsind(tay/2.)/data['wavelength']
            t0 = time.time()
            if any([tax.shape[0],tay.shape[0],taz.shape[0]]):
                NST,H0 = h2d.histogram2d(len(tax),tax,tay,taz*tad/tabs,
                    numAzms,numChans,LRazm,lutth,Dazm,dtth,NST,H0)
            times[3] += time.time()-t0
            Nup += 1
            if dlg:
                dlg.Update(Nup)
    t0 = time.time()
    NST = np.array(NST,dtype=np.float)
    H0 = np.divide(H0,NST)
    H0 = np.nan_to_num(H0)
    H2 = np.array([tth for tth in np.linspace(lutth[0],lutth[1],numChans+1)])
    if 'log(q)' in data['binType']:
        H2 = 2.*npasind(np.exp(H2)*data['wavelength']/(4.*np.pi))
    elif 'q' == data['binType']:
        H2 = 2.*npasind(H2*data['wavelength']/(4.*np.pi))
    if Dazm:        
        H1 = np.array([azm for azm in np.linspace(LRazm[0],LRazm[1],numAzms+1)])
    else:
        H1 = LRazm
    H0 /= npcosd(H2[:-1])           #**2? I don't think so, **1 is right for powders
    if 'SASD' in data['type']:
        H0 /= npcosd(H2[:-1])           #one more for small angle scattering data?
    if data['Oblique'][1]:
        H0 /= G2pwd.Oblique(data['Oblique'][0],H2[:-1])
    if 'SASD' in data['type'] and data['PolaVal'][1]:
        #NB: in G2pwd.Polarization azm is defined from plane of polarization, not image x axis!
        H0 /= np.array([G2pwd.Polarization(data['PolaVal'][0],H2[:-1],Azm=azm-90.)[0] for azm in (H1[:-1]+np.diff(H1)/2.)])
    Nup += 1
    if dlg:
        dlg.Update(Nup)
    times[4] += time.time()-t0
    print 'Step times: \n apply masks  %8.3fs xy->th,azm   %8.3fs fill map     %8.3fs \
        \n binning      %8.3fs cleanup      %8.3fs'%(times[0],times[1],times[2],times[3],times[4])
    print "Elapsed time:","%8.3fs"%(time.time()-tbeg)
    print 'Integration complete'
    if returnN:     #As requested by Steven Weigand
        return H0,H1,H2,NST
    else:
        return H0,H1,H2
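In the h2d.histogram2d call above, NST accumulates the number of pixels that land in each (azimuth, 2-theta/q) bin and H0 accumulates the sum of their corrected intensities; the later np.divide(H0, NST) turns that into a mean intensity per bin. A rough pure-NumPy sketch of the same per-block accumulation (illustrative only, not the compiled routine GSAS-II actually uses):

import numpy as np

def accumulate_block(tax, tay, weights, LRazm, lutth, numAzms, numChans, NST, H0):
    """Bin one block of pixels onto the (azimuth x 2-theta/q) grid and
    update the running count (NST) and weighted-sum (H0) arrays."""
    azm_edges = np.linspace(LRazm[0], LRazm[1], numAzms + 1)
    tth_edges = np.linspace(lutth[0], lutth[1], numChans + 1)

    counts, _, _ = np.histogram2d(tax, tay, bins=[azm_edges, tth_edges])
    sums, _, _ = np.histogram2d(tax, tay, bins=[azm_edges, tth_edges],
                                weights=weights)
    return NST + counts, H0 + sums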
Example No. 7
            return x.fstype == fstype

        these_parts = filter(isfstype, all_parts)
        these_files = []
        for part in these_parts:
            these_files.extend(part.files)
        print fragmentation_table(these_files)

    exit(0)

    sys.exit(0)

    #
    # Typeset information about file extensions
    #
    hist_exts = histogram2d()
    hist_exts.topn = 20
    for i in all_files:
        if i.size > 0 and i.fragments > 0: hist_exts.add(i.ext(), i.size)
    tab = table()
    tab.header = "File extension popularity and average size (suppressing 0-len files)"
    tab.col_headings = ['Ext', 'Count', 'Average Size', 'Max', 'Std Dev']
    tab.omit_row = [[0, '']]
    hist_exts.statcol = ['iaverage', 'maxx', 'istddev']
    print hist_exts.typeset(t=tab)

    hist_exts = histogram2d()
    hist_exts.topn = 20
    for i in all_files:
        if i.fragments > 0: hist_exts.add(i.ext(), i.fragments)
    tab = table()
Example No. 8
def process_files(fn):
    drive_files = {}                         # index of drives
    all_parts  = []
    all_files = []
    files_by_md5 = {}           # a dictionary of sets of fiobject, indexed by md5
    extension_len_histogram = histogram2d()
    extension_fragments_histogram = histogram2d()
    partition_histogram = histogram2d()

    def cb(fi):
        # add the md5 to the set
        if fi.is_file() and fi.filesize():
            # setdefault stores the new set in the dict; get() would discard it
            files_by_md5.setdefault(fi.md5,set()).add(fi)
            ext = fi.ext()
            if not ext: print fi.meta_type(),fi
            extension_len_histogram.add(ext,fi.filesize())
            extension_fragments_histogram.add(ext,fi.fragments())
            partition_histogram.add(fi.partition(),fi.filesize())

    if fn.endswith('xml'):
        fiwalk.fiwalk_using_sax(xmlfile=open(fn),callback=cb)
    else:
        fiwalk.fiwalk_using_sax(imagefile=open(fn),callback=cb)
    

    #
    # Typeset the information
    #

    tab = ttable()
    tab.header     = "File extension popularity and average size (suppressing 0-len files)"
    tab.col_headings = [['Ext','Count','Average Size','Max','Std Dev']]
    tab.omit_row = [[0,'']]
    extension_len_histogram.statcol = ['iaverage','maxx','istddev']
    print extension_len_histogram.typeset(tab=tab)

    #
    # Information about fragmentation patterns
    #
    tab = ttable()
    tab.header="Fragmentation pattern by file system and file type:"
    tab.col_headings = [['Ext','Count','Average Size','Max','Std Dev']]
    tab.omit_row = [[0,'']]
    extension_fragments_histogram.statcol = ['iaverage','maxx','istddev']
    print extension_fragments_histogram.typeset(tab=tab)
    exit(0)

    for fstype in fstypes:
        for ftype in ['jpg','pdf','doc','txt']:
            len1stats = statbag()
            len2stats = statbag()
            delta_hist = histogram()
            delta_re = re.compile("(\d+)\-?(\d+)? ?(\d+)\-?(\d+)?")
            for i in filter( (lambda(f): f.ext()==ftype and f.fragments==2),all_files):
                runs = False
                if(hasattr(i,'block_runs')): runs = i.block_runs
                if(hasattr(i,'sector_runs')): runs = i.sector_runs
                if not runs: continue
                m = delta_re.search(runs)
                r = []
                for j in range(1,5):
                    try:
                        r.append(int(m.group(j)))
                    except TypeError:
                        r.append(int(m.group(j-1)))

                len1 = r[1] - r[0] + 1
                len2 = r[3] - r[2] + 1
                delta = r[2]-r[1]
                
                len1stats.addx(len1)
                len2stats.addx(len2)
                delta_hist.add(delta)

            if len1stats.count()>0:
                print "\n\n"
                print "fstype:",fstype,"  ftype:",ftype
                print "len1 average: %f stddev: %f" % (len1stats.average(),len1stats.stddev())
                print "len2 average: %f stddev: %f" % (len2stats.average(),len2stats.stddev())
                print "delta average: %f" % delta_hist.average()
                print "delta histogram:"
                delta_hist.print_top(10)
Example No. 9
def plotSgraXY(bincnt=50,
               outfile='bhPosition',
               showContour=True,
               file=root + '07_09_05/MC04/mc_zero.log'):
    """
    Plot a 2D histogram of x0 and y0 values output by an efit monte
    carlo. Saves the plot to an EPS file. You must pass in the file name
    containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=bhPosition)
    file:     Name of the file containing the efit output. By default
              this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/07_09_05/MC04/mc_zero.log
              as an example file.
    bincnt:  Number of bins across the axes in the histogram (def=50)
    showContour:  Set to True (default value) to draw contours.
    """

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table = asciidata.open(file)

    # Make things into arrays of floats, etc.
    x0 = table[1].tonumpy()  # in pix
    y0 = table[2].tonumpy()  # in pix

    ##########
    #
    # Make 2D histogram
    #
    ##########
    (hist, x0bins, y0bins) = h2d.histogram2d(x0, y0, bins=(bincnt, bincnt))

    # Need to convert the 2d histogram into floats
    probDist = array(hist, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist /= float(len(x0))

    # Determine levels for contours
    levels = getContourLevels(probDist)

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()
    subplots_adjust(left=0.16, bottom=0.12, right=0.95, top=0.92)

    # Display the 2D histogram
    imshow(probDist,
           cmap=cm.hot_r,
           origin='lower',
           aspect='auto',
           extent=[x0bins[0], x0bins[-1], y0bins[0], y0bins[-1]])

    # Make a colorbar
    #colorbar()

    # Draw contours
    if (showContour == True):
        contour(probDist,
                levels,
                origin=None,
                colors='black',
                extent=[x0bins[0], x0bins[-1], y0bins[0], y0bins[-1]])

    # Stretch axes
    axis('equal')

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'$\Delta$RA Offset (")', font)
    ylabel(r'$\Delta$Dec. Offset (")', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp(thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp(thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
Example No. 10
def plotComparePos(bincnt=50,
                   outfile='bhComparePos',
                   file1=root + '08_02_16/MCdir/bias_cent_vel3d/mc_zero.log',
                   file2=root + '08_02_16/MCdir_16/bias_cent_vel/mc_zero.log',
                   sgra=root + '08_02_16/points/SgrA.deep_lgsao.points'):
    """
    Plot a 2D histogram of x0 and y0 values output by an efit monte
    carlo. Saves the plot to an EPS file. You must pass in the file name
    containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=bhComparePos)
    file1:    Name of the file containing the efit output for S0-2.
              By default this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/08_02_16/MCdir/bias_cent_vel3d/mc_zero.log
    file2:    Name of the file containing the efit output for S0-16.
              By default this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/08_02_16/MCdir_16/bias_cent_vel/mc_zero.log
    sgra:     The points file for Sgr A*. By default this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/08_02_16/points/SgrA.deep_lgsao.points
    bincnt:   Number of bins across the axes in the histogram (def=50)
    """

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table1 = asciidata.open(file1)
    table2 = asciidata.open(file2)

    # Make things into arrays of floats, etc.
    x01 = table1[1].tonumpy()  # in pix
    y01 = table1[2].tonumpy()  # in pix
    x02 = table2[1].tonumpy()  # in pix
    y02 = table2[2].tonumpy()  # in pix

    ##########
    #
    # Make 2D histogram
    #
    ##########
    (hist1, x0bins1, y0bins1) = h2d.histogram2d(x01,
                                                y01,
                                                bins=(bincnt, bincnt))
    (hist2, x0bins2, y0bins2) = h2d.histogram2d(x02,
                                                y02,
                                                bins=(bincnt, bincnt))

    # Need to convert the 2d histogram into floats
    probDist1 = array(hist1, dtype=float)
    probDist2 = array(hist2, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist1 /= float(len(x01))
    probDist2 /= float(len(x02))

    # Find the dynamical center based on S0-2's orbit
    maxPix = probDist1.flatten().argmax()
    sgraDynX = x0bins1[maxPix % probDist1.shape[1]]
    sgraDynY = y0bins1[maxPix / probDist1.shape[1]]
    #scale = 9.950 # mas/pixel
    scale = 1000.0  # mas/pixel
    print 'Using Dynamical Center as Reference (mas): x = %5.1f; y = %5.1f' % \
          (sgraDynX*scale, sgraDynY*scale)
    print 'Using plate scale of %8.3f mas/pixel' % (scale)

    # Get contour levels.
    levels1 = getContourLevels(probDist1)
    levels2 = getContourLevels(probDist2)

    ##########
    #
    # Get photometric positions of Sgr A*
    #
    ##########
    sgraTable = asciidata.open(sgra)
    sgraX = sgraTable[1].tonumpy()
    sgraY = sgraTable[2].tonumpy()
    sgraXerr = sgraTable[3].tonumpy()
    sgraYerr = sgraTable[4].tonumpy()

    ##########
    #
    # Convert everything into arcsec offset from the
    # dynamical center (set by S0-2's orbit).
    #
    ##########
    #sgraX = (sgraX - sgraDynX) * -scale
    #sgraY = (sgraY - sgraDynY) * scale
    #x0bins1 = (x0bins1 - sgraDynX) * -scale
    #x0bins2 = (x0bins2 - sgraDynX) * -scale
    #y0bins1 = (y0bins1 - sgraDynY) * scale
    #y0bins2 = (y0bins2 - sgraDynY) * scale

    sgraX = (sgraX) * -scale
    sgraY = (sgraY) * scale
    x0bins1 = (x0bins1) * -scale
    x0bins2 = (x0bins2) * -scale
    y0bins1 = (y0bins1) * scale
    y0bins2 = (y0bins2) * scale

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()
    subplots_adjust(left=0.16, bottom=0.12, right=0.95, top=0.92)

    # Plot SgrA*-Radio
    an = linspace(0, 2 * pi, 100)
    plot(3.8 * cos(an), 4.2 * sin(an), 'k--')
    plot(2. * 3.8 * cos(an), 2. * 4.2 * sin(an), 'k--')
    plot(3. * 3.8 * cos(an), 3. * 4.2 * sin(an), 'k--')

    # Draw the probability contours for each star
    c1 = contour(probDist1,
                 levels1,
                 origin=None,
                 colors='blue',
                 extent=[x0bins1[0], x0bins1[-1], y0bins1[0], y0bins1[-1]])
    c2 = contour(probDist2,
                 levels2,
                 origin=None,
                 colors='red',
                 extent=[x0bins2[0], x0bins2[-1], y0bins2[0], y0bins2[-1]])

    errorbar(sgraX, sgraY, xerr=sgraXerr, yerr=sgraYerr, fmt='k.')

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'$\Delta$RA Offset (mas)', font)
    ylabel(r'$\Delta$Dec. Offset (mas)', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp(thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp(thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    legend((c1, c2), ('S0-2', 'S0-16'))
    legText = gca().get_legend().get_texts()
    setp(legText[0], color='b')
    setp(legText[1], color='r')

    # Stretch axes
    axis('equal')
    rng = axis()
    xlim(rng[1], rng[0])

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
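The dynamical-center lookup in plotComparePos converts the flat argmax index back into a column and a row by hand, relying on Python 2 integer division for the row (maxPix / shape[1]). A small sketch of the same lookup written with the more explicit NumPy idiom (a hypothetical helper, not part of the original code):

import numpy as np

def dynamical_center(probDist, xbins, ybins):
    """Return (x, y) of the most probable bin of a 2D histogram."""
    row, col = np.unravel_index(np.argmax(probDist), probDist.shape)
    return xbins[col], ybins[row]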
Example No. 11
def plotMassDensity(mbincnt=50,
                    dbincnt=50,
                    showContour=True,
                    outfile='massDensity',
                    file=root + '07_05_18/mc_zero3e4.log'):
    """
    Plot a 2D histogram of mass and density values output by an efit monte
    carlo. The density is calculated from the mass and the periapse passage 
    of the star's orbit found in the log file (presumably S0-2).
    Saves the plot to an EPS file. You must pass in the file name
    containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=massDensity)
    file:     Name of the file containing the efit output. By default
              this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/07_05_18/mc_zero3e4.log
              as an example file.
    mbincnt:  Number of bins across the mass axis in the histogram (def=50)
    dbincnt: Number of bins across the density axis in the histogram (def=50)
    showContour:  Set to True (default value) to draw contours.
    """
    cc = objects.Constants()

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table = asciidata.open(file)

    # Make things into arrays of floats, etc.
    r0 = table[0].tonumpy()  # in pc
    x0 = table[1].tonumpy()  # in pix
    y0 = table[2].tonumpy()  # in pix
    amas = table[3].tonumpy()  # in mas
    p = table[4].tonumpy()  # in yrs
    e = table[5].tonumpy()

    # convert semi-major axis into AU
    a = amas * r0 / 1000.0

    # convert semi-major axis and period into mass
    m = a**3 / p**2

    # calculate periapse distance (in pc)
    pdist = a * (1.0 - e) / cc.au_in_pc

    # determine density (solar masses per pc^3)
    density = m / ((4.0 / 3.0) * math.pi * pdist**3)

    ##########
    #
    # Make 2D histogram
    #
    ##########
    # Let's put everything on a log scale first
    mlog = log10(m)
    dlog = log10(density)

    (hist, mbins, dbins) = h2d.histogram2d(mlog, dlog, bins=(mbincnt, dbincnt))

    # Need to convert the 2d histogram into floats
    probDist = array(hist, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist /= float(len(m))

    if (showContour == True):
        levels = getContourLevels(probDist)

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()

    # Display the 2D histogram
    imshow(probDist,
           cmap=cm.hot_r,
           origin='lower',
           aspect='auto',
           extent=[mbins[0], mbins[-1], dbins[0], dbins[-1]])

    # Make a colorbar
    #colorbar()

    # Draw contours
    if (showContour == True):
        contour(probDist,
                levels,
                origin=None,
                colors='black',
                extent=[mbins[0], mbins[-1], dbins[0], dbins[-1]])

    # Stretch axes
    axis('tight')

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'$\log_{10} M$ (M$_\odot$)', font)
    ylabel(r'$\log_{10} \rho$ (M$_\odot$/pc$^3$)', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp(thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp(thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
Example No. 12
def plotMassRo(mbincnt=50,
               r0bincnt=50,
               showContour=True,
               outfile='massRo',
               file=root + '06_10_20/MC3/mc_zero1e4d.log'):
    """
    Plot a 2D histogram of mass and Ro values output by an efit monte
    carlo. Saves the plot to EPS and PNG files. You must pass in the file
    name containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=massRo)
    file:     Name of the file containing the efit output. By default
              this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/06_10_20/MC3/mc_zero1e4d.log
              as an example file.
    mbincnt:  Number of bins across the mass axis in the histogram (def=50)
    r0bincnt: Number of bins across the Ro axis in the histogram (def=50)
    showContour:  Set to True (default value) to draw contours.
    """

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column
    table = asciidata.open(file)

    # Make things into arrays of floats, etc.
    r0 = table[0].tonumpy()  # in pc
    x0 = table[1].tonumpy()  # in pix
    y0 = table[2].tonumpy()  # in pix
    a = table[3].tonumpy()  # in mas
    p = table[4].tonumpy()  # in yrs

    # convert semi-major axis and period into mass
    m = (a * r0 / 1000.0)**3 / p**2

    ##########
    #
    # Make 2D histogram
    #
    ##########
    (hist, r0bins, mbins) = h2d.histogram2d(r0, m, bins=(r0bincnt, mbincnt))

    # Need to convert the 2d histogram into floats
    probDist = array(hist, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist /= float(len(m))

    if (showContour == True):
        levels = getContourLevels(probDist)

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()

    # Display the 2D histogram
    # Convert bins into x10^6 Msun and kpc
    mbins /= 1.0e6
    r0bins /= 1.0e3
    imshow(probDist,
           cmap=cm.hot_r,
           origin='lower',
           aspect='auto',
           extent=[r0bins[0], r0bins[-1], mbins[0], mbins[-1]])

    # Make a colorbar
    #colorbar()

    # Draw contours
    if (showContour == True):
        contour(probDist,
                levels,
                origin=None,
                colors='black',
                extent=[r0bins[0], r0bins[-1], mbins[0], mbins[-1]])

    # Stretch axes
    axis('tight')

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'R$_o$ (kpc)', font)
    ylabel(r'Mass ($\times$10$^6$ M$_\odot$)', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp(thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp(thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
Example No. 13
def plotRoVz(vzbincnt=50,
             r0bincnt=50,
             showContour=True,
             outfile='RoVz',
             file=root + '08_02_16/MCdir/bias_cent_vel3d/mc_zero.new.1e2.log'):
    """
    Plot a 2D histogram of Ro and Vz values output by an efit monte
    carlo. Saves the plot to EPS and PNG files. You must pass in the file
    name containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=RoVz)
    file:     Name of the file containing the efit output. By default
              this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/08_02_16/MCdir/bias_cent_vel3d/mc_zero.new.1e2.log
              as an example file.
    vzbincnt:  Number of bins across the Vz axis in the histogram (def=50)
    r0bincnt: Number of bins across the Ro axis in the histogram (def=50)
    showContour:  Set to True (default value) to draw contours.
    """

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table = asciidata.open(file)

    # Make things into arrays of floats, etc.
    r0 = table[0].tonumpy()  # in pc
    x0 = table[1].tonumpy()  # in pix
    y0 = table[2].tonumpy()  # in pix
    a = table[3].tonumpy()  # in mas
    p = table[4].tonumpy()  # in yrs
    vz = table[12].tonumpy()  # in km/sec

    ##########
    #
    # Make 2D histogram
    #
    ##########
    (hist, vzbins, r0bins) = h2d.histogram2d(vz, r0, bins=(vzbincnt, r0bincnt))

    # Need to convert the 2d histogram into floats
    probDist = array(hist, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist /= float(len(r0))

    if (showContour == True):
        levels = getContourLevels(probDist)

    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()

    # Display the 2D histogram
    # Convert r0bins into kpc
    r0bins /= 1.0e3
    imshow(probDist,
           cmap=cm.hot_r,
           origin='lower',
           aspect='auto',
           extent=[vzbins[0], vzbins[-1], r0bins[0], r0bins[-1]])

    # Make a colorbar
    #colorbar()

    # Draw contours
    if (showContour == True):
        contour(probDist,
                levels,
                origin=None,
                colors='black',
                extent=[vzbins[0], vzbins[-1], r0bins[0], r0bins[-1]])

    # Stretch axes
    axis('tight')
    #axis([-120, 80, 6, 11])

    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'V$_z$ (km/sec)', font)
    ylabel(r'R$_o$ (kpc)', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp(thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp(thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
Example No. 14
def plotComparePos(bincnt=50, outfile='bhComparePos',
                   file1=root+'08_02_16/MCdir/bias_cent_vel3d/mc_zero.log',
                   file2=root+'08_02_16/MCdir_16/bias_cent_vel/mc_zero.log',
                   sgra=root+'08_02_16/points/SgrA.deep_lgsao.points'):
    """
    Plot a 2D histogram of x0 and y0 values output by an efit monte
    carlo. Saves the plot to an EPS file. You must pass in the file name
    containing the simulation results. Parameters are:

    outfile:  Root name of the file to save the plot to (def=bhComparePos)
    file1:    Name of the file containing the efit output for S0-2.
              By default this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/08_02_16/MCdir/bias_cent_vel3d/mc_zero.log
    file2:    Name of the file containing the efit output for S0-16.
              By default this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/08_02_16/MCdir_16/bias_cent_vel/mc_zero.log
    sgra:     The points file for Sgr A*. By default this is set to
              /net/uni/Groups/ghez/ghez/analysis/Ro/08_02_16/points/SgrA.deep_lgsao.points
    bincnt:   Number of bins across the axes in the histogram (def=50)
    """

    # Read in the efit monte carlo output file. Reading in this way
    # organizes stuff by column (e.g. table[0] = first column)
    table1 = asciidata.open(file1)
    table2 = asciidata.open(file2)

    # Make things into arrays of floats, etc.
    x01 = table1[1].tonumpy()  # in pix
    y01 = table1[2].tonumpy()  # in pix
    x02 = table2[1].tonumpy()  # in pix
    y02 = table2[2].tonumpy()  # in pix

    ##########
    #
    # Make 2D histogram
    #
    ##########
    (hist1, x0bins1, y0bins1) = h2d.histogram2d(x01, y01, bins=(bincnt, bincnt))
    (hist2, x0bins2, y0bins2) = h2d.histogram2d(x02, y02, bins=(bincnt, bincnt))

    # Need to convert the 2d histogram into floats
    probDist1 = array(hist1, dtype=float)
    probDist2 = array(hist2, dtype=float)

    # We can turn the histogram into a probability distribution
    # just by dividing by the total number of trials
    probDist1 /= float(len(x01))
    probDist2 /= float(len(x02))

    # Find the dynamical center based on S0-2's orbit
    maxPix = probDist1.flatten().argmax()
    sgraDynX = x0bins1[maxPix % probDist1.shape[1]]
    sgraDynY = y0bins1[maxPix / probDist1.shape[1]]
    #scale = 9.950 # mas/pixel
    scale = 1000.0 # mas/pixel
    print 'Using Dynamical Center as Reference (mas): x = %5.1f; y = %5.1f' % \
          (sgraDynX*scale, sgraDynY*scale)
    print 'Using plate scale of %8.3f mas/pixel' % (scale)

    # Get contour levels.
    levels1 = getContourLevels(probDist1)
    levels2 = getContourLevels(probDist2)


    ##########
    #
    # Get photometric positions of Sgr A*
    #
    ##########
    sgraTable = asciidata.open(sgra)
    sgraX = sgraTable[1].tonumpy()
    sgraY = sgraTable[2].tonumpy()
    sgraXerr = sgraTable[3].tonumpy()
    sgraYerr = sgraTable[4].tonumpy()

    ##########
    #
    # Convert everything into arcsec offset from the
    # dynamical center (set by S0-2's orbit).
    #
    ##########
    #sgraX = (sgraX - sgraDynX) * -scale
    #sgraY = (sgraY - sgraDynY) * scale
    #x0bins1 = (x0bins1 - sgraDynX) * -scale
    #x0bins2 = (x0bins2 - sgraDynX) * -scale
    #y0bins1 = (y0bins1 - sgraDynY) * scale
    #y0bins2 = (y0bins2 - sgraDynY) * scale
    
    sgraX = (sgraX) * -scale
    sgraY = (sgraY) * scale
    x0bins1 = (x0bins1) * -scale
    x0bins2 = (x0bins2) * -scale
    y0bins1 = (y0bins1) * scale
    y0bins2 = (y0bins2) * scale
    
    ##########
    #
    # Plotting
    #
    ##########
    rc('text', usetex=True)

    # Clear the plot
    clf()
    subplots_adjust(left=0.16, bottom=0.12, right=0.95, top=0.92)

    # Plot SgrA*-Radio
    an = linspace(0, 2*pi, 100)
    plot(3.8*cos(an), 4.2*sin(an), 'k--')
    plot(2.*3.8*cos(an), 2.*4.2*sin(an), 'k--')
    plot(3.*3.8*cos(an), 3.*4.2*sin(an), 'k--')

    # Draw the probability contours for each star
    c1 = contour(probDist1, levels1, origin=None, colors='blue',
                 extent=[x0bins1[0], x0bins1[-1], y0bins1[0], y0bins1[-1]])
    c2 = contour(probDist2, levels2, origin=None, colors='red',
                 extent=[x0bins2[0], x0bins2[-1], y0bins2[0], y0bins2[-1]])

    errorbar(sgraX, sgraY, xerr=sgraXerr, yerr=sgraYerr, fmt='k.')


    # Draw labels using latex by putting "r" before the string
    font = {'fontname': 'Sans', 'fontsize': 20}
    xlabel(r'$\Delta$RA Offset (mas)', font)
    ylabel(r'$\Delta$Dec. Offset (mas)', font)

    # Set the label axis formatting.
    thePlot = gca()
    setp( thePlot.get_xticklabels(), fontsize=16, fontweight='bold')
    setp( thePlot.get_yticklabels(), fontsize=16, fontweight='bold')

    legend((c1, c2), ('S0-2', 'S0-16'))
    legText = gca().get_legend().get_texts()
    setp( legText[0], color='b')
    setp( legText[1], color='r')

    # Stretch axes
    axis('equal')
    rng = axis()
    xlim(rng[1], rng[0])

    savefig(outfile + '.eps')
    savefig(outfile + '.png')

    # Turn off LaTeX processing
    rc('text', usetex=False)
Example No. 15
def process_files(fn):
    drive_files = {}  # index of drives
    all_parts = []
    all_files = []
    files_by_md5 = {}  # a dictionary of sets of fiobject, indexed by md5
    extension_len_histogram = histogram2d()
    extension_fragments_histogram = histogram2d()
    partition_histogram = histogram2d()

    def cb(fi):
        # add the md5 to the set
        if fi.is_file() and fi.filesize():
            # setdefault stores the new set in the dict; get() would discard it
            files_by_md5.setdefault(fi.md5, set()).add(fi)
            ext = fi.ext()
            if not ext: print fi.meta_type(), fi
            extension_len_histogram.add(ext, fi.filesize())
            extension_fragments_histogram.add(ext, fi.fragments())
            partition_histogram.add(fi.partition(), fi.filesize())

    if fn.endswith('xml'):
        fiwalk.fiwalk_using_sax(xmlfile=open(fn), callback=cb)
    else:
        fiwalk.fiwalk_using_sax(imagefile=open(fn), callback=cb)

    #
    # Typeset the information
    #

    tab = ttable()
    tab.header = "File extension popularity and average size (suppressing 0-len files)"
    tab.col_headings = [['Ext', 'Count', 'Average Size', 'Max', 'Std Dev']]
    tab.omit_row = [[0, '']]
    extension_len_histogram.statcol = ['iaverage', 'maxx', 'istddev']
    print extension_len_histogram.typeset(tab=tab)

    #
    # Information about fragmentation patterns
    #
    tab = ttable()
    tab.header = "Fragmentation pattern by file system and file type:"
    tab.col_headings = [['Ext', 'Count', 'Average Size', 'Max', 'Std Dev']]
    tab.omit_row = [[0, '']]
    extension_fragments_histogram.statcol = ['iaverage', 'maxx', 'istddev']
    print extension_fragments_histogram.typeset(tab=tab)
    exit(0)

    for fstype in fstypes:
        for ftype in ['jpg', 'pdf', 'doc', 'txt']:
            len1stats = statbag()
            len2stats = statbag()
            delta_hist = histogram()
            delta_re = re.compile("(\d+)\-?(\d+)? ?(\d+)\-?(\d+)?")
            for i in filter((lambda
                             (f): f.ext() == ftype and f.fragments == 2),
                            all_files):
                runs = False
                if (hasattr(i, 'block_runs')): runs = i.block_runs
                if (hasattr(i, 'sector_runs')): runs = i.sector_runs
                if not runs: continue
                m = delta_re.search(runs)
                r = []
                for j in range(1, 5):
                    try:
                        r.append(int(m.group(j)))
                    except TypeError:
                        r.append(int(m.group(j - 1)))

                len1 = r[1] - r[0] + 1
                len2 = r[3] - r[2] + 1
                delta = r[2] - r[1]

                len1stats.addx(len1)
                len2stats.addx(len2)
                delta_hist.add(delta)

            if len1stats.count() > 0:
                print "\n\n"
                print "fstype:", fstype, "  ftype:", ftype
                print "len1 average: %f stddev: %f" % (len1stats.average(),
                                                       len1stats.stddev())
                print "len2 average: %f stddev: %f" % (len2stats.average(),
                                                       len2stats.stddev())
                print "delta average: %f" % delta_hist.average()
                print "delta histogram:"
                delta_hist.print_top(10)
Example No. 16
def ImageIntegrate(image, data, masks, blkSize=128, dlg=None, returnN=False):
    'Needs a doc string'  #for q, log(q) bins need data['binType']
    import histogram2d as h2d
    print 'Begin image integration'
    LUtth = np.array(data['IOtth'])
    LRazm = np.array(data['LRazimuth'], dtype=np.float64)
    numAzms = data['outAzimuths']
    numChans = data['outChannels']
    azmOff = data['azmthOff']
    Dazm = (LRazm[1] - LRazm[0]) / numAzms
    if 'log(q)' in data['binType']:
        lutth = np.log(4. * np.pi * npsind(LUtth / 2.) / data['wavelength'])
    elif 'q' == data['binType']:
        lutth = 4. * np.pi * npsind(LUtth / 2.) / data['wavelength']
    elif '2-theta' in data['binType']:
        lutth = LUtth
    dtth = (lutth[1] - lutth[0]) / numChans
    muT = data['SampleAbs'][0]
    if 'SASD' in data['type']:
        muT = -np.log(muT) / 2.  #Transmission to 1/2 thickness muT
    NST = np.zeros(shape=(numAzms, numChans), order='F', dtype=np.float32)
    H0 = np.zeros(shape=(numAzms, numChans), order='F', dtype=np.float32)
    imageN = len(image)
    Nx, Ny = data['size']
    nXBlks = (Nx - 1) / blkSize + 1
    nYBlks = (Ny - 1) / blkSize + 1
    Nup = nXBlks * nYBlks * 3 + 3
    tbeg = time.time()
    Nup = 0
    if dlg:
        dlg.Update(Nup)
    times = [0, 0, 0, 0, 0]
    for iBlk in range(nYBlks):
        iBeg = iBlk * blkSize
        iFin = min(iBeg + blkSize, Ny)
        for jBlk in range(nXBlks):
            jBeg = jBlk * blkSize
            jFin = min(jBeg + blkSize, Nx)
            # next is most expensive step!
            TA, tam = Make2ThetaAzimuthMap(
                data, masks, (iBeg, iFin), (jBeg, jFin),
                times)  #2-theta & azimuth arrays & create position mask
            Nup += 1
            if dlg:
                dlg.Update(Nup)
            Block = image[iBeg:iFin, jBeg:jFin]
            t0 = time.time()
            tax, tay, taz, tad, tabs = Fill2ThetaAzimuthMap(
                masks, TA, tam, Block)  #and apply masks
            times[2] += time.time() - t0
            Nup += 1
            if dlg:
                dlg.Update(Nup)
            tax = np.where(tax > LRazm[1], tax - 360.,
                           tax)  #put azm inside limits if possible
            tax = np.where(tax < LRazm[0], tax + 360., tax)
            if data['SampleAbs'][1]:
                if 'PWDR' in data['type']:
                    muR = muT * (1. + npsind(tax)**2 / 2.) / (npcosd(tay))
                    tabs = G2pwd.Absorb(data['SampleShape'], muR, tay)
                elif 'SASD' in data[
                        'type']:  #assumes flat plate sample normal to beam
                    tabs = G2pwd.Absorb('Fixed', muT, tay)
            if 'log(q)' in data['binType']:
                tay = np.log(4. * np.pi * npsind(tay / 2.) /
                             data['wavelength'])
            elif 'q' == data['binType']:
                tay = 4. * np.pi * npsind(tay / 2.) / data['wavelength']
            t0 = time.time()
            if any([tax.shape[0], tay.shape[0], taz.shape[0]]):
                NST, H0 = h2d.histogram2d(len(tax), tax, tay, taz * tad / tabs,
                                          numAzms, numChans, LRazm, lutth,
                                          Dazm, dtth, NST, H0)
            times[3] += time.time() - t0
            Nup += 1
            if dlg:
                dlg.Update(Nup)
    t0 = time.time()
    NST = np.array(NST, dtype=np.float)
    H0 = np.divide(H0, NST)
    H0 = np.nan_to_num(H0)
    H2 = np.array(
        [tth for tth in np.linspace(lutth[0], lutth[1], numChans + 1)])
    if 'log(q)' in data['binType']:
        H2 = 2. * npasind(np.exp(H2) * data['wavelength'] / (4. * np.pi))
    elif 'q' == data['binType']:
        H2 = 2. * npasind(H2 * data['wavelength'] / (4. * np.pi))
    if Dazm:
        H1 = np.array(
            [azm for azm in np.linspace(LRazm[0], LRazm[1], numAzms + 1)])
    else:
        H1 = LRazm
    H0 /= npcosd(H2[:-1])  #**2? I don't think so, **1 is right for powders
    if 'SASD' in data['type']:
        H0 /= npcosd(H2[:-1])  #one more for small angle scattering data?
    if data['Oblique'][1]:
        H0 /= G2pwd.Oblique(data['Oblique'][0], H2[:-1])
    if 'SASD' in data['type'] and data['PolaVal'][1]:
        #NB: in G2pwd.Polarization azm is defined from plane of polarization, not image x axis!
        H0 /= np.array([
            G2pwd.Polarization(data['PolaVal'][0], H2[:-1], Azm=azm - 90.)[0]
            for azm in (H1[:-1] + np.diff(H1) / 2.)
        ])
    Nup += 1
    if dlg:
        dlg.Update(Nup)
    times[4] += time.time() - t0
    print 'Step times: \n apply masks  %8.3fs xy->th,azm   %8.3fs fill map     %8.3fs \
        \n binning      %8.3fs cleanup      %8.3fs' % (
        times[0], times[1], times[2], times[3], times[4])
    print "Elapsed time:", "%8.3fs" % (time.time() - tbeg)
    print 'Integration complete'
    if returnN:  #As requested by Steven Weigand
        return H0, H1, H2, NST
    else:
        return H0, H1, H2
Example No. 17
        these_parts = filter(isfstype,all_parts)
        these_files = []
        for part in these_parts:
            these_files.extend(part.files)
        print fragmentation_table(these_files)

    
    exit(0)

    sys.exit(0)


    #
    # Typeset information about file extensions
    #
    hist_exts = histogram2d()
    hist_exts.topn = 20
    for i in all_files:
        if i.size>0 and i.fragments>0: hist_exts.add(i.ext(),i.size)
    tab = table()
    tab.header     = "File extension popularity and average size (suppressing 0-len files)"
    tab.col_headings = ['Ext','Count','Average Size','Max','Std Dev']
    tab.omit_row = [[0,'']]
    hist_exts.statcol = ['iaverage','maxx','istddev']
    print hist_exts.typeset(t=tab)

    hist_exts = histogram2d()
    hist_exts.topn = 20
    for i in all_files:
        if i.fragments>0: hist_exts.add(i.ext(),i.fragments)
    tab = table()