Example #1
def roiEnergyAnalysis(data):
    '''Troubleshooting function: compares the summed cluster energy observed in each ROI to the
    generator-level energy, computed as gen pt * cosh(gen eta).'''
    genEnergies = [] 
    sumEnergies = []
    pbar = progressbar("Processing event &count&:", len(data)+1)
    pbar.start()
    count = 0
    for event in data:
        genEnergy = event[2]['getpt'] * np.cosh(event[2]['geneta'])         
        for i in range(len(genEnergy)):
            clustersIndices = np.compress(event[1]['ROI'] == i, event[1]['clusterID'], axis=0)      #|Only take clusters corresponding to the right ROI
            clusterEnergies = []
            for clusterID in clustersIndices:                                                       #|Only take hits corresponding to the correct cluster
                hits = np.compress(event[0]['clusterID'] == clusterID, event[0], axis=0) 
                energies = hits['en'] 
                for energy in energies: 
                    clusterEnergies.append(energy)                                                  #|Add the energy to the cluster energies
            ROIEnergy = np.sum(clusterEnergies)
            # Append to original lists
            genEnergies.append(genEnergy[i])
            sumEnergies.append(ROIEnergy)
        pbar.update(count)
        count += 1
    pbar.finish()
    # np.save("sums.npy", sumEnergies)
    # np.save("gens.npy", genEnergies)
    # Plot it
    Plotter.sumEnergyVsGenEnergy(sumEnergies, genEnergies) 
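
A minimal usage sketch (not from the original source): load an event array previously produced by process() (see Example #10) and saved with np.save, then run the comparison. The file name is a placeholder.

import numpy as np

data = np.load("Data/500GeVPhoton.npy")     #|Placeholder .npy event array produced by process()
roiEnergyAnalysis(data)                     #|Plots summed ROI cluster energy against generator-level energy
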
Example #2
def energyResolution((directory, f, threadID)):
    '''Multiprocessed function for analysing resolution as a function of energy.'''
    smearingValue = 50
    writer        = Writer((0,2*threadID+2))
    pbar          = progressbar("Loading %s" % f, 1, fd=writer)
    pbar.start()
    pbar.update(0)
    data          = np.load(os.path.expanduser(directory+f))
    data          = timeSmearing(data, smearingValue)
    pbar.finish()
    name          = "{0:22s} &count&: ".format(str(f))
    # en            = f.split("_")[-1].split(".")[0]
    pbar          = progressbar(name, len(data) + 1, fd=writer)
    pbar.start()
    diffs         = vertexData(data, pbar=pbar, quiet=True)
    pbar.finish()
    outfile       = os.path.expanduser("Data/Processed/50ps/50ps"+str(f[:-4]) + "_tVertexed.npy")
    np.save(outfile, np.multiply(10,diffs))
Example #3
def energyResolution((directory, f, threadID)):
    '''Multiprocessed function for analysing resolution as a function of energy.'''
    smearingValue = 50
    writer = Writer((0, 2 * threadID + 2))
    pbar = progressbar("Loading %s" % f, 1, fd=writer)
    pbar.start()
    pbar.update(0)
    data = np.load(os.path.expanduser(directory + f))
    data = timeSmearing(data, smearingValue)
    pbar.finish()
    name = "{0:22s} &count&: ".format(str(f))
    en = f.split("_")[-1].split(".")[0]
    pbar = progressbar(name, len(data) + 1, fd=writer)
    pbar.start()
    diffs = tVertexData(data, pbar=pbar, quiet=True)
    pbar.finish()
    outfile = os.path.expanduser("Data/Processed/50ps/50ps" + str(f[:-4]) +
                                 "_tVertexed.npy")
    np.save(outfile, np.multiply(10, diffs))
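
Because energyResolution takes its arguments as a single tuple (Python 2 tuple-parameter unpacking), it maps cleanly onto multiprocessing.Pool. The driver below is a sketch under that assumption, not the original caller; the directory and file names are placeholders.

import multiprocessing

if __name__ == '__main__':
    directory = "Data/"                                                    #|Placeholder directory of processed .npy event files
    files     = ["photon_50GeV.npy", "photon_100GeV.npy"]                  #|Placeholder file names
    args      = [(directory, f, threadID) for threadID, f in enumerate(files)]
    pool      = multiprocessing.Pool(processes=len(files))
    pool.map(energyResolution, args)                                       #|Each worker unpacks its own (directory, f, threadID) tuple
    pool.close()
    pool.join()
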
Example #4
def singleThreadEnergyResolution(directory, files):
    '''Single-threaded function for analysing resolution as a function of energy.'''
    dataSets = []
    for f in files:
        print "Loading %s..." % f 
        dataSets.append(np.load(os.path.expanduser(directory+f)))
    totalLength = 0
    for entry in dataSets:
        totalLength += len(entry)

    for i, (f, data) in enumerate(zip(files, dataSets)):                                            #|Pair each dataset with its file name so each output is saved under the correct name
        print "Processing %i of %i files..." % (i, len(dataSets))
        pbar  = progressbar("Computing &count&:", totalLength + 1)
        pbar.start()
        diffs = vertexData(data, pbar=pbar, quiet=True)
        np.save(directory+str(f[:-5]) + "_tVertexed.npy", np.multiply(10,diffs))
        pbar.finish() 
        print "Finished; closing file %s.\n" % f
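
A minimal driver sketch for the single-threaded variant; the directory and file names are placeholders for .npy event files produced by process().

directory = "Data/Processed/"                           #|Placeholder directory
files     = ["photon_50GeV.npy", "photon_100GeV.npy"]   #|Placeholder file names
singleThreadEnergyResolution(directory, files)          #|Writes <file>_tVertexed.npy next to each input
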
Example #5
def timeSmearingTest():
    '''Runs the resolution analysis over a range of timing-smearing values on the available data.'''
    data           = np.load("Data/500GeVPhoton.npy")
    res            = 50
    diffsList      = np.array([])
    smearingValues = np.array([])
    num            = 0
    pbar           = progressbar("Computing &count&:", res*len(data))
    pbar.start()
    for smearingVal in np.linspace(0,50,res):
        data           = np.load("Data/500GeVPhoton.npy")                                           #|Quick and dirty reload; numpy was behaving unexpectedly without it
        diffs          = vertexData(timeSmearing(data, smearingVal), runNumber=num, pbar=pbar, quiet=True)
        diffsList      = np.append(diffsList, diffs)
        smearingValues = np.append(smearingValues, np.repeat(smearingVal, len(diffs)))    
        num            += 1
    pbar.finish()
    np.save("diffs.npy", np.multiply(10,diffsList))
    np.save("vals.npy", smearingValues)
    Plotter.tVertexErrorHist2D(np.multiply(10,diffsList), smearingValues)
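
Since the scan writes its results to diffs.npy and vals.npy, the 2D error histogram can be regenerated later without recomputing; a small sketch, assuming the same Plotter module used above is importable:

import numpy as np

diffsList      = np.load("diffs.npy")                   #|Already scaled by a factor of 10 when saved above
smearingValues = np.load("vals.npy")
Plotter.tVertexErrorHist2D(diffsList, smearingValues)   #|Re-render the histogram from the saved arrays
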
Example #6
def singleThreadEnergyResolution(directory, files):
    '''Single-threaded function for analysing resolution as a function of energy.'''
    dataSets = []
    for f in files:
        print "Loading %s..." % f
        dataSets.append(np.load(os.path.expanduser(directory + f)))
    totalLength = 0
    for entry in dataSets:
        totalLength += len(entry)

    for i, (f, data) in enumerate(zip(files, dataSets)):  #|Pair each dataset with its file name so each output is saved under the correct name
        print "Processing %i of %i files..." % (i, len(dataSets))
        pbar = progressbar("Computing &count&:", totalLength + 1)
        pbar.start()
        diffs = tVertexData(data, pbar=pbar, quiet=True)
        np.save(directory + str(f[:-5]) + "_tVertexed.npy",
                np.multiply(10, diffs))
        pbar.finish()
        print "Finished; closing file %s.\n" % f
Example #7
def timeSmearingTest():
    '''Runs the resolution analysis over a range of timing-smearing values on the available data.'''
    data = np.load("Data/500GeVPhoton.npy")
    res = 50
    diffsList = np.array([])
    smearingValues = np.array([])
    num = 0
    pbar = progressbar("Computing &count&:", res * len(data))
    pbar.start()
    for smearingVal in np.linspace(0, 50, res):
        data = np.load("Data/500GeVPhoton.npy")  #|Quick and dirty reload; numpy was behaving unexpectedly without it
        diffs = tVertexData(timeSmearing(data, smearingVal),
                            runNumber=num,
                            pbar=pbar,
                            quiet=True)
        diffsList = np.append(diffsList, diffs)
        smearingValues = np.append(smearingValues,
                                   np.repeat(smearingVal, len(diffs)))
        num += 1
    pbar.finish()
    np.save("diffs.npy", np.multiply(10, diffsList))
    np.save("vals.npy", smearingValues)
    Plotter.tVertexErrorHist2D(np.multiply(10, diffsList), smearingValues)
Example #8
def showerAnimator(
    hits, eventNo, title, clusterID=-1, delete=False, frames=30, endLoop=0, projections=True, transparency=False
):
    """
    Usage: showerAnimator(recordedHits, eventToAnalyse, plotTitle, clusterToAnalyse=None, 
                          deleteFramesOnFinish=False, enableMatplotlibTransparency=False)

    Plots an animated 5D (x,y,z,t,energy) graph over time and renders it to an animated gif.
    If you want to make plots like this, I've also created a cleaned-up version of this code 
    and put it on GitHub as an easily-callable package. (github.com/bencbartlett/Animator5D)
    """
    # Set up figure
    fig = plt.figure()
    ax = fig.add_subplot(111, projection="3d")
    import matplotlib.cm as cm

    # Parse data for proper cluster ID and existence of timing data
    if clusterID >= 0:
        hits = np.extract(
            np.logical_and(hits["clusterID"] == clusterID, hits["t"] > 0), hits
        )  # |Select only the rechits corresponding to the given cluster and with time data
    else:
        hits = np.extract(hits["t"] > 0, hits)  # |Only include timing data points

    # Further data parsing
    hits = np.sort(hits, order=["tofT"])
    xh, yh, zh, cth, Eh = hits["x"], hits["y"], hits["z"], hits["tofT"], hits["en"]  # |Separating hits
    maxEh = np.max(Eh)
    xmin, xmax = min(xh), max(xh)  # |Get and set limits
    ymin, ymax = min(yh), max(yh)
    zmin, zmax = min(zh), max(zh)
    xc = np.mean([xmin, xmax])  # |x and y centroids, so we can keep scale proportional on all axes
    yc = np.mean([ymin, ymax])
    zd = zmax - zmin

    # Set limits and labels
    ax.set_zlim(
        xc - zd / 2, xc + zd / 2
    )  # |Not a mistake, the reversal of the xyz pairing is so that z appears horizontally on the plot.
    ax.set_ylim(yc - zd / 2, yc + zd / 2)
    ax.set_xlim(zmin, zmax)
    ax.set_zlabel("x (cm)")  # |Label stuff
    ax.set_ylabel("y (cm)")
    ax.set_xlabel("Beamline (cm)")

    # Clear out existing output
    path = "Plots/ShowerAnimations/" + title + "/Event" + str(eventNo)
    if os.path.exists(path):  # |Remove previous output
        shutil.rmtree(path)
        os.makedirs(path + "/frames")
    else:
        os.makedirs(path + "/frames")

    # Set up animation
    count = 1
    t = t0 = np.min(cth)
    maxt = np.max(cth)
    tstep = (maxt - t) / float(frames)  # |Time step in ns
    title = ax.text2D(0.3, 1.0, "Shower simulation", transform=ax.transAxes, size="large")
    colorcal = ax.scatter([0, 0], [0, 0], [0, 0], c=[0, maxEh + 1], cmap=cm.jet)
    cbar = fig.colorbar(colorcal, shrink=0.7)
    pbar = progressbar("Rendering &count& frames:", int((maxt - t0) / tstep) + 1)
    cbar.set_label("Energy (MIPs)")
    pbar.start()

    # Render frames
    while t <= maxt:
        mask = np.logical_and(t < cth, cth <= (t + tstep))  # |What to plot in this time step
        xplt = np.extract(mask, xh)
        yplt = np.extract(mask, yh)
        zplt = np.extract(mask, zh)
        Eplt = np.extract(mask, Eh)
        txt = ax.text2D(0.1, 0.9, "$t=%.3f$ns" % t, transform=ax.transAxes)
        cx = np.ones_like(xplt) * ax.get_zlim3d()[0]  # |Again, not a typo with mixing x and z
        cy = np.ones_like(yplt) * ax.get_ylim3d()[1]
        cz = np.ones_like(zplt) * ax.get_xlim3d()[0]
        mark = ax.scatter(
            zplt, yplt, xplt, c=Eplt, cmap=cm.jet, vmin=0, vmax=maxEh, s=100 * Eplt / maxEh, marker=",", lw=1
        )
        if projections:
            ax.scatter(
                zplt, yplt, cx, c="#444444", marker=",", lw=0, s=100 * Eplt / maxEh, alpha=0.3
            )  # |Plot the projections
            ax.scatter(zplt, cy, xplt, c="#444444", marker=",", lw=0, s=100 * Eplt / maxEh, alpha=0.3)
            ax.scatter(cz, yplt, xplt, c="#444444", marker=",", lw=0, s=100 * Eplt / maxEh, alpha=0.3)
        if not transparency:
            mark.set_edgecolors = (
                mark.set_facecolors
            ) = lambda *args: None  # |Super-hacky way to disable transparency in the 3D plot, makes it cleaner to read.

        plt.draw()
        filename = path + "/frames/" + str(count).zfill(3) + ".gif"
        plt.savefig(filename)  # |Save the frame
        txt.remove()
        pbar.update(count)
        count += 1
        t += tstep

    pbar.finish()

    if endLoop:
        print "Copying tail frames..."
    for i in xrange(endLoop):
        shutil.copyfile(filename, filename[:-7] + str(count).zfill(3) + ".gif")
        count += 1

    # Combine frames to animated gif
    print "Combining frames..."
    import subprocess

    args = [
        "convert",
        "-delay",
        ".1",
        "-loop",
        "0",
        path + "/frames/*.gif",
        path + "/Shower.gif",
    ]  # |This part requires ImageMagick to function
    print "Saved to path " + str(os.path.abspath(path)) + "/Shower.gif"
    subprocess.check_call(args)

    # Delete frames if told to do so
    if delete:
        shutil.rmtree(path + "/frames")
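
A usage sketch matching the signature above, assuming the data was produced by process() so that data[eventNo][0] is the RecHits recarray carrying the 'tofT', 'en', and 'clusterID' fields this function reads. The file name is a placeholder.

import numpy as np

eventNo = 7
data    = np.load("Data/500GeVPhoton.npy")                             #|Placeholder .npy event array produced by process()
showerAnimator(data[eventNo][0], eventNo, "500GeVPhoton", clusterID=0,
               frames=30, endLoop=10, delete=True)                     #|Writes Plots/ShowerAnimations/500GeVPhoton/Event7/Shower.gif
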
Example #9
def layerVarianceAnalysis(
    data, eventNo, title, mode=None, clusterID=0, rng=None, endLoop=0, numEvents=1, delete=False, cumulative=False
):
    """
    Plots an evolving 2D (eta, phi) histogram of the hits, layer by layer, and renders it to an animated gif.
    mode selects the energy weighting of the histogram and must be 'log', 'linear', or 'flat'.
    Warning: running this in cumulative mode on very large datasets can make you run out of RAM.
    """
    # Parse data for proper cluster ID and existence of timing data
    if not cumulative:
        hits = np.extract(
            data[eventNo][0]["clusterID"] == clusterID, data[eventNo][0]
        )  # |Select only the rechits corresponding to the given cluster
        # Further data parsing
        xh, yh, zh, Eh = hits["x"], hits["y"], hits["z"], hits["en"]  # |Separating hits
        center = [data[eventNo][1]["eta"][clusterID], data[eventNo][1]["phi"][clusterID]]
        eta, phi = XYZtoEtaPhi([xh, yh, zh])
        layers = hits["layerID"]
        if mode == "log":
            repeats = np.ceil(np.log(Eh)).astype(int)
            etaw = np.repeat(eta, repeats)  # |Energy-weighted eta and phi values
            phiw = np.repeat(phi, repeats)
            layersw = np.repeat(layers, repeats)
        elif mode == "linear":
            repeats = np.ceil(Eh).astype(int)
            etaw = np.repeat(eta, repeats)  # |Energy-weighted eta and phi values
            phiw = np.repeat(phi, repeats)
            layersw = np.repeat(layers, repeats)
        elif mode == "flat":
            etaw, phiw, layersw = eta, phi, layers
    else:
        center = [0, 0]
        eta = np.array([])
        phi = np.array([])
        etaw = np.array([])
        phiw = np.array([])
        layers = np.array([])
        layersw = np.array([])
        Eh = np.array([])
        k = 0
        pbar = progressbar("Processing &count& events:", len(data) + 1)
        pbar.start()
        for event in data:
            hits = np.extract(
                event[0]["clusterID"] == clusterID, event[0]
            )  # |Select only the rechits corresponding to the given cluster
            # Further data parsing
            xh, yh, zh, EhE = hits["x"], hits["y"], hits["z"], hits["en"]  # |Separating hits
            etaE, phiE = XYZtoEtaPhi([xh, yh, zh])
            layersE = hits["layerID"]
            for i in range(len(hits)):
                clusterID = hits[i]["clusterID"]
                etaE[i] -= event[1]["eta"][clusterID]
                phiE[i] -= event[1]["phi"][clusterID]
                if phiE[i] > 3.0:
                    phiE[i] -= 2 * np.pi  # |This fixes some modular issues we were having, with -epsilon = 2pi-epsilon
                elif phiE[i] < -3.0:
                    phiE[i] += 2 * np.pi
            if mode == "log":
                repeats = np.ceil(np.log(EhE)).astype(int)
                etawE = np.repeat(etaE, repeats)  # |Energy-weighted eta and phi values
                phiwE = np.repeat(phiE, repeats)
                layerswE = np.repeat(layersE, repeats)
            elif mode == "linear":
                repeats = np.ceil(EhE).astype(int)
                etawE = np.repeat(etaE, repeats)  # |Energy-weighted eta and phi values
                phiwE = np.repeat(phiE, repeats)
                layerswE = np.repeat(layersE, repeats)
            elif mode == "flat":
                etawE, phiwE, layerswE = etaE, phiE, layersE
            eta = np.append(eta, etaE)
            phi = np.append(phi, phiE)
            etaw = np.append(etaw, etawE)
            phiw = np.append(phiw, phiwE)
            layers = np.append(layers, layersE)
            layersw = np.append(layersw, layerswE)
            Eh = np.append(Eh, EhE)
            k += 1
            pbar.update(k)
        pbar.finish()
    #     print "Saving array..."
    #     np.save("Data/etaPhiProcessed.npy", [eta, phi, etaw, phiw, layers, layersw, Eh])
    # eta, phi, etaw, phiw, layers, layersw, Eh = np.load("Data/etaPhiProcessed.npy")
    # center = [0,0]
    # Set plot ranges
    if rng is None:
        pltrange = np.array([(np.min(eta), np.max(eta)), (np.min(phi), np.max(phi))])
    else:
        pltrange = rng
    # Clear out existing output
    path = "Plots/LayerHitAnimations/" + title + "/Event" + str(eventNo)
    if os.path.exists(path):  # |Remove previous output
        shutil.rmtree(path)
        os.makedirs(path + "/frames")
    else:
        os.makedirs(path + "/frames")
    # Set up animation
    minlayer = 1  # |Minimum layer
    maxlayer = 30  # |Maximum layer
    count = minlayer
    pbar = progressbar("Rendering &count& frames:", maxlayer - minlayer + 1)
    pbar.start()
    # Render frames
    while count <= maxlayer:
        etapltw = np.extract(layersw == count, etaw)
        phipltw = np.extract(layersw == count, phiw)
        etaplt = np.extract(layers == count, eta)
        phiplt = np.extract(layers == count, phi)
        # Eplt = np.extract(layers == count, Eh)
        filename = path + "/frames/" + str(count).zfill(3) + ".gif"
        if len(etaplt) > 0:
            layerVarianceFrame(
                [etapltw, phipltw],
                [etaplt, phiplt],
                count,
                center,
                rng=pltrange,
                numEvents=numEvents,
                maxE=np.max(Eh),
                xbins=50,
                ybins=50,
                saveAs=filename,
            )
        pbar.update(count)
        count += 1
    pbar.finish()
    # Render tail if needed:
    if endLoop:
        print "Copying tail frames..."
    for i in xrange(endLoop):
        shutil.copyfile(filename, filename[:-7] + str(count).zfill(3) + ".gif")
        count += 1
    # Combine frames to animated gif
    print "Combining frames..."
    import subprocess

    args = [
        "convert",
        "-delay",
        "25",
        "-loop",
        "0",
        path + "/frames/*.gif",
        path + "/Shower.gif",
    ]  # |This part requires ImageMagick to function
    print "Saved to path " + str(os.path.abspath(path)) + "/Shower.gif"
    subprocess.check_call(args)
    # Delete frames if told to do so
    if delete:
        shutil.rmtree(path + "/frames")
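
A usage sketch for cumulative mode, again assuming process()-formatted data; the file name and keyword values are illustrative.

import numpy as np

data = np.load("Data/500GeVPhoton.npy")                                #|Placeholder .npy event array produced by process()
layerVarianceAnalysis(data, 0, "500GeVPhoton", mode="linear", clusterID=0,
                      numEvents=len(data), cumulative=True)            #|Writes Plots/LayerHitAnimations/500GeVPhoton/Event0/Shower.gif
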
Example #10
def process((f, outdir, showProgress, threadID, quiet)):
	'''
	Usage: process((fileName, writeDirectory, showProgress, threadID, quiet))

	Takes an HGCROI-formatted .root file and converts it to a list of structured numpy arrays.

	The format is as follows:
		List of events
			For each event, array for various data structures (RecHits, Clusters, etc.).
			Unfortunately, there is no way to do recursive recarrays, so we use the following
			organisation method:
			eventArray[0]=RecHits
			eventArray[1]=Clusters
			eventArray[2]=ROIs
			eventArray[3]=Vertices
			eventArray[4]=GenVertex
				For each data structure array, recarray of properties

	For example:
		(Index 0 = Event 0)
			(Index 0 = RecHits)
				'x' -> [1.23, 4.25, ...]
				'y' -> [5.24, 6.42, ...]
				...
				'clusterID' -> [1, 2, 1, 1, ...]
			(Index 1 = Clusters)
				'centerX' -> [3.21, 2.56, ...]
				...
			...
		(Index 1 = Event 1)
			(Index 0 = RecHits)
				...
			(Index 1 = Clusters)
				...
			...
		...

	So, for example, to access the array of x positions in the RecHits of the 7th event,
	you would use:
		xpos = Data[7][0]['x']
	'''
	if not quiet: print "Processing file: " + str(f) + "..."                                                                                                                
	outArray   = []                                           
	fIn        = ROOT.TFile.Open(f)
	tree       = fIn.Get('analysis/HGC') 
	numentries = tree.GetEntries() 
	if showProgress:
		if threadID:
			writer = Writer((0, 2*threadID+2))
			string = "{0:22s} &count&: ".format(str(f))
			pbar   = progressbar(string, numentries + 1, fd=writer) 
		else: 
			pbar   = progressbar("Processing &count& events from %s"%str(f), numentries + 1) 
		pbar.start()                                                
	for i in xrange(0, numentries): 
		tree.GetEntry(i) 
		eventArray = []
		names      = ""
		# RecHits
		RecHits    = True                                                                           #|Store rechits
		x          = []                                                                             #|Position of the rechit
		y          = []
		z          = []
		t          = []
		tofT   	   = []                                                                             #|Time-of-flight corrected time data
		en         = []                                                                             #|Energy 
		clusterID  = []                                                                             #|What cluster the hit belongs to
		detID      = []
		layerID    = [] 
		isIn3x3    = []                                                                             #|Is in 3x3 grid from center of energy
		isIn5x5    = []                                                                             #|Is in 5x5 grid from center of energy
		isIn7x7    = []                                                                             #|Is in 7x7 grid from center of energy

		# Clusters
		Clusters   = True                                                                           #|Store clusters
		center_x_  = []                                                                             #|Center of the cluster (energy weighted)
		center_y_  = []
		center_z_  = []
		axis_x_    = []                                                                             #|Direction the cluster is pointing (looking back at the beamline from the cluster)
		axis_y_    = []
		axis_z_    = []
		ev_1_      = []                                                                             #|Eigenvalues from principal component analysis
		ev_2_      = []
		ev_3_      = []
		clusteren  = []                                                                             #|Energy
		clustereta = []                                                                             #|Eta
		clusterphi = []                                                                             #|Phi
		slfClustID = []                                                                             #|Self-referencing cluster ID
		clusterroi = []                                                                             #|ROI ID for the cluster
		# ROIs
		ROIs       = True 
		roiID      = []                                                                             #|Self-referencing ROI ID
		roieta     = []                                                                             #|Energy-weighted eta
		roiphi     = []                                                                             #|Energy-weighted phi
		roipt      = []                                                                             #|Energy-weighted pt
		roimass    = []
		roiarea    = []
		roigenpt   = []
		roigeneta  = []
		roigenphi  = []
		roigenmass = []
		roigenarea = []
		roistablex = []
		roistabley = []
		roistablez = []
		roistablID = []
		# Vertices
		Vertices   = False                                                                          #|Store vertices, won't work for photon gun
		vertex_x_  = []                                                                             #|Reconstructed vertex location using tracker 
		vertex_y_  = []
		vertex_z_  = []
		# Generated vertices ("true" vertices)
		GenVert    = True                                                                           #|Store generated vertices
		gen_x_     = []                                                                             #|Actual vertex location from simulated event
		gen_y_     = []
		gen_z_     = []

		if RecHits:
			for hit in tree.RecHits:                                                                #|Modify properties you want to extract at will.
				x        .append(hit.x_)                                                            #|Extract the position, time, and energy data from each rechit
				y        .append(hit.y_)                                                                        
				z        .append(hit.z_)                                                                        
				t        .append(hit.t_)                                                                        
				tofT     .append(hit.t_ + np.sqrt(hit.x_**2 + hit.y_**2 + hit.z_**2)/c)
				en       .append(hit.en_)
				clusterID.append(hit.clustId_)
				detID    .append(hit.detId_)
				layerID  .append(hit.layerId_)
				isIn3x3  .append(hit.isIn3x3_)
				isIn5x5  .append(hit.isIn5x5_)
				isIn7x7  .append(hit.isIn7x7_)
			recHitsArray = np.core.records.fromarrays([x, y, z, t, en, tofT, clusterID,
													   detID, layerID, isIn3x3, isIn5x5, isIn7x7],
											  names = 'x,y,z,t,en,tofT,clusterID,\
											  		   detID,layerID,isIn3x3,isIn5x5,isIn7x7')      #|Form rechit array
			eventArray.append(recHitsArray)                                                         #|Append to event array
			names += 'RecHits'                                                                      #|Add to names list
		else:
			eventArray.append([])                                                                   #|This is to keep the index of the arrays the same

		clusterindex = 0
		if Clusters:
			for cluster in tree.Clusters:
				center_x_ .append(cluster.center_x_)
				center_y_ .append(cluster.center_y_)
				center_z_ .append(cluster.center_z_)
				axis_x_   .append(cluster.axis_x_)
				axis_y_   .append(cluster.axis_y_)
				axis_z_   .append(cluster.axis_z_)
				ev_1_     .append(cluster.ev_1_)
				ev_2_     .append(cluster.ev_2_)
				ev_3_     .append(cluster.ev_3_)
				clusteren .append(cluster.en_)
				clustereta.append(cluster.eta_)
				clusterphi.append(cluster.phi_)
				slfClustID.append(clusterindex)
				clusterroi.append(cluster.roiidx_)
				clusterindex += 1
			clusterArray = np.core.records.fromarrays([center_x_, center_y_, center_z_,
													   axis_x_, axis_y_, axis_z_,
													   ev_1_, ev_2_, ev_3_, clusteren, clustereta,
													   clusterphi, slfClustID, clusterroi],
													   names = 'centerX,centerY,centerZ,\
													   			axisX,axisY,axisZ,ev1,ev2,ev3,en,\
													   			eta,phi,clusterID,ROI')             #|Form array for clusters
			eventArray.append(clusterArray)                                                         #|Append to event array
			names += ',Clusters'
		else:
			eventArray.append([])                                                                   #|This is to keep the index of the arrays the same

		ROIindex = 0
		if ROIs:
			for ROI in tree.ROIs:
				roiID     .append(ROIindex)
				roipt     .append(ROI.pt_)
				roieta    .append(ROI.eta_)
				roiphi    .append(ROI.phi_)
				roimass   .append(ROI.mass_)
				roiarea   .append(ROI.area_)
				roigenpt  .append(ROI.genpt_)
				roigeneta .append(ROI.geneta_)
				roigenphi .append(ROI.genphi_)
				roigenmass.append(ROI.genmass_)
				roigenarea.append(ROI.genarea_)
				roistablex.append(ROI.stablex_)
				roistabley.append(ROI.stabley_)
				roistablez.append(ROI.stablez_)
				roistablID.append(ROI.stableid_)
				ROIindex +=1
			ROIArray = np.core.records.fromarrays([roiID, roipt, roieta, roiphi, roimass, roiarea,
												   roigenpt, roigeneta, roigenphi, roigenmass,
												   roigenarea, roistablex, roistabley, roistablez,
												   roistablID],
												   names = 'roiID,pt,eta,phi,mass,area,getpt,\
												   			geneta,genphi,genmass,genarea,stablex,\
												   			stabley,stablez,stableID')
			eventArray.append(ROIArray)
			names += ',ROIs'
		else:
			eventArray.append([])

		if Vertices:
			for vertex in tree.Vertices:
				vertex_x_.append(vertex.x_)
				vertex_y_.append(vertex.y_)
				vertex_z_.append(vertex.z_)
			vertexArray = np.core.records.fromarrays([vertex_x_, vertex_y_, vertex_z_],
													  names='x,y,z')
			eventArray.append(vertexArray)                                                          #|Vertices array
			names += ',Vertices'
		else:
			eventArray.append([])                                                                   #|This is to keep the index of the arrays the same

		if GenVert:
			try:
				gen_x_.append(tree.GenVertex.X())                                                   #|GenVertex is not iterable like the other classes, since there is only one per event.
				gen_y_.append(tree.GenVertex.Y())
				gen_z_.append(tree.GenVertex.Z())
			except AttributeError:                                                                  #|Some sets use TLorentzVectors, some use TVector3. For some unfathomable reason, these have different capitalizations.
				gen_x_.append(tree.GenVertex.x())                                                   #|GenVertex is not iterable like the other classes, since there is only one per event.
				gen_y_.append(tree.GenVertex.y())
				gen_z_.append(tree.GenVertex.z())
			genVertexArray = np.core.records.fromarrays([gen_x_, gen_y_, gen_z_],
													  	 names='x,y,z')
			eventArray.append(genVertexArray)                                                       #|Generated vertices array
			names += ',GenVertex'
		else:
			eventArray.append([])                                                                   #|This is to keep the index of the arrays the same

		# Combine arrays for single event and append to outArray
		outArray.append(eventArray)                                                                 #|Converts to a 2D numpy structured array indexed by event number
		if showProgress: pbar.update(i)                                                             #|Update progressbar
		if (not showProgress) and (i % 100 == 0) and (not quiet): 
			print "Thread %i >> Processed %i of %i events." % (threadID, i, numentries)             #|In the case this is being multiprocessed, progressbar doesn't work well, so just print the stuff.

	# Finish up and save array to file
	if showProgress: pbar.finish()
	filename = str(f[:-5]) + ".npy"                                                                 #|Replace .root with .npy
	filename = filename.split("/")[-1]                                                              #|Removes directory prefixes
	filepath = outdir+filename
	if not quiet: print "Writing file " + os.path.abspath(filepath) + "..."
	np.save(filepath, outArray)
	if not quiet: print "Processing complete.\n"
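
A single-file usage sketch followed by the field access described in the docstring; the .root path and output directory are placeholders.

import numpy as np

process(("Data/photon_500GeV.root", "Data/Processed/", True, None, False))  #|(f, outdir, showProgress, threadID, quiet)
Data  = np.load("Data/Processed/photon_500GeV.npy")
xpos  = Data[7][0]['x']                                 #|x positions of the RecHits in event 7
roiPt = Data[7][2]['pt']                                #|Energy-weighted pt of the ROIs in event 7
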
Example #11
def process((f, outdir, showProgress, threadID, quiet)):
    '''
	Usage: process((fileName, writeDirectory, showProgress, threadID, quiet))

	Takes an HGCROI-formatted .root file and converts it to a list of structured numpy arrays.

	The format is as follows:
		List of events
			For each event, array for various data structures (RecHits, Clusters, etc.).
			Unfortunately, there is no way to do recursive recarrays, so we use the following
			organisation method:
			eventArray[0]=RecHits
			eventArray[1]=Clusters
			eventArray[2]=ROIs
			eventArray[3]=Vertices
			eventArray[4]=GenVertex
				For each data structure array, recarray of properties

	For example:
		(Index 0 = Event 0)
			(Index 0 = RecHits)
				'x' -> [1.23, 4.25, ...]
				'y' -> [5.24, 6.42, ...]
				...
				'clusterID' -> [1, 2, 1, 1, ...]
			(Index 1 = Clusters)
				'centerX' -> [3.21, 2.56, ...]
				...
			...
		(Index 1 = Event 1)
			(Index 0 = RecHits)
				...
			(Index 1 = Clusters)
				...
			...
		...

	So, for example, to access the array of x positions in the RecHits of the 7th event,
	you would use:
		xpos = Data[7][0]['x']
	'''
    if not quiet: print "Processing file: " + str(f) + "..."
    outArray = []
    fIn = ROOT.TFile.Open(f)
    tree = fIn.Get('analysis/HGC')
    numentries = tree.GetEntries()
    if showProgress:
        if threadID:
            writer = Writer((0, 2 * threadID + 2))
            string = "{0:22s} &count&: ".format(str(f))
            pbar = progressbar(string, numentries + 1, fd=writer)
        else:
            pbar = progressbar("Processing &count& events from %s" % str(f),
                               numentries + 1)
        pbar.start()
    for i in xrange(0, numentries):
        tree.GetEntry(i)
        eventArray = []
        names = ""
        # RecHits
        RecHits = True  #|Store rechits
        x = []  #|Position of the rechit
        y = []
        z = []
        t = []
        tofT = []  #|Time-of-flight corrected time data
        en = []  #|Energy
        clusterID = []  #|What cluster the hit belongs to
        detID = []
        layerID = []
        isIn3x3 = []  #|Is in 3x3 grid from center of energy
        isIn5x5 = []  #|Is in 5x5 grid from center of energy
        isIn7x7 = []  #|Is in 7x7 grid from center of energy

        # Clusters
        Clusters = True  #|Store clusters
        center_x_ = []  #|Center of the cluster (energy weighted)
        center_y_ = []
        center_z_ = []
        axis_x_ = []  #|Direction the cluster is pointing (looking back at the beamline from the cluster)
        axis_y_ = []
        axis_z_ = []
        ev_1_ = []  #|Eigenvalues from principal component analysis
        ev_2_ = []
        ev_3_ = []
        clusteren = []  #|Energy
        clustereta = []  #|Eta
        clusterphi = []  #|Phi
        slfClustID = []  #|Self-referencing cluster ID
        clusterroi = []  #|ROI ID for the cluster
        # ROIs
        ROIs = True
        roiID = []  #|Self-referencing ROI ID
        roieta = []  #|Energy-weighted eta
        roiphi = []  #|Energy-weighted phi
        roipt = []  #|Energy-weighted pt
        roimass = []
        roiarea = []
        roigenpt = []
        roigeneta = []
        roigenphi = []
        roigenmass = []
        roigenarea = []
        roistablex = []
        roistabley = []
        roistablez = []
        roistablID = []
        # Vertices
        Vertices = False  #|Store vertices, won't work for photon gun
        vertex_x_ = []  #|Reconstructed vertex location using tracker
        vertex_y_ = []
        vertex_z_ = []
        # Generated vertices ("true" vertices)
        GenVert = True  #|Store generated vertices
        gen_x_ = []  #|Actual vertex location from simulated event
        gen_y_ = []
        gen_z_ = []

        if RecHits:
            for hit in tree.RecHits:  #|Modify properties you want to extract at will.
                x.append(hit.x_)  #|Extract the position, time, and energy data from each rechit
                y.append(hit.y_)
                z.append(hit.z_)
                t.append(hit.t_)
                tofT.append(hit.t_ +
                            np.sqrt(hit.x_**2 + hit.y_**2 + hit.z_**2) / c)
                en.append(hit.en_)
                clusterID.append(hit.clustId_)
                detID.append(hit.detId_)
                layerID.append(hit.layerId_)
                isIn3x3.append(hit.isIn3x3_)
                isIn5x5.append(hit.isIn5x5_)
                isIn7x7.append(hit.isIn7x7_)
            recHitsArray = np.core.records.fromarrays(
                [
                    x, y, z, t, en, tofT, clusterID, detID, layerID, isIn3x3,
                    isIn5x5, isIn7x7
                ],
                names='x,y,z,t,en,tofT,clusterID,detID,layerID,isIn3x3,isIn5x5,isIn7x7')  #|Form rechit array
            eventArray.append(recHitsArray)  #|Append to event array
            names += 'RecHits'  #|Add to names list
        else:
            eventArray.append([])  #|This is to keep the index of the arrays the same

        clusterindex = 0
        if Clusters:
            for cluster in tree.Clusters:
                center_x_.append(cluster.center_x_)
                center_y_.append(cluster.center_y_)
                center_z_.append(cluster.center_z_)
                axis_x_.append(cluster.axis_x_)
                axis_y_.append(cluster.axis_y_)
                axis_z_.append(cluster.axis_z_)
                ev_1_.append(cluster.ev_1_)
                ev_2_.append(cluster.ev_2_)
                ev_3_.append(cluster.ev_3_)
                clusteren.append(cluster.en_)
                clustereta.append(cluster.eta_)
                clusterphi.append(cluster.phi_)
                slfClustID.append(clusterindex)
                clusterroi.append(cluster.roiidx_)
                clusterindex += 1
            clusterArray = np.core.records.fromarrays(
                [
                    center_x_, center_y_, center_z_, axis_x_, axis_y_, axis_z_,
                    ev_1_, ev_2_, ev_3_, clusteren, clustereta, clusterphi,
                    slfClustID, clusterroi
                ],
                names='centerX,centerY,centerZ,axisX,axisY,axisZ,ev1,ev2,ev3,en,eta,phi,clusterID,ROI')  #|Form array for clusters
            eventArray.append(clusterArray)  #|Append to event array
            names += ',Clusters'
        else:
            eventArray.append([])  #|This is to keep the index of the arrays the same

        ROIindex = 0
        if ROIs:
            for ROI in tree.ROIs:
                roiID.append(ROIindex)
                roipt.append(ROI.pt_)
                roieta.append(ROI.eta_)
                roiphi.append(ROI.phi_)
                roimass.append(ROI.mass_)
                roiarea.append(ROI.area_)
                roigenpt.append(ROI.genpt_)
                roigeneta.append(ROI.geneta_)
                roigenphi.append(ROI.genphi_)
                roigenmass.append(ROI.genmass_)
                roigenarea.append(ROI.genarea_)
                roistablex.append(ROI.stablex_)
                roistabley.append(ROI.stabley_)
                roistablez.append(ROI.stablez_)
                roistablID.append(ROI.stableid_)
                ROIindex += 1
            ROIArray = np.core.records.fromarrays(
                [
                    roiID, roipt, roieta, roiphi, roimass, roiarea, roigenpt,
                    roigeneta, roigenphi, roigenmass, roigenarea, roistablex,
                    roistabley, roistablez, roistablID
                ],
                names='roiID,pt,eta,phi,mass,area,getpt,geneta,genphi,genmass,genarea,stablex,stabley,stablez,stableID')
            eventArray.append(ROIArray)
            names += ',ROIs'
        else:
            eventArray.append([])

        if Vertices:
            for vertex in tree.Vertices:
                vertex_x_.append(vertex.x_)
                vertex_y_.append(vertex.y_)
                vertex_z_.append(vertex.z_)
            vertexArray = np.core.records.fromarrays(
                [vertex_x_, vertex_y_, vertex_z_], names='x,y,z')
            eventArray.append(vertexArray)  #|Vertices array
            names += ',Vertices'
        else:
            eventArray.append([])  #|This is to keep the index of the arrays the same

        if GenVert:
            try:
                gen_x_.append(tree.GenVertex.X())  #|GenVertex is not iterable like the other classes, since there is only one per event.
                gen_y_.append(tree.GenVertex.Y())
                gen_z_.append(tree.GenVertex.Z())
            except AttributeError:  #|Some sets use TLorentzVectors, some use TVector3. For some unfathomable reason, these have different capitalizations.
                gen_x_.append(tree.GenVertex.x())
                gen_y_.append(tree.GenVertex.y())
                gen_z_.append(tree.GenVertex.z())
            genVertexArray = np.core.records.fromarrays(
                [gen_x_, gen_y_, gen_z_], names='x,y,z')
            eventArray.append(genVertexArray)  #|Generated vertices array
            names += ',GenVertex'
        else:
            eventArray.append([])  #|This is to keep the index of the arrays the same

        # Combine arrays for single event and append to outArray
        outArray.append(eventArray)  #|Converts to a 2D numpy structured array indexed by event number
        if showProgress: pbar.update(i)  #|Update progressbar
        if (not showProgress) and (i % 100 == 0) and (not quiet):
            print "Thread %i >> Processed %i of %i events." % (
                threadID, i, numentries)  #|In the case this is being multiprocessed, progressbar doesn't work well, so just print the progress.

    # Finish up and save array to file
    if showProgress: pbar.finish()
    filename = str(f[:-5]) + ".npy"  #|Replace .root with .npy
    filename = filename.split("/")[-1]  #|Removes directory prefixes
    filepath = outdir + filename
    if not quiet: print "Writing file " + os.path.abspath(filepath) + "..."
    np.save(filepath, outArray)
    if not quiet: print "Processing complete.\n"
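
process() also takes its arguments as a single tuple, so a multiprocessed driver can map it over several .root files; giving each worker a non-zero threadID selects the Writer branch above, so each file gets its own progress-bar row. A sketch with placeholder file names:

import multiprocessing

if __name__ == '__main__':
    files  = ["photon_50GeV.root", "photon_100GeV.root"]                   #|Placeholder .root files
    outdir = "Data/Processed/"
    args   = [(f, outdir, True, threadID + 1, True) for threadID, f in enumerate(files)]  #|threadID 0 would fall back to the plain progress bar
    pool   = multiprocessing.Pool(processes=len(files))
    pool.map(process, args)
    pool.close()
    pool.join()
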
Example #12
def showerAnimator(hits,
                   eventNo,
                   title,
                   clusterID=-1,
                   delete=False,
                   frames=30,
                   endLoop=0,
                   projections=True,
                   transparency=False):
    '''
    Usage: showerAnimator(recordedHits, eventToAnalyse, plotTitle, clusterToAnalyse=None, 
                          deleteFramesOnFinish=False, enableMatplotlibTransparency=False)

    Plots an animated 5D (x,y,z,t,energy) graph over time and renders it to an animated gif.
    If you want to make plots like this, I've also created a cleaned-up version of this code 
    and put it on GitHub as an easily-callable package. (github.com/bencbartlett/Animator5D)
    '''
    # Set up figure
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    import matplotlib.cm as cm

    # Parse data for proper cluster ID and existence of timing data
    if clusterID >= 0:
        hits = np.extract(
            np.logical_and(hits['clusterID'] == clusterID, hits['t'] > 0), hits
        )  #|Select only the rechits corresponding to the given cluster and with time data
    else:
        hits = np.extract(hits['t'] > 0,
                          hits)  #|Only include timing data points

    # Further data parsing
    hits = np.sort(hits, order=['tofT'])
    xh, yh, zh, th, cth, Eh = hits['x'], hits['y'], hits['z'], hits['t'], hits[
        'tofT'], hits['en']  #|Separating hits
    maxEh = np.max(Eh)
    normEh = Eh / maxEh
    xmin, xmax = min(xh), max(xh)  #|Get and set limits
    ymin, ymax = min(yh), max(yh)
    zmin, zmax = min(zh), max(zh)
    xc = np.mean([
        xmin, xmax
    ])  #|x and y centroids, so we can keep scale proportional on all axes
    yc = np.mean([ymin, ymax])
    zd = zmax - zmin

    # Set limits and labels
    ax.set_zlim(
        xc - zd / 2, xc + zd / 2
    )  #|Not a mistake, the reversal of the xyz pairing is so that z appears horizontally on the plot.
    ax.set_ylim(yc - zd / 2, yc + zd / 2)
    ax.set_xlim(zmin, zmax)
    ax.set_zlabel('x (cm)')  #|Label stuff
    ax.set_ylabel('y (cm)')
    ax.set_xlabel('Beamline (cm)')

    # Clear out existing output
    path = "Plots/ShowerAnimations/" + title + "/Event" + str(eventNo)
    if os.path.exists(path):  #|Remove previous output
        shutil.rmtree(path)
        os.makedirs(path + "/frames")
    else:
        os.makedirs(path + "/frames")

    # Set up animation
    count = 1
    t = t0 = np.min(cth)
    maxt = np.max(cth)
    tstep = (maxt - t) / float(frames)  #|Time step in ns
    title = ax.text2D(.3,
                      1.0,
                      'Shower simulation',
                      transform=ax.transAxes,
                      size='large')
    colorcal = ax.scatter([0, 0], [0, 0], [0, 0],
                          c=[0, maxEh + 1],
                          cmap=cm.rainbow)
    cbar = fig.colorbar(colorcal, shrink=.7)
    pbar = progressbar("Rendering &count& frames:",
                       int((maxt - t0) / tstep) + 1)
    cbar.set_label("Energy (MIPs)")
    pbar.start()

    # Render frames
    while t <= maxt:
        mask = np.logical_and(t < cth, cth <= (t + tstep))  #|What to plot in this time step
        xplt = np.extract(mask, xh)
        yplt = np.extract(mask, yh)
        zplt = np.extract(mask, zh)
        Eplt = np.extract(mask, Eh)
        txt = ax.text2D(0.1, 0.9, '$t=%.3f$ns' % t, transform=ax.transAxes)
        cx = np.ones_like(xplt) * ax.get_zlim3d()[0]  #|Again, not a typo with mixing x and z
        cy = np.ones_like(yplt) * ax.get_ylim3d()[1]
        cz = np.ones_like(zplt) * ax.get_xlim3d()[0]
        mark = ax.scatter(zplt, yplt, xplt, c=Eplt, cmap=cm.rainbow, vmin=0, vmax=maxEh, \
                                            s=100*Eplt/maxEh, marker=',', lw=1)
        if projections:
            ax.scatter(zplt,
                       yplt,
                       cx,
                       c='#444444',
                       marker=',',
                       lw=0,
                       s=100 * Eplt / maxEh,
                       alpha=0.3)  #|Plot the projections
            ax.scatter(zplt,
                       cy,
                       xplt,
                       c='#444444',
                       marker=',',
                       lw=0,
                       s=100 * Eplt / maxEh,
                       alpha=0.3)
            ax.scatter(cz,
                       yplt,
                       xplt,
                       c='#444444',
                       marker=',',
                       lw=0,
                       s=100 * Eplt / maxEh,
                       alpha=0.3)
        if not transparency:
            mark.set_edgecolors = mark.set_facecolors = lambda *args: None  #|Super-hacky way to disable transparency in the 3D plot, makes it cleaner to read.

        plt.draw()
        filename = path + "/frames/" + str(count).zfill(3) + ".gif"
        plt.savefig(filename)  #|Save the frame
        txt.remove()
        pbar.update(count)
        count += 1
        t += tstep

    pbar.finish()

    if endLoop: print "Copying tail frames..."
    for i in xrange(endLoop):
        shutil.copyfile(filename, filename[:-7] + str(count).zfill(3) + ".gif")
        count += 1

    # Combine frames to animated gif
    print "Combining frames..."
    import subprocess
    args = ([
        'convert', '-delay', '.1', '-loop', '0', path + "/frames/*.gif",
        path + "/Shower.gif"
    ])  #|This part requires ImageMagick to function
    print "Saved to path " + str(os.path.abspath(path)) + "/Shower.gif"
    subprocess.check_call(args)

    # Delete frames if told to do so
    if delete:
        shutil.rmtree(path + "/frames")