def compute_normals_hash(sc):
    """Compute per-corner smoothed vertex normals using a position hash.

    Vertices are grouped by a hash of their coordinates (hash collisions
    are assumed not to happen — an assert guards this); the normal of each
    face corner is the normalized average of the normals of every face
    sharing that corner's position.

    Returns:
        (normals, faces_normals): ``normals`` is a flat list with one
        normal per face corner; ``faces_normals`` holds one index triple
        ``(3*i, 3*i+1, 3*i+2)`` per face, pointing into ``normals``.
    """
    triangle_normals = len(sc.faces) * [[0.0, 0.0, 0.0]]

    def _hash(p):
        # Cheap coordinate hash; renamed so the builtin ``hash`` is not shadowed.
        return 0.11234 * p[0] + 0.35678 * p[1] + 0.67257 * p[2]

    from collections import defaultdict
    pt_table = defaultdict(list)
    for i, t in enumerate(sc.faces):
        p1 = sc.points[t[0]]
        p2 = sc.points[t[1]]
        p3 = sc.points[t[2]]
        pt_table[_hash(p1)].append((i, p1, t[0]))
        pt_table[_hash(p2)].append((i, p2, t[1]))
        pt_table[_hash(p3)].append((i, p3, t[2]))
        triangle_normals[i] = vnorm(vcross(sub(p2, p1), sub(p3, p1)))

    # Average once per distinct position instead of once per corner
    # (resolves the TODO of the original implementation).
    smoothed = {}
    for key, bucket in pt_table.items():
        first_point = bucket[0][1]
        neighbors_normals = []
        for t_index, p, _ in bucket:
            assert p == first_point  # a hash collision would mix distinct points
            neighbors_normals.append(triangle_normals[t_index])
        cnt = len(neighbors_normals)
        N = (sum(n[0] for n in neighbors_normals) / cnt,
             sum(n[1] for n in neighbors_normals) / cnt,
             sum(n[2] for n in neighbors_normals) / cnt)
        smoothed[key] = vnorm(N)

    normals = []
    faces_normals = []
    for i, t in enumerate(sc.faces):
        for vi in t:
            normals.append(smoothed[_hash(sc.points[vi])])
        faces_normals.append((3 * i, 3 * i + 1, 3 * i + 2))
    return normals, faces_normals
def compute_normals(sc):
    """Compute one smoothed normal per point of ``sc``.

    Points are grouped by a coordinate hash (collisions assumed absent —
    an assert guards this); each point's normal is the plain average of
    the normals of the faces sharing that position.  NOTE: the averaged
    normals are NOT re-normalized here, matching the original behavior.

    Returns:
        A list parallel to ``sc.points`` with one normal per point.
    """
    out = len(sc.points) * [[0.0, 0.0, 0.0]]
    triangle_normals = len(sc.faces) * [[0.0, 0.0, 0.0]]

    def _hash(p):
        # Cheap coordinate hash; renamed so the builtin ``hash`` is not shadowed.
        return 0.11234 * p[0] + 0.35678 * p[1] + 0.67257 * p[2]

    from collections import defaultdict
    pt_table = defaultdict(list)
    for i, t in enumerate(sc.faces):
        p1 = sc.points[t[0]]
        p2 = sc.points[t[1]]
        p3 = sc.points[t[2]]
        pt_table[_hash(p1)].append((i, p1, t[0]))
        pt_table[_hash(p2)].append((i, p2, t[1]))
        pt_table[_hash(p3)].append((i, p3, t[2]))
        triangle_normals[i] = vnorm(vcross(sub(p2, p1), sub(p3, p1)))

    # ``.items()`` instead of the Python-2-only ``.iteritems()``
    # (works on both Python 2 and 3).
    for key, bucket in pt_table.items():
        point_index = bucket[0][2]
        first_point = bucket[0][1]
        # Average the normals of every triangle around this point.
        neighbors = []
        for t_index, p, _ in bucket:
            assert p == first_point  # a hash collision would mix distinct points
            neighbors.append(triangle_normals[t_index])
        cnt = len(neighbors)
        out[point_index] = (sum(n[0] for n in neighbors) / cnt,
                            sum(n[1] for n in neighbors) / cnt,
                            sum(n[2] for n in neighbors) / cnt)
    return out
def compute_normals_fast(sc):
    """Compute per-corner smoothed normals via a vertex->faces incidence table.

    Faster variant: instead of hashing positions, each vertex index keeps
    the list of faces touching it; the corner normal is the normalized
    average of those faces' normals.

    Returns:
        (normals, faces_normals): one normal per face corner, plus index
        triples ``(3*i, 3*i+1, 3*i+2)`` per face pointing into ``normals``.
    """
    # Placeholder list with one slot per face.  The original used
    # ``range(len(sc.faces))``, which is not assignable on Python 3.
    triangle_normals = [None] * len(sc.faces)
    vert_faces = [[] for _ in sc.points]
    for i, t in enumerate(sc.faces):
        # Face normal.
        p1 = sc.points[t[0]]
        p2 = sc.points[t[1]]
        p3 = sc.points[t[2]]
        triangle_normals[i] = vnorm(vcross(sub(p2, p1), sub(p3, p1)))
        # Record incidence: this face touches each of its three vertices.
        vert_faces[t[0]].append(i)
        vert_faces[t[1]].append(i)
        vert_faces[t[2]].append(i)

    normals = []
    faces_normals = []
    for i, t in enumerate(sc.faces):
        for fv in t:
            X = Y = Z = 0
            for incident_face in vert_faces[fv]:
                x, y, z = triangle_normals[incident_face]
                X += x
                Y += y
                Z += z
            cnt = len(vert_faces[fv])
            # Average, then normalize.
            normals.append(vnorm((X / cnt, Y / cnt, Z / cnt)))
        faces_normals.append((3 * i, 3 * i + 1, 3 * i + 2))
    return normals, faces_normals
def getHeavyHadronDecayLength(mcp):
    """Distance from this particle's production vertex to the endpoint of
    its last same-flavour descendant (i.e. follow daughters until the
    quark flavour changes, then measure start-to-endpoint)."""
    start = mcp.getVertex()
    while True:
        flavour = quarkType(mcp.getPDG())
        # First daughter that keeps the same quark flavour, if any.
        keepers = [d for d in mcp.getDaughters()
                   if quarkType(d.getPDG()) == flavour]
        if not keepers:
            break
        mcp = keepers[0]
    end = mcp.getEndpoint()
    return mu.threeDRadius(mu.sub(start, end))
def compute_normals(sc):
    """Compute one smoothed normal per point of ``sc``.

    Points are grouped by a coordinate hash (collisions assumed absent —
    an assert guards this); each point's normal is the plain average of
    the normals of the faces sharing that position.  NOTE: the averaged
    normals are NOT re-normalized here, matching the original behavior.

    Returns:
        A list parallel to ``sc.points`` with one normal per point.
    """
    out = len(sc.points) * [[0.0, 0.0, 0.0]]
    triangle_normals = len(sc.faces) * [[0.0, 0.0, 0.0]]

    def _hash(p):
        # Cheap coordinate hash; renamed so the builtin ``hash`` is not shadowed.
        return 0.11234 * p[0] + 0.35678 * p[1] + 0.67257 * p[2]

    from collections import defaultdict
    pt_table = defaultdict(list)
    for i, t in enumerate(sc.faces):
        p1 = sc.points[t[0]]
        p2 = sc.points[t[1]]
        p3 = sc.points[t[2]]
        pt_table[_hash(p1)].append((i, p1, t[0]))
        pt_table[_hash(p2)].append((i, p2, t[1]))
        pt_table[_hash(p3)].append((i, p3, t[2]))
        triangle_normals[i] = vnorm(vcross(sub(p2, p1), sub(p3, p1)))

    # ``.items()`` instead of the Python-2-only ``.iteritems()``
    # (works on both Python 2 and 3).
    for key, bucket in pt_table.items():
        point_index = bucket[0][2]
        first_point = bucket[0][1]
        # Average the normals of every triangle around this point.
        neighbors = []
        for t_index, p, _ in bucket:
            assert p == first_point  # a hash collision would mix distinct points
            neighbors.append(triangle_normals[t_index])
        cnt = len(neighbors)
        out[point_index] = (sum(n[0] for n in neighbors) / cnt,
                            sum(n[1] for n in neighbors) / cnt,
                            sum(n[2] for n in neighbors) / cnt)
    return out
def set_matrix(self, v): ''' To debug this, make sure gluPerspective and gluLookAt have the same parameter when given the same mouse events in cpp and in python ''' ############ # Projection glMatrixMode( GL_PROJECTION ) glLoadIdentity() pixel_ratio = self.w / float(self.h) zF = v.focal / 30.0 diam2 = 2.0 * self.scene.bb.sphere_beam() look = sub(v.tget, v.eye) diam = 0.5 * norm(look) recul = 2 * diam zNear = 0.01 * recul # 1% du segment de visee oeil-cible zFar = recul + diam2 if pixel_ratio < 1: zF /= pixel_ratio logger.info('gluPerspective %f %f %f %f' % (zF*30, pixel_ratio, zNear, zFar)) gluPerspective (zF*30, pixel_ratio, zNear, zFar) # For debug: hard-coded values for some models #gluPerspective ( 32, 1.34, 27, 54 ) # Gears #gluPerspective ( 32, 1.44, 204, 409 ) # spaceship ############ # Model View glMatrixMode(GL_MODELVIEW) glLoadIdentity() glTranslatef(v.recenterX, v.recenterY, 0.0) # Take care of the eye rotation_matrix = quaternion_to_matrix(v.quat) new_look = [0, 0, recul] # LOL name v.eye = multiply_point_by_matrix(rotation_matrix, new_look) v.eye = add(v.eye, self.scene.bb.center()) # Vector UP (Y) vup_t = multiply_point_by_matrix(rotation_matrix, [0.0, 1.0, 0.0]) logger.info('gluLookAt eye %s' % str(v.eye)) logger.info('gluLookAt tget %s' % str(v.tget)) logger.info('gluLookAt vup %s' % str(vup_t)) gluLookAt ( v.eye[0], v.eye[1], v.eye[2], v.tget[0], v.tget[1], v.tget[2], vup_t[0], vup_t[1], vup_t[2] )
def set_matrix(self, v): ''' To debug this, make sure gluPerspective and gluLookAt have the same parameter when given the same mouse events in cpp and in python ''' ############ # Projection glMatrixMode(GL_PROJECTION) glLoadIdentity() pixel_ratio = self.w / float(self.h) zF = v.focal / 30.0 diam2 = 2.0 * self.scene.bb.sphere_beam() look = sub(v.tget, v.eye) diam = 0.5 * norm(look) recul = 2 * diam zNear = 0.01 * recul # 1% du segment de visee oeil-cible zFar = recul + diam2 if pixel_ratio < 1: zF /= pixel_ratio logger.info('gluPerspective %f %f %f %f' % (zF * 30, pixel_ratio, zNear, zFar)) gluPerspective(zF * 30, pixel_ratio, zNear, zFar) # For debug: hard-coded values for some models #gluPerspective ( 32, 1.34, 27, 54 ) # Gears #gluPerspective ( 32, 1.44, 204, 409 ) # spaceship ############ # Model View glMatrixMode(GL_MODELVIEW) glLoadIdentity() glTranslatef(v.recenterX, v.recenterY, 0.0) # Take care of the eye rotation_matrix = quaternion_to_matrix(v.quat) new_look = [0, 0, recul] # LOL name v.eye = multiply_point_by_matrix(rotation_matrix, new_look) v.eye = add(v.eye, self.scene.bb.center()) # Vector UP (Y) vup_t = multiply_point_by_matrix(rotation_matrix, [0.0, 1.0, 0.0]) logger.info('gluLookAt eye %s' % str(v.eye)) logger.info('gluLookAt tget %s' % str(v.tget)) logger.info('gluLookAt vup %s' % str(vup_t)) gluLookAt(v.eye[0], v.eye[1], v.eye[2], v.tget[0], v.tget[1], v.tget[2], vup_t[0], vup_t[1], vup_t[2])
def mcVertexNumber(event, rp):
    """Count the decay vertices above the MC particle matched to the first
    track of reconstructed particle ``rp``.

    Climbs the MC ancestry until a non-quark flavour (``quarkType`` == -1)
    is reached, counting each ancestor whose flight distance exceeds 1e-10
    — i.e. resonances decaying effectively in place are not counted.
    """
    track = rp.getTracks()[0]
    mcps = event.getRelatedTo(track, [collectionnames.trackmcpcollection, ])
    mcp = mcps[0]
    # Go back through the tree counting all non-resonant decays.
    v_num = 0
    while mc.quarkType(mcp.getPDG()) != -1:
        mcp = getCorrectParent(mcp)
        if um.threeDRadius(um.sub(mcp.getVertex(), mcp.getEndpoint())) > 0.0000000001:
            v_num += 1
    return v_num
def mCVertices(mcps):
    """Group MC particles by production point.

    Two production points are treated as the same vertex when their
    distance is below 1e-7 (units presumably mm, so 0.1 micron — TODO
    confirm; the original comment said "0.01 micron").

    Returns:
        dict mapping a representative vertex position to the list of
        particles produced there.
    """
    vertices = {}
    for mcp in mcps:
        pos = mcp.getVertex()
        # Find an existing key within the merge threshold, if any.
        # ``next(..., None)`` replaces the Python-2-only ``gen.next()``.
        near = next((key for key in vertices
                     if mu.threeDRadius(mu.sub(pos, key)) < 0.0000001),
                    None)
        if near is not None:
            vertices[near].append(mcp)
        else:
            # setdefault collapses the original nested try/except KeyError.
            vertices.setdefault(pos, []).append(mcp)
    return vertices
def printMCTree(mcps,filename="temp.dot"): #start the graph f=open(filename, 'w') f.write('digraph G {\nranksep="equally";\noverlap="false";\nrankdir="LR";\ncompound=true;\n') #make a node for each mcp colors = {-1:"white",0:"white",1:"grey",2:"yellow",3:"green",4:"red",5:"blue",6:"pink"} for mcp in mcps: name = str(mcp.id()) label = lc.pdgToName(mcp.getPDG()) colour= colors[quarkType(mcp.getPDG())] f.write('"'+name+'" [ label="'+label+' '+str(len(mcp.getParents()))+'",style="filled",color="'+colour+'" ];\n') #idn = 0 #mcp_ids = [mcp.id() for mcp in mcps] #for mcp in mcps: # daughters = [daughter for daughter in mcp.getDaughters() if daughter.id() in mcp_ids] # if daughters: # f.write('subgraph cluster'+str(idn)+'{\n') # for daughter in daughters: # f.write(str(daughter.id())+';\n') # f.write('}\n') # f.write(str(mcp.id())+'->'+str(daughter.id())+' [ lhead=cluster'+str(idn)+',label = #"'+str(mu.threeDRadius(sub(mcp.getVertex(),mcp.getEndpoint())))[:4]+'" ];') # idn += 1 #make a node in each cluster #idn = 0 #for vert in mCVertices(mcps).itervalues(): # #we need to remove the ones that leave this vertex # #vert = [mcp for mcp in vert if mu.threeDRadius(sub(mcp.getVertex(),mcp.getEndpoint())) < 0.00000001] # if len(vert) > 1: # f.write('subgraph cluster'+str(idn)+'{\n') # for mcp in vert: # f.write(str(mcp.id())+';\n') # f.write('}\n') # idn += 1 #make a link for each decay (whose product is in the list!) mcp_ids = [mcp.id() for mcp in mcps ]#if fromIP(mcp)] for mcp in mcps: for parent in mcp.getParents(): # if fromIP(parent): f.write(str(parent.id())+'->'+str(mcp.id())+' [ label = "'+str(mu.threeDRadius(mu.sub(parent.getVertex(),parent.getEndpoint())))[:4]+'" ];') #end graph f.write('}\n') f.close() f=open(filename+".png", 'w') sp.call(['dot', '-Tpng', filename],stdout = f) f.close() sp.call(['eog', filename+".png"])
def compute_normals_hash(sc):
    """Compute per-corner smoothed vertex normals using a position hash.

    Duplicate of the earlier ``compute_normals_hash`` — same contract.
    Vertices are grouped by a hash of their coordinates (collisions
    assumed absent, guarded by an assert); each face-corner normal is the
    normalized average of the normals of every face sharing that position.

    Returns:
        (normals, faces_normals): one normal per face corner plus index
        triples ``(3*i, 3*i+1, 3*i+2)`` per face pointing into ``normals``.
    """
    triangle_normals = len(sc.faces) * [[0.0, 0.0, 0.0]]

    def _hash(p):
        # Cheap coordinate hash; renamed so the builtin ``hash`` is not shadowed.
        return 0.11234 * p[0] + 0.35678 * p[1] + 0.67257 * p[2]

    from collections import defaultdict
    pt_table = defaultdict(list)
    for i, t in enumerate(sc.faces):
        p1 = sc.points[t[0]]
        p2 = sc.points[t[1]]
        p3 = sc.points[t[2]]
        pt_table[_hash(p1)].append((i, p1, t[0]))
        pt_table[_hash(p2)].append((i, p2, t[1]))
        pt_table[_hash(p3)].append((i, p3, t[2]))
        triangle_normals[i] = vnorm(vcross(sub(p2, p1), sub(p3, p1)))

    # Average once per distinct position instead of once per corner
    # (resolves the TODO of the original implementation).
    smoothed = {}
    for key, bucket in pt_table.items():
        first_point = bucket[0][1]
        neighbors_normals = []
        for t_index, p, _ in bucket:
            assert p == first_point  # a hash collision would mix distinct points
            neighbors_normals.append(triangle_normals[t_index])
        cnt = len(neighbors_normals)
        N = (sum(n[0] for n in neighbors_normals) / cnt,
             sum(n[1] for n in neighbors_normals) / cnt,
             sum(n[2] for n in neighbors_normals) / cnt)
        smoothed[key] = vnorm(N)

    normals = []
    faces_normals = []
    for i, t in enumerate(sc.faces):
        for vi in t:
            normals.append(smoothed[_hash(sc.points[vi])])
        faces_normals.append((3 * i, 3 * i + 1, 3 * i + 2))
    return normals, faces_normals