예제 #1
0
 def __genElasticConn(self, particle, elasticParticles):
     '''
     Find the elastic neighbours of ``particle`` and extend
     self.elasticConnections with one ElasticConnection per neighbour,
     padded with placeholder entries up to Const.MAX_NUM_OF_NEIGHBOUR.
     '''
     total = len(elasticParticles)
     nMi = elasticParticles.index(particle) * self.nMuscles / total
     # Candidate neighbours: within the squared-radius threshold and not
     # the particle itself, ordered nearest-first, capped at the maximum
     # neighbour count.
     candidates = sorted(
         (p for p in elasticParticles
          if Particle.dot_particles(particle, p) <= Const.r0_squared * 3.05
          and p != particle),
         key=lambda q: Particle.distBetween_particles(particle, q))
     candidates = candidates[:Const.MAX_NUM_OF_NEIGHBOUR]
     connections = []
     for neighbour in candidates:
         nMj = elasticParticles.index(neighbour) * self.nMuscles / total
         val1 = 0
         if self.nMuscles > 0 and nMj == nMi:
             dx = particle.position.x - neighbour.position.x
             dy = particle.position.y - neighbour.position.y
             dz = particle.position.z - neighbour.position.z
             dx *= dx
             dy *= dy
             dz *= dz
             # Flag connections that run predominantly along the z axis.
             val1 = (1.1 + nMi) * float((dz > 100 * dx) and (dz > 100 * dy))
         connections.append(
             ElasticConnection(self.particles.index(neighbour),
                               Particle.distBetween_particles(neighbour, particle),
                               val1, 0))
     # Pad with non-particle sentinels so every particle contributes
     # exactly MAX_NUM_OF_NEIGHBOUR entries.
     missing = Const.MAX_NUM_OF_NEIGHBOUR - len(candidates)
     if missing > 0:
         connections.extend(
             [ElasticConnection(Const.NO_PARTICEL_ID, 0, 0, 0)] * missing)
     self.elasticConnections.extend(connections)
예제 #2
0
 def __genElasticConn(self, particle, elasticParticles, part_phys_mod):
     '''
     Find the elastic neighbours of ``particle`` and extend
     self.elasticConnections with one ElasticConnection per neighbour,
     padded with placeholder entries up to Const.MAX_NUM_OF_NEIGHBOUR.

     part_phys_mod -- physical radius modifier; its square (times 3.05)
     is used as the squared-distance neighbour threshold.
     '''
     nMi = elasticParticles.index(particle) * self.nMuscles / len(
         elasticParticles)
     # Hoist the loop-invariant threshold; iterate directly instead of
     # the old range(len(...)) loop whose index was never used.
     threshold = (part_phys_mod * part_phys_mod) * 3.05
     neighbour_collection = [
         p for p in elasticParticles
         if Particle.dot_particles(particle, p) <= threshold
         and p != particle
     ]
     neighbour_collection.sort(
         key=lambda p: Particle.distBetween_particles(particle, p))
     # Keep only the nearest MAX_NUM_OF_NEIGHBOUR candidates.
     neighbour_collection = neighbour_collection[:Const.MAX_NUM_OF_NEIGHBOUR]
     elastic_connections_collection = []
     for p in neighbour_collection:
         nMj = elasticParticles.index(p) * self.nMuscles / len(
             elasticParticles)
         val1 = 0
         if self.nMuscles > 0 and nMj == nMi:
             # Squared component distances; val1 flags connections that
             # run predominantly along the z axis.
             dx2 = particle.position.x - p.position.x
             dy2 = particle.position.y - p.position.y
             dz2 = particle.position.z - p.position.z
             dx2 *= dx2
             dy2 *= dy2
             dz2 *= dz2
             val1 = (1.1 + nMi) * float(
                 (dz2 > 100 * dx2) and (dz2 > 100 * dy2))
         elastic_connections_collection.append(
             ElasticConnection(self.particles.index(p),
                               Particle.distBetween_particles(p, particle),
                               val1, 0))
     # If the particle has fewer than MAX_NUM_OF_NEIGHBOUR neighbours,
     # pad the collection with non-particle sentinel connections.
     if len(neighbour_collection) < Const.MAX_NUM_OF_NEIGHBOUR:
         elastic_connections_collection.extend(
             [ElasticConnection(Const.NO_PARTICEL_ID, 0, 0, 0)] *
             (Const.MAX_NUM_OF_NEIGHBOUR - len(neighbour_collection)))
     self.elasticConnections.extend(elastic_connections_collection)
	def import_collada(self, col_file, dist_scalar, dist_exp):
		'''
		Import particles, elastic connections and membranes from a collada
		(.dae) file.

		col_file    -- path of the collada file to read
		dist_scalar -- scalar multiplier applied to each connection rest length
		dist_exp    -- exponent applied to each connection rest length

		Returns (boundry_box, particles, elastic_connections_collection,
		membranes, parm_memb_index).

		Importing the boundary box assumes box vertices are in the collada
		file in the format vertice 1-8 =
		(0 0 0) (0 0 1) (0 1 0) (0 1 1) (1 0 0) (1 0 1) (1 1 0) (1 1 1)

		Importing collada transforms needs 'TransRotLoc' and not 'Matrix'
		style transforms currently.

		TODO: rotation not implemented in transformations yet

		Currently only importing one elastic mesh and one boundary box is
		supported.  Multiple liquid meshes can be imported.
		'''
		print("collada import")
		boundry_box = [0, 100.2, 0, 66.8, 0, 668] #default
		boundry_parts = []
		# Mesh ids in the collada file are expected to carry an
		# "elastic"/"liquid"/"boundry" prefix identifying the particle type.
		elast_pos_section = re.compile(".*<float_array id=\"(elastic.*)-mesh-positions-array\" count=\"\d+\">.*")
		liquid_pos_section = re.compile(".*<float_array id=\"(liquid.*)-mesh-positions-array\" count=\"\d+\">.*")
		bound_pos_section = re.compile(".*<float_array id=\"(boundry.*)-mesh-positions-array\" count=\"\d+\">.*")
		material_section = re.compile(".*<polylist material=\"(.*)-material\" count=\"\d+\">.*")
		geo_section_end = re.compile(".*</geometry>.*")
		elastic_pattern = re.compile("elastic.*")
		liquid_pattern = re.compile("liquid.*")
		boundry_pattern = re.compile("boundry.*")
		sect_patterns = [elastic_pattern, liquid_pattern, boundry_pattern]
		section_coords = []
		transf_section = re.compile(".*<node id=\".*\" name=\"(.*)\" type=\"NODE\">.*")
		tran_loc_sect = re.compile(".*<translate sid=\"location\">(.*)</translate>.*")
		tran_scale_sect = re.compile(".*<scale sid=\"scale\">(.*)</scale>.*")
		trans_loc = []
		trans_scale = []
		elastic_found = False
		ptype_found = False
		tris_section = re.compile("\s+<p>(.*)</p>")
		tris_triplet = re.compile("(\S+)\s(\S+)\s(\S+)\s(\S+)\s(\S+)\s(\S+)\s?")
		xml_pattern = "(.*[>])+(.*)([<].*)+"
		vertex_pattern = "(\S+)\s(\S+)\s(\S+)(\s?)"
		current_transf_name = ""
		current_ptype_name = ""
		elastic_particles = []
		liquid_particles = []
		particles = []
		unsorted_connections = []
		membranes = []
		parm_memb_index = []
		elastic_connections_collection = []
		muscle_particles = []

		with open(col_file, "r") as ins:
			for line in ins:
				if elast_pos_section.match(line.rstrip()):
					# Elastic mesh vertices (particle type 2.1).
					p_type = 2.1
					new_particles = self.extract_particles(p_type, line, xml_pattern, vertex_pattern)
					elastic_particles.extend(new_particles)
					elastic_found = True

					object_3d_name = elast_pos_section.match(line.rstrip()).group(1)
					section_coords.append([object_3d_name, 0, len(new_particles)])
					particles.extend(elastic_particles)
				elif liquid_pos_section.match(line.rstrip()):
					# Liquid mesh vertices (particle type 1.1); appended to
					# 'particles' only after all geometry has been read.
					p_type = 1.1
					new_particles = self.extract_particles(p_type, line, xml_pattern, vertex_pattern)
					liquid_particles.extend(new_particles)

					object_3d_name = liquid_pos_section.match(line.rstrip()).group(1)
					section_coords.append([object_3d_name, 0, len(new_particles)])
				elif bound_pos_section.match(line.rstrip()):
					# Boundary mesh (particle type 3.1): derive the bounding
					# box from the min/max coordinate on each axis.
					p_type = 3.1
					new_particles = self.extract_particles(p_type, line, xml_pattern, vertex_pattern)
					boundry_parts.extend(new_particles)

					x_b, y_b, z_b = [], [], []
					for i in range(len(boundry_parts)):
						x_b.append(boundry_parts[i].position.x)
						y_b.append(boundry_parts[i].position.y)
						z_b.append(boundry_parts[i].position.z)
					x_b.sort(); y_b.sort(); z_b.sort()

					x1, x2, y1, y2, z1, z2 = x_b[0], x_b[-1], y_b[0], y_b[-1], z_b[0], z_b[-1]
					boundry_box = [x1, x2, y1, y2, z1, z2]

					object_3d_name = bound_pos_section.match(line.rstrip()).group(1)
				elif material_section.match(line.rstrip()):
					current_ptype_name = material_section.match(line.rstrip()).group(1)
					ptype_found = True
				elif transf_section.match(line.rstrip()):
					current_transf_name = transf_section.match(line.rstrip()).group(1)
				elif tran_loc_sect.match(line.rstrip()):
					# Translation for the current node: [name, x, y, z].
					trans_entry = [current_transf_name]
					trans_coords = tran_loc_sect.match(line.rstrip()).group(1)
					trans_entry.extend(trans_coords.split(' '))
					trans_loc.append(trans_entry)
				elif tran_scale_sect.match(line.rstrip()):
					# Scale for the current node: [name, sx, sy, sz].
					trans_entry = [current_transf_name]
					trans_coords = tran_scale_sect.match(line.rstrip()).group(1)
					trans_entry.extend(trans_coords.split(' '))
					trans_scale.append(trans_entry)
				elif tris_section.match(line.rstrip()) and elastic_found:
					for tris in re.finditer(tris_triplet, tris_section.match(line.rstrip()).group(1)):
						# read in elastic connections: groups 1/3/5 are the
						# triangle's vertex indices (2/4/6 are normal indices)
						p1 = int(tris.group(1))
						p3 = int(tris.group(3))
						p5 = int(tris.group(5))

						unsorted_connections.append([[p1,p3],[p1,p5],[p3,p5]])

						# create membranes (deduplicated vertex triples)
						membrane_triple = [p1, p3, p5]
						if membrane_triple not in membranes:
							membranes.append(membrane_triple)

						# find muscles
						if ptype_found and current_ptype_name == "muscle":
							muscle_particles.append([p1,p3])
							muscle_particles.append([p1,p5])
							muscle_particles.append([p3,p5])
					ptype_found = False

				if geo_section_end.match(line.rstrip()) and elastic_found:
					# after unsorted_connections is filled up now elastic connections are created
					for p_i in range(len(particles)):
						total_conn = 0
						found_j = []
						new_conns = []
						conn_1 = 0
						conn_2 = 0
						for con_i in range(len(unsorted_connections)):
							for connection in unsorted_connections[con_i]:
								conn_1 = connection[0]
								conn_2 = connection[1]
								if (p_i == conn_1 or p_i == conn_2) and (total_conn < Const.MAX_NUM_OF_NEIGHBOUR):
									part_i = particles[p_i]
									# BUG FIX: the old "cond and a or b" idiom
									# returned conn_1 whenever conn_2 was 0, so
									# links to particle index 0 were mis-resolved.
									j_index = conn_2 if p_i == conn_1 else conn_1
									part_j = particles[j_index]
									if j_index not in found_j:
										val1 = self.calc_ptype(muscle_particles, p_i, j_index)
										dist = ((Particle.distBetween_particles(part_j,part_i)**float(dist_exp)) * float(dist_scalar))
										new_conns.append( ElasticConnection(particles.index(part_j)+0.2, dist, val1, 0) )
										found_j.append(j_index)
										total_conn += 1

						sorted_conns = self.sort_conns(new_conns)
						elastic_connections_collection.extend(sorted_conns)
						# pad with sentinel connections up to MAX_NUM_OF_NEIGHBOUR
						elastic_connections_collection.extend([ElasticConnection(Const.NO_PARTICEL_ID,0,0,0)] * (Const.MAX_NUM_OF_NEIGHBOUR - total_conn))

					elastic_found = False

			particles.extend(liquid_particles)

			# create pmis: a particle->membrane index table with a fixed stride
			# of MAX_MEMBRANES_INCLUDING_SAME_PARTICLE entries per elastic
			# particle, -1 filling the unused slots
			print("particles:")
			print(float(len(particles)))
			for p_i in range(len(particles)):
				if particles[p_i].type == 2.1:
					pmi_group = []
					for m_i in range(len(membranes)):
						for memb_vert in membranes[m_i]:
							if p_i == memb_vert and len(pmi_group) < Const.MAX_MEMBRANES_INCLUDING_SAME_PARTICLE:
								pmi_group.append(m_i)

					for pmi_i in pmi_group:
						parm_memb_index.append(pmi_i)

					for blank_i in range(Const.MAX_MEMBRANES_INCLUDING_SAME_PARTICLE - len(pmi_group)):
						parm_memb_index.append(-1)

			print("parm_memb_index:")
			print(len(parm_memb_index)/float(Const.MAX_MEMBRANES_INCLUDING_SAME_PARTICLE))

			# transforms
			boundry_box, particles = self.translate_mesh(trans_scale, trans_loc, section_coords, sect_patterns, boundry_box, particles)

		return boundry_box, particles, elastic_connections_collection, membranes, parm_memb_index