def wander(self):
    t = bge.logic.getRealTime()
    v = Vector((t, t, t)) + self.worldPosition
    n = noise.noise_vector(v)
    # return self.wander_direction
    return n.to_2d()
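For context, the same time-driven sampling outside the game engine, as a sketch: bge.logic.getRealTime() only exists inside the Blender Game Engine, so time.time() stands in here, and wander_direction is a hypothetical free-function version of the method above.

import time
from mathutils import noise, Vector

def wander_direction(position):
    # Offset the sample position by the current time so the noise value,
    # and therefore the returned 2D direction, drifts smoothly over time.
    t = time.time()
    sample = Vector((t, t, t)) + position
    return noise.noise_vector(sample).to_2d()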
def deepnoise(v, _noise_type):
    u = noise.noise_vector(v, _noise_type)[:]
    a = u[0], u[1], u[2] - 1  # a = u minus (0, 0, 1)
    return sqrt((a[0] * a[0]) + (a[1] * a[1]) + (a[2] * a[2])) * 0.5
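A usage sketch for deepnoise as defined above, assuming the Blender 2.7x-era mathutils.noise API where the noise type is passed positionally as a constant from noise.types (later versions use a noise_basis string instead); deepnoise's own module is assumed to import sqrt and noise.

from mathutils import noise, Vector

# One non-negative scalar sample derived from the vector noise field at a point;
# noise.types.STDPERLIN is the 2.7x-era equivalent of noise_basis='PERLIN_ORIGINAL'.
value = deepnoise(Vector((0.5, 1.0, 2.0)), noise.types.STDPERLIN)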
"CELL-S": (lambda u: cell(Vector(u)), 1) } vector_out = { "CROSS": (lambda u, v: Vector(u).cross(v)[:], 2), "ADD": (lambda u, v: (u[0]+v[0],u[1]+v[1],u[2]+v[2]), 2), "SUB": (lambda u, v: (u[0]-v[0],u[1]-v[1],u[2]-v[2]), 2), "REFLECT": (lambda u, v: Vector(u).reflect(v)[:], 2), "PROJECT": (lambda u, v: Vector(u).project(v)[:], 2), "SCALAR": (lambda u, s: (Vector(u) * s)[:], 2), "1/SCALAR": (lambda u, s: (Vector(u) * (1 / s))[:], 2), "ROUND": (lambda u, s: Vector(u).to_tuple(s), 2), "NORMALIZE": (lambda u: Vector(u).normalized()[:], 1), "NEG": (lambda u: -Vector(u)[:], 1), "NOISE-V": (lambda u: noise_vector(Vector(u))[:], 1), "CELL-V": (lambda u: cell_vector(Vector(u))[:], 1) } class VectorMathNode(bpy.types.Node, SverchCustomTreeNode): ''' VectorMath Node ''' bl_idname = 'VectorMathNode' bl_label = 'Vector Math' bl_icon = 'OUTLINER_OB_EMPTY' # vector math functions mode_items = [ ("CROSS", "Cross product", "", 0), ("DOT", "Dot product", "", 1),
def evaluate(self, x, y, z):
    noise.seed_set(self.seed)
    return noise.noise_vector((x, y, z), noise_basis=self.noise_type)
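The same two calls outside the class, as a minimal sketch assuming the Blender 2.8+ mathutils.noise API; the seed and basis values are arbitrary stand-ins for self.seed and self.noise_type.

from mathutils import noise

noise.seed_set(42)  # seed the noise module's randomized helpers
v = noise.noise_vector((0.5, 1.0, 2.0), noise_basis='PERLIN_ORIGINAL')
print(v[:])  # a 3D displacement vector, each component roughly in -1..1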
def mk_noise(v):
    r = noise.noise_vector(v, noise_basis=self.noise_type)
    return r[0], r[1], r[2]
def execute(self, context):
    depsgraph = bpy.context.depsgraph
    ob = bpy.context.active_object
    obj_eval = depsgraph.objects.get(ob.name, None)
    # particleObj = context.active_object
    particleObj = obj_eval
    if bpy.context.active_object.particle_systems is None:  # create new one
        self.report({'INFO'}, 'No active Particle Hair System found!')
        return {"CANCELLED"}
    index = particleObj.particle_systems.active_index
    psys_active = particleObj.particle_systems[index]
    if psys_active.settings.type != 'HAIR':  # create new one
        self.report({'INFO'}, 'Active Particle System is not Hair type! Cancelling')
        return {"CANCELLED"}
    pointsList_hair = []
    context.scene.update()
    if len(psys_active.particles) == 0:  # require more than three strands
        self.report({'INFO'}, 'Active Particle System has zero strands! Cancelling')
        return {"CANCELLED"}
    diagonal = sqrt(pow(particleObj.dimensions[0], 2) +
                    pow(particleObj.dimensions[1], 2) +
                    pow(particleObj.dimensions[2], 2))  # to normalize some values
    for particle in psys_active.particles:  # for strand point
        pointsList_hair.append([hair_key.co for hair_key in particle.hair_keys])
    # DONE: exclude duplicates if first strand[0] in list already
    if len(psys_active.particles) == 1:  # create two fake strands so that barycentric works
        pointsList_hair.append([x.xyz + Vector((0.01 * diagonal, 0, 0)) for x in pointsList_hair[0]])
        pointsList_hair.append([x.xyz + Vector((0, 0.01 * diagonal, 0)) for x in pointsList_hair[0]])
    elif len(psys_active.particles) == 2:  # create one fake strand so that barycentric works
        pointsList_hair.append([x.xyz + Vector((0.01 * diagonal, 0, 0)) for x in pointsList_hair[0]])
    pointsList_uniq = []
    [pointsList_uniq.append(x) for x in pointsList_hair if x not in pointsList_uniq]  # removing doubles (can cause zero size tris)

    # same_point_count cos barycentric transform requires it
    pointsList = interpol_Catmull_Rom(pointsList_uniq, self.t_in_y,
                                      uniform_spacing=True,
                                      same_point_count=True)  # just gives smoother result on borders
    searchDistance = 100 * diagonal
    parentRoots = [strand[0] for strand in pointsList]  # first point of roots
    # create new Particle System with uniform points
    pointsChildRoots = self.createUniformParticleSystem(
        context, self.childCount, self.PlacementJittering,
        self.Seed)  # return child particle root positions
    kd = kdtree.KDTree(len(parentRoots))
    for i, root in enumerate(parentRoots):
        kd.insert(root, i)
    kd.balance()
    sourceSurface_BVHT = BVHTree.FromObject(particleObj, context.depsgraph)
    childStrandsPoints = []  # will contain strands with child points
    childStrandRootNormals = []
    length_ver_group_index = -1
    vertex_group_length_name = psys_active.vertex_group_length
    if vertex_group_length_name:  # calc weight based on root point
        length_ver_group_index = particleObj.vertex_groups[vertex_group_length_name].index
    particleObjMesh = particleObj.to_mesh(context.depsgraph,
                                          apply_modifiers=True,
                                          calc_undeformed=False)
    seed(a=self.lenSeed, version=2)
    embed = self.embed * 0.04 * diagonal
    cpow = calc_power(self.noiseFalloff)
    cpowClump = calc_power(self.ClumpingFalloff)
    noiseFalloff = [pow(i / self.t_in_y, cpow) for i in range(self.t_in_y)]
    ClumpFalloff = [pow((i + 1) / self.t_in_y, cpowClump) for i in range(self.t_in_y)]

    for i, childRoot in enumerate(pointsChildRoots):
        # for each child find its three parents and generate strands by barycentric transform
        snappedPoint, normalChildRoot, rootHitIndex, distance = sourceSurface_BVHT.find_nearest(
            childRoot, searchDistance)
        childStrandRootNormals.append(normalChildRoot)
        threeClosestParentRoots = kd.find_n(childRoot, 3)  # find three closest parent roots
        rootTri_co, ParentRootIndices, distances = zip(*threeClosestParentRoots)  # split it into 3 arrays
        sourceTri_BVHT = BVHTree.FromPolygons(
            rootTri_co, [(0, 1, 2)], all_triangles=True)  # [0,1,2] - polygon == vert indices list
        childRootSnapped, normalChildProjected, index, distance = sourceTri_BVHT.find_nearest(
            childRoot, searchDistance)  # snap generated child to parent triangle area; normals are sometimes flipped
        childRootSnapped2, normalChildProjected2, index2, distance2 = sourceSurface_BVHT.find_nearest(
            childRootSnapped, searchDistance)  # this gives ok normals always
        lenWeight = 1
        if length_ver_group_index != -1:  # if vg exists
            averageWeight = 0
            for vertIndex in particleObjMesh.polygons[rootHitIndex].vertices:
                # DONE: check if it works on mesh with modifiers
                for group in particleObjMesh.vertices[vertIndex].groups:
                    if group.group == length_ver_group_index:
                        averageWeight += group.weight
                        break
            lenWeight = averageWeight / len(particleObjMesh.polygons[rootHitIndex].vertices)
        ranLen = uniform(-self.RandomizeLengthMinus, self.RandomizeLengthPlus)
        lenWeight *= (1 + ranLen)
        # diff = childRoot - childRootSnapped
        # mat_loc = Matrix.Translation(childRootSnapped)
        # matTriangleSpaceInv = mat_loc  # * rotMatrix
        # matTriangleSpaceInv.invert()
        rotQuat = normalChildProjected2.rotation_difference(normalChildRoot)
        translationMatrix = Matrix.Translation(childRoot)
        rotMatrixRot = rotQuat.to_matrix().to_4x4()
        mat_sca = Matrix.Scale(lenWeight, 4)
        transformMatrix = translationMatrix @ rotMatrixRot
        strandPoints = []
        # for childRootSnapped points: transform them from the parent root triangle
        # to the parents' next segment triangle t1, t2, t3
        # and compensate the child snapping to the root triangle from before
        for j, (t1, t2, t3) in enumerate(zip(pointsList[ParentRootIndices[0]],
                                             pointsList[ParentRootIndices[1]],
                                             pointsList[ParentRootIndices[2]])):
            pointTransformed = barycentric_transform(childRootSnapped,
                                                     rootTri_co[0], rootTri_co[1], rootTri_co[2],
                                                     Vector(t1), Vector(t2), Vector(t3))
            childInterpolatedPoint = transformMatrix @ mat_sca @ (
                pointTransformed - childRootSnapped)  # rotate child strand to original pos (from before snapping)
            # do noise
            noise.seed_set(self.Seed + i)  # add seed per strand/ring?
            noiseVectorPerStrand = noise.noise_vector(
                childInterpolatedPoint * self.freq / diagonal,
                noise_basis='PERLIN_ORIGINAL') * noiseFalloff[j] * self.noiseAmplitude * diagonal / 10
            # childInterpolatedPoint += noiseVectorPerStrand

            # do clumping
            diff = Vector(t1) - childInterpolatedPoint  # distance to parent strand (first strand from trio)
            # point += noiseVectorPerStrand * noiseFalloff[j] * self.noiseAmplitude * diagonal / 10
            # childClumped = childInterpolatedPoint + ClumpFalloff[j] * self.Clumping * diff + noiseVectorPerStrand * (1 - ClumpFalloff[j])
            childClumped = childInterpolatedPoint + ClumpFalloff[j] * self.Clumping * diff + \
                noiseVectorPerStrand * (1 - ClumpFalloff[j] * self.Clumping)
            # childClumped = childInterpolatedPoint + noiseVectorPerStrand
            strandPoints.append(childClumped)

        # embedding roots
        diff = strandPoints[0] - strandPoints[1]
        diff.normalize()
        normalWeight = abs(diff.dot(normalChildRoot))
        strandPoints[0] += (diff * normalWeight - normalChildRoot * (1 - normalWeight)) * embed
        # use childStrandRootNormal to move it more into the mesh surface
        childStrandsPoints.append(strandPoints)

    bpy.data.meshes.remove(particleObjMesh)

    # create the Curve Datablock
    curveData = bpy.data.curves.new(particleObj.name + '_curve', type='CURVE')
    splinePointsNp = np.array(childStrandsPoints, dtype=np.float32)
    if self.hairType != 'BEZIER':
        splinePointsNpOnes = np.ones((len(childStrandsPoints), self.t_in_y, 4),
                                     dtype=np.float32)  # 4 coords: x, y, z, 1
        splinePointsNpOnes[:, :, :-1] = splinePointsNp
        splinePointsNp = splinePointsNpOnes
    for strandPoints in splinePointsNp:  # for each strand
        curveLength = len(strandPoints)
        polyline = curveData.splines.new(self.hairType)
        if self.hairType == 'BEZIER':
            polyline.bezier_points.add(curveLength - 1)
        elif self.hairType == 'POLY' or self.hairType == 'NURBS':
            polyline.points.add(curveLength - 1)
        if self.hairType == 'NURBS':
            polyline.order_u = 3  # like bezier thing
            polyline.use_endpoint_u = True
        if self.hairType == 'BEZIER':
            # polyline.bezier_points.co = (x, y, z)
            polyline.bezier_points.foreach_set("co", strandPoints.ravel())
            polyline.bezier_points.foreach_set('handle_left_type', 'AUTO')
            polyline.bezier_points.foreach_set('handle_right_type', 'AUTO')
        else:
            polyline.points.foreach_set("co", strandPoints.ravel())
            # polyline.points[i].co = (x, y, z, 1)
    curveData.resolution_u = self.strandResU
    curveData.dimensions = '3D'

    # create Object
    curveOB = bpy.data.objects.new(particleObj.name + '_curve', curveData)
    curveOB.matrix_world = particleObj.matrix_world
    scn = context.scene
    scn.collection.objects.link(curveOB)
    curveOB.targetObjPointer = particleObj.name  # store source surface for the snapping operator
    context.view_layer.objects.active = curveOB
    curveOB.select_set(True)
    # curveOB.data.show_normal_face = False
    if self.generateRibbons:
        bpy.ops.object.generate_ribbons(strandResU=self.strandResU,
                                        strandResV=self.strandResV,
                                        strandWidth=self.strandWidth,
                                        strandPeak=self.strandPeak,
                                        strandUplift=self.strandUplift,
                                        alignToSurface=self.alignToSurface)
        HT_OT_CurvesUVRefresh.uvCurveRefresh(curveOB)
        context.view_layer.objects.active = particleObj
    else:
        curveData.fill_mode = 'FULL'
        curveData.bevel_depth = 0.004 * diagonal
        curveData.bevel_resolution = 2
        bpy.ops.object.curve_taper(TipRadiusFalloff=self.RadiusFalloff,
                                   TipRadius=self.TipRadius,
                                   MainRadius=self.Radius)
    return {"FINISHED"}
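The core of the strand shaping above is the per-point displacement from noise.noise_vector, scaled by a falloff along the strand and blended with the clumping offset. Below is a minimal, self-contained sketch of just the noise-displacement step; freq, amplitude, and the quadratic falloff are hypothetical stand-ins for the operator's properties.

from mathutils import noise, Vector

freq, amplitude = 2.0, 0.3
points = [Vector((0.0, 0.0, 0.1 * k)) for k in range(10)]        # one straight "strand"
falloff = [pow(k / len(points), 2) for k in range(len(points))]  # 0 at the root, growing toward the tip

# Displace each strand point by a noise vector sampled at that point,
# weighted so the root stays put and the tip moves the most.
displaced = [
    p + noise.noise_vector(p * freq, noise_basis='PERLIN_ORIGINAL') * (amplitude * falloff[k])
    for k, p in enumerate(points)
]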