Beispiel #1
0
    def process(self):
        """Apply the selected noise function to every input vertex list.

        Seeds the RNG per object so results are reproducible, then writes
        either vector noise ('Noise V') or scalar noise ('Noise S').
        """
        outs = self.outputs
        if not outs[0].is_linked:
            return

        ins = self.inputs
        vert_objs = ins['Vertices'].sv_get()
        seed_list = ins['Seed'].sv_get()[0]
        basis = noise_dict[self.noise_type]
        apply_noise = noise_f[self.out_mode]

        results = []
        for seed, verts in zip(*match_long_repeat([seed_list, vert_objs])):
            # A seed of 0 unsets the seed and makes mathutils.noise use system
            # time, producing unreproducible output. Force any non-numeric or
            # rounds-to-zero seed to a fixed non-zero value instead.
            # See https://github.com/nortikin/sverchok/issues/1095#issuecomment-271261600
            numeric = seed if isinstance(seed, (int, float)) else 0
            noise.seed_set(int(round(numeric)) or 140230)
            results.append([apply_noise(v, basis) for v in verts])

        if 'Noise V' in outs:
            outs['Noise V'].sv_set(Vector_degenerate(results))
        else:
            outs['Noise S'].sv_set(results)
Beispiel #2
0
def main(context):
    """Clear previously generated objects and run the ant particle simulation.

    Reads guide/ground objects and simulation parameters from custom scene
    properties (ant_guide, ant_ground, ant_number, ant_start_frame,
    ant_end_frame, ant_scale), then steps the particle system over the
    configured frame range.
    """
    # Iterate over a snapshot: removing objects while iterating
    # bpy.data.objects directly can skip entries or invalidate the iterator.
    for o in list(bpy.data.objects):
        if o.name.startswith(('generator', 'Ico', 'instance')):
            o.user_clear()
            bpy.context.scene.objects.unlink(o)
            bpy.data.objects.remove(o)

    guide = bpy.data.objects[bpy.context.scene.ant_guide]
    ground = bpy.data.objects[bpy.context.scene.ant_ground]
    number_ants = bpy.context.scene.ant_number
    start_frame = bpy.context.scene.ant_start_frame
    end_frame = bpy.context.scene.ant_end_frame
    scale = bpy.context.scene.ant_scale

    a_ps = Particle_system(guide, ground, scale)
    a_ps.add_particles(number_ants)

    print('\n---')
    start = time()
    # fixed seeds so repeated runs of the simulation are reproducible
    seed(0)
    noise.seed_set(0)
    for f in range(start_frame, end_frame+1):
        if f%10 == 0:
            print('frame: {:04}'.format(f))
        a_ps.step()
    print('Simulated in {:05.5f} seconds'.format(time() - start))
Beispiel #3
0
    def process(self):
        """Emit lists of random unit vectors on the 'Random' output.

        Count and Seed come from their sockets when those are linked to a
        StringsSocket, otherwise from the node's own properties.
        """
        def strings_linked(name):
            # True when the named input exists and is fed by a StringsSocket.
            if name not in self.inputs:
                return False
            links = self.inputs[name].links
            return bool(links) and type(links[0].from_socket) == bpy.types.StringsSocket

        # inputs
        if strings_linked('Count'):
            Coun = SvGetSocketAnyType(self, self.inputs['Count'])[0]
        else:
            Coun = [self.count_inner]

        if strings_linked('Seed'):
            Seed = SvGetSocketAnyType(self, self.inputs['Seed'])[0]
        else:
            Seed = [self.seed]

        # outputs
        if 'Random' not in self.outputs or not self.outputs['Random'].links:
            return

        Random = []
        # seed = 0 is special for blender: it unsets the seed and switches to
        # system time, making output unrepeatable. Substitute a fixed value
        # (originally generated with random.org) when the rounded seed is 0.
        for c, s in zip(*match_long_repeat([Coun, Seed])):
            seed_set(int(round(s)) or 140230)
            Random.append([random_unit_vector().to_tuple()
                           for _ in range(int(max(1, c)))])

        SvSetSocketAnyType(self, 'Random', Random)
Beispiel #4
0
    def process(self):
        """Emit lists of scaled random unit vectors on the 'Random' output."""
        ins = self.inputs
        random_socket = self.outputs['Random']

        # read inputs (first list of each socket)
        Coun = ins['Count'].sv_get(deepcopy=False)[0]
        Seed = ins['Seed'].sv_get(deepcopy=False)[0]
        Scale = ins['Scale'].sv_get(deepcopy=False, default=[])[0]

        if not random_socket.is_linked:
            return

        Random = []
        # seed = 0 is special for blender: it unsets the seed and uses system
        # time, making output unrepeatable. Substitute a fixed value
        # (originally generated with random.org) when the rounded seed is 0.
        for c, s, sc in zip(*match_long_repeat([Coun, Seed, Scale])):
            seed_set(int(round(s)) or 140230)
            Random.append([(random_unit_vector() * sc).to_tuple()
                           for _ in range(int(max(1, c)))])

        random_socket.sv_set(Random)
Beispiel #5
0
def get_offset(seed):
    """Return a deterministic random offset for a non-zero seed.

    seed == 0 yields a zero offset list; any other seed seeds the noise RNG
    and returns a random unit vector scaled by 10.
    """
    if seed == 0:
        return [0.0, 0.0, 0.0]
    noise.seed_set(seed)
    return noise.random_unit_vector() * 10.0
def grow_branch(context, branchidx, ivy, bvh=None):
    '''
    Grow one ivy branch and return the list of its point coordinates.

    Should have two branches maybe.
    Test if only the next coordinate is missing:
        if yes only calculate that one.
        Reduce computation per frame update

        if not recalc spline points from start
        as is done now
    '''
    opt = ivy.ivy
    # Per-branch seed; +1 keeps it non-zero for branch 0 when opt.seed == 0
    # (a seed of 0 would make mathutils.noise fall back to system time).
    seed_val = opt.seed + branchidx + 1
    seed(seed_val)
    noise.seed_set(seed_val)

    # GET BRANCH-NUMSTEPS FOR THIS FRAME
    if opt.animated:
        # this is fixed steps along animation range
        anim_frames_total = opt.end - opt.start
        anim_frame_current = context.scene.frame_current - opt.start
        numsteps = int((anim_frame_current / anim_frames_total) * opt.fixed_steps)
    else:
        numsteps = opt.fixed_steps

    # CUTOFF NUMSTEPS
    # Randomly shorten some branches: branches whose random draw exceeds the
    # cutoff threshold get their step count scaled down.
    cutoff = random()
    if opt.steps_random_cutoff <= cutoff:
        # cut this one off
        cutoffamount = (1 - cutoff) * opt.cutoffamount
        cutoff += cutoffamount
        numsteps = int(cutoff * numsteps)

    # Random start point inside the configured root area box.
    uvec = noise.random_unit_vector()
    start_co = Vector((uvec.x * opt.root_area.x,
                       uvec.y * opt.root_area.y,
                       uvec.z * opt.root_area.z))
    coords = [start_co]
    #free = [True]

    def recalc_all():
        # Grow step by step; freefloatinglength accumulates how far the
        # branch has grown without touching the collider (reset on contact).
        freefloatinglength = 0
        for step in range(numsteps):
            last_co = coords[-1]
            vec_grow = growvec(opt, coords, bvh, freefloatinglength)
            next_co = last_co + vec_grow
            if opt.has_collider:
                next_co, is_free = collision(opt, last_co, next_co, bvh)
                if is_free:
                    freefloatinglength += (last_co - next_co).length
                else:
                    freefloatinglength = 0
            else:
                freefloatinglength += (last_co - next_co).length

            coords.append(next_co)

    recalc_all()

    return coords
Beispiel #7
0
def vector_noise_multi_seed(params, noise_basis='PERLIN_ORIGINAL'):
    """Displace one vertex by vector noise, re-seeding the RNG per call.

    params: (vertex, seed, per-axis output scale, sampling matrix).
    A falsy seed is replaced by a fixed non-zero value (0 would unset the
    seed and make the output unrepeatable).
    """
    vert, seed_val, scale_out, matrix = params
    noise.seed_set(seed_val or 1385)
    base = Vector(vert)
    # sample noise in the matrix's space, then scale each axis independently
    nx, ny, nz = noise.noise_vector(matrix @ base, noise_basis=noise_basis)[:]
    return base + Vector((nx * scale_out[0],
                          ny * scale_out[1],
                          nz * scale_out[2]))
Beispiel #8
0
    def evaluate_grid(self, xs, ys, zs):
        """Evaluate vector noise at every (x, y, z) grid point.

        Returns three arrays (one per output component), produced by
        vectorizing the scalar sampler over the stacked coordinates.
        """
        noise.seed_set(self.seed)
        basis = self.noise_type

        def sample(v):
            # one noise vector per input point, split into components
            vec = noise.noise_vector(v, noise_basis=basis)
            return vec[0], vec[1], vec[2]

        grid = np.stack((xs, ys, zs)).T
        return np.vectorize(sample, signature="(3)->(),(),()")(grid)
Beispiel #9
0
def vector_noise_normal_multi_seed(params, noise_basis='PERLIN_ORIGINAL'):
    """Displace one vertex along its normal by scalar noise, seeding per call.

    params: (vertex, seed, per-axis output scale, sampling matrix, normal).
    A falsy seed is replaced by a fixed non-zero value (0 would unset the
    seed and make the output unrepeatable).
    """
    vert, seed_val, scale_out, matrix, normal = params
    base = Vector(vert)
    noise.seed_set(seed_val or 1385)
    # scalar noise magnitude pushed along the vertex normal
    amount = deepnoise(matrix @ base, noise_basis=noise_basis)
    offset = Vector(normal) * amount
    return base + Vector((offset[0] * scale_out[0],
                          offset[1] * scale_out[1],
                          offset[2] * scale_out[2]))
def export_ter(filepath):
    """Export a 64x64 noise-generated heightfield as a Terragen .ter file.

    filepath: output path without extension; '.ter' is appended.
    The file layout is the Terragen binary format: a 16-byte magic header
    followed by tagged chunks (SIZE, SCAL, ALTW, EOF).
    """
    start_time = time.process_time()
    filename = filepath + '.ter'
    # tags and values needed for the .ter file
    ter_header = 'TERRAGENTERRAIN '
    size_tag = 'SIZE'
    size = 64
    scal_tag = 'SCAL'
    scalx = 30.0
    scaly = 30.0
    scalz = 30.0
    altw_tag = 'ALTW'
    HeightScale = 80
    BaseHeight = 0
    # fixed seed so the exported terrain is reproducible
    noise.seed_set(123)
    # heights are packed as signed shorts (max 32767), so noise output in
    # [-1, 1] is remapped into [0, 32767]
    values = []
    for x in range(size + 1):
        for y in range(size + 1):
            vec = Vector((x, y, 0.0))
            out = int(map_range(noise.noise(vec, 3), -1.0, 1.0, 0.0, 32767.0))
            values.append(out)
    print(values)
    eof_tag = 'EOF'  # end of file tag

    with open(filename, "wb") as file:
        # write the header
        file.write(ter_header.encode('ascii'))
        # write the size of the terrain
        file.write(size_tag.encode('ascii'))
        file.write(struct.pack('h', size))
        # two padding bytes required after SIZE -> b'\x00\x00'
        file.write(struct.pack('xx'))
        # write the scale tag = SCAL, packing the scaling values as floats
        file.write(scal_tag.encode('ascii'))
        file.write(struct.pack('fff', scalx, scaly, scalz))
        # write the altitude ALTW tag, then heightScale and baseHeight
        file.write(altw_tag.encode('ascii'))
        file.write(struct.pack('h', HeightScale))
        file.write(struct.pack('h', BaseHeight))
        # pack the elevation values as shorts
        for v in values:
            file.write(struct.pack('h', v))
        # EOF = end of file
        file.write(eof_tag.encode('ascii'))
        # (redundant file.close() removed: the 'with' block closes the file)

    print('Terrain exported in %.4f sec.' % (time.process_time() - start_time))
Beispiel #11
0
    def sv_execute(self, context, node):
        """Refresh the node's stored values: copy from the linked input if
        one is wired, otherwise regenerate from the node's random seed."""
        input_name = 'Vertices' if node.number_type == 'vector' else 'Numbers'

        if node.inputs[input_name].is_linked:
            node.fill_from_input()
        else:
            # reproducible regeneration from the node's own seed
            seed_set(node.r_seed)
            node.fill_empty_dict()
        updateNode(node, context)
def setup_self(self, context):
    '''
    Setup:
    Set needed Values and prepare and store some data.
    '''
    # the ivy grows along the active curve object
    self.curve = context.active_object
    groups = bpy.data.groups
    # leafgroupname is a comma-separated list; keep only names of groups
    # that actually exist
    names = [n.strip() for n in self.leafgroupname.split(',')]
    self.dupli_groups = [groups[n] for n in names
                         if n in groups]

    #printd(self.dupli_groups)
    # seed both python's and mathutils' RNGs for reproducible growth
    seed(self.seed)
    noise.seed_set(self.seed)

    #this is only if the scripts are not together in a module
    #if 'curve_ivy_animated' in context.user_preferences.addons.keys():
    self.ivy_loaded = True
    #printd('Animated Ivy Addon loaded ok', self.ivy_loaded)
    self.ivyopt = self.curve.ivy
    #else:
    #self.ivy_loaded = False

    ### CHECK FOR COLLIDER
    # if exactly one other object is selected and it is a mesh, build a BVH
    # tree from it (transformed into the curve's local space) for collisions
    selected = context.selected_objects
    if len(selected) == 2:
        collider = [ob for ob in selected if ob != self.curve][-1]
        collider.select = False
        if collider.type == 'MESH':
            bm = bmesh.new()
            bm.from_object(collider, context.scene)
            # world space -> curve local space
            bm.transform(collider.matrix_world)
            bm.transform(self.curve.matrix_world.inverted())
            bvh = BVHTree()
            bvh = bvh.FromBMesh(bm)
            self.bvh = bvh
    else:
        self.bvh = None

    ### TAKE ANIMATION FROM GROWING IVY IF AVAILABLE
    if self.ivy_loaded:
        if self.ivyopt.added_as_ivy: #was indeed intended as growing ivy
            if self.ivyopt.animated:
                print('taking animation from ivy')
                self.animated = self.ivyopt.animated
                self.start = self.ivyopt.start
                self.end = self.ivyopt.end

    #if no leafgroup found create simple leaf
    # NOTE(review): fallback leaf creation is not implemented yet
    if not self.dupli_groups:
        pass
def setup_self(self, context):
    '''
    Setup:
    Set needed Values and prepare and store some data.
    '''
    # the ivy grows along the active curve object
    self.curve = context.active_object
    groups = bpy.data.groups
    # leafgroupname is a comma-separated list; keep only existing groups
    names = [n.strip() for n in self.leafgroupname.split(',')]
    self.dupli_groups = [groups[n] for n in names if n in groups]

    #printd(self.dupli_groups)
    # seed both python's and mathutils' RNGs for reproducible growth
    seed(self.seed)
    noise.seed_set(self.seed)

    #this is only if the scripts are not together in a module
    #if 'curve_ivy_animated' in context.user_preferences.addons.keys():
    self.ivy_loaded = True
    #printd('Animated Ivy Addon loaded ok', self.ivy_loaded)
    self.ivyopt = self.curve.ivy
    #else:
    #self.ivy_loaded = False

    ### CHECK FOR COLLIDER
    # if exactly one other object is selected and it is a mesh, build a BVH
    # tree from it (transformed into the curve's local space) for collisions
    selected = context.selected_objects
    if len(selected) == 2:
        collider = [ob for ob in selected if ob != self.curve][-1]
        collider.select = False
        if collider.type == 'MESH':
            bm = bmesh.new()
            bm.from_object(collider, context.scene)
            # world space -> curve local space
            bm.transform(collider.matrix_world)
            bm.transform(self.curve.matrix_world.inverted())
            bvh = BVHTree()
            bvh = bvh.FromBMesh(bm)
            self.bvh = bvh
    else:
        self.bvh = None

    ### TAKE ANIMATION FROM GROWING IVY IF AVAILABLE
    if self.ivy_loaded:
        if self.ivyopt.added_as_ivy:  #was indeed intended as growing ivy
            if self.ivyopt.animated:
                print('taking animation from ivy')
                self.animated = self.ivyopt.animated
                self.start = self.ivyopt.start
                self.end = self.ivyopt.end

    #if no leafgroup found create simple leaf
    # NOTE(review): fallback leaf creation is not implemented yet
    if not self.dupli_groups:
        pass
Beispiel #14
0
    def execute(self, context):
        """Operator entry point: refresh the target node's stored values."""
        # resolve the node from the operator's tree/node name properties
        node = bpy.data.node_groups[self.idtree].nodes[self.idname]
        target = 'Vertices' if node.number_type == 'vector' else 'Numbers'

        if node.inputs[target].is_linked:
            node.fill_from_input()
        else:
            # reproducible regeneration from the node's own seed
            seed_set(node.r_seed)
            node.fill_empty_dict()
        updateNode(node, context)
        return {'FINISHED'}
Beispiel #15
0
    def sv_execute(self, context, node):
        """Run one evolution cycle for the evolver node.

        Requires the fitness input (socket 0) to be linked; otherwise the
        node is flagged and nothing runs.
        """
        if not node.inputs[0].is_linked:
            node.info_label = "Stopped - Fitness not linked"
            return

        tree = node.id_data

        genotype_frame = node.genotype
        evolver_mem[node.node_id] = {}

        # seed both Blender's and numpy's RNGs so evolution is reproducible
        seed_set(node.r_seed)
        np.random.seed(node.r_seed)

        Population(genotype_frame, node, tree).evolve()
        node.process_node(None)
Beispiel #16
0
    def execute(self, context):
        """Operator entry point: refresh the node's stored values, or
        cancel when the node can no longer be resolved."""
        node = self.get_node(context)
        if not node:
            return {'CANCELLED'}

        socket = 'Vertices' if node.number_type == 'vector' else 'Numbers'
        if node.inputs[socket].is_linked:
            node.fill_from_input()
        else:
            # reproducible regeneration from the node's own seed
            seed_set(node.r_seed)
            node.fill_empty_dict()
        updateNode(node, context)
        return {'FINISHED'}
Beispiel #17
0
    def sv_execute(self, context, node):
        """Run one evolution cycle and propagate updates through the tree.

        Bug fix: `tree` was referenced below but never assigned, raising
        NameError at runtime; it is the node group that owns this node
        (same as the sibling implementation using `node.id_data`).
        """
        if not node.inputs[0].is_linked:
            node.info_label = "Stopped - Fitness not linked"
            return

        tree = node.id_data  # the node group containing this node

        genotype_frame = node.genotype
        evolver_mem[node.node_id] = {}

        # seed both Blender's and numpy's RNGs so evolution is reproducible
        seed_set(node.r_seed)
        np.random.seed(node.r_seed)

        population = Population(genotype_frame, node, tree)
        population.evolve()
        update_list = make_tree_from_nodes([node.name], tree)
        do_update(update_list, tree.nodes)
Beispiel #18
0
    def process(self):
        """Apply noise to the input vertex lists and write the result.

        Chooses between a numpy-based noise implementation (when the selected
        noise type has one) and mathutils.noise otherwise.
        """
        inputs, outputs = self.inputs, self.outputs

        # nothing to do unless both the output and the vertex input are wired
        if not (outputs[0].is_linked and inputs[0].is_linked):
            return

        out = []
        verts = inputs['Vertices'].sv_get(deepcopy=False)
        noise_matrix = inputs['Noise Matrix'].sv_get(deepcopy=False,
                                                     default=[])

        seeds = inputs['Seed'].sv_get()[0]
        noise_type = self.noise_type
        # numpy implementations live in noise_numpy_types; anything else
        # goes through mathutils.noise in the else-branch below
        numpy_mode = noise_type in noise_numpy_types.keys()
        if noise_matrix:
            verts = preprocess_verts(noise_matrix, verts, numpy_mode)

        # iterate to the longer of (seeds, verts); the shorter list
        # repeats its last element via the min() indexing below
        max_len = max(map(len, (seeds, verts)))

        out_mode = self.out_mode
        output_numpy = self.output_numpy

        if numpy_mode:

            noise_function = noise_numpy_types[noise_type][self.interpolate]
            smooth = self.smooth
            for i in range(max_len):
                seed = seeds[min(i, len(seeds) - 1)]
                obj_id = min(i, len(verts) - 1)
                numpy_noise(verts[obj_id], out, out_mode, seed, noise_function,
                            smooth, output_numpy)

        else:

            noise_function = noise.noise_vector

            for i in range(max_len):
                seed = seeds[min(i, len(seeds) - 1)]
                obj_id = min(i, len(verts) - 1)
                # 0 unsets the seed and generates unreproducable output based on system time
                seed_val = int(round(seed)) or 140230
                noise.seed_set(seed_val)
                mathulis_noise(verts[obj_id], out, out_mode, noise_type,
                               noise_function, output_numpy)

        outputs[0].sv_set(out)
Beispiel #19
0
    def process(self):
        """Emit lists of scaled random unit vectors on the 'Random' output.

        Each of Count/Seed/Scale is read from its socket only when that
        socket is linked to a StringsSocket; otherwise the node's own
        property supplies a single-element default list.
        """
        # inputs
        if (
            "Count" in self.inputs
            and self.inputs["Count"].links
            and type(self.inputs["Count"].links[0].from_socket) == bpy.types.StringsSocket
        ):
            Coun = SvGetSocketAnyType(self, self.inputs["Count"])[0]
        else:
            Coun = [self.count_inner]

        if (
            "Seed" in self.inputs
            and self.inputs["Seed"].links
            and type(self.inputs["Seed"].links[0].from_socket) == bpy.types.StringsSocket
        ):
            Seed = SvGetSocketAnyType(self, self.inputs["Seed"])[0]
        else:
            Seed = [self.seed]
        if (
            "Scale" in self.inputs
            and self.inputs["Scale"].links
            and type(self.inputs["Scale"].links[0].from_socket) == bpy.types.StringsSocket
        ):
            Scale = self.inputs["Scale"].sv_get(deepcopy=False, default=[])[0]
        else:
            Scale = [self.scale]
        # outputs
        if "Random" in self.outputs and self.outputs["Random"].links:
            Random = []
            # shorter parameter lists repeat their last element
            param = match_long_repeat([Coun, Seed, Scale])
            # set seed, protect against float input
            # seed = 0 is special value for blender which unsets the seed value
            # and starts to use system time making the random values unrepeatable.
            # So when seed = 0 we use a random value far from 0, generated used random.org
            for c, s, sc in zip(*param):
                int_seed = int(round(s))
                if int_seed:
                    seed_set(int_seed)
                else:
                    seed_set(140230)

                Random.append([(random_unit_vector() * sc).to_tuple() for i in range(int(max(1, c)))])

            SvSetSocketAnyType(self, "Random", Random)
Beispiel #20
0
    def execute(self, context):
        """Operator entry point: run one evolver cycle on the target tree
        and propagate updates through its nodes."""
        tree = bpy.data.node_groups[self.idtree]
        node = tree.nodes[self.idname]

        if not node.inputs[0].is_linked:
            node.info_label = "Stopped - Fitness not linked"
            return {'FINISHED'}

        evolver_mem[node.node_id] = {}
        genotype_frame = node.genotype

        # seed both Blender's and numpy's RNGs so evolution is reproducible
        seed_set(node.r_seed)
        np.random.seed(node.r_seed)

        Population(genotype_frame, node, tree).evolve()
        do_update(make_tree_from_nodes([node.name], tree), tree.nodes)
        return {'FINISHED'}
Beispiel #21
0
    def process(self):
        """Output the cached gene values, rebuilding the cache on a miss.

        Cache order on miss: first try persisted text memory, then fall
        back to regenerating from the node's random seed.
        """
        if self.node_id not in self.node_mem:
            text_memory = self.check_memory_prop()
            if text_memory:
                self.node_mem[self.node_id] = text_memory
            else:
                # no persisted state: regenerate deterministically
                seed_set(self.r_seed)
                self.fill_empty_dict()
        genes_out = self.node_mem[self.node_id]

        socket = 'Vertices' if self.number_type == 'vector' else 'Numbers'
        self.outputs[socket].sv_set([genes_out])
Beispiel #22
0
    def turbulence(vec, oct, freq, rseed):
        """Sum `oct` octaves of absolute Perlin noise at `vec`.

        Each octave doubles the frequency and weights the sample by 1/freq.
        `oct` shadows the builtin of the same name but is kept for caller
        compatibility. Removed: unused `sndata` local.
        """
        noise_type = noise.types.STDPERLIN
        # set the seed (note: reportedly has no effect on blender's noise)
        noise.seed_set(rseed)
        value = 0.0

        for _ in range(oct):
            freq *= 2.0
            value += abs(noise.noise(Vector(vec) * freq, noise_type)) / freq

        return value
Beispiel #23
0
def noise_displace(params, constant, matching_f):
    """Displace vertex lists with a noise function.

    params: per-object lists (verts, polygons, seeds, output scales,
    matrices), length-matched by `matching_f`.
    constant: (noise_function, noise_type, match_mode) shared settings.
    Returns the accumulated results flattened via Vector_degenerate.
    """
    result = []
    noise_function, noise_type, match_mode = constant
    params = matching_f(params)
    local_match = iter_list_match_func[match_mode]
    for props in zip(*params):
        verts, pols, seed_val, scale_out, matrix = props
        # matrices are inverted so noise is sampled in each matrix's space
        if type(matrix) == list:
            matrix = [m.inverted() for m in matrix]
        else:
            matrix = [matrix.inverted()]
        if len(seed_val) > 1:
            # multiple seeds: pass the seed list through to the matcher
            # (presumably noise_function seeds per item — confirm)
            m_prop = local_match([seed_val, scale_out, matrix])
        else:
            m_prop = local_match([scale_out, matrix])
            seed_val = seed_val[0]
            # seed 0 would unset blender's RNG (system time); use a fixed
            # non-zero fallback so output stays reproducible
            noise.seed_set(int(seed_val) if seed_val else 1385)
        noise_function(verts, pols, m_prop, noise_type, result)

    return Vector_degenerate(result)
def randnum(low=0.0, high=1.0, seed=0):
    """Return a random float in the range [low, high).

    low: lower bound (float)
    high: upper bound (float)
    seed: RNG seed (int); 0 makes the generator use the current time,
    so the result is not reproducible.
    """
    Noise.seed_set(seed)
    return Noise.random() * (high - low) + low
def randnum(low=0.0, high=1.0, seed=0):
    """Create a random number.

    low: lower bound (float)
    high: upper bound (float)
    seed: RNG seed (int); 0 means the current time is used instead,
    making the result unreproducible.
    Returns a random float in [low, high).
    """
    Noise.seed_set(seed)
    span = high - low
    sample = Noise.random()
    return low + sample * span
Beispiel #26
0
 def update(self):
     """Legacy update(): emit lists of random unit vectors on 'Random'.

     Count/Seed are read by eval()-ing the upstream StringsSocket's
     serialized property when linked, else from the node's own values.
     """
     # inputs
     if 'Count' in self.inputs and len(self.inputs['Count'].links)>0 and \
         type(self.inputs['Count'].links[0].from_socket) == bpy.types.StringsSocket:
         if not self.inputs['Count'].node.socket_value_update:
             self.inputs['Count'].node.update()
         # NOTE(review): eval() on serialized socket data is the legacy
         # transport mechanism here; it would be unsafe on untrusted input
         Coun = eval(self.inputs['Count'].links[0].from_socket.StringsProperty)
     else:
         Coun = [[self.count_inner]]

     if 'Seed' in self.inputs and len(self.inputs['Seed'].links)>0 and \
          type(self.inputs['Seed'].links[0].from_socket) == bpy.types.StringsSocket:
         if not self.inputs['Seed'].node.socket_value_update:
             self.inputs['Seed'].node.update()

         Seed = eval(self.inputs['Seed'].links[0].from_socket.StringsProperty)[0][0]
     else:
         Seed = self.seed


     # outputs
     if 'Random' in self.outputs and len(self.outputs['Random'].links)>0:
         Random = []
         # set seed, protect against float input
         # seed = 0 is special value for blender which unsets the seed value
         # and starts to use system time making the random values unrepeatable.
         # So when seed = 0 we use a random value far from 0, generated used random.org
         int_seed = int(round(Seed))
         if int_seed:
             seed_set(int_seed)
         else:
             seed_set(140230)

         # Coun[0], only takes first list
         for number in Coun[0]:
             if number > 0:
                 Random.append( [random_unit_vector().to_tuple() \
                                     for i in range(int(number))])
         SvSetSocketAnyType(self,'Random',Random)
    def process(self):
        """Emit lists of scaled random unit vectors on the 'Random' output.

        Count/Seed/Scale come from their sockets when linked to a
        StringsSocket, otherwise from the node's own properties.
        """
        def strings_linked(name):
            # True when the named input exists and is fed by a StringsSocket.
            if name not in self.inputs:
                return False
            links = self.inputs[name].links
            return bool(links) and type(links[0].from_socket) == bpy.types.StringsSocket

        # inputs
        if strings_linked('Count'):
            Coun = SvGetSocketAnyType(self, self.inputs['Count'])[0]
        else:
            Coun = [self.count_inner]

        if strings_linked('Seed'):
            Seed = SvGetSocketAnyType(self, self.inputs['Seed'])[0]
        else:
            Seed = [self.seed]

        if strings_linked('Scale'):
            Scale = self.inputs['Scale'].sv_get(deepcopy=False, default=[])[0]
        else:
            Scale = [self.scale]

        # outputs
        if 'Random' not in self.outputs or not self.outputs['Random'].links:
            return

        Random = []
        # seed = 0 is special for blender: it unsets the seed and uses system
        # time, making output unrepeatable. Substitute a fixed value
        # (originally generated with random.org) when the rounded seed is 0.
        for c, s, sc in zip(*match_long_repeat([Coun, Seed, Scale])):
            seed_set(int(round(s)) or 140230)
            Random.append([(random_unit_vector() * sc).to_tuple()
                           for _ in range(int(max(1, c)))])

        SvSetSocketAnyType(self, 'Random', Random)
Beispiel #28
0
    def process(self):
        """Rebuild the 2D viewer drawing for this node.

        Disables the previous draw callback, gathers geometry/colors from
        the inputs according to the current mode, and registers a new
        draw callback with the generated geometry.
        """
        n_id = node_id(self)
        # always drop the old callback first; it is re-registered at the end
        nvBGL.callback_disable(n_id)
        inputs = self.inputs
        # end early
        if not self.activate:
            return

        # per-mode data source; bail out if the relevant input is unlinked
        if self.mode == 'Number':
            if not inputs['Number'].is_linked:
                return
            numbers = inputs['Number'].sv_get(default=[[]])
        elif self.mode == 'Curve':
            if not inputs['Curve'].is_linked:
                return
            curves = inputs['Curve'].sv_get(default=[[]])
        else:
            if not inputs['Vecs'].is_linked:
                return
            vecs = inputs['Vecs'].sv_get(default=[[]])

        edges = inputs['Edges'].sv_get(default=[[]])
        polygons = inputs['Polygons'].sv_get(default=[[]])
        vector_color = inputs['Vector Color'].sv_get(default=[[self.vector_color]])
        edge_color = inputs['Edge Color'].sv_get(default=[[self.edge_color]])
        poly_color = inputs['Polygon Color'].sv_get(default=[[self.polygon_color]])
        # seeded so any randomized layout/coloring in the config is repeatable
        seed_set(self.random_seed)
        x, y, config = self.create_config()

        config.vector_color = vector_color
        config.edge_color = edge_color
        config.poly_color = poly_color
        config.edges = edges


        if self.mode == 'Number':
            config.size = self.drawing_size
            geom = generate_number_geom(config, numbers)
        elif self.mode == 'Path':
            # NOTE(review): 'Path' is not one of the modes handled above;
            # vecs comes from the fall-through branch — confirm mode set
            geom = generate_graph_geom(config, vecs)
        elif self.mode == 'Curve':
            paths = []
            for curve in curves:
                # sample each curve uniformly over its parameter range
                t_min, t_max = curve.get_u_bounds()
                ts = np_linspace(t_min, t_max, num=self.curve_samples, dtype=np_float64)
                paths.append(curve.evaluate_array(ts).tolist())

            geom = generate_graph_geom(config, paths)

        else:
            config.polygons = polygons
            # derive edges from polygons when none are wired but edge drawing is on
            if not inputs['Edges'].is_linked and self.edge_toggle:
                config.edges = polygons_to_edges(polygons, unique_edges=True)

            geom = generate_mesh_geom(config, vecs)


        draw_data = {
            'mode': 'custom_function',
            'tree_name': self.id_data.name[:],
            'loc': (x, y),
            'custom_function': view_2d_geom,
            'args': (geom, config)
        }
        nvBGL.callback_enable(n_id, draw_data)
Beispiel #29
0
    def execute(self, context):
        depsgraph = bpy.context.depsgraph
        ob = bpy.context.active_object
        obj_eval = depsgraph.objects.get(ob.name, None)

        # particleObj = context.active_object
        particleObj = obj_eval
        if bpy.context.active_object.particle_systems is None:  # create new one
            self.report({'INFO'}, 'No active Particle Hair System found!')
            return {"CANCELLED"}
        index = particleObj.particle_systems.active_index
        psys_active = particleObj.particle_systems[index]
        if psys_active.settings.type != 'HAIR':  # create new one
            self.report({'INFO'},
                        'Active Particle System is not Hair type! Cancelling')
            return {"CANCELLED"}
        pointsList_hair = []
        context.scene.update()
        if len(psys_active.particles) == 0:  # require more that three strands
            self.report({'INFO'},
                        'Active Particle System has zero strands! Cancelling')
            return {"CANCELLED"}
        diagonal = sqrt(
            pow(particleObj.dimensions[0], 2) +
            pow(particleObj.dimensions[1], 2) +
            pow(particleObj.dimensions[2], 2))  # to normalize some values
        for particle in psys_active.particles:  # for strand point
            pointsList_hair.append([
                hair_key.co for hair_key in particle.hair_keys
            ])  # DONE: exclude duplicates if first strand[0] in list already
        if len(psys_active.particles
               ) == 1:  #create two fake strands so that barycentric works
            pointsList_hair.append([
                x.xyz + Vector((0.01 * diagonal, 0, 0))
                for x in pointsList_hair[0]
            ])
            pointsList_hair.append([
                x.xyz + Vector((0, 0.01 * diagonal, 0))
                for x in pointsList_hair[0]
            ])
        elif len(psys_active.particles
                 ) == 2:  #create one fake strands so that barycentric works
            pointsList_hair.append([
                x.xyz + Vector((0.01 * diagonal, 0, 0))
                for x in pointsList_hair[0]
            ])
        pointsList_uniq = []
        [
            pointsList_uniq.append(x) for x in pointsList_hair
            if x not in pointsList_uniq
        ]  #removing doubles (can cause zero size tris)

        #same_point_count cos barycentric transform requires it
        pointsList = interpol_Catmull_Rom(
            pointsList_uniq,
            self.t_in_y,
            uniform_spacing=True,
            same_point_count=True)  # just gives smoother result on borders

        searchDistance = 100 * diagonal
        parentRoots = [strand[0]
                       for strand in pointsList]  # first point of roots
        #create nnew Part Sytem with uniform points
        pointsChildRoots = self.createUniformParticleSystem(
            context, self.childCount, self.PlacementJittering,
            self.Seed)  # return child part roots positions

        kd = kdtree.KDTree(len(parentRoots))
        for i, root in enumerate(parentRoots):
            kd.insert(root, i)
        kd.balance()
        sourceSurface_BVHT = BVHTree.FromObject(particleObj, context.depsgraph)
        childStrandsPoints = []  #will contain strands with child points
        childStrandRootNormals = []
        length_ver_group_index = -1
        vertex_group_length_name = psys_active.vertex_group_length
        if vertex_group_length_name:  # calc weight based on root point
            length_ver_group_index = particleObj.vertex_groups[
                vertex_group_length_name].index
        particleObjMesh = particleObj.to_mesh(context.depsgraph,
                                              apply_modifiers=True,
                                              calc_undeformed=False)
        seed(a=self.lenSeed, version=2)
        embed = self.embed * 0.04 * diagonal
        cpow = calc_power(self.noiseFalloff)
        cpowClump = calc_power(self.ClumpingFalloff)
        noiseFalloff = [pow(i / self.t_in_y, cpow) for i in range(self.t_in_y)]
        ClumpFalloff = [
            pow((i + 1) / self.t_in_y, cpowClump) for i in range(self.t_in_y)
        ]

        for i, childRoot in enumerate(
                pointsChildRoots
        ):  #for each child find it three parents and genereate strands by barycentric transform
            snappedPoint, normalChildRoot, rootHitIndex, distance = sourceSurface_BVHT.find_nearest(
                childRoot, searchDistance)
            childStrandRootNormals.append(normalChildRoot)
            threeClosestParentRoots = kd.find_n(
                childRoot, 3)  #find three closes parent roots
            rootTri_co, ParentRootIndices, distances = zip(
                *threeClosestParentRoots)  #split it into 3 arrays
            sourceTri_BVHT = BVHTree.FromPolygons(
                rootTri_co, [(0, 1, 2)],
                all_triangles=True)  # [0,1,2] - polygon == vert indices list
            childRootSnapped, normalChildProjected, index, distance = sourceTri_BVHT.find_nearest(
                childRoot, searchDistance
            )  #snap generated child to parent triangle ares \normals are sometimes flipped
            childRootSnapped2, normalChildProjected2, index2, distance2 = sourceSurface_BVHT.find_nearest(
                childRootSnapped,
                searchDistance)  #this gives ok normals always

            lenWeight = 1
            if length_ver_group_index != -1:  # if vg exist
                averageWeight = 0
                for vertIndex in particleObjMesh.polygons[
                        rootHitIndex].vertices:  #DONE: check if work on mesh with modifiers
                    for group in particleObjMesh.vertices[vertIndex].groups:
                        if group.group == length_ver_group_index:
                            averageWeight += group.weight
                            break
                lenWeight = averageWeight / len(
                    particleObjMesh.polygons[rootHitIndex].vertices)
            ranLen = uniform(-self.RandomizeLengthMinus,
                             self.RandomizeLengthPlus)
            lenWeight *= (1 + ranLen)
            # diff = childRoot - childRootSnapped
            # mat_loc = Matrix.Translation(childRootSnapped)
            # matTriangleSpaceInv = mat_loc #* rotMatrix
            # matTriangleSpaceInv.invert()
            rotQuat = normalChildProjected2.rotation_difference(
                normalChildRoot)
            translationMatrix = Matrix.Translation(childRoot)
            rotMatrixRot = rotQuat.to_matrix().to_4x4()
            mat_sca = Matrix.Scale(lenWeight, 4)
            transformMatrix = translationMatrix @ rotMatrixRot
            strandPoints = []
            #for childRootSnapped points transform them from parent root triangles to parent next segment triangle t1,t2,t3
            # and compensate child snapping to root triangle from before
            for j, (t1, t2, t3) in enumerate(
                    zip(pointsList[ParentRootIndices[0]],
                        pointsList[ParentRootIndices[1]],
                        pointsList[ParentRootIndices[2]])):
                pointTransformed = barycentric_transform(
                    childRootSnapped, rootTri_co[0], rootTri_co[1],
                    rootTri_co[2], Vector(t1), Vector(t2), Vector(t3))
                childInterpolatedPoint = transformMatrix @ mat_sca @ (
                    pointTransformed - childRootSnapped
                )  #rotate child strand to original pos (from before snapt)
                #do noise
                noise.seed_set(self.Seed + i)  # add seed per strand/ring ?
                noiseVectorPerStrand = noise.noise_vector(
                    childInterpolatedPoint * self.freq / diagonal,
                    noise_basis='PERLIN_ORIGINAL'
                ) * noiseFalloff[j] * self.noiseAmplitude * diagonal / 10
                # childInterpolatedPoint += noiseVectorPerStrand

                #do clumping
                diff = Vector(
                    t1
                ) - childInterpolatedPoint  # calculate distance to parent strand (first strand from trio)
                # point += noiseVectorPerStrand * noiseFalloff[j] * self.noiseAmplitude * diagonal / 10
                # childClumped = childInterpolatedPoint + ClumpFalloff[j] * self.Clumping * diff + noiseVectorPerStrand * (1-ClumpFalloff[j])
                childClumped = childInterpolatedPoint + ClumpFalloff[
                    j] * self.Clumping * diff + noiseVectorPerStrand * (
                        1 - ClumpFalloff[j] * self.Clumping)
                # childClumped = childInterpolatedPoint + noiseVectorPerStrand

                strandPoints.append(childClumped)
            # embeding roots
            diff = strandPoints[0] - strandPoints[1]
            diff.normalize()
            normalWeight = abs(diff.dot(normalChildRoot))
            strandPoints[0] += (
                diff * normalWeight - normalChildRoot * (1 - normalWeight)
            ) * embed  # do childStrandRootNormal to move it more into mesh surface
            childStrandsPoints.append(strandPoints)

        bpy.data.meshes.remove(particleObjMesh)
        # create the Curve Datablock
        curveData = bpy.data.curves.new(particleObj.name + '_curve',
                                        type='CURVE')

        splinePointsNp = np.array(childStrandsPoints, dtype=np.float32)
        if self.hairType != 'BEZIER':
            splinePointsNpOnes = np.ones(
                (len(childStrandsPoints), self.t_in_y, 4),
                dtype=np.float32)  # 4 coord x,y,z ,1
            splinePointsNpOnes[:, :, :-1] = splinePointsNp
            splinePointsNp = splinePointsNpOnes
        for strandPoints in splinePointsNp:  # for strand point
            curveLength = len(strandPoints)
            polyline = curveData.splines.new(self.hairType)
            if self.hairType == 'BEZIER':
                polyline.bezier_points.add(curveLength - 1)
            elif self.hairType == 'POLY' or self.hairType == 'NURBS':
                polyline.points.add(curveLength - 1)
            if self.hairType == 'NURBS':
                polyline.order_u = 3  # like bezier thing
                polyline.use_endpoint_u = True

            if self.hairType == 'BEZIER':
                # polyline.bezier_points.co = (x, y, z)
                polyline.bezier_points.foreach_set("co", strandPoints.ravel())
                polyline.bezier_points.foreach_set('handle_left_type', 'AUTO')
                polyline.bezier_points.foreach_set('handle_right_type', 'AUTO')
            else:
                polyline.points.foreach_set("co", strandPoints.ravel())
                # polyline.points[i].co = (x, y, z, 1)
        curveData.resolution_u = self.strandResU
        curveData.dimensions = '3D'
        # create Object
        curveOB = bpy.data.objects.new(particleObj.name + '_curve', curveData)
        curveOB.matrix_world = particleObj.matrix_world
        scn = context.scene
        scn.collection.objects.link(curveOB)
        curveOB.targetObjPointer = particleObj.name  # store source surface for snapping oper
        context.view_layer.objects.active = curveOB
        curveOB.select_set(True)
        # curveOB.data.show_normal_face = False
        if self.generateRibbons:
            bpy.ops.object.generate_ribbons(strandResU=self.strandResU,
                                            strandResV=self.strandResV,
                                            strandWidth=self.strandWidth,
                                            strandPeak=self.strandPeak,
                                            strandUplift=self.strandUplift,
                                            alignToSurface=self.alignToSurface)
            HT_OT_CurvesUVRefresh.uvCurveRefresh(curveOB)
            context.view_layer.objects.active = particleObj
        else:
            curveData.fill_mode = 'FULL'
            curveData.bevel_depth = 0.004 * diagonal
            curveData.bevel_resolution = 2
            bpy.ops.object.curve_taper(TipRadiusFalloff=self.RadiusFalloff,
                                       TipRadius=self.TipRadius,
                                       MainRadius=self.Radius)
        return {"FINISHED"}
Beispiel #30
0
def generate_random_unitvectors():
    """Return six reproducible random unit vectors.

    The noise RNG is seeded with a fixed constant first, so every call
    yields the same six directions; callers filter them later.
    """
    # NOTE: many more directions may be needed to increase accuracy.
    seed_set(140230)
    directions = []
    for _ in range(6):
        directions.append(random_unit_vector())
    return directions
Beispiel #31
0
def noise_gen(coords, props):
    """Return a single terrain height value for the point ``coords``.

    ``props`` is a positional 65-element sequence of landscape settings
    (noise type/basis, fractal parameters, effect mix, falloff, strata,
    clamping, ...) that is unpacked by index below.  The function builds
    noise-space coordinates from the seed/offsets, dispatches to the
    selected noise function, applies the effect mix, height scaling,
    edge falloff and strata shaping, then clamps the result to
    ``[minimum, maximum]``.

    Returns:
        float: the final height value at ``coords``.
    """

    # Unpack the property list; the index order is fixed by the caller.
    terrain_name = props[0]
    cursor = props[1]
    smooth = props[2]
    triface = props[3]
    sphere = props[4]
    land_mat = props[5]
    water_mat = props[6]
    texture_name = props[7]
    subd_x = props[8]
    subd_y = props[9]
    meshsize_x = props[10]
    meshsize_y = props[11]
    meshsize = props[12]
    rseed = props[13]
    x_offset = props[14]
    y_offset = props[15]
    z_offset = props[16]
    size_x = props[17]
    size_y = props[18]
    size_z = props[19]
    nsize = props[20]
    ntype = props[21]
    nbasis = props[22]
    vlbasis = props[23]
    distortion = props[24]
    hardnoise = int(props[25])
    depth = props[26]
    amp = props[27]
    freq = props[28]
    dimension = props[29]
    lacunarity = props[30]
    offset = props[31]
    gain = props[32]
    marblebias = int(props[33])
    marblesharpnes = int(props[34])
    marbleshape = int(props[35])
    height = props[36]
    height_invert = props[37]
    height_offset = props[38]
    maximum = props[39]
    minimum = props[40]
    falloff = int(props[41])
    edge_level = props[42]
    falloffsize_x = props[43]
    falloffsize_y = props[44]
    stratatype = props[45]
    strata = props[46]
    addwater = props[47]
    waterlevel = props[48]
    vert_group = props[49]
    remove_double = props[50]
    fx_mixfactor = props[51]
    fx_mix_mode = props[52]
    fx_type = props[53]
    fx_bias = props[54]
    fx_turb = props[55]
    fx_depth = props[56]
    fx_frequency = props[57]
    fx_amplitude = props[58]
    fx_size = props[59]
    fx_loc_x = props[60]
    fx_loc_y = props[61]
    fx_height = props[62]
    fx_offset = props[63]
    fx_invert = props[64]

    x, y, z = coords

    # Origin
    if rseed == 0:
        # Seed 0 means "no randomisation": use the user offsets directly.
        origin = x_offset, y_offset, z_offset
        origin_x = x_offset
        origin_y = y_offset
        origin_z = z_offset
        o_range = 1.0
    else:
        # Randomise origin
        o_range = 100
        seed_set(rseed)
        origin = random_unit_vector()
        ox = (origin[0] * o_range)
        oy = (origin[1] * o_range)
        # oz stays 0 here, so the random origin only shifts x/y —
        # NOTE(review): confirm this asymmetry is intentional.
        oz = 0
        origin_x = (ox - (ox * 0.5)) + x_offset
        origin_y = (oy - (oy * 0.5)) + y_offset
        origin_z = oz + z_offset

    # Scale world coordinates into noise space and apply the origin.
    ncoords = (x / (nsize * size_x) + origin_x,
               y / (nsize * size_y) + origin_y,
               z / (nsize * size_z) + origin_z)

    # Noise type's
    # ntype may arrive as an int index or its string name; each branch
    # accepts both.  Scale factors (0.5 / 0.25) normalise output ranges.
    if ntype in [0, 'multi_fractal']:
        value = multi_fractal(
            ncoords, dimension, lacunarity, depth, noise_basis=nbasis) * 0.5

    elif ntype in [1, 'ridged_multi_fractal']:
        value = ridged_multi_fractal(ncoords,
                                     dimension,
                                     lacunarity,
                                     depth,
                                     offset,
                                     gain,
                                     noise_basis=nbasis) * 0.5

    elif ntype in [2, 'hybrid_multi_fractal']:
        value = hybrid_multi_fractal(ncoords,
                                     dimension,
                                     lacunarity,
                                     depth,
                                     offset,
                                     gain,
                                     noise_basis=nbasis) * 0.5

    elif ntype in [3, 'hetero_terrain']:
        value = hetero_terrain(
            ncoords, dimension, lacunarity, depth, offset,
            noise_basis=nbasis) * 0.25

    elif ntype in [4, 'fractal']:
        value = fractal(ncoords,
                        dimension,
                        lacunarity,
                        depth,
                        noise_basis=nbasis)

    elif ntype in [5, 'turbulence_vector']:
        value = turbulence_vector(ncoords,
                                  depth,
                                  hardnoise,
                                  noise_basis=nbasis,
                                  amplitude_scale=amp,
                                  frequency_scale=freq)[0]

    elif ntype in [6, 'variable_lacunarity']:
        value = variable_lacunarity(ncoords,
                                    distortion,
                                    noise_type1=nbasis,
                                    noise_type2=vlbasis)

    elif ntype in [7, 'marble_noise']:
        value = marble_noise(
            (ncoords[0] - origin_x + x_offset),
            (ncoords[1] - origin_y + y_offset),
            (ncoords[2] - origin_z + z_offset),
            (origin[0] + x_offset, origin[1] + y_offset, origin[2] + z_offset),
            nsize, marbleshape, marblebias, marblesharpnes, distortion, depth,
            hardnoise, nbasis, amp, freq)
    elif ntype in [8, 'shattered_hterrain']:
        value = shattered_hterrain(ncoords, dimension, lacunarity, depth,
                                   offset, distortion, nbasis)

    elif ntype in [9, 'strata_hterrain']:
        value = strata_hterrain(ncoords, dimension, lacunarity, depth, offset,
                                distortion, nbasis)

    elif ntype in [10, 'ant_turbulence']:
        value = ant_turbulence(ncoords, depth, hardnoise, nbasis, amp, freq,
                               distortion)

    elif ntype in [11, 'vl_noise_turbulence']:
        value = vl_noise_turbulence(ncoords, distortion, depth, nbasis,
                                    vlbasis, hardnoise, amp, freq)

    elif ntype in [12, 'vl_hTerrain']:
        value = vl_hTerrain(ncoords, dimension, lacunarity, depth, offset,
                            nbasis, vlbasis, distortion)

    elif ntype in [13, 'distorted_heteroTerrain']:
        value = distorted_heteroTerrain(ncoords, dimension, lacunarity, depth,
                                        offset, distortion, nbasis, vlbasis)

    elif ntype in [14, 'double_multiFractal']:
        value = double_multiFractal(ncoords, dimension, lacunarity, depth,
                                    offset, gain, nbasis, vlbasis)

    elif ntype in [15, 'rocks_noise']:
        value = rocks_noise(ncoords, depth, hardnoise, nbasis, distortion)

    elif ntype in [16, 'slick_rock']:
        value = slick_rock(ncoords, dimension, lacunarity, depth, offset, gain,
                           distortion, nbasis, vlbasis)

    elif ntype in [17, 'planet_noise']:
        value = planet_noise(ncoords, depth, hardnoise, nbasis)[2] * 0.5 + 0.5

    elif ntype in [18, 'blender_texture']:
        # Sample an existing Blender texture's alpha channel, if present.
        if texture_name != "" and texture_name in bpy.data.textures:
            value = bpy.data.textures[texture_name].evaluate(ncoords)[3]
        else:
            value = 0.0
    else:
        value = 0.5

    # Effect mix
    val = value
    if fx_type in [0, "0"]:
        # No effect selected: pass the base value through unchanged.
        fx_mixfactor = -1.0
        fxval = val
    else:
        fxcoords = Trans_Effect((x, y, z), fx_size, (fx_loc_x, fx_loc_y))
        effect = Effect_Function(fxcoords, fx_type, fx_bias, fx_turb, fx_depth,
                                 fx_frequency, fx_amplitude)
        effect = Height_Scale(effect, fx_height, fx_offset, fx_invert)
        fxval = Mix_Modes(val, effect, fx_mixfactor, fx_mix_mode)
    value = fxval

    # Adjust height
    value = Height_Scale(value, height, height_offset, height_invert)

    # Edge falloff:
    # Only applies to flat (non-sphere) terrain; blends the edges of the
    # mesh down to edge_level with a smoothstep-style curve.
    if not sphere:
        if falloff:
            ratio_x, ratio_y = abs(x) * 2 / meshsize_x, abs(y) * 2 / meshsize_y
            fallofftypes = [
                0,
                sqrt(ratio_y**falloffsize_y),
                sqrt(ratio_x**falloffsize_x),
                sqrt(ratio_x**falloffsize_x + ratio_y**falloffsize_y)
            ]
            dist = fallofftypes[falloff]
            value -= edge_level
            if (dist < 1.0):
                dist = (dist * dist * (3 - 2 * dist))  # smoothstep
                value = (value - value * dist) + edge_level
            else:
                value = edge_level

    # Strata / terrace / layers
    if stratatype not in [0, "0"]:
        if stratatype in [1, "1"]:
            strata = strata / height
            strata *= 2
            steps = (sin(value * strata * pi) * (0.1 / strata * pi))
            value = (value * 0.5 + steps * 0.5) * 2.0

        elif stratatype in [2, "2"]:
            strata = strata / height
            steps = -abs(sin(value * strata * pi) * (0.1 / strata * pi))
            value = (value * 0.5 + steps * 0.5) * 2.0

        elif stratatype in [3, "3"]:
            strata = strata / height
            steps = abs(sin(value * strata * pi) * (0.1 / strata * pi))
            value = (value * 0.5 + steps * 0.5) * 2.0

        elif stratatype in [4, "4"]:
            # Hard quantisation into `strata` levels.
            strata = strata / height
            value = int(value * strata) * 1.0 / strata

        elif stratatype in [5, "5"]:
            # 50/50 blend between smooth and quantised value.
            strata = strata / height
            steps = (int(value * strata) * 1.0 / strata)
            value = (value * (1.0 - 0.5) + steps * 0.5)

    # Clamp height min max
    if (value < minimum):
        value = minimum
    if (value > maximum):
        value = maximum

    return value
    def execute(self, context):
        """Generate hair-strand curves from a grid mesh surface.

        Extracts strand center lines from the source grid (edge-ring or
        sorted-vertex method), interpolates them, perturbs them with
        layered noise, optionally snaps/offsets them to the surface and
        clumps strands together, then writes the result into a curve
        object linked to the scene.
        """
        sourceSurface = bpy.data.objects[self.source_grid_mesh]
        if self.hairMethod == 'edge':
            coLoopsPerIslandsList = get_edge_ring_centers(sourceSurface)
            #detect if any island was made on one ring only, and if so switch to another algorithm
            islands_center_count = [
                len(loop_centers[0]) == 1
                for loop_centers in coLoopsPerIslandsList
            ]
            if any(islands_center_count):
                self.hairMethod = 'vert'
                coLoopsPerIslandsList.clear()
                coLoopsPerIslandsList = get_sorted_verts_co(sourceSurface)
        else:
            coLoopsPerIslandsList = get_sorted_verts_co(sourceSurface)

        self.yLengthPerIsland, self.xWidthPerIsland = get_islands_proportions(
            coLoopsPerIslandsList)

        # hide source surface (wireframe display, excluded from renders)
        sourceSurface.display_type = 'WIRE'
        sourceSurface.show_all_edges = True
        sourceSurface.hide_render = True
        sourceSurface.cycles_visibility.camera = False
        sourceSurface.cycles_visibility.diffuse = False
        sourceSurface.cycles_visibility.glossy = False
        sourceSurface.cycles_visibility.transmission = False
        sourceSurface.cycles_visibility.scatter = False
        sourceSurface.cycles_visibility.shadow = False

        # create the Curve Datablock
        # (reuse and clear an existing curve object when one is given)
        if self.source_curve:
            curveOB = bpy.data.objects[self.source_curve]
            for spl in reversed(curveOB.data.splines):
                curveOB.data.splines.remove(spl)
            curveData = curveOB.data
        else:
            curveData = bpy.data.curves.new(sourceSurface.name + '_curve',
                                            type='CURVE')
            curveData.dimensions = '3D'
            curveData.fill_mode = 'FULL'
            curveOB = bpy.data.objects.new(sourceSurface.name + '_curve',
                                           curveData)
            context.scene.collection.objects.link(curveOB)
        # unitsScale = 1 # context.scene.unit_settings.scale_length
        if self.diagonal == 0:
            # Bounding-box diagonal used to scale distances/amplitudes.
            diagonal = math.sqrt(
                pow(sourceSurface.dimensions[0], 2) +
                pow(sourceSurface.dimensions[1], 2) +
                pow(sourceSurface.dimensions[2],
                    2))  # to normalize some values
        else:
            diagonal = self.diagonal
        # print("diagonal is: "+str(diagonal))
        curveData.bevel_depth = 0.004 * diagonal * self.Radius
        curveData.bevel_resolution = 2

        sourceSurface_BVHT = self.sourceSurface_BVHT
        searchDistance = 1000 * diagonal
        cpow = calc_power(self.noiseFalloff)
        np.random.seed(self.clump_Seed)
        if self.clump_amount > 0:
            # Pick parent strands, then assign every strand one parent.
            clump_amount = max(int(self.t_in_x * self.clump_amount), 1)
            # without repeating  len(avg_clupm_size) < len(tab)
            parent_strands = np.random.choice(
                range(self.t_in_x), clump_amount,
                replace=False)  # without repeating
            clump_ids_int = np.sort(
                np.random.choice(parent_strands, self.t_in_x, replace=True)
            ).tolist()  # with repeating - get parents strands  t_in_x times

        for xFactor, yFactor, edgeCentersRingsList in zip(
                self.xWidthPerIsland, self.yLengthPerIsland,
                coLoopsPerIslandsList):  # for islands
            Centers_of_EdgeRingsInterpolated = self.callInterpolation(
                edgeCentersRingsList, xFactor, yFactor, self.shortenStrandLen)
            # map coords to spline
            for l, edgeRingCenters in enumerate(
                    Centers_of_EdgeRingsInterpolated):  # for each strand/ring
                curveLenght = len(edgeRingCenters)
                polyline = curveData.splines.new(self.hairType)
                if self.hairType == 'BEZIER':
                    polyline.bezier_points.add(curveLenght - 1)
                elif self.hairType == 'POLY' or self.hairType == 'NURBS':
                    polyline.points.add(curveLenght - 1)
                if self.hairType == 'NURBS':
                    polyline.order_u = 3  # like bezier thing
                    polyline.use_endpoint_u = True
                for i, edgeCenter in enumerate(
                        edgeRingCenters):  # for strand point
                    edgeCenter = Vector(edgeCenter)
                    # Global noise shared by all strands (fixed seed)...
                    noise.seed_set(self.Seed)
                    noiseVectorPerAllHair = noise.noise_vector(
                        edgeCenter * self.freq / diagonal,
                        noise_basis='PERLIN_ORIGINAL')
                    noise.seed_set(self.Seed + l)  # seed per strand/ring
                    noiseVectorPerStrand = noise.noise_vector(
                        edgeCenter * self.strandFreq / diagonal,
                        noise_basis='PERLIN_ORIGINAL')
                    if self.noiseMixVsAdditive:
                        noiseMix = noiseVectorPerAllHair + noiseVectorPerStrand * self.noiseMixFactor
                    else:
                        noiseMix = noiseVectorPerAllHair * (
                            1 - self.noiseMixFactor
                        ) + noiseVectorPerStrand * self.noiseMixFactor
                    noiseFalloff = math.pow(
                        i / curveLenght,
                        cpow)  # 0.1 to give 1% of influence on root
                    noisedEdgeCenter = edgeCenter + noiseMix * noiseFalloff * self.noiseAmplitude * diagonal  # linear fallof

                    snappedPoint, normalSourceSurf, index, distance = sourceSurface_BVHT.find_nearest(
                        noisedEdgeCenter, searchDistance)
                    if not snappedPoint:  # search radius is too small ...
                        snappedPoint = noisedEdgeCenter  # snap to itself...
                        normalSourceSurf = Vector((0, 0, 1))
                    snapMix = snappedPoint * self.snapAmount + noisedEdgeCenter * (
                        1 - self.snapAmount)
                    offsetAbove = snapMix + (
                        self.offsetAbove *
                        0.2) * diagonal * normalSourceSurf * noiseFalloff
                    x, y, z = offsetAbove
                    if self.hairType == 'BEZIER':
                        polyline.bezier_points[i].co = (x, y, z)
                        polyline.bezier_points[i].handle_left_type = 'AUTO'
                        polyline.bezier_points[i].handle_right_type = 'AUTO'
                    else:
                        polyline.points[i].co = (x, y, z, 1)
        if self.clump_amount > 0:
            # Pull each strand toward its parent, strongest at the tip.
            cpowTip = calc_power(self.clump_falloff)
            for clump_id, spline in zip(clump_ids_int, curveData.splines):
                points = [
                    p for p in spline.bezier_points
                ] if self.hairType == 'BEZIER' else [p for p in spline.points]
                # NOTE(review): in the BEZIER branch this iterates the
                # spline itself, not .bezier_points — confirm intended.
                target_points = [
                    p.co for p in curveData.splines[clump_id]
                ] if self.hairType == 'BEZIER' else [
                    p.co for p in curveData.splines[clump_id].points
                ]
                for i, (source_point,
                        target_point) in enumerate(zip(points, target_points)):
                    Fallof_tip = math.pow((self.t_in_y - i) / self.t_in_y,
                                          cpowTip)
                    source_point.co = target_point * (1-Fallof_tip) * self.clump_strength + \
                        source_point.co * (self.clump_strength * (Fallof_tip - 1) + 1)
        curveData.resolution_u = self.bezierRes
        # create Object

        curveOB.targetObjPointer = sourceSurface.name  # store source surface for snapping oper
        curveOB.matrix_world = sourceSurface.matrix_world
        context.view_layer.objects.active = curveOB
        curveOB.select_set(True)
        sourceSurface.select_set(False)
        # curveOB.data.show_normal_face = False
        curveOB.data.use_uv_as_generated = True
        if self.generateRibbons:
            bpy.ops.object.generate_ribbons(strandResU=self.strandResU,
                                            strandResV=self.strandResV,
                                            strandWidth=self.strandWidth,
                                            strandPeak=self.strandPeak,
                                            strandUplift=self.strandUplift,
                                            alignToSurface=self.alignToSurface)
            HT_OT_CurvesUVRefresh.uvCurveRefresh(curveOB)
        self.save_settings(curveOB)
        curveOB.hair_grid_settings.was_created_from_grid = True
        self.save_settings(sourceSurface)
        return {"FINISHED"}
Beispiel #33
0
 def evaluate(self, x, y, z):
     """Sample the noise vector field at (x, y, z) as a numpy array."""
     # Re-seed before every sample so repeated calls are deterministic.
     noise.seed_set(self.seed)
     sample = noise.noise_vector((x, y, z), noise_basis=self.noise_type)
     return np.array(sample)
Beispiel #34
0
def noise_gen(coords, props):
    """Return a single terrain height value for the point ``coords``.

    ``props`` is a positional 65-element sequence of landscape settings
    (noise type/basis, fractal parameters, effect mix, falloff, strata,
    clamping, ...) unpacked by index below.  The function builds
    noise-space coordinates from the seed/offsets, dispatches to the
    selected noise function, applies the effect mix, height scaling,
    edge falloff and strata shaping, then clamps the result to
    ``[minimum, maximum]``.

    Returns:
        float: the final height value at ``coords``.
    """

    # Unpack the property list; the index order is fixed by the caller.
    terrain_name = props[0]
    cursor = props[1]
    smooth = props[2]
    triface = props[3]
    sphere = props[4]
    land_mat = props[5]
    water_mat = props[6]
    texture_name = props[7]
    subd_x = props[8]
    subd_y = props[9]
    meshsize_x = props[10]
    meshsize_y = props[11]
    meshsize = props[12]
    rseed = props[13]
    x_offset = props[14]
    y_offset = props[15]
    z_offset = props[16]
    size_x = props[17]
    size_y = props[18]
    size_z = props[19]
    nsize = props[20]
    ntype = props[21]
    nbasis = int(props[22])
    vlbasis = int(props[23])
    distortion = props[24]
    hardnoise = int(props[25])
    depth = props[26]
    amp = props[27]
    freq = props[28]
    dimension = props[29]
    lacunarity = props[30]
    offset = props[31]
    gain = props[32]
    marblebias = int(props[33])
    marblesharpnes = int(props[34])
    marbleshape = int(props[35])
    height = props[36]
    height_invert = props[37]
    height_offset = props[38]
    maximum = props[39]
    minimum = props[40]
    falloff = int(props[41])
    edge_level = props[42]
    falloffsize_x = props[43]
    falloffsize_y = props[44]
    stratatype = props[45]
    strata = props[46]
    addwater = props[47]
    waterlevel = props[48]
    vert_group = props[49]
    remove_double = props[50]
    fx_mixfactor = props[51]
    fx_mix_mode = props[52]
    fx_type = props[53]
    fx_bias = props[54]
    fx_turb = props[55]
    fx_depth = props[56]
    fx_frequency = props[57]
    fx_amplitude = props[58]
    fx_size = props[59]
    fx_loc_x = props[60]
    fx_loc_y = props[61]
    fx_height = props[62]
    fx_offset = props[63]
    fx_invert = props[64]

    x, y, z = coords

    # Origin
    # BUG FIX: was ``if rseed is 0`` — identity comparison against an int
    # literal relies on CPython small-int caching (implementation detail)
    # and emits a SyntaxWarning since Python 3.8; equality is intended.
    if rseed == 0:
        # Seed 0 means "no randomisation": use the user offsets directly.
        origin = x_offset, y_offset, z_offset
        origin_x = x_offset
        origin_y = y_offset
        origin_z = z_offset
        o_range = 1.0
    else:
        # Randomise origin
        o_range = 10000.0
        seed_set(rseed)
        origin = random_unit_vector()
        ox = (origin[0] * o_range)
        oy = (origin[1] * o_range)
        oz = (origin[2] * o_range)
        origin_x = (ox - (ox / 2)) + x_offset
        origin_y = (oy - (oy / 2)) + y_offset
        origin_z = (oz - (oz / 2)) + z_offset

    # Scale world coordinates into noise space and apply the origin.
    ncoords = (x / (nsize * size_x) + origin_x, y / (nsize * size_y) + origin_y, z / (nsize * size_z) + origin_z)

    # Noise basis type's
    # Remap basis index 9 to 14 (Cellnoise) for the noise API.
    if nbasis == 9:
        nbasis = 14  # Cellnoise
    if vlbasis == 9:
        vlbasis = 14

    # Noise type's
    # ntype may arrive as an int index or its string name; each branch
    # accepts both.  Scale factors (0.5 / 0.25) normalise output ranges.
    if ntype in [0, 'multi_fractal']:
        value = multi_fractal(ncoords, dimension, lacunarity, depth, nbasis) * 0.5

    elif ntype in [1, 'ridged_multi_fractal']:
        value = ridged_multi_fractal(ncoords, dimension, lacunarity, depth, offset, gain, nbasis) * 0.5

    elif ntype in [2, 'hybrid_multi_fractal']:
        value = hybrid_multi_fractal(ncoords, dimension, lacunarity, depth, offset, gain, nbasis) * 0.5

    elif ntype in [3, 'hetero_terrain']:
        value = hetero_terrain(ncoords, dimension, lacunarity, depth, offset, nbasis) * 0.25

    elif ntype in [4, 'fractal']:
        value = fractal(ncoords, dimension, lacunarity, depth, nbasis)

    elif ntype in [5, 'turbulence_vector']:
        value = turbulence_vector(ncoords, depth, hardnoise, nbasis, amp, freq)[0]

    elif ntype in [6, 'variable_lacunarity']:
        value = variable_lacunarity(ncoords, distortion, nbasis, vlbasis)

    elif ntype in [7, 'marble_noise']:
        value = marble_noise(
                        (ncoords[0] - origin_x + x_offset),
                        (ncoords[1] - origin_y + y_offset),
                        (ncoords[2] - origin_z + z_offset),
                        (origin[0] + x_offset, origin[1] + y_offset, origin[2] + z_offset), nsize,
                        marbleshape, marblebias, marblesharpnes,
                        distortion, depth, hardnoise, nbasis, amp, freq
                        )
    elif ntype in [8, 'shattered_hterrain']:
        value = shattered_hterrain(ncoords, dimension, lacunarity, depth, offset, distortion, nbasis)

    elif ntype in [9, 'strata_hterrain']:
        value = strata_hterrain(ncoords, dimension, lacunarity, depth, offset, distortion, nbasis)

    elif ntype in [10, 'ant_turbulence']:
        value = ant_turbulence(ncoords, depth, hardnoise, nbasis, amp, freq, distortion)

    elif ntype in [11, 'vl_noise_turbulence']:
        value = vl_noise_turbulence(ncoords, distortion, depth, nbasis, vlbasis, hardnoise, amp, freq)

    elif ntype in [12, 'vl_hTerrain']:
        value = vl_hTerrain(ncoords, dimension, lacunarity, depth, offset, nbasis, vlbasis, distortion)

    elif ntype in [13, 'distorted_heteroTerrain']:
        value = distorted_heteroTerrain(ncoords, dimension, lacunarity, depth, offset, distortion, nbasis, vlbasis)

    elif ntype in [14, 'double_multiFractal']:
        value = double_multiFractal(ncoords, dimension, lacunarity, depth, offset, gain, nbasis, vlbasis)

    elif ntype in [15, 'rocks_noise']:
        value = rocks_noise(ncoords, depth, hardnoise, nbasis, distortion)

    elif ntype in [16, 'slick_rock']:
        value = slick_rock(ncoords, dimension, lacunarity, depth, offset, gain, distortion, nbasis, vlbasis)

    elif ntype in [17, 'planet_noise']:
        value = planet_noise(ncoords, depth, hardnoise, nbasis)[2] * 0.5 + 0.5

    elif ntype in [18, 'blender_texture']:
        # Sample an existing Blender texture's alpha channel, if present.
        if texture_name != "" and texture_name in bpy.data.textures:
            value = bpy.data.textures[texture_name].evaluate(ncoords)[3]
        else:
            value = 0.0
    else:
        value = 0.5

    # Effect mix
    val = value
    if fx_type in [0, "0"]:
        # No effect selected: pass the base value through unchanged.
        fx_mixfactor = -1.0
        fxval = val
    else:
        fxcoords = Trans_Effect((x, y, z), fx_size, (fx_loc_x, fx_loc_y))
        effect = Effect_Function(fxcoords, fx_type, fx_bias, fx_turb, fx_depth, fx_frequency, fx_amplitude)
        effect = Height_Scale(effect, fx_height, fx_offset, fx_invert)
        fxval = Mix_Modes(val, effect, fx_mixfactor, fx_mix_mode)
    value = fxval

    # Adjust height
    value = Height_Scale(value, height, height_offset, height_invert)

    # Edge falloff:
    # Only applies to flat (non-sphere) terrain; blends the edges of the
    # mesh down to edge_level with a smoothstep-style curve.
    if not sphere:
        if falloff:
            ratio_x, ratio_y = abs(x) * 2 / meshsize_x, abs(y) * 2 / meshsize_y
            fallofftypes = [0,
                            sqrt(ratio_y**falloffsize_y),
                            sqrt(ratio_x**falloffsize_x),
                            sqrt(ratio_x**falloffsize_x + ratio_y**falloffsize_y)
                           ]
            dist = fallofftypes[falloff]
            value -= edge_level
            if (dist < 1.0):
                dist = (dist * dist * (3 - 2 * dist))  # smoothstep
                value = (value - value * dist) + edge_level
            else:
                value = edge_level

    # Strata / terrace / layers
    if stratatype not in [0, "0"]:
        if stratatype in [1, "1"]:
            strata = strata / height
            strata *= 2
            steps = (sin(value * strata * pi) * (0.1 / strata * pi))
            value = (value * 0.5 + steps * 0.5) * 2.0

        elif stratatype in [2, "2"]:
            strata = strata / height
            steps = -abs(sin(value * strata * pi) * (0.1 / strata * pi))
            value = (value * 0.5 + steps * 0.5) * 2.0

        elif stratatype in [3, "3"]:
            strata = strata / height
            steps = abs(sin(value * strata * pi) * (0.1 / strata * pi))
            value = (value * 0.5 + steps * 0.5) * 2.0

        elif stratatype in [4, "4"]:
            # Hard quantisation into `strata` levels.
            strata = strata / height
            value = int(value * strata) * 1.0 / strata

        elif stratatype in [5, "5"]:
            # 50/50 blend between smooth and quantised value.
            strata = strata / height
            steps = (int(value * strata) * 1.0 / strata)
            value = (value * (1.0 - 0.5) + steps * 0.5)

    # Clamp height min max
    if (value < minimum):
        value = minimum
    if (value > maximum):
        value = maximum

    return value
Beispiel #35
0
def generate_random_unitvectors():
    """Return six pseudo-random unit vectors from a fixed seed.

    The seed is pinned to a constant so the directions are reproducible
    between runs; callers may filter the set down afterwards. More
    directions could be generated here if accuracy ever requires it.
    """
    seed_set(140230)
    directions = []
    for _ in range(6):
        directions.append(random_unit_vector())
    return directions
Beispiel #36
0
    def process(self):
        """Register (or refresh) the 3D-viewport draw callback for this node.

        Does nothing in background (headless) Blender. When the tree's
        display toggle or this node's activate toggle is off, any existing
        callback is removed and nothing is drawn. A linked 'Vertices' input
        takes precedence; otherwise a linked 'Matrix' input is drawn with
        the matrix drawer.
        """
        if bpy.app.background:
            return
        self.handle_attr_socket()
        # Tree display or node disabled: drop any registered callback.
        if not (self.id_data.sv_show and self.activate):
            callback_disable(node_id(self))
            return
        n_id = node_id(self)
        # Always clear the previous callback before building a new one.
        callback_disable(n_id)
        inputs = self.inputs
        # NOTE(review): the original re-checked `self.activate` here, but
        # that branch was unreachable — the combined check above already
        # returned when activate is False — so the dead code was removed.

        # Nothing to draw without vertices or matrices linked.
        if not any([inputs['Vertices'].is_linked, inputs['Matrix'].is_linked]):
            return

        if inputs['Vertices'].is_linked:
            vecs = inputs['Vertices'].sv_get(default=[[]])

            edges = inputs['Edges'].sv_get(default=[[]])
            polygons = inputs['Polygons'].sv_get(default=[[]])
            matrix = inputs['Matrix'].sv_get(default=[[]])
            vector_color = inputs['Vector Color'].sv_get(
                default=[[self.vector_color]])
            edge_color = inputs['Edge Color'].sv_get(
                default=[[self.edge_color]])
            poly_color = inputs['Polygon Color'].sv_get(
                default=[[self.polygon_color]])
            # Seed is fixed before building the config — presumably so any
            # randomised drawing elements are reproducible; TODO confirm.
            seed_set(self.random_seed)
            config = self.create_config()

            config.vector_color = vector_color
            config.edge_color = edge_color
            config.poly_color = poly_color
            config.edges = edges

            if self.use_dashed:
                add_dashed_shader(config)

            config.polygons = polygons
            config.matrix = matrix
            # Derive edges from polygons when edges should be displayed
            # but the Edges socket is not wired.
            if not inputs['Edges'].is_linked and self.display_edges:
                config.edges = polygons_to_edges(polygons, unique_edges=True)

            geom = generate_mesh_geom(config, vecs)

            draw_data = {
                'tree_name': self.id_data.name[:],
                'custom_function': view_3d_geom,
                'args': (geom, config)
            }
            callback_enable(n_id, draw_data)

        elif inputs['Matrix'].is_linked:
            matrices = inputs['Matrix'].sv_get(deepcopy=False,
                                               default=[Matrix()])

            gl_instructions = {
                'tree_name': self.id_data.name[:],
                'custom_function': draw_matrix,
                'args': (matrices, self.matrix_draw_scale)
            }
            callback_enable(n_id, gl_instructions)
Beispiel #37
0
def landscape_gen(x, y, z, falloffsize, options):
    """Compute the terrain height for one sample point at (x, y, z).

    ``options`` is the 26-entry settings list assembled by the landscape
    operator: seed, noise size/type, noise bases, fractal parameters,
    marble parameters, height shaping, edge falloff, strata settings,
    the sphere flag, and the x/y origin offsets.

    Returns the shaped height value, clamped to [sealevel, platlevel].
    """
    # --- unpack the flat options list into named settings -----------------
    rseed, nsize, ntype = options[0], options[1], options[2]
    nbasis, vlbasis = int(options[3][0]), int(options[4][0])
    distortion, hardnoise = options[5], options[6]
    depth, dimension, lacunarity = options[7], options[8], options[9]
    offset, gain = options[10], options[11]
    marblebias = int(options[12][0])
    marblesharpnes = int(options[13][0])
    marbleshape = int(options[14][0])
    invert, height, heightoffset = options[15], options[16], options[17]
    falloff = int(options[18][0])
    sealevel, platlevel = options[19], options[20]
    strata, stratatype = options[21], options[22]
    sphere = options[23]
    x_offset, y_offset = options[24], options[25]

    # --- noise-space origin ----------------------------------------------
    # Seed 0 keeps a fixed origin at the user offset; any other seed
    # randomises the origin (reproducibly) via the noise RNG.
    if rseed == 0:
        origin = 0.0 + x_offset, 0.0 + y_offset, 0.0
        origin_x, origin_y, origin_z = x_offset, y_offset, 0.0
    else:
        seed_set(rseed)
        origin = random_unit_vector()
        # The random origin is shifted first; the scaled components below
        # are derived from the already-shifted values (order matters).
        origin[0] += x_offset
        origin[1] += y_offset
        origin_x = ((0.5 - origin[0]) * 1000.0) + x_offset
        origin_y = ((0.5 - origin[1]) * 1000.0) + y_offset
        origin_z = (0.5 - origin[2]) * 1000.0

    # Sample coordinates in noise space.
    ncoords = (x / nsize + origin_x, y / nsize + origin_y,
               z / nsize + origin_z)

    # Basis index 9 (cellnoise) is exposed by the noise API as index 14.
    nbasis = 14 if nbasis == 9 else nbasis
    vlbasis = 14 if vlbasis == 9 else vlbasis

    # --- evaluate the selected noise type --------------------------------
    # Lazy dispatch table: only the chosen generator actually runs.
    generators = {
        'multi_fractal': lambda: multi_fractal(
            ncoords, dimension, lacunarity, depth, nbasis) * 0.5,
        'ridged_multi_fractal': lambda: ridged_multi_fractal(
            ncoords, dimension, lacunarity, depth, offset, gain,
            nbasis) * 0.5,
        'hybrid_multi_fractal': lambda: hybrid_multi_fractal(
            ncoords, dimension, lacunarity, depth, offset, gain,
            nbasis) * 0.5,
        'hetero_terrain': lambda: hetero_terrain(
            ncoords, dimension, lacunarity, depth, offset, nbasis) * 0.25,
        'fractal': lambda: fractal(
            ncoords, dimension, lacunarity, depth, nbasis),
        'turbulence_vector': lambda: turbulence_vector(
            ncoords, depth, hardnoise, nbasis)[0],
        'variable_lacunarity': lambda: variable_lacunarity(
            ncoords, distortion, nbasis, vlbasis) + 0.5,
        'marble_noise': lambda: marble_noise(
            x * 2.0 / falloffsize, y * 2.0 / falloffsize,
            z * 2 / falloffsize, origin, nsize, marbleshape, marblebias,
            marblesharpnes, distortion, depth, hardnoise, nbasis),
        'shattered_hterrain': lambda: shattered_hterrain(
            ncoords[0], ncoords[1], ncoords[2], dimension, lacunarity,
            depth, offset, distortion, nbasis),
        'strata_hterrain': lambda: strata_hterrain(
            ncoords[0], ncoords[1], ncoords[2], dimension, lacunarity,
            depth, offset, distortion, nbasis),
        'planet_noise': lambda: planet_noise(
            ncoords, depth, hardnoise, nbasis)[2] * 0.5 + 0.5,
    }
    make_noise = generators.get(ntype)
    value = make_noise() if make_noise is not None else 0.0

    # --- height scale / invert -------------------------------------------
    value = ((1 - value) if invert != 0 else value) * height + heightoffset

    # --- edge falloff (planar terrains only) -----------------------------
    if sphere == 0 and falloff != 0:
        # NOTE(review): falloff 1 compares hypot(x*x, y*y) = sqrt(x^4+y^4)
        # against radius^2 — unusual metric, preserved verbatim from the
        # original; confirm against the add-on's intent before changing.
        fallofftypes = [
            0, hypot(x * x, y * y),
            hypot(x, y),
            abs(y), abs(x)
        ]
        dist = fallofftypes[falloff]
        radius = (falloffsize / 2) ** 2 if falloff == 1 else falloffsize / 2

        value = value - sealevel
        if dist < radius:
            # smoothstep blend toward sealevel at the edge
            t = dist / radius
            t = t * t * (3 - 2 * t)
            value = (value - value * t) + sealevel
        else:
            value = sealevel

    # --- strata / terrace shaping ----------------------------------------
    if stratatype != '0':
        strata = strata / height
    if stratatype == '1':
        # smooth strata at doubled frequency
        strata *= 2
        steps = sin(value * strata * pi) * (0.1 / strata * pi)
        value = (value * 0.5 + steps * 0.5) * 2.0
    elif stratatype == '2':
        # one-sided steps, carved downward
        steps = -abs(sin(value * strata * pi) * (0.1 / strata * pi))
        value = (value * 0.5 + steps * 0.5) * 2.0
    elif stratatype == '3':
        # one-sided steps, pushed upward
        steps = abs(sin(value * strata * pi) * (0.1 / strata * pi))
        value = (value * 0.5 + steps * 0.5) * 2.0

    # --- clamp to [sealevel, platlevel] ----------------------------------
    return min(max(value, sealevel), platlevel)