from numpy.random import (exponential, normal, triangular, uniform, beta,
                          weibull, chisquare, gamma, lognormal, pareto,
                          standard_t)


def get_dist_num(args):
    dist = args[0]

    # Convert every parameter after the distribution name to float:
    args[1:] = [float(x) for x in args[1:]]

    if dist == 'EXP':
        return exponential(args[1])
    elif dist == 'NOR':
        return normal(loc=args[1],
                      scale=args[2])  # loc = mean, scale = standard deviation
    elif dist == 'TRI':
        return triangular(args[1], args[2], args[3])
    elif dist == 'UNI':
        return uniform(low=args[1], high=args[2])
    elif dist == 'BET':
        return beta(args[1], args[2])
    elif dist == 'WEI':
        return weibull(args[1])
    elif dist == 'CAU':  # CAU: Cauchy (not implemented; returns a placeholder)
        return 0
    elif dist == 'CHI':
        return chisquare(args[1])
    elif dist == 'ERL':  # ERL: Erlang (not implemented; returns a placeholder)
        return 0
    elif dist == 'GAM':
        return gamma(args[1], scale=args[2])
    elif dist == 'LOG':
        return lognormal(mean=args[1], sigma=args[2])
    elif dist == 'PAR':
        return pareto(args[1])
    elif dist == 'STU':
        return standard_t(args[1])
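A usage sketch (the calls and parameter values are assumptions, not from the original source); each call draws a single value, and the function converts the string parameters to float itself:

print(get_dist_num(['EXP', '2.0']))           # exponential, scale 2.0
print(get_dist_num(['NOR', '0', '1']))        # normal, mean 0, std 1
print(get_dist_num(['TRI', '0', '5', '10']))  # triangular: left, mode, right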
Example #2
 def generating(self):
     rands = []
     if self._check_distri_paras():
         # uniform distribution
         if self.distri=='uniform_distri':
             rands = np_rand.uniform(low=self.location_para,
                                     high=(self.location_para+self.scale_para),
                                     size=self.number_rand).tolist()
         # exponential distribution
         elif self.distri=='exponential_distri':
             rands = np_rand.exponential(scale=self.scale_para, size=self.number_rand).tolist()
         # normal distribution
         elif self.distri=='normal_distri':
             rands = np_rand.normal(loc=self.location_para, scale=self.scale_para, size=self.number_rand).tolist()
         # lognormal distribution
         elif self.distri=='lognormal_distri':
             rands = np_rand.lognormal(mean=self.scale_para, sigma=self.shape_para, size=self.number_rand).tolist()
         # Weibull distribution (numpy draws the standard form; rescale by scale_para)
         elif self.distri=='weibull_distri':
             rands = np_rand.weibull(a=self.shape_para, size=self.number_rand)
             rands = rands * self.scale_para
             rands = rands.tolist()
     else:
         print('Invalid PDF parameter settings')

     return rands
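Note that the uniform branch spans [location, location + scale], mirroring scipy's loc/scale convention rather than numpy's low/high. A minimal standalone sketch of that mapping (the values are assumptions):

import numpy.random as np_rand

loc, scale = 2.0, 3.0
# Uniform on [2.0, 5.0): low = loc, high = loc + scale
rands = np_rand.uniform(low=loc, high=loc + scale, size=5).tolist()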
Example #3
import numpy.random as rnd
import matplotlib.pyplot as plt


def input_choice():

    data = rnd.weibull(a=1, size=1000)*100
    plt.hist(data, label="weibull", alpha=0.4)
    # data = rnd.exponential(scale=50, size=1000)
    # plt.hist(data, label="exp", alpha=0.4)
    # data = rnd.gamma(1, size=1000) * 100
    # plt.hist(data, label="gamma", alpha=0.4)
    # data = rnd.exponential(scale=500, size=1000)
    plt.legend()
    plt.show()
Example #4
 def time_to_mutation_rate(tree):
     if not hasattr(GC, "NUMPY_SEEDED"):
         from numpy.random import seed as numpy_seed
         numpy_seed(seed=GC.random_number_seed)
         GC.random_number_seed += 1
         GC.NUMPY_SEEDED = True
     t = read_tree_newick(tree)
     for node in t.traverse_preorder():
         if node.edge_length is not None:
             node.edge_length *= (weibull(a=GC.tree_rate_shape) *
                                  GC.tree_rate_scale)
     return str(t)
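The hasattr guard above seeds numpy's global RNG exactly once per process. The same pattern in a minimal standalone form (the GC globals are replaced by a module-level flag; an assumption, not this framework's code):

import numpy as np

_SEEDED = False

def seed_once(seed_value):
    # Seed numpy's legacy global RNG on the first call only.
    global _SEEDED
    if not _SEEDED:
        np.random.seed(seed_value)
        _SEEDED = True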
Example #5
def patient(env, idx, name, hosp):
    icutime = weibull(1) * MEANICUTIME  # shape-1 Weibull is a unit exponential, so the mean is MEANICUTIME
    patientData[idx]["hospitalArrivalTime"] = env.now[0]
    patientData[idx]["icuTime"] = icutime

    console.debug('%s arrives at the hospital %s at %.2f' % (name, hosp.name, env.now))
    with hosp.icubeds.request() as request:
        yield request
        patientData[idx]["icuStartTime"] = env.now[0]
        console.debug('%s enters the ICU at %.2f' % (name, env.now))

        yield env.process(hosp.treat(name, icutime))
        patientData[idx]["icuEndTime"] = env.now[0]
        console.debug('%s leaves the ICU at %.2f' % (name, env.now))
        dischargeList.append({"time": math.floor(env.now), "hospital": hosp.id})
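A shape-1 Weibull is the unit exponential, so icutime above averages MEANICUTIME. A quick sanity check (the MEANICUTIME value is assumed):

import numpy as np
from numpy.random import weibull

MEANICUTIME = 3.5  # assumed for illustration
print(np.mean(weibull(1, size=100_000) * MEANICUTIME))  # close to 3.5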
Example #6
def _simulate_claim_sizes(R, sev, thetaSev):
    # R is integer valued (the number of claims to simulate)
    # sev is the claim size distribution, chosen from ("weibull", "exponential",
    # "gamma", "lognormal", "frequency dependent exponential")
    # thetaSev corresponds to the parameters of the claim size distribution
    if sev == "weibull":
        k, scale = thetaSev
        claims = np.empty(R, np.float64)
        for i in range(R):
            claims[i] = scale * rnd.weibull(k)
        return claims
    elif sev == "exponential":
        scale = thetaSev[0]
        claims = np.empty(R, np.float64)
        for i in range(R):
            claims[i] = scale * rnd.exponential()
        return claims
    elif sev == "gamma":
        k, scale = thetaSev
        claims = np.empty(R, np.float64)
        for i in range(R):
            claims[i] = scale * rnd.gamma(k)
        return claims
    elif sev == "lognormal":
        mu, sigma = thetaSev
        claims = np.empty(R, np.float64)
        for i in range(R):
            claims[i] = rnd.lognormal(mu, sigma)
        return claims
    # elif sev == "dependent lognormal":  # exponential of gaussian vector
    #     if R == 0:
    #         return np.array([])
    #     else:
    #         mu, sigma, ρ = param1, param2, param3
    #         Σ = np.ones((R, R)) * ρ * sigma ** 2 + np.identity(R) * (
    #             sigma ** 2 - sigma ** 2 * ρ
    #         )

    #         return np.exp(rnd.multivariate_normal(np.repeat(mu, R), Σ, 1).flatten())
    elif sev == "frequency dependent exponential":
        scale, cor = thetaSev
        claims = np.empty(R, np.float64)
        for i in range(R):
            claims[i] = scale * np.exp(cor * R) * rnd.exponential()
        return claims

    else:
        return
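The per-claim loops above can equivalently be vectorized with numpy's size argument. A minimal sketch of the "weibull" branch under the same parameterization (simulate_weibull_claims is a hypothetical name):

import numpy.random as rnd

def simulate_weibull_claims(R, k, scale):
    # numpy's weibull(k) draws the standard (scale-1) form, so multiplying
    # by scale yields a two-parameter Weibull.
    return scale * rnd.weibull(k, size=R)

claims = simulate_weibull_claims(10, k=1.5, scale=100.0)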
Example #7
 def sample(self, n=1, params=None, hold_time=None, **kwargs):
     params = self.params_handler('load', params, **kwargs)
     if hold_time is None:  # ordinary sampling from weibull
         s = params['scale'] * npr.weibull(params['shape'], size=n)  # dict access and scale, matching the branch below
     else:  # conditional sampling from weibull
         shape = params['shape']
         scale = params['scale']
         cdf_at_m = 1 - np.exp(-(hold_time / scale)**shape)
         s = []
         for i in range(n):
             u = npr.uniform(cdf_at_m, 1, size=1)[0]
             sample = scale * np.power(-np.log(1 - u), 1. / shape)
             s.append(sample)
     if n == 1:
         s = s[0]
     return s
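The conditional branch above is inverse-transform sampling from a Weibull truncated below at hold_time: draw u uniformly on [F(m), 1) and invert the CDF F(x) = 1 - exp(-(x/scale)**shape). A self-contained sketch (truncated_weibull is a hypothetical name; parameter values are assumptions):

import numpy as np
import numpy.random as npr

def truncated_weibull(shape, scale, lower, n):
    # CDF at the truncation point:
    cdf_at_lower = 1 - np.exp(-(lower / scale) ** shape)
    # Uniforms on [F(lower), 1), pushed through the inverse CDF:
    u = npr.uniform(cdf_at_lower, 1, size=n)
    return scale * np.power(-np.log(1 - u), 1.0 / shape)

samples = truncated_weibull(1.5, 2.0, lower=1.0, n=5)  # every sample >= 1.0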
Example #8
 def getInstance(self):
     dist = self.dist
     p = self.params
     small_correction = random.random() * 0.001  # tiny jitter (presumably to avoid exact zeros or ties)
     if dist == 'exponential':
         return random.exponential(p[0]) + small_correction
     elif dist == 'normal':
         return random.normal(p[0],p[1]) + small_correction
     elif dist == 'uniform':
         return random.uniform(p[0],p[1]) + small_correction
     elif dist == 'poisson':
         return random.poisson(p[0]) + small_correction
     elif dist == 'binomial':
         return random.binomial(p[0],p[1]) + small_correction
     elif dist == 'geometric':
         return random.geometric(p[0]) + small_correction
     elif dist == 'weibull':
         return random.weibull(p[0]) + small_correction
     elif dist == 'gamma':
         return random.gamma(p[0],p[1]) + small_correction
     elif dist == 'beta':
         return random.beta(p[0],p[1]) + small_correction
     elif dist == 'lognormal':
         return random.lognormal(p[0],p[1]) + small_correction
Example #9
from numpy.random import rand, weibull
from scipy import spatial
import networkx as nx
import matplotlib.pyplot as plt

nnodes = 100
l = 0.15
r = l * weibull(3, 1)[0]  # take the scalar out of the length-1 array for query_pairs
positions = rand(nnodes, 2)
kdtree = spatial.KDTree(positions)
pairs = kdtree.query_pairs(r)
G = nx.Graph()
G.add_nodes_from(range(nnodes))
G.add_edges_from(list(pairs))
pos = dict(zip(range(nnodes), positions))
nx.draw(G, pos)
plt.show()

G = nx.random_geometric_graph(200, 0.125)
pos = nx.get_node_attributes(G, 'pos')

# find node near center (0.5,0.5)
dmin = 1
ncenter = 0
for n in pos:
    x, y = pos[n]
    d = (x - 0.5)**2 + (y - 0.5)**2
    if d < dmin:
        ncenter = n
        dmin = d
Example #10
def draw_weibull(shape, scale, nsamples=1):
    from numpy.random import weibull
    x = scale * weibull(shape, nsamples)
    return x
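numpy.random.weibull has no scale argument, so the multiplication supplies it; for example:

samples = draw_weibull(2.0, 10.0, nsamples=3)  # three Weibull(shape=2, scale=10) draws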
Example #11
 def _get_random_scalars(self):
     return random.weibull(1, size=self.n_nodes)
Example #12
def weibull(size, params):
    try:
        return random.weibull(params['a'], size)
    except ValueError as e:
        raise SystemExit(e)
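A usage sketch (the shape must be passed under the key 'a', as the wrapper expects):

samples = weibull(1000, {'a': 1.5})  # 1000 draws with shape 1.5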
Example #13
    
    # (excerpt of a longer benchmark comparing dsg with numpy.random (nr))
    timecost.append([mid_time-start_time,time.time()-mid_time])

    #poisson
    start_time=time.time()
    a=dsg.poisson(1.5,times)
    mid_time=time.time()
    b=nr.poisson(1.5,times)
    
    timecost.append([mid_time-start_time,time.time()-mid_time])

    #weibull
    start_time=time.time()
    a=dsg.weibull(2,1,times)
    mid_time=time.time()
    b=nr.weibull(2,times)
    timecost.append([mid_time-start_time,time.time()-mid_time])

    #gumbel
    start_time=time.time()
    a=dsg.gumbel(2,3,times)
    mid_time=time.time()
    b=nr.gumbel(2,3,times)
    
    timecost.append([mid_time-start_time,time.time()-mid_time])

    #dirichlet
    start_time=time.time()
    a=dsg.dirichlet([1,2,3,4,5],times)
    mid_time=time.time()
    b=nr.dirichlet([1,2,3,4,5],times)
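A compact helper expressing the same measurement pattern (time_pair is a hypothetical name; time.perf_counter is used instead of time.time for better resolution):

import time

def time_pair(f, g, *args):
    # Time f and g on identical arguments; returns (seconds_f, seconds_g).
    t0 = time.perf_counter()
    f(*args)
    t1 = time.perf_counter()
    g(*args)
    return t1 - t0, time.perf_counter() - t1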
Example #14
 def _get_random_scalars(self):
     return random.weibull(1, size=self.n_nodes)
Example #15
def generateRocks(context, scaleX, skewX, scaleY, skewY, scaleZ, skewZ,
                  scale_fac, detail, display_detail, deform, rough,
                  smooth_fac, smooth_it,
                  numOfRocks=1, userSeed=1.0,
                  scaleDisplace=False, randomSeed=True):
    global LASTROCK
    sigmaX = 0
    sigmaY = 0
    sigmaZ = 0
    upperSkewX = False
    upperSkewY = False
    upperSkewZ = False
    shift = 0
    # vertexScaling = []

    # Seed the random Gaussian value generator:
    if randomSeed:
        seed(int(time.time()))
    else:
        seed(userSeed)

    # These values need to be really small to look good, so scale them down
    # here rather than making the user type such ridiculously small values:
    deform /= 10
    rough /= 100

    # Verify that the min really is the min:
    if scaleX[1] < scaleX[0]:
        scaleX[0], scaleX[1] = scaleX[1], scaleX[0]
    if scaleY[1] < scaleY[0]:
        scaleY[0], scaleY[1] = scaleY[1], scaleY[0]
    if scaleZ[1] < scaleZ[0]:
        scaleZ[0], scaleZ[1] = scaleZ[1], scaleZ[0]

    # todo: edit below to allow for skewing the distribution
    #   *** todo completed 4/22/2011 ***
    #   *** Code now generating "int not scriptable error" in Blender ***
    #
    # Calculate mu and sigma for a Gaussian distributed random number
    #   generation:
    # If the lower and upper bounds are the same, skip the math.
    #
    # sigma is the standard deviation of the values.  The 95% interval is three
    # standard deviations, which is what we want most generated values to fall
    # in.  Since it might be skewed we are going to use half the difference
    # between the mean and the furthest bound and scale the other side down
    # post-number generation.
    if scaleX[0] != scaleX[1]:
        skewX = (skewX + 1) / 2
        muX = scaleX[0] + ((scaleX[1] - scaleX[0]) * skewX)
        if skewX < 0.5:
            sigmaX = (scaleX[1] - muX) / 3
        else:
            sigmaX = (muX - scaleX[0]) / 3
            upperSkewX = True
    else:
        muX = scaleX[0]
    if scaleY[0] != scaleY[1]:
        skewY = (skewY + 1) / 2
        muY = scaleY[0] + ((scaleY[1] - scaleY[0]) * skewY)
        if skewY < 0.5:
            sigmaY = (scaleY[1] - muY) / 3
        else:
            sigmaY = (muY - scaleY[0]) / 3
            upperSkewY = True
    else:
        muY = scaleY[0]
    if scaleZ[0] != scaleZ[1]:
        skewZ = (skewZ + 1) / 2
        muZ = scaleZ[0] + ((scaleZ[1] - scaleZ[0]) * skewZ)
        if skewZ < 0.5:
            sigmaZ = (scaleZ[1] - muZ) / 3
        else:
            sigmaZ = (muZ - scaleZ[0]) / 3
            upperSkewZ = True
    else:
        muZ = scaleZ[0]

    for i in range(numOfRocks):
        # todo: enable different random values for each (x,y,z) coordinate for
        # each vertex.  This will add additional randomness to the shape of the
        # generated rocks.
        #   *** todo completed 4/19/2011 ***
        #   *** Code is notably slower at high rock counts ***

        # name = generateObject(context, muX, sigmaX, scaleX, upperSkewX, muY,
        rock = generateObject(
            context, muX, sigmaX, scaleX, upperSkewX, muY,
            sigmaY, scaleY, upperSkewY, muZ, sigmaZ, scaleZ,
            upperSkewZ, i, LASTROCK, scaleDisplace, scale_fac)

        # rock = bpy.data.objects[name]

        # todo Map what the two new textures will be:
        # This is not working.  It works on paper so . . . ???
        #   *** todo completed on 4/23/2011 ***
        #   *** todo re-added as the first rock is getting
        #       'Texture.001' twice. ***
        #   *** todo completed on 4/25/2011 ***
        #   *** Script no longer needs to map new texture names 9/6/2011 ***

        # Create the four new textures:
        # todo Set displacement texture parameters:
        #   *** todo completed on 5/31/2011 ***
        # Voronoi has been removed from being an option for the fine detail
        #   texture.
        texTypes = ['CLOUDS', 'MUSGRAVE', 'DISTORTED_NOISE', 'STUCCI', 'VORONOI']
        newTex = []
        # The first texture is to give a more random base shape appearance:
        newTex.append(bpy.data.textures.new(
            name='rock_displacement',
            type=texTypes[1]))
        randomizeTexture(newTex[0], 0)
        newTex.append(bpy.data.textures.new(
            name='rock_displacement',
            type=texTypes[4]))
        randomizeTexture(newTex[1], 0)
        if numpy:
            newTex.append(bpy.data.textures.new(
                name='rock_displacement',
                type=texTypes[int(round(weibull(1, 1)[0] / 2.125))]))
            randomizeTexture(newTex[2], 1)
            newTex.append(bpy.data.textures.new(
                name='rock_displacement',
                type=texTypes[int(round(weibull(1, 1)[0] / 2.125))]))
            randomizeTexture(newTex[3], 2)
        else:
            newTex.append(bpy.data.textures.new(
                name='rock_displacement',
                type=texTypes[int(round(weibull(1, 1) / 2.125))]))
            randomizeTexture(newTex[2], 1)
            newTex.append(bpy.data.textures.new(
                name='rock_displacement',
                type=texTypes[int(round(weibull(1, 1) / 2.125))]))
            randomizeTexture(newTex[3], 2)

        # Add modifiers:
        rock.modifiers.new(name="Subsurf", type='SUBSURF')
        rock.modifiers.new(name="Subsurf", type='SUBSURF')
        rock.modifiers.new(name="Displace", type='DISPLACE')
        rock.modifiers.new(name="Displace", type='DISPLACE')
        rock.modifiers.new(name="Displace", type='DISPLACE')
        rock.modifiers.new(name="Displace", type='DISPLACE')

        # If smoothing is enabled, allow a little randomness into the
        #   smoothing factor. Then add the smoothing modifier.
        if smooth_fac > 0.0 and smooth_it > 0:
            rock.modifiers.new(name="Smooth", type='SMOOTH')
            rock.modifiers[6].factor = gauss(smooth_fac, (smooth_fac ** 0.5) / 12)
            rock.modifiers[6].iterations = smooth_it
        # Make a call to random to keep things consistent:
        else:
            gauss(0, 1)

        # Set subsurf modifier parameters:
        rock.modifiers[0].levels = display_detail
        rock.modifiers[0].render_levels = detail
        rock.modifiers[1].levels = display_detail
        rock.modifiers[1].render_levels = detail

        # todo Set displacement modifier parameters:
        #   *** todo completed on 4/23/2011 ***
        #   *** toned down the variance on 4/26/2011 ***
        #   *** added third modifier on 4/28/2011 ***
        #   *** texture access changed on 9/6/2011 ***
        rock.modifiers[2].texture = newTex[0]
        rock.modifiers[2].strength = gauss(deform / 100, (1 / 300) * deform)
        rock.modifiers[2].mid_level = 0
        rock.modifiers[3].texture = newTex[1]
        rock.modifiers[3].strength = gauss(deform, (1 / 3) * deform)
        rock.modifiers[3].mid_level = 0
        rock.modifiers[4].texture = newTex[2]
        rock.modifiers[4].strength = gauss(rough * 2, (1 / 3) * rough)
        rock.modifiers[5].texture = newTex[3]
        rock.modifiers[5].strength = gauss(rough, (1 / 3) * rough)

        # Set mesh to be smooth and fix the normals:
        utils.smooth(rock.data)
        # utils.smooth(bpy.data.meshes[name])
        bpy.ops.object.editmode_toggle()
        bpy.ops.mesh.normals_make_consistent()
        bpy.ops.object.editmode_toggle()

        # Store the last value of i:
        shift = i

    # Add the shift to LASTROCK:
    LASTROCK += shift + 1

    return
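The weibull(1, 1) / 2.125 expression biases the texture choice toward the first entries of texTypes, but a shape-1 Weibull draw is exponential and unbounded, so the rounded index can occasionally exceed the list. A clamped variant (an assumption, not the add-on's code):

from numpy.random import weibull

def biased_tex_index(n_types=5):
    # Small draws dominate, so low indices are favored; min() guards
    # the rare large draw that would otherwise index out of range.
    return min(int(round(weibull(1, 1)[0] / 2.125)), n_types - 1)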
Example #16
def ex2_level(year):
    """
    Create a file with daily data for a specific year >= 2000; two variables, with an added level dimension
    :param year: year of file
    """
    assert (year >= 2000)
    # time: 365 daily values, counted from 2000; 2 variables
    file_name = 'ex2level_ens101_' + str(year) + '.nc'
    file_name = os.path.join(directories.DATA, file_name)
    dataset = Dataset(file_name, 'w', format='NETCDF4_CLASSIC')

    # Create dimensions
    dataset.createDimension('level', 5)
    dataset.createDimension('latitude', 10)
    dataset.createDimension('longitude', 20)
    dataset.createDimension('time', None)

    levels = dataset.createVariable('level', np.float64, ('level', ))
    times = dataset.createVariable('time', np.float64, ('time', ))
    latitudes = dataset.createVariable('latitude', np.float64, ('latitude', ))
    longitudes = dataset.createVariable('longitude', np.float64,
                                        ('longitude', ))

    # Create the 4D variables
    temp = dataset.createVariable('temp',
                                  np.float32,
                                  ('level', 'time', 'latitude', 'longitude'),
                                  fill_value=-1.e+20)
    sal = dataset.createVariable('sal',
                                 np.float32,
                                 ('level', 'time', 'latitude', 'longitude'),
                                 fill_value=-1.e+20)

    # Global Attributes
    dataset.description = 'Example NetCDF file'
    dataset.history = 'Created by Adanna Akwataghibe ' + time.ctime(
        time.time())
    dataset.source = 'netCDF4 python module tutorial'

    # Variable Attributes
    latitudes.units = 'degree_north'
    longitudes.units = 'degree_east'
    longitudes.long_name = 'longitude'
    latitudes.long_name = 'latitude'
    times.calendar = 'NOLEAP'
    times.calendar_type = 'NOLEAP'
    times.units = 'days since 2000-01-01 00:00:00'
    times.long_name = 'time'
    temp.missing_value = -1.e+20
    temp.units = 'K'
    temp.long_name = 'temperature'
    sal.long_name = 'salinity'
    levels.units = "hPa"

    # Fill in lons and lats
    lats = np.linspace(-90, 90, 10)
    lons = np.linspace(-180, 180, 20)
    latitudes[:] = lats
    longitudes[:] = lons
    levels[:] = [100, 75, 50, 25, 0]

    nlats = len(dataset.dimensions['latitude'])
    nlons = len(dataset.dimensions['longitude'])
    temp[:, :, :, :] = uniform(size=(5, 365, nlats, nlons))
    sal[:, :, :, :] = weibull(5, size=(5, 365, nlats, nlons))

    times[:] = np.asarray(range(365)) + 365 * (year - 2000)

    print("New file " + file_name + " created in " + directories.DATA +
          " directory.")

    dataset.close()
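A quick read-back check with netCDF4 (the file path assumes ex2_level(2000) was called and the file is in the working directory):

from netCDF4 import Dataset

ds = Dataset('ex2level_ens101_2000.nc')
print(ds.variables['sal'].shape)  # (5, 365, 10, 20)
ds.close()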
Example #17
 def weibull(self, a):
     '''
     Parameters:
     a: float, >= 0 (Weibull shape parameter).
     '''
     return r.weibull(a, self.size)
Example #18
 def draw(self):
     return weibull(self.k) * self.lambda_
Example #19
def wei(df):
    """Weibull distribution."""
    weibull(df)
Example #20
 def _get_scalars(self):
     return random.weibull(1, size=8)
Example #21
def draw_weibull(shape, scale, nsamples):
    x = scale * weibull(shape, nsamples)
    return x
Example #22
def draw_weibull(shape, scale, nsamples=1):
    from numpy.random import weibull
    x = scale * weibull(shape, nsamples)
    return x
Example #23
def generateRocks(context, scaleX, skewX, scaleY, skewY, scaleZ, skewZ,
                  scale_fac, detail, display_detail, deform, rough,
                  smooth_fac, smooth_it,
                  numOfRocks=1, userSeed=1.0,
                  scaleDisplace=False, randomSeed=True, use_enter_edit_mode=False):
    global LASTROCK
    sigmaX = 0
    sigmaY = 0
    sigmaZ = 0
    upperSkewX = False
    upperSkewY = False
    upperSkewZ = False
    shift = 0
    # vertexScaling = []

    # Seed the random Gaussian value generator:
    if randomSeed:
        seed(int(time.time()))
    else:
        seed(userSeed)

    # These values need to be really small to look good, so scale them down
    # here rather than making the user type such ridiculously small values:
    deform /= 10
    rough /= 100

    # Verify that the min really is the min:
    if scaleX[1] < scaleX[0]:
        scaleX[0], scaleX[1] = scaleX[1], scaleX[0]
    if scaleY[1] < scaleY[0]:
        scaleY[0], scaleY[1] = scaleY[1], scaleY[0]
    if scaleZ[1] < scaleZ[0]:
        scaleZ[0], scaleZ[1] = scaleZ[1], scaleZ[0]

    # todo: edit below to allow for skewing the distribution
    #   *** todo completed 4/22/2011 ***
    #   *** Code now generating "int not scriptable error" in Blender ***
    #
    # Calculate mu and sigma for a Gaussian distributed random number
    #   generation:
    # If the lower and upper bounds are the same, skip the math.
    #
    # sigma is the standard deviation of the values.  The 95% interval is three
    # standard deviations, which is what we want most generated values to fall
    # in.  Since it might be skewed we are going to use half the difference
    # between the mean and the furthest bound and scale the other side down
    # post-number generation.
    if scaleX[0] != scaleX[1]:
        skewX = (skewX + 1) / 2
        muX = scaleX[0] + ((scaleX[1] - scaleX[0]) * skewX)
        if skewX < 0.5:
            sigmaX = (scaleX[1] - muX) / 3
        else:
            sigmaX = (muX - scaleX[0]) / 3
            upperSkewX = True
    else:
        muX = scaleX[0]
    if scaleY[0] != scaleY[1]:
        skewY = (skewY + 1) / 2
        muY = scaleY[0] + ((scaleY[1] - scaleY[0]) * skewY)
        if skewY < 0.5:
            sigmaY = (scaleY[1] - muY) / 3
        else:
            sigmaY = (muY - scaleY[0]) / 3
            upperSkewY = True
    else:
        muY = scaleY[0]
    if scaleZ[0] != scaleZ[1]:
        skewZ = (skewZ + 1) / 2
        muZ = scaleZ[0] + ((scaleZ[1] - scaleZ[0]) * skewZ)
        if skewZ < 0.5:
            sigmaZ = (scaleZ[1] - muZ) / 3
        else:
            sigmaZ = (muZ - scaleZ[0]) / 3
            upperSkewZ = True
    else:
        muZ = scaleZ[0]

    rocks = []

    for i in range(numOfRocks):
        # todo: enable different random values for each (x,y,z) coordinate for
        # each vertex.  This will add additional randomness to the shape of the
        # generated rocks.
        #   *** todo completed 4/19/2011 ***
        #   *** Code is notably slower at high rock counts ***

        # name = generateObject(context, muX, sigmaX, scaleX, upperSkewX, muY,
        rock = generateObject(
            context, muX, sigmaX, scaleX, upperSkewX, muY,
            sigmaY, scaleY, upperSkewY, muZ, sigmaZ, scaleZ,
            upperSkewZ, i, LASTROCK, scaleDisplace, scale_fac)

        # rock = bpy.data.objects[name]

        # todo Map what the two new textures will be:
        # This is not working.  It works on paper so . . . ???
        #   *** todo completed on 4/23/2011 ***
        #   *** todo re-added as the first rock is getting
        #       'Texture.001' twice. ***
        #   *** todo completed on 4/25/2011 ***
        #   *** Script no longer needs to map new texture names 9/6/2011 ***

        # Create the four new textures:
        # todo Set displacement texture parameters:
        #   *** todo completed on 5/31/2011 ***
        # Voronoi has been removed from being an option for the fine detail
        #   texture.
        texTypes = ['CLOUDS', 'MUSGRAVE', 'DISTORTED_NOISE', 'STUCCI', 'VORONOI']
        newTex = []
        # The first texture is to give a more random base shape appearance:
        newTex.append(bpy.data.textures.new(
            name='rock_displacement',
            type=texTypes[1]))
        randomizeTexture(newTex[0], 0)
        newTex.append(bpy.data.textures.new(
            name='rock_displacement',
            type=texTypes[4]))
        randomizeTexture(newTex[1], 0)
        if numpy:
            newTex.append(bpy.data.textures.new(
                name='rock_displacement',
                type=texTypes[int(round(weibull(1, 1)[0] / 2.125))]))
            randomizeTexture(newTex[2], 1)
            newTex.append(bpy.data.textures.new(
                name='rock_displacement',
                type=texTypes[int(round(weibull(1, 1)[0] / 2.125))]))
            randomizeTexture(newTex[3], 2)
        else:
            newTex.append(bpy.data.textures.new(
                name='rock_displacement',
                type=texTypes[int(round(weibull(1, 1) / 2.125))]))
            randomizeTexture(newTex[2], 1)
            newTex.append(bpy.data.textures.new(
                name='rock_displacement',
                type=texTypes[int(round(weibull(1, 1) / 2.125))]))
            randomizeTexture(newTex[3], 2)

        # Add modifiers:
        rock.modifiers.new(name="Subsurf", type='SUBSURF')
        rock.modifiers.new(name="Subsurf", type='SUBSURF')
        rock.modifiers.new(name="Displace", type='DISPLACE')
        rock.modifiers.new(name="Displace", type='DISPLACE')
        rock.modifiers.new(name="Displace", type='DISPLACE')
        rock.modifiers.new(name="Displace", type='DISPLACE')

        # If smoothing is enabled, allow a little randomness into the
        #   smoothing factor. Then add the smoothing modifier.
        if smooth_fac > 0.0 and smooth_it > 0:
            rock.modifiers.new(name="Smooth", type='SMOOTH')
            rock.modifiers[6].factor = gauss(smooth_fac, (smooth_fac ** 0.5) / 12)
            rock.modifiers[6].iterations = smooth_it
        # Make a call to random to keep things consistent:
        else:
            gauss(0, 1)

        # Set subsurf modifier parameters:
        rock.modifiers[0].levels = display_detail
        rock.modifiers[0].render_levels = detail
        rock.modifiers[1].levels = display_detail
        rock.modifiers[1].render_levels = detail

        # todo Set displacement modifier parameters:
        #   *** todo completed on 4/23/2011 ***
        #   *** toned down the variance on 4/26/2011 ***
        #   *** added third modifier on 4/28/2011 ***
        #   *** texture access changed on 9/6/2011 ***
        rock.modifiers[2].texture = newTex[0]
        rock.modifiers[2].strength = gauss(deform / 100, (1 / 300) * deform)
        rock.modifiers[2].mid_level = 0
        rock.modifiers[3].texture = newTex[1]
        rock.modifiers[3].strength = gauss(deform, (1 / 3) * deform)
        rock.modifiers[3].mid_level = 0
        rock.modifiers[4].texture = newTex[2]
        rock.modifiers[4].strength = gauss(rough * 2, (1 / 3) * rough)
        rock.modifiers[5].texture = newTex[3]
        rock.modifiers[5].strength = gauss(rough, (1 / 3) * rough)

        # Set mesh to be smooth and fix the normals:
        utils.smooth(rock.data)
        # utils.smooth(bpy.data.meshes[name])
        bpy.ops.object.editmode_toggle()
        bpy.ops.mesh.normals_make_consistent()
        bpy.ops.object.editmode_toggle()

        if use_enter_edit_mode:
            for m in rock.modifiers:
                m.show_in_editmode = True
                m.show_on_cage = True

        # Store the last value of i:
        shift = i

        rocks.append(rock)

    # Add the shift to LASTROCK:
    LASTROCK += shift + 1

    return rocks
Example #24
	def tal():
		a, B = 0.96357, 0.05201  # shape, scale (numpy's weibull(a) takes the shape; B rescales the draw)
		s = (weibull(a, 1) * B)[0]
		return int(round(s * 24 * 60))
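With numpy's one-parameter weibull, a is the shape and B the scale, so tal converts a day-valued Weibull draw into a whole number of minutes; for example (assuming tal is callable at module scope):

print([tal() for _ in range(5)])  # five random durations, in minutes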