Example 1
# imports assumed from numpy; not shown in the original snippet
from numpy import pi, cos, sin
from numpy.random import rayleigh, random


def sampleEllipticity(size, sigma_e=0.3):
    """Draw ellipticity samples from the Rayleigh distribution.

    Samples from the Rayleigh distribution of width sigma_e, but makes sure
    that no ellipticities with |epsilon| >= 1 are created by resampling the
    outliers.
    The orientation of the sample is uniform in the interval [0 .. pi).

    Args:
        size: A positive integer.
        sigma_e: The width parameter/mode of the Rayleigh distribution

    Returns:
        A complex numpy array of given size. 
    """
    e = rayleigh(sigma_e, size)
    # make sure no outliers are created
    # this effectively tightens the distribution
    mask = (e >= 1)
    while sum(mask) > 0:
        e[mask] = rayleigh(sigma_e, sum(mask))
        mask = (e >= 1)
    # sample a uniformly random orientation and create the complex ellipticity
    phi = pi*random(size)
    return e*(cos(2*phi) + 1j * sin(2*phi))
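A minimal usage sketch (assuming the numpy imports above); every sampled ellipticity has modulus below 1:

eps = sampleEllipticity(1000, sigma_e=0.25)
print(abs(eps).max() < 1.0)  # True: outliers with |epsilon| >= 1 were resampled away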
Example 2
def task_generation(num_intervals, num_periods, num_intervals_periods,
                    mode_value, l_demands, p_d_short, cf_max):
    # generation - demand
    demand = r.choice(l_demands)
    demand = int(demand * 1000)

    # generation - duration
    duration = max(1, int(random.rayleigh(mode_value, 1)[0]))

    # generation - preferred start time
    p_start = max(
        int(np.random.choice(a=num_periods, size=1, p=p_d_short)[0]) *
        num_intervals_periods +
        r.randint(-num_intervals_periods + 1, num_intervals_periods), 0)
    p_start = min(p_start, num_intervals - 1)

    # generation - earliest starting time
    # e_start = r.randint(-duration + 1, p_start)
    e_start = 0

    # generation - latest finish time
    # l_finish = r.randint(p_start + duration, num_intervals - 1 + duration)
    l_finish = num_intervals - 1 + duration

    # generation - care factor
    # care_f = int(r.choice([i for i in range(1, cf_max + 1)]))
    care_f = r.randint(1, cf_max + 1)

    return demand, duration, p_start, e_start, l_finish, care_f
Example 3
def rat_path(environment,
             t=15,
             delta_t=0.1,
             d_from_walls=2,
             v_scale=1.4,
             r_mu=0,
             r_std=5.7,
             perim_v_reduc=0.25,
             perim_r_change=np.pi / 2):
    assert environment.agent.radius is not None
    assert environment.agent.radius < d_from_walls
    assert len(environment.rooms) > 0

    n_samples = int(t // delta_t)
    v_scale *= delta_t
    r_mu *= delta_t
    r_std *= delta_t

    # Path data to return
    positions = np.zeros((n_samples, 2), dtype=np.float32)
    directions = np.zeros((n_samples), dtype=np.float32)
    movements = np.zeros((n_samples), dtype=np.float32)
    rotations = np.zeros((n_samples), dtype=np.float32)

    # Initialization
    positions[0], directions[0] = start_state(environment, d_from_walls)
    random_rotations = normal(r_mu, r_std, size=n_samples)
    random_movements = rayleigh(v_scale, size=n_samples)
    movement = random_movements[0]

    for step in range(1, n_samples):
        closest_wall_d, closest_wall_r = closest_wall(environment,
                                                      positions[step - 1],
                                                      directions[step - 1])
        if closest_wall_d < d_from_walls and np.abs(
                closest_wall_r) < perim_r_change:
            sign = np.sign(closest_wall_r) if closest_wall_r != 0 else 1
            rotation = sign * (perim_r_change -
                               np.abs(closest_wall_r)) + random_rotations[step]
            movement = (1 - perim_v_reduc) * movement
        else:
            rotation = random_rotations[step]
            movement = random_movements[step]

        direction = dir_angle(directions[step - 1] + rotation)
        velocity = movement * np.array([np.cos(direction), np.sin(direction)])
        new_pos = positions[step - 1] + velocity

        positions[step] = new_pos
        directions[step] = direction
        movements[step] = movement
        rotations[step] = rotation

    return {
        'positions': positions,
        'directions': directions,
        'movements': movements,
        'rotations': rotations
    }
Example 4
def time_to_mutation_rate(tree):
    if not hasattr(GC, "NUMPY_SEEDED"):
        from numpy.random import seed as numpy_seed
        numpy_seed(seed=GC.random_number_seed)
        GC.random_number_seed += 1
        GC.NUMPY_SEEDED = True
    t = read_tree_newick(tree)
    for node in t.traverse_preorder():
        if node.edge_length is not None:
            node.edge_length *= rayleigh(scale=GC.tree_rate_scale)
    return str(t)
Example 5
from numpy import array, clip, uint8  # imports assumed from numpy; not shown in the original snippet
from numpy.random import rayleigh


def rayleigh_noise(image, var=0.1):
    '''
        Add Rayleigh noise to an image.
        var : variance of the noise
    '''
    image = array(image/255, dtype=float)
    # Rayleigh distribution with scale sqrt(var)
    noise = rayleigh(var ** 0.5, image.shape)
    out = image + noise
    if out.min() < 0:
        low_clip = -1.
    else:
        low_clip = 0.
    out = clip(out, low_clip, 1.0)
    out = uint8(out*255)
    return out
Example 6
def noisy_path(environment,
               n_samples=20,
               m_scale=0.5,
               r_scale=5,
               min_d_wall=2,
               r_from_walls=np.pi / 1.5):
    assert environment.agent.radius is not None
    assert environment.agent.radius < min_d_wall
    assert len(environment.rooms) > 0

    p, d = start_state(environment, min_d_wall + 1)
    positions = [p]
    directions = [d]
    movements = [0]
    rotations = [0]

    for _ in range(1, n_samples):
        r = (beta(r_scale, r_scale) - 0.5) * 2 * np.pi
        m = rayleigh(m_scale)

        d_prop = dir_angle(d + r)
        p_prop = p + m * np.array([
            np.cos(d_prop), -np.sin(d_prop)
        ])  # -sin because of left-handed OpenGL coordinates
        d_wall, r_wall = closest_wall(environment, p_prop, d_prop)
        if d_wall < min_d_wall:
            m = 0
            sign = np.sign(r_wall) if r_wall != 0 else 1
            r = sign * (r_from_walls - np.abs(r_wall))
            d = dir_angle(d + r)
        else:
            d = d_prop
            p = p_prop

        positions.append(p)
        directions.append(d)
        movements.append(m)
        rotations.append(r)

    return {
        'positions': np.stack(positions).astype(np.float32),
        'directions': np.array(directions, dtype=np.float32),
        'movements': np.array(movements, dtype=np.float32),
        'rotations': np.array(rotations, dtype=np.float32)
    }
x  = np.array([x1])
y  = np.array([y1])
z  = np.array([z1])
vx = np.array([vx1])
vy = np.array([vy1])
vz = np.array([vz1])
mass=np.array([muB*MB])
name=np.array(['S2']) #the central object's name is S1

#######################################################################################################################
# (2) Second stage
# Generate a population of orbiting test particles
# (2)(a) 1st, generate population of orbital elements

#eccentricities
e=ran.rayleigh(0.01,N_samples)
#inclinations
I=ran.rayleigh(0.01*180.0/np.pi,N_samples)
#masses (test particles)
m=np.zeros(N_samples)
#semimajor axes
#a=10.0**(ran.random_sample(N_samples)*(np.log10(a_max)-np.log10(a_min)) + np.log10(a_min))
a=10.0**(np.arange(0,1.0,1.0/N_samples)*(np.log10(a_max)-np.log10(a_min)) + np.log10(a_min))
#longitudes of pericenter
omega=ran.random_sample(N_samples)*180.0
#longitudes of ascending nodes
Omega=ran.random_sample(N_samples)*180.0
#mean anomalies
MeanAnom = ran.random_sample(N_samples)*180.0

# (2)(b) 2nd, generate a list of names for the bodies
Example 8
plt.show()
'''Relation Between Poisson and Exponential Distribution
The Poisson distribution deals with the number of occurrences of an event in a time period,
whereas the exponential distribution deals with the time between these events.'''
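# Illustrative sketch (not part of the original tutorial; assumes 'random' is numpy.random,
# as in the calls below): with exponential inter-arrival times of mean 1/lam, the number of
# events falling in a unit-length window follows a Poisson(lam) distribution.
lam = 4.0
gaps = random.exponential(scale=1 / lam, size=20000)
arrival_times = gaps.cumsum()
window_counts = [int(((arrival_times >= t) & (arrival_times < t + 1)).sum()) for t in range(1000)]
print('empirical mean events per window:', sum(window_counts) / len(window_counts), '~ lam =', lam)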

print('Chi Square Distribution')
'''The chi-square distribution is used as the basis of statistical hypothesis tests.'''
x = random.chisquare(df=2, size=(2, 3))
print(x)

sns.distplot(random.chisquare(df=1, size=1000), hist=False)
plt.show()

print('Rayleigh Distribution')
'''Rayleigh distribution is used in signal processing.'''
x = random.rayleigh(scale=2, size=(2, 3))
print(x)
sns.distplot(random.rayleigh(size=1000), hist=False)
plt.show()
'''Similarity Between Rayleigh and Chi-Square Distribution
A Rayleigh distribution with unit scale is the distribution of the square root of a
chi-square variable with 2 degrees of freedom.'''
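# Quick numerical sketch of that relationship (assumption: 'random' is numpy.random,
# as in the calls above): the two curves below should overlap.
sns.distplot(random.rayleigh(scale=1, size=1000), hist=False, label='rayleigh(scale=1)')
sns.distplot(random.chisquare(df=2, size=1000) ** 0.5, hist=False, label='sqrt(chisquare(df=2))')
plt.show()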
print('Pareto Distribution')
'''A distribution following Pareto's law,
i.e. the 80-20 rule (20% of the factors cause 80% of the outcome).'''
sns.distplot(random.pareto(a=2, size=1000), kde=False)

plt.show()
print('Zipf Distribution')
'''Zipf's law: in a collection, the nth most common term occurs about 1/n times as often
as the most common term. E.g. the 5th most common word in English occurs roughly
one-fifth as often as the most common word.'''
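# Illustrative sketch, not from the original snippet: sample from the Zipf distribution
# and plot only the small values so the heavy tail does not dominate the figure.
x = random.zipf(a=2, size=1000)
print(x)
sns.distplot(x[x < 10], kde=False)
plt.show()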
def Rayleigh(scale, size):

    Amplitude = npr.rayleigh(scale, size)

    return Amplitude
def tasks():
    no_intervals = 144
    no_tasks = 10
    no_intervals_periods = int(no_intervals / 48)

    p_d = genfromtxt('probability.csv', delimiter=',').tolist()

    sum_t = sum(p_d[0])
    p_d_short = [p / sum_t for p in p_d[0]]
    period_options = [i for i in range(48)]

    l_demands = [1.5, 2.3, 3.5, 6, 0.008, 1.1, 2.4, 0.6, 0.5, 0.004, 0.002, 4, 0.6, 0.1, 0.015, 2.4, 0.05, 0.12, 1.2,
                 2.2,
                 0.7, 1.7, 2.1, 0.0015, 0.09, 0.05, 0.01, 0.056, 0.072, 0.65, 2, 1.5, 0.1, 2.4, 1.2, 2.4, 1.2, 1, 0.3,
                 2.4,
                 1.2, 0.075, 0.052, 0.015, 0.045, 0.011, 0.0625, 0.15, 1, 0.005, 1.1, 5, 0.55, 0.1, 0.14, 0.038, 0.035,
                 0.068, 0.072, 0.093, 0.148, 0.7, 0.3, 1, 0.08, 0.12, 0.015, 6, 0.02, 0.075, 0.055, 0.03, 0.13, 0.05,
                 0.21,
                 0.1, 0.005, 1, 3.6, 1.2, 0.9, 1.2, 1.2, 0.05, 0.06, 0.9, 0.4, 2.4, 0.35, 2]

    # the intended mean duration is 40 minutes, converted here to a number of intervals
    mean_value = 40.0 / (24.0 * 60.0 / no_intervals)
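    # for a Rayleigh distribution the mean is scale * sqrt(pi/2) and the mode equals the
    # scale parameter, so the scale (mode) is recovered as mean * sqrt(2/pi)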
    mode_value = sqrt(2 / pi) * mean_value

    # task details
    preferred_starts = []
    earliest_starts = []
    latest_ends = []
    durations = []
    demands = []
    care_factors = []
    predecessors = []
    successors = []
    prec_delays = []
    aggregated_loads = [0] * no_intervals

    for counter_j in range(no_tasks):

        # job consumption per hour
        demand = r.choice(l_demands)
        demands.append(int(demand * 1000))

        # job duration
        duration = int(random.rayleigh(mode_value, 1)[0])
        while duration == 0:
            duration = int(random.rayleigh(mode_value, 1)[0])
        durations.append(duration)

        # job preferred start time
        middle_point = int(np.random.choice(period_options, 1, p=p_d_short)[0] *
                           no_intervals_periods + r.randint(0, 3))
        p_start = (middle_point - int(duration / 2)) % no_intervals
        while p_start + duration - 1 >= no_intervals:
            middle_point = int(
                np.random.choice(period_options, 1, p=p_d_short)[0] * no_intervals_periods + r.randint(0, 3))
            p_start = (middle_point - int(duration / 2)) % no_intervals
        preferred_starts.append(p_start)

        # job earliest starting time
        # e_start = r.choice([i for i in range(-duration + 1, p_start + 1)])
        e_start = 0
        earliest_starts.append(e_start)

        # job latest finish time
        # l_finish = r.choice([i for i in range(p_start - duration + 1, num_intervals - 1 + duration)])
        l_finish = no_intervals - 1
        latest_ends.append(l_finish)

        # job care factor
        care_f = round(r.random(), 1)
        if care_f == 0:
            care_f = 0.01
        care_factors.append(int(care_f * 10))

        if r.choice([True, False]) and counter_j > 0:
            successors.append(counter_j + 1)

            id_predecessor_set = [i for i in range(counter_j)]
            id_predecessor = r.choice(id_predecessor_set)
            predecessors.append(id_predecessor + 1)

            delay = 0 if durations[id_predecessor] + duration >= no_intervals \
                else r.randint(0, no_intervals - durations[id_predecessor] - duration - 1)
            # delay = 144
            prec_delays.append(delay)

        for d in range(duration):
            aggregated_loads[p_start + d] += demand
            # if p_start + k <= num_intervals - 1:
            #     aggregated_loads[p_start + k] += demand
            # else:
            #     aggregated_loads[p_start + k - num_intervals] += demand

    no_precedences = len(predecessors)
    maximum_demand = sum(demands)

    return no_intervals, preferred_starts, no_tasks, earliest_starts, latest_ends, durations, demands, care_factors, \
           no_precedences, predecessors, successors, prec_delays, maximum_demand, aggregated_loads
Example 11
def new(self):
    t = rn.rayleigh(self.sigma, 1)[0]
    while t < 0:
        t = rn.rayleigh(self.sigma, 1)[0]
    return t
Example 12
def np_rayleigh_distribution():
    x = random.rayleigh(scale=2, size=(2, 3))
    print(x)
    sns.distplot(random.rayleigh(size=1000), hist=False)
    plt.show()
2. Physical sciences: To model wind speed, wave heights and sound/light radiation.
3. Engineering: To measure the lifetime of an object, where the lifetime depends on the object’s age. 
For example: resistors, transformers, and capacitors in aircraft radar sets.
4. Medical: Imaging science, to model noise variance in magnetic resonance imaging.

Arguments of numpy.random.rayleigh():

1. scale: (standard deviation, default 1.0) Decides how flat the distribution will be.
2. size: The shape of the returned matrix or array.
'''

import numpy.random as r
import matplotlib.pyplot as plt
import seaborn as sns

ray = r.rayleigh(size=(10))
# print('\nRay: ',ray)

# seaborn
sns.distplot(r.rayleigh(size=(100)), hist=False, label='1.0')
sns.distplot(r.rayleigh(scale=2.0, size=(100)), hist=False, label='2.0')
sns.distplot(r.rayleigh(scale=3.0, size=(100)), hist=False, label='3.0')
sns.distplot(r.rayleigh(scale=4.0, size=(100)), hist=False, label='4.0')

# plt
plt.xlabel('Range of x')
# plt.xlim(0, 10)
# plt.ylim(0,0.5)
plt.ylabel('Range of y')
plt.title('Rayleigh Distribution')
plt.show()
# rayleigh distribution used in signal processing.
# it has two parameters.
# scale - (standard deviation) decides how flat the distribution will be (default 1.0).
# size - shape of returned array.
from numpy import random
import matplotlib.pyplot as plt
import seaborn as sns

arr1 = random.rayleigh(scale=2, size=10)
print(arr1)
sns.distplot(arr1, hist=False)
plt.show()
# a Rayleigh distribution with unit scale is the square root of a chi-square distribution with df=2
def task_generation():
    p_d = genfromtxt('inputs/probability.csv', delimiter=',', dtype="float")
    p_d_short = [int(p) for p in p_d[0]]
    sum_t = sum(p_d_short)
    p_d_short = [p / sum_t for p in p_d_short]

    l_demands = genfromtxt('inputs/demands_list.csv',
                           delimiter=',',
                           dtype="float")

    # the intended mean duration is 40 minutes, converted here to a number of intervals
    mean_value = 40.0 / (24.0 * 60.0 / no_intervals)
    mode_value = sqrt(2 / pi) * mean_value

    # task details
    preferred_starts = []
    earliest_starts = []
    latest_ends = []
    durations = []
    demands = []
    care_factors = []
    predecessors = []
    successors = []
    prec_delays = []
    aggregated_loads = [0] * no_intervals

    for counter_j in range(no_tasks):

        # job consumption per hour
        demand = r.choice(l_demands)
        demand = int(demand * 1000)
        demands.append(demand)

        # job duration
        duration = max(1, int(random.rayleigh(mode_value, 1)[0]))
        # while duration == 0:
        #     duration = int(random.rayleigh(mode_value, 1)[0])
        durations.append(duration)

        # job preferred start time
        p_start = no_intervals + 1
        while p_start + duration - 1 >= no_intervals - 1 or p_start < 0:
            middle_point = int(
                np.random.choice(a=no_periods, size=1, p=p_d_short)[0] *
                no_intervals_periods +
                np.random.random_integers(low=-2, high=2))
            p_start = middle_point - int(duration / 2)
        preferred_starts.append(p_start)

        # job earliest starting time
        e_start = 0
        # e_start = r.randint(0, max(p_start - 1, 0))
        earliest_starts.append(e_start)

        # job latest finish time
        l_finish = no_intervals - 1
        # l_finish = r.randint(p_start + duration, min(no_intervals - 1, p_start + duration))
        latest_ends.append(l_finish)

        # job care factor
        care_f = int(r.choice([i for i in range(care_f_max + 1)]))
        care_factors.append(care_f)

        if r.choice([True, False]) and counter_j > 0:

            # task predecessor
            id_predecessor_set = [i for i in range(counter_j)]
            id_predecessor = r.choice(id_predecessor_set)

            while preferred_starts[id_predecessor] + durations[id_predecessor] - 1 >= preferred_starts[counter_j] \
                    and len(id_predecessor_set) > 0:
                id_predecessor_set.remove(id_predecessor)
                if len(id_predecessor_set) > 0:
                    id_predecessor = r.choice(id_predecessor_set)

            if len(id_predecessor_set) > 0:
                predecessors.append(int(id_predecessor))
                successors.append(counter_j)

                # precedence delay
                delay = 0
                if not durations[
                        id_predecessor] + duration - 1 == no_intervals - 1:
                    delay = r.choice([
                        i for i in range(no_intervals + 1 - duration -
                                         durations[id_predecessor])
                    ])
                prec_delays.append(int(delay))

        for d in range(duration):
            aggregated_loads[p_start + d] += demand

    no_precedences = len(predecessors)
    maximum_demand = max(demands) * max_demand_multiplier

    print(" --- Household made ---")

    return no_intervals, preferred_starts, no_tasks, earliest_starts, latest_ends, durations, demands, care_factors, \
           no_precedences, predecessors, successors, prec_delays, maximum_demand, aggregated_loads
def g():
	#constants
	q = random.rayleigh(12.5)
	return q
Example 17
def create():
    p_d = P.prob
    p_d_short = p_d[1]
    l_demands = P.devices

    p_d_long = []
    for i in list(range(len(p_d_short) - 1)):
        for j in list(range(P.interval)):
            p_d_long.append(p_d_short[i] +
                            (p_d_short[i + 1] - p_d_short[i]) / P.interval * j)

    # i should be 46 at this time
    i = len(p_d_short) - 2  # make sure i is 46
    for j in range(P.interval):
        p_d_long.append(p_d_short[i + 1] +
                        (p_d_short[i + 1] - p_d_short[i]) / P.interval * j)

    p_d_min = p_d_long[0] - p_d_long[0] / 3
    p_d_max = p_d_long[P.no_intervals_day - 1]

    # the intended mean duration is 40 minutes, converted here to a number of intervals
    mean_value = 40.0 / (24.0 * 60.0 / P.no_intervals_day)
    mode_value = sqrt(2 / pi) * mean_value

    community = []
    attributes = [
        "noh", "name", "demand", "estart", "pstart", "lfinish", "dur", "caf",
        "astart", "predecessor", "max-succeeding-delay"
    ]
    s_community = str(attributes)[1:-1].replace("'", "").replace(" ",
                                                                 "") + "\r\n"
    for counter_h in range(P.no_houses):
        # household instance
        # household = H.Household()
        # household.name = "household" + str(counter_h)
        no_jobs = r.randint(P.no_jobs_min, P.no_jobs_max)
        # household.no_jobs = no_jobs + 1
        household = []
        s_household = ""
        for counter_j in range(no_jobs):
            job = dict()

            # job name
            name = str(counter_j)

            # job consumption per hour
            demand = r.choice(l_demands)

            # job duration
            seed = r.uniform(p_d_min, p_d_max)
            middle_point = bisect.bisect(p_d_long, seed)

            duration = int(random.rayleigh(mode_value, 1)[0])
            while duration == 0:
                duration = int(random.rayleigh(mode_value, 1)[0])

            # job preferred start time
            p_start = (middle_point - int(duration / 2)) % P.no_intervals_day
            # if p_start < 0:
            #     p_start += P.no_intervals_day - 1

            # job earliest starting time
            e_start = r.choice([i for i in range(-duration + 1, p_start + 1)])
            e_start = 0

            # job latest finish time
            l_finish = r.choice([
                i for i in range(p_start - duration + 1, P.no_intervals_day -
                                 1 + duration)
            ])
            l_finish = P.no_intervals_day - 1
            # l_finish = p_start - 2

            # job care factor
            care_f = round(r.random(), 1)
            if care_f == 0:
                care_f = 0.01
            # care_f = 0

            # job instance
            job['name'] = name
            job['demand'] = demand
            job['estart'] = e_start
            job['pstart'] = p_start
            job['lfinish'] = l_finish
            job['dur'] = duration
            job['caf'] = care_f
            job['astart'] = p_start

            s_household += str(counter_h) + "," + name + "," + str(demand) + "," + str(e_start) + "," \
                         + str(p_start) + "," + str(l_finish) + "," + str(duration) + "," + str(care_f) + "," \
                           + str(p_start)

            if r.choice([True, False]) and counter_j > 0:
                id_predecessor_set = [i for i in range(counter_j)]
                id_predecessor = r.choice(id_predecessor_set)
                job['predecessor'] = id_predecessor
                s_household += "," + str(id_predecessor)

                delay = 0 if household[id_predecessor]['dur'] + job['dur'] >= P.no_intervals_day \
                    else r.randint(0, P.no_intervals_day - household[id_predecessor]['dur'] - job['dur'] - 1)
                # delay = 144
                job['max-succeeding-delay'] = delay
                s_household += "," + str(delay)

                # while household[id_predecessor]['pstart'] - job['pstart']:
                #     id_predecessor_set.remove(id_predecessor)
                #     if len(id_predecessor_set) > 0:
                #         id_predecessor = r.choice(id_predecessor_set)
                #     else:
                #         break
                #
                # if len(id_predecessor_set) > 0:
                #     job['predecessor'] = id_predecessor
                #     delay = 0 if household[id_predecessor]['dur'] + job['dur'] >= P.no_intervals_day \
                #         else r.randint(0, P.no_intervals_day - household[id_predecessor]['dur'] - job['dur'] - 1)
                #     job['max-succeeding-delay'] = delay
                #     s_household += "," + str(id_predecessor) + "," + str(delay)

            # print (job)

            # job added to the household
            household.append(job)
            s_household += "\r\n"
        # household added to the community
        community.append(household)
        s_community += s_household
        # community.aggregated_loads = [x + y for x, y in zip(community.aggregated_loads, household.aggregated_loads)]

    with open(P.jobs_file, 'w') as output_file:
        output_file.write(s_community)
    print("Job data is generated and saved to {}.".format(P.jobs_file))

    return community


# create()
Example 18
def pop3(p1, p2, n):
    return ra.rayleigh(p1, n)
Example 19
from numpy import random
import matplotlib.pyplot as plt
import seaborn as sns

x = random.rayleigh(scale=2, size=(2, 3))

print(x)

sns.distplot(x, hist=False, label="rayleigh")

plt.show()
Example 20
def next(self):
    t = rn.rayleigh(fabs(self.sigma), 1)[0]
    while t < 0:
        t = rn.rayleigh(fabs(self.sigma), 1)[0]
    return t
Example 21
demands = []
care_factors = []
predecessors = []
successors = []
prec_delays = []
aggregated_loads = [0] * no_intervals

for counter_j in range(no_tasks):

    # job consumption per hour
    demand = r.choice(l_demands)
    demand = int(demand * 1000)
    demands.append(demand)

    # job duration
    duration = max(1, int(random.rayleigh(mode_value, 1)[0]))
    durations.append(duration)

    # job preferred start time
    p_start = no_intervals + 1
    while p_start + duration - 1 >= no_intervals - 1 or p_start < 0:
        middle_point = int(np.random.choice(a=no_periods, size=1, p=p_d_short)[0]
                           * no_intervals_periods
                           + np.random.random_integers(low=-2, high=2))
        p_start = middle_point - int(duration / 2)
    preferred_starts.append(p_start)

    # job earliest starting time
    e_start = 0
    earliest_starts.append(e_start)
sigma_distribution = np.subtract(sigma_distribution, sigma_distribution[0])
sigma_distribution = np.divide(sigma_distribution, sigma_distribution[-1])

a_func_sigma = intp.interp1d(sigma_distribution, a_distribution)
midpoints = np.linspace(0., 1., number + 1)
for i in range(number):
    midpoints[i] = (midpoints[i] + midpoints[i + 1]) / 2.0
a_to_start_out = a_func_sigma(midpoints)

#s = 'Initial_aes/initial_a_sim1_' + str(number) + '.txt'
#np.savetxt(s,np.transpose(a_to_start_out))

#Now, for the rest of the orbital elements:

eccentricity = rand.rayleigh(scale=e_0, size=number)
inclination = rand.rayleigh(scale=i_0, size=number)
capital_omega = rand.random(number)
capital_omega = np.multiply(capital_omega, 2. * 180.)  #np.pi)
omega = rand.random(number)
omega = np.multiply(omega, 2. * 180.)  #np.pi)
mean_anomaly = rand.random(number)
mean_anomaly = np.multiply(mean_anomaly, 2. * 180.)  #np.pi)

particle_number = 2

f = open('big.in', 'w')

f.write(')O+_06 Big-body initial data\n')
f.write(')  seed value: ' + str(seed) + ', cusp: ' + cusp + '\n')
f.write(')----------------------------------------\n')
Example 23
def rayleigh(size, params):
    try:
        return random.rayleigh(params['scale'], size)
    except ValueError as e:
        exit(e)
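A small usage sketch of this wrapper (hypothetical call; assumes `random` is numpy.random, as in the snippet):

samples = rayleigh(size=1000, params={'scale': 2.0})
print(samples.mean())  # roughly scale * sqrt(pi / 2), i.e. about 2.51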