Example #1
 def test_str(self):
     rs = Generator(self.bit_generator(*self.data1['seed']))
     assert 'Generator' in str(rs)
     assert str(self.bit_generator.__name__) in str(rs)
     assert '{:#x}'.format(id(rs)).upper().replace('X', 'x') not in str(rs)
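# For reference, a quick sketch (PCG64 chosen arbitrarily) of what these
# assertions check: str() omits the object's memory address, repr() keeps it.
from numpy.random import Generator, PCG64
rs = Generator(PCG64(1234))
print(str(rs))   # e.g. "Generator(PCG64)"
print(repr(rs))  # e.g. "Generator(PCG64) at 0x..."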
Example #2
from astropy.table import vstack
from numpy.random import Generator, PCG64

def simulate_platedesign(fp_targets, nSCI_apogee, nSCI_boss, random_seed=1050):
    """
    Takes in an astropy table containing a list of fiveplates targets and 
    simulates the plate design code to produce an estimate of what the final
    plate will look like. To do this, we go through priority group by priority
    group.

    Parameters
    ----------
    fp_targets : astropy table
        Almost always the output of fiveplates.Field.targets
    nSCI_apogee : int
        Number of APOGEE fibers for science in plate.
    nSCI_boss : int
        Number of BOSS fibers for science in plate.
    random_seed : int
        Seed number for random number generator. Set to any number.
        Default is 1050 (just *randomly* chosen) for reproducibility.
    """

    # Initialize the random generator from the seed argument (fixed default)
    RandomState = Generator(PCG64(random_seed))

    # indx_col = Column(np.arange(len(fp_targets), dtype=int), name='indx')
    # priority_groups = fp_targets['order_priority'].data
    # indx_groups = indx_col.group_by(priority_groups)

    bypriority = fp_targets.group_by('order_priority')
    assigned_targets = []

    nSCI_apogee_assigned = 0  # nothing assigned at beginning
    nSCI_boss_assigned = 0

    nSCI_goal = {}
    nSCI_assigned = {}
    nSCI_needed = {}

    nSCI_goal['apogee'] = nSCI_apogee
    nSCI_goal['boss'] = nSCI_boss
    nSCI_assigned['apogee'] = nSCI_apogee_assigned
    nSCI_assigned['boss'] = nSCI_boss_assigned

    for ii, target_grp in enumerate(bypriority.groups):
        nSCI_needed['apogee'] = nSCI_goal['apogee'] - nSCI_assigned['apogee']
        nSCI_needed['boss'] = nSCI_goal['boss'] - nSCI_assigned['boss']

        # consider = bypriority.groups[priority]
        instrument_ = set(target_grp['instrument']).pop()
        # better be only ONE instrument per priority group!
        Nrows = len(target_grp)
        # Ignore SKY and STD cartons for now
        target_type = set(target_grp['Type']).pop()

        if target_type != 0:
            continue  # SKIP this group

        if Nrows <= nSCI_needed[instrument_]:
            # Just take whole priority group if it will fit
            assigned_targets.append(target_grp)
            nSCI_assigned[instrument_] += Nrows  # increase N of fibers assigned
        elif nSCI_needed[instrument_] > 0:  #  Still need to assign fibers
            keep_rows_indx = RandomState.choice(Nrows,
                                                nSCI_needed[instrument_],
                                                replace=False)
            assigned_targets.append(target_grp[keep_rows_indx])
            nSCI_assigned[instrument_] += len(keep_rows_indx)
        else:  # Filled up the plate already
            pass

    assigned = vstack(assigned_targets)
    assigned.sort('catalogid')
    return assigned
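# A minimal usage sketch: the mock table below is illustrative (real input
# comes from fiveplates.Field.targets); the column names match those the
# function reads.
from astropy.table import Table

mock = Table({'catalogid': [101, 102, 103, 104],
              'order_priority': [1, 1, 2, 2],
              'instrument': ['apogee', 'apogee', 'boss', 'boss'],
              'Type': [0, 0, 0, 0]})  # Type 0 = science target here
assigned = simulate_platedesign(mock, nSCI_apogee=2, nSCI_boss=1)
print(len(assigned))  # 3 rows: 2 APOGEE targets plus 1 randomly chosen BOSS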
Example #3
 def test_repr(self):
     rs = Generator(self.bit_generator(*self.data1['seed']))
     assert 'Generator' in repr(rs)
     assert '{:#x}'.format(id(rs)).upper().replace('X', 'x') in repr(rs)
Example #4
def randseq(distro):
    # Wrap a sampling function into an infinite generator of repeated draws.
    def seq(*args):
        val = distro(*args)
        while True:
            yield val
            val = distro(*args)

    return seq


import random

from numpy.random import Generator, Philox, SeedSequence

numStreams = 9
seed = 8745309
sg = SeedSequence(seed)
random.seed(seed)
streams = [Generator(Philox(s)) for s in sg.spawn(numStreams)]

agentDensity = 0.2

agentVisionDist = randseq(streams[0].integers)(1, 3)
agentMetabDist = randseq(streams[1].uniform)(2.0, 3.0)
intermovement = randseq(streams[2].exponential)(1)
interreproduce = randseq(streams[3].exponential)(1)
gestationperiod = randseq(streams[4].uniform)(1.0, 2.0)

siteCapDist = randseq(streams[5].uniform)(0.0, 5.0)
siteSugarDist = randseq(streams[6].uniform)(0.0, 5.0)
siteRegenDist = randseq(streams[7].uniform)(0.0, 5.0)

agentDeathLag = randseq(streams[8].uniform)(0.0, 1.0)
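# Drawing from the streams: each one is statistically independent because
# every Philox instance was seeded from its own child of one SeedSequence.
print(next(agentVisionDist))  # an int in [1, 3), i.e. 1 or 2
print(next(agentMetabDist))   # a float in [2.0, 3.0)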
Example #5
 def ReSeed(self, entropy):
     # SeedSequence turns arbitrary user entropy into a reproducible seed state
     self.seed = SeedSequence(entropy)
     self.rand_gen = Generator(PCG64(self.seed))
Example #6
 def test_seed(self):
     rg = Generator(self.bit_generator(*self.seed))
     rg2 = Generator(self.bit_generator(*self.seed))
     rg.random()
     rg2.random()
     assert_(comp_state(rg.bit_generator.state, rg2.bit_generator.state))
Example #7
 def test_beta_small_parameters(self):
     # Test that beta with small a and b parameters does not produce
     # NaNs due to roundoff errors causing 0 / 0, gh-5851
     mt19937 = Generator(MT19937(1234567890))
     x = mt19937.beta(0.0001, 0.0001, size=100)
     assert_(not np.any(np.isnan(x)), 'Nans in mt19937.beta')
Example #8
import loompy as lp
import numpy as np
import pandas as pd
import pytest
import os

from pathlib import Path
from numpy.random import Generator, PCG64

from scopeserver.dataserver.utils.loom import Loom
from scopeserver.dataserver.utils.loom_file_handler import LoomFileHandler
from scopeserver.gen_test_loom import generate_test_loom_data

LOOM_FILE_HANDLER = LoomFileHandler()

rg = Generator(PCG64(55850))

LOOM_PATH = Path("test/data/SCope_Test.loom")


@pytest.fixture
def matrix():
    num_genes = 100
    num_cells = 100  # must be divisible by 4

    genes = rg.poisson(lam=1, size=num_genes)
    genes = [x + 1 for x in genes]
    matrix = rg.poisson(lam=genes, size=(num_cells, num_genes))
    return matrix
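# A hypothetical test consuming the fixture: pytest injects `matrix` by
# matching the argument name to the fixture defined above.
def test_matrix_shape(matrix):
    assert matrix.shape == (100, 100)  # (num_cells, num_genes)
    assert (matrix >= 0).all()         # Poisson counts are non-negative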
Example #9
def minimize(fun, 
             bounds=None, 
             x0=None, 
             input_sigma = 0.3, 
             popsize = 31, 
             max_evaluations = 100000, 
             max_iterations = 100000,  
             accuracy = 1.0, 
             stop_fitness = None, 
             is_terminate = None, 
             rg = Generator(MT19937()),
             runid=0,
             workers = None, 
             normalize = True,
             update_gap = None):   
    """Minimization of a scalar function of one or more variables using a 
    C++ CMA-ES implementation called via ctypes.
     
    Parameters
    ----------
    fun : callable
        The objective function to be minimized.
            ``fun(x, *args) -> float``
        where ``x`` is a 1-D array with shape (n,) and ``args``
        is a tuple of the fixed parameters needed to completely
        specify the function.
    bounds : sequence or `Bounds`, optional
        Bounds on variables. There are two ways to specify the bounds:
            1. Instance of the `scipy.Bounds` class.
            2. Sequence of ``(min, max)`` pairs for each element in `x`. None
               is used to specify no bound.
    x0 : ndarray, shape (n,)
        Initial guess. Array of real elements of size (n,),
        where 'n' is the number of independent variables.  
    input_sigma : ndarray, shape (n,) or scalar
        Initial step size for each dimension.
    popsize : int, optional
        CMA-ES population size.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    max_iterations : int, optional
        Forced termination after ``max_iterations`` iterations.
    accuracy : float, optional
        values > 1.0 reduce the accuracy.
    stop_fitness : float, optional 
         Limit for fitness value. If reached minimize terminates.
    is_terminate : callable, optional
        Callback to be used if the caller of minimize wants to 
        decide when to terminate. 
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses.
    runid : int, optional
        id used by the is_terminate callback to identify the CMA-ES run. 
    workers : int or None, optional
        If workers is not None, function evaluation is performed in parallel
        for the whole population. Useful for costly objective functions, but
        deactivated for parallel retry.
    normalize : boolean, optional
        pheno -> if true geno transformation maps arguments to interval [-1,1] 
    update_gap : int, optional
        number of iterations without distribution update
           
    Returns
    -------
    res : scipy.OptimizeResult
        The optimization result is represented as an ``OptimizeResult`` object.
        Important attributes are: ``x`` the solution array, 
        ``fun`` the best function value, 
        ``nfev`` the number of function evaluations,
        ``nit`` the number of CMA-ES iterations, 
        ``status`` the stopping criteria and
        ``success`` a Boolean flag indicating if the optimizer exited successfully. """
    
    lower, upper, guess = _check_bounds(bounds, x0, rg)      
    n = guess.size   
    if lower is None:
        lower = [0]*n
        upper = [0]*n
    mu = int(popsize/2)
    if np.ndim(input_sigma) == 0:
        input_sigma = [input_sigma] * n
    if stop_fitness is None:
        stop_fitness = math.inf   
    if is_terminate is None:    
        is_terminate=_is_terminate_false
        use_terminate = False 
    else:
        use_terminate = True 
    parfun = None if workers is None else parallel(fun, workers)
    array_type = ct.c_double * n 
    c_callback_par = call_back_par(callback_par(fun, parfun))
    c_is_terminate = is_terminate_type(is_terminate)
    try:
        res = optimizeACMA_C(runid, c_callback_par, n, array_type(*guess), array_type(*lower), array_type(*upper), 
                           array_type(*input_sigma), max_iterations, max_evaluations, stop_fitness, mu, 
                           popsize, accuracy, use_terminate, c_is_terminate, 
                           int(rg.uniform(0, 2**32 - 1)), normalize, -1 if update_gap is None else update_gap)

        x = np.array(np.fromiter(res, dtype=np.float64, count=n))
        val = res[n]
        evals = int(res[n+1])
        iterations = int(res[n+2])
        stop = int(res[n+3])
        freemem(res)
        if parfun is not None:
            parfun.stop() # stop all parallel evaluation processes
        return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations, status=stop, success=True)
    except Exception as ex:
        if parfun is not None:
            parfun.stop() # stop all parallel evaluation processes
        return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0, status=-1, success=False)
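# Usage sketch (the sphere objective and the bounds are illustrative):
import numpy as np

def sphere(x):
    return float(np.sum(x * x))

res = minimize(sphere, bounds=[(-5.0, 5.0)] * 4, max_evaluations=20000)
print(res.x, res.fun, res.nfev)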
Example #10
def minimize(fun,
             dim=None,
             bounds=None,
             popsize=None,
             max_evaluations=100000,
             stop_fitness=None,
             keep=200,
             f=0.5,
             cr=0.9,
             rg=Generator(MT19937()),
             workers=1,
             runid=0):
    """Minimization of a scalar function of one or more variables using a 
    C++ Differential Evolution implementation called via ctypes.
     
    Parameters
    ----------
    fun : callable
        The objective function to be minimized.
            ``fun(x, *args) -> float``
        where ``x`` is a 1-D array with shape (dim,) and ``args``
        is a tuple of the fixed parameters needed to completely
        specify the function.
    dim : int
        dimension of the argument of the objective function
    bounds : sequence or `Bounds`, optional
        Bounds on variables. There are two ways to specify the bounds:
            1. Instance of the `scipy.Bounds` class.
            2. Sequence of ``(min, max)`` pairs for each element in `x`. None
               is used to specify no bound.
    popsize : int, optional
        Population size.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    stop_fitness : float, optional 
         Limit for fitness value. If reached minimize terminates.
    keep : float, optional
        changes the reinitialization probability of individuals based on their age. Higher value
        means lower probability of reinitialization.
    f : float, optional
        The mutation constant. In the literature this is also known as differential weight, 
        being denoted by F. Should be in the range [0, 2].
    cr : float, optional
        The recombination constant. Should be in the range [0, 1]. 
        In the literature this is also known as the crossover probability.     
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses.
    workers : int or None, optional
        If workers is not None, function evaluation is performed in parallel
        for the whole population. Useful for costly objective functions, but
        deactivated for parallel retry.
    runid : int, optional
        id used to identify the run for debugging / logging. 
            
    Returns
    -------
    res : scipy.OptimizeResult
        The optimization result is represented as an ``OptimizeResult`` object.
        Important attributes are: ``x`` the solution array, 
        ``fun`` the best function value, 
        ``nfev`` the number of function evaluations,
        ``nit`` the number of iterations,
        ``success`` a Boolean flag indicating if the optimizer exited successfully. """

    dim, lower, upper = de._check_bounds(bounds, dim)
    if popsize is None:
        popsize = 31
    if lower is None:
        lower = [0] * dim
        upper = [0] * dim
    if workers is None:
        workers = 0
    if stop_fitness is None:
        stop_fitness = math.inf
    array_type = ct.c_double * dim
    c_callback = mo_call_back_type(callback(fun, dim))
    seed = int(rg.uniform(0, 2**32 - 1))
    res = np.empty(dim + 4)
    res_p = res.ctypes.data_as(ct.POINTER(ct.c_double))
    try:
        optimizeDE_C(runid, c_callback, dim, seed, array_type(*lower),
                     array_type(*upper), max_evaluations, keep, stop_fitness,
                     popsize, f, cr, workers, res_p)
        x = res[:dim]
        val = res[dim]
        evals = int(res[dim + 1])
        iterations = int(res[dim + 2])
        stop = int(res[dim + 3])
        return OptimizeResult(x=x,
                              fun=val,
                              nfev=evals,
                              nit=iterations,
                              status=stop,
                              success=True)
    except Exception as ex:
        return OptimizeResult(x=None,
                              fun=sys.float_info.max,
                              nfev=0,
                              nit=0,
                              status=-1,
                              success=False)
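# Usage sketch (Rastrigin is a standard multimodal test function, used here
# purely for illustration):
import numpy as np

def rastrigin(x):
    return float(10 * len(x) + np.sum(x * x - 10 * np.cos(2 * np.pi * x)))

res = minimize(rastrigin, dim=3, bounds=[(-5.12, 5.12)] * 3)
print(res.x, res.fun, res.success)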
Example #11
pmpar1 = open('new.pmpar.in', 'r')  #opens the data file
pmpar2 = pmpar1.readline()  #reads the first line
obs1 = Observation(pmpar2)  #defines the line as an observation
print(obs1.to_string())
otherlines.append(pmpar2)  #writes the original coordinates into the array
print(otherlines)

#this is where the code decides whether or not to perturb the positions
q = [0, 1]  # possible choices: 0 = keep the position, 1 = perturb it
w = [0.5, 0.5]  # the weight (probability) of each choice
Z = random.choices(q, w)  # returns a one-element list, e.g. [1]
print(Z)  # show which branch was chosen

if Z == [1]:
    RNG = Generator(PCG64(1))  #starts up the random number generator
    rg1 = RNG.random()  #picks a random offset for the RA
    rg2 = RNG.random()  #picks a random offset for the Dec
    obs1.perturbposition(
        rg1 * delta_ra,
        rg2 * delta_dec)  #perturbs the position by the random offset
    newcoords.append(obs1.to_string())  # add the perturbed position to the observations array

else:
    c = obs1.to_string()  #if 0 is picked then the line isn't perturbed
    newcoords.append(c)

print(newcoords)

Example #12
from scipy import sparse
import scipy.sparse.linalg as spla
import utils.codegen_utils as cu
from numpy.random import Generator, PCG64

# Set random seed for reproducibility
rg = Generator(PCG64(1))

# Simple case
test_solve_KKT_n = 3
test_solve_KKT_m = 4

test_solve_KKT_P = sparse.random(test_solve_KKT_n,
                                 test_solve_KKT_n,
                                 density=0.4,
                                 format='csc',
                                 random_state=rg)
test_solve_KKT_P = test_solve_KKT_P.dot(test_solve_KKT_P.T).tocsc()
test_solve_KKT_A = sparse.random(test_solve_KKT_m,
                                 test_solve_KKT_n,
                                 density=0.4,
                                 format='csc',
                                 random_state=rg)
test_solve_KKT_Pu = sparse.triu(test_solve_KKT_P, format='csc')

test_solve_KKT_rho = 4.0
test_solve_KKT_sigma = 1.0
test_solve_KKT_KKT = sparse.bmat([[
    test_solve_KKT_P + test_solve_KKT_sigma * sparse.eye(test_solve_KKT_n),
    test_solve_KKT_A.T
], [test_solve_KKT_A, -1. / test_solve_KKT_rho * sparse.eye(test_solve_KKT_m)]])
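# Sketch of a possible continuation (the original snippet is truncated here):
# factor the assembled KKT matrix and solve against a random right-hand side.
test_solve_KKT_rhs = rg.standard_normal(test_solve_KKT_n + test_solve_KKT_m)
KKT_factor = spla.splu(test_solve_KKT_KKT.tocsc())
test_solve_KKT_x = KKT_factor.solve(test_solve_KKT_rhs)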
Example #13
 def setup(self, bitgen, args):
     if bitgen == 'numpy':
         self.rg = np.random.RandomState()
     else:
         self.rg = Generator(getattr(np.random, bitgen)())
     self.rg.random()
Example #14
def run_sim(force_name, month):

  year = 2020

  end_year = 2020 if month < 12 else 2021
  end_month = month + 1 if month < 12 else 1

  # construct and run the model for one month only
  microsim = model.CrimeMicrosim(force_name, (year, month), (end_year, end_month))
  model.no.run(microsim)
  return microsim.crimes


from numpy.random import Generator, MT19937
rg = Generator(MT19937(12345))


# test function for netlogo integration
@app.route("/rand", methods=["GET"])
def rand():
  try:
    if not "max" in request.args:
      raise KeyError("max param not specified")
    return json.dumps(rg.random() * float(request.args.get("max"))), 200
  except Exception as e:
    return "%s: %s" % (type(e).__name__, str(e)), 400


@app.route('/data', methods=["GET"])
def crime_data():
Example #15
from numpy.random import Generator, PCG64

def block_rng(seed, jump_index):
    # .jumped(n) returns a copy of the bit generator advanced by n jumps,
    # yielding independent, non-overlapping streams from a single seed
    return Generator(PCG64(seed).jumped(jump_index))
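# Usage sketch: the jump index gives each worker an independent stream
# derived from one seed, reproducibly.
gens = [block_rng(12345, i) for i in range(4)]
print([g.random() for g in gens])  # four distinct values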
Example #16
    def __init__(self, xgrid, pdfs, params):
        self.noise = 100
        self.norm, pdf, self.lhaPDFs = pdfs
        self.xgrid = xgrid
        self.params = params
        dim, flsize, xgssize = pdf.shape
        self.batch = dim * params["batch_size"] // 100
        self.hyperopt = params.get("scan")
        self.rndgen = Generator(PCG64(seed=0))
        self.folder = params.get("save_output")
        discparams = params.get("disc_parameters")

        # Prepare latent space
        self.latent_pdf = latent_sampling(pdf,
                                          params.get("tot_replicas"),
                                          self.rndgen,
                                          nsx=params.get("gauss_noise", 1e-5))
        params["latent_space"] = self.latent_pdf

        # Define Models architecture
        if params.get("architecture") == "dnn":
            real_shape = Input(shape=(flsize, xgssize))
            genparams = params.get("gen_parameters")
            if genparams.get("structure") == "standard":
                synt_shape = Input(shape=(self.noise, ))
            elif genparams.get("structure") == "custom":
                synt_shape = Input(shape=(flsize, xgssize))
            self.pdf = pdf
            self.gan = WGanModel(self.pdf, params)
        elif params.get("architecture") == "dcnn":
            logger.warning("DCNN is not fully operational yet!")
            self.pdf = pdf.reshape(pdf.shape + (1, ))
            real_shape = Input(shape=(flsize, xgssize, 1))
            synt_shape = Input(shape=(self.noise, ))
            self.gan = DWGanModel(self.pdf, params, self.noise)
        else:
            raise ValueError("Invalid Achitecture!")

        # Initialize Models
        self.generator = self.gan.generator_model()
        self.critic = self.gan.critic_model()
        self.adversarial = self.gan.adversarial_model(self.generator,
                                                      self.critic)

        # Make sure the Models are trainable
        turn_on_training(self.critic, self.generator)

        # Define Graph for the combined Discriminator. It takes both
        # real PDF samples and Latent space/noise. The Latent Space
        # (noise) is run through the Generator to generate the fake
        # PDF replicas. Both the real and fake PDFs run through the
        # Discriminator for evaluation.
        # The implementation below includes a "Gradient Penalty" for
        # the discriminator; to take it into account during training,
        # set its loss weight to 1.0. In that case the weight clipping
        # in the input runcard needs to be relaxed.
        synt_discr = self.generator(synt_shape)
        discr_real = self.critic(real_shape)
        discr_synt = self.critic(synt_discr)

        partial_gp = GradientPenalty(
            self.critic, self.batch, params.get("architecture"),
            discparams.get("gp_weight", 10))([real_shape, synt_discr])

        self.discriminator = Model(
            inputs=[real_shape, synt_shape],
            outputs=[discr_real, discr_synt, partial_gp])
        inputloss = discparams.get("loss", "wasserstein")
        if inputloss != "wasserstein":
            dloss = discparams.get("loss")
        else:
            dloss = wasserstein_loss
        opt_name = discparams.get("optimizer")
        ds_optmz = get_optimizer(opt_name)
        self.discriminator.compile(
            optimizer=ds_optmz,
            loss=[dloss, dloss, "mse"],
            loss_weights=[1.0, 1.0, discparams.get("gp_loss", 1.0)])

        if not self.hyperopt and not params.get("use_saved_model"):
            gan_summary(self.critic, self.generator, self.adversarial)

        # Save Checkpoints
        self.ckpt = save_ckpt(self.generator, self.critic, self.adversarial)
Example #17
 def test_entropy_init(self):
     rg = Generator(self.bit_generator())
     rg2 = Generator(self.bit_generator())
     assert_(not comp_state(rg.bit_generator.state,
                            rg2.bit_generator.state))
Example #18
#
# diffusion coefficient in => diffusion coefficient out
#
# STEP 1: Brownian simulation and Video synthesis

import numpy as np

from ddm_toolkit.simulation import brownian_softbox, random_coordinates
from ddm_toolkit.simulation import imgsynth2
from ddm_toolkit import tqdm

from ddm_toolkit import sim_params

# set up pseudo random number generator for making noise
from numpy.random import Generator, PCG64
PRNG = Generator(PCG64())

#
# GET SIMULATION/ANALYSIS PARAMETERS
#
sim = sim_params()

#
# SIMULATION (2D)
#
#set initial particle coordinates
x0 = random_coordinates(sim.Np, sim.bl_x)
y0 = random_coordinates(sim.Np, sim.bl_y)
#create array of coordinates of the particles at different timesteps
x1 = brownian_softbox(x0, sim.Nt, sim.dt, sim.D, sim.bl_x)
y1 = brownian_softbox(y0, sim.Nt, sim.dt, sim.D, sim.bl_y)
Example #19
 def test_permutation_longs(self):
     mt19937 = Generator(MT19937(1234))
     a = mt19937.permutation(12)
     mt19937 = Generator(MT19937(1234))
     b = mt19937.permutation(long(12))  # 'long' exists only on Python 2
     assert_array_equal(a, b)
Example #20
 def getRandomFile(self, count):
     self.random = Generator(PCG64(self.seed))
     res = [(self.min + self.getRandom() % (self.max - self.min + 1))
            for _ in range(count)]  # modulo-map raw draws into [min, max]
     self.saveToFile(res)
Example #21
from numpy.testing import (assert_, assert_array_equal)
import numpy as np
import pytest
from numpy.random import Generator, MT19937

mt19937 = Generator(MT19937())


class TestRegression:
    def test_vonmises_range(self):
        # Make sure generated random variables are in [-pi, pi].
        # Regression test for ticket #986.
        for mu in np.linspace(-7., 7., 5):
            r = mt19937.vonmises(mu, 1, 50)
            assert_(np.all(r > -np.pi) and np.all(r <= np.pi))

    def test_hypergeometric_range(self):
        # Test for ticket #921
        assert_(np.all(mt19937.hypergeometric(3, 18, 11, size=10) < 4))
        assert_(np.all(mt19937.hypergeometric(18, 3, 11, size=10) > 0))

        # Test for ticket #5623
        args = (2**20 - 2, 2**20 - 2, 2**20 - 2)  # Check for 32-bit systems
        assert_(mt19937.hypergeometric(*args) > 0)

    def test_logseries_convergence(self):
        # Test for ticket #923
        N = 1000
        mt19937 = Generator(MT19937(0))
        rvsn = mt19937.logseries(0.8, size=N)
        # these two frequency counts should be close to theoretical
Example #22
# PyTorch 1.8.1-CPU virtual env.
# Python 3.9.4 Windows 10
# -*- coding: utf-8 -*-
"""The script implement the classical longstaff-schwartz algorithm for pricing american options.
This script focus on the multidimensional case for rainbow option
"""
import numpy as np
from numpy.random import Generator, seed
from numpy.random import PCG64
import os
import sys
sys.path.insert(0,
                os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import Products
import time
rng = Generator(PCG64(seed=123))  #set seed and get generator object

#rng = Generator(PCG64()) #set seed and get generator object


#########
# Functions for simulate paths
#########
def initialState(pathsTotal, spot, assetsTotal):
    """Args:
        pathsTotal ([int]): [Total number of simulated paths]
        spot ([vector of doubles]): [The current price of the underlying stocks]
        assetsTotal ([int]): [Total number of underlying assets]

    Returns:
        [2D Matrix]: [The initial state for the Markov chain at time 0]