Example #1
    def test_fourier_matrices_product(self):
        """
        Test Fourier basis transition

        This test validates that the transition of a matrix from the Fourier basis to the standard basis is equivalent
        to multiplying by the DFT matrix from the right and by its inverse (conjugate transpose) from the left.

        """
        rng = Generator(PCG64(1995))
        n: int = rng.integers(low=2, high=100)
        mat: Matrix = rng.standard_normal((n, n))
        tested_output = change_from_fourier_basis(mat)
        dft_mat: Matrix = dft(n, scale="sqrtn")
        expected_output: Matrix = np.conj(dft_mat.T).dot(mat).dot(dft_mat)
        self.assertTrue(np.allclose(tested_output, expected_output))

        mat: Matrix = rng.standard_normal((n, n))
        mat_fourier: Matrix = change_to_fourier_basis(mat)
        expected_output: Matrix = dft_mat.dot(mat).dot(np.conj(dft_mat.T))
        self.assertTrue(np.allclose(mat_fourier, expected_output))
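
# Illustrative sketch (an assumption, not necessarily the project's actual
# helpers): with the unitary DFT matrix F, moving into the Fourier basis is
# F @ mat @ F^H, and moving back is F^H @ mat @ F, matching the test above.
import numpy as np
from scipy.linalg import dft

def change_to_fourier_basis_sketch(mat):
    f = dft(mat.shape[0], scale="sqrtn")
    return f.dot(mat).dot(np.conj(f.T))

def change_from_fourier_basis_sketch(mat):
    f = dft(mat.shape[0], scale="sqrtn")
    return np.conj(f.T).dot(mat).dot(f)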
Example #2
    def test_real_data_tri_spectrum_consistency(self):
        """
        Test the consistency of the tri-spectrum estimation in the case of REAL data (since the exact
        tri-spectrum becomes more complicated in this case).

        This test validates that the estimation over a large number of observations (without noise)
        is "very close" to the exact tri-spectrum

        """
        rng = Generator(PCG64(596))
        signal_length: int = rng.integers(low=10, high=20)
        observations_num: int = rng.integers(low=10000, high=40000)
        approximation_rank: int = rng.integers(low=2, high=signal_length)
        data_type = np.float64
        tol = 1e-3
        exact_covariance, eigenvectors, eigenvalues = generate_covariance(
            signal_length, approximation_rank, data_type, rng)
        observations = generate_observations(eigenvectors, eigenvalues,
                                             approximation_rank,
                                             observations_num, data_type, rng)
        observations_fourier: Matrix = np.fft.fft(observations, norm="ortho")
        exact_cov_fourier_basis: Matrix = change_to_fourier_basis(
            exact_covariance)
        exact_tri_spectrum: ThreeDMatrix = calc_exact_tri_spectrum(
            exact_cov_fourier_basis, data_type)
        estimate_tri_spectrum: ThreeDMatrix = estimate_tri_spectrum_v2(
            observations_fourier)

        print(f'Observations number: {observations_num}')
        print(
            f'Real Tri-spectrum estimation error: {np.max(np.abs(estimate_tri_spectrum - exact_tri_spectrum))}'
        )

        # Validate the tri-spectrum estimation in the real case is consistent.
        self.assertTrue(
            np.allclose(estimate_tri_spectrum,
                        exact_tri_spectrum,
                        atol=tol,
                        rtol=0),
            msg='Tri-spectrum estimation is inconsistent! error=' +
            f'{np.max(np.abs(estimate_tri_spectrum - exact_tri_spectrum))}')
Example #3
    def __init__(self,
                 L: int,
                 beta: float,
                 term: int = -1,
                 extfield: float = 0.,
                 conf_in: tp.Optional[tp.Union[int, str]] = None,
                 seed: tp.Optional[int] = None,
                 state: tp.Optional[str] = None) -> None:
        self.__L = L
        self.__seed = seed
        self.rng = Generator(PCG64(seed))
        self.init_rng(state)

        # If no configuration is given, the lattice is initialized with broken symmetry when beta is
        # above the critical beta, in order to minimize the thermalization time.
        if conf_in is None:
            conf_in = 1 if beta >= 0.44 else 0

        # The exponential tables and the lattice must be initialized with an already-initialized generator.
        self.gen_exp(beta, extfield)
        self.inizializza(L, term, conf_in)
Example #4
def _test_optimization_template(data_type, signal_length, approximation_rank,
                                seed):
    rng = Generator(PCG64(seed))
    exact_covariance, eigenvectors, eigenvalues = generate_covariance(
        signal_length, approximation_rank, data_type, rng)
    exact_cov_fourier_basis: Matrix = change_to_fourier_basis(exact_covariance)
    exact_power_spectrum: Vector = np.real(np.diag(exact_cov_fourier_basis))
    exact_tri_spectrum: ThreeDMatrix = calc_exact_tri_spectrum(
        exact_cov_fourier_basis, data_type)
    diagonals = find_diagonals(exact_cov_fourier_basis)
    g_mats = np.array([
        diagonal.reshape(-1, 1).dot(np.conj(diagonal.reshape(-1, 1).T))
        for diagonal in diagonals
    ])
    g_mats = np.vstack((np.outer(exact_power_spectrum,
                                 exact_power_spectrum).reshape(1, 3, 3),
                        g_mats))
    optimization_object, _, _ = create_optimization_objective(
        exact_tri_spectrum, exact_power_spectrum, data_type, use_cp=False)
    return optimization_object(
        g_mats
    ), exact_tri_spectrum, exact_power_spectrum, g_mats, exact_cov_fourier_basis
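
# Usage sketch (hypothetical arguments; signal_length=3 matches the hard-coded
# reshape(1, 3, 3) above): evaluate the objective on exact, noise-free inputs,
# where it is expected to be numerically close to zero.
value = _test_optimization_template(np.complex128, 3, 2, seed=1995)[0]
print(value)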
Example #5
    def test_warm_start(self):
        # Set random seed for reproducibility
        rg = Generator(PCG64(1))

        self.n = 100
        self.m = 200
        self.A = sparse.random(self.m,
                               self.n,
                               density=0.9,
                               format='csc',
                               random_state=rg)
        self.l = -rg.random(self.m) * 2.
        self.u = rg.random(self.m) * 2.

        P = sparse.random(self.n, self.n, density=0.9, random_state=rg)
        self.P = sparse.triu(P.dot(P.T), format='csc')
        self.q = rg.standard_normal(self.n)

        # Setup solver
        self.model = osqp.OSQP()
        self.model.setup(self.P, self.q, self.A, self.l, self.u, **self.opts)

        # Solve problem with OSQP
        res = self.model.solve()

        # Store optimal values
        x_opt = res.x
        y_opt = res.y
        tot_iter = res.info.iter

        # Warm start with zeros and check if number of iterations is the same
        self.model.warm_start(x=np.zeros(self.n), y=np.zeros(self.m))
        res = self.model.solve()
        self.assertEqual(res.info.iter, tot_iter)

        # Warm start with optimal values and check that number of iter < 10
        self.model.warm_start(x=x_opt, y=y_opt)
        res = self.model.solve()
        self.assertLess(res.info.iter, 10)
Example #6
  def test_polish_random(self):

    # Set random seed for reproducibility
    rg = Generator(PCG64(1))

    self.n = 30
    self.m = 50
    Pt = rg.standard_normal((self.n, self.n))
    self.P = sparse.triu(np.dot(Pt.T, Pt), format='csc')
    self.q = rg.standard_normal(self.n)
    self.A = sparse.csc_matrix(rg.standard_normal((self.m, self.n)))
    self.l = -3 + rg.standard_normal(self.m)
    self.u = 3 + rg.standard_normal(self.m)
    self.model = osqp.OSQP()
    self.model.setup(self.P, self.q, self.A, self.l, self.u, **self.opts)

    # Solve problem
    res = self.model.solve()

    # Assert close
    nptest.assert_allclose(
        res.x, np.array([
             0.14309944, -0.03539077,  0.27864189,  0.0693045 , -0.40741513,
             0.58500801, -0.05715695,  0.53470081,  0.15764935, -0.10198167,
             0.03584195, -0.28628935, -0.15170641,  0.10532207, -0.48210877,
             0.00868872,  0.48983164, -0.30742672,  0.54240528, -0.17622243,
            -0.38665758, -0.16340594, -0.24741171,  0.26922765,  0.53341687,
            -0.74634085, -1.28463569,  0.02608472, -0.23450606, -0.09142843]),
        rtol=1e-5, atol=1e-5)
    nptest.assert_allclose(
        res.y, np.array([
             0.,  0., 0.11863563,  0.,  0., 0.,  0.,  0.,  0., 0.23223504,
            -0.08489787,  0.,  0.,  0.,  0., 0.0274536 ,  0.,  0.,  0.,  0.,
             0.,  0.,  0.,  0.,  0., 0.,  0.,  0.,  0.,  0., 0.,  0., -0.447373,
             0.,  0., 0.,  0.,  0., -0.12800017,  0., 0.,  0., -0.05106154,
             0.47314221,  0., -0.23398984,  0.,  0.,  0.,  0.]),
        rtol=2e-5, atol=2e-5)
    nptest.assert_allclose(res.info.obj_val, -5.680387544713935, rtol=1e-5, atol=1e-5)
Example #7
    def setUp(self):
        """
    Setup unconstrained quadratic problem
    """
        # Set random seed for reproducibility
        rg = Generator(PCG64(1))

        self.n = 30
        self.m = 0
        P = sparse.diags(rg.random(self.n)) + 0.2 * sparse.eye(self.n)
        self.P = P.tocsc()
        self.q = rg.standard_normal(self.n)
        self.A = sparse.csc_matrix((self.m, self.n))
        self.l = np.array([])
        self.u = np.array([])
        self.opts = {
            'verbose': False,
            'eps_abs': 1e-06,
            'eps_rel': 1e-06,
            'polish': False
        }
        self.model = osqp.OSQP()
        self.model.setup(self.P, self.q, self.A, self.l, self.u, **self.opts)
Example #8
    def test_coefficient_matrix_construction(self):
        """
        Test coefficient matrix properties

        This test validates that the coefficient matrix in the phase-retrieval algorithm satisfies its
        theoretical properties: the expected shape and agreement with an alternative construction.

        """
        rng = Generator(PCG64(1995))
        n: int = rng.integers(low=9, high=20)
        r: int = rng.integers(low=2, high=np.floor(np.sqrt(n)))
        mat: Matrix = generate_covariance(n, r, np.complex128, rng)[0]
        mat = change_to_fourier_basis(mat)

        coeff_mat: Matrix = coefficient_matrix_construction(
            mat, signal_length=n, approximation_rank=r)
        self.assertEqual(coeff_mat.shape[0], n**3)
        self.assertEqual(coeff_mat.shape[1], (1 + r**4) * n)

        other_mat: Matrix = matrix_construction_alternative(
            mat, signal_length=n, approximation_rank=r)
        self.assertTrue(np.allclose(coeff_mat, other_mat),
                        msg=f'{np.max(np.abs(other_mat - coeff_mat))}')
Example #9
    def test_phase_retrieval_on_exact_data(self):
        """
        Test phase-retrieval performance on exact data.

        This test performs the phase-retrieval on the exact covariance, up to some random integer shift.
        The output estimated covariance should be equal to the exact covariance, up to some shift of both its axes,
        i.e. the estimation error should be very close to zero (about 10^-20).
        This test is performed for both real and complex data.
        """
        rng = Generator(PCG64(1995))
        signal_length: int = rng.integers(low=9, high=20)
        approximation_rank: int = rng.integers(low=2,
                                               high=np.floor(
                                                   np.sqrt(signal_length)))
        tol: float = 1e-20

        for data_type in [np.complex128, np.float64]:
            exact_cov: Matrix = generate_covariance(signal_length,
                                                    approximation_rank,
                                                    data_type, rng)[0]
            random_shift: int = rng.integers(low=0, high=signal_length)
            rotated_cov: Matrix = np.roll(exact_cov,
                                          shift=[random_shift, random_shift],
                                          axis=[0, 1])
            exact_cov_fourier_basis: Matrix = change_to_fourier_basis(
                rotated_cov)

            random_phases: Vector = 1j * rng.standard_normal(signal_length - 1)
            input_cov = np.multiply(
                exact_cov_fourier_basis,
                circulant([1] + np.exp(random_phases).tolist()))
            estimated_cov: Matrix = phase_retrieval(input_cov, signal_length,
                                                    approximation_rank)
            estimation_error: float = calc_estimation_error(
                exact_cov, estimated_cov)

            self.assertTrue(np.allclose(estimation_error, 0, atol=tol, rtol=0))
Example #10
    def test_equivalence_to_naive_method(self):
        """
        Test the equivalence of the two methods.

        This test validates that both the naive method and the improved method return identical
        output (up to numerical inaccuracies) for some random signal.

        """
        rng = Generator(PCG64(596))
        signal_length: int = rng.integers(low=10, high=40)
        observations_num: int = rng.integers(low=5, high=20)
        approximation_rank: int = rng.integers(low=5, high=20)
        data_type = np.complex128
        covariance, eigenvectors, eigenvalues = generate_covariance(
            signal_length, approximation_rank, data_type, rng)
        observations = generate_observations(eigenvectors, eigenvalues,
                                             approximation_rank,
                                             observations_num, data_type, rng)
        observations_fourier: Matrix = np.fft.fft(observations,
                                                  axis=1,
                                                  norm="ortho")
        tri_spectrum_naive: ThreeDMatrix = estimate_tri_spectrum_naive(
            observations_fourier)
        tri_spectrum_improved: ThreeDMatrix = estimate_tri_spectrum_v2(
            observations_fourier)
        # Validate both tri-spectra are equal
        self.assertTrue(
            np.allclose(np.abs(tri_spectrum_naive),
                        np.abs(tri_spectrum_improved)),
            msg=f'{np.max(np.abs(tri_spectrum_improved - tri_spectrum_naive))}'
        )
        self.assertTrue(
            np.allclose(np.angle(tri_spectrum_naive),
                        np.angle(tri_spectrum_improved)),
            msg=f'{np.max(np.angle(tri_spectrum_improved) - np.angle(tri_spectrum_naive))}'
        )
Example #11
def points_on_biconcave_disc(num_points, param_c = 0.5, param_d = 0.375, random_seed = 42, verbose = False):
	"""Generates a pointcloud on a biconcave disc
	
	Parameters:
	num_points (int): Number of points to be generated
	param_c (float): Value of parameter 'c'
	param_d (float): Value of parameter 'd'
	random_seed (int): Seed for the random number generator
	verbose (bool): Print information on screen
	
	Returns:
	np.array : The pointcloud generated with shape (num_points, 3)

	"""
	if verbose:
		print(f"No. of points sampled = {num_points}")

	pointcloud = np.zeros((num_points, 3))
	rg = Generator(PCG64(random_seed))
	count = 0
	while count < num_points:
		y = 2.0*rg.random()-1.0
		z = 2.0*rg.random()-1.0

		t1 = (8*param_d*param_d*(y*y + z*z) + param_c*param_c*param_c*param_c)**(1.0/3.0)
		t2 = param_d*param_d + y*y + z*z
		if t1 >= t2:
			if rg.random() < 0.5:
				pointcloud[count, 0] = np.sqrt(t1 - t2)
			else:
				pointcloud[count, 0] = -np.sqrt(t1 - t2)

			pointcloud[count, 1] = y
			pointcloud[count, 2] = z
			count += 1

	return pointcloud
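
# Usage sketch: rejection-sample 500 points; every accepted point satisfies
# the implicit biconcave-disc equation, so the cloud lies on the surface.
pts = points_on_biconcave_disc(500, verbose=True)
print(pts.shape)  # (500, 3)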
Example #12
def block_rng(seed, jump_index):
    return Generator(PCG64(seed).jumped(jump_index))
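
# Usage sketch: PCG64(seed).jumped(i) returns a bit generator advanced by
# i * 2**127 steps, so each stream below is effectively non-overlapping.
streams = [block_rng(seed=20240101, jump_index=i) for i in range(4)]
print([rng.standard_normal() for rng in streams])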
Example #13
     56208,
     23325,
     29606,
     40099,
     9776,
     46303,
     6333,
     15881,
     63110,
     6022,
     61267,
     56526,
 ]
 entropy = sum([bits << (16 * i) for i, bits in enumerate(entropy_bits)])
 seq = SeedSequence(entropy)
 gen = [Generator(PCG64(child)) for child in seq.spawn(EX_NUM)]
 sample_sizes = (
     20,
     25,
     30,
     35,
     40,
     45,
     50,
     60,
     70,
     80,
     90,
     100,
     120,
     140,
Example #14
    def test_montecarlo_f0(self):
        means = np.array([0.2, 0.4, 0.6, 0.5])
        stds = np.array([0.05, 0.07, 0.1, 0.01])
        weights = np.array([1, 2, 4, 5])

        # Normal
        generator = PCG64(1994)
        vals = spatial.montecarlo_f0(means, stds, weights,
                                     dist_generators="normal",
                                     dist_spatial="normal",
                                     nrealizations=3,
                                     generator=generator)
        mean, stddev, realizations = vals

        # Realizations
        expected = np.array([[0.2165,  0.2017,  0.2639],
                             [0.4322,  0.3953,  0.4572],
                             [0.5411,  0.6164,  0.6436],
                             [0.5244,  0.5015,  0.5080]])
        returned = realizations
        self.assertArrayAlmostEqual(expected, returned, places=3)

        # Mean
        expected = 0.5035
        returned = mean
        self.assertAlmostEqual(expected, returned, places=3)

        # Stddev
        expected = 0.1124
        returned = stddev
        self.assertAlmostEqual(expected, returned, places=3)

        # LogNormal
        generator = PCG64(1994)
        vals = spatial.montecarlo_f0(means, stds, weights,
                                     dist_generators="lognormal",
                                     dist_spatial="lognormal",
                                     nrealizations=3,
                                     generator=generator)
        mean, stddev, realizations = vals

        # Realizations
        expected = np.exp(np.array([[0.2165,  0.2017,  0.2639],
                                    [0.4322,  0.3953,  0.4572],
                                    [0.5411,  0.6164,  0.6436],
                                    [0.5244,  0.5015,  0.5080]]))
        returned = realizations
        self.assertArrayAlmostEqual(expected, returned, places=3)

        # Mean
        expected = np.exp(0.5035)
        returned = mean
        self.assertAlmostEqual(expected, returned, places=3)

        # Stddev
        expected = 0.1124
        returned = stddev
        self.assertAlmostEqual(expected, returned, places=3)

        # Other generators
        for generator in ["PCG64", "MT19937"]:
            spatial.montecarlo_f0(means, stds, weights,
                                  generator=generator)

        # Bad generator
        generator = "my fancy generator"
        self.assertRaises(ValueError, spatial.montecarlo_f0,
                          means, stds, weights, generator=generator)

        # Bad dist_generator
        dist_generators = "my fancy generator"
        self.assertRaises(NotImplementedError,
                          spatial.montecarlo_f0, means, stds,
                          weights, dist_generators=dist_generators)

        # Bad dist_spatial
        dist_spatial = "my fancy distribution"
        self.assertRaises(NotImplementedError,
                          spatial.montecarlo_f0, means, stds,
                          weights, dist_spatial=dist_spatial)
Example #15
import numpy as np
import pandas as pd
from numpy.random import Generator, PCG64

import json


def df_to_named_matrix(df):
    arr_ip = [tuple(i) for i in df.values]
    dtyp = np.dtype(list(zip(df.dtypes.index, df.dtypes)))
    arr = np.array(arr_ip, dtype=dtyp)
    return arr


rg = Generator(PCG64(55850))


NUM_GENES = 100
NUM_REGULONS = 4
# Must be divisible by 4
NUM_CELLS = 100

cell_ids = [f"Cell_{n}" for n in range(1, NUM_CELLS + 1)]
gene_ids = [f"Gene_{n}" for n in range(1, NUM_GENES + 1)]


def generate_matrix():
    genes = rg.poisson(lam=1, size=NUM_GENES)
    genes = [x + 1 for x in genes]
    matrix = rg.poisson(lam=genes, size=(NUM_CELLS, NUM_GENES))
    return matrix
Example #16
def dirichlet():
    base = Generator(PCG64())
    probs = base.random(10)
    probs = probs / probs.sum()
    return [(probs, ), (probs, (3, 4, 5))]
Example #17
def multinomial():
    base = Generator(PCG64())
    probs = base.random(10)
    probs /= probs.sum()
    return (10, probs), (base.integers(10, 100, size=(3, 4)), probs)
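
# Usage sketch (assumed harness): each tuple returned by the two helpers above
# is an argument pack for the matching Generator method.
g = Generator(PCG64(0))
for args in dirichlet():
    g.dirichlet(*args)
for args in multinomial():
    g.multinomial(*args)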
Example #18
import numpy as np
from scipy import sparse
import utils.codegen_utils as cu
from numpy.random import Generator, PCG64

# Set random seed for reproducibility
rg = Generator(PCG64(1))

# Define tests
n = 5
m = 8
test_form_KKT_n = n
test_form_KKT_m = m
p = 0.7

test_form_KKT_A = sparse.random(test_form_KKT_m,
                                test_form_KKT_n,
                                density=p,
                                format='csc',
                                random_state=rg)
test_form_KKT_P = sparse.random(n, n, density=p, random_state=rg)
test_form_KKT_P = test_form_KKT_P.dot(test_form_KKT_P.T).tocsc() + sparse.eye(
    n, format='csc')
test_form_KKT_Pu = sparse.triu(test_form_KKT_P, format='csc')
test_form_KKT_rho = 1.6
test_form_KKT_sigma = 0.1
test_form_KKT_KKT = sparse.bmat([[
    test_form_KKT_P + test_form_KKT_sigma * sparse.eye(test_form_KKT_n),
    test_form_KKT_A.T
], [test_form_KKT_A, -1. / test_form_KKT_rho * sparse.eye(test_form_KKT_m)]],
                                format='csc')
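
# For reference, the matrix assembled above is the standard OSQP KKT system
#     [[ P + sigma*I ,  A^T        ],
#      [ A           , -(1/rho)*I  ]]
# with this test's sigma = 0.1 and rho = 1.6.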
Example #19
import numpy as np
from numpy.random import Generator, PCG64  # import the PCG-64 BitGenerator

# We default to using a 128-bit integer using entropy gathered from the OS
sq1 = np.random.SeedSequence()
sq2 = np.random.SeedSequence()
print(sq1.entropy == sq2.entropy)  # the two entropy values are not equal
print(sq2.generate_state(2, dtype=np.uint64))  # first argument: how many words of state to generate; second: their dtype

# rg = Generator(PCG64(sq1.generate_state(1, dtype=np.uint64)))
rg = Generator(PCG64(sq1.entropy))  # use the entropy value directly as the random seed

A = np.arange(12).reshape((3, 4))  # build a 3 x 4 matrix of integers
print(A)

B = rg.choice(A, axis=1, size=5)  # does not modify matrix A
print(B)

rg.shuffle(A, axis=1)  # modifies matrix A in place
print(A)

print(rg.choice(5, 3, replace=False))  # draw 3 distinct values from {0, 1, 2, 3, 4}
# This is equivalent to rg.permutation(np.arange(5))[:3]

print(rg.choice([-1.0, 2.0, 4.0], 2, replace=False))
""" If you need to generate a good seed “offline”, then SeedSequence().entropy or using secrets.randbits(128) from the standard library are both convenient ways. """

# https://numpy.org/doc/stable/reference/random/bit_generators/index.html
# https://numpy.org/doc/stable/reference/random/index.html#numpyrandom
Example #20
 def __init__(self, seed=None):
     self._tasks = []
     self._task_weights = []
     self._rng = Generator(PCG64(seed))
        action="store",
        help="Number of CPUs to use. If not specified, uses cpu_count() - 1",
    )
    parser.add_argument(
        "--z_only",
        action="store_true",
        help="Only execute Z-type tests",
    )
    args = parser.parse_args()
    njobs = getattr(args, "ncpu", None)
    njobs = psutil.cpu_count(logical=False) - 1 if njobs is None else njobs
    njobs = max(njobs, 1)

    ss = SeedSequence(ENTROPY)
    children = ss.spawn(len(TRENDS) * EX_NUM)
    generators = [Generator(PCG64(child)) for child in children]
    jobs = []
    count = 0
    for tr in TRENDS:
        for i in range(EX_NUM):
            file_name = os.path.join(OUTPUT_PATH, f"adf_z_{tr}-{i:04d}.npz")
            jobs.append((tr, generators[count], file_name))
            count += 1
    jobs = [job for job in jobs if not os.path.exists(job[-1])]
    random.shuffle(jobs)
    nremconfig = len(jobs)
    nconfig = len(children)
    print(f"Total configurations: {BLUE}{nconfig}{RESET}, "
          f"Remaining: {RED}{nremconfig}{RESET}")
    print(f"Running on {BLUE}{njobs}{RESET} CPUs")
    if njobs > 1:
Example #22
def multiday(
    depots,
    sample_generator,
    dist_and_time,
    route_optimizer,
    simulator,
    n_days,
    day_start,
    day_end,
    seed=None,
    replications=1,
    plot=False,
    collection_points=None,
    k=0,
    dist_threshold=20000,
    futile_count_threshold=1,
    cap=20,
    tlim=1e10,
):
    """Multiday Sim

    Parameters
    ----------
    depots : np.array
        2*n_depots array of longitudes and latitudes of the depots.
        This is set up to support multidepot problems. However, to do this properly we'll need to track which depots
        have which packages. So this isn't actually fully supported.
    sample_generator : function
        Takes no inputs, returns two lists, longitudes and latitudes of the packages.
    dist_and_time : function
        Takes longitudes and latitudes, and returns a distance matrix, a time matrix, and an array of time windows.
    route_optimizer : function
        Inputs are the depot numbers, the distance and time matrices, the time windows as a np.array,
        the current day, the day each package arrived, and the number of times each package was futile.
        Outputs are a set of vehicle routes, and a list of packages that were not scheduled.
    simulator : function
        Simulates the deliveries for a set of routes, given the routes, distances, times and time windows.
    n_days : int
        The number of days to simulate.
    day_start : int
        The time for the start of a day
    day_end : int
        The time for the end of a day
    seed : int (Optional)
        The seed to initialise the random number generator with
    replications : int (Optional)
        Defaults to 1. The number of simulations to perform on the optimized route. Only the last is used as the input to the next day.
        (Might be an idea to take the mode values if enough simulations are performed?)
    plot : bool (Optional)
        Whether to display a plot of the current routes.
    collection_points: bool (Optional)
        Whether to enable the use of collection points
    k : int (Optional)
        If we have collection points, the number of collection points
    dist_threshold : int
        The distance a customer needs to be within to have their package assigned to a collection point
    futile_count_threshold : int
        The number of times a package needs to be futile before it gets assigned to a collection point
    cap : int
        The capacity of the collection points
    tlim : int
        The time to run the simulation for. Helps us stop the simulation if we get an extreme buildup of packages
    """
    start = time.time()
    logger.debug("Start multiday sim")

    rg = Generator(PCG64(seed))

    # Pregenerate arrivals
    latitudes_per_day = []
    longitudes_per_day = []
    time_windows_per_day = []
    customers_per_day = []
    allocat_packages_to_collection = [[] for i in range(k)]  # preset package allocation
    customer_to_cp = [
        [] for i in range(k)
    ]  # initialise the customer list for each collection point

    logger.debug("Generating incoming packages")

    for day in range(n_days):
        customers, new_time_windows = sample_generator(rg)
        latitudes_per_day.append([c.lat for c in customers])
        longitudes_per_day.append([c.lon for c in customers])
        time_windows_per_day.append(new_time_windows)
        customers_per_day.append(customers)

    data = []
    n_depots = depots.shape[1]
    delivery_time_windows = np.array(
        [[day_start, day_end] for i in range(n_depots)]
    )  # These are our beliefs about the time windows, not their true value
    arrival_days = np.zeros(n_depots)
    futile_count = np.zeros(n_depots)
    customers = np.array(
        [
            Customer(depots[0, 0], depots[1, 0], 1, 1, rg=rg)
            for i in range(len(depots[0]))
        ]
    )
    packages_at_collection = []
    collection_point_removed_packages = 0
    if collection_points and k != 0:  # choose the number of collection points
        sol_fac_lat, sol_fac_lon, coord, fac_coord = opt_collection_coord(
            k, cap, depots, sample_generator, dist_and_time, seed=None
        )

        # initialise a list of dictionaries for each collection point
        packages_at_collection = [{} for i in range(k)]
        # collection_point_removed_packages = [0 for i in range(k)]
    for day in range(n_days):
        logger.debug("Start day %i" % day)
        collection_point_removed_packages = [0 for i in range(k)]
        # Generate data
        new_time_windows, new_customers = (
            time_windows_per_day[day],
            customers_per_day[day],
        )

        delivery_time_windows = np.vstack((delivery_time_windows, new_time_windows))
        arrival_days = np.append(arrival_days, [day for _ in range(len(new_customers))])
        futile_count = np.append(futile_count, np.zeros(len(new_customers)))
        customers = np.append(customers, new_customers)

        logger.debug("Number of incoming packages: %i" % len(new_customers))
        logger.debug(
            "Current number of packages: %i" % (len(customers) - 1)
        )  # -1 for the depot

        logger.debug("Calculating distance and time matrix")

        cp_customers = np.array([])
        collection_dist = 0
        # Remove packages from collection points
        if collection_points and k != 0:
            for i in range(k):
                logger.debug(
                    "Number of packages in collection %i, day %i: %i",
                    i,
                    day,
                    len(packages_at_collection[i]),
                )
                if (
                    len(packages_at_collection[i]) != 0
                ):  # there's packages in the collection point
                    p = 0.6  # success probability
                    # np.random.geometric(p=0.35, size=10000)
                    # np.random.geometric(p=0.6, size=len(packages_at_collection[i]))
                    collected_package = []
                    for c in packages_at_collection[i]:
                        v = packages_at_collection[i][c]
                        # package collection distribution
                        cdf = geom.cdf(v, p)
                        # if the probability is greater than the random number, the package is picked up
                        if cdf >= rg.random():
                            collected_package.append(c)
                            collection_point_removed_packages[i] += 1
                        else:
                            # add a day to the number of days at collection point
                            packages_at_collection[i][c] += 1
                    for collected in collected_package:
                        packages_at_collection[i].pop(collected)

            # Add customers to collections points, and add visited collection points to customers
            #  a list of undelivered packages in the simulation
            undelivered = np.ones(len(futile_count), dtype=bool)
            for i, c in enumerate(futile_count):
                # a threshold of day count of the package in the system
                if c >= futile_count_threshold and i >= n_depots:
                    cd = os.path.join(
                        os.path.dirname(os.path.abspath(__file__)), "..", "data"
                    )
                    # get the dist from the customer's house to the collection points
                    lat_all = sol_fac_lat[:]
                    lon_all = sol_fac_lon[:]
                    lat_all.insert(0, customers[i].lat)
                    lon_all.insert(0, customers[i].lon)
                    if len(lat_all) > 4:
                        print("error")
                    coord_filename = None
                    dist, tm = osrm_get_dist(
                        cd,
                        coord_filename,
                        lat_all,
                        lon_all,
                        source=[0],
                        save=False,
                        host="localhost:5000",
                    )
                    # choose the closest collection point
                    min_value = min(dist[0])
                    # dist_threshold = 20000  # 20km
                    # allow the package to be assigned to the closest collection point if the dist is within the threshold
                    if min_value < dist_threshold:
                        min_ind = dist[0].index(min_value)
                        # assign if its closest collection point still has spare space
                        if len(packages_at_collection[min_ind]) < cap:
                            # assign the package to its closest collection point
                            allocat_packages_to_collection[min_ind].append(
                                i
                            )  # i is the index but not the unique index for the customer?
                            undelivered[
                                i
                            ] = False  # customer to be removed from the delivery list
                            # record the customer list for each collection point
                            customer_to_cp[min_ind].append(customers[i])
                            collection_dist += min_value
                            # tw_to_cp[min_ind].append(delivery_time_windows[i])
                            # ad_to_cp[min_ind].append(arrival_days[i])
                            # fc_to_cp[min_ind].append(futile_count[i])

                            # packages_at_collection[min_ind][customers[i]] = 0
            # Remove packages sent to collection points from customers
            delivery_time_windows = delivery_time_windows[undelivered]
            arrival_days = arrival_days[undelivered]
            futile_count = futile_count[undelivered]
            customers = customers[undelivered]

            cp_customers = np.array([])
            # add collection point as a customer if there is package allocated to it
            for cp in range(k):
                if len(customer_to_cp[cp]) != 0:
                    cp_customers = np.append(
                        cp_customers,
                        Customer(sol_fac_lat[cp], sol_fac_lon[cp], 1, 1, rg=rg),
                    )
            # customers = np.append(customers, new_customers)

            # cp_customers = np.array(
            #     [
            #         Customer(sol_fac_lat[cp], sol_fac_lon[cp], 1, 1, rg=rg)
            #         for cp in range(k)
            #         if len(customer_to_cp[cp]) != 0
            #     ]
            # )
            if len(cp_customers) > 0:
                cp_time_windows = np.array(
                    [[day_start, day_end] for i in range(len(cp_customers))]
                )

                delivery_time_windows = np.vstack(
                    (delivery_time_windows, cp_time_windows)
                )
                customers = np.append(customers, cp_customers)
                futile_count = np.append(futile_count, np.zeros(len(cp_customers)))
                arrival_days = np.append(
                    arrival_days, [day for _ in range(len(cp_customers))]
                )

        # Get times and distances
        dm, tm = dist_and_time(customers)
        if dm is None:
            logger.critical("Distance computation failed. Stopping simulation.")
            # We've exceeded the map bounds. Stop here for now, but we should really handle this more gracefully.
            break
        dm = np.array(dm)
        tm = np.array(tm)

        logger.debug("Compute alternate locations")

        # Setup list of alternate locations
        alternate_locations = []
        temp = customers.tolist()
        while len(temp) > 0:
            c = temp.pop()
            location_index = []
            for a in c.alternates:
                location_index.append(customers.tolist().index(a))
                if a in temp:
                    temp.remove(a)
            alternate_locations.append(location_index)

        logger.debug("Optimise routes")

        # Calulate routes for the day
        routes, unscheduled = route_optimizer(
            [i for i in range(n_depots)],
            dm,
            tm,
            delivery_time_windows,
            day,
            arrival_days,
            futile_count,
            alternate_locations,
        )
        if plot:
            plt.clf()
            routes.plot(
                positions=[(customer.lon, customer.lat) for customer in customers],
                weight_matrix=dm,
            )
            plt.show(block=False)
            plt.pause(0.001)

        futile_count[[i for i in range(len(customers)) if i not in unscheduled]] += 1

        # logger.debug(routes)
        logger.debug("Unscheduled: %s" % unscheduled)

        logger.debug("Start simulations")

        for i in range(replications):
            logger.debug("Replication %i" % i)
            # Simulate behaviour
            distances, times, futile, delivered = simulator(
                routes, dm, tm, delivery_time_windows, customers, rg
            )
            logger.debug("Delivered: %s" % delivered)

            # Data collection to save
            data.append(
                collect_data(
                    day,
                    routes,
                    distances,
                    times,
                    futile,
                    delivered,
                    arrival_days,
                    delivery_time_windows,
                    [len(l) for l in packages_at_collection],
                    collection_point_removed_packages,
                    collection_dist,
                )
            )

        # Remove delivered packages, using just the last result
        undelivered = np.ones(len(customers), dtype=bool)
        for alternates in alternate_locations:  # Remove all alternate locations as well
            for package in delivered:
                if package in alternates:
                    undelivered[alternates] = False
        undelivered[[i for i in range(n_depots)]] = True

        # # get the undelivered list for collection point
        # cp_undelivered = undelivered[-len(cp_customers) :]
        # count_cp_undelivered = 0

        # check if collection point is visited
        for i in range(len(cp_customers)):
            if not undelivered[-i - 1]:  # collection point visited
                for cust in customer_to_cp[
                    -i - 1
                ]:  # add package into the collection point
                    packages_at_collection[-i - 1][cust] = 0
                # allocat_packages_to_collection[min_ind] = []
                customer_to_cp[
                    -i - 1
                ] = []  # reset the customer list for the visited collection point
            else:
                # count_cp_undelivered += 1
                undelivered[
                    -i - 1
                ] = False  # remove collection point in the customer list
                # Generate data
                # new_time_windows, new_arrival_days, new_futile_count, new_customers = (
                #     tw_to_cp[-i - 1],
                #     ad_to_cp[-i - 1],
                #     fc_to_cp[-i - 1],
                #     customer_to_cp[-i - 1],
                # )

        delivery_time_windows = delivery_time_windows[undelivered]
        arrival_days = arrival_days[undelivered]
        futile_count = futile_count[undelivered]
        customers = customers[undelivered]
        # if count_cp_undelivered:
        #     # stick the undelivered collection pacakges on
        #     delivery_time_windows = np.vstack((delivery_time_windows, new_time_windows))
        #     arrival_days = np.append(arrival_days, new_arrival_days)
        #     futile_count = np.append(futile_count, new_futile_count)
        #     customers = np.append(customers, new_customers)

        logger.debug(
            "Number of remaining Packages: %i" % (len(customers) - 1)
        )  # -1 for the depot
        if time.time() - start > tlim:
            break

    return data
Example #23
 def __enter__(self):
     global RNG
     assert RNG is None
     RNG = Generator(PCG64(seed=self.seed))
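
 # A plausible counterpart (an assumption; not shown in the source): the
 # matching __exit__ clears the module-level RNG again on exit.
 def __exit__(self, exc_type, exc_value, traceback):
     global RNG
     RNG = None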
Example #24
def compressing(pdfsetting, compressed, minimization, est_dic, gans):
    """
    Action that performs the compression. The parameters
    for the compression are provided by a `runcard.yml`.

    Parameters
    ----------
    pdfsetting: dict
        Dictionary containing the PDF name and enhanced-set options
    compressed: int
        Size of the compressed set
    minimization: dict
        Dictionary containing the minimizer settings (minimizer, seed, max_iteration)
    est_dic: dict
        Dictionary containing the list of estimators
    gans: dict
        Dictionary containing the GANs enhancement settings
    """

    minimizer = minimization.get("minimizer", "genetic")
    seed = minimization.get("seed", 0)
    maxit = minimization.get("max_iteration", 15000)
    pdf = str(pdfsetting["pdf"])
    enhanced_already_exists = pdfsetting.get("existing_enhanced", False)

    if gans["enhance"]:
        from pycompressor.postgans import postgans
        runcard = gans["runcard"]
        nbgen = gans["total_replicas"]
        # Write PDF name into gans runcard
        ganruncard = open(f"{runcard}.yml", "a+")
        ganruncard.write(f"pdf: {str(pdf)}")
        ganruncard.close()
        outfolder = str(pdf) + "_enhanced"
        sub.call([
            "ganpdfs",
            f"{runcard}.yml",
            "-o",
            f"{outfolder}",
            "-k",
            f"{nbgen}",
            "--force",
        ])
        sub.call(["evolven3fit", f"{outfolder}", f"{nbgen}"])
        # Add symbolic Links to LHAPDF dataDir
        postgans(str(pdf), outfolder, nbgen)

    splash()
    # Set seed
    rndgen = Generator(PCG64(seed=seed))

    console.print("\n• Load PDF sets & Printing Summary:", style="bold blue")
    xgrid = XGrid().build_xgrid()
    # Load Prior Sets
    prior = PdfSet(pdf, xgrid, Q0, NF).build_pdf()
    rndindex = rndgen.choice(prior.shape[0], compressed, replace=False)
    # Load Enhanced Sets
    if enhanced_already_exists:
        try:
            postgan = pdf + "_enhanced"
            final_result = {"pdfset_name": postgan}
            enhanced = PdfSet(postgan, xgrid, Q0, NF).build_pdf()
        except RuntimeError as excp:
            raise LoadingEnhancedError(f"{excp}")
        nb_iter, ref_estimators = maxit, None
        init_index = np.array(extract_index(pdf, compressed))
    else:
        final_result = {"pdfset_name": pdf}
        nb_iter, ref_estimators = maxit, None
        init_index, enhanced = rndindex, prior

    # Create output folder
    outrslt = postgan if enhanced_already_exists else pdf
    folder = pathlib.Path().absolute() / outrslt
    folder.mkdir(exist_ok=True)
    # Create output folder for ERF stats
    out_folder = pathlib.Path().absolute() / "erfs_output"
    out_folder.mkdir(exist_ok=True)

    # Output Summary
    table = Table(show_header=True, header_style="bold magenta")
    table.add_column("Parameters", justify="left", width=24)
    table.add_column("Description", justify="left", width=50)
    table.add_row("PDF set name", f"{pdf}")
    table.add_row("Size of Prior", f"{prior.shape[0] - 1} replicas")
    if enhanced_already_exists:
        table.add_row("Size of enhanced", f"{enhanced.shape[0] - 1} replicas")
    table.add_row("Size of compression", f"{compressed} replicas")
    table.add_row("Input energy Q0", f"{Q0} GeV")
    table.add_row(
        "x-grid size",
        f"{xgrid.shape[0]} points, x=({xgrid[0]:.4e}, {xgrid[-1]:.4e})")
    table.add_row("Minimizer", f"{minimizer}")
    console.print(table)

    # Init. Compressor class
    comp = Compress(prior, enhanced, est_dic, compressed, init_index,
                    ref_estimators, out_folder, rndgen)
    # Start compression depending on the Evolution Strategy
    erf_list = []
    console.print("\n• Compressing MC PDF replicas:", style="bold blue")
    if minimizer == "genetic":
        # Run compressor using GA
        with trange(nb_iter) as iter_range:
            for _ in iter_range:
                iter_range.set_description("Compression")
                erf, index = comp.genetic_algorithm(nb_mut=5)
                erf_list.append(erf)
                iter_range.set_postfix(ERF=erf)
    elif minimizer == "cma":
        # Run compressor using CMA
        erf, index = comp.cma_algorithm(std_dev=0.8)
    else:
        raise ValueError(f"{minimizer} is not a valid minimizer.")

    # Prepare output file
    final_result["ERFs"] = erf_list
    final_result["index"] = index.tolist()
    outfile = open(f"{outrslt}/compress_{pdf}_{compressed}_output.dat", "w")
    outfile.write(json.dumps(final_result, indent=2))
    outfile.close()
    # Fetching ERF and construct reduced PDF grid
    console.print(f"\n• Final ERF: [bold red]{erf}.", style="bold red")

    # Compute final ERFs for the final chosen replicas
    final_err_func = comp.final_erfs(index)
    serfile = open(f"{out_folder}/erf_reduced.dat", "a+")
    serfile.write(f"{compressed}:")
    serfile.write(json.dumps(final_err_func))
    serfile.write("\n")
    serfile.close()
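
# Hypothetical invocation (keys inferred from the .get()/[] accesses above;
# all values are placeholders, including the estimator dictionary):
compressing(
    pdfsetting={"pdf": "NNPDF40_nnlo_as_01180", "existing_enhanced": False},
    compressed=50,
    minimization={"minimizer": "genetic", "seed": 0, "max_iteration": 15000},
    est_dic={"moment_estimators": ["mean", "stdev"]},  # placeholder structure
    gans={"enhance": False},
)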
Example #25
import os

import numpy as np
import numba as nb
from cffi import FFI

from numpy.random import PCG64

ffi = FFI()
if os.path.exists("./distributions.dll"):
    lib = ffi.dlopen("./distributions.dll")
elif os.path.exists("./libdistributions.so"):
    lib = ffi.dlopen("./libdistributions.so")
else:
    raise RuntimeError("Required DLL/so file was not found.")

ffi.cdef("""
double random_standard_normal(void *bitgen_state);
""")
x = PCG64()
xffi = x.cffi
bit_generator = xffi.bit_generator

random_standard_normal = lib.random_standard_normal


def normals(n, bit_generator):
    out = np.empty(n)
    for i in range(n):
        out[i] = random_standard_normal(bit_generator)
    return out


normalsj = nb.jit(normals, nopython=True)
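
# Usage sketch: draw through the plain-Python wrapper; the numba-jitted
# version takes the same cffi pointer argument.
print(normals(1000, bit_generator).std())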
Example #26
import numpy as np
from numpy.random import Generator, PCG64
import mpmath
from mpsci.distributions import multivariate_hypergeometric
from mpsci.stats import gtest

mpmath.mp.dps = 20

# How many p-values to compute for each set of parameters.
ntrials = 3

# Minimum expected count of any element in the support for the G-test.
min_count = 100

gen = Generator(PCG64(11223344556677))

for method in ['marginals', 'count']:
    print()
    print("=== method:", method, "===")
    # Be sure any parameters for which this code is run have a PMF whose
    # smallest nonzero value is large enough that min_count/min(pmf.values())
    # gives a reasonable sample size.
    for colors, nsample in [([10, 90], 40), ([5, 10, 10], 12),
                            ([40, 6, 5], 19), ([40, 6, 5], 30),
                            ([3, 4, 5, 6], 8),
                            ([2, 3, 3, 3, 3, 4, 5], 13)]:

        pmf = multivariate_hypergeometric.pmf_dict(colors, nsample)

        pmin = float(min(pmf.values()))
        size = int(min_count / pmin + 0.5)
Example #27
import numpy as np
from scipy import sparse
import scipy.sparse.linalg as spla
import utils.codegen_utils as cu
from numpy.random import Generator, PCG64

# Set random seed for reproducibility
rg = Generator(PCG64(2))

# Simple case
test_solve_KKT_n = 3
test_solve_KKT_m = 4

test_solve_KKT_P = sparse.random(test_solve_KKT_n,
                                 test_solve_KKT_n,
                                 density=0.4,
                                 format='csc',
                                 random_state=rg)
test_solve_KKT_P = test_solve_KKT_P.dot(test_solve_KKT_P.T).tocsc()
test_solve_KKT_A = sparse.random(test_solve_KKT_m,
                                 test_solve_KKT_n,
                                 density=0.4,
                                 format='csc',
                                 random_state=rg)
test_solve_KKT_Pu = sparse.triu(test_solve_KKT_P, format='csc')

test_solve_KKT_rho = 4.0
test_solve_KKT_sigma = 1.0
test_solve_KKT_KKT = sparse.vstack([
    sparse.hstack([
        test_solve_KKT_P + test_solve_KKT_sigma * sparse.eye(test_solve_KKT_n),
Example #28
import numpy as np
import numba as nb

from numpy.random import PCG64
from timeit import timeit

bit_gen = PCG64()
next_d = bit_gen.cffi.next_double
state_addr = bit_gen.cffi.state_address


def normals(n, state):
    out = np.empty(n)
    for i in range((n + 1) // 2):
        x1 = 2.0 * next_d(state) - 1.0
        x2 = 2.0 * next_d(state) - 1.0
        r2 = x1 * x1 + x2 * x2
        while r2 >= 1.0 or r2 == 0.0:
            x1 = 2.0 * next_d(state) - 1.0
            x2 = 2.0 * next_d(state) - 1.0
            r2 = x1 * x1 + x2 * x2
        f = np.sqrt(-2.0 * np.log(r2) / r2)
        out[2 * i] = f * x1
        if 2 * i + 1 < n:
            out[2 * i + 1] = f * x2
    return out


# Compile using Numba
normalsj = nb.jit(normals, nopython=True)
# Must use state address not state with numba
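# Timing sketch, mirroring the NumPy extending docs: the jitted function is
# called with the raw state address captured above.
print(timeit(lambda: normalsj(10000, state_addr), number=10))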
Example #29
import numpy as np
from numpy.random import Generator, PCG64
import math
import matplotlib.pyplot as plt


G = Generator(PCG64())


def stick_breaking(n: int, alpha: float) -> np.ndarray:
    """
    Draws n samples from a stick-breaking process with beta distribution intensity alpha.

    :param n: number of samples
    :param alpha: intensity parameter of the beta distribution
    :returns: stick lengths
    """
    betas = G.beta(a=alpha, b=1.0, size=n)
    betas[1:] *= np.cumprod(1-betas[:-1])
    weights = np.sort(betas)[::-1]
    return weights
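
# Usage sketch: the sorted weights sum to just under 1 for large n
# (the remainder is the unbroken part of the stick).
w = stick_breaking(n=1000, alpha=2.0)
print(w[:5], w.sum())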


def plot_stick_lengths(stick_lengths: np.array,
                       alpha: float,
                       B: int) -> None:
    """
    Plots -log2(stick_lengths)
    :param stick_lengths: array of stick lengths
    """
    _, ax = plt.subplots()
Example #30
from distutils.version import LooseVersion
from itertools import product
from typing import cast

import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest

from randomgen import Generator
import randomgen.common

try:
    from numpy.random import PCG64, Generator as NPGenerator

    pcg = PCG64()
    initial_state = pcg.state
    np_gen = NPGenerator(pcg)
    gen = Generator(cast(randomgen.common.BitGenerator, pcg))
except ImportError:
    from randomgen import PCG64  # type: ignore[misc]

NP_LT_1174 = LooseVersion(np.__version__) < LooseVersion("1.17.4")
NP_GTE_118 = LooseVersion(np.__version__) >= LooseVersion("1.18")
NP_GTE_120 = LooseVersion(np.__version__) >= LooseVersion("1.20")
NP_GTE_121 = LooseVersion(np.__version__) >= LooseVersion("1.21")

pytestmark = pytest.mark.skipif(NP_LT_1174, reason="Only test 1.17.4+")


def positive_param():
    base = Generator(PCG64())