Example 1
    def reseed(self, seed_seq=None):
        """
        Get a new random number generator.

        Parameters
        ----------
        seed_seq : np.random.SeedSequence, rlberry.seeding.Seeder or int, default : None
            Seed sequence from which to spawn the random number generator.
            If None, generate a random seed.
            If int, use it as entropy for a new SeedSequence.
            If Seeder, use its seed_seq attribute.
        """
        # if None, new seed sequence
        if seed_seq is None:
            seed_seq = SeedSequence()
        # if SeedSequence, keep as is
        elif isinstance(seed_seq, SeedSequence):
            pass
        # if Seeder, get Seeder.seed_seq
        elif isinstance(seed_seq, Seeder):
            seed_seq = seed_seq.seed_seq
        # if integer, new SeedSequence
        else:
            seed_seq = SeedSequence(seed_seq)

        # spawn
        seed_seq = seed_seq.spawn(1)[0]

        self.seed_seq = seed_seq
        self.rng = default_rng(self.seed_seq)
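Why reseed() spawns before assigning: SeedSequence.spawn advances an internal child counter, so two successive spawns from the same parent yield distinct, reproducible streams. A standalone illustration (not part of rlberry):

from numpy.random import SeedSequence, default_rng
import numpy as np

root = SeedSequence(123)
# spawn() increments the parent's child counter, so each call returns
# a new child and therefore a new independent stream.
rng_a = default_rng(root.spawn(1)[0])
rng_b = default_rng(root.spawn(1)[0])
assert not np.array_equal(rng_a.integers(0, 100, 8),
                          rng_b.integers(0, 100, 8))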
Example 2
def set_global_seed(entropy=42):
    """
    rlberry has a global seed from which we can obtain different random number
    generators with (close to) independent outcomes.

    Important:

    In each new process/thread, set_global_seed must be called with a new seed.

    To do:
        Check (torch seeding):
        https://github.com/pytorch/pytorch/issues/7068#issuecomment-487907668

    Parameters
    ----------
    entropy : int or SeedSequence, optional
        The entropy for creating the global SeedSequence, or a SeedSequence itself.
    """
    global _GLOBAL_ENTROPY, _GLOBAL_SEED_SEQ, _GLOBAL_RNG

    if isinstance(entropy, SeedSequence):
        seedseq = entropy
        _GLOBAL_ENTROPY = seedseq.entropy
        _GLOBAL_SEED_SEQ = seedseq
    else:
        _GLOBAL_ENTROPY = entropy
        _GLOBAL_SEED_SEQ = SeedSequence(entropy)

    _GLOBAL_RNG = get_rng()

    # seed torch
    if _TORCH_INSTALLED:
        torch.manual_seed(
            _GLOBAL_SEED_SEQ.generate_state(1, dtype=np.uint32)[0])
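get_rng() is referenced above but not shown. A minimal sketch consistent with the globals in this snippet (an assumption, not the library's actual implementation) would spawn a child from the global SeedSequence on every call:

from numpy.random import default_rng

def get_rng():
    # Spawn a fresh child of the global SeedSequence so every call
    # returns a generator with an independent stream.
    child = _GLOBAL_SEED_SEQ.spawn(1)[0]
    return default_rng(child)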
Example 3
    def __init__(self, seed_seq=None, spawn_seed_seq=True):
        """
        Parameters
        ----------
        seed_seq : np.random.SeedSequence, rlberry.seeding.Seeder or int, default : None
            Seed sequence from which to spawn the random number generator.
            If None, generate a random seed.
            If int, use it as entropy for a new SeedSequence.
            If Seeder, use its seed_seq attribute.
        spawn_seed_seq : bool, default : True
            If True, spawn a new seed sequence from seed_seq for the Seeder (strongly recommended).
            If False, use the input seed_seq itself to define the Seeder.
            Warning: setting this to False can lead to unexpected behavior. This argument is only
            used internally in rlberry, in Seeder.spawn(), to avoid unnecessary spawning.
        """
        super().__init__()
        if seed_seq is None:
            seed_seq = SeedSequence()
        elif isinstance(seed_seq, SeedSequence):
            pass
        elif isinstance(seed_seq, Seeder):
            seed_seq = seed_seq.seed_seq
        else:  # integer
            seed_seq = SeedSequence(seed_seq)

        if spawn_seed_seq:
            seed_seq = seed_seq.spawn(1)[0]

        self.seed_seq = seed_seq
        self.rng = default_rng(self.seed_seq)
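To see why spawning is strongly recommended here: two Seeders built directly from the same seed sequence would replay identical streams, whereas spawned children do not. A standalone illustration with plain numpy:

from numpy.random import SeedSequence, default_rng
import numpy as np

# Without spawning, the same seed material produces identical streams.
a = default_rng(SeedSequence(42))
b = default_rng(SeedSequence(42))
assert np.array_equal(a.integers(0, 100, 8), b.integers(0, 100, 8))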
Example 4
def _create_cache(t0, t1, w0, w1, entropy, pool_size, k):
    ts = [t0, t1]
    ws = [w0, w1]

    parent = SeedSequence(entropy=entropy, pool_size=pool_size)
    seeds = [parent]

    for level in range(1, k + 1):
        new_ts = []
        new_ws = []
        new_seeds = []
        for i, parent in enumerate(seeds):
            seedv, seedl, seedr = parent.spawn(3)
            new_seeds.extend([seedl, seedr])

            t0, t1 = ts[i], ts[i + 1]
            w0, w1 = ws[i], ws[i + 1]
            t = (t0 + t1) / 2
            w = utils.brownian_bridge(t0=t0,
                                      t1=t1,
                                      w0=w0,
                                      w1=w1,
                                      t=t,
                                      seed=seedv)
            new_ts.extend([ts[i], t])
            new_ws.extend([ws[i], w])

        new_ts.append(ts[-1])
        new_ws.append(ws[-1])
        ts = new_ts
        ws = new_ws
        seeds = new_seeds

    return ts, ws, seeds
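This cache relies on spawning being deterministic: a SeedSequence child is a pure function of (entropy, pool_size, spawn_key), so rebuilding the tree from the same entropy reproduces the same virtual Brownian path. A quick check of that property:

from numpy.random import SeedSequence

a = SeedSequence(entropy=2024, pool_size=4).spawn(3)
b = SeedSequence(entropy=2024, pool_size=4).spawn(3)
# Identical entropy -> identical children -> identical generator states.
assert all(x.generate_state(2).tolist() == y.generate_state(2).tolist()
           for x, y in zip(a, b))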
Example 5
def generate_random_configurations(box_length, n_part, n_dim, n_ensemble,
                                   seed_entropy):
    """ Generates a set of configurations in an ensemble

    Parameters
    ----------
    box_length : float
        length of the box
    n_part : int
        number of particles
    n_dim : int
        dimension of the system
    n_ensemble : int
        number of configurations to generate
    seed_entropy : int
        entropy used to seed the random number generators

    Returns
    -------
    list of configurations : list of numpy arrays
        list of configurations in the ensemble
    seeds : list of np.random.SeedSequence
        seed sequences spawned for the random number generator
        of each configuration
    """
    sq = SeedSequence(seed_entropy)
    seeds = sq.spawn(n_ensemble)
    coords_list = []
    for seed in seeds:
        initial_coords = generate_random_configuration_single(
            box_length, n_part, n_dim, seed)
        coords_list.append(initial_coords)
    return (coords_list, seeds)
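Usage sketch (the per-configuration generator lives in generate_random_configuration_single, which is not shown): the same entropy reproduces the entire ensemble.

coords1, seeds1 = generate_random_configurations(
    box_length=10.0, n_part=50, n_dim=3, n_ensemble=4, seed_entropy=7)
coords2, seeds2 = generate_random_configurations(
    box_length=10.0, n_part=50, n_dim=3, n_ensemble=4, seed_entropy=7)
# Same seed_entropy -> same spawned seeds -> identical configurations.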
Example 6
def retry(store,
          prob,
          algo,
          num_retries,
          value_limit=math.inf,
          popsize=1,
          workers=mp.cpu_count()):
    try:
        import pygmo as pg
    except ImportError as e:
        raise ImportError(
            "Please install PYGMO (pip install pygmo) to use PAGMO optimizers"
        ) from e
    sg = SeedSequence()
    rgs = [Generator(MT19937(s)) for s in sg.spawn(workers)]
    proc = [
        Process(target=_retry_loop,
                args=(pid, rgs, store, prob, algo, num_retries, value_limit,
                      popsize, pg)) for pid in range(workers)
    ]
    [p.start() for p in proc]
    [p.join() for p in proc]
    store.sort()
    store.dump()
    return OptimizeResult(x=store.get_x_best(),
                          fun=store.get_y_best(),
                          nfev=store.get_count_evals(),
                          success=True)
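Examples 9-11 below repeat this same pattern: spawn one SeedSequence child per worker, wrap each in its own bit generator, and let each process index the shared list by its pid. A self-contained sketch of the pattern (the workload here is a stand-in, not the library's _retry_loop):

import multiprocessing as mp
from numpy.random import Generator, MT19937, SeedSequence

def _worker(pid, rgs, queue):
    # Each process draws only from its own pre-seeded generator.
    queue.put((pid, rgs[pid].random()))

if __name__ == "__main__":
    sg = SeedSequence(1234)
    rgs = [Generator(MT19937(s)) for s in sg.spawn(4)]
    queue = mp.Queue()
    procs = [mp.Process(target=_worker, args=(pid, rgs, queue))
             for pid in range(4)]
    [p.start() for p in procs]
    results = [queue.get() for _ in procs]  # drain before joining
    [p.join() for p in procs]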
Example 7
    def __init__(self, seed=None, comm=MPI.COMM_WORLD):
        """Create independent random number generators in parallel

        Optional keyword arguments:
        seed=None: seed the Generator to get a reproducible stream.
        comm=MPI.COMM_WORLD: The MPI communicator

        Creates an independent np.random.Generator in each MPI process. This
        generator can be retrieved with the __call__ method, e.g.

        from KSFD import Generator
        ...
        kgen = Generator(seed)
        rng = kgen()

        Also, the class method get_rng() will retrieve the process-wide
        np.random.Generator, so that you don't need to carry the Generator
        instance around with you:

        rng = Generator.get_rng()
        """
        if seed is None and self._rng is not None:
            #
            # already set -- nothing to do
            #
            return
        size = comm.size
        rank = comm.rank
        ss = SeedSequence(seed)
        seeds = ss.spawn(size)
        type(self)._seeds = seeds
        type(self)._rng = default_rng(seeds[rank])
        return
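Note the design choice: with a fixed seed, every rank can compute the full child list locally and index it by its own rank, so all processes agree on the seeds without any MPI communication. The same idea, illustrated without MPI:

from numpy.random import SeedSequence

size, seed = 4, 2021
per_rank = [SeedSequence(seed).spawn(size)[rank] for rank in range(size)]
states = [s.generate_state(1)[0] for s in per_rank]
assert len(set(states)) == size  # each rank gets a distinct stream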
Example 8
    def run(self):

        ### 1. Specify maximum and minimum limits
        self.valid_args_limit()

        chunk = int(np.ceil(self.maxiter / self.workers))
        seq = SeedSequence()
        random = seq.spawn(self.workers)

        if self.debug:
            print()
            print('maxiter {} chunk {} workers {} threads {} cats {}'.format(
                self.maxiter, chunk, self.workers, self.n_threads,
                self.n_cats))
            print()
            print('exec pstree -p', os.getpid())

        best = None

        if self.workers == 1:
            best = self.__worker__(0, default_rng(random[0]), self.maxiter,
                                   None)
        else:

            best = [[]] * 4
            best[self.BEST_FUNC_TEST_VALUE] = np.inf

            shared_best = Array('d', [0.0] * (self.dimension + 4), lock=True)
            shared_best[self.BEST_FUNC_TEST_VALUE] = np.inf

            jobs = []

            for pid in range(self.workers):
                p = Process(target=self.__worker__,
                            args=(pid, default_rng(random[pid]), chunk,
                                  shared_best))
                jobs.append(p)

                p.start()

            for job in jobs:
                job.join()

            best[self.BEST_PROCESS_ID] = shared_best[self.BEST_PROCESS_ID]
            best[self.BEST_CAT_INDEX] = shared_best[self.BEST_CAT_INDEX]
            best[self.BEST_FUNC_TEST_VALUE] = shared_best[
                self.BEST_FUNC_TEST_VALUE]

            for i in range(self.dimension):
                best[self.BEST_CAT_POSITION].append(
                    shared_best[self.BEST_CAT_POSITION + i])

        # Patch the output when maximizing to recover the true value: -f(x)
        if self.maximize:
            best[self.BEST_FUNC_TEST_VALUE] = -best[self.BEST_FUNC_TEST_VALUE]

        if self.debug:
            print()

        return best
Example 9
def _retry(minimizer):
    sg = SeedSequence()
    rgs = [Generator(MT19937(s)) for s in sg.spawn(minimizer.workers)]
    procs = [
        Process(target=_retry_loop, args=(pid, rgs, minimizer))
        for pid in range(minimizer.workers)
    ]
    [p.start() for p in procs]
    return procs
Example 10
def mo_retry(fun, weight_bounds, ncon, y_exp, store, optimize, num_retries,
             value_limits, workers=mp.cpu_count()):
    sg = SeedSequence()
    rgs = [Generator(MT19937(s)) for s in sg.spawn(workers)]
    proc = [Process(target=_retry_loop,
                    args=(pid, rgs, fun, weight_bounds, ncon, y_exp,
                          store, optimize, num_retries, value_limits))
            for pid in range(workers)]
    [p.start() for p in proc]
    [p.join() for p in proc]
    store.sort()
    store.dump()
    return store.get_xs()
Example 11
def retry(fun, store, optimize, num_retries, value_limit=math.inf,
          workers=mp.cpu_count(), stop_fitness=-math.inf):
    sg = SeedSequence()
    rgs = [Generator(MT19937(s)) for s in sg.spawn(workers)]
    proc = [Process(target=_retry_loop,
                    args=(pid, rgs, fun, store, optimize, num_retries,
                          value_limit, stop_fitness))
            for pid in range(workers)]
    [p.start() for p in proc]
    [p.join() for p in proc]
    store.sort()
    store.dump()
    return OptimizeResult(x=store.get_x_best(), fun=store.get_y_best(), 
                          nfev=store.get_count_evals(), success=True)
Example 12
    def __init__(self, n, seed=None, threads=None):
        if threads is None:
            threads = multiprocessing.cpu_count()
        self.threads = threads

        seq = SeedSequence(seed)
        self._random_generators = [default_rng(s) for s in seq.spawn(threads)]

        self.n = n
        self.executor = concurrent.futures.ThreadPoolExecutor(threads)
        self.values = np.empty(n)
        self.step = np.ceil(n / threads).astype(np.int_)
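The excerpt ends with the constructor. A minimal sketch of how such a class might fill self.values across threads, chunk by chunk, in the spirit of the NumPy docs' multithreaded-generation recipe (the standard-normal workload is an assumption):

    def fill(self):
        def _work(rng, out, first, last):
            out[first:last] = rng.standard_normal(last - first)

        futures = [
            self.executor.submit(_work, rng, self.values, i * self.step,
                                 min((i + 1) * self.step, self.n))
            for i, rng in enumerate(self._random_generators)
        ]
        concurrent.futures.wait(futures)
        return self.values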
Example 13
def _generate_args(data_dir):
    vehicle_data = osp.join(data_dir, "Vehicle Data", "Simulation Snapshot")
    ss = SeedSequence(SEED)
    seeds = ss.spawn(N_FILES)

    for chunk in range(N_FILES):
        fname = osp.join(vehicle_data,
                         "Snapshot_" + str(chunk * 1000000) + ".csv")

        sys.stderr.write("Processing chunk {}...\n".format(chunk))
        sys.stderr.flush()

        yield (seeds[chunk], fname)
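The yielded (seed, fname) pairs are presumably consumed by a worker that builds its own generator from the SeedSequence; a hypothetical consumer:

from numpy.random import default_rng

def process_chunk(seed, fname):
    rng = default_rng(seed)  # independent, reproducible stream per file
    ...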
Example 14
def calculate_variable_importance(explainer, type, loss_function, variables, N,
                                  B, label, processes, keep_raw_permutations,
                                  random_state):
    if processes == 1:
        result = [None] * B
        for i in range(B):
            result[i] = loss_after_permutation(explainer.data, explainer.y,
                                               explainer.model,
                                               explainer.predict_function,
                                               loss_function, variables, N,
                                               np.random)
    else:
        # Create number generator for each iteration
        ss = SeedSequence(random_state)
        generators = [default_rng(s) for s in ss.spawn(B)]
        pool = mp.Pool(processes)
        result = pool.starmap_async(
            loss_after_permutation,
            [(explainer.data, explainer.y, explainer.model,
              explainer.predict_function, loss_function, variables, N,
              generators[i]) for i in range(B)]).get()
        pool.close()

    raw = pd.concat(result, sort=True)
    result = raw.mean().sort_values().reset_index()
    result['label'] = label

    result.rename(columns={0: 'dropout_loss', 'index': 'variable'},
                  inplace=True)

    if type == "ratio":
        result.loc[:, 'dropout_loss'] = (
            result.loc[:, 'dropout_loss'] /
            result.loc[result.variable == '_full_model_', 'dropout_loss'].values)

    if type == "difference":
        result.loc[:, 'dropout_loss'] = (
            result.loc[:, 'dropout_loss'] -
            result.loc[result.variable == '_full_model_', 'dropout_loss'].values)

    raw_permutations = raw.reset_index(
        drop=True) if keep_raw_permutations else None

    return result, raw_permutations
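The key property of spawning B generators up front: each bootstrap repetition gets its own stream, so results are reproducible regardless of which pool process runs which repetition, or in what order. The pattern in isolation:

from numpy.random import SeedSequence, default_rng

B, random_state = 10, 0
# One independent generator per repetition, fixed before scheduling.
generators = [default_rng(s) for s in SeedSequence(random_state).spawn(B)]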
Example 15
def test_reference_data():
    """ Check that SeedSequence generates data the same as the C++ reference.

    https://gist.github.com/imneme/540829265469e673d045
    """
    inputs = [
        [3735928559, 195939070, 229505742, 305419896],
        [3668361503, 4165561550, 1661411377, 3634257570],
        [164546577, 4166754639, 1765190214, 1303880213],
        [446610472, 3941463886, 522937693, 1882353782],
        [1864922766, 1719732118, 3882010307, 1776744564],
        [4141682960, 3310988675, 553637289, 902896340],
        [1134851934, 2352871630, 3699409824, 2648159817],
        [1240956131, 3107113773, 1283198141, 1924506131],
        [2669565031, 579818610, 3042504477, 2774880435],
        [2766103236, 2883057919, 4029656435, 862374500],
    ]
    outputs = [
        [3914649087, 576849849, 3593928901, 2229911004],
        [2240804226, 3691353228, 1365957195, 2654016646],
        [3562296087, 3191708229, 1147942216, 3726991905],
        [1403443605, 3591372999, 1291086759, 441919183],
        [1086200464, 2191331643, 560336446, 3658716651],
        [3249937430, 2346751812, 847844327, 2996632307],
        [2584285912, 4034195531, 3523502488, 169742686],
        [959045797, 3875435559, 1886309314, 359682705],
        [3978441347, 432478529, 3223635119, 138903045],
        [296367413, 4262059219, 13109864, 3283683422],
    ]
    outputs64 = [
        [2477551240072187391, 9577394838764454085],
        [15854241394484835714, 11398914698975566411],
        [13708282465491374871, 16007308345579681096],
        [15424829579845884309, 1898028439751125927],
        [9411697742461147792, 15714068361935982142],
        [10079222287618677782, 12870437757549876199],
        [17326737873898640088, 729039288628699544],
        [16644868984619524261, 1544825456798124994],
        [1857481142255628931, 596584038813451439],
        [18305404959516669237, 14103312907920476776],
    ]
    for seed, expected, expected64 in zip(inputs, outputs, outputs64):
        expected = np.array(expected, dtype=np.uint32)
        ss = SeedSequence(seed)
        state = ss.generate_state(len(expected))
        assert_array_equal(state, expected)
        state64 = ss.generate_state(len(expected64), dtype=np.uint64)
        assert_array_equal(state64, expected64)
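generate_state is a pure function of the seed material, which is what makes a reference test like this possible; repeated calls on the same SeedSequence return the same words:

from numpy.random import SeedSequence

ss = SeedSequence([3735928559, 195939070, 229505742, 305419896])
assert ss.generate_state(4).tolist() == ss.generate_state(4).tolist()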
Example 16
def divide_in_partitions(l: List[T], k: int, seed: int) -> List[FrozenSet[T]]:
    """
  Divide the given list into k random partitions
  """
    rng = Generator(PCG64(SeedSequence(seed)))
    rng.shuffle(l)
    return [frozenset(l[j::k]) for j in range(k)]
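Usage example: the striding l[j::k] yields k near-equal partitions of the shuffled list, and a fixed seed makes the partition reproducible.

items = list(range(10))
parts = divide_in_partitions(items, k=3, seed=0)
assert sum(len(p) for p in parts) == len(items)
assert parts == divide_in_partitions(list(range(10)), k=3, seed=0)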
Example 17
def split(X, y, s, r):

    rs = RandomState(MT19937(SeedSequence(r)))

    i = X.index.to_numpy()
    rs.shuffle(i)  # use the seeded RandomState, not the unseeded global np.random
    P = np.rint(np.multiply(i.size, s))
    P[-1] = len(i) - np.sum(P[:-1])
    P = np.cumsum(P)
    P = P - 1
    P = [int(p) for p in P]

    last = -1
    X_splitted = []
    for j, p in enumerate(P):
        X_splitted.append(X[(last + 1):(p + 1)])  # include index p in this split
        last = p

    last = -1
    y_splitted = []
    for j, p in enumerate(P):
        y_splitted.append(y[(last + 1):(p + 1)])
        last = p

    return (X_splitted + y_splitted)
Example 18
    def _set_rng(self):
        """
        Initialize the random generator stream. For seeded runs, sets the state reproducibly.
        """
        # TODO: checkpointing save of self._rng.bit_generator.state per process
        if mpi.is_main_process():
            seed = getattr(self, "seed", None)
            if seed is not None:
                self.mpi_warning("This run has been SEEDED with seed %s", seed)
            ss = SeedSequence(seed)
            child_seeds = ss.spawn(mpi.size())
        else:
            child_seeds = None
        ss = mpi.scatter(child_seeds)
        self._entropy = ss.entropy  # stored for debugging and reproducibility
        self._rng = default_rng(ss)
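One caveat with storing only ss.entropy for reproducibility: a spawned child shares its parent's entropy and is distinguished by its spawn_key, so both are needed to reconstruct the child:

from numpy.random import SeedSequence

parent = SeedSequence(123)
child = parent.spawn(2)[1]
clone = SeedSequence(child.entropy, spawn_key=child.spawn_key)
assert clone.generate_state(2).tolist() == child.generate_state(2).tolist()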
Example 19
def set_global_seed(seed=42):
    """
    rlberry has a global seed from which we can obtain different random number
    generators with (close to) independent outcomes.

    Important:

    If the global seed is altered by the user, it should be done only once,
    after importing rlberry for the first time. This is to ensure that all
    random number generators are children of the same SeedSequence.

    To do:
        Check (torch seeding):
        https://github.com/pytorch/pytorch/issues/7068#issuecomment-487907668
    """
    global _GLOBAL_SEED, _GLOBAL_SEED_SEQ
    _GLOBAL_SEED = seed
    _GLOBAL_SEED_SEQ = SeedSequence(_GLOBAL_SEED)

    # get state (for seeding)
    # rng_libs = get_rng()
    # state = rng_libs.__getstate__()['state']['state']

    # seed torch
    if _TORCH_INSTALLED:
        torch.manual_seed(seed)
Example 20
def MultiDist(n, mu, sigma):
    rand_gen = Generator(PCG64(SeedSequence()))
    dist_1 = rand_gen.exponential(1, n)
    mu_array = np.full(n, mu)
    mu_array = mu_array * dist_1
    dist_2 = rand_gen.normal(mu_array, sigma)
    plt.hist(dist_2, bins='auto')
    plt.show()
Example 21
def test_seedsequence():
    from numpy.random.bit_generator import (ISeedSequence,
                                            ISpawnableSeedSequence,
                                            SeedlessSeedSequence)

    s1 = SeedSequence(range(10), spawn_key=(1, 2), pool_size=6)
    s1.spawn(10)
    s2 = SeedSequence(**s1.state)
    assert_equal(s1.state, s2.state)
    assert_equal(s1.n_children_spawned, s2.n_children_spawned)

    # The interfaces cannot be instantiated themselves.
    assert_raises(TypeError, ISeedSequence)
    assert_raises(TypeError, ISpawnableSeedSequence)
    dummy = SeedlessSeedSequence()
    assert_raises(NotImplementedError, dummy.generate_state, 10)
    assert len(dummy.spawn(10)) == 10
Example 22
    def __init__(self, fname):
        self.vars = Variables(fname)
        self.seed = SeedSequence()
        self.rand_gen = Generator(PCG64(self.seed))
        self.vector_initial = Normalise(self.rand_gen.uniform(-1, 1, 3))
        self.pos_initial = np.array([0, 0, 0])
        self.time = np.full(self.vars.sample_total, self.vars.base_time)
        self.time = np.append([0], self.time)
        self.time = np.cumsum(self.time)
        self.time = np.reshape(self.time, (self.time.size, 1))
Example 23
def inverse_shuffle(input_rgb, r1, r2):
    rs_row = RandomState(MT19937(SeedSequence(r1)))
    rs_col = RandomState(MT19937(SeedSequence(r2)))
    
    row_size = len(input_rgb)
    col_size = len(input_rgb[0])
    
    shuf_rgb = np.zeros(input_rgb.shape)
    
    rows = list(range(row_size))
    for i in range(row_size):
        row = rs_row.choice(rows)
        rows.remove(row)
        cols = list(range(col_size))
        for j in range(col_size):
            col = rs_col.choice(cols)
            cols.remove(col)
            shuf_rgb[i][j] = input_rgb[row][col]
            
    return shuf_rgb
Example 24
def run_experiment(c):
    # timestamp to identify the experiment
    timestamp = datetime.now()

    # multiprocessing initialization
    manager = mp.Manager()
    queue = manager.Queue()
    pool = mp.Pool(mp.cpu_count() + 1)

    # calculate number of total iterations to show progress
    num_iterations_total = len(c['DATASETS']) * len(
        c['OPTIMIZERS']) * c['NUM_ITERATIONS']

    # start process which listens to the queue and writes new results to file
    result_writer = pool.apply_async(
        queue_listener, (queue, c, timestamp, num_iterations_total))

    # create independent random generator objects (streams) for every iteration
    seed_sequence = SeedSequence(12345)
    child_seeds = seed_sequence.spawn(num_iterations_total)
    random_streams = iter([default_rng(s) for s in child_seeds])

    # create all the workers which each compute one iteration
    iterations = []
    for dataset in c['DATASETS']:
        for opt_name, opt_params in c['OPTIMIZERS'].items():
            for iteration_idx in range(c['NUM_ITERATIONS']):
                rng = next(random_streams)
                iteration = pool.apply_async(
                    run_iteration, (c, dataset, iteration_idx, opt_name,
                                    opt_params, timestamp, queue, rng))
                iterations.append(iteration)

    # collect results from the workers through the pool result queue
    for iteration in iterations:
        iteration.get()

    # now we are done, kill the listener
    queue.put('kill')
    pool.close()
    pool.join()
Example 25
def UpdateNormalNormalized(i, d=0.01, n=1, Mb=100, mb=-100, rs=0):
    i = np.array(i)
    if not rs:
        rseed = random.randint(1000, 9999)
        rs = RandomState(MT19937(SeedSequence(rseed)))
    Ix = rs.randint(0, i.shape[0], n)  # faster than np.random.choice
    Iy = rs.randint(0, i.shape[1], n)
    z = np.zeros(i.shape) + i
    # d may be the scalar default; broadcast so the per-cell indexing below works
    d = np.broadcast_to(np.asarray(d, dtype=float), i.shape)
    z[Ix, Iy] = z[Ix, Iy] + rs.normal(0, d[Ix, Iy], n)
    z = z/np.sum(z)
    hastings = 0
    return z, (Ix, Iy), hastings
Example 26
def UpdateNormal1D(i, d=0.01, n=1, Mb=100, mb=-100, rs=0):
    i = np.array(i)
    if not rs:
        rseed = random.randint(1000, 9999)
        rs = RandomState(MT19937(SeedSequence(rseed)))
    Ix = rs.randint(0, len(i),n) # faster than np.random.choice
    z = np.zeros(i.shape) + i
    z[Ix] = z[Ix] + rs.normal(0, d, n)
    z[z > Mb] = Mb - (z[z>Mb] - Mb)
    z[z < mb] = mb + (mb - z[z<mb])
    hastings = 0
    return z, Ix, hastings
Example 27
def main(outdir):
    rng = RandomState(MT19937(SeedSequence(config.seed)))

    berlin_holidays = holidays.DE(prov="BW")

    num_employees = 20000
    num_jobsites = 200
    num_areas = 20
    num_qualifications = 40
    num_shifts = 3
    num_days = 356

    num_orders = 1000
    df = pd.DataFrame.from_dict({
        "Einsatzort":
        rng.randint(0, num_jobsites, num_orders),
        "Qualifikation":
        rng.randint(0, num_qualifications, num_orders),
        "Schicht":
        rng.randint(0, num_shifts, num_orders),
        "Tag":
        rng.randint(0, num_days, num_orders),
    })

    df["Tag"] = df["Tag"].apply(
        lambda day: datetime(2019, 1, 1) + timedelta(day))
    df["Wochentag"] = df["Tag"].apply(lambda day: day.strftime("%a"))
    df["Feiertag"] = df["Tag"].apply(lambda day: day in berlin_holidays)

    # grouping of jobsites into areas
    area_splits = np.cumsum(rng.randint(1, 10, num_areas))
    area_splits = (area_splits.T / area_splits.max() *
                   num_jobsites).astype(int)
    df["Ort"] = df["Einsatzort"].apply(
        lambda jobsite_id: np.argmax(area_splits > jobsite_id))

    offers = []
    for _ in range(len(df)):
        offers.append(
            rng.choice(range(num_employees),
                       replace=False,
                       size=rng.randint(1, 6)).tolist())

    df["Mitarbeiter ID"] = offers

    train, test = train_test_split(df)

    train.to_csv(os.path.join(outdir, "train.tsv"), index=False, sep="\t")
    test.to_csv(os.path.join(outdir, "test_truth.tsv"), index=False, sep="\t")
    test[[
        "Einsatzort", "Qualifikation", "Schicht", "Tag", "Wochentag",
        "Feiertag", "Ort"
    ]].to_csv(os.path.join(outdir, "test_publish.tsv"), index=False, sep="\t")
Example 28
def dot_jrrp_handler(
    update: Update,
    context: CallbackContext,
    argv: Tuple[str, ...],
) -> None:
    username = update.effective_user.username
    temp = f"{username}{arrow.utcnow().to('utf-8').isocalendar()}"
    hash_ = int(hashlib.md5(temp.encode()).hexdigest(), 16) % sys.maxsize
    # logging.info(f"in jrrp_handler: {temp=}, {hash_=}")
    rp = default_rng(SeedSequence(hash_)).integers(0, 100, endpoint=True)
    chat_id = update.effective_chat.id
    msg = f"@{username} 今天的人品值是:**{rp}**。"
    context.bot.send_message(chat_id, text=msg, parse_mode=ParseMode.MARKDOWN)
Example 29
def multiplier_proposal_vector(q, d=1.05, f=1, rs=0):
    if not rs:
        rseed = random.randint(1000, 9999)
        rs = RandomState(MT19937(SeedSequence(rseed)))
    S = q.shape
    ff = rs.binomial(1, f, S)
    u = rs.random(S)
    l = 2 * np.log(d)
    m = np.exp(l * (u - .5))
    m[ff == 0] = 1.
    new_q = q * m
    U = np.sum(np.log(m))
    return new_q, 0, U
Example 30
def create_bit_generator(seed=None, stream=0):
    """Creates an instance of a ``BIT_GENERATOR``.

    Parameters
    ----------
    seed : int, optional
        The seed to use. If seed is None (the default), will create a seed
        using :py:func:`create_seed`.
    stream : int, optional
        The stream to create the bit generator for. This allows multiple
        generators to exist with the same seed, but that produce different sets
        of random numbers. Default is 0.

    Returns
    -------
    BIT_GENERATOR :
        The bit generator initialized with the given seed and stream.
    """
    # create the seed sequence
    seedseq = SeedSequence(create_seed(seed))
    if stream > 0:
        seedseq = seedseq.spawn(stream + 1)[stream]
    return BIT_GENERATOR(seedseq)
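Hypothetical usage, assuming the module's BIT_GENERATOR is something like PCG64 and create_seed(11) simply returns 11: because spawn on a freshly built SeedSequence is deterministic, the same (seed, stream) pair always reproduces the same bit generator, while different streams give independent ones.

from numpy.random import Generator

rng0 = Generator(create_bit_generator(seed=11, stream=0))
rng1 = Generator(create_bit_generator(seed=11, stream=1))
# Same seed, different streams: reproducible but non-overlapping draws.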