def test_get_sub_seed():
    n = 100
    seed = np.random.randint(2**31)
    sub_seeds = []
    for i in range(n):
        sub_seeds.append(get_sub_seed(seed, i, n))

    assert len(np.unique(sub_seeds)) == n

    # Test the cached version
    cache = {}
    sub_seeds_cached = []
    for i in range(n):
        sub_seed, cache = get_sub_seed(seed, i, n, cache=cache)
        sub_seeds_cached.append(sub_seed)

    assert np.array_equal(sub_seeds, sub_seeds_cached)
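# --- Illustrative sketch (not part of the original module) --------------------------
# A minimal get_sub_seed that satisfies the contract exercised by the test above:
# the same (seed, index) pair always yields the same sub seed, different indexes
# never collide within `high`, and passing a `cache` dict returns (sub_seed, cache).
# The actual library implementation may differ; this sketch only mirrors the contract
# and is deliberately simple rather than efficient.
import numpy as np


def get_sub_seed(seed, sub_seed_index, high=2**31, cache=None):
    """Deterministically derive a unique sub seed for `sub_seed_index` (sketch)."""
    if cache is not None and sub_seed_index in cache:
        return cache[sub_seed_index], cache

    rs = np.random.RandomState(seed)
    seen = set()
    sub_seed = None
    for _ in range(sub_seed_index + 1):
        # Rejection sampling keeps the accepted sequence duplicate-free, so the
        # value at position `sub_seed_index` is unique to that index.
        sub_seed = rs.randint(high)
        while sub_seed in seen:
            sub_seed = rs.randint(high)
        seen.add(sub_seed)

    if cache is not None:
        cache[sub_seed_index] = sub_seed
        return sub_seed, cache
    return sub_seed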
def test_get_sub_seed():
    n = 100
    rs = np.random.RandomState()
    state = rs.get_state()
    sub_seeds = []
    for i in range(n):
        # Restore the state so that every call starts from the same generator state
        rs.set_state(state)
        sub_seeds.append(get_sub_seed(rs, i, n))

    assert len(np.unique(sub_seeds)) == n
def prepare_seed(*inputs, **kwinputs):
    if 'random_state' in kwinputs:
        # Get the seed for this batch, assuming a np.random.RandomState instance
        seed = kwinputs['random_state'].get_state()[1][0]

        # Since we may not be the first operation to use this seed, let's generate a
        # sub seed using this seed
        sub_seed_index = kwinputs.get('index_in_batch') or 0
        kwinputs['seed'] = get_sub_seed(np.random.RandomState(seed), sub_seed_index)

    return inputs, kwinputs
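# --- Hypothetical usage (illustrative only) ------------------------------------------
# prepare_seed converts a shared random_state into a per-call integer seed for an
# operation that only understands `seed`. The operation `noisy_op` below is made up,
# and the call assumes the RandomState-based get_sub_seed signature that prepare_seed
# relies on.
import numpy as np


def noisy_op(x, seed=None, **kwargs):
    # Toy operation that accepts an integer seed instead of a random_state
    return x + np.random.RandomState(seed).normal()


inputs, kwinputs = prepare_seed(1.0,
                                random_state=np.random.RandomState(42),
                                index_in_batch=3)
# kwinputs now carries an integer 'seed' derived from the shared random_state
print(noisy_op(*inputs, **kwinputs))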
def rvs(self, size=None, random_state=None):
    """Draw a batch of random variates for the parameter nodes."""
    random_state = random_state or np.random

    # Configure the computation context for a single batch of the requested size
    self.context.batch_size = size or 1
    self.context.seed = get_sub_seed(random_state, 0)

    # Load and compute one batch of the random-variate network
    loaded_net = self.client.load_data(self._rvs_net, self.context, batch_index=0)
    batch = self.client.compute(loaded_net)
    rvs = np.column_stack([batch[p] for p in self.parameter_names])

    if self.dim == 1:
        rvs = rvs.reshape(size or 1)

    # Return a single sample when no size was requested
    return rvs[0] if size is None else rvs
def load(cls, context, compiled_net, batch_index):
    """Add an instance of random state to the corresponding node.

    Parameters
    ----------
    context : ComputationContext
    compiled_net : nx.DiGraph
    batch_index : int

    Returns
    -------
    net : nx.DiGraph
        Loaded net, which is the `compiled_net` that has been loaded with data that
        can depend on the batch_index.

    """
    key = 'output'
    seed = context.seed

    if seed == 'global':
        # Get the random_state of the respective worker by delaying the evaluation
        random_state = get_np_random
        key = 'operation'
    elif isinstance(seed, (int, np.int32, np.uint32)):
        # TODO: In the future, we could use https://pypi.python.org/pypi/randomstate
        # to enable jumps?
        sub_seed, context.sub_seed_cache = get_sub_seed(seed,
                                                        batch_index,
                                                        cache=context.sub_seed_cache)
        random_state = np.random.RandomState(sub_seed)
    else:
        raise ValueError("Seed of type {} is not supported".format(type(seed)))

    # Assign the random state or its acquirer function to the corresponding node
    node_name = '_random_state'
    if compiled_net.has_node(node_name):
        compiled_net.node[node_name][key] = random_state

    return compiled_net
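# --- Minimal standalone illustration (not part of the loader above) ------------------
# The core idea of the seed handling: every batch derives its own reproducible
# RandomState from the master seed and its batch_index, so parallel batches draw from
# separate pseudo-random streams. Uses the cached get_sub_seed signature seen above.
import numpy as np

master_seed = 123
cache = {}
sub_seed_0, cache = get_sub_seed(master_seed, 0, cache=cache)
sub_seed_1, cache = get_sub_seed(master_seed, 1, cache=cache)
assert sub_seed_0 != sub_seed_1  # distinct sub seed per batch

rs_batch_0 = np.random.RandomState(sub_seed_0)
rs_batch_1 = np.random.RandomState(sub_seed_1)
x0, x1 = rs_batch_0.rand(3), rs_batch_1.rand(3)  # separate draws for each batch

# Re-deriving with the same master seed and batch_index reproduces the same stream
assert get_sub_seed(master_seed, 0, cache={})[0] == sub_seed_0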
def load(cls, context, compiled_net, batch_index):
    key = 'output'
    seed = context.seed

    if seed == 'global':
        # Get the random_state of the respective worker by delaying the evaluation
        random_state = get_np_random
        key = 'operation'
    elif isinstance(seed, (int, np.int32, np.uint32)):
        random_state = np.random.RandomState(context.seed)
    else:
        raise ValueError("Seed of type {} is not supported".format(type(seed)))

    # Jump (or scramble) the state based on batch_index to create parallel separate
    # pseudo random sequences
    if seed != 'global':
        # TODO: In the future, we could use https://pypi.python.org/pypi/randomstate
        # to enable jumps?
        random_state = np.random.RandomState(get_sub_seed(random_state, batch_index))

    _random_node = '_random_state'
    if compiled_net.has_node(_random_node):
        compiled_net.node[_random_node][key] = random_state

    return compiled_net