def test_max_steps(self):
    """``Progress`` must reject non-positive ``max_steps`` values."""
    for invalid in (0, -1):
        with pytest.raises(ValidationError):
            with Progress(max_steps=invalid):
                pass
def test_success_property(self):
    """``success`` is None while running, True on clean exit, False on error.

    The second context deliberately raises so that ``__exit__`` sees an
    exception and marks the progress as failed.
    """
    with Progress(10) as p:
        assert p.success is None
    assert p.success

    try:
        with Progress(10) as p2:
            raise Exception()
    except Exception:  # was a bare ``except:`` — never swallow BaseException
        pass
    assert not p2.success
def test_success_property(self):
    """Check ``success``: None while open, True after clean exit, False on error."""
    with Progress(max_steps=10) as p:
        assert p.success is None
    assert p.success

    failed = Progress(max_steps=10)
    try:
        with failed:
            raise UnboundLocalError()
    except UnboundLocalError:
        pass
    assert not failed.success
def test_progress_tracker():
    """Tracker forwards sub-stage updates and total progress to the bar."""
    update_interval = 0.001
    sleep_interval = 20 * update_interval
    n_stages = 4
    n_steps = 3

    total = Progress(name_during="total_prog", max_steps=n_stages)
    bar = ProgressBarMock()
    tracker = ProgressTracker(bar, total, update_interval=update_interval)
    assert not bar.closed

    with tracker:
        for stage in range(n_stages):
            stage_progress = tracker.next_stage(
                name_during="stage%d" % stage, max_steps=n_steps)
            with stage_progress:
                for step in range(n_steps):
                    stage_progress.step()
                    # Give the tracker time to push an update to the bar.
                    time.sleep(sleep_interval)
                    last = bar.updates[-1]
                    assert last.name_during == "stage%d" % stage
                    assert last.n_steps == step + 1
            # After the stage closes, the total progress update appears.
            time.sleep(sleep_interval)
            last = bar.updates[-1]
            assert last.name_during == "total_prog"
            assert last.n_steps == stage
    assert bar.closed
def run_steps(self, steps, progress_bar=None):
    """Simulate for the given number of ``dt`` steps.

    Parameters
    ----------
    steps : int
        Number of steps to run the simulation for.
    progress_bar : bool or `.ProgressBar` or `.ProgressUpdater`, optional \
            (Default: True)
        Progress bar for displaying the progress of the simulation run.
        If True, the default progress bar will be used.
        If False, the progress bar will be disabled.
        For more control over the progress bar, pass in a `.ProgressBar`
        or `.ProgressUpdater` instance.
        If None (the parameter default), the setting chosen at simulator
        construction (``self.progress_bar``, default True) is used.
    """
    # None defers to the simulator-wide setting made in the constructor.
    if progress_bar is None:
        progress_bar = self.progress_bar
    # Track the whole run as a single "Simulation" stage of `steps` steps;
    # each simulator step advances the total progress by one.
    with ProgressTracker(progress_bar,
                         Progress("Simulating", "Simulation", steps)) as pt:
        for i in range(steps):
            self.step()
            pt.total_progress.step()
def test_elapsed_seconds(self, monkeypatch):
    """Elapsed time is the wall-clock difference between enter and exit."""
    fake_time = 1.
    # The lambda closes over fake_time, so reassigning it advances the clock.
    monkeypatch.setattr(time, 'time', lambda: fake_time)
    with Progress(10) as p:
        fake_time = 10.
    assert p.elapsed_seconds() == 9.
def test_progress_calculation(self):
    """``progress`` reflects the fraction of completed steps."""
    with Progress(10) as p:
        assert p.progress == 0.
        steps_done = 0
        while steps_done < 5:
            p.step()
            steps_done += 1
        assert p.progress == 0.5
        p.step(5)
        assert p.progress == 1.
def test_elapsed_seconds(self, monkeypatch):
    """Elapsed time spans from context entry to context exit."""
    fake_time = 1.0
    # Closure over fake_time lets the test advance the patched clock.
    monkeypatch.setattr(time, "time", lambda: fake_time)
    with Progress(max_steps=10) as p:
        fake_time = 10.0
    assert p.elapsed_seconds() == 9.0
def __init__(self, network, dt=0.001, seed=None, model=None, progress_bar=True, optimize=True): self.closed = True # Start closed in case constructor raises exception self.progress_bar = progress_bar self.optimize = optimize if model is None: self.model = Model( dt=float(dt), label="%s, dt=%f" % (network, dt), decoder_cache=get_default_decoder_cache(), ) else: self.model = model pt = ProgressTracker(progress_bar, Progress("Building", "Build")) with pt: if network is not None: # Build the network into the model self.model.build(network, progress=pt.next_stage("Building", "Build")) # Order the steps (they are made in `Simulator.reset`) self.dg = operator_dependency_graph(self.model.operators) if optimize: with pt.next_stage("Building (running optimizer)", "Optimization"): opmerge_optimize(self.model, self.dg) self._step_order = [ op for op in toposort(self.dg) if hasattr(op, "make_step") ] # -- map from Signal.base -> ndarray self.signals = SignalDict() for op in self.model.operators: op.init_signals(self.signals) # Add built states to the raw simulation data dictionary self._sim_data = self.model.params # Provide a nicer interface to simulation data self.data = SimulationData(self._sim_data) if seed is None: if network is not None and network.seed is not None: seed = network.seed + 1 else: seed = np.random.randint(npext.maxint) self.closed = False self.reset(seed=seed)
def test_unknown_number_of_steps(self, monkeypatch):
    """Without ``max_steps``, progress and ETA are indeterminate."""
    frozen_time = 1.0
    monkeypatch.setattr(time, "time", lambda: frozen_time)
    with Progress() as p:
        p.step()
        frozen_time = 10.0
    assert p.progress == 0.0
    assert p.eta() == -1
    assert p.n_steps == 1
    assert p.elapsed_seconds() == 9.0
def test_write_progress_to_file(tmpdir):
    """Tests the WriteProgressToFile progress bar type"""

    def assert_file_starts_with(path, prefix):
        with open(path, "r") as fh:
            assert fh.read().startswith(prefix)

    filename = str(tmpdir.join("test_write_progress_file.txt"))
    progress = Progress(name_during="myprog", max_steps=2)
    bar = WriteProgressToFile(filename)

    with progress:
        bar.update(progress)
        assert_file_starts_with(filename, "0%, ETA")
        # Each step rewrites the file with the new percentage.
        for prefix in ("50%, ETA", "100%, ETA"):
            progress.step()
            bar.update(progress)
            assert_file_starts_with(filename, prefix)
    # After the context closes, the bar reports completion instead.
    bar.update(progress)
    assert_file_starts_with(filename, "myprog finished in")
def run_steps(self, N, progress_bar=True):
    """Run the simulator for ``N`` steps, draining probe buffers in batches.

    Parameters
    ----------
    N : int
        Number of simulation steps to execute.
    progress_bar : bool or progress-bar object, optional
        Progress display; None defers to ``self.progress_bar``.

    Raises
    ------
    SimulatorClosed
        If the simulator has been closed.
    ValueError
        If the total step count would exceed ``2**24`` (float32 limit).
    """
    if self.closed:
        raise SimulatorClosed("Simulator cannot run because it is closed.")
    if self.n_steps + N >= 2**24:
        # since n_steps is float32, point at which `n_steps == n_steps + 1`
        raise ValueError("Cannot handle more than 2**24 steps")
    if self._cl_probe_plan is not None:
        # -- precondition: the probe buffers have been drained
        bufpositions = self._cl_probe_plan.cl_bufpositions.get()
        assert np.all(bufpositions == 0)
    if progress_bar is None:
        progress_bar = self.progress_bar
    # NOTE(review): the nested try/except appears to paper over multiple
    # ProgressTracker signatures from different library versions — keep the
    # fallback order intact; verify against the versions actually supported.
    try:
        progress = ProgressTracker(
            progress_bar, Progress("Simulating", "Simulation", N))
    except TypeError:
        try:
            progress = ProgressTracker(N, progress_bar, "Simulating")
        except TypeError:
            progress = ProgressTracker(N, progress_bar)
    with progress:
        # -- we will go through N steps of the simulator
        # in groups of up to B at a time, draining
        # the probe buffers after each group of B
        while N:
            B = min(N, self._max_steps_between_probes)
            self._plans.call_n_times(B)
            self._probe()
            N -= B
            # Newer trackers expose total_progress; older ones step directly.
            if hasattr(progress, 'total_progress'):
                progress.total_progress.step(n=B)
            else:
                progress.step(n=B)

    if self.profiling > 1:
        self.print_profiling()
def test_eta(self):
    """``eta`` is -1 before the first step and non-negative afterwards."""
    with Progress(10) as p:
        # No steps taken yet, so no estimate is available.
        assert p.eta() == -1
        p.step()
        assert p.eta() >= 0.
def test_finished_property(self):
    """``finished`` stays False while open and becomes True on exit."""
    progress = Progress(10)
    with progress:
        assert not progress.finished
        progress.step(5)
        assert not progress.finished
    # Leaving the context marks the progress as finished.
    assert progress.finished
def build_network(model, network, progress=None):
    """Builds a `.Network` object into a model.

    The network builder does this by mapping each high-level object to its
    associated signals and operators one-by-one, in the following order:

    1. Ensembles, nodes, neurons
    2. Subnetworks (recursively)
    3. Connections, learning rules
    4. Probes

    Before calling any of the individual objects' build functions, random
    number seeds are assigned to objects that did not have a seed explicitly
    set by the user. Whether the seed was assigned manually or automatically
    is tracked, and the decoder cache is only used when the seed is assigned
    manually.

    Parameters
    ----------
    model : Model
        The model to build into.
    network : Network
        The network to build.
    progress : Progress, optional
        Object used to track the build progress.
        Note that this will only affect top-level networks.

    Notes
    -----
    Sets ``model.params[network]`` to ``None``.
    """
    # The first call establishes the top-level network; recursive calls for
    # subnetworks see model.toplevel already set.
    if model.toplevel is None:
        model.toplevel = network
        seed_network(network, seeds=model.seeds, seeded=model.seeded)

        if progress is not None:
            # number of sub-objects, plus 1 to account for this network
            progress.max_steps = len(network.all_objects) + 1

            def build_callback(obj):
                # Only direct network objects count toward the progress bar.
                if isinstance(obj, tuple(network.objects)):
                    progress.step()

            model.build_callback = build_callback

    if progress is None:
        progress = Progress()  # dummy progress

    # Set config
    old_config = model.config
    model.config = network.config

    # If this is the toplevel network, enter the decoder cache
    context = (model.decoder_cache if model.toplevel is network
               else nullcontext())
    with context, progress:
        logger.debug("Network step 1: Building ensembles and nodes")
        for obj in network.ensembles + network.nodes:
            model.build(obj)

        logger.debug("Network step 2: Building subnetworks")
        for subnetwork in network.networks:
            model.build(subnetwork)

        logger.debug("Network step 3: Building connections")
        for conn in network.connections:
            # NB: we do these in the order in which they're defined, and build
            # the learning rule in the connection builder. Because learning
            # rules are attached to connections, the connection that contains
            # the learning rule (and the learning rule) are always built
            # *before* a connection that attaches to that learning rule.
            # Therefore, we don't have to worry about connection ordering here.
            # TODO: Except perhaps if the connection being learned
            # is in a subnetwork?
            model.build(conn)

        logger.debug("Network step 4: Building probes")
        for probe in network.probes:
            model.build(probe)

        # Shrink the cache only when we own it (top-level build).
        if context is model.decoder_cache:
            model.decoder_cache.shrink()

        if model.toplevel is network:
            # Final step accounts for the network itself.
            progress.step()
            model.build_callback = None

    # Unset config
    model.config = old_config

    model.params[network] = None
def build_network(model, network, progress=None):
    """Builds a `.Network` object into a model.

    The network builder does this by mapping each high-level object to its
    associated signals and operators one-by-one, in the following order:

    1. Ensembles, nodes, neurons
    2. Subnetworks (recursively)
    3. Connections, learning rules
    4. Probes

    Before calling any of the individual objects' build functions, random
    number seeds are assigned to objects that did not have a seed explicitly
    set by the user. Whether the seed was assigned manually or automatically
    is tracked, and the decoder cache is only used when the seed is assigned
    manually.

    Parameters
    ----------
    model : Model
        The model to build into.
    network : Network
        The network to build.
    progress : Progress, optional
        Object used to track the build progress.
        Note that this will only affect top-level networks.

    Notes
    -----
    Sets ``model.params[network]`` to ``None``.
    """
    def get_seed(obj, rng):
        # Generate a seed no matter what, so that setting a seed or not on
        # one object doesn't affect the seeds of other objects.
        seed = rng.randint(npext.maxint)
        return (seed if not hasattr(obj, 'seed') or obj.seed is None
                else obj.seed)

    # The first call establishes the top-level network and its seed;
    # recursive subnetwork calls skip this branch.
    if model.toplevel is None:
        model.toplevel = network
        model.seeds[network] = get_seed(network, np.random)
        model.seeded[network] = getattr(network, 'seed', None) is not None

    max_steps = len(network.all_objects) + 1  # +1 for top level network
    if progress is not None:
        progress.max_steps = max_steps

        def build_callback(obj):
            # Only direct network objects count toward the progress bar.
            if isinstance(obj, tuple(network.objects)):
                progress.step()

        model.build_callback = build_callback
    if progress is None:
        progress = Progress()  # dummy progress

    # Set config
    old_config = model.config
    model.config = network.config

    # assign seeds to children
    rng = np.random.RandomState(model.seeds[network])
    # Put probes last so that they don't influence other seeds
    sorted_types = (Connection, Ensemble, Network, Node, Probe)
    assert all(tp in sorted_types for tp in network.objects)
    for obj_type in sorted_types:
        for obj in network.objects[obj_type]:
            # A child counts as seeded if its parent was, or it has its own.
            model.seeded[obj] = (model.seeded[network] or
                                 getattr(obj, 'seed', None) is not None)
            model.seeds[obj] = get_seed(obj, rng)

    # If this is the toplevel network, enter the decoder cache
    context = (model.decoder_cache if model.toplevel is network
               else nullcontext())
    with context, progress:
        logger.debug("Network step 1: Building ensembles and nodes")
        for obj in network.ensembles + network.nodes:
            model.build(obj)

        logger.debug("Network step 2: Building subnetworks")
        for subnetwork in network.networks:
            model.build(subnetwork)

        logger.debug("Network step 3: Building connections")
        for conn in network.connections:
            # NB: we do these in the order in which they're defined, and build
            # the learning rule in the connection builder. Because learning
            # rules are attached to connections, the connection that contains
            # the learning rule (and the learning rule) are always built
            # *before* a connection that attaches to that learning rule.
            # Therefore, we don't have to worry about connection ordering here.
            # TODO: Except perhaps if the connection being learned
            # is in a subnetwork?
            model.build(conn)

        logger.debug("Network step 4: Building probes")
        for probe in network.probes:
            model.build(probe)

        # Shrink the cache only when we own it (top-level build).
        if context is model.decoder_cache:
            model.decoder_cache.shrink()

        if model.toplevel is network:
            # Final step accounts for the network itself.
            progress.step()
            model.build_callback = None

    # Unset config
    model.config = old_config

    model.params[network] = None