def test_log_to_console():
    nengo.log(debug=False, path=None)
    assert logging.root.getEffectiveLevel() == logging.WARNING
    assert nengo.utils.logging.console_handler in logging.root.handlers
    n_handlers = len(logging.root.handlers)
    nengo.log(debug=True, path=None)
    assert logging.root.getEffectiveLevel() == logging.DEBUG
    assert len(logging.root.handlers) == n_handlers
    logging.root.handlers.remove(nengo.utils.logging.console_handler)
def test_log_to_console():
    nengo.log(path=None)
    assert logging.root.getEffectiveLevel() == logging.WARNING
    assert nengo.utils.logging.console_handler in logging.root.handlers
    n_handlers = len(logging.root.handlers)
    nengo.log('debug', path=None)
    assert logging.root.getEffectiveLevel() == logging.DEBUG
    assert len(logging.root.handlers) == n_handlers
    logging.root.handlers.remove(nengo.utils.logging.console_handler)
def test_log_to_file(tmpdir):
    tmpfile = str(tmpdir.join("log.txt"))
    nengo.log(path=tmpfile)
    n_handlers = len(logging.root.handlers)
    handler = logging.root.handlers[-1]
    assert logging.root.getEffectiveLevel() == logging.WARNING
    assert isinstance(handler, logging.FileHandler)
    assert handler.baseFilename == tmpfile
    nengo.log('debug', path=tmpfile)
    assert logging.root.getEffectiveLevel() == logging.DEBUG
    assert len(logging.root.handlers) == n_handlers
    logging.root.handlers.remove(handler)
def test_log_to_file(tmpdir):
    tmpfile = str(tmpdir.join("log.txt"))
    nengo.log(debug=False, path=tmpfile)
    n_handlers = len(logging.root.handlers)
    handler = logging.root.handlers[-1]
    assert logging.root.getEffectiveLevel() == logging.WARNING
    assert isinstance(handler, logging.FileHandler)
    assert handler.baseFilename == tmpfile
    nengo.log(debug=True, path=tmpfile)
    assert logging.root.getEffectiveLevel() == logging.DEBUG
    assert len(logging.root.handlers) == n_handlers
    logging.root.handlers.remove(handler)
def test_log_to_console():
    logging.shutdown()
    reload(logging)
    nengo.log(debug=False, path=None)
    assert logging.root.getEffectiveLevel() == logging.WARNING
    assert len(logging.root.handlers) == 1
    handler = logging.root.handlers[0]
    assert isinstance(handler, logging.StreamHandler)
    assert handler.formatter == nengo.utils.logging.console_formatter
    nengo.log(debug=True, path=None)
    assert logging.root.getEffectiveLevel() == logging.DEBUG
    assert len(logging.root.handlers) == 1
def test_log_to_file(tmpdir):
    logging.shutdown()
    reload(logging)
    tmpfile = str(tmpdir.join("log.txt"))
    nengo.log(debug=False, path=tmpfile)
    assert logging.root.getEffectiveLevel() == logging.WARNING
    assert len(logging.root.handlers) == 1
    handler = logging.root.handlers[0]
    assert isinstance(handler, logging.FileHandler)
    assert handler.formatter == nengo.utils.logging.file_formatter
    nengo.log(debug=True, path=tmpfile)
    assert logging.root.getEffectiveLevel() == logging.DEBUG
    assert len(logging.root.handlers) == 1
def test_log_to_file(tmpdir):
    tmpfile = str(tmpdir.join("log.txt"))
    handler = nengo.log(path=tmpfile)
    try:
        n_handlers = len(logging.root.handlers)
        handler = logging.root.handlers[-1]
        assert logging.root.getEffectiveLevel() == logging.WARNING
        assert isinstance(handler, logging.FileHandler)
        assert handler.baseFilename == tmpfile
        nengo.log('debug', path=tmpfile)
        assert logging.root.getEffectiveLevel() == logging.DEBUG
        assert len(logging.root.handlers) == n_handlers
        logging.root.handlers.remove(handler)
    finally:
        handler.close()
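The pattern these tests exercise can also be used directly in a script. The sketch below is assembled only from the calls shown above; the positional 'debug' argument and the path keyword follow the newer tests, while older releases use the debug=True keyword instead, so treat the exact signature as an assumption about the installed nengo version.

import logging
import os
import tempfile

import nengo

# Hypothetical log path; any writable file path would do.
logfile = os.path.join(tempfile.gettempdir(), "nengo_debug.txt")

# Send root-logger output to the file at debug level
# (older releases spell this nengo.log(debug=True, path=logfile)).
nengo.log('debug', path=logfile)
assert logging.root.getEffectiveLevel() == logging.DEBUG

logging.debug("this message is written to %s", logfile)

# Detach and close the file handler afterwards, as the tests above do,
# so later code starts from a clean logging state.
handler = logging.root.handlers[-1]
logging.root.handlers.remove(handler)
handler.close()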
    # returns None
    def none_function(t):
        pass

    model2 = nengo.Network()
    with model2:
        nengo.Node(output=none_function)
    sim = nengo.Simulator(model2)
    sim.run(1)


def test_scalar(Simulator):
    model = nengo.Network()
    with model:
        a = nengo.Node(output=1)
        b = nengo.Ensemble(nengo.LIF(100), dimensions=1)
        nengo.Connection(a, b)
        ap = nengo.Probe(a)
        bp = nengo.Probe(b)
    sim = nengo.Simulator(model)
    sim.run(1)
    assert sim.data[ap].shape == (1000, 1)
    assert sim.data[bp].shape == (1000, 1)


if __name__ == "__main__":
    nengo.log(debug=True)
    pytest.main([__file__, '-v'])
    with product:
        input_A = nengo.Node(output=func_A)
        input_B = nengo.Node(output=func_B)
        nengo.Connection(input_A, product.A)
        nengo.Connection(input_B, product.B)
        p = nengo.Probe(product.output, synapse=pstc)

    sim = Simulator(product, seed=123)
    sim.run(1.0)

    t = sim.trange()
    AB = np.asarray(list(map(func_A, t))) * np.asarray(list(map(func_B, t)))
    delay = 0.011
    offset = np.where(t > delay)[0]

    with Plotter(Simulator) as plt:
        for i in range(dim):
            plt.subplot(dim + 1, 1, i + 1)
            plt.plot(t + delay, AB[:, i], label=r"$A \cdot B$")
            plt.plot(t, sim.data[p][:, i], label="Output")
            plt.legend()
        plt.savefig('test_product.test_sine_waves.pdf')
        plt.close()

    assert rmse(AB[:len(offset), :], sim.data[p][offset, :]) < 0.3


if __name__ == "__main__":
    nengo.log(debug=True)
    pytest.main([__file__, '-v'])
            setattr(vocab, cfg_param, eval(cfg_value))

# ----- Check if data folder exists -----
if not (os.path.isdir(cfg.data_dir) and os.path.exists(cfg.data_dir)):
    raise RuntimeError('Data directory "%s"' % (cfg.data_dir) +
                       ' does not exist. Please ensure the correct path' +
                       ' has been specified.')

# ----- Spaun imports -----
from _spaun.utils import get_total_n_neurons
from _spaun.probes import default_probe_config, default_anim_config
from _spaun.spaun_main import Spaun

# ----- Enable debug logging -----
if args.debug:
    nengo.log('debug')

# ----- Experiment and vocabulary initialization -----
experiment.initialize(args.s, vis_data.get_image_ind,
                      vis_data.get_image_label,
                      cfg.mtr_est_digit_response_time, cfg.rng)
vocab.initialize(experiment.num_learn_actions, cfg.rng)
vocab.initialize_mtr_vocab(mtr_data.dimensions, mtr_data.sps)
vocab.initialize_vis_vocab(vis_data.dimensions, vis_data.sps)

# ----- Configure output log files -----
if cfg.use_mpi:
    sys.path.append('C:\\Users\\xchoo\\GitHub\\nengo_mpi')

    mpi_save = args.mpi_save.split('.')
    mpi_savename = '.'.join(mpi_save[:-1])
import numpy as np

import nengo
from nengo.tests.helpers import SimulatorTestCase, unittest


class TestBasalGanglia(SimulatorTestCase):

    def test_basic(self):
        model = nengo.Model('test_basalganglia_basic')
        with model:
            bg = nengo.networks.BasalGanglia(dimensions=5, label='BG')
            input = nengo.Node([0.8, 0.4, 0.4, 0.4, 0.4], label='input')
            nengo.Connection(input, bg.input)
            p = nengo.Probe(bg.output, 'output')
        sim = self.Simulator(model)
        sim.run(0.2)
        output = np.mean(sim.data(p)[50:], axis=0)
        self.assertGreater(output[0], -0.15)
        self.assertLess(output[1], -0.8)
        self.assertLess(output[2], -0.8)
        self.assertLess(output[3], -0.8)
        self.assertLess(output[4], -0.8)


if __name__ == "__main__":
    nengo.log(debug=True, path='log.txt')
    unittest.main()
def create_spaun_model(n, args, max_probe_time):
    print("\n======================== RUN %i OF %i ========================"
          % (n + 1, args.n))

    # ----- Seeeeeeeed -----
    if args.seed < 0:
        seed = int(time.time())
    else:
        seed = args.seed
    cfg.set_seed(seed)
    print "MODEL SEED: %i" % cfg.seed

    # ----- Model Configurations -----
    vocab.sp_dim = args.d
    cfg.data_dir = args.data_dir

    # Parse --config options
    if args.config is not None:
        print "USING CONFIGURATION OPTIONS: "
        for cfg_options in args.config:
            cfg_opts = cfg_options.split('=')
            cfg_param = cfg_opts[0]
            cfg_value = cfg_opts[1]
            if hasattr(cfg, cfg_param):
                print " * cfg: " + str(cfg_options)
                setattr(cfg, cfg_param, eval(cfg_value))
            elif hasattr(experiment, cfg_param):
                print " * experiment: " + str(cfg_options)
                setattr(experiment, cfg_param, eval(cfg_value))
            elif hasattr(vocab, cfg_param):
                print " * vocab: " + str(cfg_options)
                setattr(vocab, cfg_param, eval(cfg_value))

    # ----- Check if data folder exists -----
    if not (os.path.isdir(cfg.data_dir) and os.path.exists(cfg.data_dir)):
        raise RuntimeError('Data directory "%s"' % (cfg.data_dir) +
                           ' does not exist. Please ensure the correct path' +
                           ' has been specified.')

    # ----- Enable debug logging -----
    if args.debug:
        nengo.log('debug')

    # ----- Experiment and vocabulary initialization -----
    experiment.initialize(args.s, vis_data.get_image_ind,
                          vis_data.get_image_label,
                          cfg.mtr_est_digit_response_time, cfg.rng)
    vocab.initialize(experiment.num_learn_actions, cfg.rng)
    vocab.initialize_mtr_vocab(mtr_data.dimensions, mtr_data.sps)
    vocab.initialize_vis_vocab(vis_data.dimensions, vis_data.sps)

    # ----- Configure output log files -----
    mpi_savename = None
    mpi_saveext = None
    probe_cfg = None
    probe_anim_cfg = None
    anim_probe_data_filename = None

    if cfg.use_mpi:
        sys.path.append('C:\\Users\\xchoo\\GitHub\\nengo_mpi')

        mpi_save = args.mpi_save.split('.')
        mpi_savename = '.'.join(mpi_save[:-1])
        mpi_saveext = mpi_save[-1]

        cfg.probe_data_filename = get_probe_data_filename(mpi_savename,
                                                          suffix=args.tag)
    else:
        cfg.probe_data_filename = get_probe_data_filename(suffix=args.tag)

    # ----- Initialize logger and write header data -----
    logger.initialize(cfg.data_dir, cfg.probe_data_filename[:-4] + '_log.txt')
    cfg.write_header()
    experiment.write_header()
    vocab.write_header()
    logger.flush()

    # ----- Raw stimulus seq -----
    print "RAW STIM SEQ: %s" % (str(experiment.raw_seq_str))

    # ----- Spaun proper -----
    model = Spaun()

    # ----- Display stimulus seq -----
    print "PROCESSED RAW STIM SEQ: %s" % (str(experiment.raw_seq_list))
    print "STIMULUS SEQ: %s" % (str(experiment.stim_seq_list))

    # ----- Calculate runtime -----
    # Note: Moved up here so that we have data to disable probes if necessary
    runtime = args.t if args.t > 0 else experiment.get_est_simtime()

    # ----- Set up probes -----
    make_probes = not args.noprobes

    if runtime > max_probe_time and make_probes:
        print(">>> !!! WARNING !!! EST RUNTIME > %0.2fs - DISABLING PROBES"
              % max_probe_time)
        make_probes = False

    if make_probes:
        print "PROBE FILENAME: %s" % cfg.probe_data_filename
        probe_cfg = default_probe_config(model, vocab, cfg.sim_dt,
                                         cfg.data_dir,
                                         cfg.probe_data_filename)

    # ----- Set up animation probes -----
    if args.showanim or args.showiofig or args.probeio:
        anim_probe_data_filename = cfg.probe_data_filename[:-4] + '_anim.npz'
        print "ANIM PROBE FILENAME: %s" % anim_probe_data_filename
        probe_anim_cfg = default_anim_config(model, vocab, cfg.sim_dt,
                                             cfg.data_dir,
                                             anim_probe_data_filename)

    # ----- Neuron count debug -----
    print "MODEL N_NEURONS: %i" % (get_total_n_neurons(model))
    if hasattr(model, 'vis'):
        print "- vis n_neurons: %i" % (get_total_n_neurons(model.vis))
    if hasattr(model, 'ps'):
        print "- ps n_neurons: %i" % (get_total_n_neurons(model.ps))
    if hasattr(model, 'bg'):
        print "- bg n_neurons: %i" % (get_total_n_neurons(model.bg))
    if hasattr(model, 'thal'):
        print "- thal n_neurons: %i" % (get_total_n_neurons(model.thal))
    if hasattr(model, 'enc'):
        print "- enc n_neurons: %i" % (get_total_n_neurons(model.enc))
    if hasattr(model, 'mem'):
        print "- mem n_neurons: %i" % (get_total_n_neurons(model.mem))
    if hasattr(model, 'trfm'):
        print "- trfm n_neurons: %i" % (get_total_n_neurons(model.trfm))
    if hasattr(model, 'dec'):
        print "- dec n_neurons: %i" % (get_total_n_neurons(model.dec))
    if hasattr(model, 'mtr'):
        print "- mtr n_neurons: %i" % (get_total_n_neurons(model.mtr))

    # ----- Connections count debug -----
    print "MODEL N_CONNECTIONS: %i" % (len(model.all_connections))

    return (model, mpi_savename, mpi_saveext, runtime, make_probes, probe_cfg,
            probe_anim_cfg, anim_probe_data_filename)
from collections import OrderedDict

import matplotlib.pyplot as plt
import numpy as np

import nengo
nengo.log(level='info')

import nengo_extras

from run_core import load_network, SoftLIFRate, round_layer


def hist_dist(hist, edges):
    p = np.zeros(edges.size, dtype=float)
    p[1:-1] = hist[:-1] + hist[1:]
    p /= p.sum()
    return nengo.dists.PDF(edges, p)


def build_layer(layer, inputs, data, hist=None, pt=None):
    assert isinstance(inputs, list)
    assert len(layer.get('inputs', [])) == len(inputs)
    name = layer['name']

    if layer['type'] == 'cost.logreg':
        labels, probs = inputs
        return probs

        # pt = params['presentation_time']
        # nlabels, nprobs = layer['numInputs']
        # assert nlabels == 1
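As an aside, the hist_dist helper above simply wraps a histogram in nengo.dists.PDF so it can be sampled like any other nengo distribution. A hypothetical usage sketch, reusing the hist_dist defined above (the exponential samples and bin count are invented for illustration):

import numpy as np

# Hypothetical data: turn sampled values into a histogram, then into a
# distribution object that can be sampled from again.
values = np.random.exponential(50.0, size=10000)
hist, edges = np.histogram(values, bins=50)

dist = hist_dist(hist, edges)   # wraps nengo.dists.PDF(edges, p), as defined above
samples = dist.sample(1000)     # draw 1000 new samples from the fitted PDF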
        with open(decoder_file) as f:
            dt = 0.001
            synapse = 0.01
            self.decoders = np.asarray(
                [[float(x) / (1 - np.exp(-dt / synapse)) for x in line.split()]
                 for line in f.readlines()])
            # note: dividing by filter to undo the filter that will be applied
            # when the decoders are set

    def __call__(self, A, Y, rng=None):
        return self.decoders, []


SEED = 1
rng = random.Random()
rng.seed(SEED)

nengo.log()

pong_game = PongGame(["computer", "computer"], seed=SEED)

l_rate = 5e-5
discount = 0.97
# d_file = os.path.join("data", "decoders_%s_%s_%s.txt" % (l_rate, discount, SEED))
d_file = os.path.join("data", "decoders_best.txt")
d_solve = decoder_setter(d_file)
# d_solve = nengo.decoders.lstsq_L2nz

with nengo.Network() as net:
    p0 = DecoderPongPlayer(0, pong_game, opposite=True, noise=None)
    p1 = rl_pongplayer.RLPongPlayer(1, pong_game, decoder_solver=d_solve,
                                    l_rate=l_rate, discount=discount, rng=rng)
import logging
import argparse
import numpy as np
import time
import os

import nengo
import nengo_mpi
from nengo_mpi.partition import metis_partitioner, work_balanced_partitioner
from nengo_mpi.partition import random_partitioner, EnsembleArraySplitter

from utils import write_to_csv

logger = logging.getLogger(__name__)
nengo.log(debug=False)

parser = argparse.ArgumentParser(description="A grid network.")

parser.add_argument(
    '--ns', type=int, default=1,
    help='Number of streams in the network.')
parser.add_argument(
    '--sl', type=int, default=1,
    help='Length of each stream.')
parser.add_argument(
    '-d', type=int, default=1,
    help='Number of dimensions in each neural ensemble.')
parser.add_argument(
    '--npd', type=int, default=50,
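Several of the scripts above (the Spaun runners in particular) gate debug logging behind a command-line flag rather than hard-coding it. A minimal, self-contained sketch of that pattern, assuming the string-level form of nengo.log used in those scripts:

import argparse

import nengo

parser = argparse.ArgumentParser(description="Toggle nengo debug logging.")
parser.add_argument('--debug', action='store_true',
                    help='Enable debug-level console logging.')
args = parser.parse_args()

# Warning-level console logging by default; debug level when --debug is given.
if args.debug:
    nengo.log('debug')
else:
    nengo.log()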