def run_all_on_path(in_path, simulator='INI'):
    """Run the SNN-toolbox conversion on every candidate model in *in_path*.

    A candidate is any ``.h5`` file in *in_path* whose filename contains
    neither ``'parsed'`` nor the simulator name.  For each candidate a
    config is generated, ``main`` is invoked, its runtime is recorded, and
    the produced ``log`` directory is renamed after the candidate.  A
    running ETA for the remaining candidates is printed after each run.

    Parameters
    ----------
    in_path : str
        Directory containing the candidate ``.h5`` models and the
        normalization data ``x_norm.npz``.
    simulator : str, optional
        SNN-toolbox simulator backend name (default ``'INI'``).

    Raises
    ------
    FileNotFoundError
        If ``x_norm.npz`` is missing from *in_path*.
    """
    # Explicit check instead of ``assert``: asserts are stripped under
    # ``python -O``, silently disabling the validation.
    if not os.path.exists(os.path.join(in_path, 'x_norm.npz')):
        raise FileNotFoundError('Could not find x_norm.npz on path')

    candidates = [
        filename for filename in os.listdir(in_path)
        if filename.endswith('.h5')
        and 'parsed' not in filename
        and simulator not in filename
    ]
    print('Found a total of {} candidates.'.format(len(candidates)))

    durations = []
    for i, candidate in enumerate(candidates):
        # Strip the extension (everything from the first '.h5' onwards).
        candidate = candidate.split('.h5')[0]
        print('Running candidate {}'.format(candidate))
        config = generate_snn_config(in_path, candidate, simulator=simulator)

        start_time = time.time()
        main(config)
        duration = round(time.time() - start_time, 2)
        durations.append(duration)
        print('Duration for candidate {}: {}'.format(candidate, duration))

        # Predict remaining runtime from the mean duration observed so far.
        mean_duration = sum(durations) / len(durations)
        n_left = len(candidates) - (i + 1)  # count, no throwaway slice
        pred_duration = mean_duration * n_left
        print('Predicted duration for remaining candidates: {}'.format(
            datetime.timedelta(seconds=pred_duration)))

        # Rename the log dir after the candidate so the next run starts
        # with a fresh ``log`` directory.
        shutil.move(os.path.join(in_path, 'log'),
                    os.path.join(in_path, candidate))
def setup():
    """Convert the ANN, export compile flags, and run the testbench makefile.

    Runs the SNN-toolbox conversion configured by ``tb/snn_toolbox.ini``,
    reads the spiking threshold stored in the resulting ``*_INI.h5`` model,
    appends ``-Psnn.THRESHOLD`` and ``-Psnn.NUM_NODES`` to the
    ``COMPILE_ARGS`` environment variable, and invokes ``make`` on
    ``tb/tb.mk``.
    """
    config = configparser.ConfigParser()
    config.read("tb/snn_toolbox.ini")
    main("tb/snn_toolbox.ini")

    h5_file = config["paths"]["filename_ann"] + "_INI.h5"
    # Read everything we need while the file is still open: the original
    # code accessed ``h5`` again *after* the ``with`` block, i.e. on a
    # closed h5py.File, which raises at runtime.
    with h5py.File(h5_file, 'r') as h5:
        # Takes the first threshold and runs with it.
        v_thresh = h5["model_weights"]["0Dense_4"]["v_thresh:0"].shape
        if not v_thresh:
            # No threshold stored: fall back to a default of 5.
            param_str = " -Psnn.THRESHOLD=5"
        else:
            # NOTE(review): the original read dataset "0Dense_2" here while
            # the check above used "0Dense_4" — unified on "0Dense_4";
            # confirm against the model layout.  ``shape[0]`` is an int and
            # must be stringified (the original's str + int concatenation
            # raised TypeError on this path).
            param_str = " -Psnn.THRESHOLD=" + str(v_thresh[0])
    param_str += " -Psnn.NUM_NODES=4"

    # Append to COMPILE_ARGS if present, otherwise create it.
    if "COMPILE_ARGS" in os.environ:
        os.environ["COMPILE_ARGS"] += param_str
    else:
        os.environ["COMPILE_ARGS"] = param_str

    print(os.getcwd())
    run_make = "make -f " + os.getcwd() + "/tb/tb.mk"
    # shell=True is required here because run_make is a single command
    # string; stderr is folded into stdout for a unified build log.
    subprocess.run(run_make, shell=True, stderr=subprocess.STDOUT)
'simulator': 'brian2',  # Chooses execution backend of SNN toolbox.
'duration': 50,         # Number of time steps to run each sample.
'num_to_test': 5,       # How many test samples to run.
'batch_size': 1,        # Batch size for simulation.
'dt': 0.1               # Time resolution for ODE solving.
}

config['input'] = {
    'poisson_input': False  # Images are encoded as spike trains.
}

config['output'] = {
    'plot_vars': {  # Various plots (slows down simulation).
        'spiketrains',  # Leave section empty to turn off plots.
        'spikerates',
        'activations',
        'correlation',
        'v_mem',
        'error_t'}
}

# Store config file.
config_filepath = os.path.join(path_wd, 'config')
with open(config_filepath, 'w') as configfile:
    config.write(configfile)

# RUN SNN TOOLBOX #
###################

main(config_filepath)
""" Script to run a pre-trained neural network for anomaly detection on Loihi, using SNN toolbox """ import argparse from snntoolbox.bin.run import main if __name__ == "__main__": parser = argparse.ArgumentParser() config_filepath = 'conversion_config_loihi.txt' parser.add_argument("--model", help="Path to model with weights in h5 format", action='store', dest='model', default=config_filepath) args = parser.parse_args() main(args.model)