import os

import pytest

from snntoolbox.bin.utils import update_setup
from snntoolbox.utils.utils import import_configparser


def _config(_path_wd, _datapath):
    path_wd = str(_path_wd)
    datapath = str(_datapath)
    filename_ann = 'mnist_cnn'

    configparser = import_configparser()
    config = configparser.ConfigParser()
    config.read_dict({
        'paths': {
            'path_wd': path_wd,
            'dataset_path': datapath,
            'filename_ann': filename_ann
        }
    })

    # Touch an empty placeholder model file so update_setup finds it.
    with open(os.path.join(path_wd, filename_ann + '.h5'), 'w'):
        pass

    config_filepath = os.path.join(path_wd, 'config')
    with open(config_filepath, 'w') as configfile:
        config.write(configfile)

    config = update_setup(config_filepath)
    return config
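
# A minimal sketch of how the helper above might be wired into pytest,
# assuming pytest's built-in tmp_path fixture; the fixture name `config`
# below is hypothetical and not part of the original test suite.
@pytest.fixture
def config(tmp_path):
    # Reuse one disposable directory for both the working directory and
    # the dataset path, so all generated files land in one place.
    return _config(tmp_path, tmp_path)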
def mk_config(path, fold):
    """Create a config file given the path to a trained model."""
    configparser = import_configparser()
    config = configparser.ConfigParser()

    config['paths'] = {
        'path_wd': os.path.join(path, fold),       # Path to model.
        'dataset_path': os.path.join(path, fold),  # Path to dataset.
        'filename_ann': "ts" + fold                # Name of input model.
    }

    config['tools'] = {
        'evaluate_ann': True,   # Test ANN on dataset before conversion.
        'normalize': True,      # Normalize weights for full dynamic range.
        'simulate': True        # Simulate model; seems to be necessary for normalization.
    }

    config['simulation'] = {
        'simulator': 'nest',    # Convert to a pyNN model with NEST as backend.
        'duration': 50,         # Number of time steps to run each sample.
        'num_to_test': 1,       # How many test samples to run.
        'batch_size': 1,        # Batch size for simulation.
        'dt': 0.1               # Simulation time step.
    }

    config['input'] = {
        'poisson_input': True,  # Images are encoded as spike trains.
        'input_rate': 1000      # Poisson neuron firing rate.
    }

    config['cell'] = {
        'v_thresh': 0.01,       # Voltage threshold; differs for the actual SCNN.
        'tau_refrac': 0.1,      # Refractory period.
        'delay': 0.1            # Synaptic delay.
    }

    return config
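
# Illustrative usage sketch; the base path and fold name are placeholder
# values, and persisting the config mirrors the pattern of the other
# helpers in this file.
config = mk_config('/tmp/models', '0')
with open(os.path.join('/tmp/models', '0', 'config'), 'w') as configfile:
    config.write(configfile)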
def generate_snn_config(path_wd, model_name, simulator='INI'):
    # SNN TOOLBOX CONFIGURATION #
    #############################
    # Create a config file with the experimental setup for SNN Toolbox.
    configparser = import_configparser()
    config = configparser.ConfigParser()

    config['paths'] = {
        'path_wd': path_wd,         # Path to model.
        'dataset_path': path_wd,    # Path to dataset.
        'filename_ann': model_name  # Name of input model.
    }

    config['tools'] = {
        'evaluate_ann': True,       # Test ANN on dataset before conversion.
        'normalize': True,          # Normalize weights for full dynamic range.
    }

    config['simulation'] = {
        'simulator': simulator,     # Chooses execution backend of SNN Toolbox.
        'duration': 2000,           # Number of time steps to run each sample.
        'num_to_test': 10000,       # How many test samples to run.
        'batch_size': 256,          # Batch size for simulation.
        'keras_backend': 'tensorflow'
    }

    config['output'] = {
        # Log desired variables to disk.
        'log_vars': {'synaptic_operations_b_t', 'neuron_operations_b_t'},
        # {'spiketrains_n_b_l_t', 'synaptic_operations_b_t',
        #  'neuron_operations_b_t'},
        # 'plot_vars': {      # Various plots (slows down simulation).
        #     'spiketrains',  # Leave section empty to turn off plots.
        #     'spikerates',
        #     'activations',
        #     'correlation',
        #     'v_mem',
        #     'error_t'},
        'verbose': 0
    }

    if simulator == 'loihi':
        config['loihi'] = loihi_config_dict
        # Loihi does not support batching.
        config.set('simulation', 'batch_size', '1')

    # Store config file.
    config_filepath = os.path.join(path_wd, model_name) + '.config'
    with open(config_filepath, 'w') as configfile:
        config.write(configfile)

    return config_filepath
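
# Usage sketch, assuming the toolbox's standard entry point
# snntoolbox.bin.run.main accepts a config file path (as in the official
# SNN Toolbox examples); the working directory here is a placeholder.
from snntoolbox.bin.run import main

config_filepath = generate_snn_config('/tmp/snn_run', 'mnist_cnn',
                                      simulator='INI')
main(config_filepath)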
def test_updating_settings(params, expect_pass, _path_wd):
    configparser = import_configparser()
    config = configparser.ConfigParser()
    config.read_dict(params)

    configpath = os.path.join(str(_path_wd), 'config')
    with open(configpath, 'w') as file:
        config.write(file)

    if expect_pass:
        assert update_setup(configpath)
    else:
        pytest.raises(AssertionError, update_setup, configpath)
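
# A hedged sketch of how the test above could be parametrized; which
# dicts actually pass depends on the checks update_setup performs, so
# both cases below are illustrative assumptions, not known outcomes.
@pytest.mark.parametrize('params, expect_pass', [
    ({'paths': {'filename_ann': 'mnist_cnn'}}, True),
    ({'simulation': {'num_to_test': -1}}, False),
])
def test_updating_settings_sketch(params, expect_pass, tmp_path):
    test_updating_settings(params, expect_pass, tmp_path)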
def load_config(filepath):
    """
    Load a config file from ``filepath``.
    """

    from snntoolbox.utils.utils import import_configparser
    configparser = import_configparser()

    assert os.path.isfile(filepath), \
        "Configuration file not found at {}.".format(filepath)

    config = configparser.ConfigParser()
    config.optionxform = str
    config.read(filepath)

    return config
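
# Usage sketch; the file path is a placeholder, and the section/option
# names follow the 'paths' layout used by the helpers above.
config = load_config('/tmp/snn_run/config')
print(config.get('paths', 'path_wd'))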
model.compile('adam', 'categorical_crossentropy', ['accuracy'])

# Train model with backprop.
model.fit(x_train, y_train, batch_size=64, epochs=1, verbose=2,
          validation_data=(x_test, y_test))

# Store model so SNN Toolbox can find it.
model_name = 'mnist_cnn'
keras.models.save_model(model, os.path.join(path_wd, model_name + '.h5'))

# SNN TOOLBOX CONFIGURATION #
#############################

# Create a config file with the experimental setup for SNN Toolbox.
configparser = import_configparser()
config = configparser.ConfigParser()

config['paths'] = {
    'path_wd': path_wd,         # Path to model.
    'dataset_path': path_wd,    # Path to dataset.
    'filename_ann': model_name  # Name of input model.
}

config['tools'] = {
    'evaluate_ann': True,       # Test ANN on dataset before conversion.
    'normalize': True,          # Normalize weights for full dynamic range.
}

config['simulation'] = {
    'simulator': 'brian2',      # Chooses execution backend of SNN Toolbox.