Example #1
0
    def test_normalizing(self, _model_2, _config):
        """Verify that parameter normalization does not change accuracy.

        Clones ``_model_2``, parses the clone into the toolbox's internal
        representation, normalizes its parameters, and asserts that the
        accuracy of the normalized parsed model matches the accuracy of the
        original model on the test set.
        """

        # Parsing removes BatchNorm layers, so we make a copy of the model.
        input_model = keras.models.clone_model(_model_2)
        input_model.set_weights(_model_2.get_weights())
        # NOTE(review): passing `_model_2.metrics` (metric objects) straight
        # back into `compile` assumes the Keras version accepts them as-is —
        # confirm against the pinned Keras release.
        input_model.compile(_model_2.optimizer.__class__.__name__,
                            _model_2.loss, _model_2.metrics)

        num_to_test = 10000
        batch_size = 100
        # Config values are stored as strings by configparser.
        _config.set('simulation', 'batch_size', str(batch_size))
        _config.set('simulation', 'num_to_test', str(num_to_test))

        normset, testset = get_dataset(_config)
        x_test = testset['x_test']
        y_test = testset['y_test']
        x_norm = normset['x_norm']

        # Resolve the parser module matching the configured input library.
        model_lib = import_module('snntoolbox.parsing.model_libs.' +
                                  _config.get('input', 'model_lib') +
                                  '_input_lib')
        model_parser = model_lib.ModelParser(input_model, _config)
        model_parser.parse()
        parsed_model = model_parser.build_parsed_model()

        normalize_parameters(parsed_model, _config, x_norm=x_norm)

        _, acc, _ = model_parser.evaluate(batch_size, num_to_test, x_test,
                                          y_test)
        _, target_acc = _model_2.evaluate(x_test, y_test, batch_size)
        # Accuracies are floats; exact `==` is brittle under floating-point
        # evaluation, so compare within a tolerance far below the smallest
        # meaningful accuracy step (1 / num_to_test = 1e-4).
        assert abs(acc - target_acc) < 1e-9
Example #2
0
def _parsed_model(_config, _normset):
    """Return a parsed model with normalized parameters.

    Loads the single input model/library pair, parses the ANN into the
    toolbox's internal representation, normalizes its parameters with
    ``_normset``, and returns the resulting parsed model.
    """
    from snntoolbox.conversion.utils import normalize_parameters

    model_and_lib = _input_model_and_lib_single(_config, _ml[0])
    ann = model_and_lib['input_model']['model']
    parser = model_and_lib['model_lib'].ModelParser(ann, _config)
    parser.parse()
    parsed = parser.build_parsed_model()
    normalize_parameters(parsed, _config, **_normset)
    return parsed
Example #3
0
    def test_normalizing(self, _input_model_and_lib, _normset, _testset,
                         _config):
        """Check that the normalized parsed model reaches the target accuracy."""
        from snntoolbox.conversion.utils import normalize_parameters

        # Normalization is independent of the input library, so only the
        # Keras variant needs to be exercised; skip all others.
        if 'keras' not in _input_model_and_lib['model_lib'].__name__:
            return

        parser = _input_model_and_lib['model_lib'].ModelParser(
            _input_model_and_lib['input_model']['model'], _config)
        parser.parse()
        parsed = parser.build_parsed_model()
        normalize_parameters(parsed, _config, **_normset)

        n_batch = _config.getint('simulation', 'batch_size')
        n_test = _config.getint('simulation', 'num_to_test')
        score = parser.evaluate(n_batch, n_test, **_testset)
        target = _input_model_and_lib['target_acc']

        # score[1] is the accuracy fraction; compare as a percentage.
        assert round(100 * score[1], 2) >= target
Example #4
0
def run_pipeline(config, queue=None):
    """Convert an analog network to a spiking network and simulate it.

    Complete pipeline of
        1. loading and testing a pretrained ANN,
        2. normalizing parameters
        3. converting it to SNN,
        4. running it on a simulator,
        5. given a specified hyperparameter range ``params``,
           repeat simulations with modified parameters.

    Parameters
    ----------

    config: configparser.ConfigParser
        ConfigParser containing the user settings.

    queue: Optional[Queue.Queue]
        Results are added to the queue to be displayed in the GUI.

    Returns
    -------

    results: list
        List of the accuracies obtained after simulating with each parameter
        value in config.get('parameter_sweep', 'param_values').
        Returns ``None`` if the simulation stage is disabled or skipped, or
        if no parsed model could be obtained.
    """

    from snntoolbox.datasets.utils import get_dataset
    from snntoolbox.conversion.utils import normalize_parameters

    num_to_test = config.getint('simulation', 'num_to_test')

    # Instantiate an empty spiking network
    target_sim = import_target_sim(config)
    spiking_model = target_sim.SNN(config, queue)

    # ___________________________ LOAD DATASET ______________________________ #

    normset, testset = get_dataset(config)

    parsed_model = None
    if config.getboolean('tools', 'parse') and not is_stop(queue):

        # __________________________ LOAD MODEL _____________________________ #

        # Resolve the parser module matching the configured input library.
        model_lib = import_module('snntoolbox.parsing.model_libs.' +
                                  config.get('input', 'model_lib') +
                                  '_input_lib')
        input_model = model_lib.load(config.get('paths', 'path_wd'),
                                     config.get('paths', 'filename_ann'))

        # Evaluate input model.
        if config.getboolean('tools', 'evaluate_ann') and not is_stop(queue):
            print(
                "Evaluating input model on {} samples...".format(num_to_test))
            model_lib.evaluate(input_model['val_fn'],
                               config.getint('simulation', 'batch_size'),
                               num_to_test, **testset)

        # ____________________________ PARSE ________________________________ #

        print("Parsing input model...")
        model_parser = model_lib.ModelParser(input_model['model'], config)
        model_parser.parse()
        parsed_model = model_parser.build_parsed_model()

        # ___________________________ NORMALIZE _____________________________ #

        if config.getboolean('tools', 'normalize') and not is_stop(queue):
            normalize_parameters(parsed_model, config, **normset)

        # Evaluate parsed model.
        if config.getboolean('tools', 'evaluate_ann') and not is_stop(queue):
            print(
                "Evaluating parsed model on {} samples...".format(num_to_test))
            model_parser.evaluate(config.getint('simulation', 'batch_size'),
                                  num_to_test, **testset)

        # Write parsed model to disk
        parsed_model.save(
            str(
                os.path.join(
                    config.get('paths', 'path_wd'),
                    config.get('paths', 'filename_parsed_model') + '.h5')))

    # _____________________________ CONVERT _________________________________ #

    if config.getboolean('tools', 'convert') and not is_stop(queue):
        if parsed_model is None:
            # Parsing was skipped; try to load a previously saved parsed
            # model from disk instead.
            from snntoolbox.parsing.model_libs.keras_input_lib import load
            try:
                parsed_model = load(config.get('paths', 'path_wd'),
                                    config.get('paths',
                                               'filename_parsed_model'),
                                    filepath_custom_objects=config.get(
                                        'paths',
                                        'filepath_custom_objects'))['model']
            except FileNotFoundError:
                print("Could not find parsed model {} in path {}. Consider "
                      "setting `parse = True` in your config file.".format(
                          config.get('paths', 'path_wd'),
                          config.get('paths', 'filename_parsed_model')))
                # Bug fix: previously execution fell through here with
                # parsed_model still None, crashing in spiking_model.build.
                # Abort the pipeline instead; the message above explains why.
                return

        spiking_model.build(parsed_model, **testset)

        # Export network in a format specific to the simulator with which it
        # will be tested later.
        spiking_model.save(config.get('paths', 'path_wd'),
                           config.get('paths', 'filename_snn'))

    # ______________________________ SIMULATE _______________________________ #

    if config.getboolean('tools', 'simulate') and not is_stop(queue):

        # Decorate the 'run' function of the spiking model with a parameter
        # sweep function.
        @run_parameter_sweep(config, queue)
        def run(snn, **test_set):
            return snn.run(**test_set)

        # Simulate network
        results = run(spiking_model, **testset)

        # Clean up
        spiking_model.end_sim()

        # Add results to queue to be displayed in GUI.
        if queue:
            queue.put(results)

        return results
Example #5
0
                         batch_size=1,
                         num_to_test=50,
                         x_test=testX[:50],
                         y_test=testY[:50])

# Load and validate the user configuration, then point all working paths at
# the directory containing the pretrained model.
config = update_setup(config_path)
config.set("paths", "path_wd", os.path.dirname(model_path))
config.set("paths", "dataset_path", os.path.dirname(model_path))
config.set("paths", "filename_ann", os.path.basename(model_path))
# Parse the loaded ANN into the toolbox's internal representation.
model_parser = model_lib.ModelParser(input_model['model'], config)
model_parser.parse()
parsed_model = model_parser.build_parsed_model()

# Normalize
# The full test set doubles as the normalization set here.
norm_data = {'x_norm': testX}
normalize_parameters(parsed_model, config, **norm_data)

# Evaluate the normalized parsed model on the first 50 test samples.
score_norm = model_parser.evaluate(batch_size=1,
                                   num_to_test=50,
                                   x_test=testX[:50],
                                   y_test=testY[:50])

print("score norm = {}".format(score_norm))
parsed_model.save(os.path.join(dir_name, "parsed_model.h5"))
# convert

# Build a spiking network for the Brian2 backend from the parsed model and
# export it to disk.
target_sim = import_module(
    'snntoolbox.simulation.target_simulators.brian2_target_sim')
spiking_model = target_sim.SNN(config)
# NOTE(review): unlike run_pipeline, no test set is passed to build() here —
# confirm the Brian2 SNN.build signature accepts this.
spiking_model.build(parsed_model)
spiking_model.save(dir_name, "spike_snn")