Example #1
def test_block_update():
    params, model_data = utility.get_random_model_data()
    params['nodal_set'] = gop_operators.get_default_nodal_set()
    params['pool_set'] = gop_operators.get_default_pool_set()
    params['activation_set'] = gop_operators.get_default_activation_set()
    params['convergence_measure'] = random.choice(
        ['train_', 'val_']) + params['convergence_measure']
    block_names = ['gop_0_0', 'gop_1_0', 'bn_0_0', 'bn_1_0', 'output']

    all_op_sets = utility.get_all_operators()

    op_set_indices = {}
    for layer_name in model_data['op_sets'].keys():
        op_set_indices[layer_name] = all_op_sets.index(
            model_data['op_sets'][layer_name])

    _, _, new_weights = gop_utils.block_update(
        model_data['topology'], op_set_indices, model_data['weights'], params,
        block_names, utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS], utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS], utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS])

    # Layers outside the updated blocks should keep their original weights.
    for layer_name in new_weights.keys():
        if layer_name not in block_names:
            assert np.allclose(new_weights[layer_name][0],
                               model_data['weights'][layer_name][0])
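
All of the examples on this page are snippets from the same test module and rely on shared module-level names that are not shown: the utility test-helper module, the gop_utils and gop_operators modules, and the INPUT_DIM, OUTPUT_DIM, BATCH_SIZE and STEPS constants. A minimal sketch of that shared setup follows; the import paths and the concrete constant values are assumptions for illustration, not the library's actual test fixtures.

import os
import random
import shutil

import dill
import numpy as np

import gop_operators  # assumed import path
import gop_utils      # assumed import path
import utility        # assumed local helper providing get_random_model_data,
                      # get_all_operators and get_generator

INPUT_DIM = 10     # feature dimension of the generated data (assumed value)
OUTPUT_DIM = 3     # number of model outputs (assumed value)
BATCH_SIZE = 32    # samples per generator batch (assumed value)
STEPS = 4          # batches yielded per pass (assumed value)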
Example #2
def test_evaluate():
    params, model_data = utility.get_random_model_data()
    gop_utils.evaluate(model_data=model_data,
                       func=utility.get_generator,
                       data=[INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS],
                       metrics=params['metrics'],
                       special_metrics=params['special_metrics'])
Example #3
def test_finetune():
    params, model_data = utility.get_random_model_data()
    history, performance, data = gop_utils.finetune(
        model_data, params, utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS], utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS], utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS])
Example #4
def test_load(tmpdir):
    params, model_data = utility.get_random_model_data()
    model_data_attributes = [
        'model', 'weights', 'topology', 'op_sets', 'output_activation',
        'use_bias'
    ]
    filename = os.path.join(tmpdir.dirname, 'model_data.pickle')
    with open(filename, 'wb') as fid:
        dill.dump(model_data, fid, recurse=True)

    model_data_recovered = gop_utils.load(filename, model_data_attributes,
                                          model_data['model'])
    assert model_data_recovered['topology'] == model_data['topology']
    assert model_data_recovered['op_sets'] == model_data['op_sets']
    layer_name = random.choice(list(model_data['weights'].keys()))
    assert np.allclose(model_data_recovered['weights'][layer_name][0],
                       model_data['weights'][layer_name][0])
Example #5
def test_network_builder():
    params, model_data = utility.get_random_model_data()

    model = gop_utils.network_builder(
        model_data['topology'],
        model_data['op_sets'],
        input_dropout=params['input_dropout'],
        dropout=params['dropout'],
        regularizer=params['weight_regularizer'],
        constraint=params['weight_constraint'],
        output_activation=model_data['output_activation'],
        use_bias=model_data['use_bias'])

    model.compile(params['optimizer'], params['loss'], params['metrics'])

    output_weights = model.get_layer('output').get_weights()
    # The output kernel may carry a leading singleton axis depending on the layer type.
    if output_weights[0].ndim == 3:
        assert output_weights[0].shape == (1, 50, OUTPUT_DIM)
    else:
        assert output_weights[0].shape == (50, OUTPUT_DIM)
Example #6
def test_block_update_standalone(tmpdir):
    # Start each run from a clean model directory under the pytest tmpdir.
    model_path = os.path.join(tmpdir.dirname, 'test_model')
    if os.path.exists(model_path):
        shutil.rmtree(model_path)
    os.mkdir(model_path)

    params, model_data = utility.get_random_model_data()
    params['tmp_dir'] = tmpdir.dirname
    params['model_name'] = 'test_model'
    params['nodal_set'] = gop_operators.get_default_nodal_set()
    params['pool_set'] = gop_operators.get_default_pool_set()
    params['activation_set'] = gop_operators.get_default_activation_set()
    params['convergence_measure'] = random.choice(
        ['train_', 'val_']) + params['convergence_measure']
    block_names = ['gop_0_0', 'gop_1_0', 'bn_0_0', 'bn_1_0', 'output']

    all_op_sets = utility.get_all_operators()

    op_set_indices = {}
    for layer_name in model_data['op_sets'].keys():
        op_set_indices[layer_name] = all_op_sets.index(
            model_data['op_sets'][layer_name])

    train_states = {
        'topology': model_data['topology'],
        'weights': model_data['weights'],
        'op_set_indices': op_set_indices
    }

    _, _, new_weights = gop_utils.block_update_standalone(
        train_states, params, block_names, utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS], utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS], utility.get_generator,
        [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS])

    # Layers outside the updated blocks should keep their original weights.
    for layer_name in new_weights.keys():
        if layer_name not in block_names:
            assert np.allclose(new_weights[layer_name][0],
                               model_data['weights'][layer_name][0])

    shutil.rmtree(model_path)
Example #7
def test_network_trainer():
    params, model_data = utility.get_random_model_data()

    model = gop_utils.network_builder(
        model_data['topology'],
        model_data['op_sets'],
        input_dropout=params['input_dropout'],
        dropout=params['dropout'],
        regularizer=params['weight_regularizer'],
        constraint=params['weight_constraint'],
        output_activation=model_data['output_activation'],
        use_bias=model_data['use_bias'])

    model.compile(params['optimizer'], params['loss'], params['metrics'])

    convergence_measure = random.choice(
        ['train_', 'val_']) + params['convergence_measure']

    measure, history, weights = gop_utils.network_trainer(
        model,
        direction=params['direction'],
        convergence_measure=convergence_measure,
        LR=params['lr_finetune'],
        SC=params['epoch_finetune'],
        optimizer=params['optimizer'],
        optimizer_parameters=params['optimizer_parameters'],
        loss=params['loss'],
        metrics=params['metrics'],
        special_metrics=params['special_metrics'],
        train_func=utility.get_generator,
        train_data=[INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS],
        val_func=utility.get_generator,
        val_data=[INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS],
        test_func=utility.get_generator,
        test_data=[INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS],
        class_weight=params['class_weight'])
Example #8
def test_predict():
    params, model_data = utility.get_random_model_data()
    # No targets are needed for prediction, so the output dimension is passed as None.
    predictions = gop_utils.predict(model_data,
                                    func=utility.get_generator,
                                    data=[INPUT_DIM, None, BATCH_SIZE, STEPS])
    assert predictions.shape == (BATCH_SIZE * STEPS, OUTPUT_DIM)
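
Every example passes utility.get_generator together with a [INPUT_DIM, OUTPUT_DIM, BATCH_SIZE, STEPS] list as its data-feeding pair. A plausible shape for that helper, assuming gop_utils calls it with the list as a single argument and expects back a batch generator plus a step count, is sketched below; the calling convention and the yield format are assumptions inferred from how the snippets use it (note that test_predict passes None for the output dimension).

import numpy as np

def get_generator(data):
    # data is assumed to be [input_dim, output_dim, batch_size, steps]
    input_dim, output_dim, batch_size, steps = data

    def gen():
        while True:
            x = np.random.rand(batch_size, input_dim)
            if output_dim is None:
                yield x                                          # prediction: inputs only
            else:
                yield x, np.random.rand(batch_size, output_dim)  # (inputs, targets) pair

    return gen(), steps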