Example #1
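For each entry in network_names, this example ranks the input features with get_rank, then retrains a WRN-16-4 on inputs masked to the top 5%, 10%, 25%, and 50% of ranked features and appends the accuracies to a JSON results file. It relies on module-level names (network_names, batch_size, reps, method, lasso, gamma, regularization, rank_kwargs, directory, scheduler) and project helpers (load_dataset, network_models, get_rank) defined elsewhere; a hedged sketch of plausible definitions follows the function.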
def main():

    dataset = load_dataset()

    for network_name in network_names:

        model_func = getattr(network_models, network_name.split('_')[0])

        train_data = np.asarray(dataset['train']['data'])
        train_labels = dataset['train']['label']
        num_classes = len(np.unique(train_labels))

        test_data = np.asarray(dataset['test']['data'])
        test_labels = dataset['test']['label']

        train_labels = to_categorical(train_labels, num_classes=num_classes)
        test_labels = to_categorical(test_labels, num_classes=num_classes)

        epochs = 130 if 'wrn' in network_name else 80

        fit_kwargs = {
            'epochs': epochs,
            'callbacks': [
                callbacks.LearningRateScheduler(scheduler('wrn' in network_name))
            ],
            'verbose': 2
        }

        generator = dataset['generator']
        generator_kwargs = {'batch_size': batch_size}
        fit_kwargs['steps_per_epoch'] = len(train_data) // batch_size

        print('reps : ', reps)
        print('method : ', method)
        name = 'mnist_' + network_name + '_l_' + str(lasso) + '_g_' + str(gamma) + \
               '_r_' + str(regularization)
        print(name)
        model_kwargs = {
            'nclasses': num_classes,
            'lasso': lasso,
            'regularization': regularization
        }
        saliency_kwargs = {'horizontal_flip': True}
        rank = get_rank(method,
                        data=train_data,
                        label=train_labels,
                        model_func=model_func,
                        model_kwargs=model_kwargs,
                        fit_kwargs=fit_kwargs,
                        generator=generator,
                        generator_kwargs=generator_kwargs,
                        rank_kwargs=rank_kwargs,
                        saliency_kwargs=saliency_kwargs)
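        # `rank` is assumed to be a 1-D array of flat feature indices ordered
        # from most to least important, so rank[:k] selects the top-k pixels.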

        nfeats = []
        accuracies = []
        model_kwargs['lasso'] = 0.
        total_features = int(np.prod(train_data.shape[1:]))
        for factor in [.05, .1, .25, .5]:
            n_features = int(total_features * factor)
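            # Build a binary mask over the input shape: 1.0 at the n_features
            # highest-ranked positions, 0.0 elsewhere.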
            mask = np.zeros(train_data.shape[1:])
            mask.flat[rank[:n_features]] = 1.0
            n_accuracies = []
            for r in range(reps):
                print('factor : ', factor, ' , rep : ', r)
                model = network_models.wrn164(train_data.shape[1:],
                                              **model_kwargs)
                model.fit_generator(
                    generator.flow(mask * train_data, train_labels,
                                   **generator_kwargs),
                    steps_per_epoch=train_data.shape[0] // batch_size,
                    epochs=130,
                    callbacks=[
                        callbacks.LearningRateScheduler(scheduler(True))
                    ],
                    validation_data=(mask * test_data, test_labels),
                    validation_steps=test_data.shape[0] // batch_size,
                    verbose=2)
                n_accuracies.append(
                    model.evaluate(mask * test_data, test_labels,
                                   verbose=0)[-1])
                del model
            print('n_features : ', n_features, ', acc : ', n_accuracies)
            accuracies.append(n_accuracies)
            nfeats.append(n_features)

        os.makedirs(directory, exist_ok=True)
        output_filename = (directory + network_name + '_' + str(gamma) +
                           '_dfs_results.json')

        try:
            with open(output_filename) as outfile:
                info_data = json.load(outfile)
        except (IOError, ValueError):
            info_data = {}

        if name not in info_data:
            info_data[name] = []

        info_data[name].append({
            'lasso': lasso,
            'gamma': gamma,
            'regularization': regularization,
            'rank': rank.tolist(),
            'reps': reps,
            'classification': {
                'n_features': nfeats,
                'accuracy': accuracies
            }
        })

        with open(output_filename, 'w') as outfile:
            json.dump(info_data, outfile)

        del rank
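
The function references several module-level names that the excerpt does not define. Below is a minimal sketch of plausible definitions, assuming an old-style Keras setup (consistent with the fit_generator calls); every concrete value and helper import is illustrative, not taken from the source:

import os
import json
import numpy as np
from keras import callbacks
from keras.utils import to_categorical

import network_models                    # project module providing wrn164 etc.
from dataset_utils import load_dataset   # hypothetical project helpers
from ranking import get_rank

network_names = ['wrn164']     # models to evaluate
batch_size = 128
reps = 3                       # training repetitions per feature fraction
method = 'dfs'                 # ranking method name passed to get_rank
lasso, gamma, regularization = 1e-3, 10., 5e-4
rank_kwargs = {}
directory = 'results/'         # trailing slash matters: paths are concatenated


def scheduler(is_wrn):
    """Return an epoch -> learning-rate function for LearningRateScheduler.

    Hypothetical step schedule: the WRN runs train for 130 epochs, the
    others for 80, so the WRN schedule decays later.
    """
    drops = (60, 100, 120) if is_wrn else (40, 60, 70)

    def schedule(epoch):
        lr = 1e-1
        for drop in drops:
            if epoch >= drop:
                lr *= 0.1
        return lr

    return schedule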
Example #2
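This example loads a cached WRN-16-4 (or trains and saves one if it does not exist), then, starting from one randomly chosen test sample per class, repeatedly adds the model's input saliency to the image until the predicted probability of each target class exceeds 0.95, and writes the resulting image grids to disk. As above, module-level names (directory, model_filename, lasso, regularization, input_noise, batch_size, lr, scheduler) and project helpers (load_dataset, network_models, layers, saliency_function, sample_images) are assumed to be defined elsewhere; a hedged sketch of the saliency helper's interface follows the example.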
def main():

    dataset = load_dataset()

    train_data = np.asarray(dataset['train']['data'])
    train_labels = dataset['train']['label']
    num_classes = len(np.unique(train_labels))

    test_data = np.asarray(dataset['test']['data'])
    test_labels = dataset['test']['label']

    train_labels = to_categorical(train_labels, num_classes=num_classes)
    test_labels = to_categorical(test_labels, num_classes=num_classes)

    if os.path.exists(directory + model_filename):
        # `layers` is presumably the project's own layers module: Mask is a
        # custom layer, so it must be registered via custom_objects to load.
        model = load_model(directory + model_filename,
                           custom_objects={
                               'GaussianNoise': layers.GaussianNoise,
                               'Mask': layers.Mask
                           })
    else:
        model_kwargs = {
            'nclasses': num_classes,
            'lasso': lasso,
            'regularization': regularization,
            'input_noise': input_noise
        }

        generator = dataset['generator']
        generator_kwargs = {'batch_size': batch_size}

        model = network_models.wrn164(train_data.shape[1:], **model_kwargs)
        model.fit_generator(
            generator.flow(train_data, train_labels, **generator_kwargs),
            steps_per_epoch=train_data.shape[0] // batch_size,
            epochs=40,
            callbacks=[callbacks.LearningRateScheduler(scheduler)],
            validation_data=(test_data, test_labels),
            validation_steps=test_data.shape[0] // batch_size,
            verbose=2)
        os.makedirs(directory, exist_ok=True)
        save_model(model, directory + model_filename)

    model.saliency = saliency_function.get_saliency('categorical_crossentropy',
                                                    model,
                                                    reduce_func=None,
                                                    use_abs=False)
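    # The returned callable is assumed to take [inputs, one-hot labels,
    # learning_phase] and return a list whose first element is the saliency
    # of the inputs; see the interface sketch after this example.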

    samples = []
    for i in range(num_classes):
        print('label', i)
        pos = np.where(test_labels[:, i] > 0.)[0]
        np.random.seed(42)
        p = pos[np.random.randint(pos.shape[0])]
        # predictions = model.predict(test_data[pos])
        # label_pred = np.argmax(predictions, axis=-1)
        # p = np.where(label_pred == i)[0]
        # p_min = np.argmin(predictions[p, i])
        # p = pos[p[p_min]]
        sample = test_data[p]
        i_samples = [sample]
        for label in range(num_classes):
            print('label', i, label)
            label_sample = sample.copy().astype(float)
            cat_label = to_categorical([label], num_classes=num_classes)
            prediction = model.predict(np.asarray([label_sample]))[0]
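            # Gradient ascent on the input until the model assigns >= 95%
            # probability to the target class; the saliency's sign is assumed
            # such that adding it increases that probability.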
            while prediction[label] < .95:
                saliency = model.saliency([[label_sample], cat_label, 0])[0][0]
                saliency /= np.max(np.abs(saliency))
                label_sample += lr * saliency
                label_sample[label_sample < 0.] = 0.
                label_sample[label_sample > 1.] = 1.
                prediction = model.predict(np.asarray([label_sample]))[0]
                print(label, prediction[label])
            i_samples.append(label_sample)
        samples.append(i_samples)

    sample_images(samples, filename=directory + 'image.png')
    sample_images(samples,
                  filename=directory + 'image_diff.png',
                  show_diff=True)
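
Example #2 treats model.saliency as a black box. Under the old Keras backend API (again consistent with fit_generator above), a helper with the observed call signature might be sketched as below; this illustrates the assumed interface, not the project's actual saliency_function:

from keras import backend as K


def get_saliency(loss_name, model, reduce_func=None, use_abs=False):
    # loss_name and reduce_func belong to the observed signature; the call
    # site passes 'categorical_crossentropy' and None, so only that case is
    # sketched.
    target = K.placeholder(shape=model.output_shape)
    loss = K.sum(K.categorical_crossentropy(target, model.output))
    # Negated gradient, so that *adding* the result to an input lowers the
    # loss and raises the target-class probability, matching the ascent loop
    # above (sign convention assumed).
    grads = K.gradients(-loss, model.input)[0]
    if use_abs:
        grads = K.abs(grads)
    # The returned function mirrors the call site:
    #   fn([data, one_hot_labels, learning_phase]) -> [saliency]
    return K.function([model.input, target, K.learning_phase()], [grads])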