Example #1
def attach(self, model):
    # Build the E2EFS layer for this input shape and prepend it to the model.
    self.e2efs_layer = self.get_layer(model.input_shape[1:])
    self.model = self.e2efs_layer.add_to_model(
        model, input_shape=model.input_shape[1:])
    # Wrap the model's optimizer in its E2EFS-aware counterpart,
    # keeping the original hyperparameters.
    kwargs = model.optimizer.get_config()
    if 'sgd' in type(model.optimizer).__name__.lower():
        opt = custom_optimizers.E2EFS_SGD(self.e2efs_layer,
                                          th=self.th,
                                          **kwargs)
    elif 'adam' in type(model.optimizer).__name__.lower():
        opt = custom_optimizers.E2EFS_Adam(self.e2efs_layer,
                                           th=self.th,
                                           **kwargs)
    else:
        raise Exception(
            'Optimizer not supported. Contact the authors if you need it')
    # Recompile with the wrapped optimizer and the original training setup.
    self.model.compile(opt, model.loss, model.metrics, model.loss_weights,
                       model.sample_weight_mode, model.weighted_metrics)
    return self
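
A minimal usage sketch for this method (hedged: the `models.E2EFSSoft` wrapper name and the `fit` arguments are assumptions based on the public e2efs package, not shown in this example):

    # Hypothetical usage: `model` is an already-compiled Keras model.
    fs_class = models.E2EFSSoft(n_features_to_select=39)  # assumed wrapper class
    fs_class.attach(model)   # wraps the optimizer and recompiles; returns self
    fs_class.fit(X_train, y_train, batch_size=128)  # assumed training entry point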
Example #2
def main():

    dataset = load_dataset()

    train_data = np.asarray(dataset['train']['data'])
    train_labels = dataset['train']['label']
    num_classes = len(np.unique(train_labels))

    test_data = np.asarray(dataset['test']['data'])
    test_labels = dataset['test']['label']

    train_labels = to_categorical(train_labels, num_classes=num_classes)
    test_labels = to_categorical(test_labels, num_classes=num_classes)

    generator = dataset['generator']
    fs_generator = dataset['fs_generator']
    generator_kwargs = {
        'batch_size': batch_size
    }

    print('reps : ', reps)
    name = 'mnist_' + fs_network + '_r_' + str(regularization)
    print(name)
    model_kwargs = {
        'nclasses': num_classes,
        'regularization': regularization
    }

    total_features = int(np.prod(train_data.shape[1:]))

    fs_filename = directory + fs_network + '_trained_model.h5'
    classifier_filename = directory + classifier_network + '_trained_model.h5'
    if not os.path.isdir(directory):
        os.makedirs(directory)
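    # Warm-up: pre-train the feature-selection network once and cache its weights.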
    if not os.path.exists(fs_filename) and warming_up:
        np.random.seed(1001)
        tf.set_random_seed(1001)
        model = getattr(network_models, fs_network)(input_shape=train_data.shape[1:], **model_kwargs)
        print('training_model')
        model.fit_generator(
            generator.flow(train_data, train_labels, **generator_kwargs),
            steps_per_epoch=train_data.shape[0] // batch_size, epochs=110,
            callbacks=[
                callbacks.LearningRateScheduler(scheduler())
            ],
            validation_data=(test_data, test_labels),
            validation_steps=test_data.shape[0] // batch_size,
            verbose=verbose
        )

        model.save(fs_filename)
        del model
        K.clear_session()

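    # For each E2EFS variant, select 5%/10%/25%/50% of the features, then
    # retrain a fresh classifier on the masked inputs to measure accuracy.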
    for e2efs_class in e2efs_classes:
        nfeats = []
        accuracies = []
        times = []

        cont_seed = 0

        for factor in [.05, .1, .25, .5]:
            n_features = int(total_features * factor)
            n_accuracies = []
            n_times = []

            for r in range(reps):
                print('factor : ', factor, ' , rep : ', r)
                np.random.seed(cont_seed)
                tf.set_random_seed(cont_seed)
                cont_seed += 1
                mask = (np.std(train_data, axis=0) > 1e-3).astype(int).flatten()
                classifier = load_model(fs_filename) if warming_up else getattr(
                    network_models, fs_network)(input_shape=train_data.shape[1:], **model_kwargs)
                e2efs_layer = e2efs_class(n_features, input_shape=train_data.shape[1:],
                                          kernel_initializer=initializers.constant(mask))
                model = e2efs_layer.add_to_model(classifier, input_shape=train_data.shape[1:])

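                # The E2EFS optimizer updates the selection weights jointly
                # with the network weights.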
                optimizer = custom_optimizers.E2EFS_Adam(e2efs_layer=e2efs_layer, lr=1e-3)
                model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['acc'])
                model.fs_layer = e2efs_layer
                model.classifier = classifier
                model.summary()
                start_time = time.time()
                model.fit_generator(
                    fs_generator.flow(train_data, train_labels, **generator_kwargs),
                    steps_per_epoch=train_data.shape[0] // batch_size, epochs=20000,
                    callbacks=[
                        E2EFSCallback(verbose=verbose)
                    ],
                    validation_data=(test_data, test_labels),
                    validation_steps=test_data.shape[0] // batch_size,
                    verbose=verbose
                )
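                # Rank all features by the learned heatmap and keep the top n_features.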
                fs_rank = np.argsort(K.eval(model.heatmap))[::-1]
                mask = np.zeros(train_data.shape[1:])
                mask.flat[fs_rank[:n_features]] = 1.
                n_times.append(time.time() - start_time)
                print('nnz : ', np.count_nonzero(mask))
                del model
                K.clear_session()
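                # Stage 2: retrain a clean classifier on the masked data.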
                model = load_model(classifier_filename) if warming_up else getattr(network_models, classifier_network)(
                    input_shape=train_data.shape[1:], **model_kwargs)
                optimizer = optimizers.Adam(lr=1e-2)
                model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['acc'])
                model.fit_generator(
                    generator.flow(mask * train_data, train_labels, **generator_kwargs),
                    steps_per_epoch=train_data.shape[0] // batch_size, epochs=80,
                    callbacks=[
                        callbacks.LearningRateScheduler(scheduler()),
                    ],
                    validation_data=(mask * test_data, test_labels),
                    validation_steps=test_data.shape[0] // batch_size,
                    verbose=verbose
                )
                n_accuracies.append(model.evaluate(mask * test_data, test_labels, verbose=0)[-1])
                del model
                K.clear_session()
            print(
                'n_features : ', n_features, ', acc : ', n_accuracies, ', time : ', n_times
            )
            accuracies.append(n_accuracies)
            nfeats.append(n_features)
            times.append(n_times)

        output_filename = directory + fs_network + '_' + classifier_network + '_' + e2efs_class.__name__ + \
                          '_results_warming_' + str(warming_up) + '.json'

        try:
            with open(output_filename) as outfile:
                info_data = json.load(outfile)
        except (IOError, ValueError):
            info_data = {}

        if name not in info_data:
            info_data[name] = []

        info_data[name].append(
            {
                'regularization': regularization,
                'reps': reps,
                'classification': {
                    'n_features': nfeats,
                    'accuracy': accuracies,
                    'times': times
                }
            }
        )

        with open(output_filename, 'w') as outfile:
            json.dump(info_data, outfile)
Example #3
def main():

    dataset = load_dataset()

    for network_name in network_names:

        train_data = np.asarray(dataset['train']['data'])
        train_labels = dataset['train']['label']
        num_classes = len(np.unique(train_labels))

        test_data = np.asarray(dataset['test']['data'])
        test_labels = dataset['test']['label']

        train_labels = to_categorical(train_labels, num_classes=num_classes)
        test_labels = to_categorical(test_labels, num_classes=num_classes)

        generator = dataset['generator']
        generator_fs = dataset['generator_fs']
        generator_kwargs = {
            'batch_size': batch_size
        }

        print('reps : ', reps)
        name = 'fashion_mnist_' + network_name + '_r_' + str(regularization)
        print(name)
        model_kwargs = {
            'nclasses': num_classes,
            'regularization': regularization
        }

        total_features = int(np.prod(train_data.shape[1:]))

        model_filename = directory + network_name + '_trained_model.h5'
        if not os.path.isdir(directory):
            os.makedirs(directory)
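        # Warm-up: pre-train the backbone once per network and cache it to disk.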
        if not os.path.exists(model_filename) and warming_up:
            np.random.seed(1001)
            tf.set_random_seed(1001)
            model = getattr(network_models, network_name)(input_shape=train_data.shape[1:], **model_kwargs)
            print('training_model')
            model.fit_generator(
                generator.flow(train_data, train_labels, **generator_kwargs),
                steps_per_epoch=train_data.shape[0] // batch_size, epochs=80,
                callbacks=[
                    callbacks.LearningRateScheduler(scheduler())
                ],
                validation_data=(test_data, test_labels),
                validation_steps=test_data.shape[0] // batch_size,
                verbose=verbose
            )

            model.save(model_filename)
            del model
            K.clear_session()

        for e2efs_class in e2efs_classes:
            nfeats = []
            accuracies = []
            times = []

            cont_seed = 0
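            # Each rep reuses a cached heatmap when available; otherwise the
            # heatmap is recomputed by accumulating fs_reps E2EFS runs.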
            for r in range(reps):
                temp_filename = temp_directory + network_name + '_' + e2efs_class.__name__ + \
                                '_e2efs_heatmap_iter_' + str(r) + '.npy'
                if os.path.exists(temp_filename):
                    heatmap = np.load(temp_filename)
                    # A cached heatmap skips the selection run, so record a zero
                    # time to keep `times` aligned with the reps (printed below).
                    times.append(0.)
                else:
                    heatmap = np.zeros(np.prod(train_data.shape[1:]))
                    start_time = time.time()
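                    # Accumulate the saliency heatmap over fs_reps independent runs.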
                    for fs_r in range(fs_reps):
                        print('rep : ', fs_r)
                        np.random.seed(cont_seed)
                        tf.set_random_seed(cont_seed)
                        cont_seed += 1
                        classifier = load_model(model_filename) if warming_up else getattr(network_models, network_name)(
                            input_shape=train_data.shape[1:], **model_kwargs)
                        e2efs_layer = e2efs_class(1, input_shape=train_data.shape[1:])
                        model = e2efs_layer.add_to_model(classifier, input_shape=train_data.shape[1:])

                        optimizer = custom_optimizers.E2EFS_Adam(e2efs_layer=e2efs_layer, lr=1e-2)
                        model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['acc'])
                        model.fs_layer = e2efs_layer
                        model.classifier = classifier
                        model.summary()

                        model.fit_generator(
                            generator_fs.flow(train_data, train_labels, **generator_kwargs),
                            steps_per_epoch=train_data.shape[0] // batch_size, epochs=20000,
                            callbacks=[
                                E2EFSCallback(units=int(total_features * 0.05),
                                              verbose=verbose)
                            ],
                            validation_data=(test_data, test_labels),
                            validation_steps=test_data.shape[0] // batch_size,
                            verbose=verbose
                        )
                        heatmap += K.eval(model.heatmap)
                        del model
                        K.clear_session()
                    if not os.path.isdir(temp_directory):
                        os.makedirs(temp_directory)
                    np.save(temp_filename, heatmap)
                    times.append(time.time() - start_time)
                fs_rank = np.argsort(heatmap)[::-1]

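                # Evaluate the shared ranking at several feature budgets.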
                for i, factor in enumerate([.05, .1, .25, .5]):
                    print('factor : ', factor, ' , rep : ', r)
                    n_features = int(total_features * factor)
                    mask = np.zeros(train_data.shape[1:])
                    mask.flat[fs_rank[:n_features]] = 1.

                    np.random.seed(cont_seed)
                    tf.set_random_seed(cont_seed)
                    cont_seed += 1
                    model = load_model(model_filename) if warming_up else getattr(network_models, network_name)(input_shape=train_data.shape[1:], **model_kwargs)
                    optimizer = optimizers.Adam(lr=1e-2)
                    model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['acc'])

                    model.fit_generator(
                        generator.flow(mask * train_data, train_labels, **generator_kwargs),
                        steps_per_epoch=train_data.shape[0] // batch_size, epochs=80,
                        callbacks=[
                            callbacks.LearningRateScheduler(scheduler()),
                        ],
                        validation_data=(mask * test_data, test_labels),
                        validation_steps=test_data.shape[0] // batch_size,
                        verbose=verbose
                    )
                    acc = model.evaluate(mask * test_data, test_labels, verbose=0)[-1]
                    if i < len(accuracies):
                        accuracies[i].append(acc)
                    else:
                        accuracies.append([acc])
                        nfeats.append(n_features)
                    del model
                    K.clear_session()
                    print(
                        'n_features : ', n_features, ', acc : ', acc, ', time : ', times[-1]
                    )

            output_filename = directory + network_name + '_' + e2efs_class.__name__ + \
                              '_e2efs_results_warming_' + str(warming_up) + '.json'

            try:
                with open(output_filename) as outfile:
                    info_data = json.load(outfile)
            except (IOError, ValueError):
                info_data = {}

            if name not in info_data:
                info_data[name] = []

            info_data[name].append(
                {
                    'regularization': regularization,
                    'reps': reps,
                    'classification': {
                        'n_features': nfeats,
                        'accuracy': accuracies,
                        'times': times
                    }
                }
            )

            with open(output_filename, 'w') as outfile:
                json.dump(info_data, outfile)