Code Example #1
    def save_classifier(self, debug, filename=None, filepath=None):
        """
        Saves projections classifiers.
        """

        if filename is None or filepath is None:
            filename, filepath = directories._get_model_savedir(
                model_name=self.model_name,
                dataset_name=self.dataset_name,
                size_proj=self.size_proj,
                projection_mode=self.projection_mode,
                epochs=self.epochs,
                centroid_translation=self.centroid_translation,
                debug=debug)

        if self.trained:
            for seed, proj_classifier in zip(self.random_seeds,
                                             self.classifiers):
                proj_classifier.save_classifier(filepath=filepath,
                                                filename=filename + "_" +
                                                str(seed),
                                                debug=debug)

            if self.centroid_translation:
                save_to_pickle(data=self.translation_vector,
                               filepath=filepath,
                               filename="training_data_centroid.pkl")
        else:
            raise ValueError("Train the model first.")
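
The ensemble above stores one file per random seed, following the pattern "<filename>_<seed>", plus a pickled training-data centroid when centroid_translation is enabled. The standalone sketch below reproduces that layout with plain pickle files; every name in it is hypothetical, and it does not use the repository's save_to_pickle or directories helpers.

    # Standalone sketch of the per-seed naming convention; all names here are
    # hypothetical stand-ins. Only the "<filename>_<seed>" pattern and the
    # "training_data_centroid.pkl" file mirror save_classifier above.
    import os
    import pickle

    def save_ensemble_sketch(classifiers, random_seeds, filepath, filename,
                             translation_vector=None):
        os.makedirs(filepath, exist_ok=True)
        for seed, clf in zip(random_seeds, classifiers):
            with open(os.path.join(filepath, f"{filename}_{seed}.pkl"), "wb") as f:
                pickle.dump(clf, f)  # one file per random seed
        if translation_vector is not None:
            with open(os.path.join(filepath, "training_data_centroid.pkl"), "wb") as f:
                pickle.dump(translation_vector, f)

    def load_ensemble_sketch(random_seeds, filepath, filename):
        # Reads back the same per-seed files, mirroring load_classifier (Code Example #5).
        classifiers = []
        for seed in random_seeds:
            with open(os.path.join(filepath, f"{filename}_{seed}.pkl"), "rb") as f:
                classifiers.append(pickle.load(f))
        return classifiers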
Code Example #2
    def load_classifier(self, debug, filename=None, filepath=None):
        """
        Loads a pre-trained classifier, resolving the default save directory
        when no filename/filepath is given.
        """

        if filename is None or filepath is None:
            filename, filepath = directories._get_model_savedir(
                model_name=self.model_name, dataset_name=self.dataset_name,
                epochs=self.epochs, debug=debug)

        return super(BaselineConvnet, self).load_classifier(filepath=filepath, filename=filename)
Code Example #3
    def save_robust_classifier(self, robust_classifier, debug, attack_method,
                               attack_library, filename=None, filepath=None):
        """
        Saves an adversarially trained classifier under a directory keyed by
        the attack method and attack library.
        """

        if filename is None or filepath is None:
            filename, filepath = directories._get_model_savedir(
                model_name=self.model_name, dataset_name=self.dataset_name,
                epochs=self.epochs, debug=debug, robust=True,
                attack_method=attack_method, attack_library=attack_library)

        robust_classifier.save_classifier(filepath=filepath, filename=filename,
                                          debug=debug)
Code Example #4
    def load_robust_classifier(self, debug, attack_method, attack_library,
                               filename=None, filepath=None):
        """
        Loads an adversarially trained classifier by building a fresh
        BaselineConvnet and restoring the saved weights into it.
        """

        if filename is None or filepath is None:
            filename, filepath = directories._get_model_savedir(
                model_name=self.model_name, dataset_name=self.dataset_name,
                epochs=self.epochs, debug=debug, robust=True,
                attack_method=attack_method, attack_library=attack_library)

        robust_classifier = BaselineConvnet(input_shape=self.input_shape, num_classes=self.num_classes, epochs=self.epochs, 
                                            data_format=self.data_format, dataset_name=self.dataset_name)

        robust_classifier.load_classifier(filepath=filepath, filename=filename, debug=debug)
        return robust_classifier
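
Note that load_robust_classifier does not deserialize a model object directly: it first constructs a fresh BaselineConvnet with the same configuration and then restores the saved weights into it. Below is a minimal self-contained sketch of that rebuild-then-restore pattern; TinyModel and its file layout are hypothetical stand-ins, not the repository's BaselineConvnet API.

    # Minimal sketch of the rebuild-then-restore pattern used by
    # load_robust_classifier; TinyModel is a hypothetical stand-in.
    import os
    import pickle

    class TinyModel:
        def __init__(self, input_shape, num_classes):
            self.input_shape = input_shape   # configuration fixed at construction time
            self.num_classes = num_classes
            self.weights = None              # parameters are saved/restored separately

        def save(self, filepath, filename):
            os.makedirs(filepath, exist_ok=True)
            with open(os.path.join(filepath, filename + ".pkl"), "wb") as f:
                pickle.dump(self.weights, f)

        def load(self, filepath, filename):
            with open(os.path.join(filepath, filename + ".pkl"), "rb") as f:
                self.weights = pickle.load(f)
            return self

    # Round trip: save the trained parameters, then rebuild an identically
    # configured model and restore them into it.
    trained = TinyModel(input_shape=(28, 28, 1), num_classes=10)
    trained.weights = [0.1, 0.2, 0.3]
    trained.save(filepath="saved_models", filename="robust_fgsm")
    restored = TinyModel(input_shape=(28, 28, 1), num_classes=10).load(
        filepath="saved_models", filename="robust_fgsm")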
Code Example #5
    def load_classifier(self, debug, filename=None, filepath=None):
        """
        Loads a pre-trained classifier.
        """

        if filename is None or filepath is None:
            filename, filepath = directories._get_model_savedir(
                model_name=self.model_name,
                dataset_name=self.dataset_name,
                size_proj=self.size_proj,
                projection_mode=self.projection_mode,
                epochs=self.epochs,
                centroid_translation=self.centroid_translation,
                debug=debug)

        start_time = time.time()
        self.trained = True

        classifiers = []
        for seed in self.random_seeds:
            proj_classifier = BaselineConvnet(input_shape=self.input_shape,
                                              num_classes=self.num_classes,
                                              epochs=self.epochs,
                                              data_format=self.data_format,
                                              dataset_name=self.dataset_name)
            classifiers.append(
                proj_classifier.load_classifier(filepath=filepath,
                                                filename=filename + "_" +
                                                str(seed),
                                                debug=debug))

        if self.centroid_translation:
            self.translation_vector = load_from_pickle(
                path=filepath + "training_data_centroid.pkl")
        else:
            self.translation_vector = None

        print("\nLoading time: --- %s seconds ---" %
              (time.time() - start_time))

        self.classifiers = classifiers
        return classifiers