Example #1
    def _get_test_roc(self, model_type, checkpoint_path):
        data_module = building.build_datamodule(self.dataset, anomaly=True)
        anomaly_detector = downstream.AnomalyDetection.from_autoencoder_checkpoint(
            model_type, data_module, checkpoint_path)
        fpr, tpr, thresholds, auc = anomaly_detector.get_test_roc(data_module)

        return fpr, tpr, thresholds, auc
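
A hypothetical companion test (not part of the original example) could use this helper to check that the detector separates anomalies better than chance; the 'vae' model type and the self.checkpoint_path attribute below are illustrative assumptions.

    def test_vae_detector_beats_chance(self):
        # Sketch only: assumes the TestCase stores a trained checkpoint path.
        fpr, tpr, thresholds, auc = self._get_test_roc('vae', self.checkpoint_path)
        self.assertEqual(len(fpr), len(tpr))
        self.assertGreater(auc, 0.5)  # AUC of 0.5 corresponds to random guessing
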
Example #2
    def test_rest(self):
        rest = [
            'shallow', 'vanilla', 'stacked', 'sparse', 'vae',
            'beta_vae_strict', 'beta_vae_loose', 'vq'
        ]
        for model_type in rest:
            with self.subTest(model_type=model_type):
                dm = building.build_datamodule(model_type)
                self.assertIsNone(dm.exclude)
Example #3
def run(model_type, dataset, batch_size, gpu, anomaly=False):
    assert model_type in AUTOENCODERS
    task = 'anomaly' if anomaly else None
    pl.seed_everything(42)
    datamodule = build_datamodule(dataset, model_type, batch_size, anomaly)
    ae = build_ae(model_type, datamodule.dims, anomaly)
    logger = build_logger(model_type, dataset, task)
    checkpoint_path = _train(model_type, ae, datamodule, logger, gpu)

    return checkpoint_path
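
For context, here is a minimal sketch of how run() might be wired to a command line; the argparse flags, their defaults, and the assumption that AUTOENCODERS is an iterable of model-type names are illustrative, not the repository's actual CLI.

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Train an autoencoder.')
    parser.add_argument('--model-type', default='vanilla', choices=sorted(AUTOENCODERS))
    parser.add_argument('--dataset', default='mnist')
    parser.add_argument('--batch-size', type=int, default=32)
    parser.add_argument('--gpu', type=int, default=0)
    parser.add_argument('--anomaly', action='store_true')
    args = parser.parse_args()

    checkpoint_path = run(args.model_type, args.dataset, args.batch_size,
                          args.gpu, anomaly=args.anomaly)
    print(f'Best checkpoint saved to {checkpoint_path}')
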
Example #4
    def _get_test_accuracy(self, model_type, checkpoint_path):
        data_module = building.build_datamodule(self.dataset, 'classification')
        classifier = downstream.Classifier.from_autoencoder_checkpoint(
            model_type, data_module, checkpoint_path)
        trainer = self._get_classification_trainer()

        trainer.fit(classifier, datamodule=data_module)
        test_results, *_ = trainer.test(datamodule=data_module)
        accuracy = test_results['test/accuracy']

        return accuracy
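
A hypothetical follow-up test could sweep this helper over several pretrained models and require better-than-chance accuracy; the self.checkpoints mapping and the 0.1 floor (chance level for a 10-class dataset) are assumptions made for illustration.

    def test_classifiers_beat_chance(self):
        # Sketch only: assumes self.checkpoints maps model types to trained
        # autoencoder checkpoint paths.
        for model_type, checkpoint_path in self.checkpoints.items():
            with self.subTest(model_type=model_type):
                accuracy = self._get_test_accuracy(model_type, checkpoint_path)
                self.assertGreater(accuracy, 0.1)
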
Example #5
    def test_rest(self):
        rest = [
            'shallow', 'vanilla', 'stacked', 'sparse', 'vae',
            'beta_vae_strict', 'beta_vae_loose', 'vq'
        ]
        for model_type in rest:
            for dataset in data.AVAILABLE_DATASETS.keys():
                with self.subTest(model_type=model_type):
                    dm = building.build_datamodule(dataset, model_type)
                    self.assertIsInstance(dm, data.AVAILABLE_DATASETS[dataset])
                    self.assertIsNone(dm.exclude)
Example #6
    def _get_datamodule(self):
        data_module = building.build_datamodule(self.dataset)
        data_module.prepare_data()
        data_module.setup('test')

        return data_module
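
Once prepared and set up for the 'test' stage, the data module exposes the standard LightningDataModule loaders; the sketch below (assuming batches are (inputs, ...) tuples and that the module has a batch_size attribute, neither of which the original code guarantees) just peeks at one test batch.

    def test_datamodule_yields_batches(self):
        # Hypothetical check, not part of the original example.
        data_module = self._get_datamodule()
        inputs, *rest = next(iter(data_module.test_dataloader()))
        self.assertLessEqual(inputs.shape[0], data_module.batch_size)
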
Example #7
    def _get_datamodule(self):
        data = building.build_datamodule()
        data.prepare_data()
        data.setup('test')

        return data
Example #8
import unittest

import building
from models import bottlenecks, encoders, decoders


class TestBuildingDataModule(unittest.TestCase):
    def test_anomaly(self):
        dm = building.build_datamodule('vae', anomaly=True)
        self.assertEqual(1, dm.exclude)

    def test_classification(self):
        dm = building.build_datamodule('classification')
        self.assertEqual(550, dm.train_size)  # 1% of training data

    def test_no_model_type(self):
        with self.subTest(case='default'):
            dm = building.build_datamodule()
            self.assertIsNone(dm.exclude)
            self.assertIsNone(dm.train_size)
            self.assertEqual(32, dm.batch_size)
        with self.subTest(case='anomaly'):
            dm = building.build_datamodule(anomaly=True)
            self.assertEqual(1, dm.exclude)
            self.assertIsNone(dm.train_size)
            self.assertEqual(32, dm.batch_size)

    def test_rest(self):
        rest = [
            'shallow', 'vanilla', 'stacked', 'sparse', 'vae',
            'beta_vae_strict', 'beta_vae_loose', 'vq'
        ]
        for model_type in rest: