Example #1
  def test_analyze_one_classification_result(self):
    # Fake classification results file: "adversarial image name,predicted label".
    self.storage_client = fake_cloud_client.FakeStorageClient(
        {'filename':
         'a1.png,1\na2.png,4\na3.png,1\na4.png,1\na5.png,2\na6.png,9'})
    # Adversarial batch with images a1..a5 generated from clean images c1..c5;
    # a6 appears in the results file but not in the batch, so it is ignored.
    adv_batch = {
        'dataset_batch_id': 'BATCH000',
        'images': {'a' + str(i):
                   {'clean_image_id': 'c' + str(i)} for i in range(1, 6)}
    }
    dataset_batches = image_batches.DatasetBatches(
        datastore_client=self.datastore_client,
        storage_client=self.storage_client,
        dataset_name='final')
    # Map clean images c1..c5 back to dataset image ids 1..5 so that true
    # labels and target classes can be looked up through FakeDatasetMeta.
    dataset_batches._data = {
        'BATCH000': {'images': {'c' + str(i): {'dataset_image_id': str(i)}
                                for i in range(1, 6)}},
    }
    (count_correctly_classified, count_errors,
     count_hit_target_class, num_images) = (
         classification_results.analyze_one_classification_result(
             self.storage_client, 'filename', adv_batch, dataset_batches,
             FakeDatasetMeta()))
    self.assertEqual(3, count_correctly_classified)
    self.assertEqual(2, count_errors)
    self.assertEqual(1, count_hit_target_class)
    self.assertEqual(5, num_images)
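
The test above relies on a FakeDatasetMeta helper that is not included in the snippet. A minimal sketch consistent with the assertions (three predictions matching the true label, one error landing on the target class) is shown below; the get_true_label and get_target_class method names are assumed from how the metadata object is queried, not taken from this listing.

class FakeDatasetMeta(object):
  """Stand-in for dataset metadata that returns constant labels.

  With every true label equal to 1 and every target class equal to 2, the
  classification file above yields 3 correct predictions (a1, a3, a4),
  2 errors (a2, a5) and 1 target-class hit (a5).
  """

  def get_true_label(self, _):
    return 1

  def get_target_class(self, _):
    return 2
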
Example #2
 def setUp(self):
     # prepare dataset batches and submissions
     storage_blobs = [
         'dataset/dev/img1.png',
         'dataset/dev/img2.png',
         'dataset/dev/img3.png',
         'dataset/dev/img4.png',
         'dataset/dev/img5.png',
         'dataset/dev_dataset.csv',
         ROUND_NAME + '/submissions/nontargeted/1.zip',
         ROUND_NAME + '/submissions/nontargeted/baseline_nt.zip',
         ROUND_NAME + '/submissions/targeted/1.zip',
         ROUND_NAME + '/submissions/targeted/2.zip',
         ROUND_NAME + '/submissions/defense/3.zip',
         ROUND_NAME + '/submissions/defense/baseline_adv_train.zip',
     ]
     self.storage_client = fake_cloud_client.FakeStorageClient(
         storage_blobs)
     self.datastore_client = fake_cloud_client.FakeDatastoreClient()
     self.dataset_batches = image_batches.DatasetBatches(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         dataset_name='dev')
     self.dataset_batches.init_from_storage_write_to_datastore(batch_size=3)
     self.submissions = submissions.CompetitionSubmissions(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         round_name=ROUND_NAME)
     self.submissions.init_from_storage_write_to_datastore()
Example #3
 def test_init_from_datastore(self):
     self.dataset_batches.init_from_storage_write_to_datastore(batch_size=3)
     self.dataset_batches = image_batches.DatasetBatches(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         dataset_name='dev')
     self.dataset_batches.init_from_datastore()
     self.verify_dataset_batches()
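
The verify_dataset_batches helper called above is not part of the snippet. A hypothetical sketch of what it might assert, based only on the _data layout shown in Example #1 and on the five dev images being split with batch_size=3, could look like this:

  def verify_dataset_batches(self):
      # Hypothetical helper body (not from the original source): five images
      # split with batch_size=3 should yield two batches holding five images
      # in total.
      batches = self.dataset_batches._data
      self.assertEqual(2, len(batches))
      self.assertEqual(5, sum(len(b['images']) for b in batches.values()))
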
Example #4
 def setUp(self):
     storage_blobs = [
         'dataset/dev/img1.png',
         'dataset/dev/img2.png',
         'dataset/dev/img3.png',
         'dataset/dev/img4.png',
         'dataset/dev/img5.png',
         'dataset/dev_dataset.csv',
     ]
     self.storage_client = fake_cloud_client.FakeStorageClient(
         storage_blobs)
     self.datastore_client = fake_cloud_client.FakeDatastoreClient()
     self.dataset_batches = image_batches.DatasetBatches(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         dataset_name='dev')
Example #5
 def test_analyze_one_classification_result(self):
     self.storage_client = fake_cloud_client.FakeStorageClient({
         "filename":
         "a1.png,1\na2.png,4\na3.png,1\na4.png,1\na5.png,2\na6.png,9"
     })
     adv_batch = {
         "dataset_batch_id": "BATCH000",
         "images": {
             "a" + str(i): {
                 "clean_image_id": "c" + str(i)
             }
             for i in range(1, 6)
         },
     }
     dataset_batches = image_batches.DatasetBatches(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         dataset_name="final",
     )
     dataset_batches._data = {
         "BATCH000": {
             "images": {
                 "c" + str(i): {
                     "dataset_image_id": str(i)
                 }
                 for i in range(1, 6)
             }
         },
     }
     (
         count_correctly_classified,
         count_errors,
         count_hit_target_class,
         num_images,
     ) = classification_results.analyze_one_classification_result(
         self.storage_client,
         "filename",
         adv_batch,
         dataset_batches,
         FakeDatasetMeta(),
     )
     self.assertEqual(3, count_correctly_classified)
     self.assertEqual(2, count_errors)
     self.assertEqual(1, count_hit_target_class)
     self.assertEqual(5, num_images)
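
For reference, the expected counts in both versions of the test can be reproduced with plain Python, assuming (as in the FakeDatasetMeta sketch after Example #1) that the true label is always 1 and the target class is always 2. This is only an illustration of the arithmetic behind the assertions, not the implementation of analyze_one_classification_result.

# Hedged sketch of the bookkeeping behind the assertions above.
predictions = {'a1': 1, 'a2': 4, 'a3': 1, 'a4': 1, 'a5': 2, 'a6': 9}
adv_images = {'a%d' % i: 'c%d' % i for i in range(1, 6)}
true_label, target_class = 1, 2

correct = errors = hit_target = num_images = 0
for adv_id, label in predictions.items():
  if adv_id not in adv_images:
    # a6 is in the classification file but not in the adversarial batch.
    continue
  num_images += 1
  if label == true_label:
    correct += 1
  else:
    errors += 1
    if label == target_class:
      hit_target += 1

assert (correct, errors, hit_target, num_images) == (3, 2, 1, 5)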