Example #1
 def test_analyze_one_classification_result(self):
   self.storage_client = fake_cloud_client.FakeStorageClient(
       {'filename':
        'a1.png,1\na2.png,4\na3.png,1\na4.png,1\na5.png,2\na6.png,9'})
   adv_batch = {
       'dataset_batch_id': 'BATCH000',
       'images': {'a' + str(i):
                  {'clean_image_id': 'c' + str(i)} for i in range(1, 6)}
   }
   dataset_batches = image_batches.DatasetBatches(
       datastore_client=self.datastore_client,
       storage_client=self.storage_client,
       dataset_name='final')
   dataset_batches._data = {
       'BATCH000': {'images': {'c' + str(i): {'dataset_image_id': str(i)}
                               for i in range(1, 6)}},
   }
   (count_correctly_classified, count_errors,
    count_hit_target_class, num_images) = (
        classification_results.analyze_one_classification_result(
            self.storage_client, 'filename', adv_batch, dataset_batches,
            FakeDatasetMeta()))
   self.assertEqual(3, count_correctly_classified)
   self.assertEqual(2, count_errors)
   self.assertEqual(1, count_hit_target_class)
   self.assertEqual(5, num_images)
Example #2
 def setUp(self):
   self.storage_client = fake_cloud_client.FakeStorageClient()
   self.datastore_client = fake_cloud_client.FakeDatastoreClient()
   self.submissions = submissions.CompetitionSubmissions(
       datastore_client=self.datastore_client,
       storage_client=self.storage_client,
       round_name=ROUND_NAME)
    # we only need the list of submission ids in CompetitionSubmissions for this test
   self.submissions._defenses = {
       'SUBD000': {},
       'SUBD001': {},
   }
   self.adv_batches = image_batches.AversarialBatches(
       datastore_client=self.datastore_client)
   self.adv_batches._data = {
       'ADVBATCH000': {'dataset_batch_id': 'BATCH000',
                       'images': {},
                       'submission_id': 'SUBA000'},
       'ADVBATCH001': {'dataset_batch_id': 'BATCH000',
                       'images': {},
                       'submission_id': 'SUBA001'},
       'ADVBATCH002': {'dataset_batch_id': 'BATCH000',
                       'images': {},
                       'submission_id': 'SUBT000'},
   }
Example #3
 def setUp(self):
     self.storage_client = fake_cloud_client.FakeStorageClient()
     self.datastore_client = fake_cloud_client.FakeDatastoreClient()
     self.submissions = submissions.CompetitionSubmissions(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         round_name=ROUND_NAME,
     )
      # we only need the list of submission ids in CompetitionSubmissions for this test
     self.submissions._defenses = {
         "SUBD000": {},
         "SUBD001": {},
     }
     self.adv_batches = image_batches.AversarialBatches(
         datastore_client=self.datastore_client)
     self.adv_batches._data = {
         "ADVBATCH000": {
             "dataset_batch_id": "BATCH000",
             "images": {},
             "submission_id": "SUBA000",
         },
         "ADVBATCH001": {
             "dataset_batch_id": "BATCH000",
             "images": {},
             "submission_id": "SUBA001",
         },
         "ADVBATCH002": {
             "dataset_batch_id": "BATCH000",
             "images": {},
             "submission_id": "SUBT000",
         },
     }
Example #4
 def setUp(self):
     # prepare dataset batches and submissions
     storage_blobs = [
         'dataset/dev/img1.png',
         'dataset/dev/img2.png',
         'dataset/dev/img3.png',
         'dataset/dev/img4.png',
         'dataset/dev/img5.png',
         'dataset/dev_dataset.csv',
         ROUND_NAME + '/submissions/nontargeted/1.zip',
         ROUND_NAME + '/submissions/nontargeted/baseline_nt.zip',
         ROUND_NAME + '/submissions/targeted/1.zip',
         ROUND_NAME + '/submissions/targeted/2.zip',
         ROUND_NAME + '/submissions/defense/3.zip',
         ROUND_NAME + '/submissions/defense/baseline_adv_train.zip',
     ]
     self.storage_client = fake_cloud_client.FakeStorageClient(
         storage_blobs)
     self.datastore_client = fake_cloud_client.FakeDatastoreClient()
     self.dataset_batches = image_batches.DatasetBatches(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         dataset_name='dev')
     self.dataset_batches.init_from_storage_write_to_datastore(batch_size=3)
     self.submissions = submissions.CompetitionSubmissions(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         round_name=ROUND_NAME)
     self.submissions.init_from_storage_write_to_datastore()
Example #5
 def test_get_blob(self):
     client = fake_cloud_client.FakeStorageClient({
         'some_blob': 'some_content',
         'blob2': 'another_content'
     })
     self.assertIsNone(client.get_blob('blob3'))
     buf = BytesIO()
     client.get_blob('some_blob').download_to_file(buf)
     self.assertEqual(six_b('some_content'), buf.getvalue())
Example #6
 def test_get_blob(self):
     client = fake_cloud_client.FakeStorageClient({
         "some_blob": "some_content",
         "blob2": "another_content"
     })
     self.assertIsNone(client.get_blob("blob3"))
     buf = BytesIO()
     client.get_blob("some_blob").download_to_file(buf)
     self.assertEqual(six_b("some_content"), buf.getvalue())
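The two test_get_blob variants above pin down the blob interface the fake is expected to expose: get_blob() returns None for unknown names, and a found blob writes its stored content as bytes via download_to_file(). The following is a minimal illustrative sketch of that behaviour; the class names are hypothetical and this is not the real fake_cloud_client implementation.

from io import BytesIO


class _SketchBlob(object):
    """Holds one blob's content and mimics download_to_file()."""

    def __init__(self, content):
        self._content = content

    def download_to_file(self, fobj):
        data = self._content
        if isinstance(data, str):
            data = data.encode('utf-8')
        fobj.write(data)


class SketchStorageClient(object):
    """Maps blob names to contents; get_blob() returns None for unknown names."""

    def __init__(self, blobs=None):
        self._blobs = dict(blobs or {})

    def get_blob(self, name):
        content = self._blobs.get(name)
        return None if content is None else _SketchBlob(content)


# Behaves as the tests above expect:
buf = BytesIO()
SketchStorageClient({'some_blob': 'some_content'}).get_blob('some_blob').download_to_file(buf)
assert buf.getvalue() == b'some_content'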
Example #7
 def test_list_blobs(self):
     all_blobs = [
         'some_blob', 'dataset/dev_dataset.csv', 'dataset/dev/img1.png',
         'dataset/dev/img2.png'
     ]
     client = fake_cloud_client.FakeStorageClient(all_blobs)
     assertCountEqual(self, all_blobs, client.list_blobs())
     assertCountEqual(self, [
         'dataset/dev_dataset.csv', 'dataset/dev/img1.png',
         'dataset/dev/img2.png'
     ], client.list_blobs('dataset/dev'))
     assertCountEqual(self,
                      ['dataset/dev/img1.png', 'dataset/dev/img2.png'],
                      client.list_blobs('dataset/dev/'))
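The three assertions in test_list_blobs constrain list_blobs() to plain name-prefix filtering: 'dataset/dev' still matches 'dataset/dev_dataset.csv', while 'dataset/dev/' does not. A minimal sketch of that filtering is shown below; the class name is hypothetical and this is not the real fake_cloud_client code.

class SketchBlobListing(object):
    """Keeps a flat list of blob names and filters them by string prefix."""

    def __init__(self, blob_names):
        self._blob_names = list(blob_names)

    def list_blobs(self, prefix=''):
        # The empty default prefix matches every name.
        return [name for name in self._blob_names if name.startswith(prefix)]


listing = SketchBlobListing(['dataset/dev_dataset.csv', 'dataset/dev/img1.png'])
assert listing.list_blobs('dataset/dev') == ['dataset/dev_dataset.csv', 'dataset/dev/img1.png']
assert listing.list_blobs('dataset/dev/') == ['dataset/dev/img1.png']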
Example #8
 def setUp(self):
   storage_blobs = [
       ROUND_NAME + '/submissions/nontargeted/1.zip',
       ROUND_NAME + '/submissions/nontargeted/baseline_nt.zip',
       ROUND_NAME + '/submissions/targeted/1.zip',
       ROUND_NAME + '/submissions/targeted/2.zip',
       ROUND_NAME + '/submissions/defense/3.zip',
       ROUND_NAME + '/submissions/defense/baseline_adv_train.zip',
   ]
   self.storage_client = fake_cloud_client.FakeStorageClient(storage_blobs)
   self.datastore_client = fake_cloud_client.FakeDatastoreClient()
   self.submissions = submissions.CompetitionSubmissions(
       datastore_client=self.datastore_client,
       storage_client=self.storage_client,
       round_name=ROUND_NAME)
Example #9
 def setUp(self):
     storage_blobs = [
         'dataset/dev/img1.png',
         'dataset/dev/img2.png',
         'dataset/dev/img3.png',
         'dataset/dev/img4.png',
         'dataset/dev/img5.png',
         'dataset/dev_dataset.csv',
     ]
     self.storage_client = fake_cloud_client.FakeStorageClient(
         storage_blobs)
     self.datastore_client = fake_cloud_client.FakeDatastoreClient()
     self.dataset_batches = image_batches.DatasetBatches(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         dataset_name='dev')
Example #10
 def test_analyze_one_classification_result(self):
     self.storage_client = fake_cloud_client.FakeStorageClient({
         "filename":
         "a1.png,1\na2.png,4\na3.png,1\na4.png,1\na5.png,2\na6.png,9"
     })
     adv_batch = {
         "dataset_batch_id": "BATCH000",
         "images": {
             "a" + str(i): {
                 "clean_image_id": "c" + str(i)
             }
             for i in range(1, 6)
         },
     }
     dataset_batches = image_batches.DatasetBatches(
         datastore_client=self.datastore_client,
         storage_client=self.storage_client,
         dataset_name="final",
     )
     dataset_batches._data = {
         "BATCH000": {
             "images": {
                 "c" + str(i): {
                     "dataset_image_id": str(i)
                 }
                 for i in range(1, 6)
             }
         },
     }
     (
         count_correctly_classified,
         count_errors,
         count_hit_target_class,
         num_images,
     ) = classification_results.analyze_one_classification_result(
         self.storage_client,
         "filename",
         adv_batch,
         dataset_batches,
         FakeDatasetMeta(),
     )
     self.assertEqual(3, count_correctly_classified)
     self.assertEqual(2, count_errors)
     self.assertEqual(1, count_hit_target_class)
     self.assertEqual(5, num_images)
Example #11
 def test_list_blobs(self):
     all_blobs = [
         "some_blob",
         "dataset/dev_dataset.csv",
         "dataset/dev/img1.png",
         "dataset/dev/img2.png",
     ]
     client = fake_cloud_client.FakeStorageClient(all_blobs)
     assertCountEqual(self, all_blobs, client.list_blobs())
     assertCountEqual(
         self,
         [
             "dataset/dev_dataset.csv", "dataset/dev/img1.png",
             "dataset/dev/img2.png"
         ],
         client.list_blobs("dataset/dev"),
     )
     assertCountEqual(
         self,
         ["dataset/dev/img1.png", "dataset/dev/img2.png"],
         client.list_blobs("dataset/dev/"),
     )
Example #12
 def test_read_classification_results(self):
   self.storage_client = fake_cloud_client.FakeStorageClient(
       {'filename': 'img1.png,123\nimg2.jpg,456'})
   results = classification_results.read_classification_results(
       self.storage_client, 'filename')
   self.assertDictEqual({'img1': 123, 'img2': 456}, results)
Example #13
 def test_read_classification_results(self):
     self.storage_client = fake_cloud_client.FakeStorageClient(
         {"filename": "img1.png,123\nimg2.jpg,456"})
     results = classification_results.read_classification_results(
         self.storage_client, "filename")
     self.assertDictEqual({"img1": 123, "img2": 456}, results)
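Examples #12 and #13 fully specify the parsing behaviour under test: each line of the blob is '<image_file>,<label>', the file extension is dropped from the key, and the label is converted to int. The standalone sketch below reproduces just that mapping; the function name is hypothetical, and the real read_classification_results additionally reads the blob through the storage client and guards against malformed input.

import os


def parse_classification_csv(text):
    """Parses '<image_file>,<label>' lines into {image_id_without_extension: int_label}."""
    results = {}
    for line in text.splitlines():
        if not line.strip():
            continue
        image_file, label = line.split(',')
        results[os.path.splitext(image_file)[0]] = int(label)
    return results


assert parse_classification_csv('img1.png,123\nimg2.jpg,456') == {'img1': 123, 'img2': 456}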