def test_bulk_delete(self):
    """Bulk-delete two blobs and check the datadog delete metrics.

    Stores two blobs (14 bytes of content each), bulk-deletes them by
    path, and asserts that 2 deletions totalling 28 bytes were recorded
    and that both blobs are subsequently gone.

    Returns the deleted paths so callers can make further assertions.
    """
    blobs = [
        ('test.5', 'doc.5'),
        ('test.6', 'doc.6'),
    ]
    infos = [
        # BUG FIX: the original called b"content-{}".format(...), but
        # bytes has no .format method in Python 3 (AttributeError).
        # Format as str, then encode — same pattern as the metas-based
        # bulk delete test elsewhere in this file.
        self.db.put(
            BytesIO("content-{}".format(name).encode('utf-8')),
            get_id(),
            bucket=bucket,
        )
        for name, bucket in blobs
    ]
    blob_infos = list(zip(blobs, infos))
    paths = [
        self.db.get_path(info.identifier, blob[1])
        for blob, info in blob_infos
    ]
    with patch_datadog() as stats:
        self.assertTrue(self.db.bulk_delete(paths), 'delete failed')
    self.assertEqual(sum(s for s in stats["commcare.blobs.deleted.count"]), 2)
    # 2 blobs x len(b"content-test.5") == 2 x 14 == 28 bytes
    self.assertEqual(sum(s for s in stats["commcare.blobs.deleted.bytes"]), 28)
    for blob, info in blob_infos:
        with self.assertRaises(mod.NotFound):
            self.db.get(info.identifier, blob[1])
    return paths
def test_put_and_size(self):
    """put() should emit added.count/added.bytes metrics matching the blob size."""
    # NOTE: new_meta() presumably builds a fresh blob meta — the local is
    # named accordingly (the original misleadingly called it "identifier").
    fresh_meta = new_meta()
    content = b"content"
    with patch_datadog() as stats:
        saved = self.db.put(BytesIO(content), meta=fresh_meta)
    expected_size = len(content)
    self.assertEqual(sum(stats["commcare.blobs.added.count"]), 1)
    self.assertEqual(sum(stats["commcare.blobs.added.bytes"]), expected_size)
    self.assertEqual(self.db.size(key=saved.key), expected_size)
def _get_stats(self, changes, batch=False):
    """Record *changes* on a FakePillow and return the captured datadog stats.

    When ``batch`` is true the changes are passed to
    ``_record_datadog_metrics`` in one call; otherwise each change is fed
    to ``_record_change_in_datadog`` individually.
    """
    fake_pillow = FakePillow()
    with patch_datadog() as captured:
        if not batch:
            for single_change in changes:
                fake_pillow._record_change_in_datadog(single_change, 2)
        else:
            fake_pillow._record_datadog_metrics(changes, 5)
    return captured
def test_put_and_size(self):
    """put() should emit one added.count metric and added.bytes equal to the blob size."""
    blob_id = get_id()
    payload = b"content"
    with patch_datadog() as stats:
        stored = self.db.put(BytesIO(payload), blob_id)
    expected = len(payload)
    self.assertEqual(sum(stats["commcare.blobs.added.count"]), 1)
    self.assertEqual(sum(stats["commcare.blobs.added.bytes"]), expected)
    self.assertEqual(self.db.size(stored.identifier), expected)
def test_timings(self):
    """The migration should report a duration metric for every tracked phase."""
    with patch_datadog() as received_stats:
        self._do_migration_and_assert_flags(self.domain_name)
    expected_prefixes = (
        'commcare.couch_sql_migration.unprocessed_cases.count.duration:',
        'commcare.couch_sql_migration.main_forms.count.duration:',
        'commcare.couch_sql_migration.unprocessed_forms.count.duration:',
        'commcare.couch_sql_migration.case_diffs.count.duration:',
        'commcare.couch_sql_migration.count.duration:',
    )
    for prefix in expected_prefixes:
        # at least one received stat must start with each tracked prefix
        self.assertTrue(any(stat.startswith(prefix) for stat in received_stats))
def test_delete_bucket(self):
    """Deleting an entire bucket removes its blob and emits delete metrics."""
    bucket = join("doctype", "ys7v136b")
    stored = self.db.put(BytesIO(b"content"), get_id(), bucket=bucket)
    with patch_datadog() as stats:
        self.assertTrue(self.db.delete(bucket=bucket))
    self.assertEqual(sum(stats["commcare.blobs.deleted.count"]), 1)
    # one 7-byte blob (b"content") was deleted
    self.assertEqual(sum(stats["commcare.blobs.deleted.bytes"]), 7)
    self.assertTrue(stored.identifier)
    with self.assertRaises(mod.NotFound):
        self.db.get(stored.identifier, bucket=bucket)
def test_bulk_delete(self):
    """bulk_delete(metas=...) removes every blob and reports count/bytes metrics.

    Returns the deleted metas so callers can make further assertions.
    """
    metas = []
    for key in ['test.5', 'test.6']:
        payload = "content-{}".format(key).encode('utf-8')
        metas.append(self.db.put(BytesIO(payload), meta=new_meta()))
    with patch_datadog() as stats:
        self.assertTrue(self.db.bulk_delete(metas=metas), 'delete failed')
    self.assertEqual(sum(stats["commcare.blobs.deleted.count"]), 2)
    # 2 blobs x 14 bytes ("content-test.5") == 28
    self.assertEqual(sum(stats["commcare.blobs.deleted.bytes"]), 28)
    for deleted_meta in metas:
        with self.assertRaises(mod.NotFound):
            self.db.get(key=deleted_meta.key)
    return metas