def testGetCollection(self):
    """getCollectionForId maps an object-id prefix to its collection name.

    Valid prefixes: 'g' -> "groups", 'd' -> "datasets", 't' -> "datatypes".
    An unknown prefix or a None id must raise ValueError.
    """
    group_id = "g-314d61b8-9954-11e6-a733-3c15c2da029e"
    dataset_id = "d-4c48f3ae-9954-11e6-a3cd-3c15c2da029e"
    ctype_id = "t-8c785f1c-9953-11e6-9bc2-0242ac110005"
    bad_id = "x-59647858-9954-11e6-95d2-3c15c2da029e"

    self.assertEqual(getCollectionForId(group_id), "groups")
    self.assertEqual(getCollectionForId(dataset_id), "datasets")
    self.assertEqual(getCollectionForId(ctype_id), "datatypes")

    # Unknown prefix and None must both be rejected.  assertRaises gives a
    # clear "ValueError not raised" message instead of the opaque
    # try/assertTrue(False)/except pattern.
    for invalid in (bad_id, None):
        with self.assertRaises(ValueError):
            getCollectionForId(invalid)
def testSchema2Id(self):
    """Exercise the schema-2 id helpers.

    Covers: collection lookup for ids created under a common root,
    rejection of chunk ids by getCollectionForId, root-id detection,
    and the S3-key round trip (getS3Key / getObjId / isS3ObjKey).
    """
    root_id = createObjId("roots")
    group_id = createObjId("groups", rootid=root_id)
    dataset_id = createObjId("datasets", rootid=root_id)
    ctype_id = createObjId("datatypes", rootid=root_id)

    # A root id is itself a group id.
    self.assertEqual(getCollectionForId(root_id), "groups")
    self.assertEqual(getCollectionForId(group_id), "groups")
    self.assertEqual(getCollectionForId(dataset_id), "datasets")
    self.assertEqual(getCollectionForId(ctype_id), "datatypes")

    # Derive chunk ids from the dataset id (plain and partitioned forms).
    chunk_id = 'c' + dataset_id[1:] + "_1_2"
    chunk_partition_id = 'c42-' + dataset_id[2:] + "_1_2"

    # Chunk ids do not belong to any collection.
    for chunk in (chunk_id, chunk_partition_id):
        with self.assertRaises(ValueError):
            getCollectionForId(chunk)

    valid_ids = (group_id, dataset_id, ctype_id, chunk_id,
                 chunk_partition_id, root_id)
    s3prefix = getS3Key(root_id)
    self.assertTrue(s3prefix.endswith("/.group.json"))
    # Keep the trailing '/' so every per-object key shares this prefix.
    s3prefix = s3prefix[:-(len(".group.json"))]

    for oid in valid_ids:
        self.assertTrue(len(oid) >= 38)
        parts = oid.split('-')
        self.assertEqual(len(parts), 6)
        self.assertIn(oid[0], ('g', 'd', 't', 'c'))
        self.assertTrue(isSchema2Id(oid))
        if oid == root_id:
            self.assertTrue(isRootObjId(oid))
        else:
            self.assertFalse(isRootObjId(oid))
        self.assertEqual(getRootObjId(oid), root_id)
        # S3 key round trip: key lives under the root prefix and maps
        # back to the original object id.
        s3key = getS3Key(oid)
        self.assertTrue(s3key.startswith(s3prefix))
        self.assertEqual(getObjId(s3key), oid)
        self.assertTrue(isS3ObjKey(s3key))