def test_database_query(self):
    """Install a bundle, then check that every identity name form
    (sname, vname, fqname, vid) of each dataset and partition resolves
    back to the correct vid."""
    from ambry.orm import Dataset, Partition
    from ambry.library.query import Resolver
    from ambry.library.database import ROOT_CONFIG_NAME_V

    f, db = self.new_db()
    db.create()
    db.install_bundle(self.bundle)

    #
    # Get a bunch of names from the existing bundles. This will check the
    # simple queries for single objects.
    #
    tests = {}

    for r in db.session.query(Dataset, Partition).filter(
            Dataset.vid != ROOT_CONFIG_NAME_V).all():

        di = r.Dataset.identity
        tests[di.sname] = di.vid
        tests[di.vname] = di.vid
        tests[di.fqname] = di.vid
        tests[di.vid] = di.vid

        pi = r.Partition.identity
        tests[pi.sname] = pi.vid
        tests[pi.vname] = pi.vid
        tests[pi.fqname] = pi.vid
        tests[pi.vid] = pi.vid

    r = Resolver(db.session)

    for ref, vid in tests.items():
        ip, results = r.resolve_ref_all(ref)

        # Every reference form must resolve to exactly one result.
        self.assertEqual(1, len(results))

        first = results.values().pop(0)  # py2: dict.values() is a list

        # A partition reference resolves to a dataset carrying the
        # matching partition; compare against the partition vid then.
        vid2 = first.vid if not first.partitions \
            else first.partitions.values()[0].vid

        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(vid, vid2)
def test_database_query(self): from ambry.orm import Dataset, Partition from ambry.library.query import Resolver from ambry.library.database import ROOT_CONFIG_NAME_V f,db = self.new_db() print 'Testing ', f db.create() db.install_bundle(self.bundle) # # Get a bunch of names from the existing bundles. This will check the simple # queries for single objects. # tests = {} for r in db.session.query(Dataset, Partition).filter(Dataset.vid != ROOT_CONFIG_NAME_V).all(): di = r.Dataset.identity tests[di.sname] = di.vid tests[di.vname] = di.vid tests[di.fqname] = di.vid tests[di.vid] = di.vid pi = r.Partition.identity tests[pi.sname] = pi.vid tests[pi.vname] = pi.vid tests[pi.fqname] = pi.vid tests[pi.vid] = pi.vid r = Resolver(db.session) for ref, vid in tests.items(): ip, results = r.resolve_ref_all(ref) self.assertEqual(1, len(results)) first= results.values().pop(0) vid2 = first.vid if not first.partitions else first.partitions.values()[0].vid self.assertEquals(vid, vid2)
def test_version_resolver(self):
    """Resolve bundle and partition references by name, vname, cache key
    and semantic-version expression, both through the Resolver and
    through the library interface."""
    # These local imports are present in the sibling variant of this
    # test; without them this variant raises NameError — assuming no
    # module-level imports exist (TODO confirm at file top).
    from ambry.library.query import Resolver
    import json

    l = self.get_library()
    db = l.database
    db.enable_delete = True
    db.drop()
    db.create()

    l.put_bundle(self.bundle)

    r = Resolver(db.session)

    vname = 'source-dataset-subset-variation-0.0.1'
    name = 'source-dataset-subset-variation'

    # Versioned and unversioned names resolve to the same bundle.
    ip, results = r.resolve_ref_one(vname)
    self.assertEqual(vname, results.vname)

    ip, results = r.resolve_ref_one(name)
    self.assertEqual(vname, results.vname)

    # Cache keys
    ip, result = r.resolve_ref_one('source/dataset-subset-variation-0.0.1.db')
    self.assertEqual(
        'source-dataset-subset-variation-0.0.1~diEGPXmDC8001', str(result))

    ip, result = r.resolve_ref_one(
        'source/dataset-subset-variation-0.0.1/tthree.db')
    self.assertEqual(
        'source-dataset-subset-variation-tthree-0.0.1~piEGPXmDC8001001',
        str(result.partition))

    # Now in the library, which has a slightly different interface.
    ident = l.resolve(vname)
    self.assertEqual(vname, ident.vname)

    ident = l.resolve('source-dataset-subset-variation-0.0.1~diEGPXmDC8001')
    self.assertEqual('diEGPXmDC8001', ident.vid)

    ident = l.resolve(
        'source-dataset-subset-variation-tthree-0.0.1~piEGPXmDC8001001')
    self.assertEqual('diEGPXmDC8001', ident.vid)
    self.assertEqual('piEGPXmDC8001001', ident.partition.vid)

    #
    # Test semantic version matching
    # WARNING! The Mock object below only works for testing semantic versions.
    #
    with open(self.bundle.filesystem.path('meta', 'version_datasets.json')) as f:
        datasets = json.loads(f.read())

    # This mock object only works on datasets; it will return all of the
    # partitions for each dataset, and each of the datasets. It is only
    # for testing version filtering.
    class TestResolver(Resolver):

        def _resolve_ref(self, ref, location=None):
            # Import is required here (mirrors the sibling variant);
            # Identity is not otherwise in scope in this method.
            from ambry.identity import Identity
            ip = Identity.classify(ref)
            return ip, {k: Identity.from_dict(ds)
                        for k, ds in datasets.items()}

    r = TestResolver(db.session)

    # Exact match
    ip, result = r.resolve_ref_one('source-dataset-subset-variation-==1.10.1')
    self.assertEqual(
        'source-dataset-subset-variation-1.10.1~diEGPXmDC8001', str(result))

    # Range matches pick the highest version inside the range.
    ip, result = r.resolve_ref_one(
        'source-dataset-subset-variation->=1.10.1,<3.0.0')
    self.assertEqual(
        'source-dataset-subset-variation-2.20.2~diEGPXmDC8002', str(result))

    ip, result = r.resolve_ref_one(
        'source-dataset-subset-variation->=1.10.1,<2.0.0')
    self.assertEqual(
        'source-dataset-subset-variation-1.10.1~diEGPXmDC8001', str(result))

    ip, result = r.resolve_ref_one('source-dataset-subset-variation->2.0.0')
    self.assertEqual(
        'source-dataset-subset-variation-3.30.3~diEGPXmDC8003', str(result))

    ip, result = r.resolve_ref_one('source-dataset-subset-variation-<=3.0.0')
    self.assertEqual(
        'source-dataset-subset-variation-2.20.2~diEGPXmDC8002', str(result))
def test_versions(self):
    """Build and install three successive revisions of the test bundle,
    then dump all dataset/partition identities to
    meta/version_datasets.json for the semantic-version tests."""
    # These local imports exist in the sibling variant of this test;
    # without them this variant raises NameError — assuming no
    # module-level imports exist (TODO confirm at file top).
    from ambry.run import get_runconfig
    from ambry.library.query import Resolver
    import shutil
    import json

    idnt = self.bundle.identity

    l = self.get_library()

    orig = os.path.join(self.bundle.bundle_dir, 'bundle.yaml')
    save = os.path.join(self.bundle.bundle_dir, 'bundle.yaml.save')
    shutil.copyfile(orig, save)

    datasets = {}

    try:
        for i in [1, 2, 3]:
            # Bump the identity to a new revision/version and write it
            # back into the bundle metadata.
            idnt._on.revision = i
            idnt.name.version_major = i
            idnt.name.version_minor = i * 10

            bundle = Bundle()
            get_runconfig.clear()  # clear runconfig cache

            bundle.metadata.load_all()
            bundle.metadata.identity = idnt.ident_dict
            bundle.metadata.names = idnt.names_dict
            bundle.metadata.write_to_dir(write_all=True)

            # Rebuild from the rewritten metadata.
            bundle = Bundle()
            bundle.clean()
            bundle.pre_prepare()
            bundle.prepare()
            bundle.post_prepare()

            bundle.pre_build()
            bundle.build_small()
            # bundle.build()
            bundle.post_build()

            bundle = Bundle()
            l.put_bundle(bundle)
    finally:
        # Always restore the original bundle.yaml, even if a build fails.
        os.rename(save, orig)

    #
    # Save the list of datasets for version analysis in other
    # tests
    #
    db = l.database

    for d in db.list(with_partitions=True).values():
        datasets[d.vid] = d.dict
        datasets[d.vid]['partitions'] = {}

        for p_vid, p in d.partitions.items():
            datasets[d.vid]['partitions'][p_vid] = p.dict

    with open(self.bundle.filesystem.path('meta', 'version_datasets.json'), 'w') as f:
        f.write(json.dumps(datasets))

    r = Resolver(db.session)

    # ref = idnt.id_
    ref = 'source-dataset-subset-variation-=2.20'
    ip, results = r.resolve_ref_all(ref)
def test_versions(self): import testbundle.bundle from ambry.run import get_runconfig from ambry.library.query import Resolver import shutil idnt = self.bundle.identity l = self.get_library() l.purge() orig = os.path.join(self.bundle.bundle_dir,'bundle.yaml') save = os.path.join(self.bundle.bundle_dir,'bundle.yaml.save') shutil.copyfile(orig,save) datasets = {} try: for i in [1,2,3]: idnt._on.revision = i idnt.name.version_major = i idnt.name.version_minor = i*10 bundle = Bundle() bundle.config.rewrite(identity=idnt.ident_dict, names=idnt.names_dict) get_runconfig.clear() #clear runconfig cache print 'Building version {}'.format(i) bundle = Bundle() bundle.clean() bundle.pre_prepare() bundle.prepare() bundle.post_prepare() bundle.pre_build() bundle.build_small() #bundle.build() bundle.post_build() bundle = Bundle() print "Installing ", bundle.identity.vname l.put(bundle) finally: pass os.rename(save, orig) # # Save the list of datasets for version analysis in other # tests # db = l.database for d in db.list().values(): datasets[d.vid] = d.dict datasets[d.vid]['partitions'] = {} for p_vid, p in d.partitions.items(): datasets[d.vid]['partitions'][p_vid] = p.dict with open(self.bundle.filesystem.path('meta','version_datasets.json'),'w') as f: import json f.write(json.dumps(datasets)) r = Resolver(db.session) ref = idnt.id_ ref = "source-dataset-subset-variation-=2.20" ip, results = r.resolve_ref_all(ref) for row in results: print row
def test_version_resolver(self):
    """Resolve bundle and partition references by name, vname, cache key
    and semantic-version expression, both through the Resolver and
    through the library interface."""
    from ambry.library.query import Resolver
    import json

    l = self.get_library()
    db = l.database
    db.enable_delete = True
    db.drop()
    db.create()

    l.put_bundle(self.bundle)

    r = Resolver(db.session)

    vname = 'source-dataset-subset-variation-0.0.1'
    name = 'source-dataset-subset-variation'

    # Versioned and unversioned names resolve to the same bundle.
    ip, results = r.resolve_ref_one(vname)
    self.assertEqual(vname, results.vname)

    ip, results = r.resolve_ref_one(name)
    self.assertEqual(vname, results.vname)

    # Cache keys
    ip, result = r.resolve_ref_one(
        'source/dataset-subset-variation-0.0.1.db')
    self.assertEqual(
        'source-dataset-subset-variation-0.0.1~diEGPXmDC8001', str(result))

    ip, result = r.resolve_ref_one(
        'source/dataset-subset-variation-0.0.1/tthree.db')
    self.assertEqual(
        'source-dataset-subset-variation-tthree-0.0.1~piEGPXmDC8003001',
        str(result.partition))

    # Now in the library, which has a slightly different interface.
    ident = l.resolve(vname)
    self.assertEqual(vname, ident.vname)

    ident = l.resolve(
        'source-dataset-subset-variation-0.0.1~diEGPXmDC8001')
    self.assertEqual('diEGPXmDC8001', ident.vid)

    ident = l.resolve(
        'source-dataset-subset-variation-tthree-0.0.1~piEGPXmDC8001001')
    self.assertEqual('diEGPXmDC8001', ident.vid)
    self.assertEqual('piEGPXmDC8001001', ident.partition.vid)

    #
    # Test semantic version matching
    # WARNING! The Mock object below only works for testing semantic versions.
    #
    with open(self.bundle.filesystem.path('meta', 'version_datasets.json')) as f:
        datasets = json.loads(f.read())

    # This mock object only works on datasets; it will return all of the
    # partitions for each dataset, and each of the datasets. It is only
    # for testing version filtering.
    class TestResolver(Resolver):

        def _resolve_ref(self, ref, location=None):
            from ambry.identity import Identity
            ip = Identity.classify(ref)
            return ip, {k: Identity.from_dict(ds)
                        for k, ds in datasets.items()}

    r = TestResolver(db.session)

    # Exact match
    ip, result = r.resolve_ref_one(
        'source-dataset-subset-variation-==1.10.1')
    self.assertEqual(
        'source-dataset-subset-variation-1.10.1~diEGPXmDC8001', str(result))

    # Range matches pick the highest version inside the range.
    ip, result = r.resolve_ref_one(
        'source-dataset-subset-variation->=1.10.1,<3.0.0')
    self.assertEqual(
        'source-dataset-subset-variation-2.20.2~diEGPXmDC8002', str(result))

    ip, result = r.resolve_ref_one(
        'source-dataset-subset-variation->=1.10.1,<2.0.0')
    self.assertEqual(
        'source-dataset-subset-variation-1.10.1~diEGPXmDC8001', str(result))

    ip, result = r.resolve_ref_one(
        'source-dataset-subset-variation->2.0.0')
    self.assertEqual(
        'source-dataset-subset-variation-3.30.3~diEGPXmDC8003', str(result))

    ip, result = r.resolve_ref_one(
        'source-dataset-subset-variation-<=3.0.0')
    self.assertEqual(
        'source-dataset-subset-variation-2.20.2~diEGPXmDC8002', str(result))
def test_versions(self): from ambry.run import get_runconfig from ambry.library.query import Resolver import shutil idnt = self.bundle.identity l = self.get_library() l.purge() orig = os.path.join(self.bundle.bundle_dir, 'bundle.yaml') save = os.path.join(self.bundle.bundle_dir, 'bundle.yaml.save') shutil.copyfile(orig, save) datasets = {} try: for i in [1, 2, 3]: idnt._on.revision = i idnt.name.version_major = i idnt.name.version_minor = i * 10 bundle = Bundle() get_runconfig.clear() #clear runconfig cache bundle.metadata.load_all() bundle.metadata.identity = idnt.ident_dict bundle.metadata.names = idnt.names_dict bundle.metadata.write_to_dir(write_all=True) bundle = Bundle() bundle.clean() bundle.pre_prepare() bundle.prepare() bundle.post_prepare() bundle.pre_build() bundle.build_small() #bundle.build() bundle.post_build() bundle = Bundle() l.put_bundle(bundle) finally: pass os.rename(save, orig) # # Save the list of datasets for version analysis in other # tests # db = l.database for d in db.list(with_partitions=True).values(): datasets[d.vid] = d.dict datasets[d.vid]['partitions'] = {} for p_vid, p in d.partitions.items(): datasets[d.vid]['partitions'][p_vid] = p.dict with open(self.bundle.filesystem.path('meta', 'version_datasets.json'), 'w') as f: import json f.write(json.dumps(datasets)) r = Resolver(db.session) ref = idnt.id_ ref = "source-dataset-subset-variation-=2.20" ip, results = r.resolve_ref_all(ref) for row in results: print row