def push_cb(expect, action, metadata, time):
    # Callback invoked during a push; checks the action against the expected set
    # and echoes the pushed identity's cache key.
    # NOTE(review): `self` is not a parameter -- this function only works when
    # defined as a closure inside a test method where `self` is in the enclosing
    # scope. Confirm it is never used free-standing.
    # NOTE(review): the `time` parameter shadows the stdlib `time` module name
    # and is never used here.
    import json
    # Each pushed action must be one of the expected actions.
    self.assertIn(action, expect)
    # metadata['identity'] is a JSON-encoded identity dict.
    identity = Identity.from_dict(json.loads(metadata['identity']))
    # Python 2 print statement -- this file predates Python 3.
    print action, identity.cache_key
def post_dataset(did, library):
    """Accept a payload that describes a bundle in the remote.

    Download the bundle from the remote and install it.

    :param did: Dataset reference; must match either the id or the vid of the
        identity in the request payload.
    :param library: Library used to load the bundle through its cache/remote.
    :return: The installed bundle's identity as a dict.
    :raises exc.BadRequest: if the payload identity has no md5 checksum.
    :raises exc.Conflict: if ``did`` does not match the payload identity.
    :raises exc.NotFound: if the bundle file cannot be fetched.
    """
    from ambry.identity import Identity

    identity = Identity.from_dict(request.json)

    if not identity.md5:
        raise exc.BadRequest("The identity must have the md5 value set")

    # Accept either the unversioned or the versioned dataset id.
    if did not in (identity.id_, identity.vid):
        raise exc.Conflict("Dataset address '{}' doesn't match payload id '{}'".format(did, identity.vid))

    # Need to go directly to the remote, not library.get(), because the
    # dataset hasn't been loaded yet. The original called library.load()
    # twice with identical arguments; load once and reuse the result.
    b = library.load(identity.cache_key, identity.md5)

    if not b:
        logger.error("Failed to get {} from cache while posting dataset".format(identity.cache_key))
        logger.error(" cache = {}".format(library.cache))
        logger.error(" remote = {}".format(library.upstream))
        raise exc.NotFound("Didn't get bundle file for cache key {} ".format(identity.cache_key))

    logger.debug("Loading {} for identity {} ".format(b, identity))

    return b.identity.dict
def post_partition(did, pid, library):
    """Record a remote partition file for an existing bundle.

    :param did: Dataset reference the partition belongs to.
    :param pid: Partition reference; must match either the id or the vid of
        the identity in the request payload.
    :param library: Library whose database records the remote file.
    :return: The partition identity as a dict.
    :raises exc.NotFound: if the bundle or partition does not exist.
    :raises exc.Conflict: if ``pid`` does not match the payload identity.
    """
    from ambry.identity import Identity

    b = library.get(did)

    if not b:
        raise exc.NotFound("No bundle found for id {}".format(did))

    payload = request.json
    identity = Identity.from_dict(payload['identity'])

    p = b.partitions.get(pid)

    if not p:
        raise exc.NotFound(
            "No partition for {} in dataset {}".format(pid, did))

    # Accept either the unversioned or the versioned partition id.
    # (Idiom fix: was `not pid in set([...])`.)
    if pid not in (identity.id_, identity.vid):
        raise exc.Conflict(
            "Partition address '{}' doesn't match payload id '{}'".format(pid, identity.vid))

    library.database.add_remote_file(identity)

    return identity.dict
def init_dataset_number(self):
    """Fetch a new dataset number from the number server and rewrite the
    identity and names sections of the configuration with it.

    :raises Exception: re-raises whatever the number server request raised,
        after logging it.
    """
    from ambry.identity import Identity, DatasetNumber, NumberServer

    try:
        ns = NumberServer(**self.group('numbers'))
        ds = ns.next()
    except Exception as e:
        from ..util import get_logger
        logger = get_logger(__name__)
        # Fix: was `e.message`, which is deprecated and absent in Python 3;
        # formatting the exception itself is safe in both 2 and 3.
        # (Also fixed the "number sever" typo in the message.)
        logger.error("Failed to get number from number server; need to use self assigned number: {}"
                     .format(e))
        raise

    self.identity['id'] = str(ds)

    ident = Identity.from_dict(self.identity)

    # NOTE(review): self.identity is indexed like a dict above but read with
    # attribute access here -- presumably a config accessor object supporting
    # both; confirm.
    ident._on = ds.rev(self.identity.revision)

    self.rewrite(**dict(
        identity=ident.ident_dict,
        names=ident.names_dict
    ))
def _resolve_ref(self, ref, location=None):
    # Classify a reference string and return the classification together with
    # matching dataset identities keyed by their original key.
    # NOTE(review): `datasets` is not defined in this function or its
    # parameters -- calling this will raise NameError. Lines were probably
    # lost, or it should come from a lookup driven by `ip`; check VCS history.
    # NOTE(review): the `location` parameter is accepted but never used.
    from ambry.identity import Identity

    ip = Identity.classify(ref)

    return ip, {
        k: Identity.from_dict(ds) for k, ds in datasets.items()
    }
def test_identity_from_dict(self):
    """Identities survive a round trip through Identity.from_dict()."""
    base_name = Name(source='source.com', dataset='foobar',
                     variation='orig', version='0.0.1')
    number = DatasetNumber(10000, 1, assignment_class='registered')

    dataset_ident = Identity(base_name, number)
    partition_ident = dataset_ident.as_partition(7)

    # Rebuild the dataset identity from its dict form.
    rebuilt = Identity.from_dict(dataset_ident.dict)
    self.assertIsInstance(rebuilt, Identity)
    self.assertEqual(rebuilt.fqname, dataset_ident.fqname)

    # Rebuild the partition identity the same way and check its cache key.
    rebuilt_partition = Identity.from_dict(partition_ident.dict)
    self.assertEqual('source.com/foobar-orig-0.0.1', rebuilt_partition.cache_key)
def test_identity_from_dict(self):
    """A dataset identity and its partition both round-trip via from_dict()."""
    nm = Name(source='source.com',
              dataset='foobar',
              variation='orig',
              version='0.0.1')
    dn = DatasetNumber(10000, 1, assignment_class='registered')

    original = Identity(nm, dn)
    original_partition = original.as_partition(7)

    # Serialize both identities to dicts up front.
    as_dict = original.dict
    partition_as_dict = original_partition.dict

    restored = Identity.from_dict(as_dict)
    self.assertIsInstance(restored, Identity)
    self.assertEqual(restored.fqname, original.fqname)

    restored_partition = Identity.from_dict(partition_as_dict)
    self.assertEqual('source.com/foobar-orig-0.0.1',
                     restored_partition.cache_key)
def test_assignment(self):
    """Assigning a modified ident_dict back to t.identity updates its fields."""
    from ambry.identity import Identity

    top = Top(yaml.load(self.yaml_config))

    # Parsing then dumping the config should round-trip to the same text.
    self.assertEquals(self.yaml_config.strip(' \n'), top.dump().strip(' \n'))

    # Pull the identity out, tweak one field, and assign it back.
    modified = Identity.from_dict(dict(top.identity)).ident_dict
    modified['variation'] = 'v2'
    top.identity = modified

    self.assertEquals('v2', top.identity.variation)
def test_assignment(self):
    """Writing an ident_dict to the identity attribute changes the identity."""
    from ambry.identity import Identity

    config_root = Top(yaml.load(self.yaml_config))

    # Round-trip check: dump() reproduces the original YAML text.
    expected = self.yaml_config.strip(' \n')
    self.assertEquals(expected, config_root.dump().strip(' \n'))

    ident = Identity.from_dict(dict(config_root.identity))
    ident_fields = ident.ident_dict
    ident_fields['variation'] = 'v2'

    config_root.identity = ident_fields
    self.assertEquals('v2', config_root.identity.variation)
def put(self, metadata):
    """Post bundle or partition metadata to the remote API.

    The JSON-encoded 'identity' entry in `metadata` is replaced in place with
    its parsed dict form before the payload is posted.
    """
    import json
    from ambry.identity import Identity

    metadata['identity'] = json.loads(metadata['identity'])
    identity = Identity.from_dict(metadata['identity'])

    if not identity.is_bundle:
        # Partitions are posted under the endpoint of their owning dataset.
        r = self.remote.datasets(identity.as_dataset.vid).partitions(identity.vid).post(metadata)
    else:
        r = self.remote.datasets(identity.vid).post(metadata)

    raise_for_status(r)
    return r
def new_from_bundle_config(self, config):
    """Create a new bundle, or link to an existing one, based on the identity
    in config data.

    :param config: A Dict form of a bundle.yaml file
    :return: The new Bundle, committed and in the NEW state.
    """
    identity = Identity.from_dict(config['identity'])

    # Reuse the dataset if it already exists; otherwise create it.
    dataset = self._db.dataset(identity.vid, exception=False)
    if not dataset:
        dataset = self._db.new_dataset(**identity.dict)

    bundle = Bundle(dataset, self)
    bundle.commit()
    bundle.state = Bundle.STATES.NEW
    bundle.set_last_access(Bundle.STATES.NEW)

    return bundle
def new_from_bundle_config(self, config):
    """Create a new bundle, or link to an existing one, based on the identity
    in config data.

    :param config: A Dict form of a bundle.yaml file
    :return: A committed Bundle marked as NEW.
    """
    ident = Identity.from_dict(config['identity'])

    ds = self._db.dataset(ident.vid, exception=False)
    # No existing dataset for this vid -- create one from the identity.
    if not ds:
        ds = self._db.new_dataset(**ident.dict)

    b = Bundle(ds, self)
    b.commit()

    # Record the freshly-created state.
    b.state = Bundle.STATES.NEW
    b.set_last_access(Bundle.STATES.NEW)
    return b
def _resolve_ref(self, ref, location=None):
    # Classify a reference string and return the classification together with
    # matching dataset identities keyed by their original key.
    # NOTE(review): `datasets` is undefined here -- this raises NameError when
    # called. Either lines were lost or it should be derived from `ip`; check
    # VCS history. Also assumes `Identity` is imported at file level.
    # NOTE(review): the `location` parameter is accepted but never used.
    ip = Identity.classify(ref)
    return ip, {k: Identity.from_dict(ds) for k, ds in datasets.items()}