def seed_depend_dict(mf, dic):
    """Update 'dic', a dict mapping each dependency package name to the
    list of package FMRIs that declare a dependency on it.

    'mf' is the manifest whose depend actions are scanned.  Versions are
    dropped and all dependency types except 'incorporate' are
    considered."""

    name = mf.fmri
    for action in mf.gen_actions_by_type("depend"):
        if action.attrs.get("type") == "incorporate":
            continue
        dep = action.attrs["fmri"]
        # require-any/group-any dependencies carry a list of FMRIs;
        # normalize to a list so both cases share one code path
        # (previously the string and list cases were duplicated).
        if isinstance(dep, six.string_types):
            deps = [dep]
        elif isinstance(dep, list):
            deps = dep
        else:
            # Unknown attribute shape; ignore, as before.
            continue
        try:
            for d in deps:
                f = fmri.PkgFmri(d)
                dic.setdefault(f.get_name(), []).append(name)
        # If we have a bad FMRI, this will be picked up
        # by pkglint.action006 and pkglint.action009.
        except fmri.FmriError:
            pass
def test_3_dependencies(self):
    """Verify that an install or uninstall of a single package with a
    single dependency sends the expected intent information."""

    # Publish both packages; presumably the second depends on the
    # first so installing one pulls in the other -- TODO confirm
    # against the package definitions elsewhere in this suite.
    plist = self.pkgsend_bulk(self.durl, (self.foo10, self.bar10))
    api_obj = self.image_create(self.durl)
    self.__do_install(api_obj, ["[email protected]"])
    self.__do_uninstall(api_obj, ["bar", "foo"])

    # Anarchic FMRIs (publisher stripped) match the form recorded
    # in the intent log.
    foo = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True)
    bar = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True)

    # Only testing for process; no need to re-test for evaluate.
    entries = self.get_intent_entries()
    # The package named on the command line is logged with the
    # literal reference the user supplied.
    self.assertTrue(
        self.intent_entry_exists(entries, {
            "operation": "install",
            "new_fmri": bar,
            "reference": "[email protected]"
        }))
    # The dependency is logged without a "reference" key since it
    # was not named explicitly.
    self.assertTrue(
        self.intent_entry_exists(entries, {
            "operation": "install",
            "new_fmri": foo,
        }))
def test_pkgname_grammar(self):
    """Exercise the FMRI package-name grammar with illegal and legal
    character combinations."""

    # Characters that may never appear anywhere in a name.
    for bad in self.pkg_name_valid_chars["never"]:
        self.assertRaises(fmri.IllegalFmri, fmri.PkgFmri,
            "invalid{0}[email protected],5.11-0".format(bad))

    # Characters legal only after the first position must be
    # rejected when they lead the name...
    for bad in self.pkg_name_valid_chars["after-first"]:
        self.assertRaises(fmri.IllegalFmri, fmri.PkgFmri,
            "{0}[email protected],5.11-0".format(bad))

    # ...and when they lead a path component of the name.
    for bad in self.pkg_name_valid_chars["after-first"]:
        self.assertRaises(fmri.IllegalFmri, fmri.PkgFmri,
            "test/{0}[email protected],5.11-0".format(bad))

    # Some positive tests
    for lead in self.pkg_name_valid_chars["always"]:
        for follow in self.pkg_name_valid_chars["after-first"]:
            fmri.PkgFmri(
                "{0}{1}[email protected],5.11-0".format(lead, follow))

    # Test '/' == 'pkg:/'; '//' == 'pkg://'.
    for vn in ("/[email protected],5.11-0",
        "//publisher/[email protected],5.11-0"):
        pfmri = fmri.PkgFmri(vn)
        self.assertEqual(pfmri.pkg_name, "test")
        expected_pub = "publisher" if vn.startswith("//") else None
        self.assertEqual(pfmri.publisher, expected_pub)
def testpublisher(self):
    """Verify that different ways of specifying the publisher
    information in an FMRI produce the same results."""

    # All of these spellings are anonymous (no publisher).
    for spec in ("pkg:///name", "pkg:/name", "///name", "/name"):
        parsed = fmri.PkgFmri(spec)
        self.assertEqual(parsed.publisher, None)

    # Both of these spellings name the publisher "test".
    for spec in ("pkg://test/name", "//test/name"):
        parsed = fmri.PkgFmri(spec)
        self.assertEqual(parsed.publisher, "test")
def get_dep_fmri_str(fmri_str, pkg, act, latest_ref_pkgs, reversioned_pkgs,
    ref_xport):
    """Get the adjusted dependency FMRI of package 'pkg' specified in
    action 'act' based on if the FMRI belongs to a reversioned package
    or not.

    'fmri_str' contains the original FMRI string from the manifest to
    be adjusted.  This has to be passed in separately since in case of
    require-any or group-any dependencies, an action can contain
    multiple FMRIs.

    Returns either 'fmri_str' unchanged, the reference package's
    version of the FMRI, or a substituted FMRI from the repo.
    """

    dpfmri = fmri.PkgFmri(fmri_str)

    # Versionless dependencies don't need to be changed.
    if not dpfmri.version:
        return fmri_str

    # Dep package hasn't been changed, no adjustment necessary.
    if dpfmri.get_pkg_stem() not in reversioned_pkgs:
        return fmri_str

    # Find the dependency action of the reference package
    # and replace the current version with it.
    try:
        ref_mani = ref_xport.get_manifest(latest_ref_pkgs[pkg])
    except KeyError:
        # This package is not in the ref repo so we just substitute the
        # dependency.
        return subs_undef_fmri_str(fmri_str, latest_ref_pkgs)

    for ra in ref_mani.gen_actions_by_type("depend"):
        # Any difference other than the FMRI means we
        # can't use this action as a reference.
        diffs = act.differences(ra)
        if "fmri" in diffs:
            diffs.remove("fmri")
        if diffs:
            continue

        # attrlist() normalizes single-FMRI and multi-FMRI
        # (require-any/group-any) actions to a list.
        fmris = ra.attrlist("fmri")

        for rf in fmris:
            rpfmri = fmri.PkgFmri(rf)
            if rpfmri.get_pkg_stem() != dpfmri.get_pkg_stem():
                continue

            # Only substitute dependency if it actually
            # changed.
            if not rpfmri.version \
                or rpfmri.get_version() != dpfmri.get_version():
                return rf

            return fmri_str

    # If a varcet changed we might not find the matching action.
    return subs_undef_fmri_str(fmri_str, latest_ref_pkgs)
def setUp(self):
    """Publish the test packages and cache the anarchic FMRI strings
    the tests compare against."""
    pkg5unittest.SingleDepotTestCase.setUp(self)

    self.sent_pkgs = self.pkgsend_bulk(self.rurl, [self.foo10,
        self.foo11, self.baz10, self.bar10, self.pkg410,
        self.obsolete10])

    # Record the published FMRIs (publisher stripped) for the
    # packages the tests reference by name; baz10 and obsolete10
    # need no cached name.
    for attr, idx in (("foo10_name", 0), ("foo11_name", 1),
        ("bar10_name", 3), ("pkg410_name", 4)):
        setattr(self, attr,
            fmri.PkgFmri(self.sent_pkgs[idx]).get_fmri(
            anarchy=True))
def test_2_depot_p5i(self): """Verify the output of the depot /publisher operation.""" # Now update the repository configuration while the depot is # stopped so changes won't be overwritten on exit. self.__update_repo_config() # Start the depot. self.dc.start() # Then, publish some packages we can abuse for testing. durl = self.dc.get_depot_url() plist = self.pkgsend_bulk(durl, (self.info10, self.quux10, self.system10, self.zfsextras10, self.zfsutils10)) # Now, for each published package, attempt to get a p5i file # and then verify that the parsed response has the expected # package information under the expected publisher. for p in plist: purl = urlparse.urljoin(durl, "p5i/0/{0}".format(p)) pub, pkglist = p5i.parse(location=purl)[0] # p5i files contain non-qualified FMRIs as the FMRIs # are already grouped by publisher. nq_p = fmri.PkgFmri(p).get_fmri(anarchy=True, include_scheme=False) self.assertEqual(pkglist, [nq_p]) # Try again, but only using package stems. for p in plist: stem = fmri.PkgFmri(p).pkg_name purl = urlparse.urljoin(durl, "p5i/0/{0}".format(stem)) pub, pkglist = p5i.parse(location=purl)[0] self.assertEqual(pkglist, [stem]) # Try again, but using wildcards (which will return a list of # matching package stems). purl = urlparse.urljoin(durl, "p5i/0/zfs*") pub, pkglist = p5i.parse(location=purl)[0] self.assertEqual(pkglist, ["zfs-extras", "zfs/utils"]) # Finally, verify that a non-existent package will error out # with a httplib.NOT_FOUND. try: urllib2.urlopen(urlparse.urljoin(durl, "p5i/0/nosuchpackage")) except urllib2.HTTPError as e: if e.code != httplib.NOT_FOUND: raise
def test_1_install_uninstall(self):
    """Verify that the install and uninstall of a single package
    sends the expected intent information."""

    plist = self.pkgsend_bulk(self.durl, self.foo10)
    api_obj = self.image_create(self.durl)

    # Test install.  A dry run (noexecute) must not record any
    # intent entries.
    self.__do_install(api_obj, ["foo"], noexecute=True)
    entries = self.get_intent_entries()
    # no data should be there
    self.assertTrue(not entries)

    self.__do_install(api_obj, ["foo"])
    entries = self.get_intent_entries()
    # Anarchic FMRI (publisher stripped) matches the logged form.
    foo = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True)
    self.assertTrue(self.intent_entry_exists(entries, {
        "operation": "install",
        "new_fmri" : foo,
        "reference": "foo"
    }))

    # Test uninstall.
    self.__do_uninstall(api_obj, ["*"])

    # Verify that processing entries are present for uninstall.
    entries = self.get_intent_entries()
    self.assertTrue(self.intent_entry_exists(entries, {
        "operation": "uninstall",
        "old_fmri" : foo,
        "reference": "*"
    }))
def __change_content_hash(self):
    """Change the content-hash attr in the manifest located at the
    target and expected repos."""
    mapping = {
        self.dpath2: self.published_targ,
        self.dpath3: self.published_exp
    }
    for repodir in (self.dpath2, self.dpath3):
        # Find the elftest package.  Fail loudly if it was not
        # published rather than silently operating on the last
        # package in the list (as the old for/break idiom did).
        try:
            s = next(p for p in mapping[repodir]
                if "elftest" in p)
        except StopIteration:
            raise AssertionError("elftest package not found "
                "in {0}".format(repodir))
        f = fmri.PkgFmri(s, None)
        repo = self.get_repo(repodir)
        mpath = repo.manifest(f)
        # load manifest, change content-hash attr and store back
        # to disk
        mani = manifest.Manifest()
        mani.set_content(pathname=mpath)
        for a in mani.gen_actions():
            if "bin/true" in str(a):
                # change the signed version of hash of
                # the ELF file
                a.attrs["pkg.content-hash"][0] = \
                    "gelf:sha512t_256:foo"
        mani.store(mpath)
        # rebuild repo catalog since manifest digest changed
        repo.rebuild()
def use_ref(a, deps, ignores):
    """Determine if the given action indicates that the pkg can be
    reversioned.

    Returns True when the action is irrelevant to content equality;
    depend actions additionally record their stem in 'deps'."""

    if a.name == "set" and "name" in a.attrs:
        attr = a.attrs["name"]
        # Ignore caller-specified attributes, and pkg.fmri since
        # that is what will always change.
        if attr in ignores or attr == "pkg.fmri":
            return True
    elif a.name == "signature":
        # Signature will always change.
        return True
    elif a.name == "depend":
        # TODO: support dependency lists
        # For now, treat as content change.
        dep_attr = a.attrs["fmri"]
        if not isinstance(dep_attr, six.string_types):
            return False
        deps.add(fmri.PkgFmri(dep_attr).get_pkg_stem())
        return True
    return False
def test_stress_file_publish(self):
    """Publish lots of packages rapidly ensuring that file
    publication can handle it."""

    # Build a file:// URI for the repository directory.
    repo_path = os.path.abspath(self.dc.get_repodir())
    location = urlunparse(("file", "", pathname2url(repo_path),
        "", "", ""))

    # Wire up a transport that publishes to that repository.
    origin = publisher.RepositoryURI(location)
    pub = publisher.Publisher(prefix="repo1",
        repository=publisher.Repository(origins=[origin]))
    xport_cfg = transport.GenericTransportCfg()
    xport_cfg.add_publisher(pub)
    xport = transport.Transport(xport_cfg)

    # Each version number must be unique since multiple packages
    # will be published within the same second.
    for ver in range(100):
        pf = fmri.PkgFmri("foo@{0:d}.0".format(ver))
        t = trans.Transaction(location, pkg_name=str(pf),
            xport=xport, pub=pub)
        t.open()
        pkg_fmri, pkg_state = t.close()
        self.debug("{0}: {1}".format(pkg_fmri, pkg_state))
def test_info_bad_packages(self):
    """Verify that pkg info handles packages with invalid
    metadata."""

    self.image_create(self.rurl)

    # Verify that no packages are installed.
    self.pkg("list", exit=1)
    plist = self.plist[:2]

    # This should succeed and cause the manifests to be cached.
    self.pkg("info -r {0}".format(" ".join(p for p in plist)))

    # Now attempt to corrupt the client's copy of the manifest by
    # adding malformed actions.
    for p in plist:
        self.debug("Testing package {0} ...".format(p))
        pfmri = fmri.PkgFmri(p)
        mdata = self.get_img_manifest(pfmri)
        # The former src_mode computation (based on whether the
        # manifest contained a "dir" action) was dead code --
        # never referenced below -- and has been removed.
        for bad_act in (
            'set name=description value="" \" my desc \" ""',
            "set name=com.sun.service.escalations value="):
            self.debug("Testing with bad action "
                "'{0}'.".format(bad_act))
            bad_mdata = mdata + "{0}\n".format(bad_act)
            self.write_img_manifest(pfmri, bad_mdata)
            # info must tolerate the malformed action.
            self.pkg("info -r {0}".format(pfmri.pkg_name),
                exit=0)
def __populate_repo(self, unsupp_content):
    # Publish a package and then add some unsupported action data
    # to the repository's copy of the manifest and catalog.
    sfmri = self.pkgsend_bulk(self.rurl, self.unsupp10)[0]
    pfmri = fmri.PkgFmri(sfmri)
    repo = self.get_repo(self.dcs[1].get_repodir())
    mpath = repo.manifest(pfmri)

    # Append the unsupported content to the stored manifest.
    # NOTE(review): the file is opened in binary append mode but
    # unsupp_content + "\n" looks like str; presumably callers pass
    # bytes (or this runs under Python 2) -- TODO confirm.
    with open(mpath, "ab+") as mfile:
        mfile.write(unsupp_content + "\n")

    # Re-read the modified manifest so the catalog signature below
    # matches the on-disk bytes.
    mcontent = None
    with open(mpath, "rb") as mfile:
        mcontent = mfile.read()

    cat = repo.get_catalog("test")
    # Suppress update logging while we hand-edit the catalog.
    cat.log_updates = False

    # Update the catalog signature.
    entry = cat.get_entry(pfmri)
    entry["signature-sha-1"] = manifest.Manifest.hash_create(mcontent)

    # Update the catalog actions.
    self.debug(str(cat.parts))
    dpart = cat.get_part("catalog.dependency.C", must_exist=True)
    entry = dpart.get_entry(pfmri)
    entry["actions"].append(unsupp_content)

    # Write out the new catalog.
    cat.save()
def cache_fmri(d, pfmri, pub, known=True):
    """Record 'pfmri' in the lookup structure 'd' for fast retrieval.

    'd' maps each package name to a per-package dict which holds:

      * one entry per version string, whose value is a tuple of the
        fmri object and a dict of publisher prefixes (each mapped to
        a flag indicating catalog presence), and

      * a "versions" entry: a list of version objects kept sorted.

    The fmri is expected not to have an embedded publisher.  If it
    does, it will be ignored."""

    if pfmri.has_publisher():
        # Cache entries must not contain the name of the
        # publisher, otherwise matching during packaging
        # operations may not work correctly.
        pfmri = fmri.PkgFmri(pfmri.get_fmri(anarchy=True))

    pversion = str(pfmri.version)
    by_version = d.get(pfmri.pkg_name)

    if by_version is None:
        # First sighting of this package name: seed the simplest
        # representation of the cache data structure.
        d[pfmri.pkg_name] = {
            "versions": [pfmri.version],
            pversion: (pfmri, {pub: known}),
        }
    elif pversion not in by_version:
        # New version of a known package: insert it and keep the
        # version list sorted.
        by_version[pversion] = (pfmri, {pub: known})
        bisect.insort(by_version["versions"], pfmri.version)
    else:
        # Known version; only record the publisher if it is new
        # (setdefault never overwrites an existing flag).
        by_version[pversion][1].setdefault(pub, known)
def parse(self, image, source_name):
    """Decode this depend action into an (fmri, constraint) tuple.

    'image' supplies the build release used to complete partial
    versions; 'source_name' is recorded in the returned Constraint.
    Raises AssertionError for an unrecognized dependency type."""

    ctype = self.attrs["type"]
    fmristr = self.attrs["fmri"]
    f = fmri.PkgFmri(fmristr, image.attrs["Build-Release"])

    min_ver = f.version
    # An unversioned dependency FMRI constrains from version 0 up.
    # (Use "is None" -- identity, not equality -- per PEP 8; the
    # old "== None" relied on Version's equality behavior.)
    if min_ver is None:
        min_ver = pkg.version.Version("0",
            image.attrs["Build-Release"])

    name = f.get_name()
    max_ver = None
    presence = None
    if ctype == "require":
        presence = constraint.Constraint.ALWAYS
    elif ctype == "exclude":
        presence = constraint.Constraint.NEVER
    elif ctype == "incorporate":
        # Incorporations pin to exactly the named version.
        presence = constraint.Constraint.MAYBE
        max_ver = min_ver
    elif ctype == "optional":
        # Policy decides whether optional deps are enforced.
        if image.cfg_cache.get_policy(REQUIRE_OPTIONAL):
            presence = constraint.Constraint.ALWAYS
        else:
            presence = constraint.Constraint.MAYBE
    elif ctype == "transfer":
        presence = constraint.Constraint.MAYBE
    assert presence

    return f, constraint.Constraint(name, min_ver, max_ver,
        presence, source_name)
def __run_search(paths, api_inst):
    """Function which interfaces with the search engine and extracts
    the fmri and variants from the actions which deliver the paths
    being searched for.

    'paths' is the paths to search for.

    'api_inst' is an ImageInterface which references the current
    image.

    Returns a list of (PkgFmri, VariantSets) tuples."""

    qs = [
        api.Query(p, case_sensitive=False, return_actions=True)
        for p in paths
    ]
    search_res = api_inst.local_search(qs)
    res = []
    try:
        # Each result is a nested tuple; only the fmri string and
        # the delivering action's string form are used here.
        for num, pub, (version, return_type, (pfmri, match, a_str)) \
            in search_res:
            pfmri = fmri.PkgFmri(pfmri)
            m = api_inst.img.get_manifest(pfmri)
            # Merge the action's declared variants with any
            # variants the manifest knows about but the action
            # does not mention.  (Note: 'vars' shadows the
            # builtin of the same name.)
            vars = variants.VariantSets(
                actions.fromstr(a_str.rstrip()).get_variants())
            vars.merge_unknown(m.get_all_variants())
            res.append((pfmri, vars))
    except api_errors.SlowSearchUsed:
        # Best-effort: a slow search aborts collection but any
        # results gathered so far are still returned.
        pass
    return res
def testunsupported(self):
    """Verify that unsupported operations on a partial FMRI raise
    the correct exceptions."""

    partial = fmri.PkgFmri("BRCMbnx")
    # Path/URL derivation requires a version; all of these must
    # refuse to operate on a versionless FMRI.
    for unsupported_op in (partial.get_dir_path,
        partial.get_link_path, partial.get_url_path):
        self.assertRaises(fmri.MissingVersionError, unsupported_op)
def read_and_discard_matching_from_argument(self, fmri_set):
    """Read the backing file and remove every FMRI it lists from
    'fmri_set'.  A no-op when no file handle is open."""

    if not self._file_handle:
        return
    for entry in self._file_handle:
        fmri_set.discard(fmri.PkgFmri(entry))
def reopen(self, rstore, trans_dir):
    """The reopen() method is invoked by the repository as needed to
    load Transaction data.

    'trans_dir' is a directory named "<open_time>_<escaped pkg name>";
    raises TransactionUnknownIDError when the name cannot be parsed
    or the directory does not exist."""

    self.rstore = rstore
    try:
        open_time_str, self.esc_pkg_name = \
            os.path.basename(trans_dir).split("_", 1)
    except ValueError:
        # No "_" separator: not a transaction directory name.
        raise TransactionUnknownIDError(os.path.basename(
            trans_dir))

    self.open_time = \
        datetime.datetime.utcfromtimestamp(int(open_time_str))
    self.pkg_name = unquote(self.esc_pkg_name)

    # This conversion should always work, because we encoded the
    # client release on the initial open of the transaction.
    self.fmri = fmri.PkgFmri(self.pkg_name, None)

    self.dir = os.path.join(rstore.trans_root, self.get_basename())

    if not os.path.exists(self.dir):
        raise TransactionUnknownIDError(self.get_basename())

    tmode = "rb"
    if not rstore.read_only:
        # The mode is important especially when dealing with
        # NFS because of problems with opening a file as
        # read/write or readonly multiple times.
        tmode += "+"

    # Find out if the package is renamed or obsolete.
    try:
        tfpath = os.path.join(self.dir, "manifest")
        tfile = open(tfpath, tmode)
    except IOError as e:
        # A missing manifest just means there is nothing more to
        # load; any other I/O error is fatal.
        if e.errno == errno.ENOENT:
            return
        raise

    m = pkg.manifest.Manifest()
    # If tfile is a StreamingFileObj obj, its read()
    # methods will return bytes. We need str for
    # manifest and here's an earlisest point that
    # we can convert it to str.
    m.set_content(content=misc.force_str(tfile.read()))
    tfile.close()

    if os.path.exists(os.path.join(self.dir, "append")):
        self.append_trans = True
    self.obsolete = m.getbool("pkg.obsolete", "false")
    self.renamed = m.getbool("pkg.renamed", "false")
    # Record which action types and require-dependencies the
    # in-flight manifest already contains.
    self.types_found = set((
        action.name for action in m.gen_actions()
    ))
    self.has_reqdeps = any(
        a.attrs["type"] == "require"
        for a in m.gen_actions_by_type("depend")
    )
def rename(self, src_fmri, dest_fmri):
    """Renames an existing package specified by 'src_fmri' to
    'dest_fmri'.  Returns nothing.

    Raises RepositoryInvalidFMRIError if 'src_fmri' is a string that
    cannot be parsed as an FMRI."""

    if not isinstance(src_fmri, fmri.PkgFmri):
        try:
            src_fmri = fmri.PkgFmri(src_fmri, None)
        # Use the "except E as e" form: the legacy
        # "except E, e" syntax is Python 2-only and a
        # SyntaxError on Python 3; the rest of this codebase
        # already uses the "as" form.
        except fmri.FmriError as e:
            raise RepositoryInvalidFMRIError(e)
def _fmri_from_path(pkg, vers):
    """Helper method that takes the full path to the package
    directory and the name of the manifest file, and returns an FMRI
    constructed from the information in those components."""

    # Both components are URL-quoted on disk; decode them first.
    v = version.Version(urllib.unquote(vers), None)
    stem = urllib.unquote(os.path.basename(pkg))
    pkg_fmri = fmri.PkgFmri(stem, None)
    pkg_fmri.version = v
    return pkg_fmri
def test_2_upgrade(self):
    """Verify the the install of a single package, and then an
    upgrade (install of newer version) of that package sends the
    expected intent information."""

    plist = self.pkgsend_bulk(self.durl, (self.foo10, self.foo11))
    api_obj = self.image_create(self.durl)

    # Anarchic FMRIs (publisher stripped) match the logged form.
    foo10 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True)
    foo11 = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True)

    # Test install.
    self.__do_install(api_obj, ["[email protected]"])
    self.__do_install(api_obj, ["[email protected]"])

    # Test uninstall.
    self.__do_uninstall(api_obj, ["foo"])

    entries = self.get_intent_entries()
    # Verify that evaluation and processing entries are present
    # for install.
    self.assertTrue(
        self.intent_entry_exists(entries, {
            "operation": "install",
            "new_fmri": foo10,
            "reference": "[email protected]"
        }))
    # The upgrade entry carries both the old and the new FMRI.
    self.assertTrue(
        self.intent_entry_exists(
            entries, {
                "operation": "install",
                "new_fmri": foo11,
                "old_fmri": foo10,
                "reference": "[email protected]"
            }))
    # Uninstall removes the newer version that was installed last.
    self.assertTrue(
        self.intent_entry_exists(entries, {
            "operation": "uninstall",
            "old_fmri": foo11,
            "reference": "foo"
        }))
def test_17_verbose(self):
    """Verify that pkg list -v works as expected."""

    # FMRI with no branch component should be displayed correctly.
    plist = self.pkgsend_bulk(self.rurl1, self.newpkg10)
    self.pkg("install [email protected]")
    self.pkg("list -Hv newpkg")

    actual = self.reduceSpaces(self.output)
    listed_fmri = fmri.PkgFmri(plist[0]).get_fmri(
        include_build=False)
    self.assertEqualDiff(listed_fmri + " i--\n", actual)
def test_1_bad_packages(self):
    """Verify that the info operation handles packages with invalid
    metadata."""

    api_obj = self.image_create(self.rurl)

    # With nothing installed, requesting info for the empty list
    # must raise.
    self.assertRaises(api_errors.NoPackagesInstalledException,
        api_obj.info, [], True, api.PackageInfo.ALL_OPTIONS -
        (frozenset([api.PackageInfo.LICENSES]) |
        api.PackageInfo.ACTION_OPTIONS))

    self.make_misc_files("tmp/baz")

    badfile10 = """
open [email protected],5.11-0
add file tmp/baz mode=644 owner=root group=bin path=/tmp/baz-file
close
"""
    baddir10 = """
open [email protected],5.11-0
add dir mode=755 owner=root group=bin path=/tmp/baz-dir
close
"""
    plist = self.pkgsend_bulk(self.rurl, (badfile10, baddir10))
    api_obj.refresh(immediate=True)

    # This should succeed and cause the manifests to be cached.
    info_needed = api.PackageInfo.ALL_OPTIONS
    ret = api_obj.info(plist, False, info_needed)

    # Now attempt to corrupt the client's copy of the manifest by
    # adding malformed actions.
    for p in plist:
        self.debug("Testing package {0} ...".format(p))
        pfmri = fmri.PkgFmri(p)
        mdata = self.get_img_manifest(pfmri)
        # The former src_mode computation (based on whether the
        # manifest contained a "dir" action) was dead code --
        # never referenced below -- and has been removed.
        for bad_act in (
            'set name=description value="" \" my desc \" ""',
            "set name=com.sun.service.escalations value="):
            self.debug("Testing with bad action "
                "'{0}'.".format(bad_act))
            bad_mdata = mdata + "{0}\n".format(bad_act)
            self.write_img_manifest(pfmri, bad_mdata)

            # Info shouldn't raise an exception.
            api_obj.info([pfmri.pkg_name], False,
                info_needed)
def _search_fast_update(self, manifest_func, excludes):
    """This function searches the packages which have been
    installed since the last time the index was rebuilt.

    The "manifest_func" parameter is a function which maps fmris
    to the path to their manifests.

    The "excludes" paramter is a list of variants defined in the
    image.

    Returns a list of result groups, one per matched token."""

    assert self._data_main_dict.get_file_handle() is not None

    glob = self._glob
    term = self._term
    case_sensitive = self._case_sensitive

    # Case-insensitive matching is implemented via globbing.
    if not case_sensitive:
        glob = True

    fast_update_dict = {}

    fast_update_res = []

    # self._data_fast_add holds the names of the fmris added
    # since the last time the index was rebuilt.
    for fmri_str in self._data_fast_add._set:
        if not (self.pkg_name_wildcard or
            self.pkg_name_match(fmri_str)):
            continue
        f = fmri.PkgFmri(fmri_str)
        path = manifest_func(f)
        # Tokenize the manifest on the fly since these packages
        # are not in the on-disk index yet.
        search_dict = manifest.Manifest.search_dict(path,
            return_line=True, excludes=excludes)
        for tmp in search_dict:
            tok, at, st, fv = tmp
            # Filter by action type and key unless wildcarded.
            if not (self.action_type_wildcard or
                at == self.action_type) or \
                not (self.key_wildcard or st == self.key):
                continue
            if tok not in fast_update_dict:
                fast_update_dict[tok] = []
            fast_update_dict[tok].append((at, st, fv, fmri_str,
                search_dict[tmp]))
    if glob:
        # Glob terms match against every collected token.
        keys = fast_update_dict.keys()
        matches = choose(keys, term, case_sensitive)
        fast_update_res = [
            fast_update_dict[m] for m in matches
        ]
    else:
        if term in fast_update_dict:
            fast_update_res.append(fast_update_dict[term])
    return fast_update_res
def test_bug_4315(self):
    """Test that when multiple manifests are given and -m is used,
    their contents aren't comingled."""

    self.image_create(self.rurl)
    self.pkg("contents -r -m {0}".format(" ".join(self.plist)))
    # Use str.join instead of the previous reduce()-based string
    # concatenation: reduce is not a builtin on Python 3 (it needs
    # a functools import), and join is linear rather than
    # quadratic.
    # NOTE(review): every other caller in this suite uses
    # fmri.PkgFmri; confirm 'pfmri' is a module alias for pkg.fmri
    # in this file and not a typo.
    expected_res = "".join(
        self.get_img_manifest(pfmri.PkgFmri(s))
        for s in self.plist)
    self.assertEqualDiff(expected_res, self.output)
def manifest(self, pfmri):
    """Returns the absolute pathname of the manifest file for the
    specified FMRI.

    Raises RepositoryInvalidFMRIError if 'pfmri' is a string that
    cannot be parsed as an FMRI."""

    self.scfg.inc_manifest()

    try:
        if not isinstance(pfmri, fmri.PkgFmri):
            pfmri = fmri.PkgFmri(pfmri, None)
        fpath = pfmri.get_dir_path()
    # Use the "except E as e" form: the legacy "except E, e"
    # syntax is Python 2-only and a SyntaxError on Python 3; the
    # rest of this codebase already uses the "as" form.
    # NOTE(review): fpath is unused in this visible portion; the
    # remainder of the method presumably joins and returns it.
    except fmri.FmriError as e:
        raise RepositoryInvalidFMRIError(e)
def testpartial(self):
    """Verify that supported operations on a partial FMRI function
    properly."""

    stem = "pkg:/BRCMbnx"
    partial = fmri.PkgFmri(stem)

    # Every string rendering of a versionless, publisherless FMRI
    # collapses to the bare stem.
    for rendered in (partial.get_short_fmri(),
        partial.get_pkg_stem(), partial.get_fmri()):
        self.assertEqual(rendered, stem)

    self.assertFalse(partial.has_version())
    self.assertFalse(partial.has_publisher())
def get_constrained_fmri(self, image):
    """Return the FMRI constrained by this incorporation, or None if
    this action is not an 'incorporate' dependency."""

    if self.attrs["type"] != "incorporate":
        return None

    constrained = fmri.PkgFmri(self.attrs["fmri"],
        image.attrs["Build-Release"])
    # Fill in the image's default publisher before returning.
    image.fmri_set_default_publisher(constrained)
    return constrained
def subs_undef_fmri_str(fmri_str, latest_ref_pkgs):
    """Substitute the correct dependency FMRI when the reference
    manifest has no counterpart for it.

    The original FMRI is kept as long as the version of the
    dependency currently in the repo still satisfies (is a successor
    of) it; otherwise the repo's current version is substituted."""

    dep = fmri.PkgFmri(fmri_str)
    current = latest_ref_pkgs[dep.get_name()]
    if current.is_successor(dep):
        return fmri_str
    return current.get_short_fmri(anarchy=True)