def test_latest_version_npm(
    name, inp, curr_ver, ver, source, urls, requests_mock, tmpdir,
):
    """End-to-end check of ``get_latest_version`` against mocked npm URLs.

    ``ver`` encodes the expectation: ``None`` means "some strictly newer
    version than ``curr_ver``", ``False`` means "lookup reports False",
    anything else is the exact version string expected.
    """
    pmy = LazyJson(tmpdir.join("cf-scripts-test.json"))
    pmy.update(parse_meta_yaml(inp)["source"])
    pmy.update(
        {
            "feedstock_name": name,
            "version": curr_ver,
            "raw_meta_yaml": inp,
            "meta_yaml": parse_meta_yaml(inp),
        },
    )
    # Register the mocked HTTP responses. A plain loop replaces the original
    # list comprehension that was used only for its side effects.
    for url, text in urls.items():
        requests_mock.get(url, text=text)
    attempt = get_latest_version(name, pmy, [source])
    if ver is None:
        assert attempt["new_version"] is not False
        assert attempt["new_version"] != curr_ver
        assert VersionOrder(attempt["new_version"]) > VersionOrder(curr_ver)
    elif ver is False:
        assert attempt["new_version"] is ver
    else:
        assert ver == attempt["new_version"]
def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool:
    """Return True when this node should be skipped by the Version migrator.

    A node is filtered out when the parent filter says so (archived/finished),
    when too many Version PRs are already open, when there is no new version,
    or when the "new" version is not actually newer than what we have
    (including versions already PRed).
    """
    # if no new version do nothing
    if "new_version" not in attrs or not attrs["new_version"]:
        return True
    # Parent-class filtering (e.g. archived/finished feedstocks).
    conditional = super().filter(attrs)
    # Count currently-open Version migration PRs without materializing a list.
    num_open_version_prs = sum(
        1
        for k in attrs.get("PRed", [])
        if k["data"].get("migrator_name") == "Version"
        # The PR is the actual PR itself
        and k.get("PR", {}).get("state", None) == "open"
    )
    result = bool(
        conditional  # if archived/finished
        or num_open_version_prs > self.max_num_prs
        or not attrs.get("new_version"),  # if no new version
    )
    try:
        version_filter = (
            # if new version is less than current version
            VersionOrder(str(attrs["new_version"]))
            <= VersionOrder(str(attrs.get("version", "0.0.0")))
            # if PRed version is greater than newest version
            or any(
                VersionOrder(self._extract_version_from_muid(h))
                >= VersionOrder(str(attrs["new_version"]))
                for h in attrs.get("PRed", set())
            )
        )
    except conda.exceptions.InvalidVersionSpec as e:
        # Message typo fixed: was "Failed to filter to to invalid version".
        warnings.warn(
            f"Failed to filter due to invalid version for {attrs}\nException: {e}",
        )
        version_filter = True
    return result or version_filter
def is_new_version_available(raw_yaml, context_dict, rendered_yaml):
    """Check upstream for a version newer than the one in the rendered recipe.

    Returns a ``(found, version)`` pair: ``(True, new_version)`` when a newer
    upstream version exists, else ``(False, current_version)``.
    """
    raw_yaml = copy.deepcopy(raw_yaml)
    current_version = rendered_yaml["package"]["version"]
    # Prefer a GitHub source when one can be derived from the recipe.
    github_url = Github().get_url(rendered_yaml)
    if github_url is not None:
        candidate = Github().get_version(github_url)
        if VersionOrder(candidate) > VersionOrder(current_version):
            return True, candidate
    else:
        # Fall back to probing the raw download URL.
        candidate = RawURL().get_url(raw_yaml, context_dict, rendered_yaml)
        if candidate is not None and VersionOrder(candidate) > VersionOrder(current_version):
            return True, candidate
    return False, current_version
def _update_nodes_with_new_versions(gx):
    """Updates every node with its new version (when available)."""
    # Nothing to do if the versions folder has not been produced yet.
    if not os.path.isdir("./versions"):
        return
    # get all the available node.json files
    # TODO: I don't think this is a good idea (8000+ entries)
    for file in os.listdir("./versions/"):
        node = os.path.splitext(os.path.basename(str(file)))[0]
        with open(f"./versions/{file}") as json_file:
            version_data: typing.Dict = json.load(json_file)
        with gx.nodes[node]["payload"] as attrs:
            version_from_data = version_data.get("new_version", False)
            version_from_attrs = attrs.get("new_version", False)
            # don't update the version if it isn't newer
            if version_from_data and isinstance(version_from_data, str):
                # we only override the graph node if the version we found is newer
                # or the graph doesn't have a valid version
                if isinstance(version_from_attrs, str):
                    # "-" is normalized to "." so VersionOrder can compare.
                    attrs["new_version"] = max(
                        [version_from_data, version_from_attrs],
                        key=lambda x: VersionOrder(x.replace("-", ".")),
                    )
                else:
                    attrs["new_version"] = version_from_data
def test_pep440(self):
    """Parsing then sorting the PEP 440 suite must preserve its order."""
    # this list must be in sorted order (slightly modified from the PEP 440 test suite
    # https://github.com/pypa/packaging/blob/master/tests/test_version.py)
    VERSIONS = [
        # Implicit epoch of 0
        "1.0a1", "1.0a2.dev456", "1.0a12.dev456", "1.0a12", "1.0b1.dev456",
        "1.0b2", "1.0b2.post345.dev456", "1.0b2.post345", "1.0c1.dev456",
        "1.0c1", "1.0c3", "1.0rc2", "1.0.dev456", "1.0", "1.0.post456.dev34",
        "1.0.post456", "1.1.dev1", "1.2.r32+123456", "1.2.rev33+123456",
        "1.2+abc", "1.2+abc123def", "1.2+abc123", "1.2+123abc",
        "1.2+123abc456", "1.2+1234.abc", "1.2+123456",
        # Explicit epoch of 1
        "1!1.0a1", "1!1.0a2.dev456", "1!1.0a12.dev456", "1!1.0a12",
        "1!1.0b1.dev456", "1!1.0b2", "1!1.0b2.post345.dev456",
        "1!1.0b2.post345", "1!1.0c1.dev456", "1!1.0c1", "1!1.0c3", "1!1.0rc2",
        "1!1.0.dev456", "1!1.0", "1!1.0.post456.dev34", "1!1.0.post456",
        "1!1.1.dev1", "1!1.2.r32+123456", "1!1.2.rev33+123456", "1!1.2+abc",
        "1!1.2+abc123def", "1!1.2+abc123", "1!1.2+123abc", "1!1.2+123abc456",
        "1!1.2+1234.abc", "1!1.2+123456",
    ]
    parsed = list(map(VersionOrder, VERSIONS))
    self.assertEqual(parsed, sorted(parsed))
def test_openssl_convention(self):
    """Shuffling then sorting must recover the OpenSSL-style ordering."""
    expected = [
        VersionOrder(tag)
        for tag in (
            '1.0.1dev',
            '1.0.1_',  # <- this
            '1.0.1a',
            '1.0.1b',
            '1.0.1c',
            '1.0.1d',
            '1.0.1r',
            '1.0.1rc',
            '1.0.1rc1',
            '1.0.1rc2',
            '1.0.1s',
            '1.0.1',  # <- compared to this
            '1.0.1post.a',
            '1.0.1post.b',
            '1.0.1post.z',
            '1.0.1post.za',
            '1.0.2',
        )
    ]
    scrambled = copy(expected)
    shuffle(scrambled)
    assert sorted(scrambled) == expected
def update_nodes_with_new_versions(gx):
    """Updates every node with its new version (when available)."""
    # Nothing to do if the versions folder has not been produced yet.
    if not os.path.isdir("./versions"):
        return
    # get all the available node.json files
    # TODO: I don't think this is a good idea (8000+ entries)
    for file in os.listdir("./versions/"):
        # Fix: str.replace(".json", "") strips ALL occurrences of ".json";
        # use splitext (as the sibling _update_nodes_with_new_versions does)
        # to drop only the extension.
        node = os.path.splitext(os.path.basename(str(file)))[0]
        with open(f"./versions/{file}") as json_file:
            version_data: typing.Dict = json.load(json_file)
        with gx.nodes[node]["payload"] as attrs:
            version_from_data = version_data.get("new_version", False)
            version_from_attrs = attrs.get("new_version", False)
            # don't update the version if it isn't newer
            if version_from_data and isinstance(version_from_data, str):
                if isinstance(version_from_attrs, str):
                    # Keep whichever version is newer; "-" normalized to "."
                    # so VersionOrder can compare.
                    version_data["new_version"] = max(
                        [version_from_data, version_from_attrs],
                        key=lambda x: VersionOrder(x.replace("-", ".")),
                    )
            elif "new_version" in version_data:
                # No usable new version on disk: drop the stale key so it
                # does not clobber the graph's value.
                version_data.pop("new_version")
            attrs.update(version_data)
def execute(args, parser):
    """Search all configured channels/subdirs for records matching the spec.

    Returns the sorted matches, or raises ``PackagesNotFoundError`` when
    nothing matches anywhere.
    """
    spec = MatchSpec(args.match_spec)
    # Subdir priority: explicit in the spec > --platform flag > context default.
    exact_subdir = spec.get_exact_value('subdir')
    if exact_subdir:
        subdirs = (exact_subdir,)
    elif args.platform:
        subdirs = (args.platform,)
    else:
        subdirs = context.subdirs
    show_spinner = not context.verbosity and not context.quiet
    with Spinner("Loading channels", show_spinner, context.json):
        spec_channel = spec.get_exact_value('channel')
        channel_urls = (spec_channel,) if spec_channel else context.channels
        found = SubdirData.query_all(spec, channel_urls, subdirs)
        matches = sorted(
            found,
            key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build),
        )
    if matches:
        return matches
    # Nothing found: report the full set of channel URLs we looked at.
    channels_urls = tuple(
        calculate_channel_urls(
            channel_urls=context.channels,
            prepend=not args.override_channels,
            platform=subdirs[0],
            use_local=args.use_local,
        )
    )
    from ..exceptions import PackagesNotFoundError
    raise PackagesNotFoundError((text_type(spec),), channels_urls)
def get_latest_package_info(channels, package):
    '''
    Get the latest conda package info with the following priority:
    1. Most Specific Channel
    2. Latest Version
    3. Largest Build Number
    4. Latest Timestamp
    '''
    # Skip virtual packages (these have leading "__" in the name)
    if package.startswith("__"):
        return ""
    # One search per channel, most specific first; a final empty arg list
    # searches the default channels.
    channel_args = sum(
        ([["--override-channels", "-c", channel]] for channel in channels), [])
    channel_args += [[]]  # use defaults for last search
    all_std_out = ""
    for channel_arg in channel_args:
        search_args = ["--info", generalize_version(package)] + channel_arg
        # Setting the logging level allows us to ignore unnecessary output
        getLogger("conda.common.io").setLevel(ERROR)
        # Call "conda search --info" through conda's cli.python_api
        std_out, _, _ = conda.cli.python_api.run_command(
            conda.cli.python_api.Commands.SEARCH,
            search_args, use_exception_handler=True)
        all_std_out += std_out
        # Parsing the normal output from "conda search --info" instead of using the json flag. Using the json
        # flag adds a lot of extra time due to a slow regex in the conda code that is attempting to parse out
        # URL tokens
        entries = []
        for entry in std_out.split("\n\n"):
            _, file_name, rest = entry.partition("file name")
            if not file_name:
                continue
            entry = open_ce.yaml_utils.load(file_name + rest)
            # Convert time string into a timestamp (if there is a timestamp)
            if "timestamp" in entry:
                entry["timestamp"] = datetime.timestamp(
                    datetime.strptime(entry["timestamp"], '%Y-%m-%d %H:%M:%S %Z'))
            else:
                entry["timestamp"] = 0
            if not entry["dependencies"]:
                entry["dependencies"] = []
            entry["version_order"] = VersionOrder(str(entry["version"]))
            entries.append(entry)
        if entries:
            # Bug fix: the previous continue-chain skipped candidates with a
            # higher version but a lower build number/timestamp. A
            # lexicographic key implements the documented priority exactly.
            return max(
                entries,
                key=lambda e: (
                    e["version_order"], e["build number"], e["timestamp"]),
            )
    raise OpenCEError(
        Error.CONDA_PACKAGE_INFO,
        "conda search --info " + generalize_version(package), all_std_out)
def test_latest_version_rawurl(name, inp, curr_ver, ver, source, urls, tmpdir):
    """Exercise ``get_latest_version`` for raw-URL sources.

    ``ver`` encodes the expectation: ``None`` means "any strictly newer
    version", ``False`` means "lookup reports False", anything else is the
    exact version string expected.
    """
    payload = LazyJson(tmpdir.join("cf-scripts-test.json"))
    payload.update(parse_meta_yaml(inp)["source"])
    meta = {
        "feedstock_name": name,
        "version": curr_ver,
        "raw_meta_yaml": inp,
        "meta_yaml": parse_meta_yaml(inp),
    }
    payload.update(meta)
    attempt = get_latest_version(name, payload, [source])
    if ver is False:
        assert attempt["new_version"] is ver
    elif ver is None:
        found = attempt["new_version"]
        assert found is not False
        assert found != curr_ver
        assert VersionOrder(found) > VersionOrder(curr_ver)
    else:
        assert ver == attempt["new_version"]
def _version_order( v: Union[str, float], ordering: Optional[List[str]] = None ) -> Union[int, VersionOrder, float]: if ordering is not None: return ordering.index(v) else: try: return VersionOrder(v) except: return v
def get_version(self, url):
    """Return the newest non-development version found in the feed, or None."""
    parsed = feedparser.parse(url)
    # bozo == 1 signals a malformed or unreachable feed.
    if parsed["bozo"] == 1:
        return None
    candidates = []
    for item in parsed["entries"]:
        tag = item["link"].split("/")[-1]
        for prefix in self.ver_prefix_remove:
            if tag.startswith(prefix):
                tag = tag[len(prefix):]
        # Skip anything carrying a development marker.
        if any(marker in tag for marker in self.dev_vers):
            continue
        candidates.append(tag)
    if not candidates:
        return None
    # Normalize "-" to "." so conda's VersionOrder can compare the tags.
    return max(candidates, key=lambda t: VersionOrder(t.replace("-", ".")))
def get_version(self, url):
    """Pick the highest non-dev version advertised by the feed at *url*."""
    feed = feedparser.parse(url)
    if feed['bozo'] == 1:
        # Feed failed to parse -- nothing to report.
        return None
    found = []
    for item in feed['entries']:
        version = item['link'].split('/')[-1]
        for prefix in self.ver_prefix_remove:
            if version.startswith(prefix):
                version = version[len(prefix):]
        if any(marker in version for marker in self.dev_vers):
            # Development / pre-release tag: ignore it.
            continue
        found.append(version)
    if not found:
        return None
    # '-' is normalized to '.' so conda's VersionOrder can order the tags.
    return max(found, key=lambda x: VersionOrder(x.replace('-', '.')))
def add_edge(req_name: str, spc: VersionSpec):
    """Link the best available version of *req_name* into the build graph.

    Returns 1 when an edge was added, 0 when no version satisfies *spc*.
    """
    candidates = self.find_package_version(req_name, spec=spc)
    if not candidates:
        self.log.warning("Can't find a suitable version for {} "
                         "required by {}".format(req_name, nv))
        return 0
    # Conda version sorting is implemented by the VersionOrder
    # class
    orders = [VersionOrder(c) for c in candidates]
    best = str(max(orders))
    graph.add_edge((req_name, best), nv)
    self.log.debug("To build {} {} we are using {} "
                   "from candidates {}".format(
                       req_name, spc, best, candidates))
    return 1
def get_version(self, url) -> Optional[str]:
    """Return the newest non-dev version found in the feed, or None.

    Fix: the original called ``re.search(...).group(0)`` unguarded, which
    raises ``AttributeError`` for tags containing no digit (e.g. "latest").
    Such tags are now skipped instead.
    """
    data = feedparser.parse(url)
    # bozo == 1 signals a malformed or unreachable feed.
    if data["bozo"] == 1:
        return None
    vers = []
    for entry in data["entries"]:
        ver = entry["link"].split("/")[-1]
        for prefix in self.ver_prefix_remove:
            if ver.startswith(prefix):
                ver = ver[len(prefix):]
        if any(s in ver.lower() for s in self.dev_vers):
            continue
        # Extract version number starting at the first digit.
        match = re.search(r"(\d+[^\s]*)", ver)
        if match is None:
            # No digits at all -- not a version tag; skip rather than crash.
            continue
        vers.append(match.group(0))
    if vers:
        # Normalize "-" to "." so conda's VersionOrder can compare.
        return max(vers, key=lambda x: VersionOrder(x.replace("-", ".")))
    else:
        return None
def test_version_order(self): versions = [ ("0.4", [[0], [0], [4]]), ("0.4.0", [[0], [0], [4], [0]]), ("0.4.1a.vc11", [[0], [0], [4], [1, 'a'], [0, 'vc', 11]]), ("0.4.1.rc", [[0], [0], [4], [1], [0, 'rc']]), ("0.4.1.vc11", [[0], [0], [4], [1], [0, 'vc', 11]]), ("0.4.1", [[0], [0], [4], [1]]), ("0.5*", [[0], [0], [5, '*']]), ("0.5a1", [[0], [0], [5, 'a', 1]]), ("0.5b3", [[0], [0], [5, 'b', 3]]), ("0.5C1", [[0], [0], [5, 'c', 1]]), ("0.5z", [[0], [0], [5, 'z']]), ("0.5za", [[0], [0], [5, 'za']]), ("0.5", [[0], [0], [5]]), ("0.5_5", [[0], [0], [5], [5]]), ("0.5-5", [[0], [0], [5], [5]]), ("0.9.6", [[0], [0], [9], [6]]), ("0.960923", [[0], [0], [960923]]), ("1.0", [[0], [1], [0]]), ("1.0.4a3", [[0], [1], [0], [4, 'a', 3]]), ("1.0.4b1", [[0], [1], [0], [4, 'b', 1]]), ("1.0.4", [[0], [1], [0], [4]]), ("1.1dev1", [[0], [1], [1, 'DEV', 1]]), ("1.1a1", [[0], [1], [1, 'a', 1]]), ("1.1.dev1", [[0], [1], [1], [0, 'DEV', 1]]), ("1.1.a1", [[0], [1], [1], [0, 'a', 1]]), ("1.1", [[0], [1], [1]]), ("1.1.post1", [[0], [1], [1], [0, float('inf'), 1]]), ("1.1.1dev1", [[0], [1], [1], [1, 'DEV', 1]]), ("1.1.1rc1", [[0], [1], [1], [1, 'rc', 1]]), ("1.1.1", [[0], [1], [1], [1]]), ("1.1.1post1", [[0], [1], [1], [1, float('inf'), 1]]), ("1.1post1", [[0], [1], [1, float('inf'), 1]]), ("2g6", [[0], [2, 'g', 6]]), ("2.0b1pr0", [[0], [2], [0, 'b', 1, 'pr', 0]]), ("2.2be.ta29", [[0], [2], [2, 'be'], [0, 'ta', 29]]), ("2.2be5ta29", [[0], [2], [2, 'be', 5, 'ta', 29]]), ("2.2beta29", [[0], [2], [2, 'beta', 29]]), ("2.2.0.1", [[0], [2], [2], [0], [1]]), ("3.1.1.6", [[0], [3], [1], [1], [6]]), ("3.2.p.r0", [[0], [3], [2], [0, 'p'], [0, 'r', 0]]), ("3.2.pr0", [[0], [3], [2], [0, 'pr', 0]]), ("3.2.pr.1", [[0], [3], [2], [0, 'pr'], [1]]), ("5.5.kw", [[0], [5], [5], [0, 'kw']]), ("11g", [[0], [11, 'g']]), ("14.3.1", [[0], [14], [3], [1]]), ("14.3.1.post26.g9d75ca2", [[0], [14], [3], [1], [0, float('inf'), 26], [0, 'g', 9, 'd', 75, 'ca', 2]]), ("1996.07.12", [[0], [1996], [7], [12]]), ("1!0.4.1", [[1], 
[0], [4], [1]]), ("1!3.1.1.6", [[1], [3], [1], [1], [6]]), ("2!0.4.1", [[2], [0], [4], [1]]), ] # check parser versions = [(v, VersionOrder(v), l) for v, l in versions] for s, v, l in versions: assert VersionOrder(v) is v assert str(v) == s.lower().replace('-', '_') self.assertEqual(v.version, l) self.assertEqual(VersionOrder("0.4.1.rc"), VersionOrder(" 0.4.1.RC ")) self.assertEqual(normalized_version(" 0.4.1.RC "), VersionOrder("0.4.1.rc")) for ver in ("", "", " ", "3.5&1", "5.5++", "5.5..mw", "!", "a!1.0", "a!b!1.0"): self.assertRaises(ValueError, VersionOrder, ver) # check __eq__ self.assertEqual(VersionOrder(" 0.4.rc "), VersionOrder("0.4.RC")) self.assertEqual(VersionOrder("0.4"), VersionOrder("0.4.0")) self.assertNotEqual(VersionOrder("0.4"), VersionOrder("0.4.1")) self.assertEqual(VersionOrder("0.4.a1"), VersionOrder("0.4.0a1")) self.assertNotEqual(VersionOrder("0.4.a1"), VersionOrder("0.4.1a1")) # check __lt__ self.assertEqual(sorted(versions, key=lambda x: x[1]), versions) # check startswith self.assertTrue(VersionOrder("0.4.1").startswith(VersionOrder("0"))) self.assertTrue(VersionOrder("0.4.1").startswith(VersionOrder("0.4"))) self.assertTrue( VersionOrder("0.4.1p1").startswith(VersionOrder("0.4"))) self.assertTrue( VersionOrder("0.4.1p1").startswith(VersionOrder("0.4.1p"))) self.assertFalse( VersionOrder("0.4.1p1").startswith(VersionOrder("0.4.1q1"))) self.assertFalse(VersionOrder("0.4").startswith(VersionOrder("0.4.1"))) self.assertTrue( VersionOrder("0.4.1+1.3").startswith(VersionOrder("0.4.1"))) self.assertTrue( VersionOrder("0.4.1+1.3").startswith(VersionOrder("0.4.1+1"))) self.assertFalse( VersionOrder("0.4.1").startswith(VersionOrder("0.4.1+1.3"))) self.assertFalse( VersionOrder("0.4.1+1").startswith(VersionOrder("0.4.1+1.3"))) # test openssl convention openssl = [ VersionOrder(k) for k in [ '1.0.1', '1.0.1post.a', '1.0.1post.b', '1.0.1post.z', '1.0.1post.za', '1.0.2' ] ] self.assertEqual(sorted(openssl), openssl)
def test_hexrd(self):
    """A .dev prerelease must sort before the later release."""
    tags = ['0.3.0.dev', '0.3.3']
    orders = [VersionOrder(tag) for tag in tags]
    self.assertEqual(sorted(orders), orders)
'glueviz', 'jupyterlab', 'notebook', 'orange3', 'qtconsole', 'rstudio', 'spyder', 'vscode' ] try: from conda import __version__ as CONDA_INTERFACE_VERSION conda_interface_type = 'conda' except ImportError: raise RuntimeError("Conda must be installed for python interpreter\n" "with sys.prefix: %s" % sys.prefix) if conda_interface_type == 'conda': # This import path has been stable since 2016 from conda.models.version import VersionOrder _conda_version = VersionOrder(CONDA_INTERFACE_VERSION).version # Flatten VersionOrder.version, skip epoch, and keep only major and minor CONDA_MAJOR_MINOR = tuple(chain.from_iterable(_conda_version))[1:3] from conda.api import SubdirData # noqa from conda.base.context import ( context as _conda_context, replace_context_default as _conda_replace_context_default, ) from conda.common.io import env_vars as _env_vars from conda.core.package_cache_data import ( PackageCacheData as _PackageCacheData, ) from conda.core.prefix_data import PrefixData as _PrefixData from conda.exports import default_prefix as _default_prefix from conda.models.channel import all_channel_urls as _all_channel_urls from conda.gateways.disk.read import read_paths_json as _read_paths_json