Example #1
    def test_empty_specifier(self, version):
        spec = SpecifierSet(prereleases=True)

        assert version in spec
        assert spec.contains(version)
        assert parse(version) in spec
        assert spec.contains(parse(version))
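For context: an empty SpecifierSet places no constraint on the version, so the test above passes for any valid version string; the prereleases=True flag is what additionally lets development and pre-release versions through. A minimal standalone illustration:

from packaging.specifiers import SpecifierSet

assert SpecifierSet(prereleases=True).contains("1.0.dev1")
assert not SpecifierSet().contains("1.0.dev1")  # prereleases are excluded by default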
Example #2
def check(packages, key, db_mirror, cached, ignore_ids):

    key = key if key else os.environ.get("SAFETY_API_KEY", False)
    db = fetch_database(key=key, db=db_mirror, cached=cached)
    db_full = None
    vulnerable_packages = frozenset(db.keys())
    vulnerable = []
    for pkg in packages:
        # normalize the package name; the safety-db converts underscores to
        # dashes and uses lowercase names
        name = pkg.key.replace("_", "-").lower()

        if name in vulnerable_packages:
            # we have a candidate here, build the spec set
            for specifier in db[name]:
                spec_set = SpecifierSet(specifiers=specifier)
                if spec_set.contains(pkg.version):
                    if not db_full:
                        db_full = fetch_database(full=True, key=key, db=db_mirror)
                    for data in get_vulnerabilities(pkg=name, spec=specifier, db=db_full):
                        vuln_id = data.get("id").replace("pyup.io-", "")
                        if vuln_id and vuln_id not in ignore_ids:
                            vulnerable.append(
                                Vulnerability(
                                    name=name,
                                    spec=specifier,
                                    version=pkg.version,
                                    advisory=data.get("advisory"),
                                    vuln_id=vuln_id
                                )
                            )
    return vulnerable
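A hypothetical invocation sketch (the Package tuple and argument values are illustrative, not part of safety's API): check() only needs package objects exposing .key and .version, as pkg_resources distributions do.

import collections

Package = collections.namedtuple("Package", ["key", "version"])
vulns = check(
    packages=[Package(key="django", version="1.8.0")],
    key=None,        # falls back to the SAFETY_API_KEY environment variable
    db_mirror=False,
    cached=True,
    ignore_ids=set(),
)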
Example #3
    def test_specifier_filter(self, specifier_prereleases, specifier,
                              prereleases, input, expected):
        if specifier_prereleases is None:
            spec = SpecifierSet(specifier)
        else:
            spec = SpecifierSet(specifier, prereleases=specifier_prereleases)

        kwargs = (
            {"prereleases": prereleases} if prereleases is not None else {}
        )

        assert list(spec.filter(input, **kwargs)) == expected
Example #4
    def test_specifier_contains_prereleases(self):
        spec = SpecifierSet()
        assert spec.prereleases is None
        assert not spec.contains("1.0.dev1")
        assert spec.contains("1.0.dev1", prereleases=True)

        spec = SpecifierSet(prereleases=True)
        assert spec.prereleases
        assert spec.contains("1.0.dev1")
        assert not spec.contains("1.0.dev1", prereleases=False)
Example #5
 def test_spec(self, spec, db):
     try:
         SpecifierSet(spec)
     except InvalidSpecifier as e:
         self.fail("Invalid specifier in {db}: {e}".format(db=db, e=e))
Example #6
@pytest.mark.parametrize(
    "specset, new_set",
    [(
        "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
        [("!=", (3, 0)), ("!=", (3, 1)), ("!=", (3, 2)), ("!=", (3, 3))],
    )],
)
def test_get_specs(specset, new_set):
    assert requirementslib.models.markers._get_specs(specset) == new_set


@pytest.mark.parametrize(
    "specset, new_set",
    [
        (SpecifierSet("!=3.0,!=3.1,!=3.2,!=3.3"), [
            ("not in", "3.0, 3.1, 3.2, 3.3")
        ]),
        (SpecifierSet("==3.0,==3.1,==3.2,==3.3"), [
            ("in", "3.0, 3.1, 3.2, 3.3")
        ]),
        (
            SpecifierSet("!=3.0,!=3.1,!=3.2,!=3.3,>=2.7,<3.7"),
            [(">=", "2.7"), ("not in", "3.0, 3.1, 3.2, 3.3"), ("<", "3.7")],
        ),
        (SpecifierSet(">2.6,>=2.7,<3.6,<3.7"), [(">=", "2.7"), ("<", "3.7")]),
    ],
)
def test_cleanup_pyspecs(specset, new_set):
    assert requirementslib.models.markers.cleanup_pyspecs(specset) == new_set

Example #7
def parse_require(req):
    req_object = Requirement.parse(req)
    pkg_name = req_object.key
    spec = SpecifierSet(str(req_object).replace(pkg_name, ""))
    return [pkg_name, spec]
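A hedged usage sketch, assuming Requirement is setuptools' pkg_resources.Requirement (whose string form is the name followed by its specifiers):

name, spec = parse_require("requests>=2.0,<3.0")
# name == "requests"; spec compares equal to SpecifierSet(">=2.0,<3.0")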
Example #8
def parse_marker_dict(marker_dict):
    op = marker_dict["op"]
    lhs = marker_dict["lhs"]
    rhs = marker_dict["rhs"]
    # This is where the spec sets for each side land if we have an "or" operator
    side_spec_list = []
    side_markers_list = []
    finalized_marker = ""
    # And if we hit the end of the parse tree we use this format string to make a marker
    format_string = "{lhs} {op} {rhs}"
    specset = SpecifierSet()
    specs = set()
    # Iterate over each side of the parsed marker: if either side is a mapping
    # (i.e. a dictionary), recursively parse it and reduce the specset.
    # Union the "and" specs and intersect the "or" specs to find the most
    # appropriate range.
    if any(issubclass(type(side), Mapping) for side in (lhs, rhs)):
        for side in (lhs, rhs):
            side_specs = set()
            side_markers = set()
            if issubclass(type(side), Mapping):
                merged_side_specs, merged_side_markers = parse_marker_dict(side)
                side_specs.update(merged_side_specs)
                side_markers.update(merged_side_markers)
            else:
                marker = _ensure_marker(side)
                marker_parts = getattr(marker, "_markers", [])
                if marker_parts[0][0].value == "python_version":
                    side_specs |= set(get_specset(marker_parts))
                else:
                    side_markers.add(str(marker))
            side_spec_list.append(side_specs)
            side_markers_list.append(side_markers)
        if op == "and":
            # When "and"-ing sides together, it makes the most sense to
            # reduce them here into a single PySpec instance
            specs = reduce(lambda x, y: set(x) | set(y), side_spec_list)
            markers = reduce(lambda x, y: set(x) | set(y), side_markers_list)
            if not specs and not markers:
                return specset, finalized_marker
            if markers and isinstance(markers, (tuple, list, Set)):
                finalized_marker = Marker(" and ".join([m for m in markers if m]))
            elif markers:
                finalized_marker = str(markers)
            specset._specs = frozenset(specs)
            return specset, finalized_marker
        # When "or"-ing, the sides can likewise be reduced into a single set,
        # this time by intersection
        sides = reduce(lambda x, y: set(x) & set(y), side_spec_list)
        finalized_marker = " or ".join(
            [normalize_marker_str(m) for m in side_markers_list]
        )
        specset._specs = frozenset(sorted(sides))
        return specset, finalized_marker
    else:
        # At a leaf of the parse tree both sides are plain strings, so they
        # just need to be combined into a single marker string
        specs = set()
        if lhs == "python_version":
            format_string = "{lhs}{op}{rhs}"
            marker = Marker(format_string.format(**marker_dict))
            marker_parts = getattr(marker, "_markers", [])
            _set = get_specset(marker_parts)
            if _set:
                specs |= set(_set)
                specset._specs = frozenset(specs)
        return specset, finalized_marker
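A hedged sketch of a leaf-level call, assuming the parse tree uses plain-string leaves in which the right-hand side carries its own quotes (the input dict here is illustrative):

specset, marker = parse_marker_dict(
    {"op": ">=", "lhs": "python_version", "rhs": "'3.6'"}
)
# specset should then compare equal to SpecifierSet(">=3.6"); marker stays "".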
Example #9
def test_extension_proxy():
    # Test with minimum properties:
    extension = MinimumExtension()
    proxy = ExtensionProxy(extension)

    assert isinstance(proxy, Extension)
    assert isinstance(proxy, AsdfExtension)

    assert proxy.extension_uri == "asdf://somewhere.org/extensions/minimum-1.0"
    assert proxy.legacy_class_names == set()
    assert proxy.asdf_standard_requirement == SpecifierSet()
    assert proxy.converters == []
    assert proxy.tags == []
    assert proxy.types == []
    assert proxy.tag_mapping == []
    assert proxy.url_mapping == []
    assert proxy.delegate is extension
    assert proxy.legacy is False
    assert proxy.package_name is None
    assert proxy.package_version is None
    assert proxy.class_name == "asdf.tests.test_extension.MinimumExtension"

    # The subclassed version should have the same defaults:
    extension = MinimumExtensionSubclassed()
    subclassed_proxy = ExtensionProxy(extension)
    assert subclassed_proxy.extension_uri == proxy.extension_uri
    assert subclassed_proxy.legacy_class_names == proxy.legacy_class_names
    assert subclassed_proxy.asdf_standard_requirement == proxy.asdf_standard_requirement
    assert subclassed_proxy.converters == proxy.converters
    assert subclassed_proxy.tags == proxy.tags
    assert subclassed_proxy.types == proxy.types
    assert subclassed_proxy.tag_mapping == proxy.tag_mapping
    assert subclassed_proxy.url_mapping == proxy.url_mapping
    assert subclassed_proxy.delegate is extension
    assert subclassed_proxy.legacy == proxy.legacy
    assert subclassed_proxy.package_name == proxy.package_name
    assert subclassed_proxy.package_version == proxy.package_version
    assert subclassed_proxy.class_name == "asdf.tests.test_extension.MinimumExtensionSubclassed"

    # Test with all properties present:
    converters = [
        MinimumConverter(
            tags=["asdf://somewhere.org/extensions/full/tags/foo-*"], types=[])
    ]
    extension = FullExtension(
        converters=converters,
        asdf_standard_requirement=">=1.4.0",
        tags=["asdf://somewhere.org/extensions/full/tags/foo-1.0"],
        legacy_class_names=["foo.extensions.SomeOldExtensionClass"])
    proxy = ExtensionProxy(extension,
                           package_name="foo",
                           package_version="1.2.3")

    assert proxy.extension_uri == "asdf://somewhere.org/extensions/full-1.0"
    assert proxy.legacy_class_names == {"foo.extensions.SomeOldExtensionClass"}
    assert proxy.asdf_standard_requirement == SpecifierSet(">=1.4.0")
    assert proxy.converters == [ConverterProxy(c, proxy) for c in converters]
    assert len(proxy.tags) == 1
    assert proxy.tags[0].tag_uri == "asdf://somewhere.org/extensions/full/tags/foo-1.0"
    assert proxy.types == []
    assert proxy.tag_mapping == []
    assert proxy.url_mapping == []
    assert proxy.delegate is extension
    assert proxy.legacy is False
    assert proxy.package_name == "foo"
    assert proxy.package_version == "1.2.3"
    assert proxy.class_name == "asdf.tests.test_extension.FullExtension"

    # Should fail when the input is not one of the two extension interfaces:
    with pytest.raises(TypeError):
        ExtensionProxy(object)

    # Should fail with a bad converter:
    with pytest.raises(TypeError):
        ExtensionProxy(FullExtension(converters=[object()]))

    # Unparseable ASDF Standard requirement:
    with pytest.raises(ValueError):
        ExtensionProxy(
            FullExtension(asdf_standard_requirement="asdf-standard >= 1.4.0"))

    # Unrecognized ASDF Standard requirement type:
    with pytest.raises(TypeError):
        ExtensionProxy(FullExtension(asdf_standard_requirement=object()))

    # Bad tag:
    with pytest.raises(TypeError):
        ExtensionProxy(FullExtension(tags=[object()]))

    # Bad legacy class names:
    with pytest.raises(TypeError):
        ExtensionProxy(FullExtension(legacy_class_names=[object]))
Example #10
 def test_comparison_canonicalizes(self, left, right):
     assert SpecifierSet(left) == SpecifierSet(right)
     assert left == SpecifierSet(right)
     assert SpecifierSet(left) == right
Example #11
 def test_specifiers_combine_not_implemented(self):
     with pytest.raises(TypeError):
         SpecifierSet() & 12
Example #12
File: generate.py Project: testwc/get-pip
def determine_latest(versions: Iterable[Version], *, constraint: str):
    assert sorted(versions) == list(versions)
    return list(SpecifierSet(constraint).filter(versions))[-1]
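A quick usage sketch; note the assert requires the input versions to be pre-sorted:

from packaging.version import Version

versions = [Version("1.0"), Version("2.0"), Version("2.1")]
determine_latest(versions, constraint="<2.1")  # -> Version("2.0")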
Example #13
def can_run_smdataparallel_efa(ecr_image):
    _, image_framework_version = get_framework_and_version_from_tag(ecr_image)
    image_cuda_version = get_cuda_version_from_tag(ecr_image)
    return (
        Version(image_framework_version) in SpecifierSet(">=2.4.1")
        and Version(image_cuda_version.strip("cu")) >= Version("110")
    )
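The membership test Version(...) in SpecifierSet(...) used here is equivalent to calling .contains(); a minimal standalone check:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

assert Version("2.5.0") in SpecifierSet(">=2.4.1")
assert SpecifierSet(">=2.4.1").contains(Version("2.5.0"))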
Example #14
    import importlib_metadata

# -- Check for missing dependencies -------------------------------------------
missing_requirements = {}
for line in importlib_metadata.requires('astropy'):
    if 'extra == "docs"' in line:
        req = Requirement(line.split(';')[0])
        req_package = req.name.lower()
        req_specifier = str(req.specifier)

        try:
            version = importlib_metadata.version(req_package)
        except importlib_metadata.PackageNotFoundError:
            missing_requirements[req_package] = req_specifier
            continue

        if version not in SpecifierSet(req_specifier, prereleases=True):
            missing_requirements[req_package] = req_specifier

if missing_requirements:
    print('The following packages could not be found and are required to '
          'build the documentation:')
    for key, val in missing_requirements.items():
        print(f'    * {key} {val}')
    print('Please install the "docs" requirements.')
    sys.exit(1)

from sphinx_astropy.conf.v1 import *  # noqa

# -- Plot configuration -------------------------------------------------------
plot_rcparams = {}
plot_rcparams['figure.figsize'] = (6, 6)
Example #15
def can_run_s3_plugin(ecr_image):
    _, image_framework_version = get_framework_and_version_from_tag(ecr_image)
    return Version(image_framework_version) in SpecifierSet(">=1.7")
Example #16
    def validate(iso_info):
        """

        Parameters
        ----------
        iso_info: CortxISOInfo
            CortxISOInfo instance with all necessary information about
            SW upgrade ISO

        Returns
        -------
        None

        Raises
        ------
        ValidationError
            If validation fails.
        """
        packages = list(iso_info.packages.keys())

        if not packages:
            return  # nothing to validate

        # NOTE: the first line of `yum -q list installed` output is
        #  'Installed Packages'; skip it via `tail -n +2`
        cmd = (f"yum -q list installed {' '.join(packages)} 2>/dev/null |"
               f" tail -n +2 | awk '{{print $1\" \"$2}}'")

        try:
            res = cmd_run(cmd, targets=local_minion_id())
        except Exception as e:
            logger.debug(f'Package compatibility check failed: "{e}"')
            raise ValidationError(
                f'Package compatibility check failed: "{e}"') from e

        res = res[local_minion_id()].strip()

        if res:
            logger.debug(f"List of installed CORTX packages: {res}")
        else:
            logger.warning("There are no installed CORTX packages")
            return  # nothing to validate since there are no CORTX packages

        res = res.split('\n')

        packages = dict()
        for pkg in res:
            # Aggregate version information of installed CORTX packages
            pkg_name, pkg_version = pkg.split(" ")
            # remove the architecture suffix from the package name
            pkg_name = pkg_name.split(".")[0]
            packages[pkg_name] = utils.normalize_rpm_version(pkg_version)

        error_msg = list()

        compatibility = iso_info.packages.get(CORTX_VERSION, {}).get(
            SWUpgradeInfoFields.VERSION_COMPATIBILITY.value, None)
        if compatibility:
            cortx_version = GetRelease.cortx_version()
            if Version(cortx_version) in SpecifierSet(compatibility):
                logger.info(
                    f"The CORTX release version '{cortx_version}' "
                    f"satisfies the constraint version '{compatibility}'")
            else:
                msg = (f"The CORTX release version '{cortx_version}' does not "
                       f"satisfy the constraint version '{compatibility}'")
                logger.error(msg)
                error_msg.append(msg)

        for pkg in iso_info.packages:
            if (SWUpgradeInfoFields.VERSION_COMPATIBILITY.value
                    in iso_info.packages[pkg]):
                compatibility = iso_info.packages[pkg][
                    SWUpgradeInfoFields.VERSION_COMPATIBILITY.value]

                installed_ver = packages.get(pkg, None)
                if installed_ver is None:
                    msg = (f"There is version constraint '{compatibility}' for"
                           f" the CORTX package '{pkg}' that is not installed")
                    logger.debug(msg)
                    continue

                # NOTE: we used for comparison normalized values of RPM version
                #  For more details, please, review
                #  `provisioner.utils.normalize_rpm_version`
                #  There is some interesting behavior of packaging API for
                #  versions comparison:
                #  >>> Version('2.0.0-275') in SpecifierSet('> 2.0.0')
                #  False
                #  >>> Version('2.0.0-275') in SpecifierSet('>= 2.0.0')
                #  True
                #  >>> Version('2.0.0-275') in SpecifierSet('== 2.0.0')
                #  False
                #  >>> Version('2.0.0-275') in SpecifierSet('> 2.0.0-0')
                #  True
                # >>> version.parse('2.0.0-275') > version.parse('2.0.0')
                # True

                if Version(installed_ver) in SpecifierSet(compatibility):
                    logger.info(f"The CORTX package '{pkg}' of version "
                                f"'{installed_ver}' satisfies the constraint "
                                f"version '{compatibility}'")
                else:
                    msg = (f"The CORTX package '{pkg}' of version "
                           f"'{installed_ver}' does not satisfy the "
                           f"constraint version '{compatibility}'")
                    logger.error(msg)
                    error_msg.append(msg)

        if error_msg:
            details = "\n".join(error_msg)
            raise ValidationError("During validation some compatibility "
                                  f"errors were found: {details}")
Example #17
 def version_matches(self, requirement):
     specifier_set = SpecifierSet(requirement.specifiers)
     return specifier_set.contains(self.package_version)
Example #18
import logging

from typing import List, Dict, Optional

from packaging.specifiers import SpecifierSet
from packaging.version import parse as parse_version

from ospd_openvas.db import NVT_META_FIELDS, RedisCtx
from ospd_openvas.errors import OspdOpenvasError

logger = logging.getLogger(__name__)

LIST_FIRST_POS = 0
LIST_LAST_POS = -1

# The NVTI cache with gvm-libs 10 should fit too, but openvas was only
# introduced with GVM 11 and gvm-libs 11
SUPPORTED_NVTICACHE_VERSIONS_SPECIFIER = SpecifierSet('>=11.0')


class NVTICache(object):

    QOD_TYPES = {
        'exploit': '100',
        'remote_vul': '99',
        'remote_app': '98',
        'package': '97',
        'registry': '97',
        'remote_active': '95',
        'remote_banner': '80',
        'executable_version': '80',
        'remote_analysis': '70',
        'remote_probe': '50',
Example #19
 def test_specifiers_hash(self, specifier):
     assert hash(SpecifierSet(specifier)) == hash(SpecifierSet(specifier))
Example #20
    def test_specifiers_combine(self, left, right, expected):
        result = SpecifierSet(left) & SpecifierSet(right)
        assert result == SpecifierSet(expected)

        result = SpecifierSet(left) & right
        assert result == SpecifierSet(expected)

        result = SpecifierSet(left, prereleases=True) & SpecifierSet(right)
        assert result == SpecifierSet(expected)
        assert result.prereleases

        result = SpecifierSet(left, prereleases=False) & SpecifierSet(right)
        assert result == SpecifierSet(expected)
        assert not result.prereleases

        result = SpecifierSet(left) & SpecifierSet(right, prereleases=True)
        assert result == SpecifierSet(expected)
        assert result.prereleases

        result = SpecifierSet(left) & SpecifierSet(right, prereleases=False)
        assert result == SpecifierSet(expected)
        assert not result.prereleases

        result = SpecifierSet(left, prereleases=True) & SpecifierSet(
            right, prereleases=True
        )
        assert result == SpecifierSet(expected)
        assert result.prereleases

        result = SpecifierSet(left, prereleases=False) & SpecifierSet(
            right, prereleases=False
        )
        assert result == SpecifierSet(expected)
        assert not result.prereleases

        with pytest.raises(ValueError):
            result = SpecifierSet(left, prereleases=True) & SpecifierSet(
                right, prereleases=False
            )

        with pytest.raises(ValueError):
            result = SpecifierSet(left, prereleases=False) & SpecifierSet(
                right, prereleases=True
            )
Example #21
def can_run_mnist_estimator(ecr_image):
    _, image_framework_version = get_framework_and_version_from_tag(ecr_image)
    return Version(image_framework_version) in SpecifierSet("<2.6")
Example #22
 def test_comparison_false(self, left, right, op):
     assert not op(SpecifierSet(left), SpecifierSet(right))
     assert not op(SpecifierSet(left), Specifier(right))
     assert not op(Specifier(left), SpecifierSet(right))
     assert not op(left, SpecifierSet(right))
     assert not op(SpecifierSet(left), right)
Example #23
def compare_python_version(version_spec):
    current_sys_version = parse(platform.python_version())
    spec_set = SpecifierSet(version_spec)

    return current_sys_version in spec_set
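For example (results depend on the interpreter actually running the check):

compare_python_version(">=3.8")       # True on Python 3.8 or newer
compare_python_version(">=3.0,<4.0")  # True on any Python 3.x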
Example #24
 def test_comparison_non_specifier(self):
     assert SpecifierSet("==1.0") != 12
     assert not SpecifierSet("==1.0") == 12
Example #25
 def test_length(self, spec, expected_length):
     spec = SpecifierSet(spec)
     assert len(spec) == expected_length
Example #26
def test_manifest_extension():
    with config_context() as config:
        minimal_manifest = """%YAML 1.1
---
id: asdf://somewhere.org/manifests/foo
extension_uri: asdf://somewhere.org/extensions/foo
...
"""
        config.add_resource_mapping(
            {"asdf://somewhere.org/extensions/foo": minimal_manifest})
        extension = ManifestExtension.from_uri(
            "asdf://somewhere.org/extensions/foo")
        assert isinstance(extension, Extension)
        assert extension.extension_uri == "asdf://somewhere.org/extensions/foo"
        assert extension.legacy_class_names == []
        assert extension.asdf_standard_requirement is None
        assert extension.converters == []
        assert extension.tags == []

        proxy = ExtensionProxy(extension)
        assert proxy.extension_uri == "asdf://somewhere.org/extensions/foo"
        assert proxy.legacy_class_names == set()
        assert proxy.asdf_standard_requirement == SpecifierSet()
        assert proxy.converters == []
        assert proxy.tags == []

    with config_context() as config:
        full_manifest = """%YAML 1.1
---
id: asdf://somewhere.org/manifests/foo
extension_uri: asdf://somewhere.org/extensions/foo
asdf_standard_requirement:
  gte: 1.6.0
  lt: 2.0.0
tags:
  - asdf://somewhere.org/tags/bar
  - tag_uri: asdf://somewhere.org/tags/baz
    schema_uri: asdf://somewhere.org/schemas/baz
    title: Baz title
    description: Bar description
...
"""
        config.add_resource_mapping(
            {"asdf://somewhere.org/extensions/foo": full_manifest})

        class FooConverter:
            tags = [
                "asdf://somewhere.org/tags/bar",
                "asdf://somewhere.org/tags/baz"
            ]
            types = []

            def select_tag(self, *args):
                pass

            def to_yaml_tree(self, *args):
                pass

            def from_yaml_tree(self, *args):
                pass

        converter = FooConverter()

        extension = ManifestExtension.from_uri(
            "asdf://somewhere.org/extensions/foo",
            legacy_class_names=["foo.extension.LegacyExtension"],
            converters=[converter])
        assert extension.extension_uri == "asdf://somewhere.org/extensions/foo"
        assert extension.legacy_class_names == [
            "foo.extension.LegacyExtension"
        ]
        assert extension.asdf_standard_requirement == SpecifierSet(
            ">=1.6.0,<2.0.0")
        assert extension.converters == [converter]
        assert len(extension.tags) == 2
        assert extension.tags[0] == "asdf://somewhere.org/tags/bar"
        assert extension.tags[1].tag_uri == "asdf://somewhere.org/tags/baz"
        assert extension.tags[1].schema_uri == "asdf://somewhere.org/schemas/baz"
        assert extension.tags[1].title == "Baz title"
        assert extension.tags[1].description == "Bar description"

        proxy = ExtensionProxy(extension)
        assert proxy.extension_uri == "asdf://somewhere.org/extensions/foo"
        assert proxy.legacy_class_names == {"foo.extension.LegacyExtension"}
        assert proxy.asdf_standard_requirement == SpecifierSet(
            ">=1.6.0,<2.0.0")
        assert proxy.converters == [ConverterProxy(converter, proxy)]
        assert len(proxy.tags) == 2
        assert proxy.tags[0].tag_uri == "asdf://somewhere.org/tags/bar"
        assert proxy.tags[1].tag_uri == "asdf://somewhere.org/tags/baz"
        assert proxy.tags[1].schema_uri == "asdf://somewhere.org/schemas/baz"
        assert proxy.tags[1].title == "Baz title"
        assert proxy.tags[1].description == "Bar description"

    with config_context() as config:
        simple_asdf_standard_manifest = """%YAML 1.1
---
id: asdf://somewhere.org/manifests/foo
extension_uri: asdf://somewhere.org/extensions/foo
asdf_standard_requirement: 1.6.0
...
"""
        config.add_resource_mapping({
            "asdf://somewhere.org/extensions/foo":
            simple_asdf_standard_manifest
        })
        extension = ManifestExtension.from_uri(
            "asdf://somewhere.org/extensions/foo")
        assert extension.asdf_standard_requirement == SpecifierSet("==1.6.0")

        proxy = ExtensionProxy(extension)
        assert proxy.asdf_standard_requirement == SpecifierSet("==1.6.0")
Example #27
File: utils.py Project: abathur/anaphora
	def __init__(self, specifier):
		SpecifierSet.__init__(self, specifier)
		CacheDict.__init__(self, self.contains)
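A hedged reading of this mixin-style initializer, assuming anaphora's CacheDict computes missing keys with the callable it wraps (the class and variable names below are hypothetical):

spec = CachedSpecifierSet(">=1.0,<2.0")  # hypothetical subclass using this __init__
spec["1.5"]  # computes and caches self.contains("1.5")
spec["1.5"]  # repeat lookups are served from the cache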
Example #28
    def _check_python_packages(
        cls,
        unit_name: str,
        python_packages_present: List[Dict[str, str]],
        python_packages_required: List[Dict[str, str]],
    ) -> bool:
        """Check if required Python packages are present in the environment."""
        # Convert to dict to have O(1) access time.
        py_packages_present_dict: Dict[str, List[Dict[str, str]]] = {}
        for python_package_present in python_packages_present:
            package = py_packages_present_dict.get(
                python_package_present["package_name"])
            if package is None:
                py_packages_present_dict[
                    python_package_present["package_name"]] = [
                        python_package_present
                    ]
            else:
                package.append(python_package_present)

        if isinstance(python_packages_required, dict):
            if "not" not in python_packages_required:
                _LOGGER.error(
                    "%s: Unable to parse description of Python packages required",
                    unit_name)
                return False

            for not_required_python_package in python_packages_required["not"]:
                for py_package_present in py_packages_present_dict.get(
                        not_required_python_package["name"]) or []:
                    location = not_required_python_package.get("location")
                    if location is not None and not re.fullmatch(
                            location, py_package_present["location"]):
                        _LOGGER.debug(
                            "%s: Python package %r in %r is located in different location %r as expected",
                            unit_name,
                            not_required_python_package["name"],
                            py_package_present["location"],
                            location,
                        )
                        continue

                    version = not_required_python_package.get("version")
                    if version and py_package_present[
                            "package_version"] not in SpecifierSet(version):
                        _LOGGER.debug(
                            "%s: Python package '%s==%s' (in %r) matches version %r but should not",
                            unit_name,
                            not_required_python_package["name"],
                            py_package_present["package_version"],
                            py_package_present["location"],
                            version,
                        )
                        continue

                    _LOGGER.debug(
                        "%s: presence of Python package %r causes not including the pipeline unit",
                        unit_name,
                        py_package_present,
                    )
                    return False
        else:
            for required_python_package in python_packages_required:
                for py_package_present in py_packages_present_dict.get(
                        required_python_package["name"]) or []:
                    version = required_python_package.get("version")
                    if version and py_package_present[
                            "package_version"] not in SpecifierSet(version):
                        _LOGGER.debug(
                            "%s: Python package '%s==%s' (in %r) does not match required version %r",
                            unit_name,
                            required_python_package["name"],
                            py_package_present["package_version"],
                            py_package_present.get("location", "any"),
                            version,
                        )
                        continue

                    location = required_python_package.get("location")
                    if location is not None and not re.fullmatch(
                            location, py_package_present["location"]):
                        _LOGGER.debug(
                            "%s: Python package %r is located at %r but expected to be in %r",
                            unit_name,
                            required_python_package["name"],
                            py_package_present["location"],
                            location,
                        )
                        continue

                    _LOGGER.debug(
                        "%s: Python package %r in version %r (located in %r) is found in the runtime environment",
                        unit_name,
                        required_python_package["name"],
                        required_python_package.get("version", "any"),
                        py_package_present.get("location", "any"),
                    )
                    break
                else:
                    _LOGGER.debug(
                        "%s: Not including as Python package %r (in version %r) is not present in the environment",
                        unit_name,
                        required_python_package["name"],
                        required_python_package.get("version", "any"),
                    )
                    return False

        _LOGGER.debug("%s: all Python package presence checks passed",
                      unit_name)
        return True
Example #29
def make_requirements(args):
    """Split up requirements by epoch.

    Parameters
    ----------
    args : argparse.Namespace
        Command-line arguments

    Returns
    -------
    requirements : list of lists
        Each list corresponds to one epoch, with the epochs in order
    """
    reqs = []
    for requirements_file in args.requirements:
        reqs.append(parse_requirements(requirements_file))
    reqs.append([parse_requirement(req) for req in args.package])
    # Convert from list of iterables to an iterable
    reqs = itertools.chain(*reqs)
    defaults = []
    for default_versions in args.default_versions:
        defaults.extend(parse_requirements(default_versions))
    # Convert defaults from a list to a dictionary
    default_for = {}
    for item in defaults:
        if isinstance(item, Requirement):
            if item.marker and not item.marker.evaluate():
                continue
            name = canonicalize_name(item.name)
            pin = None
            for spec in item.specifier:
                if spec.operator in {'==', '==='}:
                    pin = spec
            if pin is not None:
                if name in default_for and default_for[name] != pin:
                    raise KeyError(
                        '{} is listed twice in {} with conflicting versions'.
                        format(name, args.default_versions))
                default_for[name] = pin

    by_epoch = {}
    for item in reqs:
        if isinstance(item, Requirement):
            if item.marker and not item.marker.evaluate():
                continue
            pinned = (item.url is not None)
            name = canonicalize_name(item.name)
            for spec in item.specifier:
                if spec.operator in {'==', '==='}:
                    pinned = True
            if not pinned:
                if name not in default_for:
                    if not args.allow_unversioned:
                        raise RuntimeError(
                            '{} is not version-pinned'.format(name))
                else:
                    pin = default_for[name]
                    item = copy.deepcopy(item)
                    item.specifier &= SpecifierSet(six.text_type(pin))
            value = six.text_type(item)
        else:
            name = item
            value = item
        epoch = EPOCH.get(name, 0)
        by_epoch.setdefault(epoch, []).append(value)
    return [by_epoch[x] for x in sorted(by_epoch.keys())]
Example #30
 def test_iteration(self, spec, expected_items):
     spec = SpecifierSet(spec)
     items = set(str(item) for item in spec)
     assert items == set(expected_items)
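Iterating a SpecifierSet yields its individual Specifier objects, which is what this test exercises; a standalone illustration:

from packaging.specifiers import SpecifierSet

{str(s) for s in SpecifierSet(">=1.0,<2.0")}  # -> {'>=1.0', '<2.0'}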
Example #31
def test_pip_check(image):
    """
    Ensure there are no broken requirements on the containers by running "pip check"

    :param image: ECR image URI
    """
    ctx = Context()
    gpu_suffix = "-gpu" if "gpu" in image else ""
    allowed_exception_list = []

    # TF inference containers do not have core tensorflow installed by design. Allowing for this pip check error
    # to occur in order to catch other pip check issues that may be associated with TF inference
    # smclarify binaries have an s3fs->aiobotocore dependency which uses an older
    # version of botocore. Temporarily allowing this to catch other issues.
    allowed_tf_exception = re.compile(
        rf"^tensorflow-serving-api{gpu_suffix} \d\.\d+\.\d+ requires tensorflow{gpu_suffix}, which is not installed.$"
    )
    allowed_exception_list.append(allowed_tf_exception)

    allowed_smclarify_exception = re.compile(
        r"^aiobotocore \d+(\.\d+)* has requirement botocore<\d+(\.\d+)*,>=\d+(\.\d+)*, "
        r"but you have botocore \d+(\.\d+)*\.$")
    allowed_exception_list.append(allowed_smclarify_exception)

    # The v0.22 version of tensorflow-io has a bug fixed in v0.23 https://github.com/tensorflow/io/releases/tag/v0.23.0
    allowed_habana_tf_exception = re.compile(
        rf"^tensorflow-io 0.22.0 requires tensorflow, which is not installed.$"
    )
    allowed_exception_list.append(allowed_habana_tf_exception)

    framework, framework_version = get_framework_and_version_from_tag(image)
    # The v0.21 version of tensorflow-io has a bug fixed in v0.23 https://github.com/tensorflow/io/releases/tag/v0.23.0

    tf263_io21_issue_framework_list = [
        "tensorflow", "huggingface_tensorflow", "huggingface_tensorflow_trcomp"
    ]
    if framework in tf263_io21_issue_framework_list or Version(
            framework_version) in SpecifierSet(">=2.6.3,<2.7"):
        allowed_tf263_exception = re.compile(
            rf"^tensorflow-io 0.21.0 requires tensorflow, which is not installed.$"
        )
        allowed_exception_list.append(allowed_tf263_exception)

    if "autogluon" in image and (("0.3.1" in image) or ("0.3.2" in image)):
        allowed_autogluon_exception = re.compile(
            rf"autogluon-(vision|mxnet) 0.3.1 has requirement Pillow<8.4.0,>=8.3.0, but you have pillow \d+(\.\d+)*"
        )
        allowed_exception_list.append(allowed_autogluon_exception)

    # TF2.9 sagemaker containers introduce tf-models-official, which has a known bug
    # wherein it does not respect the existing TF installation
    # (https://github.com/tensorflow/models/issues/9267). This package in turn brings
    # in tensorflow-text. Skip checking these two packages as this is an upstream issue.
    if framework == "tensorflow" and Version(
            framework_version) in SpecifierSet(">=2.9.1"):
        allowed_tf29_exception = re.compile(
            rf"^(tf-models-official 2.9.1|tensorflow-text 2.9.0) requires tensorflow, which is not installed."
        )
        allowed_exception_list.append(allowed_tf29_exception)

    # Add null entrypoint to ensure command exits immediately
    output = ctx.run(f"docker run --entrypoint='' {image} pip check",
                     hide=True,
                     warn=True)
    if output.return_code != 0:
        if not (any([
                allowed_exception.match(output.stdout)
                for allowed_exception in allowed_exception_list
        ])):
            # Rerun pip check test if this is an unexpected failure
            ctx.run(f"docker run --entrypoint='' {image} pip check", hide=True)
Example #32
 def test_legacy_specifiers_combined(self):
     spec = SpecifierSet("<3,>1-1-1")
     assert spec.contains("2.0")
Example #33
    def test_specifier_prereleases_explicit(self):
        spec = SpecifierSet()
        assert not spec.prereleases
        assert "1.0.dev1" not in spec
        assert not spec.contains("1.0.dev1")
        spec.prereleases = True
        assert spec.prereleases
        assert "1.0.dev1" in spec
        assert spec.contains("1.0.dev1")

        spec = SpecifierSet(prereleases=True)
        assert spec.prereleases
        assert "1.0.dev1" in spec
        assert spec.contains("1.0.dev1")
        spec.prereleases = False
        assert not spec.prereleases
        assert "1.0.dev1" not in spec
        assert not spec.contains("1.0.dev1")

        spec = SpecifierSet(prereleases=True)
        assert spec.prereleases
        assert "1.0.dev1" in spec
        assert spec.contains("1.0.dev1")
        spec.prereleases = None
        assert not spec.prereleases
        assert "1.0.dev1" not in spec
        assert not spec.contains("1.0.dev1")
Example #35
 def test_legacy_specifiers_combined(self):
     spec = SpecifierSet("<3,>1-1-1")
     assert "2.0" in spec
Example #36
    def test_specifiers_str_and_repr(self, specifier, expected):
        spec = SpecifierSet(specifier)

        assert str(spec) == expected
        assert repr(spec) == "<SpecifierSet({0})>".format(repr(expected))
Example #37
    is_image_available_locally,
    login_to_ecr_registry,
    get_region_from_image_uri
)
from test.test_utils import ecr as ecr_utils
from test.test_utils.security import (
    CVESeverity,
    ScanVulnerabilityList,
    conduct_failure_routine,
    process_failure_routine_summary_and_store_data_in_s3,
    run_scan,
    fetch_other_vulnerability_lists,
)
from src.config import is_ecr_scan_allowlist_feature_enabled

ALLOWLIST_FEATURE_ENABLED_IMAGES = {"mxnet": SpecifierSet(">=1.8.0,<1.9.0")}
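A hedged sketch of how such a mapping might gate a check (the framework and version values are illustrative; Version comes from packaging.version):

framework, version = "mxnet", "1.8.0"
spec = ALLOWLIST_FEATURE_ENABLED_IMAGES.get(framework)
if spec is not None and Version(version) in spec:
    ...  # the ECR scan allowlist feature applies to this image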


@pytest.mark.usefixtures("sagemaker")
@pytest.mark.model("N/A")
@pytest.mark.canary("Run security test regularly on production images")
def test_security(image):
    repo_name, image_tag = image.split("/")[-1].split(":")
    container_name = f"{repo_name}-{image_tag}-security"

    run(
        f"docker run -itd --name {container_name} "
        f"--mount type=bind,src=$(pwd)/container_tests,target=/test"
        f" --entrypoint='/bin/bash' "
        f"{image}",
        echo=True,