Example #1
    def from_db(cls, release):
        obj = cls(meta={"id": release.project.normalized_name})
        obj["name"] = release.project.name
        obj["normalized_name"] = release.project.normalized_name
        obj["version"] = [
            r.version
            for r in sorted(
                release.project.releases,
                key=lambda r: parse_version(r.version),
                reverse=True,
            )
        ]
        obj["latest_version"] = first(
            sorted(
                release.project.releases,
                key=lambda r: parse_version(r.version),
                reverse=True,
            ),
            key=lambda r: not r.is_prerelease,
            default=release.project.releases[0],
        ).version
        obj["summary"] = release.summary
        obj["description"] = release.description
        obj["author"] = release.author
        obj["author_email"] = release.author_email
        obj["maintainer"] = release.maintainer
        obj["maintainer_email"] = release.maintainer_email
        obj["home_page"] = release.home_page
        obj["download_url"] = release.download_url
        obj["keywords"] = release.keywords
        obj["platform"] = release.platform
        obj["created"] = release.created
        obj["classifiers"] = list(release.classifiers)

        return obj
Example #2
def _any_arguments(fun):
    if parse_version(magicgui.__version__) >= parse_version("0.3.0"):

        def _any():
            fun()

    else:

        def _any(*_):
            fun()

    return _any
Example #3
def get_latest_release(repo, version_min):
    current_version = parse_version(version_min)
    latest_version = current_version
    latest_release = None
    for release in repo.get_releases():
        version = parse_version(release.tag_name)
        if not version:
            continue
        if version >= latest_version:
            latest_release = release
            latest_version = version
    return latest_release
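A caveat worth noting for this pattern: with packaging >= 22, parse() raises InvalidVersion for tag names that are not valid PEP 440 versions (older releases returned a LegacyVersion instead), so the `if not version` guard never fires. A minimal defensive sketch under that assumption, not the original author's code:

from packaging.version import InvalidVersion, parse as parse_version


def get_latest_release_safe(repo, version_min):
    # Like get_latest_release above, but skip tags that are not PEP 440 versions.
    latest_version = parse_version(version_min)
    latest_release = None
    for release in repo.get_releases():
        try:
            version = parse_version(release.tag_name)
        except InvalidVersion:
            continue  # e.g. tags such as "nightly" or "release-2020"
        if version >= latest_version:
            latest_release = release
            latest_version = version
    return latest_release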
Example #4
def test_parquet(s3, engine, s3so, metadata_file):
    import s3fs

    dd = pytest.importorskip("dask.dataframe")
    pd = pytest.importorskip("pandas")
    np = pytest.importorskip("numpy")
    from dask.dataframe._compat import tm

    lib = pytest.importorskip(engine)
    lib_version = parse_version(lib.__version__)
    if engine == "pyarrow" and lib_version < parse_version("0.13.1"):
        pytest.skip("pyarrow < 0.13.1 not supported for parquet")
    if (engine == "pyarrow" and lib_version.major == 2
            and parse_version(s3fs.__version__) > parse_version("0.5.0")):
        pytest.skip("#7056 - new s3fs not supported before pyarrow 3.0")

    url = "s3://%s/test.parquet" % test_bucket_name

    data = pd.DataFrame(
        {
            "i32": np.arange(1000, dtype=np.int32),
            "i64": np.arange(1000, dtype=np.int64),
            "f": np.arange(1000, dtype=np.float64),
            "bhello": np.random.choice(
                ["hello", "you", "people"], size=1000
            ).astype("O"),
        },
        index=pd.Index(np.arange(1000), name="foo"),
    )
    df = dd.from_pandas(data, chunksize=500)
    df.to_parquet(url,
                  engine=engine,
                  storage_options=s3so,
                  write_metadata_file=metadata_file)

    files = [f.split("/")[-1] for f in s3.ls(url)]
    if metadata_file:
        assert "_common_metadata" in files
        assert "_metadata" in files
    assert "part.0.parquet" in files

    df2 = dd.read_parquet(url,
                          index="foo",
                          gather_statistics=True,
                          engine=engine,
                          storage_options=s3so)
    assert len(df2.divisions) > 1

    tm.assert_frame_equal(data, df2.compute())
Example #5
    def handle_errors(self, key: str | None):
        try:
            yield
        except MaxSpillExceeded as e:
            # key is in self.fast; no keys have been lost on eviction
            # Note: requires zict > 2.0
            (key_e,) = e.args
            assert key_e in self.fast
            assert key_e not in self.slow
            now = time.time()
            if now - self.last_logged >= self.min_log_interval:
                logger.warning(
                    "Spill file on disk reached capacity; keeping data in memory"
                )
                self.last_logged = now
            raise HandledError()
        except OSError:
            # Typically, this is a disk full error
            now = time.time()
            if now - self.last_logged >= self.min_log_interval:
                logger.error("Spill to disk failed; keeping data in memory",
                             exc_info=True)
                self.last_logged = now
            raise HandledError()
        except PickleError as e:
            key_e, orig_e = e.args
            if parse_version(zict.__version__) <= parse_version("2.0.0"):
                pass
            else:
                assert key_e in self.fast
            assert key_e not in self.slow
            if key_e == key:
                assert key is not None
                # The key we just inserted failed to serialize.
                # This happens only when the key is individually larger than target.
                # The exception will be caught by Worker and logged; the status of
                # the task will be set to error.
                if has_zict_210:
                    del self[key]
                else:
                    assert key not in self.fast
                    assert key not in self.slow
                raise orig_e
            else:
                # The key we just inserted is smaller than target, but it caused
                # another, unrelated key to be spilled out of the LRU, and that key
                # failed to serialize. There's nothing wrong with the new key. The older
                # key is still in memory.
                if key_e not in self.logged_pickle_errors:
                    logger.error(f"Failed to pickle {key_e!r}", exc_info=True)
                    self.logged_pickle_errors.add(key_e)
                raise HandledError()
Example #6
    def wrapper(wrapped_function, instance, args, kwargs):  # pylint: disable=unused-argument
        version = execute_plugin(["--version"], stdout=PIPE, stderr=PIPE)
        logger.debug(
            "session-manager-plugin version: %s (required version: %s)",
            version,
            required_version,
        )

        if version and parse_version(version) < parse_version(
                required_version):
            raise ValueError("Invalid plugin version: {}".format(version))

        return wrapped_function(*args, **kwargs)
Example #7
def check_version(tool_to_compare, min_version, max_version):
    name = tool_to_compare.__name__
    try:
        ver = parse_version(tool_to_compare.__version__)
    except AttributeError:
        print(f"Version for {name} could not be compared...")
        return

    if ver < parse_version(min_version):
        raise ImportError(f"{name} should be at least version {min_version}")
    elif ver > parse_version(max_version):
        raise ImportError(
            f"{name} should be at most version {max_version}")
Example #8
    def _add_dependency_breakage(self, pkgname: str,
                                 pkgdeps: Optional[Set[str]],
                                 provided_versions: Dict[str, Set[str]],
                                 cannot_upgrade: Set[str],
                                 context: UpdateRequirementsContext):
        if pkgdeps:
            for dep in pkgdeps:
                dep_split = RE_DEP_OPERATORS.split(dep)

                if len(dep_split) > 1 and dep_split[1]:
                    real_providers = context.provided_map.get(dep_split[0])

                    if real_providers:
                        versions = provided_versions.get(dep_split[0])

                        if versions:
                            op = ''.join(RE_DEP_OPERATORS.findall(dep))

                            if op == '=':
                                op = '=='

                            version_match = False

                            for v in versions:
                                try:
                                    provided_version, required_version = parse_version(
                                        v), parse_version(dep_split[1])

                                    if eval('provided_version {} required_version'
                                            .format(op)):
                                        version_match = True
                                        break
                                except:
                                    self.logger.error(
                                        "Error when comparing versions {} (provided) and {} (required)"
                                        .format(v, dep_split[1]))
                                    traceback.print_exc()

                            if not version_match:
                                for pname in real_providers:
                                    if pname not in cannot_upgrade:
                                        provider = context.to_update.get(pname)
                                        if provider:
                                            cannot_upgrade.add(pname)
                                            reason = self.i18n[
                                                'arch.sync.dep_breakage.reason'].format(
                                                    pkgname, dep)
                                            context.cannot_upgrade[
                                                pname] = UpgradeRequirement(
                                                    pkg=provider,
                                                    reason=reason)
Example #9
def install_post_save_hook(config_path: Optional[Path] = None):
    """Splices the post save hook into the global Jupyter configuration file"""
    if config_path is None:
        config_dir = jupyter_config_dir()
        config_path = Path(config_dir) / "jupyter_notebook_config.py"

    config_path = config_path.expanduser().resolve()

    if not config_path.exists():
        logger.debug(f"No existing Jupyter configuration detected at {config_path}. Creating...")
        config_path.parent.mkdir(exist_ok=True, parents=True)
        with config_path.open("w", encoding="utf-8") as fp:
            fp.write(post_save_hook_initialize_block)
        logger.info("nbautoexport post-save hook installed.")
        return

    # If config exists, check for existing nbautoexport initialize block and install as appropriate
    logger.debug(f"Detected existing Jupyter configuration at {config_path}")

    with config_path.open("r", encoding="utf-8") as fp:
        config = fp.read()

    if block_regex.search(config):
        logger.info("Detected existing nbautoexport post-save hook.")

        version_match = version_regex.search(config)
        if version_match:
            existing_version = version_match.group()
            logger.debug(f"Existing post-save hook is version {existing_version}")
        else:
            existing_version = ""
            logger.debug("Existing post-save hook predates versioning.")

        if parse_version(existing_version) < parse_version(__version__):
            logger.info(f"Updating nbautoexport post-save hook with version {__version__}...")
            with config_path.open("w", encoding="utf-8") as fp:
                # Open as w replaces existing file. We're replacing entire config.
                escaped_init = post_save_hook_initialize_block.replace(
                    "\\", r"\\"
                )  # escape metachars
                fp.write(block_regex.sub(escaped_init, config))
        else:
            logger.info("No changes made.")
            return
    else:
        logger.info("Installing post-save hook.")
        with config_path.open("a") as fp:
            # Open as a just appends. We append block at the end of existing file.
            fp.write("\n" + post_save_hook_initialize_block)

    logger.info("nbautoexport post-save hook installed.")
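One compatibility note on the comparison above: the empty-string fallback only works with packaging < 22, where `parse("")` returned a LegacyVersion that sorts below every real release; current packaging raises InvalidVersion for an empty string. A sketch of a version-agnostic fallback, using a hypothetical helper name rather than nbautoexport's actual code:

from packaging.version import InvalidVersion, parse as parse_version


def hook_needs_update(existing_version: str, current_version: str) -> bool:
    # Treat a missing or unparseable recorded version as older than anything.
    try:
        existing = parse_version(existing_version)
    except InvalidVersion:
        return True
    return existing < parse_version(current_version)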
Example #10
    def _call_forward_real(self, vol_data, out=None, **kwargs):
        """Run an ASTRA forward projection on the given data using the GPU.

        Parameters
        ----------
        vol_data : ``vol_space.real_space`` element
            Volume data to which the projector is applied. Although
            ``vol_space`` may be complex, this element needs to be real.
        out : ``proj_space`` element, optional
            Element of the projection space to which the result is written. If
            ``None``, an element in `proj_space` is created.

        Returns
        -------
        out : ``proj_space`` element
            Projection data resulting from the application of the projector.
            If ``out`` was provided, the returned object is a reference to it.
        """
        with self._mutex:
            assert vol_data in self.vol_space.real_space

            if out is not None:
                assert out in self.proj_space
            else:
                out = self.proj_space.element()

            # Copy data to GPU memory
            if self.geometry.ndim == 2:
                astra.data2d.store(self.vol_id, vol_data.asarray())
            elif self.geometry.ndim == 3:
                astra.data3d.store(self.vol_id, vol_data.asarray())
            else:
                raise RuntimeError('unknown ndim')

            # Run algorithm
            astra.algorithm.run(self.algo_forward_id)

            # Copy result to host
            if self.geometry.ndim == 2:
                out[:] = self.proj_array
            elif self.geometry.ndim == 3:
                out[:] = np.swapaxes(self.proj_array, 0,
                                     1).reshape(self.proj_space.shape)

            # Fix scaling to weight by pixel size
            if (isinstance(self.geometry, Parallel2dGeometry) and
                    parse_version(ASTRA_VERSION) < parse_version('1.9.9.dev')):
                # parallel2d scales with pixel stride
                out *= 1 / float(self.geometry.det_partition.cell_volume)

            return out
Example #11
def test_roundtrip_from_dask_partitioned(tmpdir, parts, daskcudf, metadata):
    tmpdir = str(tmpdir)

    df = pd.DataFrame()
    df["year"] = [2018, 2019, 2019, 2019, 2020, 2021]
    df["month"] = [1, 2, 3, 3, 3, 2]
    df["day"] = [1, 1, 1, 2, 2, 1]
    df["data"] = [0, 0, 0, 0, 0, 0]
    df.index.name = "index"
    if daskcudf:
        ddf2 = dask_cudf.from_cudf(cudf.from_pandas(df), npartitions=2)
        ddf2.to_parquet(tmpdir,
                        write_metadata_file=metadata,
                        partition_on=parts)
    else:
        ddf2 = dd.from_pandas(df, npartitions=2)
        ddf2.to_parquet(
            tmpdir,
            engine="pyarrow",
            write_metadata_file=metadata,
            partition_on=parts,
        )
    df_read = dd.read_parquet(tmpdir, engine="pyarrow")
    gdf_read = dask_cudf.read_parquet(tmpdir)

    # TODO: Avoid column selection after `CudfEngine`
    # can be aligned with dask/dask#6534
    columns = list(df_read.columns)
    assert set(df_read.columns) == set(gdf_read.columns)
    dd.assert_eq(
        df_read.compute(scheduler=dask.get)[columns],
        gdf_read.compute(scheduler=dask.get)[columns],
    )

    assert gdf_read.index.name == "index"

    # Check that we don't have uuid4 file names
    for _, _, files in os.walk(tmpdir):
        for fn in files:
            if not fn.startswith("_"):
                assert "part" in fn

    if parse_version(dask.__version__) > parse_version("2021.07.0"):
        # This version of Dask supports `aggregate_files=True`.
        # Check that we can aggregate by a partition name.
        df_read = dd.read_parquet(tmpdir,
                                  engine="pyarrow",
                                  aggregate_files="year")
        gdf_read = dask_cudf.read_parquet(tmpdir, aggregate_files="year")
        dd.assert_eq(df_read, gdf_read)
Example #12
def test_render_mono_size():
    # issue 4177

    if parse_version(ImageFont.core.freetype2_version) < parse_version("2.4"):
        pytest.skip("Different metrics")

    im = Image.new("P", (100, 30), "white")
    draw = ImageDraw.Draw(im)
    ttf = ImageFont.truetype("Tests/fonts/DejaVuSans.ttf",
                             18,
                             layout_engine=ImageFont.LAYOUT_BASIC)

    draw.text((10, 10), "r" * 10, "black", ttf)
    assert_image_equal_tofile(im, "Tests/images/text_mono.gif")
Example #13
def _get_engine(engine, write=False):
    # Get engine
    if engine == "pyarrow":
        import pyarrow as pa

        from .arrow import ArrowORCEngine

        if write and parse_version(pa.__version__) < parse_version("4.0.0"):
            raise ValueError("to_orc is not supported for pyarrow<4.0.0")

        return ArrowORCEngine
    elif not isinstance(engine, ORCEngine):
        raise TypeError("engine must be 'pyarrow', or an ORCEngine object")
    return engine
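A subtlety with minimum-version gates like the one above: dev builds sort below the corresponding final release, so a hypothetical pyarrow "4.0.0.dev123" nightly would still be rejected by the `< "4.0.0"` check, while post-releases pass. Illustrative comparisons only:

from packaging.version import parse as parse_version

assert parse_version("4.0.0.dev123") < parse_version("4.0.0")
assert parse_version("4.0.0.post1") > parse_version("4.0.0")
assert parse_version("3.9.9") < parse_version("4.0.0rc1") < parse_version("4.0.0")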
Example #14
def _check_opal_firmware_kernel_version(detected_version, required_version):
    """Check the firmware kernel version for OPAL systems.

    :param detected_version: a string with the detected kernel version or None
    :param required_version: a string with the required kernel version or None
    :return: True or False
    """
    try:
        if detected_version and required_version:
            return parse_version(detected_version) >= parse_version(required_version)
    except Exception as e:  # pylint: disable=broad-except
        log.warning("Couldn't check the firmware kernel version: %s", str(e))

    return False
Example #15
    def __definition(self):
        """Get the definition of a report, including attributes and metrics. Implements GET /v2/reports/<report_id>"""

        response = reports.report_definition(connection=self._connection,
                                             report_id=self._report_id,
                                             verbose=helper.debug()).json()

        grid = response["definition"]["grid"]
        available_objects = response['definition']['availableObjects']

        if parse_version(self._connection.iserver_version) >= parse_version(
                "11.2.0100"):
            self._subtotals = grid["subtotals"]
        self._name = response["name"]
        self.cross_tab = grid["crossTab"]

        # Check if the report has custom groups or consolidations
        if available_objects['customGroups']:
            helper.exception_handler(
                msg="Reports with custom groups are not supported.",
                exception_type=ImportError)
        if available_objects['consolidations']:
            helper.exception_handler(
                msg="Reports with consolidations are not supported.",
                exception_type=ImportError)

        full_attributes = []
        for row in grid["rows"]:
            if row["type"] == "attribute":
                full_attributes.append(row)
        for column in grid["columns"]:
            if column["type"] == "attribute":
                full_attributes.append(column)
        self._attributes = [{
            'name': attr['name'],
            'id': attr['id']
        } for attr in full_attributes]

        # Retrieve metrics from the report grid (metrics selected only in the report)
        metrics_position = grid.get("metricsPosition")
        if metrics_position is None:
            self._metrics = []
        else:
            full_metrics = grid[metrics_position["axis"]][
                metrics_position["index"]]["elements"]
            self._metrics = [{
                'name': metr['name'],
                'id': metr['id']
            } for metr in full_metrics]
Example #16
def parse_python_version(version_str):
    # type: (str) -> Dict[str, Union[str, int, Version]]
    from packaging.version import parse as parse_version

    is_debug = False
    if version_str.endswith("-debug"):
        is_debug = True
        version_str, _, _ = version_str.rpartition("-")
    match = version_re.match(version_str)
    if not match:
        raise InvalidPythonVersion("%s is not a python version" % version_str)
    version_dict = match.groupdict()  # type: Dict[str, str]
    major = int(version_dict.get("major",
                                 0)) if version_dict.get("major") else None
    minor = int(version_dict.get("minor",
                                 0)) if version_dict.get("minor") else None
    patch = int(version_dict.get("patch",
                                 0)) if version_dict.get("patch") else None
    is_postrelease = True if version_dict.get("post") else False
    is_prerelease = True if version_dict.get("prerel") else False
    is_devrelease = True if version_dict.get("dev") else False
    if patch:
        patch = int(patch)
    version = None  # type: Optional[Union[Version, LegacyVersion]]
    try:
        version = parse_version(version_str)
    except TypeError:
        version = None
    if isinstance(version, LegacyVersion) or version is None:
        v_dict = version_dict.copy()
        pre = ""
        if v_dict.get("prerel") and v_dict.get("prerelversion"):
            pre = v_dict.pop("prerel")
            pre = "{0}{1}".format(pre, v_dict.pop("prerelversion"))
        v_dict["pre"] = pre
        keys = ["major", "minor", "patch", "pre", "postdev", "post", "dev"]
        values = [v_dict.get(val) for val in keys]
        version_str = ".".join([str(v) for v in values if v])
        version = parse_version(version_str)
    return {
        "major": major,
        "minor": minor,
        "patch": patch,
        "is_postrelease": is_postrelease,
        "is_prerelease": is_prerelease,
        "is_devrelease": is_devrelease,
        "is_debug": is_debug,
        "version": version,
    }
Example #17
    def __init__(self,
                 size=(2, 2),
                 data_format=None,
                 interpolation='nearest',
                 **kwargs):
        super(UpSampling2D, self).__init__(**kwargs)
        # Update to K.normalize_data_format after keras 2.2.0
        if parse_version(keras.__version__) > parse_version("2.2.0"):
            self.data_format = K.normalize_data_format(data_format)
        else:
            self.data_format = conv_utils.normalize_data_format(data_format)

        self.interpolation = interpolation
        self.size = conv_utils.normalize_tuple(size, 2, 'size')
        self.input_spec = InputSpec(ndim=4)
Example #18
def newer_version_available():
    if __version__ is None:
        return False

    this_version = parse_version(__version__)
    if this_version.is_devrelease:
        return False

    r = requests.get('https://pypi.org/pypi/swcc/json', timeout=(5, 5))
    r.raise_for_status()
    releases = [parse_version(v) for v in r.json()['releases'].keys()]
    for release in releases:
        if not (release.is_prerelease or release.is_devrelease) and release > this_version:
            return True
    return False
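The prerelease/devrelease filtering above relies on attributes of packaging's Version objects; a quick illustration of how those flags and comparisons behave (standalone snippet, not part of the swcc client):

from packaging.version import parse as parse_version

v = parse_version("2.0.0rc1.dev3")
print(v.is_prerelease, v.is_devrelease)  # True True
print(parse_version("1.9.9") < v)        # True: rc/dev releases still sort above older finals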
Example #19
    def __compute_isosurface(self, data, level, color, extent=None):
        """
        Compute the vertices and faces of an isosurface from grid data

        Parameters
        ----------
        data : numpy.ndarray
            Grid data stored as a numpy 3D tensor
        level : float
            The isocontour value that defines the surface
        color :
            color of a face
        extent : list
            list of [[xmin, xmax], [ymin, ymax], [zmin, zmax]] values that define the bounding box of the mesh,
            otherwise the viewport is used

        Returns
        -------
        a tuple of vertices and faces
        """
        # call the appropriate version of skimage's marching cubes
        from packaging.version import parse as parse_version
        if parse_version(skimage.__version__) >= parse_version('0.19.0'):
            values = skimage.measure.marching_cubes(data, level)
        else:
            values = skimage.measure.marching_cubes_lewiner(data, level)

        sk_verts, sk_faces, normals, values = values
        x, y, z = sk_verts.T

        # Rescale coordinates to given limits
        if extent:
            xlim, ylim, zlim = extent
            x = x * np.diff(xlim) / (data.shape[0]) + xlim[0]
            y = y * np.diff(ylim) / (data.shape[1]) + ylim[0]
            z = z * np.diff(zlim) / (data.shape[2]) + zlim[0]

        # Assemble the list of vertices
        vertices = []
        for n in range(len(x)):
            vertices.append([x[n], y[n], z[n]])

        # Assemble the list of faces
        faces = []
        for face in sk_faces:
            i, j, k = face
            faces.append((i, j, k, None, (color, color, color), None))
        return (vertices, faces)
Example #20
    def do_upgrade(self):
        current_ver = parse_version(self.get_db_version())
        app_db_version = parse_version(__dbversion__)

        if current_ver == app_db_version:
            return

        logger.debug("DATABASE UPGRADE IN PROGRESS!")

        if current_ver < parse_version("3.5"):
            logger.error("Due to database changes, you must be on at least "
                         f"v2.5 before upgrading to v{__version__}.")
            exit()

        if current_ver < parse_version("3.5.2"):
            self.query("CREATE TABLE releases_tmp ("
                       "'artist_id' INTEGER,"
                       "'artist_name' TEXT,"
                       "'album_id' INTEGER,"
                       "'album_name' TEXT,"
                       "'album_release' TEXT,"
                       "'album_added' INTEGER,"
                       "'explicit' INTEGER,"
                       "'label' TEXT,"
                       "'record_type' INTEGER,"
                       "'profile_id' INTEGER DEFAULT 1,"
                       "'future_release' INTEGER DEFAULT 0,"
                       "'trans_id' INTEGER,"
                       "unique(album_id, profile_id))")
            self.query(
                "INSERT OR REPLACE INTO releases_tmp(artist_id, artist_name, "
                "album_id, album_name, album_release, album_added, "
                "explicit, label, record_type, profile_id, "
                "future_release, trans_id) SELECT artist_id, "
                "artist_name, album_id, album_name, album_release, "
                "album_added, explicit, label, record_type, "
                "profile_id, future_release, trans_id FROM releases")
            self.query("DROP TABLE releases")
            self.query("ALTER TABLE releases_tmp RENAME TO releases")
            self.query(
                "INSERT OR REPLACE INTO 'deemon' ('property', 'value') VALUES ('version', '3.5.2')"
            )
            self.commit()
            logger.debug(f"Database upgraded to version 3.5.2")

        if current_ver < parse_version("3.6"):
            # album_release_ts REMOVED
            pass
Example #21
    def check(self, raise_exception=True):
        if self.ready:
            return True
        try:
            if not self.tokens:
                logger.debug('No tokens for client, attempting load...')
                self.tokens = self.__native_client.load_tokens_by_scope()
            # Verify client version is compatible with service
            if parse_version(self.remote_config["MIN_VERSION"]) > parse_version(VERSION):
                raise exc.OutdatedVersion(
                    "This CFDE Client is not up to date and can no longer make "
                    "submissions. Please update the client and try again."
                )

            if not self.remote_config["FLOWS"][self.service_instance]["flow_id"]:
                logger.critical(f"Service {self.service_instance} has no flow ID! "
                                f"Submissions will be disabled until that is set!")
                raise exc.SubmissionsUnavailable(
                    "Submissions to nih-cfde.org are temporarily offline. Please check "
                    "with our administrators for further details.")

            # Verify user has permission to view Flow
            try:
                flow_info = self.remote_config["FLOWS"][self.service_instance]
                self.get_flow_retry_500s(flow_info["flow_id"])
            except (globus_sdk.GlobusAPIError, globus_sdk.exc.GlobusAPIError) as e:
                logger.exception(e)
                if e.http_status not in [404, 405]:
                    raise
                error_message = ("Permission denied. Please use the 'Onboarding to the Submission "
                                 "System' page at https://github.com/nih-cfde/published-documentati"
                                 "on/wiki/Onboarding-to-the-CFDE-Portal-Submission-System to "
                                 "change your permissions. Only users with the Submitter role can "
                                 "push data to the submission system. If you have already "
                                 "sent in a request for Submitter status, but are getting this "
                                 "error, be sure that you fully accepted the Globus invitation to "
                                 "your Submitter group. You will need to click the 'Click here to "
                                 "apply for membership' text in the invitation message and follow "
                                 "instructions there before doing a submission.")
                raise exc.PermissionDenied(error_message)

            self.ready = True
            logger.info('Check PASSED, client is ready to use flows.')
        except Exception:
            logger.info('Check FAILED, client lacks permissions or is not logged in.')
            self.ready = False
            if raise_exception is True:
                raise
Example #22
    def check(temp_file):
        with Image.open(temp_file) as im:
            assert im.n_frames == 2

            # Compare first frame to original
            im.load()
            assert_image_equal(im, frame1.convert("RGBA"))

            # Compare second frame to original
            if is_big_endian():
                webp = parse_version(features.version_module("webp"))
                if webp < parse_version("1.2.2"):
                    pytest.skip("Fails with libwebp earlier than 1.2.2")
            im.seek(1)
            im.load()
            assert_image_equal(im, frame2.convert("RGBA"))
Example #23
    def scope_insufficient(self, token, required_scopes):
        """Wrap the original method to support both Authlib < 1.0 and Authlib >= 1.0.

        Remove this when we only support Authlib >= 1.0.

        Args:
            token (IntrospectionToken): The token to check for insufficient scopes.
            required_scopes (list(str)): The list of required scopes.

        Returns:
            bool: Whether the token's scopes are sufficient.
        """
        if parse_version(authlib_version) >= parse_version("1.0.0"):
            return BearerTokenValidator.scope_insufficient(
                token.get_scope(), required_scopes)
        return super().scope_insufficient(token, required_scopes)
Example #24
def main() -> int:
    root = Path.cwd()
    info = analyze_dists(root, os.environ["INPUT_DIST_DIR"])
    ctx = Context(root, info)
    ctx.version = find_version(
        ctx,
        os.environ["INPUT_VERSION_FILE"],
        os.environ["INPUT_VERSION"],
    )
    version = parse_version(ctx.version)
    check_head(ctx.version, os.environ["INPUT_CHECK_REF"])
    note = parse_changes(
        ctx,
        changes_file=os.environ["INPUT_CHANGES_FILE"],
        start_line=os.environ["INPUT_START_LINE"],
        head_line=os.environ["INPUT_HEAD_LINE"],
        fix_issue_regex=os.environ["INPUT_FIX_ISSUE_REGEX"],
        fix_issue_repl=os.environ["INPUT_FIX_ISSUE_REPL"],
        name=os.environ["INPUT_NAME"],
    )
    print(f"::set-output name=version::{ctx.version}")
    is_prerelease = version.is_prerelease
    print(f"::set-output name=prerelease::{str(is_prerelease).lower()}")
    is_devrelease = version.is_devrelease
    print(f"::set-output name=devrelease::{str(is_devrelease).lower()}")
    output_file = os.environ["INPUT_OUTPUT_FILE"]
    (root / output_file).write_text(note)
    return 0
Example #25
def check_changes_version(declared_version: str, found_version: str,
                          changes_file: str) -> None:
    if declared_version == found_version:
        return
    dver = parse_version(declared_version)
    fver = parse_version(found_version)

    if dver < fver:
        raise ValueError(f"The distribution version {dver} is older than "
                         f"{fver} (from '{changes_file}').\n"
                         "Hint: push git tag with the latest version.")

    else:
        raise ValueError(f"The distribution version {dver} is younger than "
                         f"{fver} (from '{changes_file}').\n"
                         "Hint: run 'towncrier' again.")
Example #26
def newest_version_for_subdir(channel, subdir):
    """
    Return the newest versions of all packages in a channel subdir

    Parameters
    -----------
    channel : str
        Channel to examine. 'main' and 'free' will fetch repodata from
        repo.anaconda.com, all others from conda.anaconda.org.
    subdir : str
        Subdir to examine.

    Returns
    -------
    newest : dict
        Dictionary mapping package names to Version objects with the newest
        version of each package in the subdir for the specified channel.

    """
    repodata = fetch_repodata(channel, subdir)
    newest = {}
    for pkg_info in repodata['packages'].values():
        name = pkg_info['name']
        version = parse_version(pkg_info['version'])
        if name not in newest:
            newest[name] = version
        else:
            newest[name] = max(version, newest[name])
    return newest
Example #27
def __collect_via_version(commits: tp.List[tp.Tuple[FullCommitHash, str]],
                          cve_list: tp.FrozenSet[CVE]) -> CVEDict:
    """
    Collect data about resolved CVE's using the tagged versions and the
    vulnerable version list in the CVE's.

    Args:
        commits: a list of commits in textual form

    Return:
        a dictionary with commit hash as key and a set of CVE's and a set of
        CWE's as values
    """
    results: CVEDict = defaultdict(__create_cve_dict_entry)

    # Collect tagged commits
    tag_list: tp.Dict[tp.Union[LegacyVersion, Version], tp.Dict[str,
                                                                tp.Any]] = {}
    for number, commit_data in enumerate(reversed(commits)):
        commit, message = commit_data
        tag = re.findall(r'\(tag:\s*.*\)', message, re.IGNORECASE)
        if tag:
            parsed_tag = parse_version(tag[0].split(' ')[1].replace(
                ',', '').replace(')', ''))
            tag_list[parsed_tag] = {'number': number, 'commit': commit}

    # Check versions
    for cve in cve_list:
        for version in sorted(tag_list.keys()):
            if all(version > x for x in cve.vulnerable_versions):
                results[tag_list[version]['commit']]['cve'].add(cve)
                break

    return results
Example #28
def clean_requires_python(candidates):
    """Get a cleaned list of all the candidates with valid specifiers in the `requires_python` attributes."""
    all_candidates = []
    sys_version = ".".join(map(str, sys.version_info[:3]))
    from packaging.version import parse as parse_version

    py_version = parse_version(
        os.environ.get("PIP_PYTHON_VERSION", sys_version))
    for c in candidates:
        from_location = attrgetter("location.requires_python")
        requires_python = getattr(c, "requires_python", from_location(c))
        if requires_python:
            # Old specifications had people setting this to single digits
            # which is effectively the same as '>=digit,<digit+1'
            if requires_python.isdigit():
                requires_python = ">={0},<{1}".format(requires_python,
                                                      int(requires_python) + 1)
            try:
                specifierset = SpecifierSet(requires_python)
            except InvalidSpecifier:
                continue
            else:
                if not specifierset.contains(py_version):
                    continue
        all_candidates.append(c)
    return all_candidates
Example #29
def get_refgenie_config(galaxy_root, refgenie_dir):
    config_version = 0.4
    if galaxy_root:
        version_major = get_galaxy_major_version(galaxy_root=galaxy_root)
        if version_major < parse_version('21.09'):
            config_version = 0.3
    return REFGENIE_CONFIG_TEMPLATE % (config_version, refgenie_dir)
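Since PEP 440 drops leading zeros from release segments, Galaxy-style "YY.MM" strings such as '21.09' compare numerically rather than lexically; a quick sanity check (illustrative only):

from packaging.version import parse as parse_version

assert parse_version("21.09") == parse_version("21.9")
assert parse_version("21.05") < parse_version("21.09") < parse_version("22.01")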
Example #30
class _Condition:
    _current_kernel = parse_version(os.uname().release.split("-", 1)[0])
    _native_arch = _seccomp.NATIVE_ARCH

    def __init__(self, *, arches=None, caps=None, minKernel=None):
        if arches:
            self.arches = frozenset(arches)
        else:
            self.arches = None
        self.caps = caps
        if minKernel:
            self.min_kernel = parse_version(minKernel)
        else:
            self.min_kernel = None

    def _check_arches(self):
        if not self.arches:
            # no arches: applies to all arches
            return None
        else:
            return self._native_arch in self.arches

    def _check_caps(self):
        if not self.caps:
            # no capability restriction
            return None
        return any(_libcap.has_cap(cap) for cap in self.caps)

    def _check_kernel(self):
        if not self.min_kernel:
            # no Kernel version restriction
            return None
        else:
            # current Kernel version must be equal or greater than min version
            return self._current_kernel >= self.min_kernel
Example #31
def get_galaxy_major_version(galaxy_root):
    spec = importlib.util.spec_from_file_location(
        '__galaxy_version',
        os.path.join(galaxy_root, 'lib', 'galaxy', 'version.py'))
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return parse_version(module.VERSION_MAJOR)
Example #32
def check_version(request):
    if not ALLOW_VERSION_CHECK or request.method != "POST":
        raise Http404()

    version = cache.get(VERSION_CHECK_CACHE_KEY, 'nada')

    if version == 'nada':
        try:
            api_url = 'https://api.github.com/repos/rafalp/Misago/releases'
            r = requests.get(api_url)

            if r.status_code != requests.codes.ok:
                r.raise_for_status()

            latest_version = r.json()[0]['tag_name']

            latest = parse_version(latest_version)
            current = parse_version(__version__)

            if latest > current:
                version = {
                    'is_error': True,
                    'message': _("Outdated: %(current)s! (latest: %(latest)s)") % {
                        'latest': latest_version,
                        'current': __version__,
                    },
                }
            else:
                version = {
                    'is_error': False,
                    'message': _("Up to date! (%(current)s)") % {
                        'current': __version__,
                    },
                }

            cache.set(VERSION_CHECK_CACHE_KEY, version, 180)
        except (RequestException, IndexError, KeyError, ValueError) as e:
            version = {
                'is_error': True,
                'message': _("Failed to connect to GitHub API. Try again later."),
            }

    return JsonResponse(version)
Example #33
    def _find_plugin(name, filters):
        def _get_specifier_set(package_versions):
            # Flat out the versions, in case one of them contains a few
            # operators/specific versions
            _versions = (v for vs in package_versions for v in vs.split(','))
            specs = SpecifierSet()
            for spec in _versions:
                if not spec:
                    raise InvalidSpecifier()
                try:
                    specs &= SpecifierSet(spec)
                except InvalidSpecifier:
                    # If the code below doesn't raise any exception then it's
                    # the case where a version has been provided with no
                    # operator to prefix it.
                    specs &= SpecifierSet('==={}'.format(spec))
            return specs

        filters['package_name'] = name
        version_specified = 'package_version' in filters
        versions = filters.pop('package_version', [])
        if not version_specified:
            specifier_set = SpecifierSet()
        else:
            try:
                specifier_set = _get_specifier_set(versions)
            except InvalidSpecifier:
                raise InvalidPluginError('Specified version param {0} of the '
                                         'plugin {1} are in an invalid form. '
                                         'Please refer to the documentation '
                                         'for valid forms of '
                                         'versions'.format(versions, name))
        sm = get_storage_manager()
        plugins = sm.list(Plugin, filters=filters)
        if not plugins:
            if version_specified:
                filters['package_version'] = versions
            version_message = ' (query: {0})'.format(filters) \
                if filters else ''
            raise InvalidPluginError(
                'Plugin {0}{1} not found'.format(name, version_message))
        plugin_versions = (
            (i, parse_version(p.package_version))
            for i, p in enumerate(plugins))
        matching_versions = [(i, v)
                             for i, v in plugin_versions if v in specifier_set]
        if not matching_versions:
            raise InvalidPluginError('No matching version was found for '
                                     'plugin {0} and '
                                     'version(s) {1}.'.format(name, versions))
        max_item = max(matching_versions, key=lambda iv: iv[1])
        return plugins[max_item[0]]
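The `v in specifier_set` membership test above is packaging's SpecifierSet containment; a minimal illustration (the variable names here are made up):

from packaging.specifiers import SpecifierSet
from packaging.version import parse as parse_version

specs = SpecifierSet(">=1.0,<2.0")
assert parse_version("1.5.2") in specs
assert parse_version("2.0.0") not in specs
# Pre-releases are excluded from containment by default ...
assert parse_version("1.9rc1") not in specs
# ... unless explicitly allowed.
assert specs.contains("1.9rc1", prereleases=True)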
Example #34
def devpiserver_on_upload(stage, projectname, version, link):
    """ called when a file is uploaded to a private stage for
    a projectname/version.  link.entry.file_exists() may be false because
    a more recent revision deleted the file (and files are not revisioned).
    NOTE that this hook is currently NOT called for the implicit "caching"
    uploads to the pypi mirror.

    If the uploaded file is a wheel and is the latest version on this index,
    store its metadata in json file at the root of index/+f/ directory.
    With the standard config with nginx, nginx will directly serve this file.
    """
    if link.entry and link.entry.file_exists() and link.entry._filepath.endswith('.whl'):
        threadlog.info("Wheel detected: %s", link.entry._filepath)
        new_version = parse_version(version)
        latest_version = parse_version(stage.get_latest_version_perstage(projectname))
        if latest_version > new_version:
            threadlog.debug("A newer release has already been uploaded: %s - nothing to do", latest_version)
            return
        metadata = extract_metadata_from_wheel_file(link.entry._filepath)
        linkstore = stage.get_linkstore_perstage(link.projectname, link.version)
        json_path = '%s/%s/+f/%s.json' % (linkstore.filestore.storedir, stage.name, projectname)
        with open(json_path, 'w') as fd:
            fd.write(json.dumps(metadata))
        threadlog.info("Stored %s to: %s", metadata, json_path)
Example #35
    def parse(cls, version):
        """Parse a valid version string into a dictionary

        Raises:
            ValueError -- Unable to parse version string
            ValueError -- Not a valid python version

        :param version: A valid version string
        :type version: str
        :return: A dictionary with metadata about the specified python version.
        :rtype: dict.
        """

        try:
            version = parse_version(version)
        except TypeError:
            raise ValueError("Unable to parse version: %s" % version)
        if not version or not version.release:
            raise ValueError("Not a valid python version: %r" % version)
        if len(version.release) >= 3:
            major, minor, patch = version.release[:3]
        elif len(version.release) == 2:
            major, minor = version.release
            patch = None
        else:
            major = version.release[0]
            minor = None
            patch = None
        return {
            "major": major,
            "minor": minor,
            "patch": patch,
            "is_prerelease": version.is_prerelease,
            "is_postrelease": version.is_postrelease,
            "is_devrelease": version.is_devrelease,
            "version": version,
        }
Example #36
    def target_galaxy_version(self):
        if self._target_galaxy_version is None:
            self._target_galaxy_version = parse_version(self._get('version').json()['version_major'])
        return self._target_galaxy_version
Example #37
def setup(*args, **kw):  # noqa: C901
    """This function wraps setup() so that we can run cmake, make,
    CMake build, then proceed as usual with setuptools, appending the
    CMake-generated output as necessary.

    The CMake project is re-configured only if needed. This is achieved by (1) retrieving the environment mapping
    associated with the generator set in the ``CMakeCache.txt`` file, (2) saving the CMake configure arguments and
    version in :func:`skbuild.constants.CMAKE_SPEC_FILE()`: and (3) re-configuring only if either the generator or
    the CMake specs change.
    """
    sys.argv, cmake_executable, skip_generator_test, cmake_args, make_args = parse_args()

    # work around https://bugs.python.org/issue1011113
    # (patches provided, but no updates since 2014)
    cmdclass = kw.get('cmdclass', {})
    cmdclass['build'] = cmdclass.get('build', build.build)
    cmdclass['build_py'] = cmdclass.get('build_py', build_py.build_py)
    cmdclass['build_ext'] = cmdclass.get('build_ext', build_ext.build_ext)
    cmdclass['install'] = cmdclass.get('install', install.install)
    cmdclass['install_lib'] = cmdclass.get('install_lib',
                                           install_lib.install_lib)
    cmdclass['install_scripts'] = cmdclass.get('install_scripts',
                                               install_scripts.install_scripts)
    cmdclass['clean'] = cmdclass.get('clean', clean.clean)
    cmdclass['sdist'] = cmdclass.get('sdist', sdist.sdist)
    cmdclass['bdist'] = cmdclass.get('bdist', bdist.bdist)
    cmdclass['bdist_wheel'] = cmdclass.get(
        'bdist_wheel', bdist_wheel.bdist_wheel)
    cmdclass['egg_info'] = cmdclass.get('egg_info', egg_info.egg_info)
    cmdclass['generate_source_manifest'] = cmdclass.get(
        'generate_source_manifest',
        generate_source_manifest.generate_source_manifest)
    cmdclass['test'] = cmdclass.get('test', test.test)
    kw['cmdclass'] = cmdclass

    # Extract setup keywords specific to scikit-build and remove them from kw.
    # Removing the keyword from kw needs to be done here; otherwise, the
    # following call to _parse_setuptools_arguments would complain about
    # unknown setup options.
    parameters = {
        'cmake_args': [],
        'cmake_install_dir': '',
        'cmake_source_dir': '',
        'cmake_with_sdist': False,
        'cmake_languages': ('C', 'CXX'),
        'cmake_minimum_required_version': None
    }
    skbuild_kw = {param: kw.pop(param, parameters[param])
                  for param in parameters}

    # ... and validate them
    try:
        _check_skbuild_parameters(skbuild_kw)
    except SKBuildError as ex:
        import traceback
        print("Traceback (most recent call last):")
        traceback.print_tb(sys.exc_info()[2])
        print('')
        sys.exit(ex)

    # Convert source dir to a path relative to the root
    # of the project
    cmake_source_dir = skbuild_kw['cmake_source_dir']
    if cmake_source_dir == ".":
        cmake_source_dir = ""
    if os.path.isabs(cmake_source_dir):
        cmake_source_dir = os.path.relpath(cmake_source_dir)

    # Skip running CMake in the following cases:
    # * flag "--skip-cmake" is provided
    # * "display only" argument is provided (e.g  '--help', '--author', ...)
    # * no command-line arguments or invalid ones are provided
    # * no command requiring cmake is provided
    # * no CMakeLists.txt is found
    display_only = has_invalid_arguments = help_commands = False
    force_cmake = skip_cmake = False
    commands = []
    try:
        (display_only, help_commands, commands,
         hide_listing, force_cmake, skip_cmake,
         plat_name, build_ext_inplace) = \
            _parse_setuptools_arguments(kw)
    except (DistutilsArgError, DistutilsGetoptError):
        has_invalid_arguments = True

    has_cmakelists = os.path.exists(
        os.path.join(cmake_source_dir, "CMakeLists.txt"))
    if not has_cmakelists:
        print('skipping skbuild (no CMakeLists.txt found)')

    skip_skbuild = (display_only
                    or has_invalid_arguments
                    or not _should_run_cmake(commands,
                                             skbuild_kw["cmake_with_sdist"])
                    or not has_cmakelists)
    if skip_skbuild and not force_cmake:
        if help_commands:
            # Prepend scikit-build help. Generate option descriptions using
            # argparse.
            skbuild_parser = create_skbuild_argparser()
            arg_descriptions = [
                line for line in skbuild_parser.format_help().split('\n')
                if line.startswith('  ')
                ]
            print('scikit-build options:')
            print('\n'.join(arg_descriptions))
            print('')
            print('Arguments following a "--" are passed directly to CMake '
                  '(e.g. -DMY_VAR:BOOL=TRUE).')
            print('Arguments following a second "--" are passed directly to '
                  'the build tool.')
            print('')
        return upstream_setup(*args, **kw)

    developer_mode = "develop" in commands or "test" in commands or build_ext_inplace

    packages = kw.get('packages', [])
    package_dir = kw.get('package_dir', {})
    package_data = copy.deepcopy(kw.get('package_data', {}))

    py_modules = kw.get('py_modules', [])
    new_py_modules = {py_module: False for py_module in py_modules}

    scripts = kw.get('scripts', [])
    new_scripts = {script: False for script in scripts}

    data_files = {
        (parent_dir or '.'): set(file_list)
        for parent_dir, file_list in kw.get('data_files', [])
    }

    # Since CMake arguments provided through the command line have more
    # weight and when CMake is given an argument multiple times, only the last
    # one is considered, let's prepend the one provided in the setup call.
    cmake_args = skbuild_kw['cmake_args'] + cmake_args

    if sys.platform == 'darwin':

        # If no ``--plat-name`` argument was passed, set default value.
        if plat_name is None:
            plat_name = skbuild_plat_name()

        (_, version, machine) = plat_name.split('-')

        # The loop here allows for CMAKE_OSX_* command line arguments to overload
        # values passed with either the ``--plat-name`` command-line argument
        # or the ``cmake_args`` setup option.
        for cmake_arg in cmake_args:
            if 'CMAKE_OSX_DEPLOYMENT_TARGET' in cmake_arg:
                version = cmake_arg.split('=')[1]
            if 'CMAKE_OSX_ARCHITECTURES' in cmake_arg:
                machine = cmake_arg.split('=')[1]

        set_skbuild_plat_name("macosx-{}-{}".format(version, machine))

        # Set platform env. variable so that commands (e.g. bdist_wheel)
        # use this information. The _PYTHON_HOST_PLATFORM env. variable is
        # used in distutils.util.get_platform() function.
        os.environ['_PYTHON_HOST_PLATFORM'] = skbuild_plat_name()

        # Set CMAKE_OSX_DEPLOYMENT_TARGET and CMAKE_OSX_ARCHITECTURES if not already
        # specified
        (_, version, machine) = skbuild_plat_name().split('-')
        if not cmaker.has_cmake_cache_arg(
                cmake_args, 'CMAKE_OSX_DEPLOYMENT_TARGET'):
            cmake_args.append(
                '-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=%s' % version
            )
        if not cmaker.has_cmake_cache_arg(
                cmake_args, 'CMAKE_OSX_ARCHITECTURES'):
            cmake_args.append(
                '-DCMAKE_OSX_ARCHITECTURES:STRING=%s' % machine
            )

    # Install cmake if listed in `setup_requires`
    for package in kw.get('setup_requires', []):
        if Requirement(package).name == 'cmake':
            setup_requires = [package]
            dist = upstream_Distribution({'setup_requires': setup_requires})
            dist.fetch_build_eggs(setup_requires)

            # Considering packages associated with "setup_requires" keyword are
            # installed in .eggs subdirectory without honoring setuptools "console_scripts"
            # entry_points and without setting the expected executable permissions, we are
            # taking care of it below.
            import cmake
            for executable in ['cmake', 'cpack', 'ctest']:
                executable = os.path.join(cmake.CMAKE_BIN_DIR, executable)
                if platform.system().lower() == 'windows':
                    executable += '.exe'
                st = os.stat(executable)
                permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                )
                os.chmod(executable, permissions)
            cmake_executable = os.path.join(cmake.CMAKE_BIN_DIR, 'cmake')
            break

    # Languages are used to determine a working generator
    cmake_languages = skbuild_kw['cmake_languages']

    try:
        if cmake_executable is None:
            cmake_executable = CMAKE_DEFAULT_EXECUTABLE
        cmkr = cmaker.CMaker(cmake_executable)
        if not skip_cmake:
            cmake_minimum_required_version = skbuild_kw['cmake_minimum_required_version']
            if cmake_minimum_required_version is not None:
                if parse_version(cmkr.cmake_version) < parse_version(cmake_minimum_required_version):
                    raise SKBuildError(
                        "CMake version %s or higher is required. CMake version %s is being used" % (
                            cmake_minimum_required_version, cmkr.cmake_version))
            # Used to confirm that the cmake executable is the same, and that the environment
            # didn't change
            cmake_spec = {
                'args': [which(CMAKE_DEFAULT_EXECUTABLE)] + cmake_args,
                'version': cmkr.cmake_version,
                'environment': {
                    'PYTHONNOUSERSITE': os.environ.get("PYTHONNOUSERSITE"),
                    'PYTHONPATH': os.environ.get("PYTHONPATH")
                }
            }

            # skip the configure step for a cached build
            env = cmkr.get_cached_generator_env()
            if env is None or cmake_spec != _load_cmake_spec():
                env = cmkr.configure(cmake_args,
                                     skip_generator_test=skip_generator_test,
                                     cmake_source_dir=cmake_source_dir,
                                     cmake_install_dir=skbuild_kw['cmake_install_dir'],
                                     languages=cmake_languages
                                     )
                _save_cmake_spec(cmake_spec)
            cmkr.make(make_args, env=env)
    except SKBuildGeneratorNotFoundError as ex:
        sys.exit(ex)
    except SKBuildError as ex:
        import traceback
        print("Traceback (most recent call last):")
        traceback.print_tb(sys.exc_info()[2])
        print('')
        sys.exit(ex)

    # If any, strip ending slash from each package directory
    package_dir = {package: prefix[:-1] if prefix[-1] == "/" else prefix
                   for package, prefix in package_dir.items()}

    # If needed, set reasonable defaults for package_dir
    for package in packages:
        if package not in package_dir:
            package_dir[package] = package.replace(".", "/")
            if '' in package_dir:
                package_dir[package] = to_unix_path(os.path.join(package_dir[''], package_dir[package]))

    package_prefixes = _collect_package_prefixes(package_dir, packages)

    _classify_installed_files(cmkr.install(), package_data, package_prefixes,
                              py_modules, new_py_modules,
                              scripts, new_scripts,
                              data_files,
                              cmake_source_dir, skbuild_kw['cmake_install_dir'])

    original_manifestin_data_files = []
    if kw.get("include_package_data", False):
        original_manifestin_data_files = parse_manifestin(os.path.join(os.getcwd(), "MANIFEST.in"))
        for path in original_manifestin_data_files:
            _classify_file(path, package_data, package_prefixes,
                           py_modules, new_py_modules,
                           scripts, new_scripts,
                           data_files)

    if developer_mode:
        # Copy packages
        for package, package_file_list in package_data.items():
            for package_file in package_file_list:
                package_file = os.path.join(package_dir[package], package_file)
                cmake_file = os.path.join(CMAKE_INSTALL_DIR(), package_file)
                if os.path.exists(cmake_file):
                    _copy_file(cmake_file, package_file, hide_listing)

        # Copy modules
        for py_module in py_modules:
            package_file = py_module + ".py"
            cmake_file = os.path.join(CMAKE_INSTALL_DIR(), package_file)
            if os.path.exists(cmake_file):
                _copy_file(cmake_file, package_file, hide_listing)
    else:
        _consolidate_package_modules(
            cmake_source_dir, packages, package_dir, py_modules, package_data, hide_listing)

        original_package_data = kw.get('package_data', {}).copy()
        _consolidate_package_data_files(original_package_data, package_prefixes, hide_listing)

        for data_file in original_manifestin_data_files:
            dest_data_file = os.path.join(CMAKE_INSTALL_DIR(), data_file)
            _copy_file(data_file, dest_data_file, hide_listing)

    kw['package_data'] = package_data
    kw['package_dir'] = {
        package: (
            os.path.join(CMAKE_INSTALL_DIR(), prefix)
            if os.path.exists(os.path.join(CMAKE_INSTALL_DIR(), prefix))
            else prefix)
        for prefix, package in package_prefixes
    }

    kw['scripts'] = [
        os.path.join(CMAKE_INSTALL_DIR(), script) if mask else script
        for script, mask in new_scripts.items()
    ]

    kw['data_files'] = [
        (parent_dir, list(file_set))
        for parent_dir, file_set in data_files.items()
    ]

    if 'zip_safe' not in kw:
        kw['zip_safe'] = False

    # Adapted from espdev/ITKPythonInstaller/setup.py.in
    # pylint: disable=missing-docstring
    class BinaryDistribution(upstream_Distribution):
        def has_ext_modules(self):  # pylint: disable=no-self-use
            return has_cmakelists
    kw['distclass'] = BinaryDistribution

    print("")

    return upstream_setup(*args, **kw)