示例#1
0
def _python_get_deps(package_name):
    """ Function that is run in a subprocess to return active dependencies
        of a given package.

        Prints one dependency project name per line to stdout so the
        parent process can capture and parse the list.

        Parameters
        ----------
        package_name : `str`
            Name of the package whose dependencies are printed.
    """
    from pkglib.setuptools.dependency import get_all_requirements, get_dist
    pkg = get_dist(package_name).project_name
    if pkg:
        try:
            print('\n'.join(d.project_name
                            for d in get_all_requirements([pkg], True)))
        # BUGFIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; only resolution errors should trigger the
        # fallback call without the extra flag.
        except Exception:
            print('\n'.join(d.project_name
                            for d in get_all_requirements([pkg])))
示例#2
0
文件: pyenv.py 项目: agiledata/pkglib
def _python_get_deps(package_name):
    """ Function that is run in a subprocess to return active dependencies
        of a given package.

        Prints one dependency project name per line to stdout so the
        parent process can capture and parse the list.

        Parameters
        ----------
        package_name : `str`
            Name of the package whose dependencies are printed.
    """
    from pkglib.setuptools.dependency import get_all_requirements, get_dist
    pkg = get_dist(package_name).project_name
    if pkg:
        try:
            print('\n'.join(d.project_name for d in
                            get_all_requirements([pkg], True)))
        # BUGFIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; only resolution errors should trigger the
        # fallback call without the extra flag.
        except Exception:
            print('\n'.join(d.project_name for d in
                            get_all_requirements([pkg])))
示例#3
0
    def write_all_revisions_(self):
        """ Create ``allrevisions.txt`` file containing subversion revision
            of every project upon which we depend. This won't have the
            dependencies in it if we've not yet been set-up with
            'setup.py develop' or similar.
        """
        my_dist = dependency.get_dist(self.distribution.metadata.name)

        # list of (name,version,url,rev) tuples, seeded with this package
        allrevisions = [(self.distribution.metadata.get_name(),
                         self.distribution.metadata.get_version(),
                         self.full_url, self.revision)]

        # Resolve the full requirement closure for this distribution.
        all_requires = []
        if my_dist:
            my_require = my_dist.as_requirement()
            try:
                all_requires = pkg_resources.working_set.resolve([my_require])
            except (pkg_resources.DistributionNotFound,
                    pkg_resources.VersionConflict):
                # not installed yet -- will probably be OK when we're
                # called after the build has taken place.
                pass

        for dist in all_requires:
            if dist == my_dist or not util.is_inhouse_package(
                    dist.project_name):
                continue
            try:
                revisions = parse.read_allrevisions(dist.location,
                                                    dist.project_name)
            except IOError as ex:
                log.warn("Can't read allrevisions for %s: %s", dist, ex)
                # BUGFIX: skip this dist on read failure. Previously
                # execution fell through to the loop below, raising
                # NameError if this was the first iteration or silently
                # re-using the previous dist's revisions otherwise.
                continue
            for name, version, url, rev in revisions:
                if pkg_resources.safe_name(name) == dist.project_name:
                    allrevisions.append((name, version, url, rev))
                    break
            else:
                log.warn("No revision for %s in %s", dist.project_name, dist)

        data = [
            '# These are the VCS revision numbers used for this particular build.',
            '# This file is used by release tools to tag a working build.'
        ] + [','.join(str(e) for e in rev_data) for rev_data in allrevisions]
        self.write_file("all revisions", self.all_revisions_file,
                        '\n'.join(data))
示例#4
0
    def write_all_revisions_(self):
        """ Create ``allrevisions.txt`` file containing subversion revision
            of every project upon which we depend. This won't have the
            dependencies in it if we've not yet been set-up with
            'setup.py develop' or similar.
        """
        my_dist = dependency.get_dist(self.distribution.metadata.name)

        # list of (name,version,url,rev) tuples, seeded with this package
        allrevisions = [(self.distribution.metadata.get_name(),
                         self.distribution.metadata.get_version(),
                         self.full_url,
                         self.revision)]

        # Resolve the full requirement closure for this distribution.
        all_requires = []
        if my_dist:
            my_require = my_dist.as_requirement()
            try:
                all_requires = pkg_resources.working_set.resolve([my_require])
            except (pkg_resources.DistributionNotFound,
                    pkg_resources.VersionConflict):
                # not installed yet -- will probably be OK when we're
                # called after the build has taken place.
                pass

        for dist in all_requires:
            if dist == my_dist or not util.is_inhouse_package(dist.project_name):
                continue
            try:
                revisions = parse.read_allrevisions(dist.location, dist.project_name)
            except IOError as ex:
                log.warn("Can't read allrevisions for %s: %s", dist, ex)
                # BUGFIX: skip this dist on read failure. Previously
                # execution fell through to the loop below, raising
                # NameError if this was the first iteration or silently
                # re-using the previous dist's revisions otherwise.
                continue
            for name, version, url, rev in revisions:
                if pkg_resources.safe_name(name) == dist.project_name:
                    allrevisions.append((name, version, url, rev))
                    break
            else:
                log.warn("No revision for %s in %s", dist.project_name, dist)

        data = ['# These are the VCS revision numbers used for this particular build.',
                '# This file is used by release tools to tag a working build.'
                ] + [','.join(str(e) for e in rev_data)
                     for rev_data in allrevisions]
        self.write_file("all revisions", self.all_revisions_file,
                        '\n'.join(data))
示例#5
0
    def write_all_revisions(self):
        """ Write the ``allrevisions.txt`` file recording the subversion
            revision of this package and of every project it depends on.
            Dependencies will be absent if we have not yet been set up
            with 'setup.py develop' or similar.
        """
        meta = self.distribution.metadata
        my_dist = dependency.get_dist(meta.name)

        # Seed the (name, version, url, rev) list with our own entry.
        revisions = [(meta.get_name(),
                      meta.get_version(),
                      self.full_url,
                      self.revision)]

        # Resolve the full requirement closure for this package.
        all_requires = []
        if my_dist:
            try:
                all_requires = pkg_resources.working_set.resolve(
                    [my_dist.as_requirement()])
            except (pkg_resources.DistributionNotFound,
                    pkg_resources.VersionConflict):
                # not installed yet -- will probably be OK when we're
                # called after the build has taken place.
                pass

        # Append a revision tuple for each in-house dependency.
        for dist in all_requires:
            if dist == my_dist:
                continue
            if not is_inhouse_package(dist.project_name):
                continue
            rev_data = self.read_all_revisions(dist)  # (name,version,url,rev)
            if rev_data:
                revisions.append(rev_data)

        header = ['# These are the VCS revision numbers used for this particular',
                  '# build. This file can be used by release tools to tag a',
                  '# working build.']
        body = [','.join(str(field) for field in rev_data)
                for rev_data in revisions]
        self.write_file("all revisions", self.all_revisions_file,
                        '\n'.join(header + body))
示例#6
0
def install(cmd,
            reqs,
            add_to_global=False,
            prefer_final=True,
            force_upgrade=False,
            use_existing=False,
            eggs=None,
            reinstall=False):
    """ Install the given requirements via buildout's modified easy_install.

        Parameters
        ----------
        cmd : `setuptools.Command`
           active setuptools Command class
        reqs : `list` of `str`
           list of distutils requirements, eg ['foo==1.2']
        eggs : `list` of `str`
           paths to egg files to use to satisfy requirements
        add_to_global : `bool`
           adds installed distributions to the global working_set,
           making them importable within this process (used by
           fetch_build_eggs).
        prefer_final : `bool`
           prefer released versions of the requirements over dev
           versions; third-party packages always prefer final versions.
        force_upgrade : `bool`
           force downloads of requirements from PyPI -- roughly the
           equivalent of ``easy_install -U acme.foo``.
        use_existing : `bool`
           do not update any packages found in the current working set.
        reinstall : `bool`
            reinstall packages that are already installed.

        Returns
        -------
        ws : `pkg_resources.WorkingSet`
            Working Set for the distributions that were just installed.
    """
    # Anything being upgraded is uninstalled up-front.
    if force_upgrade:
        uninstall_eggs(reqs)

    # Build an installer targeting the destination and index the command
    # class was configured with.
    installer = Installer(dest=cmd.install_dir,
                          index=cmd.index_url,
                          prefer_final=prefer_final)

    egg_dists = [egg_distribution(path) for path in (eggs or [])]
    installer._egg_dists = egg_dists

    # Nasty bit: the final-version filter is monkey-patched elsewhere so
    # it can filter dev versions as well. Passing True here always
    # enables that filter.
    easy_install.prefer_final(True)

    # Baseline working set seeded with the current environment. Handing
    # in what's already installed makes installed packages win over ones
    # on the server -- e.g. an installed release version won't be
    # trumped by a dev version on the server.
    baseline_ws = pkg_resources.WorkingSet(pkg_resources.working_set.entries)

    # The package being set up must be dropped from the baseline,
    # otherwise new versions of it would trigger spurious requirement
    # conflicts during resolution.
    pkg_name = cmd.distribution.metadata.name
    if pkg_name:
        current_dist = dependency.get_dist(pkg_name)
        if current_dist:
            dependency.remove_from_ws(baseline_ws, current_dist)

    # Packages fetched during setup_requires resolution (tracked in the
    # `fetched_setup_requires` field of
    # `pkglib.setuptools.dist.Distribution`) may overlap with
    # install_requires. Removing them from the baseline ensures they are
    # re-added for setup together with any of their own dependencies
    # that are also part of install_requires.
    for fetched in getattr(cmd.distribution, "fetched_setup_requires", []):
        dependency.remove_from_ws(baseline_ws, fetched)

    # Run the installer proper.
    to_setup = installer.install(reqs,
                                 working_set=baseline_ws,
                                 use_existing=use_existing,
                                 force_upgrade=force_upgrade,
                                 reinstall=reinstall)

    return setup_dists(cmd,
                       baseline_ws,
                       to_setup,
                       egg_dists,
                       add_to_global=add_to_global)
示例#7
0
def install(cmd, reqs, add_to_global=False, prefer_final=True,
            force_upgrade=False, use_existing=False, eggs=None,
            reinstall=False):
    """ Install the given requirements via buildout's modified easy_install.

        Parameters
        ----------
        cmd : `setuptools.Command`
           active setuptools Command class
        reqs : `list` of `str`
           list of distutils requirements, eg ['foo==1.2']
        eggs : `list` of `str`
           paths to egg files to use to satisfy requirements
        add_to_global : `bool`
           adds installed distributions to the global working_set,
           making them importable within this process (used by
           fetch_build_eggs).
        prefer_final : `bool`
           prefer released versions of the requirements over dev
           versions; third-party packages always prefer final versions.
        force_upgrade : `bool`
           force downloads of requirements from PyPI -- roughly the
           equivalent of ``easy_install -U acme.foo``.
        use_existing : `bool`
           do not update any packages found in the current working set.
        reinstall : `bool`
            reinstall packages that are already installed.

        Returns
        -------
        ws : `pkg_resources.WorkingSet`
            Working Set for the distributions that were just installed.
    """
    # Anything being upgraded is uninstalled up-front.
    if force_upgrade:
        uninstall_eggs(reqs)

    # Build an installer targeting the destination and index the command
    # class was configured with.
    installer = Installer(dest=cmd.install_dir, index=cmd.index_url,
                          prefer_final=prefer_final)

    egg_dists = [egg_distribution(path) for path in (eggs or [])]
    installer._egg_dists = egg_dists

    # Nasty bit: the final-version filter is monkey-patched elsewhere so
    # it can filter dev versions as well. Passing True here always
    # enables that filter.
    easy_install.prefer_final(True)

    # Baseline working set seeded with the current environment. Handing
    # in what's already installed makes installed packages win over ones
    # on the server -- e.g. an installed release version won't be
    # trumped by a dev version on the server.
    baseline_ws = pkg_resources.WorkingSet(pkg_resources.working_set.entries)

    # The package being set up must be dropped from the baseline,
    # otherwise new versions of it would trigger spurious requirement
    # conflicts during resolution.
    pkg_name = cmd.distribution.metadata.name
    if pkg_name:
        current_dist = dependency.get_dist(pkg_name)
        if current_dist:
            dependency.remove_from_ws(baseline_ws, current_dist)

    # Packages fetched during setup_requires resolution (tracked in the
    # `fetched_setup_requires` field of
    # `pkglib.setuptools.dist.Distribution`) may overlap with
    # install_requires. Removing them from the baseline ensures they are
    # re-added for setup together with any of their own dependencies
    # that are also part of install_requires.
    for fetched in getattr(cmd.distribution, "fetched_setup_requires", []):
        dependency.remove_from_ws(baseline_ws, fetched)

    # Run the installer proper.
    to_setup = installer.install(reqs, working_set=baseline_ws,
                                 use_existing=use_existing,
                                 force_upgrade=force_upgrade,
                                 reinstall=reinstall)

    return setup_dists(cmd, baseline_ws, to_setup, egg_dists,
                       add_to_global=add_to_global)