Example #1
0
def build_docs_and_install(name, version, findlinks):  # pragma no cover
    """Download plugin *name*/*version* from the OpenMDAO-Plugins GitHub org,
    build its Sphinx docs, install it, then install its declared
    dependencies with easy_install.

    Runs inside a temporary directory that is removed on exit; the caller's
    working directory is restored in all cases.
    """
    tdir = tempfile.mkdtemp()
    startdir = os.getcwd()
    os.chdir(tdir)
    try:
        tarpath = download_github_tar('OpenMDAO-Plugins', name, version)

        # extract the repo tar file
        tar = tarfile.open(tarpath)
        tar.extractall()
        tar.close()

        # everything except the archive itself should be one extracted dir
        files = os.listdir('.')
        files.remove(os.path.basename(tarpath))
        if len(files) != 1:
            raise RuntimeError(
                "after untarring, found multiple directories: %s" % files)

        # build sphinx docs
        os.chdir(files[0])  # should be in distrib directory now
        check_call(['plugin', 'build_docs', files[0]])

        # create an sdist so we can query metadata for distrib dependencies
        check_call([sys.executable, 'setup.py', 'sdist', '-d', '.'])

        # sdist produces a zip on Windows, a tar.gz elsewhere
        if sys.platform.startswith('win'):
            tars = fnmatch.filter(os.listdir('.'), "*.zip")
        else:
            tars = fnmatch.filter(os.listdir('.'), "*.tar.gz")
        if len(tars) != 1:
            raise RuntimeError("should have found a single archive file,"
                               " but found %s instead" % tars)

        check_call(['easy_install', '-NZ', tars[0]])

        # now install any dependencies
        metadict = get_metadata(tars[0])
        reqs = metadict.get('requires', [])
        done = set()

        # worklist over the dependency graph: pop one requirement at a time,
        # installing it if absent and queueing its own requirements
        while reqs:
            r = reqs.pop()
            if r not in done:
                done.add(r)
                # fresh WorkingSet each pass so eggs installed by the previous
                # easy_install call are visible
                ws = WorkingSet()
                req = Requirement.parse(r)
                dist = ws.find(req)
                if dist is None:
                    check_call(['easy_install', '-NZ', '-f', findlinks, r])
                    dist = ws.find(req)
                    if dist is None:
                        raise RuntimeError("Couldn't find distribution '%s'" %
                                           r)
                    dist.activate()
                    # NOTE(review): transitive requirements are only queued for
                    # newly-installed distributions, not ones already present
                    # -- confirm this is intended.
                    dct = get_metadata(dist.egg_name().split('-')[0])
                    for new_r in dct.get('requires', []):
                        reqs.append(new_r)
    finally:
        os.chdir(startdir)
        shutil.rmtree(tdir, ignore_errors=True)
Example #2
0
def build_docs_and_install(name, version, findlinks):  # pragma no cover
    """Fetch a plugin from the OpenMDAO-Plugins GitHub org, build its Sphinx
    docs, install it, and install its declared dependencies.

    All work happens in a throw-away temp directory; the caller's working
    directory is restored and the temp tree removed on exit.
    """
    work_dir = tempfile.mkdtemp()
    orig_dir = os.getcwd()
    os.chdir(work_dir)
    try:
        tarpath = download_github_tar('OpenMDAO-Plugins', name, version)

        # unpack the downloaded repository archive
        archive = tarfile.open(tarpath)
        archive.extractall()
        archive.close()

        # apart from the archive itself there should be exactly one entry
        entries = os.listdir('.')
        entries.remove(os.path.basename(tarpath))
        if len(entries) != 1:
            raise RuntimeError("after untarring, found multiple directories: %s"
                               % entries)

        # build sphinx docs from inside the distribution directory
        distrib_dir = entries[0]
        os.chdir(distrib_dir)
        check_call(['plugin', 'build_docs', distrib_dir])

        # make an sdist so dependency metadata can be queried from it
        check_call([sys.executable, 'setup.py', 'sdist', '-d', '.'])

        # sdist emits a zip on Windows and a tar.gz everywhere else
        pattern = "*.zip" if sys.platform.startswith('win') else "*.tar.gz"
        archives = fnmatch.filter(os.listdir('.'), pattern)
        if len(archives) != 1:
            raise RuntimeError("should have found a single archive file,"
                               " but found %s instead" % archives)

        check_call(['easy_install', '-NZ', archives[0]])

        # walk the dependency graph, installing anything not already present
        metadict = get_metadata(archives[0])
        pending = metadict.get('requires', [])
        seen = set()

        while pending:
            r = pending.pop()
            if r in seen:
                continue
            seen.add(r)
            # a fresh WorkingSet sees eggs installed since the last check
            ws = WorkingSet()
            req = Requirement.parse(r)
            dist = ws.find(req)
            if dist is None:
                check_call(['easy_install', '-NZ', '-f', findlinks, r])
                dist = ws.find(req)
                if dist is None:
                    raise RuntimeError("Couldn't find distribution '%s'" % r)
                dist.activate()
                # queue the freshly installed distribution's own requirements
                dct = get_metadata(dist.egg_name().split('-')[0])
                pending.extend(dct.get('requires', []))
    finally:
        os.chdir(orig_dir)
        shutil.rmtree(work_dir, ignore_errors=True)
Example #3
0
    def test_find_conflicting(self):
        """WorkingSet.find raises VersionConflict when the installed version
        of a project cannot satisfy the requirement."""
        working_set = WorkingSet([])
        foo_dist = Distribution.from_filename("/foo_dir/Foo-1.2.egg")
        working_set.add(foo_dist)

        # a requirement that the installed Foo 1.2 cannot satisfy
        conflicting = next(parse_requirements("Foo<1.2"))

        with pytest.raises(VersionConflict) as excinfo:
            working_set.find(conflicting)

        # report() renders a human-readable description of the conflict
        expected = 'Foo 1.2 is installed but Foo<1.2 is required'
        assert excinfo.value.report() == expected
Example #4
0
    def test_find_conflicting(self):
        """WorkingSet.find must raise VersionConflict when the installed
        version of a project does not satisfy the requirement."""
        ws = WorkingSet([])
        Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg")
        ws.add(Foo)

        # create a requirement that conflicts with Foo 1.2
        req = next(parse_requirements("Foo<1.2"))

        with pytest.raises(VersionConflict) as vc:
            ws.find(req)

        # report() renders a human-readable description of the conflict
        msg = 'Foo 1.2 is installed but Foo<1.2 is required'
        assert vc.value.report() == msg
Example #5
0
def test_LazyModule():
    """Modules registered via LazyModule.import_ep must stay unimported until
    an attribute is first accessed, and subsequent imports must return the
    real module instead of the lazy stand-in."""

    # create an entry point for taurus.core.util.test.dumm
    w = WorkingSet()
    d = w.find(Requirement.parse('taurus'))
    ep = EntryPoint.parse("dummy_mod = taurus.core.util.test.dummy", dist=d)
    modname = ep.name

    # lazy load the ep module as taurus.fbt
    LazyModule.import_ep(modname, "taurus", ep)

    # check that lazy-loading did not import the entry point modules
    assert modname not in sys.modules
    assert ep.module_name not in sys.modules

    # import the module and check that it is a LazyModule
    import taurus.dummy_mod as lzm
    assert isinstance(lzm, LazyModule)

    # same again
    import taurus.dummy_mod as lzm
    assert isinstance(lzm, LazyModule)

    # now access a member of the lazy module
    assert lzm.foo == 1

    # ...and check that any subsequent import will return a "real" module,
    # not a lazy one

    import taurus.dummy_mod as lzm
    assert not isinstance(lzm, LazyModule)
    assert isinstance(lzm, ModuleType)
Example #6
0
def load_plugins(
    build_configuration: BuildConfiguration.Builder,
    plugins: List[str],
    working_set: WorkingSet,
) -> None:
    """Load named plugins from the current working_set into the supplied build_configuration.

    "Loading" a plugin here refers to calling registration methods -- it is assumed each plugin
    is already on the path and an error will be thrown if it is not. Plugins should define their
    entrypoints in the `pantsbuild.plugin` group when configuring their distribution.

    Like source backends, the `build_file_aliases`, and `register_goals` methods are called if
    those entry points are defined.

    * Plugins are loaded in the order they are provided. *

    This is important as loading can add, remove or replace existing tasks installed by other plugins.

    If a plugin needs to assert that another plugin is registered before it, it can define an
    entrypoint "load_after" which can return a list of plugins which must have been loaded before it
    can be loaded. This does not change the order or what plugins are loaded in any way -- it is
    purely an assertion to guard against misconfiguration.

    :param build_configuration: The BuildConfiguration (for adding aliases).
    :param plugins: A list of plugin names optionally with versions, in requirement format.
                              eg ['widgetpublish', 'widgetgen==1.2'].
    :param working_set: A pkg_resources.WorkingSet to load plugins from.
    """
    # Distributions already loaded, keyed by requirement key; consulted by
    # the "load_after" ordering assertion below.
    loaded: Dict = {}
    for plugin in plugins or []:
        req = Requirement.parse(plugin)
        dist = working_set.find(req)
        if not dist:
            raise PluginNotFound(f"Could not find plugin: {req}")

        entries = dist.get_entry_map().get("pantsbuild.plugin", {})

        # Assert (not enforce) the declared load-order constraints.
        if "load_after" in entries:
            for dep_name in entries["load_after"].load()():
                dep = Requirement.parse(dep_name)
                if dep.key not in loaded:
                    raise PluginLoadOrderError(
                        f"Plugin {plugin} must be loaded after {dep}")

        # Registration entry points, dispatched in a fixed order.
        registrations = (
            ("target_types", build_configuration.register_target_types),
            ("build_file_aliases", build_configuration.register_aliases),
            ("rules", build_configuration.register_rules),
        )
        for entry_name, register in registrations:
            if entry_name in entries:
                register(entries[entry_name].load()())

        loaded[dist.as_requirement().key] = dist
Example #7
0
def build_docs_and_install(name, version, findlinks):  # pragma no cover
    tdir = tempfile.mkdtemp()
    startdir = os.getcwd()
    os.chdir(tdir)
    try:
        tarpath = download_github_tar('OpenMDAO-Plugins', name, version)

        # extract the repo tar file
        tar = tarfile.open(tarpath)
        tar.extractall()
        tar.close()

        files = os.listdir('.')
        files.remove(os.path.basename(tarpath))
        if len(files) != 1:
            raise RuntimeError("after untarring, found multiple directories: %s"
                               % files)

        os.chdir(files[0])  # should be in distrib directory now

        cfg = SafeConfigParser(dict_type=OrderedDict)
        cfg.readfp(open('setup.cfg', 'r'), 'setup.cfg')
        if cfg.has_option('metadata', 'requires-dist'):
            reqs = cfg.get('metadata', 'requires-dist').strip()
            reqs = reqs.replace(',', ' ')
            reqs = [n.strip() for n in reqs.split()]
        else:
            # couldn't find requires-dist in setup.cfg, so
            # create an sdist so we can query metadata for distrib dependencies
            tarname = _bld_sdist_and_install(deps=False)

            # now find any dependencies
            metadict = get_metadata(tarname)
            reqs = metadict.get('requires', [])

        # install dependencies (some may be needed by sphinx)
        ws = WorkingSet()
        for r in reqs:
            print "Installing dependency '%s'" % r
            req = Requirement.parse(r)
            dist = ws.find(req)
            if dist is None:
                try:
                    check_call(['easy_install', '-Z', '-f', findlinks, r])
                except Exception:
                    traceback.print_exc()

        # build sphinx docs
        check_call(['plugin', 'build_docs', files[0]])

        # make a new sdist with docs in it and install it
        tarname = _bld_sdist_and_install()
    finally:
        os.chdir(startdir)
        shutil.rmtree(tdir, ignore_errors=True)
Example #8
0
def build_docs_and_install(name, version, findlinks):  # pragma no cover
    """Download plugin *name*/*version* from the OpenMDAO-Plugins GitHub org,
    install its dependencies, build its Sphinx docs, and install an sdist
    that contains them.

    Python 2 only (print statement).  Executes in a throw-away temporary
    directory and restores the caller's working directory on exit.
    """
    tdir = tempfile.mkdtemp()
    startdir = os.getcwd()
    os.chdir(tdir)
    try:
        tarpath = download_github_tar('OpenMDAO-Plugins', name, version)

        # extract the repo tar file
        tar = tarfile.open(tarpath)
        tar.extractall()
        tar.close()

        # everything except the archive itself should be one extracted dir
        files = os.listdir('.')
        files.remove(os.path.basename(tarpath))
        if len(files) != 1:
            raise RuntimeError("after untarring, found multiple directories: %s"
                               % files)

        os.chdir(files[0])  # should be in distrib directory now

        # NOTE(review): the file object handed to readfp is never closed --
        # harmless in this short-lived process but worth fixing.
        cfg = SafeConfigParser(dict_type=OrderedDict)
        cfg.readfp(open('setup.cfg', 'r'), 'setup.cfg')
        if cfg.has_option('metadata', 'requires-dist'):
            reqs = cfg.get('metadata', 'requires-dist').strip()
            reqs = reqs.replace(',', ' ')
            reqs = [n.strip() for n in reqs.split()]
        else:
            # couldn't find requires-dist in setup.cfg, so
            # create an sdist so we can query metadata for distrib dependencies
            tarname = _bld_sdist_and_install(deps=False)

            # now find any dependencies
            metadict = get_metadata(tarname)
            reqs = metadict.get('requires', [])

        # install dependencies (some may be needed by sphinx)
        ws = WorkingSet()
        for r in reqs:
            print "Installing dependency '%s'" % r
            req = Requirement.parse(r)
            dist = ws.find(req)
            if dist is None:
                # best effort: failures are logged and skipped
                try:
                    check_call(['easy_install', '-Z', '-f', findlinks, r])
                except Exception:
                    traceback.print_exc()

        # build sphinx docs
        check_call(['plugin', 'build_docs', files[0]])

        # make a new sdist with docs in it and install it
        tarname = _bld_sdist_and_install()
    finally:
        os.chdir(startdir)
        shutil.rmtree(tdir, ignore_errors=True)
Example #9
0
def parse_requirement(pkgstring, comparator='=='):
    """Split a requirement string into (project name, version specs).

    If *pkgstring* carries no version specifier, fall back to pinning the
    currently installed distribution's version with *comparator*; when no
    distribution is installed either, the specs come back empty.
    """
    install_req = InstallRequirement.from_line(pkgstring)
    pkg_name = install_req.name
    specs = str(install_req.specifier)
    if specs:
        return pkg_name, specs

    # no explicit pin: look up the installed version, if any
    req = Requirement.parse(pkg_name)
    dist = WorkingSet().find(req)
    if dist is not None:
        specs = "%s%s" % (comparator, dist.version)
    return req.project_name, specs
Example #10
0
def check(request):
    """Heartbeat check reporting the configured package's installed version.

    Reads 'package_name' from settings.HEARTBEAT; returns a dict with the
    distribution's name and version, or an error entry when no matching
    distribution is installed.
    """
    package_name = settings.HEARTBEAT.get('package_name')
    if not package_name:
        raise ImproperlyConfigured(
            'Missing package_name key from heartbeat configuration')

    working_set = WorkingSet()
    requirement = Requirement.parse(package_name)
    distro = working_set.find(requirement)
    if distro:
        return dict(name=distro.project_name, version=distro.version)
    return dict(error='no distribution found for {}'.format(package_name))
Example #11
0
def main(args, options):
    """Distill each named distribution from the --site directory into a zip.

    Builds a WorkingSet from the distributions found under options.site_dir,
    then for each requirement in *args* locates the matching distribution
    and runs Distiller on it, printing the output path.
    """
    from pkg_resources import WorkingSet, Requirement, find_distributions

    if not options.site_dir:
        app.error('Must supply --site')

    distributions = list(find_distributions(options.site_dir))
    working_set = WorkingSet()
    for dist in distributions:
        working_set.add(dist)

    for arg in args:
        arg_req = Requirement.parse(arg)
        found_dist = working_set.find(arg_req)
        if not found_dist:
            print('Could not find %s!' % arg_req)
            # FIX: previously fell through and called Distiller(None),
            # crashing on the first missing requirement
            continue
        out_zip = Distiller(found_dist).distill()
        print('Dumped %s => %s' % (arg_req, out_zip))
Example #12
0
def main(args, options):
  """Distill each named distribution from the --site directory into a zip.

  Builds a WorkingSet from the distributions found under options.site_dir,
  then for each requirement in *args* locates the matching distribution
  and runs Distiller on it, printing the output path.
  """
  from pkg_resources import WorkingSet, Requirement, find_distributions

  if not options.site_dir:
    app.error('Must supply --site')

  distributions = list(find_distributions(options.site_dir))
  working_set = WorkingSet()
  for dist in distributions:
    working_set.add(dist)

  for arg in args:
    arg_req = Requirement.parse(arg)
    found_dist = working_set.find(arg_req)
    if not found_dist:
      print('Could not find %s!' % arg_req)
      # FIX: previously fell through and called Distiller(None),
      # crashing on the first missing requirement
      continue
    out_zip = Distiller(found_dist).distill()
    print('Dumped %s => %s' % (arg_req, out_zip))
Example #13
0
def build_docs_and_install(owner, name, version, findlinks):  # pragma no cover
    """Download plugin *name*/*version* from *owner*'s GitHub account, install
    its dependencies, build its Sphinx docs, and install an sdist that
    contains them.

    Python 2 only (print statement and three-argument raise).  Runs inside a
    temporary directory that is removed on exit; the caller's working
    directory is always restored.
    """
    tdir = tempfile.mkdtemp()
    startdir = os.getcwd()
    os.chdir(tdir)
    try:
        tarpath = download_github_tar(owner, name, version)

        # extract the repo tar file
        tar = tarfile.open(tarpath)
        tar.extractall()
        tar.close()

        # everything except the archive itself should be one extracted dir
        files = os.listdir(".")
        files.remove(os.path.basename(tarpath))
        if len(files) != 1:
            raise RuntimeError("after untarring, found multiple directories: %s" % files)

        os.chdir(files[0])  # should be in distrib directory now

        cfg = SafeConfigParser(dict_type=OrderedDict)

        try:
            # NOTE(review): the file object handed to readfp is never closed.
            cfg.readfp(open("setup.cfg", "r"), "setup.cfg")

        except IOError as io_error:
            # py2 three-argument raise: re-raise with a clearer message while
            # preserving the original traceback
            raise IOError, "OpenMDAO plugins must have a setup.cfg: {}".format(io_error), sys.exc_info()[2]

        try:
            # preferred path: read dependency list straight from setup.cfg
            reqs = cfg.get("metadata", "requires-dist").strip()
            reqs = reqs.replace(",", " ")
            reqs = [n.strip() for n in reqs.split()]

            try:
                # merge any plugin-declared find_links URLs with the caller's
                flinks = cfg.get("easy_install", "find_links").strip()
                flinks = flinks.split("\n")
                flinks = [n.strip() for n in flinks]

                flinks.append(findlinks)

                findlinks = " ".join(flinks)

            except (NoSectionError, NoOptionError):
                pass

        except NoOptionError:
            # couldn't find requires-dist in setup.cfg, so
            # create an sdist so we can query metadata for distrib dependencies
            tarname = _bld_sdist_and_install(deps=False)

            # now find any dependencies
            metadict = get_metadata(tarname)
            reqs = metadict.get("requires", [])

        # install dependencies (some may be needed by sphinx)
        ws = WorkingSet()
        for r in reqs:
            print "Installing dependency '%s'" % r
            req = Requirement.parse(r)
            dist = ws.find(req)
            if dist is None:
                # best effort: failures are logged and skipped
                try:
                    check_call(["easy_install", "-Z", "-f", findlinks, r])
                except Exception:
                    traceback.print_exc()

        # build sphinx docs
        check_call(["plugin", "build_docs", files[0]])

        # make a new sdist with docs in it and install it
        tarname = _bld_sdist_and_install()
    finally:
        os.chdir(startdir)
        shutil.rmtree(tdir, ignore_errors=True)
Example #14
0
def load_plugins(
    build_configuration: BuildConfiguration,
    plugins: List[str],
    working_set: WorkingSet,
    is_v1_plugin: bool,
) -> None:
    """Load named plugins from the current working_set into the supplied build_configuration.

    "Loading" a plugin here refers to calling registration methods -- it is assumed each plugin
    is already on the path and an error will be thrown if it is not. Plugins should define their
    entrypoints in the `pantsbuild.plugin` group when configuring their distribution.

    Like source backends, the `build_file_aliases`, `global_subsystems` and `register_goals` methods
    are called if those entry points are defined.

    * Plugins are loaded in the order they are provided. *

    This is important as loading can add, remove or replace existing tasks installed by other plugins.

    If a plugin needs to assert that another plugin is registered before it, it can define an
    entrypoint "load_after" which can return a list of plugins which must have been loaded before it
    can be loaded. This does not change the order or what plugins are loaded in any way -- it is
    purely an assertion to guard against misconfiguration.

    :param build_configuration: The BuildConfiguration (for adding aliases).
    :param plugins: A list of plugin names optionally with versions, in requirement format.
                              eg ['widgetpublish', 'widgetgen==1.2'].
    :param working_set: A pkg_resources.WorkingSet to load plugins from.
    :param is_v1_plugin: Whether this is a v1 or v2 plugin.
    """
    # Distributions already loaded, keyed by requirement key; consulted when
    # checking "load_after" assertions for later plugins.
    loaded: Dict = {}
    for plugin in plugins or []:
        req = Requirement.parse(plugin)
        dist = working_set.find(req)

        if not dist:
            raise PluginNotFound(f"Could not find plugin: {req}")

        # all registration hooks live in the 'pantsbuild.plugin' entry group
        entries = dist.get_entry_map().get("pantsbuild.plugin", {})

        # assert (not enforce) the plugin's declared load-order constraints
        if "load_after" in entries:
            deps = entries["load_after"].load()()
            for dep_name in deps:
                dep = Requirement.parse(dep_name)
                if dep.key not in loaded:
                    raise PluginLoadOrderError(
                        f"Plugin {plugin} must be loaded after {dep}")

        if is_v1_plugin:
            if "register_goals" in entries:
                entries["register_goals"].load()()

            # TODO: Might v2 plugins need to register global subsystems? Hopefully not.
            if "global_subsystems" in entries:
                subsystems = entries["global_subsystems"].load()()
                build_configuration.register_optionables(subsystems)

            # The v2 target API is still TBD, so we keep build_file_aliases as a v1-only thing.
            # Having thus no overlap between v1 and v2 backend entrypoints makes things much simpler.
            # TODO: Revisit, ideally with a v2-only entry point, once the v2 target API is a thing.
            if "build_file_aliases" in entries:
                aliases = entries["build_file_aliases"].load()()
                build_configuration.register_aliases(aliases)
        else:
            if "rules" in entries:
                rules = entries["rules"].load()()
                build_configuration.register_rules(rules)
            if "build_file_aliases2" in entries:
                build_file_aliases2 = entries["build_file_aliases2"].load()()
                build_configuration.register_aliases(build_file_aliases2)
        loaded[dist.as_requirement().key] = dist
Example #15
0
class SetupEggSubRecipe(BaseDownloadSubRecipe):
    """Buildout sub-recipe that downloads, builds and installs a setuptools
    egg (and, optionally, its build-time dependencies) into either the
    recipe-wide working set or a per-part one.

    NOTE(review): this code targets Python 2 (``string.letters`` and the
    in-place integer division in ``resolve_signature``) -- confirm before
    running under Python 3.
    """

    @property
    @reify
    def index_url(self):
        """Package index URL: part option 'index' wins, then the buildout
        global, then the module-level default."""
        return self.options.get(
            'index',
            self.recipe.buildout['buildout'].get('index', default_index_url))

    @property
    @reify
    def find_links_urls(self):
        """Whitespace-separated find-links URLs, part option first."""
        return self.options.get(
            'find-links',
            self.recipe.buildout['buildout'].get('find-links', '')).split()

    @property
    @reify
    def source_key_processors(self):
        """Map option name -> parser producing ('egg', spec) pairs."""
        return {
            'egg': lambda x: [('egg', x.strip())],
            'eggs': lambda x: [('egg', y.strip()) for y in x.splitlines()],
        }

    @property
    @reify
    def source_option_processors(self):
        """Inherited option parsers extended with egg-build specific ones."""
        ret = super(SetupEggSubRecipe, self).source_option_processors.copy()
        ret.update({
            'build': string_as_bool,
            'build-dependencies': string_as_bool,
            'extra-paths': lambda x: [x.strip() for x in x.splitlines()],
            'egg-path': lambda x: [x.strip() for x in x.splitlines()],
        })
        return ret

    @property
    @reify
    def allowed_options(self):
        """Inherited options plus egg naming/lookup and after-build patch
        options."""
        ret = copy(super(SetupEggSubRecipe, self).allowed_options)
        ret.extend([
            'egg-name',
            'find-egg',
            'path',
            'signature',
            'url',
        ])
        for stage in ['after-build']:
            ret.extend([
                self.resolve_stage('patch', stage=stage),
                self.resolve_stage('patch-options', stage=stage),
                self.resolve_stage('patch-binary', stage=stage),
            ])
        return ret

    def initialize(self):
        """Choose the working set (shared vs. per-part) and build the package
        index."""
        super(SetupEggSubRecipe, self).initialize()
        # 'split-working-set' isolates this part's eggs from the recipe-wide set
        if self.recipe.options.get_as_bool('split-working-set', False):
            self.working_set = WorkingSet([])
        else:
            self.working_set = self.recipe.working_set
        self.index = get_index(self.index_url, self.find_links_urls)

    def default_eggs_directory(self, develop=False):
        """Return the buildout eggs directory, preferring develop-eggs for
        develop installs; fall back to ../eggs next to the script."""
        if develop and 'develop-eggs-directory' in self.recipe.buildout['buildout']:
            return self.recipe.buildout['buildout']['develop-eggs-directory']
        elif 'eggs-directory' in self.recipe.buildout['buildout']:
            return self.recipe.buildout['buildout']['eggs-directory']
        else:
            return os.path.join(os.path.dirname(sys.argv[0]), '..', 'eggs')

    def populate_source(self, source, dependency=False):
        """Fill in derived keys (requirement, egg path, environment, build
        options, signature) on one source dict; *dependency* sources skip
        user option loading."""
        super(SetupEggSubRecipe, self).populate_source(
            source, load_options=not dependency)
        if 'egg' not in source:
            source['egg'] = self.name
        source['requirement'] = Requirement.parse(source['egg'])
        source['egg'] = str(source['requirement'])
        # 'find-egg' may use a different (wider) requirement for index lookup
        source['find-requirement'] = Requirement.parse(source['find-egg']) \
            if 'find-egg' in source else source['requirement']
        source['find-egg'] = str(source['find-requirement'])
        source.setdefault('build', True)
        egg_directories = []
        if 'develop-eggs-directory' in self.recipe.buildout['buildout']:
            egg_directories.append(self.recipe.buildout['buildout']['develop-eggs-directory'])
        if 'eggs-directory' in self.recipe.buildout['buildout']:
            egg_directories.append(self.recipe.buildout['buildout']['eggs-directory'])
        # NOTE(review): the conditional expression binds tighter than '+', so
        # when 'location' is present egg-path becomes just [location] and the
        # extra-paths/egg-directory entries are dropped -- confirm intended.
        source.setdefault('egg-path',
                          [source['location']] if 'location' in source else [] +
                          source.get('extra-paths', []) + egg_directories +
                          buildout_and_setuptools_path)
        source.setdefault('location',
                          self.default_eggs_directory(develop=source.get('develop', False)))
        source['egg-environment'] = Environment(source['egg-path'])
        source['build-options'] = {}
        if not dependency:
            for src_key, dst_key in [(key, re.sub('-', '_', key)) for key in
                                     [option for option in self.options
                                      if option in BUILD_EXT_OPTIONS]]:
                source['build-options'][dst_key] = self.options[src_key]
        source.setdefault('signature', self.resolve_signature(source))

    def process_source(self, source):
        """Build, patch and install one source unless the working set already
        satisfies its requirement."""
        if self.working_set.find(source['requirement']) is not None:
            return
        if source['build']:
            self.build_source(source)
        self.patch_source(source, cwdkey='build-directory',
                          stage='after-build')
        self.install_source(source)

    def acquire_source(self, source, destkey='working-directory'):
        """Locate a matching distribution (locally or via the index),
        download it if needed, and queue build-time dependencies ahead of
        this source."""
        candidates = self.requirement_match_list(source['egg-environment'], source['requirement'],
                                                 strip_signature=source['signature'])
        if not candidates or self.recipe.newest:
            if 'url' not in source:
                if self.recipe.offline:
                    raise UserError(
                        '''Couldn't download index "{}" in offline mode.'''.format(self.index))
                self.index.find_packages(source['find-requirement'])
                distributions = self.requirement_match_list(
                    self.index, source['find-requirement'],
                    requirement_type=self.requirement_type(source))
                if not distributions:
                    raise UserError('''No distributions available for requirement "{}".'''.format(
                        source['find-egg']))
                if not candidates or distributions[0].parsed_version > candidates[0].parsed_version:
                    source['url'] = distributions[0].location
                    source['egg-name'] = distributions[0].egg_name()
                else:
                    # the local candidate is as new as the index's best: reuse it
                    source['source-directory'] = candidates[0].location
                    source['build'] = False
                    source['egg-name'] = candidates[0].egg_name()
            if 'source-directory' not in source:
                self.logger.info("Getting distribution for '{}'.".format(
                    source['requirement'].project_name))
                super(SetupEggSubRecipe, self).acquire_source(source, destkey=destkey)
        else:
            source['source-directory'] = candidates[0].location
            source['build'] = False
            source['egg-name'] = candidates[0].egg_name()
        if source.get('build-dependencies', True):
            sourceenv = Environment([source['source-directory']])
            for key in sourceenv:
                for dist in sourceenv[key]:
                    for dependency_requirement in dist.requires():
                        dependency_source = {'egg': str(
                            dependency_requirement), 'parent-egg': str(source['egg'])}
                        # insert before the current source so dependencies
                        # are processed first
                        self.sources.insert(self.sources.index(
                            source), dependency_source)
                        self.populate_source(
                            dependency_source, dependency=True)
                        self.prepare_source(dependency_source)

    def build_source(self, source):
        """Run easy_install/develop on the source tree through a generated
        setup script, leaving the result in source['build-directory']."""
        self.logger.info('''Building: {}'''.format(source['egg-name']))
        undo = []
        setup_py = os.path.join(source['source-directory'], 'setup.py')
        try:
            # temporarily replace setup.cfg so build options and the signature
            # tag can be injected; 'undo' restores the original state
            setup_cfg = os.path.join(source['source-directory'], 'setup.cfg')
            if os.path.exists(setup_cfg):
                os.rename(setup_cfg, setup_cfg + '-develop-aside')

                def restore_old_setup():
                    if os.path.exists(setup_cfg):
                        os.remove(setup_cfg)
                    os.rename(setup_cfg + '-develop-aside', setup_cfg)
                undo.append(restore_old_setup)
            else:
                open(setup_cfg, 'w').close()
                undo.append(lambda: os.remove(setup_cfg))
            updates = {}
            if source['build-options']:
                updates['build_ext'] = source['build-options']
            if source['signature']:
                # tag the egg version so option changes yield a distinct egg
                updates['egg_info'] = {
                    'tag_build': "_{}".format(source['signature']),
                }
            setuptools_edit_config(setup_cfg, updates)

            setup_cmd_fd, setup_cmd = mkstemp(dir=source['source-directory'])
            setup_cmd_fh = os.fdopen(setup_cmd_fd, 'w')
            undo.append(lambda: os.remove(setup_cmd))
            undo.append(setup_cmd_fh.close)

            setup_cmd_fh.write((setup_template % dict(
                setuptools=setuptools_location,
                setupdir=source['source-directory'],
                setup=setup_py,
                __file__=setup_py,
            )).encode())
            setup_cmd_fh.flush()

            build_directory = mkdtemp('build',
                                      dir=source['source-directory'])

            action_args = []
            if source.get('develop', False) is True:
                action = 'develop'
                action_args.append('-Z')
            else:
                action = 'easy_install'
                action_args.append(source['source-directory'])

            args = [source['executable'], setup_cmd, action, '-mxNd',
                    build_directory]
            if self.log_level < logging.INFO:
                args += ['-v']
            elif self.log_level > logging.INFO:
                args += ['-q']
            args += action_args

            self.logger.debug('''Running: {}'''.format(' '.join(args)))
            self.recipe.call(*args, stdout_log_level=logging.DEBUG)
            source['build-directory'] = build_directory
        finally:
            # run cleanups in reverse registration order
            for obj in reversed(undo):
                obj()

    def install_source(self, source, destkey='location'):
        """Copy/move built distributions into source[destkey] and register
        them on the working set."""
        if 'build-directory' not in source:
            return
        env = Environment([source['build-directory']])
        self.recipe.mkdir(source[destkey])
        for dists in [env[x] for x in env]:
            for src_dist in dists:
                dst_dist = src_dist.clone(
                    location=os.path.join(source[destkey],
                                          "{}.{}".format(src_dist.egg_name(), {
                                              EGG_DIST: 'egg',
                                              DEVELOP_DIST: 'egg-link',
                                          }[src_dist.precedence])))
                # eggs are copied/extracted; develop egg-links are moved
                {
                    EGG_DIST: lambda src, dst:
                              self.recipe.copy(src, dst)
                              if os.path.isdir(src) else
                              self.recipe.extract_archive(src, dst),
                    DEVELOP_DIST: os.rename,
                }[src_dist.precedence](src_dist.location, dst_dist.location)
                # redo_pyc(newloc)
                self.working_set.add_entry(dst_dist.location)
                self.logger.info('''Got {}.'''.format(
                    str(dst_dist.egg_name())))

    @classmethod
    def requirement_match_list(cls, index, requirement, requirement_type=None,
                               prefer_final=True, strip_signature=''):
        """Return the best-versioned distributions in *index* that match
        *requirement*, optionally ignoring a trailing signature tag and
        preferring final releases."""
        def mangle_candidate(dist):
            # strip the trailing signature tag before matching versions
            if strip_signature:
                dist = dist.clone(version=re.sub(
                    r'_{}$'.format(strip_signature), '', dist.version))
            return dist
        candidates = [candidate for candidate in index[requirement.project_name]
                      if mangle_candidate(candidate) in requirement]
        if not candidates:
            return []
        if requirement_type is not None:
            candidates = [candidate for candidate in candidates
                          if candidate.precedence == requirement_type]
        if prefer_final:
            final_candidates = [candidate for candidate in candidates
                                if not candidate.parsed_version.is_prerelease]
            if final_candidates:
                candidates = final_candidates
        # keep every candidate tied for the highest parsed version
        best = []
        bestv = None
        for candidate in candidates:
            candidatev = candidate.parsed_version
            if not bestv or candidatev > bestv:
                best = [candidate]
                bestv = candidatev
            elif candidatev == bestv:
                best.append(candidate)
        best.sort()
        return best

    @classmethod
    def requirement_type(cls, source):
        """Map the 'egg-type' option to a pkg_resources precedence constant
        (None when unspecified or unrecognized)."""
        egg_type = source.get('egg-type', None)
        try:
            return {
                'source': SOURCE_DIST,
                'binary': BINARY_DIST,
                None: None,
            }[egg_type]
        except KeyError:
            return None

    @classmethod
    def resolve_signature(cls, source):
        """Derive a short base-62 signature from the build/patch options, or
        None when there is nothing to sign.

        NOTE(review): relies on string.letters and in-place integer division
        -- Python 2 only.
        """
        struct = []
        for key, value in source['build-options'].items():
            struct.append((key, value))
        for key in ('patches', 'patch-options', 'patch-binary'):
            if key in source:
                struct.append(source[key])
        struct = tuple(struct) # pylint: disable=redefined-variable-type
        if not struct:
            return None
        base = string.digits + string.letters
        base_length = len(base)
        ret = ''
        struct_hash = abs(hash(struct))
        while struct_hash > 0:
            ret = base[struct_hash % base_length] + ret
            struct_hash /= base_length
        return "{}_{}".format(SIGNATURE_MARKER, ret)
Example #16
0
def load_plugins(
    build_configuration: BuildConfiguration,
    plugins: List[str],
    working_set: WorkingSet,
    is_v1_plugin: bool,
) -> None:
    """Load named plugins from the current working_set into the supplied build_configuration.

    "Loading" a plugin here refers to calling registration methods -- it is assumed each plugin
    is already on the path and an error will be thrown if it is not. Plugins should define their
    entrypoints in the `pantsbuild.plugin` group when configuring their distribution.

    Like source backends, the `build_file_aliases`, `global_subsystems` and `register_goals` methods
    are called if those entry points are defined.

    * Plugins are loaded in the order they are provided. *

    This is important as loading can add, remove or replace existing tasks installed by other plugins.

    If a plugin needs to assert that another plugin is registered before it, it can define an
    entrypoint "load_after" which can return a list of plugins which must have been loaded before it
    can be loaded. This does not change the order or what plugins are loaded in any way -- it is
    purely an assertion to guard against misconfiguration.

    :param build_configuration: The BuildConfiguration (for adding aliases).
    :param plugins: A list of plugin names optionally with versions, in requirement format.
                              eg ['widgetpublish', 'widgetgen==1.2'].
    :param working_set: A pkg_resources.WorkingSet to load plugins from.
    :param is_v1_plugin: Whether this is a v1 or v2 plugin.
    :raises PluginNotFound: if a requested plugin is not in the working set.
    :raises PluginLoadOrderError: if a plugin's "load_after" assertion fails.
    """
    loaded: Dict = {}
    for plugin in plugins or []:
        req = Requirement.parse(plugin)
        dist = working_set.find(req)

        if not dist:
            raise PluginNotFound(f"Could not find plugin: {req}")

        entries = dist.get_entry_map().get("pantsbuild.plugin", {})

        if "load_after" in entries:
            deps = entries["load_after"].load()()
            for dep_name in deps:
                dep = Requirement.parse(dep_name)
                # "load_after" only asserts ordering; it never changes it, so
                # an unmet dependency is a configuration error.
                if dep.key not in loaded:
                    raise PluginLoadOrderError(f"Plugin {plugin} must be loaded after {dep}")

        # While the Target API is a V2 concept, we expect V1 plugin authors to still write Target
        # API bindings. So, we end up using this entry point regardless of V1 vs. V2.
        if "targets2" in entries:
            targets = entries["targets2"].load()()
            build_configuration.register_targets(targets)

        if is_v1_plugin:
            if "register_goals" in entries:
                entries["register_goals"].load()()
            if "global_subsystems" in entries:
                subsystems = entries["global_subsystems"].load()()
                build_configuration.register_optionables(subsystems)
            # For now, `build_file_aliases` is still V1-only. TBD what entry-point we use for
            # `objects` and `context_aware_object_factories`.
            if "build_file_aliases" in entries:
                aliases = entries["build_file_aliases"].load()()
                build_configuration.register_aliases(aliases)
        else:
            if "rules" in entries:
                rules = entries["rules"].load()()
                build_configuration.register_rules(rules)
            if "build_file_aliases2" in entries:
                build_file_aliases2 = entries["build_file_aliases2"].load()()
                build_configuration.register_aliases(build_file_aliases2)
            # NOTE: "targets2" is handled unconditionally above for both V1
            # and V2; the duplicate registration that used to live here made
            # V2 plugins register their targets twice.
        loaded[dist.as_requirement().key] = dist
Example #17
0
        migrate_dist = get_distribution('sqlalchemy-migrate>=0.6')
        migrate = __import__('migrate')
    except DistributionNotFound, ImportError:
        raise SystemExit(
            u"\n"
            u"WARNING: 'sqlalchemy-migrate' not found, this is a bug in setup.py file!\n"
            u"WARNING:  El sistema se ha actualizado pero NO la base de datos. :("
        )

    cwd = os.path.dirname(os.path.abspath(__file__))
    sys_path = copy(sys.path)
    if cwd in sys_path:
        sys_path.remove(cwd)
    req = Requirement.parse(NAME)
    ws = WorkingSet(sys_path)
    old_nobix = ws.find(req)
    new_nobix = get_distribution(NAME)

    if not old_nobix:
        return

    if old_nobix.parsed_version < new_nobix.parsed_version:
        database_uri = get_config_db_uri(old_nobix.location)
        if database_uri:
            print u"\nUpgrading Nobix database %s -> %s ..." % (old_nobix.version, new_nobix.version),
            upgrade_db(uri=database_uri)
        else:
            raise SystemExit(
                u"\n"
                u"WARNING: No se puede obtener el nombre de la base de datos.\n"
                u"WARNING: El sistema se ha actualizado pero NO la base de datos. :("