def find_installed_siblings(tox_python, package_name, sibling_python_packages):
    """Return sibling packages that are already installed in the tox env.

    :param tox_python: python interpreter path handed to
        ``get_installed_packages``
    :param package_name: name of the package under test (always skipped)
    :param sibling_python_packages: dict mapping package name -> source root
    :returns: list of installed package names, in the spelling reported by
        the environment (needed later for the constraint file rewrite)
    """
    installed_sibling_packages = []
    for dep_name in get_installed_packages(tox_python):
        log.append(
            "Found {name} python package installed".format(
                name=dep_name))
        # Fix: compute the filename-normalized form once instead of calling
        # prAPI.to_filename() up to three times per dependency.
        dep_filename = prAPI.to_filename(dep_name)
        if dep_name == package_name or dep_filename == package_name:
            # We don't need to re-process ourself.
            # We've filtered ourselves from the source dir list,
            # but let's be sure nothing is weird.
            log.append(
                "Skipping {name} because it's us".format(
                    name=dep_name))
            continue
        if dep_name in sibling_python_packages:
            log.append(
                "Package {name} on system in {root}".format(
                    name=dep_name,
                    root=sibling_python_packages[dep_name]))
            installed_sibling_packages.append(dep_name)
        elif dep_filename in sibling_python_packages:
            log.append(
                "Package {name} ({pkg_name}) on system in {root}".format(
                    name=dep_name,
                    pkg_name=dep_filename,
                    root=sibling_python_packages[dep_filename]))
            # need to use dep_name here for later constraint file rewrite
            installed_sibling_packages.append(dep_name)
    return installed_sibling_packages
def __init__(self, buildout, name, options):
    """
    Take the eggs we're given, and save the path to them into
    buildout variables.

    :param buildout: the buildout configuration mapping
    :param name: this part's name
    :param options: this part's options mapping
    """
    # set up some internal class stuff we'll need later
    self.buildout = buildout
    self.name = name
    self.options = options
    # check if we have versions information
    # if we don't, there's not a lot we can really do
    # Fix: dict.has_key() was removed in Python 3 -- use the `in` operator.
    if 'versions' not in self.buildout['buildout']:
        # log and bail
        logging.error('Buildout does not have versions information')
        sys.exit(1)
    # get the versions section from buildout
    versions_name = self.buildout['buildout']['versions']
    versions_section = self.buildout[versions_name]
    # These do not change per egg, so compute them once outside the loop.
    egg_directory = self.buildout['buildout']['eggs-directory']
    python_version = '.'.join([str(x) for x in sys.version_info[0:2]])
    # stick this in our local buildout space
    # we also need to add the path to the egg
    # Fix: dict.iteritems() was removed in Python 3 -- use items().
    for k, v in versions_section.items():
        k = to_filename(k)
        v = to_filename(v)
        path_to_egg = os.path.join(
            egg_directory, k + '-' + v + '-py' + python_version + '.egg/')
        self.options.setdefault(k, path_to_egg)
def scan(link):
    """Inspect *link*; if it is a package page under our index, record it.

    Returns a (filename-safe package, filename-safe version) pair, or
    (None, None) when the link is not a package page.
    """
    if not link.startswith(self.index_url):
        return None, None
    tail = link[len(self.index_url):]
    parts = [urllib.parse.unquote(piece) for piece in tail.split("/")]
    if len(parts) != 2 or "#" in parts[1]:
        return None, None
    # it's a package page, sanitize and index it
    pkg = safe_name(parts[0])
    ver = safe_version(parts[1])
    self.package_pages.setdefault(pkg.lower(), {})[link] = True
    return to_filename(pkg), to_filename(ver)
def scan(link):
    """Record *link* when it is a package page under our index URL.

    Returns (filename-safe name, filename-safe version) or (None, None).
    """
    if not link.startswith(self.index_url):
        return None, None
    remainder = link[len(self.index_url):]
    parts = [unquote(chunk) for chunk in remainder.split('/')]
    if len(parts) != 2 or '#' in parts[1]:
        return None, None
    # it's a package page, sanitize and index it
    pkg = safe_name(parts[0])
    ver = safe_version(parts[1])
    self.package_pages.setdefault(pkg.lower(), {})[link] = True
    return to_filename(pkg), to_filename(ver)
def write_egg(project, version_info, egg_data, destdir='.'):
    """Write an egg file, formatting its name per the egg specifications.

    :param project: object with ``name`` and ``version`` attributes
    :param version_info: python version tuple used for the ``py<maj>.<min>``
        filename tag
    :param egg_data: the egg contents to write
    :param destdir: directory to write into (default: current directory)
    :returns: the generated filename (without the directory part)
    """
    filename = '{0}-{1}-py{2[0]}.{2[1]}.egg'.format(
        pkg_resources.to_filename(pkg_resources.safe_name(project.name)),
        pkg_resources.to_filename(pkg_resources.safe_version(project.version)),
        version_info)
    # Fix: use a context manager instead of the manual try/finally close.
    # NOTE(review): eggs are zip archives, so egg_data is presumably bytes;
    # text mode ('w') would then fail on Python 3 -- confirm callers and
    # consider 'wb'. Mode kept as-is to avoid changing behavior for str data.
    with open(os.path.join(destdir, filename), 'w') as f:
        f.write(egg_data)
    return filename
def egg_name(self):
    """Return the egg-style filename for this wheel
    (``name-version-py<maj>.<min>[-platform].egg``)."""
    wheel = self.wheel
    name = pkg_resources.safe_name(wheel.name)
    version = pkg_resources.safe_version(wheel.version)
    pyver = 'py%d.%d' % sys.version_info[:2]
    bits = [pkg_resources.to_filename(name),
            pkg_resources.to_filename(version),
            pyver]
    # Fix: renamed the generator variable so it no longer shadows the
    # outer ``pyver`` local above (confusing, lint-flagged shadowing).
    if any(abi != 'none' or arch != 'any'
           for _tag_pyver, abi, arch in wheel.tags):
        # not pure python
        bits.append(pkg_resources.get_build_platform())
    return '-'.join(bits) + '.egg'
def get_metadata(distribution):
    """Locate the on-disk metadata for *distribution*.

    :returns: a ``(metadata_path, egg_info_name)`` tuple, or None when no
        metadata directory can be found on disk.
    """
    egg_info = '%s.egg-info' % distribution.egg_name()
    candidate = distribution.egg_info
    if candidate is None:
        candidate = '%s/%s' % (distribution.location, egg_info)
    if not os.path.exists(candidate):
        # Fall back to the <project>-<version>.egg-info layout.
        candidate = '%s/%s-%s.egg-info' % (
            distribution.location,
            pkg_resources.to_filename(distribution.project_name),
            pkg_resources.to_filename(distribution.version),
        )
    if not os.path.exists(candidate):
        return None
    return (candidate, egg_info)
def __init__(self, providers_config):
    """Discover eodag plugins and index provider configs by product type.

    :param providers_config: mapping of provider name -> provider
        configuration object; providers without products are dropped.
    """
    self.providers_config = providers_config
    # Load all the plugins. This will make all plugin classes of a particular
    # type to be available in the base plugin class's 'plugins' attribute.
    # For example, by importing module 'eodag.plugins.search.resto', the plugin
    # 'RestoSearch' will be available in self.supported_topics['search'].plugins
    for topic in self.supported_topics:
        # This way of discovering plugins means that anyone can create eodag
        # plugins as a separate python package (though it must require eodag), and
        # have it discovered as long as they declare an entry point of the type
        # 'eodag.plugins.search' for example in its setup script. See the setup
        # script of eodag for an example of how to do this.
        for entry_point in pkg_resources.iter_entry_points(
                "eodag.plugins.{}".format(topic)):
            try:
                entry_point.load()
            except ImportError:
                import traceback as tb
                logger.warning("Unable to load plugin: %s.", entry_point.name)
                logger.warning("Reason:\n%s", tb.format_exc())
                logger.warning(
                    "Check that the plugin module (%s) is importable",
                    entry_point.module_name,
                )
            if entry_point.dist.key != "eodag":
                # use plugin providers if any
                plugin_providers_config_path = [
                    x for x in Path(
                        entry_point.dist.location,
                        pkg_resources.to_filename(entry_point.dist.key),
                    ).rglob("providers.yml")
                ]
                if plugin_providers_config_path:
                    # A third-party plugin ships its own providers.yml:
                    # merge ours into it and adopt the merged result.
                    plugin_providers_config = load_config(
                        plugin_providers_config_path[0])
                    merge_configs(plugin_providers_config,
                                  self.providers_config)
                    self.providers_config = plugin_providers_config
    # Map each product type onto the providers that can serve it,
    # sorted by provider priority (highest first).
    self.product_type_to_provider_config_map = {}
    for provider in list(self.providers_config):
        provider_config = self.providers_config[provider]
        if not hasattr(provider_config, "products"):
            logger.info(
                "%s: provider has no product configured and will be skipped"
                % provider)
            # NOTE(review): this pops from the constructor *argument*, which
            # may no longer be the same object as self.providers_config once
            # a plugin config replaced it above -- confirm this is intended.
            providers_config.pop(provider)
            continue
        for product_type in provider_config.products:
            product_type_providers = (
                self.product_type_to_provider_config_map.
                setdefault(  # noqa
                    product_type, []))
            product_type_providers.append(provider_config)
            product_type_providers.sort(key=attrgetter("priority"),
                                        reverse=True)
    self._built_plugins_cache = {}
def _scan(self, link):
    """Process a URL to see if it's for a package page.

    Returns (filename-safe package, filename-safe version) for a package
    page under our index URL, otherwise ``(None, None)``.
    """
    no_match = (None, None)
    if not link.startswith(self.index_url):
        return no_match
    tail = link[len(self.index_url):]
    parts = [urllib.parse.unquote(piece) for piece in tail.split('/')]
    if len(parts) != 2 or '#' in parts[1]:
        return no_match
    # it's a package page, sanitize and index it
    pkg = safe_name(parts[0])
    ver = safe_version(parts[1])
    self.package_pages.setdefault(pkg.lower(), {})[link] = True
    return to_filename(pkg), to_filename(ver)
def project_vars(self):
    """Build the template variable dict for scaffold rendering.

    Derives project/package/egg names from the output path (or from the
    ``--package-name`` option when given) and maps the installed pyramid
    version onto the matching documentation branch.
    """
    output_dir = self.output_path
    project_name = os.path.basename(os.path.split(output_dir)[1])
    if self.options.package_name is None:
        pkg_name = _bad_chars_re.sub(
            "", project_name.lower().replace("-", "_"))
        safe_name = pkg_resources.safe_name(project_name)
    else:
        pkg_name = self.options.package_name
        safe_name = pkg_name
    egg_name = pkg_resources.to_filename(safe_name)
    # get pyramid package version
    pyramid_version = self.pyramid_dist.version
    # A development ('...dev') version always documents against 'master'.
    if pyramid_version.endswith("dev"):
        pyramid_docs_branch = "master"
    else:
        # Otherwise use the '<major>.<minor>-branch' docs branch, falling
        # back to 'latest' when the version cannot be parsed.
        version_match = re.match(r"(\d+\.\d+)", pyramid_version)
        if version_match is None:
            pyramid_docs_branch = "latest"
        else:
            pyramid_docs_branch = "%s-branch" % version_match.group()
    return {
        "project": project_name,
        "package": pkg_name,
        "egg": egg_name,
        "pyramid_version": pyramid_version,
        "pyramid_docs_branch": pyramid_docs_branch,
    }
def _get_openmdao_packages():
    """Return filename-form dist names for installed openmdao.* packages."""
    # pkg_resources uses a 'safe' name for dists, which replaces all
    # 'illegal' chars with '-'; '_' is an illegal char used in one of our
    # packages, so convert back to the filename form.
    names = []
    for dist in working_set:
        if dist.project_name.startswith('openmdao.'):
            names.append(to_filename(dist.project_name))
    return names
def project_vars(self):
    """Return the scaffold template variables for this project.

    Includes sanitized project/package/egg names plus the installed
    pyramid version and its matching documentation branch.
    """
    output_dir = self.output_path
    project_name = os.path.basename(os.path.split(output_dir)[1])
    pkg_name = _bad_chars_re.sub(
        '', project_name.lower().replace('-', '_'))
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)
    # get pyramid package version
    pyramid_version = self.pyramid_dist.version
    # A development ('...dev') version always documents against 'master'.
    if pyramid_version.endswith('dev'):
        pyramid_docs_branch = 'master'
    else:
        # Otherwise use the '<major>.<minor>-branch' docs branch, or
        # 'latest' when the version cannot be parsed.
        version_match = re.match(r'(\d+\.\d+)', pyramid_version)
        if version_match is None:
            pyramid_docs_branch = 'latest'
        else:
            pyramid_docs_branch = "%s-branch" % version_match.group()
    return {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
        'pyramid_version': pyramid_version,
        'pyramid_docs_branch': pyramid_docs_branch,
    }
def tag(self) -> str:
    """Compose the wheel tag (``impl-abi-platform``) for this build.

    Binary builds derive all three parts from the target environment's
    markers; pure-python builds use the 'any' platform with a
    py2.py3/py3 implementation tag and no ABI.
    """
    if self.meta.build:
        info = self.project.environment.marker_environment
        platform = to_filename(
            safe_name(info["platform_system"] + "-" +
                      info["platform_machine"]))
        implementation = info["implementation_name"]
        # Map the implementation name onto the conventional two-letter
        # wheel prefix (cp/jp/ip/pp), 'unknown' otherwise.
        if implementation.startswith("cp"):
            impl_name = "cp"
        elif implementation.startswith("jp"):
            impl_name = "jp"
        elif implementation.startswith("ir"):
            impl_name = "ip"
        elif implementation.startswith("pypy"):
            impl_name = "pp"
        else:
            impl_name = "unknown"
        # PyPy encodes the full version in its tag; others use major.minor.
        if impl_name == "pp":
            impl_ver = info["python_full_version"].replace(".", "")
        else:
            impl_ver = info["python_version"].replace(".", "")
        impl = impl_name + impl_ver
        abi_tag = get_abi_tag(
            tuple(int(p) for p in info["python_version"].split(".")))
        tag = (impl, abi_tag, platform)
    else:
        platform = "any"
        if self.project.python_requires.supports_py2():
            impl = "py2.py3"
        else:
            impl = "py3"
        tag = (impl, "none", platform)
    return "-".join(tag)
def get_downloaded_python_package_version(name, version=None):
    """Find the version of *name* available in the local pypi archive dir.

    When *version* is given, return it only if a matching archive exists;
    otherwise return the newest archived version, or None when nothing
    matches.
    """
    found = []
    prefix = '{}-'.format(name)
    alt_prefix = '{}-'.format(to_filename(name))
    archives = set(
        PYPI_ARCHIVE_DIR.files('{}*'.format(prefix)) +
        PYPI_ARCHIVE_DIR.files('{}*'.format(alt_prefix)))
    for archive_file in archives:
        archive_filename = archive_file.basename()
        # Locate where the version part of the filename ends: before
        # '.tar.*', before '.zip', or before the first '-' of a wheel tag.
        pos = archive_filename.find('.tar.')
        if pos == -1:
            pos = archive_filename.find('.zip')
        if pos == -1 and archive_file.ext == '.whl':
            pos = archive_filename[len(prefix):].find('-')
            if pos != -1:
                pos += len(prefix)
        if pos == -1:
            continue
        archive_version = archive_filename[len(prefix):pos]
        if version:
            if version == archive_version:
                return version
        else:
            found.append(archive_version)
    found.sort(key=parse_version, reverse=True)
    return found[0] if found else None
def project_vars(self):
    """Assemble the scaffold rendering variables for this project.

    The dict carries the project/package/egg names, the installed pyramid
    version, and the documentation branch that matches it.
    """
    output_dir = self.output_path
    project_name = os.path.basename(os.path.split(output_dir)[1])
    pkg_name = _bad_chars_re.sub('', project_name.lower().replace('-', '_'))
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)
    # get pyramid package version
    pyramid_version = self.pyramid_dist.version
    # 'dev' versions document against 'master'; released versions use the
    # '<major>.<minor>-branch' branch; unparsable versions fall back to
    # 'latest'.
    if pyramid_version.endswith('dev'):
        pyramid_docs_branch = 'master'
    else:
        version_match = re.match(r'(\d+\.\d+)', pyramid_version)
        pyramid_docs_branch = (
            "%s-branch" % version_match.group()
            if version_match is not None
            else 'latest')
    return {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
        'pyramid_version': pyramid_version,
        'pyramid_docs_branch': pyramid_docs_branch,
    }
def scan(link):
    """Process a URL to see if it's for a package page.

    Fix: removed unresolved git merge-conflict markers
    (``<<<<<<<``/``=======``/``>>>>>>>``) that made this function a
    syntax error. Kept the ``urllib.parse.unquote`` spelling from the
    incoming (Python 3) side -- confirm the module imports match.
    """
    if link.startswith(self.index_url):
        parts = list(map(
            urllib.parse.unquote, link[len(self.index_url):].split('/')
        ))
        if len(parts) == 2 and '#' not in parts[1]:
            # it's a package page, sanitize and index it
            pkg = safe_name(parts[0])
            ver = safe_version(parts[1])
            self.package_pages.setdefault(pkg.lower(), {})[link] = True
            return to_filename(pkg), to_filename(ver)
    return None, None
def is_develop_egg(dist):
    """
    Is the distribution installed in development mode (setup.py develop)?

    A develop install leaves a ``<name>.egg-info`` directory next to the
    project's ``setup.py``; check for both.
    """
    meta_provider = dist._provider
    egg_info_dir = os.path.dirname(meta_provider.egg_info)
    egg_name = pkg_resources.to_filename(dist.project_name)
    if not meta_provider.egg_info.endswith(egg_name + ".egg-info"):
        return False
    return os.path.exists(os.path.join(egg_info_dir, "setup.py"))
def _get_egg_info(metadata_config, packages_root):
    """Return the path of the ``.egg-info`` directory for this package.

    The directory lives directly under *packages_root* unless that root is
    the current directory, in which case the bare name is returned.
    """
    egg_name = metadata_config['name']
    egg_info = to_filename(egg_name) + '.egg-info'
    if packages_root == os.curdir:
        return egg_info
    return os.path.join(packages_root, egg_info)
def pre_run(self):
    """Ensure egg-info is fresh, then install and record test requirements."""
    self.egg_name = pkg_resources.safe_name(self.distribution.get_name())
    self.egg_info = "%s.egg-info" % pkg_resources.to_filename(self.egg_name)
    # Regenerate egg-info when it is missing or older than the requires
    # files it was generated from.
    stale = (not os.path.exists(self.egg_info)
             or _newer_requires_files(self.egg_info))
    if stale:
        ei_cmd = self.get_finalized_command('egg_info')
        ei_cmd.run()
    self.install_test_requirements()
    _copy_test_requires_to(self.egg_info)
def get_project_path(self):
    """Build the project in-place and return the egg's on-disk path."""
    self.run_command('egg_info')
    # Build extensions in-place so the checkout itself is importable.
    self.reinitialize_command('build_ext', inplace=1)
    self.run_command('build_ext')
    ei_cmd = self.get_finalized_command("egg_info")
    base = normalize_path(ei_cmd.egg_base)
    return os.path.join(base, to_filename(ei_cmd.egg_name))
def get_local_dist_metadata_filepath(dist):
    """Return the path to *dist*'s metadata file, or None for an
    unrecognized distribution type.

    Dist filename syntax:
    name ["-" version ["-py" pyver ["-" required_platform]]] "." ext
    https://setuptools.readthedocs.io/en/latest/formats.html#filename-embedded-metadata
    """
    def valid_component(component):
        return component[1]

    # Stop taking filename components at the first missing/invalid component
    filename_component = takewhile(valid_component, (
        ('', pkg_resources.to_filename(pkg_resources.safe_name(
            dist.project_name))),
        ('-', pkg_resources.to_filename(
            pkg_resources.safe_version(dist.version))),
        ('-py', dist.py_version),
        ('-', dist.platform),
    ))
    filename = ''.join(chain(*filename_component))
    # Order matters: EggInfo/DistInfo are subclasses of Distribution.
    if isinstance(dist, pkg_resources.EggInfoDistribution):
        ext = 'egg-info'
        metadata_file = 'PKG-INFO'
    elif isinstance(dist, pkg_resources.DistInfoDistribution):
        ext = 'dist-info'
        metadata_file = 'METADATA'
    elif isinstance(dist, pkg_resources.Distribution):
        ext = os.path.join('egg', 'EGG-INFO')
        metadata_file = 'PKG-INFO'
    else:
        # Fix: return before building the path. Previously metadata_file
        # was None here and os.path.join(..., None) raised TypeError
        # instead of reaching the intended `return None`.
        return None
    filename = '{}.{}'.format(filename, ext)
    return os.path.join(dist.location, filename, metadata_file)
def finalize_options(self):
    """Resolve egg name/base and derive the egg-info and defaults paths."""
    self.egg_name = safe_name(self.distribution.get_name())
    log.debug("egg_name = " + self.egg_name)
    if self.egg_base is None:
        package_dirs = self.distribution.package_dir
        self.egg_base = (package_dirs or {}).get('', os.curdir)
    log.debug("egg_base = " + self.egg_base)
    self.ensure_dirname('egg_base')
    # <name>.egg-info lives under egg_base unless that is the cwd.
    self.egg_info = to_filename(self.egg_name) + '.egg-info'
    if self.egg_base != os.curdir:
        self.egg_info = os.path.join(self.egg_base, self.egg_info)
    log.debug("egg_info = " + self.egg_info)
    self.defaults_path = os.path.join(self.egg_info, "pesky_defaults.json")
def egg_filename(name, version):
    """
    Returns name for egg file as generated by :mod:`setuptools`.

    name: string
        Must be alphanumeric.

    version: string
        Must be alphanumeric.
    """
    # Fix: `basestring` does not exist on Python 3; `str` keeps the same
    # validation intent.
    assert name and isinstance(name, str)
    match = _EGG_NAME_RE.search(name)
    if match is None or match.group() != name:
        raise ValueError('Egg name must be alphanumeric')
    assert version and isinstance(version, str)
    match = _EGG_VERSION_RE.search(version)
    if match is None or match.group() != version:
        raise ValueError('Egg version must be alphanumeric')
    name = pkg_resources.to_filename(pkg_resources.safe_name(name))
    version = pkg_resources.to_filename(pkg_resources.safe_version(version))
    # Fix: sys.version[:3] is wrong on Python >= 3.10 (yields '3.1');
    # build the py tag from sys.version_info instead.
    return '%s-%s-py%d.%d.egg' % (
        name, version, sys.version_info[0], sys.version_info[1])
def finalize_options(self):
    """Validate the distribution name/version and compute egg-info paths."""
    # Note: we need to capture the current value returned
    # by `self.tagged_version()`, so we can later update
    # `self.distribution.metadata.version` without
    # repercussions.
    self.egg_name = self.name
    self.egg_version = self.tagged_version()
    parsed_version = parse_version(self.egg_version)
    try:
        # PEP 440 versions may use '=='; arbitrary versions need '==='.
        if isinstance(parsed_version, packaging.version.Version):
            spec = "%s==%s"
        else:
            spec = "%s===%s"
        list(parse_requirements(spec % (self.egg_name, self.egg_version)))
    except ValueError:
        raise distutils.errors.DistutilsOptionError(
            "Invalid distribution name or version syntax: %s-%s"
            % (self.egg_name, self.egg_version)
        )
    if self.egg_base is None:
        package_dirs = self.distribution.package_dir
        self.egg_base = (package_dirs or {}).get('', os.curdir)
    self.ensure_dirname('egg_base')
    self.egg_info = to_filename(self.egg_name) + '.egg-info'
    if self.egg_base != os.curdir:
        self.egg_info = os.path.join(self.egg_base, self.egg_info)
    if '-' in self.egg_name:
        self.check_broken_egg_info()
    # Set package version for the benefit of dumber commands
    # (e.g. sdist, bdist_wininst, etc.)
    self.distribution.metadata.version = self.egg_version
    # If we bootstrapped around the lack of a PKG-INFO, as might be the
    # case in a fresh checkout, make sure that any special tags get added
    # to the version info
    pd = self.distribution._patched_dist
    if pd is not None and pd.key == self.egg_name.lower():
        pd._version = self.egg_version
        pd._parsed_version = parse_version(self.egg_version)
        self.distribution._patched_dist = None
def read_allrevisions(egg, project_name=None):
    """
    Read the allrevisions.txt file from out of an egg file or directory.

    Tries, in order: a zipped egg, an unpacked egg directory, and (when
    *project_name* is given) a source checkout's ``<name>.egg-info``.
    Raises IOError when none of these apply.
    """
    if os.path.isfile(egg):
        # Zipped egg: read the entry straight out of the archive.
        with closing(ZipFile(egg)) as archive:
            return read_allrevisions_file(
                archive.open('EGG-INFO/allrevisions.txt'))
    if os.path.isdir(os.path.join(egg, 'EGG-INFO')):
        # Unpacked egg directory.
        with open(os.path.join(egg, 'EGG-INFO', 'allrevisions.txt')) as fh:
            return read_allrevisions_file(fh)
    if project_name is not None:
        # Develop/source checkout: look inside <name>.egg-info.
        egg_info = os.path.join(
            egg, '%s.egg-info' % pkg_resources.to_filename(project_name))
        if os.path.isdir(egg_info):
            with open(os.path.join(egg_info, 'allrevisions.txt')) as fh:
                return read_allrevisions_file(fh)
    raise IOError('Not an egg', egg)
def render_scaffolds(self):
    """Run every scaffold named on the command line against the target dir."""
    options = self.options
    args = self.args
    project_name = os.path.basename(args[0])
    output_dir = os.path.abspath(os.path.normpath(args[0]))
    pkg_name = _bad_chars_re.sub('', project_name.lower())
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)
    template_vars = {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
    }
    # Preserve the command-line order of requested scaffolds.
    for requested in options.scaffold_name:
        for scaffold in self.scaffolds:
            if scaffold.name == requested:
                scaffold.run(self, output_dir, template_vars)
    return 0
def _get_package_root(name, sibling_packages):
    '''
    Returns a package root from the sibling packages dict.

    If name is not found in sibling_packages, tries again using the
    'filename' form of the name returned by the setuptools package
    resource API.

    :param name: package name
    :param sibling_packages: dict of python packages that zuul has cloned
    :returns: the package root (str)
    :raises: KeyError
    '''
    if name in sibling_packages:
        return sibling_packages[name]
    # Fall back to the filename-normalized spelling; a missing key here
    # propagates the KeyError to the caller, as before.
    return sibling_packages[prAPI.to_filename(name)]
def render_packages(self):
    """Run each requested package template against the output directory."""
    options = self.options
    args = self.args
    project_name = os.path.basename(args[0])
    output_dir = os.path.abspath(os.path.normpath(args[0]))
    pkg_name = _bad_chars_re.sub('', project_name.lower())
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)
    template_vars = {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
    }
    # Preserve the command-line order of requested packages.
    for requested in options.package_name:
        for package in self.packages:
            if package.name == requested:
                package.run(self, output_dir, template_vars)
    return 0
def render_scaffolds(self):
    """Run each requested scaffold against the normalized output directory."""
    options = self.options
    args = self.args
    output_dir = os.path.abspath(os.path.normpath(args[0]))
    project_name = os.path.basename(os.path.split(output_dir)[1])
    pkg_name = _bad_chars_re.sub('', project_name.lower())
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)
    template_vars = {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
    }
    # Preserve the command-line order of requested scaffolds.
    for requested in options.scaffold_name:
        for scaffold in self.scaffolds:
            if scaffold.name == requested:
                scaffold.run(self, output_dir, template_vars)
    return 0
def render_scaffolds(self):
    """Run the requested scaffolds under the current working directory."""
    options = self.options
    args = self.args
    project_name = args[0].lstrip(os.path.sep)
    output_dir = os.path.normpath(os.path.join(os.getcwd(), project_name))
    pkg_name = _bad_chars_re.sub('', project_name.lower())
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)
    template_vars = {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
    }
    # Preserve the command-line order of requested scaffolds.
    for requested in options.scaffold_name:
        for scaffold in self.scaffolds:
            if scaffold.name == requested:
                scaffold.run(self, output_dir, template_vars)
    return True
def read(self):
    """Locate and return the raw PKG-INFO text for ``self.package``.

    Candidate metadata locations are gathered from sys.path entries that
    contain the package and from the package's own directory (and its
    parent), matching both the import name and the entry point's project
    name. Warns and returns None when no PKG-INFO is found.
    """
    import sys
    import glob
    import warnings
    opj = os.path.join
    if self.package is not None:
        # Prefer the package's declared parent package name, falling back
        # to the module name itself.
        package = self.package.__package__
        if package is None:
            package = self.package.__name__
        project = pkg_resources.to_filename(
            pkg_resources.safe_name(self.entry_point.dist.project_name))
        package_pattern = '%s*.egg-info' % package
        project_pattern = '%s*.egg-info' % project
        file = getattr(self.package, '__file__', None)
        if file is not None:
            candidates = []

            def _add_candidate(where):
                # Collect every filesystem match for a metadata pattern.
                candidates.extend(glob.glob(where))

            for entry in sys.path:
                if file.startswith(entry):
                    _add_candidate(opj(entry, 'EGG-INFO'))  # egg?
                    for pattern in (package_pattern,
                                    project_pattern):  # dist-installed?
                        _add_candidate(opj(entry, pattern))
            dir, name = os.path.split(self.package.__file__)
            for pattern in (package_pattern, project_pattern):
                _add_candidate(opj(dir, pattern))
                _add_candidate(opj(dir, '..', pattern))
            for candidate in candidates:
                # A directory candidate holds PKG-INFO inside it; a file
                # candidate is the metadata itself.
                if os.path.isdir(candidate):
                    path = opj(candidate, 'PKG-INFO')
                else:
                    path = candidate
                if os.path.exists(path):
                    with open(path) as f:
                        return f.read()
    warnings.warn('No PKG-INFO found for package: %s' % self.package_name)
def _needs_reinstall(self, setupdir, action):
    """Decide whether the project in *setupdir* must be reinstalled.

    Compares the mtimes of setup.py/setup.cfg against the installed
    egg-info directory found on the environment's sys.path; also bumps
    the egg-info mtime after deciding, so the check is not repeated.
    """
    setup_py = setupdir.join("setup.py")
    setup_cfg = setupdir.join("setup.cfg")
    env = self._get_os_environ()
    # Ask the environment's python for the project name, skipping any
    # pydev-debugger noise lines.
    args = [self.envconfig.envpython, str(setup_py), "--name"]
    output = action.popen(args, cwd=setupdir, redirect=False,
                          returnout=True, env=env, capture_err=False)
    name = ""
    for line in output.split("\n"):
        if line and not line.startswith("pydev debugger:"):
            name = line
            break
    # Fetch the environment's sys.path as JSON so we can search it here.
    args = [
        self.envconfig.envpython,
        "-c",
        "import sys; import json; print(json.dumps(sys.path))",
    ]
    out = action.popen(args, redirect=False, returnout=True, env=env)
    try:
        sys_path = json.loads(out)
    except ValueError:
        sys_path = []
    egg_info_fname = ".".join((to_filename(name), "egg-info"))
    for d in reversed(sys_path):
        egg_info = py.path.local(d).join(egg_info_fname)
        if egg_info.check():
            break
    else:
        # No egg-info anywhere on the path: definitely reinstall.
        return True
    needs_reinstall = any(
        conf_file.check() and conf_file.mtime() > egg_info.mtime()
        for conf_file in (setup_py, setup_cfg))
    # Ensure the modification time of the egg-info folder is updated so we
    # won't need to do this again.
    # TODO(stephenfin): Remove once the minimum version of setuptools is
    # high enough to include https://github.com/pypa/setuptools/pull/1427/
    if needs_reinstall:
        egg_info.setmtime()
    return needs_reinstall
def finalize_options(self):
    """Validate the egg name/version and compute the egg-info location."""
    self.egg_name = safe_name(self.distribution.get_name())
    self.vtags = self.tags()
    self.egg_version = self.tagged_version()
    parsed_version = parse_version(self.egg_version)
    try:
        # PEP 440 versions may use '=='; legacy versions need '==='.
        if isinstance(parsed_version, packaging.version.Version):
            spec = "%s==%s"
        else:
            spec = "%s===%s"
        list(parse_requirements(spec % (self.egg_name, self.egg_version)))
    except ValueError:
        raise distutils.errors.DistutilsOptionError(
            "Invalid distribution name or version syntax: %s-%s"
            % (self.egg_name, self.egg_version))
    if self.egg_base is None:
        package_dirs = self.distribution.package_dir
        self.egg_base = (package_dirs or {}).get("", os.curdir)
    self.ensure_dirname("egg_base")
    self.egg_info = to_filename(self.egg_name) + ".egg-info"
    if self.egg_base != os.curdir:
        self.egg_info = os.path.join(self.egg_base, self.egg_info)
    if "-" in self.egg_name:
        self.check_broken_egg_info()
    # Set package version for the benefit of dumber commands
    # (e.g. sdist, bdist_wininst, etc.)
    self.distribution.metadata.version = self.egg_version
    # If we bootstrapped around the lack of a PKG-INFO, as might be the
    # case in a fresh checkout, make sure that any special tags get added
    # to the version info
    pd = self.distribution._patched_dist
    if pd is not None and pd.key == self.egg_name.lower():
        pd._version = self.egg_version
        pd._parsed_version = parse_version(self.egg_version)
        self.distribution._patched_dist = None
def finalize_options(self):
    """Validate name/version as a requirement and compute egg-info paths."""
    self.egg_name = safe_name(self.distribution.get_name())
    self.vtags = self.tags()
    self.egg_version = self.tagged_version()
    requirement = '%s==%s' % (self.egg_name, self.egg_version)
    try:
        # Parsing the pinned requirement validates both name and version.
        list(parse_requirements(requirement))
    except ValueError:
        raise distutils.errors.DistutilsOptionError(
            "Invalid distribution name or version syntax: %s-%s"
            % (self.egg_name, self.egg_version)
        )
    if self.egg_base is None:
        package_dirs = self.distribution.package_dir
        self.egg_base = (package_dirs or {}).get('', os.curdir)
    self.ensure_dirname('egg_base')
    self.egg_info = to_filename(self.egg_name) + '.egg-info'
    if self.egg_base != os.curdir:
        self.egg_info = os.path.join(self.egg_base, self.egg_info)
    if '-' in self.egg_name:
        self.check_broken_egg_info()
    # Set package version for the benefit of dumber commands
    # (e.g. sdist, bdist_wininst, etc.)
    self.distribution.metadata.version = self.egg_version
    # If we bootstrapped around the lack of a PKG-INFO, as might be the
    # case in a fresh checkout, make sure that any special tags get added
    # to the version info
    pd = self.distribution._patched_dist
    if pd is not None and pd.key == self.egg_name.lower():
        pd._version = self.egg_version
        pd._parsed_version = parse_version(self.egg_version)
        self.distribution._patched_dist = None
def build(self, build_dir: str, **kwargs):
    """Build an sdist tarball into *build_dir* and return its path.

    The archive contains the project files plus a generated PKG-INFO,
    all under a ``<name>-<version>/`` prefix.
    """
    if not os.path.exists(build_dir):
        os.makedirs(build_dir, exist_ok=True)
    context.io.echo("- Building {}...".format(context.io.cyan("sdist")))
    version = to_filename(safe_version(self.meta.version))
    target = os.path.join(
        build_dir, "{}-{}.tar.gz".format(self.meta.project_name, version))
    tar = tarfile.open(target, mode="w:gz", format=tarfile.PAX_FORMAT)
    temp_name = None
    try:
        tar_dir = "{}-{}".format(self.meta.project_name, version)
        files_to_add = self.find_files_to_add(True)
        for relpath in files_to_add:
            tar.add(
                relpath,
                arcname=os.path.join(tar_dir, str(relpath)),
                recursive=False,
            )
            context.io.echo(f" - Adding: {relpath}",
                            verbosity=context.io.DETAIL)
        # Write the generated PKG-INFO through a temp file so tar.add can
        # pick it up from disk.
        fd, temp_name = tempfile.mkstemp(prefix="pkg-info")
        pkg_info = self.format_pkginfo(False).encode("utf-8")
        with open(fd, "wb") as f:
            f.write(pkg_info)
        tar.add(temp_name, arcname=os.path.join(tar_dir, "PKG-INFO"),
                recursive=False)
        context.io.echo(" - Adding: PKG-INFO", verbosity=context.io.DETAIL)
    finally:
        tar.close()
        # Fix: the mkstemp() file used for PKG-INFO was never deleted,
        # leaking one temp file per build.
        if temp_name is not None:
            os.unlink(temp_name)
    context.io.echo("- Built {}".format(
        context.io.cyan(os.path.basename(target))))
    return target
def render_scaffolds(self):
    """Render each requested scaffold with project and pyramid variables."""
    options = self.options
    args = self.args
    output_dir = os.path.abspath(os.path.normpath(args[0]))
    project_name = os.path.basename(os.path.split(output_dir)[1])
    pkg_name = _bad_chars_re.sub(
        '', project_name.lower().replace('-', '_'))
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)
    # get pyramid package version
    pyramid_version = self.pyramid_dist.version
    # 'dev' versions document against 'master'; released versions use
    # '<major>.<minor>-branch'; unparsable versions fall back to 'latest'.
    if pyramid_version.endswith('dev'):
        pyramid_docs_branch = 'master'
    else:
        version_match = re.match(r'(\d+\.\d+)', pyramid_version)
        if version_match is None:
            pyramid_docs_branch = 'latest'
        else:
            pyramid_docs_branch = "%s-branch" % version_match.group()
    template_vars = {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
        'pyramid_version': pyramid_version,
        'pyramid_docs_branch': pyramid_docs_branch,
    }
    # Preserve the command-line order of requested scaffolds.
    for requested in options.scaffold_name:
        for scaffold in self.scaffolds:
            if scaffold.name == requested:
                scaffold.run(self, output_dir, template_vars)
    return 0
def render_scaffolds(self):
    """Run the requested scaffolds with project and pyramid template vars."""
    options = self.options
    args = self.args
    output_dir = os.path.abspath(os.path.normpath(args[0]))
    project_name = os.path.basename(os.path.split(output_dir)[1])
    pkg_name = _bad_chars_re.sub('', project_name.lower().replace('-', '_'))
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)
    # get pyramid package version
    pyramid_version = self.pyramid_dist.version
    # Map the pyramid version onto its documentation branch: 'master' for
    # dev versions, '<major>.<minor>-branch' otherwise, 'latest' fallback.
    if pyramid_version.endswith('dev'):
        pyramid_docs_branch = 'master'
    else:
        version_match = re.match(r'(\d+\.\d+)', pyramid_version)
        pyramid_docs_branch = (
            "%s-branch" % version_match.group()
            if version_match is not None
            else 'latest')
    template_vars = {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
        'pyramid_version': pyramid_version,
        'pyramid_docs_branch': pyramid_docs_branch,
    }
    # Preserve the command-line order of requested scaffolds.
    for requested in options.scaffold_name:
        for scaffold in self.scaffolds:
            if scaffold.name == requested:
                scaffold.run(self, output_dir, template_vars)
    return 0
def install_puppet_modules():
    """Install or upgrade the puppet modules listed in MODULES_FILE_PATH.

    Modules already installed are upgraded only when the requirement calls
    for a different (or strictly newer) version; missing modules are
    installed outright.
    """
    modules_installed = get_modules_installed()
    with open(MODULES_FILE_PATH) as modules_file:
        modules_requirements = modules_file.read().replace('/', '-')
    for module in parse_requirements(modules_requirements):
        current_cmd, compare, version, version_comparison = '', '', '', None
        module_name = to_filename(module.project_name).replace('_', '-', 1)
        if module_name in modules_installed:
            if module.specs:
                compare, version = module.specs[0]
                tmp_version = modules_installed[module_name]
                installed_version = StrictVersion(tmp_version)
                required_version = StrictVersion(version)
                if installed_version >= required_version:
                    version_comparison = 0
                else:
                    version_comparison = -1
            else:
                continue
            # Fix: was `compare is not '>'` -- identity comparison against
            # a string literal is implementation-dependent (SyntaxWarning
            # on modern Python); value inequality is what's meant.
            if version_comparison == 0 and compare != '>':
                # module version installed is equal version
                continue
            else:
                # module version installed is smaller or bigger than version
                current_cmd = 'upgrade'
        else:
            current_cmd = 'install'
        if version and compare and '>' not in compare:
            run(current_cmd, module_name, version)
        else:
            if not version_comparison or version_comparison < 0:
                run(current_cmd, module_name)
def get_downloaded_python_package_version(name, version=None):
    """Look up the archived version(s) of *name* in PYPI_ARCHIVE_DIR.

    Returns *version* when a matching archive exists, the newest archived
    version when *version* is not given, or None when nothing matches.
    """
    collected = []
    prefix = '{}-'.format(name)
    prefix_alt = '{}-'.format(to_filename(name))
    matches = set(
        PYPI_ARCHIVE_DIR.files('{}*'.format(prefix))
        + PYPI_ARCHIVE_DIR.files('{}*'.format(prefix_alt)))
    for archive_file in matches:
        archive_filename = archive_file.basename()
        # The version ends where the archive suffix starts: '.tar.*',
        # '.zip', or the first '-' of a wheel's tag section.
        pos = archive_filename.find('.tar.')
        if pos == -1:
            pos = archive_filename.find('.zip')
        if pos == -1 and archive_file.ext == '.whl':
            pos = archive_filename[len(prefix):].find('-')
            if pos != -1:
                pos += len(prefix)
        if pos == -1:
            continue
        archive_version = archive_filename[len(prefix): pos]
        if version:
            if version == archive_version:
                return version
        else:
            collected.append(archive_version)
    collected.sort(key=parse_version, reverse=True)
    if collected:
        return collected[0]
    return None
def _get_openmdao_packages():
    """Return filename-safe names of all installed 'openmdao.' distributions."""
    # pkg_resources uses a 'safe' name for dists, which replaces all
    # 'illegal' chars with '-'; '_' is an illegal char used in one of our
    # packages, so map each project name back through to_filename().
    openmdao_dists = (dist for dist in working_set
                      if dist.project_name.startswith('openmdao.'))
    return [to_filename(dist.project_name) for dist in openmdao_dists]
def install(self):
    """ Install the part.

    Recreates ``self.lib_path`` from scratch, then copies every egg from
    the part's working set into it: each top-level package (module file or
    package directory) plus the egg's ``.egg-info`` metadata directory.
    Zipped eggs are unpacked to a temporary directory first and cleaned up
    afterwards.

    Returns the list of paths registered via ``self.options.created()``.
    """
    # Start from a clean lib directory so stale files never linger.
    if os.path.isdir(self.lib_path):
        self.logger.info(
            'Removing lib-directory %s.', self.lib_path
        )
        shutil.rmtree(self.lib_path)
    if not os.path.isdir(self.lib_path):
        self.logger.info(
            'Creating lib-directory %s.', self.lib_path
        )
        os.mkdir(self.lib_path)
        # register the directory so buildout can uninstall it later
        self.options.created(self.lib_path)
    self.logger.debug('Ignoring files %s.', self.ignore_files)
    ignore = shutil.ignore_patterns(*self.ignore_files)
    # pylint: disable=no-member
    _, working_set = self.egg.working_set()
    for distribution in working_set:
        self.logger.debug('Considering egg %s.', distribution)
        if distribution.project_name in self.ignore_eggs:
            self.logger.debug('Ignoring egg %s.', distribution)
            continue
        self.logger.info('Copying egg %s.', distribution)
        # Create temp dir (if necessary): zipped eggs must be unpacked
        # before their contents can be copied file-by-file.
        if zipfile.is_zipfile(distribution.location):
            tempdir = tempfile.mkdtemp()
            egg_dir = os.path.basename(distribution.location)
            egg_path = os.path.join(tempdir, egg_dir)
            egg_info_path = os.path.join(egg_path, 'EGG-INFO')
            self.logger.info(
                'Unpacking egg %s to %s.', distribution, egg_path
            )
            setuptools.archive_util.unpack_archive(
                distribution.location, egg_path
            )
            # Replace the distribution we were originally looking at...
            # (the rest of the loop then works on the unpacked copy)
            distribution = pkg_resources.Distribution.from_filename(
                egg_path, metadata=pkg_resources.PathMetadata(
                    egg_path, egg_info_path
                )
            )
        else:
            tempdir = None
        try:
            # Copy egg packages.
            src_root = distribution.location
            dst_root = self.lib_path
            # top_level.txt lists the importable top-level names of the egg
            for package in distribution.get_metadata_lines(
                'top_level.txt'
            ):
                if package in self.ignore_packages:
                    self.logger.debug(
                        'Ignoring package %s.', package
                    )
                    continue
                self.logger.info('Copying package %s.', package)
                package_file = package + '.py'
                package_dir = package
                # a top-level name is either a single module file...
                if os.path.isfile(os.path.join(src_root, package_file)):
                    src = os.path.join(src_root, package_file)
                    dst = os.path.join(dst_root, package_file)
                    self.copyfile(src, dst)
                # ...or a package directory
                elif os.path.isdir(os.path.join(src_root, package_dir)):
                    src = os.path.join(src_root, package_dir)
                    dst = os.path.join(dst_root, package_dir)
                    self.copydir(src, dst, ignore=ignore)
                else:
                    self.logger.warning(
                        'Egg %s is missing package %s.',
                        distribution, package
                    )
            # Copy egg info.
            if os.path.isdir(distribution.egg_info):
                egg_info_dir = pkg_resources.to_filename(
                    distribution.project_name
                ) + '.egg-info'
                src = distribution.egg_info
                dst = os.path.join(self.lib_path, egg_info_dir)
                self.copydir(src, dst)
            else:
                self.logger.warning(
                    'Egg %s is missing egg-info.', distribution
                )
        finally:
            # Remove temp dir (if necessary) even when copying failed.
            if tempdir and os.path.isdir(tempdir):
                shutil.rmtree(tempdir)
    return self.options.created()
def egg_name(dist_name):
    """Return the egg/filename form of *dist_name* (safe name with '-' -> '_')."""
    safe = pkg_resources.safe_name(dist_name)
    return pkg_resources.to_filename(safe)
# tail of the PyPI classifier list (the list's opening bracket is above)
'Topic :: Multimedia :: Graphics',
'Topic :: Office/Business',
'Topic :: Office/Business :: Financial',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Utilities',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules'
];

# "versioninfo"/"getversioninfo": dump the module metadata dict as JSON and exit
if(len(sys.argv)>1 and (sys.argv[1]=="versioninfo" or sys.argv[1]=="getversioninfo")):
    import json;
    pymodule_data = json.dumps(pymodule);
    print(pymodule_data);
    sys.exit();
# "sourceinfo"/"getsourceinfo": print the files recorded in the egg-info
# SOURCES.txt (prefixed with the package directory) and exit
if(len(sys.argv)>1 and (sys.argv[1]=="sourceinfo" or sys.argv[1]=="getsourceinfo")):
    srcinfofilename = os.path.realpath("."+os.path.sep+pkg_resources.to_filename(pymodule['name'])+".egg-info"+os.path.sep+"SOURCES.txt");
    srcinfofile = open(srcinfofilename, "r");
    srcinfodata = srcinfofile.read();
    srcinfofile.close();
    srcinfolist = srcinfodata.split('\n');
    srcfilelist = "";
    srcpdir = os.path.basename(os.path.dirname(os.path.realpath(__file__)));
    for ifile in srcinfolist:
        # prepend each entry, building a space-separated path list
        srcfilelist = "."+os.path.sep+srcpdir+os.path.sep+ifile+" "+srcfilelist;
    # NOTE(review): print/exit placed after the loop — the flattened source is
    # ambiguous here, but the accumulate-then-print pattern implies this.
    print(srcfilelist);
    sys.exit();
# "cleansourceinfo": remove build artifacts (dist/ and the egg-info dir) and exit
if(len(sys.argv)>1 and sys.argv[1]=="cleansourceinfo"):
    os.system("rm -rfv \""+os.path.realpath("."+os.path.sep+"dist\""));
    os.system("rm -rfv \""+os.path.realpath("."+os.path.sep+pkg_resources.to_filename(pymodule['name'])+".egg-info\""));
    sys.exit();
def create_plugin_setup_parameters(identifier="todo", name="TODO", version="0.1", description="TODO",
                                   author="TODO", mail="*****@*****.**", url="TODO", license="AGPLv3",
                                   source_folder=".", additional_data=None, additional_packages=None,
                                   ignored_packages=None, requires=None, extra_requires=None,
                                   cmdclass=None, eggs=None, package=None, dependency_links=None):
    """Build the keyword-argument dict for an OctoPrint plugin's setup() call.

    Normalizes all optional arguments, forces an "OctoPrint" install
    requirement, wires in a ``clean`` command (and babel translation
    commands when a translations directory exists), discovers the plugin's
    sub-packages, and returns everything as a single dict suitable for
    ``setuptools.setup(**result)``.

    Raises:
        ValueError: when requires/eggs is not a list or
            extra_requires/cmdclass is not a dict.
    """
    import pkg_resources

    # default the package name to the conventional octoprint_<identifier>
    if package is None:
        package = "octoprint_{identifier}".format(**locals())
    # normalize all None collection arguments to empty containers
    if additional_data is None:
        additional_data = list()
    if additional_packages is None:
        additional_packages = list()
    if ignored_packages is None:
        ignored_packages = list()
    if dependency_links is None:
        dependency_links = list()

    # OctoPrint itself is always a requirement
    if requires is None:
        requires = ["OctoPrint"]
    if not isinstance(requires, list):
        raise ValueError("requires must be a list")
    if "OctoPrint" not in requires:
        requires = ["OctoPrint"] + list(requires)

    if extra_requires is None:
        extra_requires = dict()
    if not isinstance(extra_requires, dict):
        raise ValueError("extra_requires must be a dict")

    if cmdclass is None:
        cmdclass = dict()
    if not isinstance(cmdclass, dict):
        raise ValueError("cmdclass must be a dict")

    if eggs is None:
        eggs = []
    if not isinstance(eggs, list):
        raise ValueError("eggs must be a list")
    # the plugin's own egg-info glob is always cleaned
    egg = "{name}*.egg-info".format(
        name=pkg_resources.to_filename(pkg_resources.safe_name(name)))
    if egg not in eggs:
        eggs = [egg] + eggs

    # register the custom clean command for the plugin's source folder
    cmdclass.update(
        dict(clean=CleanCommand.for_options(
            source_folder=os.path.join(source_folder, package),
            eggs=eggs)))

    # wire in babel translation commands only when translations exist
    translation_dir = os.path.join(source_folder, "translations")
    pot_file = os.path.join(translation_dir, "messages.pot")
    bundled_dir = os.path.join(source_folder, package, "translations")
    if os.path.isdir(translation_dir) and os.path.isfile(pot_file):
        cmdclass.update(
            get_babel_commandclasses(
                pot_file=pot_file,
                output_dir=translation_dir,
                bundled_dir=bundled_dir,
                pack_name_prefix="{name}-i18n-".format(**locals()),
                pack_path_prefix="_plugins/{identifier}/".format(**locals())))

    from setuptools import find_packages
    # NOTE(review): `filter(...)` concatenated with lists only works on
    # Python 2, where filter returns a list — verify the target interpreter.
    packages = list(
        set([package] + filter(
            lambda x: x.startswith("{package}.".format(package=package)),
            find_packages(where=source_folder, exclude=ignored_packages)) +
            additional_packages))
    print("Found packages: {packages!r}".format(**locals()))

    return dict(
        name=name,
        version=version,
        description=description,
        author=author,
        author_email=mail,
        url=url,
        license=license,
        # adding new commands
        cmdclass=cmdclass,
        # we only have our plugin package to install
        packages=packages,
        # we might have additional data files in sub folders that need to be installed too
        package_data={
            package: package_data_dirs(os.path.join(source_folder, package),
                                       ["static", "templates", "translations"] + additional_data)
        },
        include_package_data=True,
        # If you have any package data that needs to be accessible on the file system, such as templates or static assets
        # this plugin is not zip_safe.
        zip_safe=False,
        install_requires=requires,
        extras_require=extra_requires,
        dependency_links=dependency_links,
        # Hook the plugin into the "octoprint.plugin" entry point, mapping the plugin_identifier to the plugin_package.
        # That way OctoPrint will be able to find the plugin and load it.
        entry_points={
            "octoprint.plugin": ["{identifier} = {package}".format(**locals())]
        })
def wheel_filename(self) -> str:
    """Return the wheel archive filename: ``<name>-<version>-<tag>.whl``."""
    safe_name = to_filename(self.meta.project_name)
    safe_ver = to_filename(safe_version(self.meta.version))
    return "-".join((safe_name, safe_ver, self.tag)) + ".whl"
def dist_info_name(self) -> str:
    """Return the ``.dist-info`` directory name for this distribution."""
    parts = (
        to_filename(self.meta.project_name),
        to_filename(safe_version(self.meta.version)),
    )
    return "{}-{}.dist-info".format(*parts)
def create_plugin_setup_parameters(identifier="todo", name="TODO", version="0.1", description="TODO", author="TODO",
                                   mail="*****@*****.**", url="TODO", license="AGPLv3", source_folder=".",
                                   additional_data=None, additional_packages=None, ignored_packages=None,
                                   requires=None, extra_requires=None, cmdclass=None, eggs=None, package=None,
                                   dependency_links=None):
    """Build the keyword-argument dict for an OctoPrint plugin's setup() call.

    Normalizes optional arguments, forces an "OctoPrint" install requirement,
    registers a ``clean`` command (plus babel translation commands when a
    translations directory exists), discovers sub-packages, and returns a
    dict suitable for ``setuptools.setup(**result)``.

    Raises:
        ValueError: when requires/eggs is not a list or
            extra_requires/cmdclass is not a dict.
    """
    import pkg_resources

    # default the package name to the conventional octoprint_<identifier>
    if package is None:
        package = "octoprint_{identifier}".format(**locals())
    # normalize all None collection arguments to empty containers
    if additional_data is None:
        additional_data = list()
    if additional_packages is None:
        additional_packages = list()
    if ignored_packages is None:
        ignored_packages = list()
    if dependency_links is None:
        dependency_links = list()

    # OctoPrint itself is always a requirement
    if requires is None:
        requires = ["OctoPrint"]
    if not isinstance(requires, list):
        raise ValueError("requires must be a list")
    if "OctoPrint" not in requires:
        requires = ["OctoPrint"] + list(requires)

    if extra_requires is None:
        extra_requires = dict()
    if not isinstance(extra_requires, dict):
        raise ValueError("extra_requires must be a dict")

    if cmdclass is None:
        cmdclass = dict()
    if not isinstance(cmdclass, dict):
        raise ValueError("cmdclass must be a dict")

    if eggs is None:
        eggs = []
    if not isinstance(eggs, list):
        raise ValueError("eggs must be a list")
    # the plugin's own egg-info glob is always cleaned
    egg = "{name}*.egg-info".format(name=pkg_resources.to_filename(pkg_resources.safe_name(name)))
    if egg not in eggs:
        eggs = [egg] + eggs

    # register the custom clean command for the plugin's source folder
    cmdclass.update(dict(
        clean=CleanCommand.for_options(source_folder=os.path.join(source_folder, package), eggs=eggs)
    ))

    # wire in babel translation commands only when translations exist
    translation_dir = os.path.join(source_folder, "translations")
    pot_file = os.path.join(translation_dir, "messages.pot")
    bundled_dir = os.path.join(source_folder, package, "translations")
    if os.path.isdir(translation_dir) and os.path.isfile(pot_file):
        cmdclass.update(get_babel_commandclasses(pot_file=pot_file, output_dir=translation_dir,
                                                 bundled_dir=bundled_dir,
                                                 pack_name_prefix="{name}-i18n-".format(**locals()),
                                                 pack_path_prefix="_plugins/{identifier}/".format(**locals())))

    from setuptools import find_packages
    # NOTE(review): `filter(...)` concatenated with lists only works on
    # Python 2, where filter returns a list — verify the target interpreter.
    packages = list(set([package] + filter(lambda x: x.startswith("{package}.".format(package=package)),
                                           find_packages(where=source_folder, exclude=ignored_packages)) +
                        additional_packages))
    print("Found packages: {packages!r}".format(**locals()))

    return dict(
        name=name,
        version=version,
        description=description,
        author=author,
        author_email=mail,
        url=url,
        license=license,

        # adding new commands
        cmdclass=cmdclass,

        # we only have our plugin package to install
        packages=packages,

        # we might have additional data files in sub folders that need to be installed too
        package_data={package: package_data_dirs(os.path.join(source_folder, package),
                                                 ["static", "templates", "translations"] + additional_data)},
        include_package_data=True,

        # If you have any package data that needs to be accessible on the file system, such as templates or static assets
        # this plugin is not zip_safe.
        zip_safe=False,

        install_requires=requires,
        extras_require=extra_requires,
        dependency_links=dependency_links,

        # Hook the plugin into the "octoprint.plugin" entry point, mapping the plugin_identifier to the plugin_package.
        # That way OctoPrint will be able to find the plugin and load it.
        entry_points={
            "octoprint.plugin": ["{identifier} = {package}".format(**locals())]
        }
    )
(self.egg_name, self.egg_version) ) ======= except ValueError as e: raise distutils.errors.DistutilsOptionError( "Invalid distribution name or version syntax: %s-%s" % (self.egg_name, self.egg_version) ) from e >>>>>>> 7e5c5fbd6c824de4d4c2b62da3f7cae87d462119 if self.egg_base is None: dirs = self.distribution.package_dir self.egg_base = (dirs or {}).get('', os.curdir) self.ensure_dirname('egg_base') self.egg_info = to_filename(self.egg_name) + '.egg-info' if self.egg_base != os.curdir: self.egg_info = os.path.join(self.egg_base, self.egg_info) if '-' in self.egg_name: self.check_broken_egg_info() # Set package version for the benefit of dumber commands # (e.g. sdist, bdist_wininst, etc.) # self.distribution.metadata.version = self.egg_version # If we bootstrapped around the lack of a PKG-INFO, as might be the # case in a fresh checkout, make sure that any special tags get added # to the version info # pd = self.distribution._patched_dist
def _get_egg_info_name(self):
    """Return the path of this distribution's ``.egg-info`` directory.

    The directory lives under the distribution's root package dir
    (falling back to the current directory) and is named after the
    filename-safe form of the distribution name.
    """
    safe_name = pkg_resources.safe_name(self.distribution.get_name())
    package_dirs = self.distribution.package_dir or dict()
    base_dir = package_dirs.get("", os.curdir)
    return os.path.join(base_dir,
                        pkg_resources.to_filename(safe_name) + ".egg-info")