def read_template(self, path_or_file):
    """Read and parse a manifest template.

    ``path_or_file`` may be a filesystem path or an open file-like
    object; either way the stream is closed after reading.  Each
    non-empty, non-comment template line is handed to
    ``_process_template_line``; parse errors are logged as warnings
    instead of aborting.
    """
    # Accept both a path and an already-open file object.
    if isinstance(path_or_file, str):
        stream = open(path_or_file)
    else:
        stream = path_or_file

    try:
        text = stream.read()
        # Unwrap backslash-continued lines, then strip comment lines
        # and empty lines before splitting.
        text = _COLLAPSE_PATTERN.sub('', text)
        text = _COMMENTED_LINE.sub('', text)
        cleaned = [chunk.strip() for chunk in text.split('\n')]
    finally:
        stream.close()

    for template_line in cleaned:
        if template_line == '':
            continue
        try:
            self._process_template_line(template_line)
        except PackagingTemplateError as msg:
            logger.warning("%s, %s", path_or_file, msg)
def run(self):
    """Remove build byproducts: the temp dir always, the other build
    directories when --all was given, and finally the (possibly now
    empty) base build directory."""

    def _zap(path, missing_log, missing_msg):
        # Remove *path* (or only log under --dry-run); report via
        # *missing_log* when there is nothing to remove.
        if os.path.exists(path):
            if self.dry_run:
                logger.info('removing %s', path)
            else:
                rmtree(path)
        else:
            missing_log(missing_msg, path)

    # remove the build/temp.<plat> directory (unless it's already gone)
    _zap(self.build_temp, logger.debug,
         "'%s' does not exist -- can't clean it")

    if self.all:
        # remove the remaining build directories
        for build_dir in (self.build_lib, self.bdist_base,
                          self.build_scripts):
            _zap(build_dir, logger.warning,
                 "'%s' does not exist -- can't clean it")

    # just for the heck of it, try to remove the base build directory:
    # we might have emptied it right now, but if not we don't care
    if not self.dry_run:
        try:
            os.rmdir(self.build_base)
            logger.info("removing '%s'", self.build_base)
        except OSError:
            pass
def _process_url(self, url, project_name=None, follow_links=True):
    """Process an url and search for distributions packages.

    For each URL found, if it's a download, creates a PyPIdistribution
    object. If it's a homepage and we can follow links, process it too.

    :param url: the url to process
    :param project_name: the project name we are searching for.
    :param follow_links: Do not want to follow links more than from one
                         level. This parameter tells if we want to follow
                         the links we find (eg. run recursively this
                         method on it)
    """
    with self._open_url(url) as f:
        # base_url may differ from url after redirects
        base_url = f.url
        if url not in self._processed_urls:
            # remember this page so it is never scanned twice
            self._processed_urls.append(url)
            link_matcher = self._get_link_matcher(url)
            for link, is_download in link_matcher(f.read().decode(),
                                                  base_url):
                if link not in self._processed_urls:
                    if self._is_distribution(link) or is_download:
                        self._processed_urls.append(link)
                        # it's a distribution, so create a dist object
                        try:
                            infos = get_infos_from_url(link, project_name,
                                is_external=self.index_url not in url)
                        except CantParseArchiveName as e:
                            # unparseable filename: skip this link only
                            logger.warning(
                                "version has not been parsed: %s", e)
                        else:
                            self._register_release(release_info=infos)
                    else:
                        # homepage-like link: recurse one level at most
                        # (recursive calls pass follow_links=False)
                        if self._is_browsable(link) and follow_links:
                            self._process_url(link, project_name,
                                follow_links=False)
def get_file_list(self):
    """Figure out the list of files to include in the source
    distribution, and put it in 'self.filelist'.

    Reads the inline manifest template built from
    ``distribution.extra_files`` when one is present, optionally adds
    the default file set, runs any registered manifest builders,
    prunes, and finally writes the manifest.
    """
    have_template = bool(self.distribution.extra_files)
    if not have_template:
        logger.warning('%s: using default file list',
                       self.get_command_name())
    self.filelist.findall()

    if self.use_defaults:
        self.add_defaults()

    if have_template:
        template = '\n'.join(self.distribution.extra_files)
        self.filelist.read_template(StringIO(template))

    # call manifest builders, if any.
    for builder in self.manifest_builders:
        builder(self.distribution, self.filelist)

    if self.prune:
        self.prune_file_list()

    self.filelist.write(self.manifest)
def _metadata(dispatcher, args, **kw):
    """Print metadata fields of a named installed project, or of the
    project described by a local setup.cfg; -f limits the fields."""
    opts = _parse_args(args[1:], 'f:', [])
    if opts['args']:
        project = opts['args'][0]
        dist = get_distribution(project, use_egg_info=True)
        if dist is None:
            logger.warning('%r not installed', project)
            return 1
    elif os.path.isfile('setup.cfg'):
        logger.info('searching local dir for metadata')
        dist = Distribution()  # XXX use config module
        dist.parse_config_files()
    else:
        logger.warning('no argument given and no local setup.cfg found')
        return 1

    metadata = dist.metadata

    # -f restricts output to the requested fields (unknown ones are
    # silently dropped by the filter)
    if 'f' in opts:
        wanted = (field for field in opts['f'] if field in metadata)
    else:
        wanted = metadata.keys()

    for field in wanted:
        if field in metadata:
            print(metadata._convert_name(field) + ':')
            value = metadata[field]
            if isinstance(value, list):
                for item in value:
                    print(' ', item)
            else:
                print(' ', value.replace('\n', '\n '))
def check_module(self, module, module_file):
    """Return True when *module_file* exists; otherwise log a warning
    naming the missing file and its module, and return False."""
    if os.path.isfile(module_file):
        return True
    logger.warning("file %r (for module %r) not found",
                   module_file, module)
    return False
def check_package(self, package, package_dir):
    """Helper function for find_package_modules and find_modules.

    Validate that *package_dir* exists and is a directory (an empty
    string means the current directory, which is assumed to exist),
    then return the path of its __init__.py, or None when there is no
    init file or *package* is the root package.
    """
    # Empty dir name means current directory, which we can probably
    # assume exists.  os.path.exists and isdir don't know about that
    # convention, so skip the checks entirely in that case.
    if package_dir != "":
        if not os.path.exists(package_dir):
            raise PackagingFileError(
                "package directory '%s' does not exist" % package_dir)
        if not os.path.isdir(package_dir):
            raise PackagingFileError(
                "supposed package directory '%s' exists, "
                "but is not a directory" % package_dir)

    # Require __init__.py for all but the "root package"
    if package:
        init_py = os.path.join(package_dir, "__init__.py")
        if os.path.isfile(init_py):
            return init_py
        logger.warning("package init file %r not found "
                       "(or not a regular file)", init_py)

    # Either not in a package at all (__init__.py not expected), or
    # __init__.py doesn't exist -- so don't return the filename.
    return None
def build_extensions(self):
    """Build every registered extension; failures of extensions marked
    optional are downgraded to warnings instead of aborting."""
    for extension in self.extensions:
        try:
            self.build_extension(extension)
        except (CCompilerError, PackagingError, CompileError) as exc:
            if not extension.optional:
                raise
            logger.warning('%s: building extension %r failed: %s',
                           self.get_command_name(), extension.name, exc)
def install(self):
    """Copy the build directory into the installation directory.

    Return the list of installed files, or None (with a warning) when
    nothing has been built.
    """
    if not os.path.isdir(self.build_dir):
        logger.warning(
            '%s: %r does not exist -- no Python modules to install',
            self.get_command_name(), self.build_dir)
        return None
    return self.copy_tree(self.build_dir, self.install_dir)
def expand_categories(self, path_with_categories):
    """Substitute {category} placeholders in *path_with_categories*
    and warn when some remain unexpanded."""
    substitutions = get_paths()
    substitutions['distribution.name'] = self.distribution.metadata['Name']
    # Two passes: a value inserted by the first pass may itself
    # contain {placeholders}.
    expanded = format_value(path_with_categories, substitutions)
    expanded = format_value(expanded, substitutions)
    if '{' in expanded and '}' in expanded:
        logger.warning(
            '%s: unable to expand %s, some categories may be missing',
            self.get_command_name(), path_with_categories)
    return expanded
def _graph(dispatcher, args, **kw):
    """Print the dependency-graph node of one installed distribution."""
    name = args[1]
    dist = get_distribution(name, use_egg_info=True)
    if dist is None:
        logger.warning('Distribution not found.')
        return 1
    dists = get_distributions(use_egg_info=True)
    graph = generate_graph(dists)
    print(graph.repr_node(dist))
def finalize_options(self):
    """Resolve build_lib, warn about uninstalled test dependencies and
    require some way of discovering tests."""
    self.build_lib = self.get_finalized_command("build").build_lib
    for requirement in self.tests_require:
        if get_distribution(requirement) is None:
            logger.warning("test dependency %s is not installed, "
                           "tests may fail", requirement)
    no_discovery = (not self.suite and not self.runner and
                    self.get_ut_with_discovery() is None)
    if no_discovery:
        raise PackagingOptionError(
            "no test discovery available, please give a 'suite' or "
            "'runner' option or install unittest2")
def create_path_file(self):
    """Creates the .pth file (or log why it was skipped)."""
    pth_path = os.path.join(self.install_libbase,
                            self.path_file + ".pth")
    if not self.install_path_file:
        logger.warning('%s: path file %r not created',
                       self.get_command_name(), pth_path)
        return
    self.execute(write_file, (pth_path, [self.extra_dirs]),
                 "creating %s" % pth_path)
def __init__(self, name, sources, include_dirs=None, define_macros=None,
             undef_macros=None, library_dirs=None, libraries=None,
             runtime_library_dirs=None, extra_objects=None,
             extra_compile_args=None, extra_link_args=None,
             export_symbols=None, swig_opts=None, depends=None,
             language=None, optional=None, **kw):
    """Describe an extension module.

    *name* must be a string and *sources* a list of strings; every
    other list-valued argument defaults to a fresh empty list.
    Unknown keyword arguments are reported with a warning but
    otherwise ignored.
    """
    if not isinstance(name, str):
        raise AssertionError("'name' must be a string")
    if not isinstance(sources, list):
        raise AssertionError("'sources' must be a list of strings")
    for source in sources:
        if not isinstance(source, str):
            raise AssertionError("'sources' must be a list of strings")

    self.name = name
    self.sources = sources
    # every optional list argument falls back to an empty list
    self.include_dirs = include_dirs or []
    self.define_macros = define_macros or []
    self.undef_macros = undef_macros or []
    self.library_dirs = library_dirs or []
    self.libraries = libraries or []
    self.runtime_library_dirs = runtime_library_dirs or []
    self.extra_objects = extra_objects or []
    self.extra_compile_args = extra_compile_args or []
    self.extra_link_args = extra_link_args or []
    self.export_symbols = export_symbols or []
    self.swig_opts = swig_opts or []
    self.depends = depends or []
    self.language = language
    self.optional = optional

    # If there are unknown keyword options, warn about them
    if kw:
        unknown = ", ".join(sorted(repr(option) for option in kw))
        logger.warning("unknown arguments given to Extension: %s",
                       unknown)
def run(self):
    """Copy every data file to its (category-expanded) destination,
    recording results in outfiles and data_files_out."""
    self.mkpath(self.install_dir)
    for source, target in self.data_files.items():
        destination = convert_path(self.expand_categories(target))
        dest_dir = os.path.abspath(os.path.dirname(destination))
        self.mkpath(dest_dir)
        try:
            installed = self.copy_file(source, dest_dir)[0]
        except Error as e:
            # best-effort: log the failure and record the intended path
            logger.warning('%s: %s', self.get_command_name(), e)
            installed = destination
        self.outfiles.append(installed)
        self.data_files_out.append((source, destination))
def make_release_tree(self, base_dir, files):
    """Create the directory tree that will become the source
    distribution archive.

    All directories implied by the filenames in 'files' are created
    under 'base_dir', and then we hard link or copy (if hard linking
    is unavailable) those files into place.  Essentially, this
    duplicates the developer's source tree, but in a directory named
    after the distribution, containing only the files to be
    distributed.
    """
    # Create all the directories under 'base_dir' necessary to put
    # 'files' there; the 'mkpath()' is just so we don't die if the
    # manifest happens to be empty.
    self.mkpath(base_dir)
    self.create_tree(base_dir, files, dry_run=self.dry_run)

    # Prefer hard links when the platform supports them (usually every
    # file is "out of date" because base_dir is blown away between
    # runs); otherwise fall back to plain copies.
    if hasattr(os, 'link'):
        link_mode = 'hard'
        progress = "making hard links in %s..." % base_dir
    else:
        link_mode = None
        progress = "copying files to %s..." % base_dir

    if files:
        logger.info(progress)
    else:
        logger.warning("no files to distribute -- empty manifest?")

    # every file named in requires_files must have been selected
    for required in self.distribution.metadata.requires_files:
        if required not in files:
            raise PackagingFileError(
                "'%s' must be included explicitly in 'extra_files'"
                % required)

    for filename in files:
        if not os.path.isfile(filename):
            logger.warning("'%s' not a regular file -- skipping",
                           filename)
            continue
        self.copy_file(filename, os.path.join(base_dir, filename),
                       link=link_mode)

    self.distribution.metadata.write(os.path.join(base_dir, 'PKG-INFO'))
def _remove(distpatcher, args, **kw):
    """Uninstall the given distributions; -y skips confirmation.

    Returns 0 on success, 1 if any removal failed.
    """
    # NOTE(review): the first parameter is spelled 'distpatcher' in the
    # original; kept as-is since it is part of the signature.
    opts = _parse_args(args[1:], 'y', [])
    auto_confirm = 'y' in opts
    retcode = 0
    for dist in set(opts['args']):
        try:
            remove(dist, auto_confirm=auto_confirm)
        except PackagingError:
            logger.warning('%r not installed', dist)
            retcode = 1
    return retcode
def _install(dispatcher, args, **kw):
    """Install from a source directory, an archive file or PyPI,
    defaulting to the project in the current directory."""
    # first check if we are in a source directory
    if len(args) < 2:
        # no explicit target: are we inside a project dir?
        if not (os.path.isfile('setup.cfg') or os.path.isfile('setup.py')):
            logger.warning('No project to install.')
            return 1
        args.insert(1, os.getcwd())

    target = args[1]
    # installing from a source dir or archive file?
    if os.path.isdir(target) or _is_archive_file(target):
        return not install_local_project(target)
    # otherwise download from PyPI
    return not install(target)
def search_projects(self, name=None, operator="or", **kwargs):
    """Find using the keys provided in kwargs.

    You can set operator to "and" or "or".
    """
    for field in kwargs:
        if field not in _SEARCH_FIELDS:
            raise InvalidSearchField(field)
    if name:
        kwargs["name"] = name

    projects = self.proxy.search(kwargs, operator)
    for entry in projects:
        project = self._get_project(entry['name'])
        try:
            release = ReleaseInfo(entry['name'], entry['version'],
                                  metadata={'summary': entry['summary']},
                                  index=self._index)
            project.add_release(release=release)
        except IrrationalVersionError as e:
            logger.warning("Irrational version error found: %s", e)
    return [self._projects[entry['name'].lower()] for entry in projects]
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries): """Generate linker options for searching library directories and linking with specific libraries. 'libraries' and 'library_dirs' are, respectively, lists of library names (not filenames!) and search directories. Returns a list of command-line options suitable for use with some compiler (depending on the two format strings passed in). """ lib_opts = [] for dir in library_dirs: lib_opts.append(compiler.library_dir_option(dir)) for dir in runtime_library_dirs: opt = compiler.runtime_library_dir_option(dir) if isinstance(opt, list): lib_opts.extend(opt) else: lib_opts.append(opt) # XXX it's important that we *not* remove redundant library mentions! # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to # resolve all symbols. I just hope we never have to say "-lfoo obj.o # -lbar" to get things to work -- that's certainly a possibility, but a # pretty nasty way to arrange your C code. for lib in libraries: lib_dir, lib_name = os.path.split(lib) if lib_dir != '': lib_file = compiler.find_library_file([lib_dir], lib_name) if lib_file is not None: lib_opts.append(lib_file) else: logger.warning("no library file corresponding to " "'%s' found (skipping)" % lib) else: lib_opts.append(compiler.library_option(lib)) return lib_opts
def add_defaults(self):
    """Add all default files to self.filelist.

    In addition to the setup.cfg file, this will include all files
    returned by the get_source_files of every registered command.  This
    will find Python modules and packages, data files listed in
    package_data_, data_files and extra_files, scripts, C sources of
    extension modules or C libraries (headers are missing).
    """
    if os.path.exists('setup.cfg'):
        self.filelist.append('setup.cfg')
    else:
        logger.warning("%s: standard 'setup.cfg' file not found",
                       self.get_command_name())

    # ask every command for its source files, skipping commands whose
    # options cannot be finalized
    for command_name in get_command_names():
        try:
            finalized = self.get_finalized_command(command_name)
        except PackagingOptionError:
            continue
        self.filelist.extend(finalized.get_source_files())
def install_local_project(path):
    """Install a distribution from a source directory or archive.

    If the source directory contains a setup.py install using distutils1.
    If a setup.cfg is found, install using the install_dist command.

    Returns True on success, False on Failure.
    """
    path = os.path.abspath(path)
    if os.path.isdir(path):
        logger.info('Installing from source directory: %r', path)
        return _run_install_from_dir(path)
    if _is_archive_file(path):
        logger.info('Installing from archive: %r', path)
        # unpack into a scratch dir that is always cleaned up
        unpacked = tempfile.mkdtemp()
        try:
            shutil.unpack_archive(path, unpacked)
            return _run_install_from_archive(unpacked)
        finally:
            shutil.rmtree(unpacked)
    logger.warning('No project to install.')
    return False
def _list(dispatcher, args, **kw):
    """Print installed distributions; with arguments, only those whose
    lowercased name was given."""
    opts = _parse_args(args[1:], '', [])
    dists = get_distributions(use_egg_info=True)
    if opts['args']:
        results = (d for d in dists if d.name.lower() in opts['args'])
        listall = False
    else:
        results = dists
        listall = True

    number = 0
    for dist in results:
        print('%r %s (from %r)' % (dist.name, dist.version, dist.path))
        number += 1

    if number:
        logger.info('Found %d projects installed.', number)
    elif listall:
        logger.info('Nothing seems to be installed.')
    else:
        # a filter was given but nothing matched
        logger.warning('No matching distribution found.')
        return 1
def set(self, name, value):
    """Control then set a metadata field."""
    name = self._convert_name(name)

    # Coerce comma-separated strings into lists for fields that hold
    # multiple simple values (plus 'Platform'); non-string scalars
    # become an empty list.
    if ((name in _ELEMENTSFIELD or name == 'Platform') and
        not isinstance(value, (list, tuple))):
        if isinstance(value, str):
            value = [v.strip() for v in value.split(',')]
        else:
            value = []
    elif (name in _LISTFIELDS and
          not isinstance(value, (list, tuple))):
        # list fields: wrap a bare string in a one-element list
        if isinstance(value, str):
            value = [value]
        else:
            value = []

    # Validation only produces warnings, so skip it entirely when
    # warnings would be discarded anyway.
    if logger.isEnabledFor(logging.WARNING):
        project_name = self['Name']

        if name in _PREDICATE_FIELDS and value is not None:
            for v in value:
                # check that the values are valid predicates
                if not is_valid_predicate(v.split(';')[0]):
                    logger.warning(
                        '%r: %r is not a valid predicate (field %r)',
                        project_name, v, name)
        # FIXME this rejects UNKNOWN, is that right?
        elif name in _VERSIONS_FIELDS and value is not None:
            if not is_valid_versions(value):
                logger.warning('%r: %r is not a valid version (field %r)',
                               project_name, value, name)
        elif name in _VERSION_FIELDS and value is not None:
            if not is_valid_version(value):
                logger.warning('%r: %r is not a valid version (field %r)',
                               project_name, value, name)

    if name in _UNICODEFIELDS:
        # Description bodies carry a leading '|' line prefix that must
        # be stripped before storage.
        if name == 'Description':
            value = self._remove_line_prefix(value)

    self._fields[name] = value
    self._set_best_version()
def warn(self, msg, *args):
    """Wrapper around logging that also remembers messages."""
    # XXX we could use a special handler for this, but would need to test
    # if it works even if the logger has a too high level
    self._warnings.append((msg, args))
    prefixed = '%s: %s' % (self.get_command_name(), msg)
    return logger.warning(prefixed, *args)
def link(self, target_desc, objects, output_filename, output_dir=None,
         libraries=None, library_dirs=None, runtime_library_dirs=None,
         export_symbols=None, debug=False, extra_preargs=None,
         extra_postargs=None, build_temp=None, target_lang=None):
    """Link object files into an executable or shared library with the
    Borland C++ linker, building its peculiar comma-delimited command
    line."""
    # XXX this ignores 'build_temp'!  should follow the lead of
    # msvccompiler.py
    objects, output_dir = self._fix_object_args(objects, output_dir)
    libraries, library_dirs, runtime_library_dirs = \
        self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
    if runtime_library_dirs:
        logger.warning("don't know what to do with "
                       "'runtime_library_dirs': %r", runtime_library_dirs)
    if output_dir is not None:
        output_filename = os.path.join(output_dir, output_filename)
    if self._need_link(objects, output_filename):
        # Figure out linker args based on type of target.
        if target_desc == CCompiler.EXECUTABLE:
            startup_obj = 'c0w32'
            if debug:
                ld_args = self.ldflags_exe_debug[:]
            else:
                ld_args = self.ldflags_exe[:]
        else:
            startup_obj = 'c0d32'
            if debug:
                ld_args = self.ldflags_shared_debug[:]
            else:
                ld_args = self.ldflags_shared[:]

        # Create a temporary exports file for use by the linker
        if export_symbols is None:
            def_file = ''
        else:
            head, tail = os.path.split(output_filename)
            modname, ext = os.path.splitext(tail)
            temp_dir = os.path.dirname(objects[0])  # preserve tree structure
            def_file = os.path.join(temp_dir, '%s.def' % modname)
            contents = ['EXPORTS']
            for sym in (export_symbols or []):
                contents.append(' %s=_%s' % (sym, sym))
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

        # Borland C++ has problems with '/' in paths
        objects2 = [os.path.normpath(o) for o in objects]
        # split objects in .obj and .res files
        # Borland C++ needs them at different positions in the command line
        objects = [startup_obj]
        resources = []
        for file in objects2:
            base, ext = os.path.splitext(os.path.normcase(file))
            if ext == '.res':
                resources.append(file)
            else:
                objects.append(file)

        for l in library_dirs:
            ld_args.append("/L%s" % os.path.normpath(l))
        ld_args.append("/L.")  # we sometimes use relative paths

        # list of object files
        ld_args.extend(objects)

        # XXX the command line syntax for Borland C++ is a bit wonky;
        # certain filenames are jammed together in one big string, but
        # comma-delimited.  This doesn't mesh too well with the
        # Unix-centric attitude (with a DOS/Windows quoting hack) of
        # 'spawn()', so constructing the argument list is a bit
        # awkward.  Note that doing the obvious thing and jamming all
        # the filenames and commas into one argument would be wrong,
        # because 'spawn()' would quote any filenames with spaces in
        # them.  Arghghh!.  Apparently it works fine as coded...

        # name of dll/exe file
        ld_args.extend((',', output_filename))
        # no map file and start libraries
        ld_args.append(',,')

        for lib in libraries:
            # see if we find it and if there is a bcpp specific lib
            # (xxx_bcpp.lib)
            libfile = self.find_library_file(library_dirs, lib, debug)
            if libfile is None:
                ld_args.append(lib)
                # probably a BCPP internal library -- don't warn
            else:
                # full name which prefers bcpp_xxx.lib over xxx.lib
                ld_args.append(libfile)

        # some default libraries
        ld_args.append('import32')
        ld_args.append('cw32mt')

        # def file for export symbols
        ld_args.extend((',', def_file))

        # add resource files
        ld_args.append(',')
        ld_args.extend(resources)

        if extra_preargs:
            ld_args[:0] = extra_preargs
        if extra_postargs:
            ld_args.extend(extra_postargs)

        self.mkpath(os.path.dirname(output_filename))
        try:
            self.spawn([self.linker] + ld_args)
        except PackagingExecError as msg:
            raise LinkError(msg)
    else:
        logger.debug("skipping %s (up-to-date)", output_filename)
def finalize_options(self):
    """Compute the final values of every installation directory
    option."""
    # This method (and its pliant slaves, like 'finalize_unix()',
    # 'finalize_other()', and 'select_scheme()') is where the default
    # installation directories for modules, extension modules, and
    # anything else we care to install from a Python module
    # distribution.  Thus, this code makes a pretty important policy
    # statement about how third-party stuff is added to a Python
    # installation!  Note that the actual work of installation is done
    # by the relatively simple 'install_*' commands; they just take
    # their orders from the installation directory options determined
    # here.

    # Check for errors/inconsistencies in the options; first, stuff
    # that's wrong on any platform.

    if ((self.prefix or self.exec_prefix or self.home) and
        (self.install_base or self.install_platbase)):
        raise PackagingOptionError(
            "must supply either prefix/exec-prefix/home or "
            "install-base/install-platbase -- not both")

    if self.home and (self.prefix or self.exec_prefix):
        raise PackagingOptionError(
            "must supply either home or prefix/exec-prefix -- not both")

    if self.user and (self.prefix or self.exec_prefix or self.home or
                      self.install_base or self.install_platbase):
        raise PackagingOptionError(
            "can't combine user with prefix/exec_prefix/home or "
            "install_base/install_platbase")

    # Next, stuff that's wrong (or dubious) only on certain platforms.
    if os.name != "posix":
        if self.exec_prefix:
            logger.warning(
                '%s: exec-prefix option ignored on this platform',
                self.get_command_name())
            self.exec_prefix = None

    # Now the interesting logic -- so interesting that we farm it out
    # to other methods.  The goal of these methods is to set the final
    # values for the install_{lib,scripts,data,...} options, using as
    # input a heady brew of prefix, exec_prefix, home, install_base,
    # install_platbase, user-supplied versions of
    # install_{purelib,platlib,lib,scripts,data,...}, and the
    # INSTALL_SCHEME dictionary above.  Phew!

    self.dump_dirs("pre-finalize_{unix,other}")
    if os.name == 'posix':
        self.finalize_unix()
    else:
        self.finalize_other()
    self.dump_dirs("post-finalize_{unix,other}()")

    # Expand configuration variables, tilde, etc. in self.install_base
    # and self.install_platbase -- that way, we can use $base or
    # $platbase in the other installation directories and not worry
    # about needing recursive variable expansion (shudder).

    py_version = '%s.%s' % sys.version_info[:2]
    prefix, exec_prefix, srcdir, projectbase = get_config_vars(
        'prefix', 'exec_prefix', 'srcdir', 'projectbase')
    metadata = self.distribution.metadata
    # substitution variables available to the directory templates
    self.config_vars = {
        'dist_name': metadata['Name'],
        'dist_version': metadata['Version'],
        'dist_fullname': metadata.get_fullname(),
        'py_version': py_version,
        'py_version_short': py_version[:3],
        'py_version_nodot': py_version[:3:2],
        'sys_prefix': prefix,
        'prefix': prefix,
        'sys_exec_prefix': exec_prefix,
        'exec_prefix': exec_prefix,
        'srcdir': srcdir,
        'projectbase': projectbase,
        'userbase': self.install_userbase,
        'usersite': self.install_usersite,
    }

    self.expand_basedirs()
    self.dump_dirs("post-expand_basedirs()")

    # Now define config vars for the base directories so we can expand
    # everything else.
    self.config_vars['base'] = self.install_base
    self.config_vars['platbase'] = self.install_platbase

    # Expand "~" and configuration variables in the installation
    # directories.
    self.expand_dirs()
    self.dump_dirs("post-expand_dirs()")

    # Create directories under USERBASE
    if self.user:
        self.create_user_dirs()

    # Pick the actual directory to install all modules to: either
    # install_purelib or install_platlib, depending on whether this
    # module distribution is pure or not.  Of course, if the user
    # already specified install_lib, use their selection.
    if self.install_lib is None:
        if self.distribution.ext_modules:
            # has extensions: non-pure
            self.install_lib = self.install_platlib
        else:
            self.install_lib = self.install_purelib

    # Convert directories from Unix /-separated syntax to the local
    # convention.
    self.convert_paths('lib', 'purelib', 'platlib', 'scripts',
                       'data', 'headers', 'userbase', 'usersite')

    # Well, we're not actually fully completely finalized yet: we still
    # have to deal with 'extra_path', which is the hack for allowing
    # non-packagized module distributions (hello, Numerical Python!) to
    # get their own directories.
    self.handle_extra_path()
    self.install_libbase = self.install_lib  # needed for .pth file
    self.install_lib = os.path.join(self.install_lib, self.extra_dirs)

    # If a new root directory was supplied, make all the installation
    # dirs relative to it.
    if self.root is not None:
        self.change_roots('libbase', 'lib', 'purelib', 'platlib',
                          'scripts', 'data', 'headers')

    self.dump_dirs("after prepending root")

    # Find out the build directories, ie. where to install from.
    self.set_undefined_options('build', 'build_base', 'build_lib')

    # Punt on doc directories for now -- after all, we're punting on
    # documentation completely!

    if self.no_distinfo is None:
        self.no_distinfo = False
def _read_setup_cfg(self, parser, cfg_filename):
    """Populate self.dist from a parsed setup.cfg.

    Runs global setup hooks first, then fills in metadata, file lists
    (packages, modules, scripts, package_data, data_files, resources,
    extra_files) and [extension: name] sections.
    """
    cfg_directory = os.path.dirname(os.path.abspath(cfg_filename))
    content = {}
    for section in parser.sections():
        content[section] = dict(parser.items(section))

    # global setup hooks are called first
    if 'global' in content:
        if 'setup_hooks' in content['global']:
            setup_hooks = split_multiline(content['global']['setup_hooks'])

            # add project directory to sys.path, to allow hooks to be
            # distributed with the project
            sys.path.insert(0, cfg_directory)
            try:
                for line in setup_hooks:
                    try:
                        hook = resolve_name(line)
                    except ImportError as e:
                        logger.warning('cannot find setup hook: %s',
                                       e.args[0])
                    else:
                        self.setup_hooks.append(hook)
                self.run_hooks(content)
            finally:
                sys.path.pop(0)

    metadata = self.dist.metadata

    # setting the metadata values
    if 'metadata' in content:
        for key, value in content['metadata'].items():
            key = key.replace('_', '-')
            if metadata.is_multi_field(key):
                value = split_multiline(value)

            if key == 'project-url':
                value = [(label.strip(), url.strip())
                         for label, url in
                         [v.split(',') for v in value]]

            if key == 'description-file':
                if 'description' in content['metadata']:
                    msg = ("description and description-file' are "
                           "mutually exclusive")
                    raise PackagingOptionError(msg)

                filenames = value.split()

                # concatenate all files
                value = []
                for filename in filenames:
                    # will raise if file not found
                    with open(filename) as description_file:
                        value.append(description_file.read().strip())
                    # add filename as a required file
                    if filename not in metadata.requires_files:
                        metadata.requires_files.append(filename)
                value = '\n'.join(value).strip()
                key = 'description'

            if metadata.is_metadata_field(key):
                metadata[key] = self._convert_metadata(key, value)

    if 'files' in content:
        files = content['files']
        self.dist.package_dir = files.pop('packages_root', None)

        files = dict((key, split_multiline(value))
                     for key, value in files.items())

        self.dist.packages = []

        packages = files.get('packages', [])
        if isinstance(packages, str):
            packages = [packages]

        for package in packages:
            # "pkg: dir" maps a package name to its source directory
            if ':' in package:
                dir_, package = package.split(':')
                self.dist.package_dir[package] = dir_
            self.dist.packages.append(package)

        self.dist.py_modules = files.get('modules', [])
        if isinstance(self.dist.py_modules, str):
            self.dist.py_modules = [self.dist.py_modules]
        self.dist.scripts = files.get('scripts', [])
        if isinstance(self.dist.scripts, str):
            self.dist.scripts = [self.dist.scripts]

        self.dist.package_data = {}
        # bookkeeping for the loop below
        firstline = True
        prev = None
        for line in files.get('package_data', []):
            if '=' in line:
                # package name -- file globs or specs
                key, value = line.split('=')
                prev = self.dist.package_data[key.strip()] = value.split()
            elif firstline:
                # invalid continuation on the first line
                raise PackagingOptionError(
                    'malformed package_data first line: %r (misses "=")'
                    % line)
            else:
                # continuation, add to last seen package name
                prev.extend(line.split())
            firstline = False

        self.dist.data_files = []
        for data in files.get('data_files', []):
            data = data.split('=')
            if len(data) != 2:
                # silently skip malformed entries (historical behavior)
                continue
            key, value = data
            values = [v.strip() for v in value.split(',')]
            self.dist.data_files.append((key, values))

        # manifest template
        self.dist.extra_files = files.get('extra_files', [])

        resources = []
        for rule in files.get('resources', []):
            glob, destination = rule.split('=', 1)
            rich_glob = glob.strip().split(' ', 1)
            if len(rich_glob) == 2:
                prefix, suffix = rich_glob
            else:
                assert len(rich_glob) == 1
                prefix = ''
                suffix = glob
            # fix: '<exclude>' rules map to None and must not be
            # stripped (the previous code called None.strip() here)
            if destination == '<exclude>':
                destination = None
            else:
                destination = destination.strip()
            resources.append(
                (prefix.strip(), suffix.strip(), destination))

        self.dist.data_files = get_resources_dests(
            cfg_directory, resources)

    ext_modules = self.dist.ext_modules
    for section_key in content:
        # no str.partition in 2.4 :(
        labels = section_key.split(':')
        if len(labels) == 2 and labels[0] == 'extension':
            values_dct = content[section_key]
            if 'name' in values_dct:
                raise PackagingOptionError(
                    'extension name should be given as [extension: name], '
                    'not as key')
            name = labels[1].strip()
            _check_name(name, self.dist.packages)
            ext_modules.append(Extension(
                name,
                _pop_values(values_dct, 'sources'),
                _pop_values(values_dct, 'include_dirs'),
                _pop_values(values_dct, 'define_macros'),
                _pop_values(values_dct, 'undef_macros'),
                _pop_values(values_dct, 'library_dirs'),
                _pop_values(values_dct, 'libraries'),
                _pop_values(values_dct, 'runtime_library_dirs'),
                _pop_values(values_dct, 'extra_objects'),
                _pop_values(values_dct, 'extra_compile_args'),
                _pop_values(values_dct, 'extra_link_args'),
                _pop_values(values_dct, 'export_symbols'),
                _pop_values(values_dct, 'swig_opts'),
                _pop_values(values_dct, 'depends'),
                values_dct.pop('language', None),
                values_dct.pop('optional', None),
                **values_dct))
def _process_template_line(self, line):
    """Apply one manifest-template line to the file list, warning when
    a pattern matches nothing."""
    # Parse the line: split it up, make sure the right number of words
    # is there, and return the relevant words.  'action' is always
    # defined: it's the first word of the line.  Which of the other
    # three are defined depends on the action; it'll be either
    # patterns, (dir and patterns), or (dir_pattern).
    action, patterns, dir, dir_pattern = self._parse_template_line(line)

    # OK, now we know that the action is valid and we have the
    # right number of words on the line for that action -- so we
    # can proceed with minimal error-checking.
    if action == 'include':
        for pattern in patterns:
            if not self._include_pattern(pattern, anchor=True):
                logger.warning("no files found matching %r", pattern)

    elif action == 'exclude':
        for pattern in patterns:
            if not self.exclude_pattern(pattern, anchor=True):
                logger.warning("no previously-included files "
                               "found matching %r", pattern)

    elif action == 'global-include':
        for pattern in patterns:
            if not self._include_pattern(pattern, anchor=False):
                logger.warning("no files found matching %r "
                               "anywhere in distribution", pattern)

    elif action == 'global-exclude':
        for pattern in patterns:
            if not self.exclude_pattern(pattern, anchor=False):
                logger.warning("no previously-included files "
                               "matching %r found anywhere in "
                               "distribution", pattern)

    elif action == 'recursive-include':
        for pattern in patterns:
            if not self._include_pattern(pattern, prefix=dir):
                logger.warning("no files found matching %r "
                               "under directory %r", pattern, dir)

    elif action == 'recursive-exclude':
        for pattern in patterns:
            if not self.exclude_pattern(pattern, prefix=dir):
                logger.warning("no previously-included files "
                               "matching %r found under directory %r",
                               pattern, dir)

    elif action == 'graft':
        # graft/prune operate on whole directory trees
        if not self._include_pattern(None, prefix=dir_pattern):
            logger.warning("no directories found matching %r",
                           dir_pattern)

    elif action == 'prune':
        if not self.exclude_pattern(None, prefix=dir_pattern):
            logger.warning("no previously-included directories found "
                           "matching %r", dir_pattern)
    else:
        raise PackagingInternalError(
            "this cannot happen: invalid action %r" % action)
def _darwin_compiler_fixup(compiler_so, cc_args):
    """
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one them in extra_compile_flags.

    This is needed because '-arch ARCH' adds another architecture to the
    build, without a way to remove an architecture. Furthermore GCC will
    barf if multiple '-isysroot' arguments are present.
    """
    stripArch = stripSysroot = False

    # work on a copy; the caller's list is left untouched
    compiler_so = list(compiler_so)
    kernel_version = os.uname()[2]  # 8.4.3
    major_version = int(kernel_version.split('.')[0])

    if major_version < 8:
        # OSX before 10.4.0, these don't support -arch and -isysroot at
        # all.
        stripArch = stripSysroot = True
    else:
        stripArch = '-arch' in cc_args
        stripSysroot = '-isysroot' in cc_args

    if stripArch or 'ARCHFLAGS' in os.environ:
        # remove every '-arch ARCH' pair
        while True:
            try:
                index = compiler_so.index('-arch')
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            except ValueError:
                break

    if 'ARCHFLAGS' in os.environ and not stripArch:
        # User specified different -arch flags in the environ,
        # see also the sysconfig
        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()

    if stripSysroot:
        # NOTE(review): unlike the -arch loop above, only the first
        # '-isysroot PATH' pair is removed -- presumably a single
        # occurrence is expected; confirm before relying on this.
        try:
            index = compiler_so.index('-isysroot')
            # Strip this argument and the next one:
            del compiler_so[index:index+2]
        except ValueError:
            pass

    # Check if the SDK that is used during compilation actually exists,
    # the universal build requires the usage of a universal SDK and not all
    # users have that installed by default.
    sysroot = None
    if '-isysroot' in cc_args:
        idx = cc_args.index('-isysroot')
        sysroot = cc_args[idx+1]
    elif '-isysroot' in compiler_so:
        idx = compiler_so.index('-isysroot')
        sysroot = compiler_so[idx+1]

    if sysroot and not os.path.isdir(sysroot):
        logger.warning(
            "compiling with an SDK that doesn't seem to exist: %r;\n"
            "please check your Xcode installation", sysroot)

    return compiler_so