def run_egg_info(self): assert self.source_dir if self.name: logger.debug( 'Running setup.py (path:%s) egg_info for package %s', self.setup_py, self.name, ) else: logger.debug( 'Running setup.py (path:%s) egg_info for package from %s', self.setup_py, self.link, ) with indent_log(): script = SETUPTOOLS_SHIM % self.setup_py base_cmd = [sys.executable, '-c', script] if self.isolated: base_cmd += ["--no-user-cfg"] egg_info_cmd = base_cmd + ['egg_info'] # We can't put the .egg-info files at the root, because then the # source code will be mistaken for an installed egg, causing # problems if self.editable: egg_base_option = [] else: egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info') ensure_dir(egg_info_dir) egg_base_option = ['--egg-base', 'pip-egg-info'] call_subprocess( egg_info_cmd + egg_base_option, cwd=self.setup_py_dir, show_stdout=False, command_level=logging.DEBUG, command_desc='python setup.py egg_info') if not self.req: if isinstance( pkg_resources.parse_version(self.pkg_info()["Version"]), Version): op = "==" else: op = "===" self.req = Requirement( "".join([ self.pkg_info()["Name"], op, self.pkg_info()["Version"], ]) ) self._correct_build_location() else: metadata_name = canonicalize_name(self.pkg_info()["Name"]) if canonicalize_name(self.req.name) != metadata_name: logger.warning( 'Running setup.py (path:%s) egg_info for package %s ' 'produced metadata for project name %s. Fix your ' '#egg=%s fragments.', self.setup_py, self.name, metadata_name, self.name ) self.req = Requirement(metadata_name)
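Every snippet in this section calls ensure_dir(...); the helper itself is not shown. A minimal sketch of what such a utility typically does (assuming it simply wraps os.makedirs and tolerates an already-existing directory, which is how pip's utility behaves) could look like:

import errno
import os


def ensure_dir(path):
    # Create `path` and any missing parents; an already-existing
    # directory is not an error, any other OSError is re-raised.
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise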
def save(self, pypi_version, current_time): # Check to make sure that we own the directory if not check_path_owner(os.path.dirname(self.statefile_path)): return # Now that we've ensured the directory is owned by this user, we'll go # ahead and make sure that all our directories are created. ensure_dir(os.path.dirname(self.statefile_path)) # Attempt to write out our version check file with lockfile.LockFile(self.statefile_path): if os.path.exists(self.statefile_path): with open(self.statefile_path) as statefile: state = json.load(statefile) else: state = {} state[sys.prefix] = { "last_check": current_time.strftime(SELFCHECK_DATE_FMT), "pypi_version": pypi_version, } with open(self.statefile_path, "w") as statefile: json.dump(state, statefile, sort_keys=True, separators=(",", ":"))
def prepare_files(self, finder): """ Prepare process. Create temp directories, download and/or unpack files. """ # make the wheelhouse if self.wheel_download_dir: ensure_dir(self.wheel_download_dir) # If any top-level requirement has a hash specified, enter # hash-checking mode, which requires hashes from all. root_reqs = self.unnamed_requirements + self.requirements.values() require_hashes = (self.require_hashes or any(req.has_hash_options for req in root_reqs)) # Actually prepare the files, and collect any exceptions. Most hash # exceptions cannot be checked ahead of time, because # req.populate_link() needs to be called before we can make decisions # based on link type. discovered_reqs = [] hash_errors = HashErrors() for req in chain(root_reqs, discovered_reqs): try: discovered_reqs.extend(self._prepare_file( finder, req, require_hashes=require_hashes, ignore_dependencies=self.ignore_dependencies)) except HashError as exc: exc.req = req hash_errors.append(exc) if hash_errors: raise hash_errors
def clobber(source, dest, is_base, fixer=None, filter=None): ensure_dir(dest) # common for the 'include' path for dir, subdirs, files in os.walk(source): basedir = dir[len(source):].lstrip(os.path.sep) destdir = os.path.join(dest, basedir) if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): continue for s in subdirs: destsubdir = os.path.join(dest, basedir, s) if is_base and basedir == '' and destsubdir.endswith('.data'): data_dirs.append(s) continue elif (is_base and s.endswith('.dist-info') and # is self.req.project_name case preserving? s.lower().startswith( req.name.replace('-', '_').lower())): assert not info_dir, ('Multiple .dist-info directories: ' + destsubdir + ', ' + ', '.join(info_dir)) info_dir.append(destsubdir) for f in files: # Skip unwanted files if filter and filter(f): continue srcfile = os.path.join(dir, f) destfile = os.path.join(dest, basedir, f) # directory creation is lazy and after the file filtering above # to ensure we don't install empty dirs; empty dirs can't be # uninstalled. ensure_dir(destdir) # We use copyfile (not move, copy, or copy2) to be extra sure # that we are not moving directories over (copyfile fails for # directories) as well as to ensure that we are not copying # over any metadata because we want more control over what # metadata we actually copy over. shutil.copyfile(srcfile, destfile) # Copy over the metadata for the file, currently this only # includes the atime and mtime. st = os.stat(srcfile) if hasattr(os, "utime"): os.utime(destfile, (st.st_atime, st.st_mtime)) # If our file is executable, then make our destination file # executable. if os.access(srcfile, os.X_OK): st = os.stat(srcfile) permissions = ( st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH ) os.chmod(destfile, permissions) changed = False if fixer: changed = fixer(destfile) record_installed(srcfile, destfile, changed)
def prepare_files(self, finder): """ Prepare process. Create temp directories, download and/or unpack files. """ # make the wheelhouse if self.wheel_download_dir: ensure_dir(self.wheel_download_dir) self._walk_req_to_install( functools.partial(self._prepare_file, finder))
def run_egg_info(self): assert self.source_dir if self.name: logger.debug( 'Running setup.py (path:%s) egg_info for package %s', self.setup_py, self.name, ) else: logger.debug( 'Running setup.py (path:%s) egg_info for package from %s', self.setup_py, self.link, ) with indent_log(): script = self._run_setup_py script = script.replace('__SETUP_PY__', repr(self.setup_py)) script = script.replace('__PKG_NAME__', repr(self.name)) base_cmd = [sys.executable, '-c', script] if self.isolated: base_cmd += ["--no-user-cfg"] egg_info_cmd = base_cmd + ['egg_info'] # We can't put the .egg-info files at the root, because then the # source code will be mistaken for an installed egg, causing # problems if self.editable: egg_base_option = [] else: egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info') ensure_dir(egg_info_dir) egg_base_option = ['--egg-base', 'pip-egg-info'] cwd = self.source_dir if self.editable_options and \ 'subdirectory' in self.editable_options: cwd = os.path.join(cwd, self.editable_options['subdirectory']) call_subprocess( egg_info_cmd + egg_base_option, cwd=cwd, show_stdout=False, command_level=logging.DEBUG, command_desc='python setup.py egg_info') if not self.req: if isinstance( pkg_resources.parse_version(self.pkg_info()["Version"]), Version): op = "==" else: op = "===" self.req = pkg_resources.Requirement.parse( "".join([ self.pkg_info()["Name"], op, self.pkg_info()["Version"], ])) self._correct_build_location()
def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): ensure_dir(target_dir) # Checking both purelib and platlib directories for installed # packages to be moved to target directory lib_dir_list = [] with target_temp_dir: # Checking both purelib and platlib directories for installed # packages to be moved to target directory scheme = distutils_scheme('', home=target_temp_dir.path) purelib_dir = scheme['purelib'] platlib_dir = scheme['platlib'] data_dir = scheme['data'] if os.path.exists(purelib_dir): lib_dir_list.append(purelib_dir) if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: lib_dir_list.append(platlib_dir) if os.path.exists(data_dir): lib_dir_list.append(data_dir) for lib_dir in lib_dir_list: for item in os.listdir(lib_dir): if lib_dir == data_dir: ddir = os.path.join(data_dir, item) if any(s.startswith(ddir) for s in lib_dir_list[:-1]): continue target_item_dir = os.path.join(target_dir, item) if os.path.exists(target_item_dir): if not upgrade: logger.warning( 'Target directory %s already exists. Specify ' '--upgrade to force replacement.', target_item_dir ) continue if os.path.islink(target_item_dir): logger.warning( 'Target directory %s already exists and is ' 'a link. Pip will not automatically replace ' 'links, please remove if replacement is ' 'desired.', target_item_dir ) continue if os.path.isdir(target_item_dir): shutil.rmtree(target_item_dir) else: os.remove(target_item_dir) shutil.move( os.path.join(lib_dir, item), target_item_dir )
def save(self): """Save the current in-memory state. """ self._ensure_have_load_only() for fname, parser in self._modified_parsers: logger.info("Writing to %s", fname) # Ensure directory exists. ensure_dir(os.path.dirname(fname)) with open(fname, "w") as f: parser.write(f)
def tmpfile(self, contents): # Create a temporary file fd, path = tempfile.mkstemp( prefix="pip_", suffix="_config.ini", text=True ) os.close(fd) contents = textwrap.dedent(contents).lstrip() ensure_dir(os.path.dirname(path)) with open(path, "w") as f: f.write(contents) yield path os.remove(path)
def resolve(self, requirement_set): """Resolve what operations need to be done As a side-effect of this method, the packages (and their dependencies) are downloaded, unpacked and prepared for installation. This preparation is done by ``pip.operations.prepare``. Once PyPI has static dependency metadata available, it would be possible to move the preparation to become a step separated from dependency resolution. """ # make the wheelhouse if self.preparer.wheel_download_dir: ensure_dir(self.preparer.wheel_download_dir) # If any top-level requirement has a hash specified, enter # hash-checking mode, which requires hashes from all. root_reqs = ( requirement_set.unnamed_requirements + list(requirement_set.requirements.values()) ) self.require_hashes = ( requirement_set.require_hashes or any(req.has_hash_options for req in root_reqs) ) # Display where finder is looking for packages locations = self.finder.get_formatted_locations() if locations: logger.info(locations) # Actually prepare the files, and collect any exceptions. Most hash # exceptions cannot be checked ahead of time, because # req.populate_link() needs to be called before we can make decisions # based on link type. discovered_reqs = [] hash_errors = HashErrors() for req in chain(root_reqs, discovered_reqs): try: discovered_reqs.extend( self._resolve_one(requirement_set, req) ) except HashError as exc: exc.req = req hash_errors.append(exc) if hash_errors: raise hash_errors
def run(self, options, args): options.ignore_installed = True if options.python_version: python_versions = [options.python_version] else: python_versions = None dist_restriction_set = any([ options.python_version, options.platform, options.abi, options.implementation, ]) binary_only = FormatControl(set(), set([':all:'])) if dist_restriction_set and options.format_control != binary_only: raise CommandError( "--only-binary=:all: must be set and --no-binary must not " "be set (or must be set to :none:) when restricting platform " "and interpreter constraints using --python-version, " "--platform, --abi, or --implementation." ) options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) with self._build_session(options) as session: finder = self._build_package_finder( options=options, session=session, platform=options.platform, python_versions=python_versions, abi=options.abi, implementation=options.implementation, ) build_delete = (not (options.no_clean or options.build_dir)) if options.cache_dir and not check_path_owner(options.cache_dir): logger.warning( "The directory '%s' or its parent directory is not owned " "by the current user and caching wheels has been " "disabled. check the permissions and owner of that " "directory. If executing pip with sudo, you may want " "sudo's -H flag.", options.cache_dir, ) options.cache_dir = None with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=options.download_dir, ignore_installed=True, ignore_dependencies=options.ignore_dependencies, session=session, isolated=options.isolated_mode, require_hashes=options.require_hashes ) self.populate_requirement_set( requirement_set, args, options, finder, session, self.name, None ) if not requirement_set.has_requirements: return requirement_set.prepare_files(finder) downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info( 'Successfully downloaded %s', downloaded ) # Clean up if not options.no_clean: requirement_set.cleanup_files() return requirement_set
def _open(self): ensure_dir(os.path.dirname(self.baseFilename)) return logging.handlers.RotatingFileHandler._open(self)
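For context, a sketch of the handler class this _open override would live in (the class name is assumed, not taken from the source): a RotatingFileHandler subclass that creates its log directory on demand, so the first write cannot fail on a missing path.

import logging.handlers
import os


class EnsuredRotatingFileHandler(logging.handlers.RotatingFileHandler):
    def _open(self):
        # Assumed wrapper class: lazily create the log file's parent
        # directory before the stock handler opens the stream.
        ensure_dir(os.path.dirname(self.baseFilename))
        return logging.handlers.RotatingFileHandler._open(self)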
x_hats = data_x_hats ret_list = [data_x_hats, data_z_hats, data_measurements] for j, item in enumerate(save_arr): print(var_paths) file_path = var_paths[j] with open(file_path, 'w') as f: cPickle.dump(eval(item), f, cPickle.HIGHEST_PROTOCOL) print('Saved {} to {}.'.format(item, file_path)) else: x_hats = gen_model.get_x_hats(ret_list[0]) ret_list = [x_hats] + ret_list if FLAGS.vis and FLAGS.debug: # For visualizing the reconstructions. dir_path = os.path.join('debug', gen_model.model_dir) ensure_dir(dir_path) for i in range(x_hats.shape[0]): im_size = data_X[0:1].shape[1:] save_image( dataset.inv_transform(x_hats[i], type=FLAGS.input_transform_type), os.path.join(dir_path, 'x_test_{}_hat_{}.png'.format(i, 'loaded'))) save_image( data_X[i], os.path.join(dir_path, 'x_test_{}_{}.png'.format(i, 'original'))) if hasattr(FLAGS, 'inpaint_ratio'): save_image( dataset.inv_transform( data_measurements.reshape(
def run(self, options, args): cmdoptions.resolve_wheel_no_use_binary(options) cmdoptions.check_install_build_global(options) if options.allow_external: warnings.warn( "--allow-external has been deprecated and will be removed in " "the future. Due to changes in the repository protocol, it no " "longer has any effect.", RemovedInPip10Warning, ) if options.allow_all_external: warnings.warn( "--allow-all-external has been deprecated and will be removed " "in the future. Due to changes in the repository protocol, it " "no longer has any effect.", RemovedInPip10Warning, ) if options.allow_unverified: warnings.warn( "--allow-unverified has been deprecated and will be removed " "in the future. Due to changes in the repository protocol, it " "no longer has any effect.", RemovedInPip10Warning, ) if options.download_dir: warnings.warn( "pip install --download has been deprecated and will be " "removed in the future. Pip now has a download command that " "should be used instead.", RemovedInPip10Warning, ) options.ignore_installed = True if options.build_dir: options.build_dir = os.path.abspath(options.build_dir) options.src_dir = os.path.abspath(options.src_dir) install_options = options.install_options or [] if options.use_user_site: if options.prefix_path: raise CommandError( "Can not combine '--user' and '--prefix' as they imply " "different installation locations" ) if virtualenv_no_global(): raise InstallationError( "Can not perform a '--user' install. User site-packages " "are not visible in this virtualenv." ) install_options.append('--user') install_options.append('--prefix=') temp_target_dir = None if options.target_dir: options.ignore_installed = True temp_target_dir = tempfile.mkdtemp() options.target_dir = os.path.abspath(options.target_dir) if (os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir)): raise CommandError( "Target path exists but is not a directory, will not " "continue." ) install_options.append('--home=' + temp_target_dir) global_options = options.global_options or [] with self._build_session(options) as session: finder = self._build_package_finder(options, session) build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir, options.format_control) if options.cache_dir and not check_path_owner(options.cache_dir): logger.warning( "The directory '%s' or its parent directory is not owned " "by the current user and caching wheels has been " "disabled. check the permissions and owner of that " "directory. If executing pip with sudo, you may want " "sudo's -H flag.", options.cache_dir, ) options.cache_dir = None with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=options.download_dir, upgrade=options.upgrade, as_egg=options.as_egg, ignore_installed=options.ignore_installed, ignore_dependencies=options.ignore_dependencies, force_reinstall=options.force_reinstall, use_user_site=options.use_user_site, target_dir=temp_target_dir, session=session, pycompile=options.compile, isolated=options.isolated_mode, wheel_cache=wheel_cache, require_hashes=options.require_hashes, ) self.populate_requirement_set( requirement_set, args, options, finder, session, self.name, wheel_cache ) if not requirement_set.has_requirements: return try: if (options.download_dir or not wheel or not options.cache_dir): # on -d don't do complex things like building # wheels, and don't try to build wheels when wheel is # not installed. 
requirement_set.prepare_files(finder) else: # build wheels before install. wb = WheelBuilder( requirement_set, finder, build_options=[], global_options=[], ) # Ignore the result: a failed wheel will be # installed from the sdist/vcs whatever. wb.build(autobuilding=True) if not options.download_dir: requirement_set.install( install_options, global_options, root=options.root_path, prefix=options.prefix_path, ) reqs = sorted( requirement_set.successfully_installed, key=operator.attrgetter('name')) items = [] for req in reqs: item = req.name try: if hasattr(req, 'installed_version'): if req.installed_version: item += '-' + req.installed_version except Exception: pass items.append(item) installed = ' '.join(items) if installed: logger.info('Successfully installed %s', installed) else: downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info( 'Successfully downloaded %s', downloaded ) except PreviousBuildDirError: options.no_clean = True raise finally: # Clean up if not options.no_clean: requirement_set.cleanup_files() if options.target_dir: ensure_dir(options.target_dir) lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] for item in os.listdir(lib_dir): target_item_dir = os.path.join(options.target_dir, item) if os.path.exists(target_item_dir): if not options.upgrade: logger.warning( 'Target directory %s already exists. Specify ' '--upgrade to force replacement.', target_item_dir ) continue if os.path.islink(target_item_dir): logger.warning( 'Target directory %s already exists and is ' 'a link. Pip will not automatically replace ' 'links, please remove if replacement is ' 'desired.', target_item_dir ) continue if os.path.isdir(target_item_dir): shutil.rmtree(target_item_dir) else: os.remove(target_item_dir) shutil.move( os.path.join(lib_dir, item), target_item_dir ) shutil.rmtree(temp_target_dir) return requirement_set
def run(self, options, args): cmdoptions.resolve_wheel_no_use_binary(options) cmdoptions.check_install_build_global(options) def is_venv(): return hasattr(sys, 'real_prefix') or \ (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix) # Check whether we have root privileges and aren't in venv/virtualenv if os.getuid() == 0 and not is_venv(): logger.warning( "WARNING: Running pip install with root privileges is " "generally not a good idea. Try `%s install --user` instead." % path.basename(sys.argv[0])) if options.as_egg: warnings.warn( "--egg has been deprecated and will be removed in the future. " "This flag is mutually exclusive with large parts of pip, and " "actually using it invalidates pip's ability to manage the " "installation process.", RemovedInPip10Warning, ) if options.allow_external: warnings.warn( "--allow-external has been deprecated and will be removed in " "the future. Due to changes in the repository protocol, it no " "longer has any effect.", RemovedInPip10Warning, ) if options.allow_all_external: warnings.warn( "--allow-all-external has been deprecated and will be removed " "in the future. Due to changes in the repository protocol, it " "no longer has any effect.", RemovedInPip10Warning, ) if options.allow_unverified: warnings.warn( "--allow-unverified has been deprecated and will be removed " "in the future. Due to changes in the repository protocol, it " "no longer has any effect.", RemovedInPip10Warning, ) if options.download_dir: warnings.warn( "pip install --download has been deprecated and will be " "removed in the future. Pip now has a download command that " "should be used instead.", RemovedInPip10Warning, ) options.ignore_installed = True if options.build_dir: options.build_dir = os.path.abspath(options.build_dir) options.src_dir = os.path.abspath(options.src_dir) install_options = options.install_options or [] if options.use_user_site: if options.prefix_path: raise CommandError( "Can not combine '--user' and '--prefix' as they imply " "different installation locations") if virtualenv_no_global(): raise InstallationError( "Can not perform a '--user' install. User site-packages " "are not visible in this virtualenv.") install_options.append('--user') install_options.append('--prefix=') temp_target_dir = None if options.target_dir: options.ignore_installed = True temp_target_dir = tempfile.mkdtemp() options.target_dir = os.path.abspath(options.target_dir) if (os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir)): raise CommandError( "Target path exists but is not a directory, will not " "continue.") install_options.append('--home=' + temp_target_dir) global_options = options.global_options or [] with self._build_session(options) as session: finder = self._build_package_finder(options, session) build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir, options.format_control) if options.cache_dir and not check_path_owner(options.cache_dir): logger.warning( "The directory '%s' or its parent directory is not owned " "by the current user and caching wheels has been " "disabled. check the permissions and owner of that " "directory. 
If executing pip with sudo, you may want " "sudo's -H flag.", options.cache_dir, ) options.cache_dir = None with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=options.download_dir, upgrade=options.upgrade, upgrade_strategy=options.upgrade_strategy, as_egg=options.as_egg, ignore_installed=options.ignore_installed, ignore_dependencies=options.ignore_dependencies, ignore_requires_python=options.ignore_requires_python, force_reinstall=options.force_reinstall, use_user_site=options.use_user_site, target_dir=temp_target_dir, session=session, pycompile=options.compile, isolated=options.isolated_mode, wheel_cache=wheel_cache, require_hashes=options.require_hashes, ) self.populate_requirement_set(requirement_set, args, options, finder, session, self.name, wheel_cache) if not requirement_set.has_requirements: return try: if (options.download_dir or not wheel or not options.cache_dir): # on -d don't do complex things like building # wheels, and don't try to build wheels when wheel is # not installed. requirement_set.prepare_files(finder) else: # build wheels before install. wb = WheelBuilder( requirement_set, finder, build_options=[], global_options=[], ) # Ignore the result: a failed wheel will be # installed from the sdist/vcs whatever. wb.build(autobuilding=True) if not options.download_dir: requirement_set.install( install_options, global_options, root=options.root_path, prefix=options.prefix_path, strip_file_prefix=options.strip_file_prefix, ) possible_lib_locations = get_lib_location_guesses( user=options.use_user_site, home=temp_target_dir, root=options.root_path, prefix=options.prefix_path, isolated=options.isolated_mode, ) reqs = sorted(requirement_set.successfully_installed, key=operator.attrgetter('name')) items = [] for req in reqs: item = req.name try: installed_version = get_installed_version( req.name, possible_lib_locations) if installed_version: item += '-' + installed_version except Exception: pass items.append(item) installed = ' '.join(items) if installed: logger.info('Successfully installed %s', installed) else: downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info('Successfully downloaded %s', downloaded) except PreviousBuildDirError: options.no_clean = True raise finally: # Clean up if not options.no_clean: requirement_set.cleanup_files() if options.target_dir: ensure_dir(options.target_dir) # Checking both purelib and platlib directories for installed # packages to be moved to target directory lib_dir_list = [] purelib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] platlib_dir = distutils_scheme('', home=temp_target_dir)['platlib'] if os.path.exists(purelib_dir): lib_dir_list.append(purelib_dir) if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: lib_dir_list.append(platlib_dir) for lib_dir in lib_dir_list: for item in os.listdir(lib_dir): target_item_dir = os.path.join(options.target_dir, item) if os.path.exists(target_item_dir): if not options.upgrade: logger.warning( 'Target directory %s already exists. Specify ' '--upgrade to force replacement.', target_item_dir) continue if os.path.islink(target_item_dir): logger.warning( 'Target directory %s already exists and is ' 'a link. 
Pip will not automatically replace ' 'links, please remove if replacement is ' 'desired.', target_item_dir) continue if os.path.isdir(target_item_dir): shutil.rmtree(target_item_dir) else: os.remove(target_item_dir) shutil.move(os.path.join(lib_dir, item), target_item_dir) shutil.rmtree(temp_target_dir) return requirement_set
def install(self, install_options, global_options=None, root=None, prefix=None): global_options = global_options if global_options is not None else [] if self.editable: self.install_editable( install_options, global_options, prefix=prefix) return if self.is_wheel: version = pip.wheel.wheel_version(self.source_dir) pip.wheel.check_compatibility(version, self.name) self.move_wheel_files(self.source_dir, root=root, prefix=prefix) self.install_succeeded = True return # Extend the list of global and install options passed on to # the setup.py call with the ones from the requirements file. # Options specified in requirements file override those # specified on the command line, since the last option given # to setup.py is the one that is used. global_options += self.options.get('global_options', []) install_options += self.options.get('install_options', []) if self.isolated: global_options = list(global_options) + ["--no-user-cfg"] with TempDirectory(kind="record") as temp_dir: record_filename = os.path.join(temp_dir.path, 'install-record.txt') install_args = self.get_install_args( global_options, record_filename, root, prefix) msg = 'Running setup.py install for %s' % (self.name,) with open_spinner(msg) as spinner: with indent_log(): call_subprocess( install_args + install_options, cwd=self.setup_py_dir, show_stdout=False, spinner=spinner, ) if not os.path.exists(record_filename): logger.debug('Record file %s not found', record_filename) return self.install_succeeded = True def prepend_root(path): if root is None or not os.path.isabs(path): return path else: return change_root(root, path) with open(record_filename) as f: for line in f: directory = os.path.dirname(line) if directory.endswith('.egg-info'): egg_info_dir = prepend_root(directory) break else: logger.warning( 'Could not find .egg-info directory in install record' ' for %s', self, ) # FIXME: put the record somewhere # FIXME: should this be an error? return new_lines = [] with open(record_filename) as f: for line in f: filename = line.strip() if os.path.isdir(filename): filename += os.path.sep new_lines.append( os.path.relpath(prepend_root(filename), egg_info_dir) ) ensure_dir(egg_info_dir) inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') with open(inst_files_path, 'w') as f: f.write('\n'.join(new_lines) + '\n')
def run(self, options, args): cmdoptions.resolve_wheel_no_use_binary(options) cmdoptions.check_install_build_global(options) if options.download_dir: options.ignore_installed = True if options.build_dir: options.build_dir = os.path.abspath(options.build_dir) options.src_dir = os.path.abspath(options.src_dir) install_options = options.install_options or [] if options.use_user_site: if virtualenv_no_global(): raise InstallationError( "Can not perform a '--user' install. User site-packages " "are not visible in this virtualenv.") install_options.append('--user') install_options.append('--prefix=') temp_target_dir = None if options.target_dir: options.ignore_installed = True temp_target_dir = tempfile.mkdtemp() options.target_dir = os.path.abspath(options.target_dir) if (os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir)): raise CommandError( "Target path exists but is not a directory, will not " "continue.") install_options.append('--home=' + temp_target_dir) global_options = options.global_options or [] index_urls = [options.index_url] + options.extra_index_urls if options.no_index: logger.info('Ignoring indexes: %s', ','.join(index_urls)) index_urls = [] if options.download_cache: warnings.warn( "--download-cache has been deprecated and will be removed in " "the future. Pip now automatically uses and configures its " "cache.", RemovedInPip8Warning, ) with self._build_session(options) as session: finder = self._build_package_finder(options, index_urls, session) build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir, options.format_control) with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=options.download_dir, upgrade=options.upgrade, as_egg=options.as_egg, ignore_installed=options.ignore_installed, ignore_dependencies=options.ignore_dependencies, force_reinstall=options.force_reinstall, use_user_site=options.use_user_site, target_dir=temp_target_dir, session=session, pycompile=options.compile, isolated=options.isolated_mode, wheel_cache=wheel_cache, ) self.populate_requirement_set(requirement_set, args, options, finder, session, self.name, wheel_cache) if not requirement_set.has_requirements: return try: if (options.download_dir or not wheel or not options.cache_dir): # on -d don't do complex things like building # wheels, and don't try to build wheels when wheel is # not installed. requirement_set.prepare_files(finder) else: # build wheels before install. wb = WheelBuilder( requirement_set, finder, build_options=[], global_options=[], ) # Ignore the result: a failed wheel will be # installed from the sdist/vcs whatever.
wb.build(autobuilding=True) if not options.download_dir: requirement_set.install( install_options, global_options, root=options.root_path, ) reqs = sorted(requirement_set.successfully_installed, key=operator.attrgetter('name')) items = [] for req in reqs: item = req.name try: if hasattr(req, 'installed_version'): if req.installed_version: item += '-' + req.installed_version except Exception: pass items.append(item) installed = ' '.join(items) if installed: logger.info('Successfully installed %s', installed) else: downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info('Successfully downloaded %s', downloaded) except PreviousBuildDirError: options.no_clean = True raise finally: # Clean up if not options.no_clean: requirement_set.cleanup_files() if options.target_dir: ensure_dir(options.target_dir) lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] for item in os.listdir(lib_dir): target_item_dir = os.path.join(options.target_dir, item) if os.path.exists(target_item_dir): if not options.upgrade: logger.warning( 'Target directory %s already exists. Specify ' '--upgrade to force replacement.', target_item_dir) continue if os.path.islink(target_item_dir): logger.warning( 'Target directory %s already exists and is ' 'a link. Pip will not automatically replace ' 'links, please remove if replacement is ' 'desired.', target_item_dir) continue if os.path.isdir(target_item_dir): shutil.rmtree(target_item_dir) else: os.remove(target_item_dir) shutil.move(os.path.join(lib_dir, item), target_item_dir) shutil.rmtree(temp_target_dir) return requirement_set
"""Map archive RECORD paths to installation RECORD paths.""" oldpath = normpath(srcfile, wheeldir) newpath = normpath(destfile, lib_dir) installed[oldpath] = newpath if modified: changed.add(destfile) def clobber(source, dest, is_base, fixer=None, filter=None): <<<<<<< HEAD if not os.path.exists(dest): # common for the 'include' path os.makedirs(dest) for dir, subdirs, files in os.walk(source): basedir = dir[len(source):].lstrip(os.path.sep) ======= ensure_dir(dest) # common for the 'include' path for dir, subdirs, files in os.walk(source): basedir = dir[len(source):].lstrip(os.path.sep) destdir = os.path.join(dest, basedir) >>>>>>> 54eef0be98b1b67c8507db91f4cfa90b64991027 if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): continue for s in subdirs: destsubdir = os.path.join(dest, basedir, s) if is_base and basedir == '' and destsubdir.endswith('.data'): data_dirs.append(s) continue <<<<<<< HEAD elif (is_base and s.endswith('.dist-info')
def run(self, options, args): options.ignore_installed = True # editable doesn't really make sense for `pip download`, but the bowels # of the RequirementSet code require that property. options.editables = [] if options.python_version: python_versions = [options.python_version] else: python_versions = None dist_restriction_set = any([ options.python_version, options.platform, options.abi, options.implementation, ]) binary_only = FormatControl(set(), {':all:'}) if dist_restriction_set and options.format_control != binary_only: raise CommandError( "--only-binary=:all: must be set and --no-binary must not " "be set (or must be set to :none:) when restricting platform " "and interpreter constraints using --python-version, " "--platform, --abi, or --implementation." ) options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) with self._build_session(options) as session: finder = self._build_package_finder( options=options, session=session, platform=options.platform, python_versions=python_versions, abi=options.abi, implementation=options.implementation, ) build_delete = (not (options.no_clean or options.build_dir)) if options.cache_dir and not check_path_owner(options.cache_dir): logger.warning( "The directory '%s' or its parent directory is not owned " "by the current user and caching wheels has been " "disabled. check the permissions and owner of that " "directory. If executing pip with sudo, you may want " "sudo's -H flag.", options.cache_dir, ) options.cache_dir = None with TempDirectory( options.build_dir, delete=build_delete, kind="download" ) as directory: requirement_set = RequirementSet( build_dir=directory.path, src_dir=options.src_dir, download_dir=options.download_dir, require_hashes=options.require_hashes, progress_bar=options.progress_bar, ) self.populate_requirement_set( requirement_set, args, options, finder, session, self.name, None ) resolver = Resolver( preparer=RequirementPreparer(), finder=finder, session=session, use_user_site=False, upgrade_strategy="to-satisfy-only", force_reinstall=False, ignore_dependencies=options.ignore_dependencies, ignore_requires_python=False, ignore_installed=True, isolated=options.isolated_mode, ) resolver.resolve(requirement_set) downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info( 'Successfully downloaded %s', downloaded ) # Clean up if not options.no_clean: requirement_set.cleanup_files() return requirement_set
def build(self, autobuilding=False): """Build wheels. :param unpack: If True, replace the sdist we built from with the newly built wheel, in preparation for installation. :return: True if all the wheels built correctly. """ assert self._wheel_dir or (autobuilding and self._cache_root) # unpack sdists and constructs req set self.requirement_set.prepare_files(self.finder) reqset = self.requirement_set.requirements.values() buildset = [] for req in reqset: if req.constraint: continue if req.is_wheel: if not autobuilding: logger.info('Skipping %s, due to already being wheel.', req.name) elif req.editable: if not autobuilding: logger.info( 'Skipping bdist_wheel for %s, due to being editable', req.name) elif autobuilding and req.link and not req.link.is_artifact: pass elif autobuilding and not req.source_dir: pass else: if autobuilding: link = req.link base, ext = link.splitext() if pip.index.egg_info_matches(base, None, link) is None: # Doesn't look like a package - don't autobuild a wheel # because we'll have no way to lookup the result sanely continue if "binary" not in pip.index.fmt_ctl_formats( self.finder.format_control, canonicalize_name(req.name)): logger.info( "Skipping bdist_wheel for %s, due to binaries " "being disabled for it.", req.name) continue buildset.append(req) if not buildset: return True # Build the wheels. logger.info( 'Building wheels for collected packages: %s', ', '.join([req.name for req in buildset]), ) with indent_log(): build_success, build_failure = [], [] for req in buildset: python_tag = None if autobuilding: python_tag = pep425tags.implementation_tag output_dir = _cache_for_link(self._cache_root, req.link) try: ensure_dir(output_dir) except OSError as e: logger.warning("Building wheel for %s failed: %s", req.name, e) build_failure.append(req) continue else: output_dir = self._wheel_dir wheel_file = self._build_one( req, output_dir, python_tag=python_tag, ) if wheel_file: build_success.append(req) if autobuilding: # XXX: This is mildly duplicative with prepare_files, # but not close enough to pull out to a single common # method. # The code below assumes temporary source dirs - # prevent it doing bad things. if req.source_dir and not os.path.exists( os.path.join(req.source_dir, PIP_DELETE_MARKER_FILENAME)): raise AssertionError( "bad source dir - missing marker") # Delete the source we built the wheel from req.remove_temporary_source() # set the build directory again - name is known from # the work prepare_files did. req.source_dir = req.build_location( self.requirement_set.build_dir) # Update the link for this. req.link = pip.index.Link(path_to_url(wheel_file)) assert req.link.is_wheel # extract the wheel into the dir unpack_url(req.link, req.source_dir, None, False, session=self.requirement_set.session) else: build_failure.append(req) # notify success/failure if build_success: logger.info( 'Successfully built %s', ' '.join([req.name for req in build_success]), ) if build_failure: logger.info( 'Failed to build %s', ' '.join([req.name for req in build_failure]), ) # Return True if all builds were successful return len(build_failure) == 0
def run(self, options, args): cmdoptions.resolve_wheel_no_use_binary(options) cmdoptions.check_install_build_global(options) if options.download_dir: options.ignore_installed = True if options.build_dir: options.build_dir = os.path.abspath(options.build_dir) options.src_dir = os.path.abspath(options.src_dir) install_options = options.install_options or [] if options.use_user_site: if virtualenv_no_global(): raise InstallationError( "Can not perform a '--user' install. User site-packages " "are not visible in this virtualenv." ) install_options.append('--user') install_options.append('--prefix=') temp_target_dir = None if options.target_dir: options.ignore_installed = True temp_target_dir = tempfile.mkdtemp() options.target_dir = os.path.abspath(options.target_dir) if (os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir)): raise CommandError( "Target path exists but is not a directory, will not " "continue." ) install_options.append('--home=' + temp_target_dir) global_options = options.global_options or [] index_urls = [options.index_url] + options.extra_index_urls if options.no_index: logger.info('Ignoring indexes: %s', ','.join(index_urls)) index_urls = [] if options.download_cache: warnings.warn( "--download-cache has been deprecated and will be removed in " "the future. Pip now automatically uses and configures its " "cache.", RemovedInPip8Warning, ) with self._build_session(options) as session: finder = self._build_package_finder(options, index_urls, session) build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir, options.format_control) with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=options.download_dir, upgrade=options.upgrade, as_egg=options.as_egg, ignore_installed=options.ignore_installed, ignore_dependencies=options.ignore_dependencies, force_reinstall=options.force_reinstall, use_user_site=options.use_user_site, target_dir=temp_target_dir, session=session, pycompile=options.compile, isolated=options.isolated_mode, wheel_cache=wheel_cache, ) self.populate_requirement_set( requirement_set, args, options, finder, session, self.name, wheel_cache ) if not requirement_set.has_requirements: return try: if (options.download_dir or not wheel or not options.cache_dir): # on -d don't do complex things like building # wheels, and don't try to build wheels when wheel is # not installed. requirement_set.prepare_files(finder) else: # build wheels before install. wb = WheelBuilder( requirement_set, finder, build_options=[], global_options=[], ) # Ignore the result: a failed wheel will be # installed from the sdist/vcs whatever. 
wb.build(autobuilding=True) if not options.download_dir: requirement_set.install( install_options, global_options, root=options.root_path, ) reqs = sorted( requirement_set.successfully_installed, key=operator.attrgetter('name')) items = [] for req in reqs: item = req.name try: if hasattr(req, 'installed_version'): if req.installed_version: item += '-' + req.installed_version except Exception: pass items.append(item) installed = ' '.join(items) if installed: logger.info('Successfully installed %s', installed) else: downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info( 'Successfully downloaded %s', downloaded ) except PreviousBuildDirError: options.no_clean = True raise finally: # Clean up if not options.no_clean: requirement_set.cleanup_files() if options.target_dir: ensure_dir(options.target_dir) lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] for item in os.listdir(lib_dir): target_item_dir = os.path.join(options.target_dir, item) if os.path.exists(target_item_dir): if not options.upgrade: logger.warning( 'Target directory %s already exists. Specify ' '--upgrade to force replacement.', target_item_dir ) continue if os.path.islink(target_item_dir): logger.warning( 'Target directory %s already exists and is ' 'a link. Pip will not automatically replace ' 'links, please remove if replacement is ' 'desired.', target_item_dir ) continue if os.path.isdir(target_item_dir): shutil.rmtree(target_item_dir) else: os.remove(target_item_dir) shutil.move( os.path.join(lib_dir, item), target_item_dir ) shutil.rmtree(temp_target_dir) return requirement_set
def run(self, options, args): options.ignore_installed = True # editable doesn't really make sense for `pip download`, but the bowels # of the RequirementSet code require that property. options.editables = [] if options.python_version: python_versions = [options.python_version] else: python_versions = None dist_restriction_set = any([ options.python_version, options.platform, options.abi, options.implementation, ]) binary_only = FormatControl(set(), {':all:'}) if dist_restriction_set and options.format_control != binary_only: raise CommandError( "--only-binary=:all: must be set and --no-binary must not " "be set (or must be set to :none:) when restricting platform " "and interpreter constraints using --python-version, " "--platform, --abi, or --implementation." ) options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) with self._build_session(options) as session: finder = self._build_package_finder( options=options, session=session, platform=options.platform, python_versions=python_versions, abi=options.abi, implementation=options.implementation, ) build_delete = (not (options.no_clean or options.build_dir)) if options.cache_dir and not check_path_owner(options.cache_dir): logger.warning( "The directory '%s' or its parent directory is not owned " "by the current user and caching wheels has been " "disabled. check the permissions and owner of that " "directory. If executing pip with sudo, you may want " "sudo's -H flag.", options.cache_dir, ) options.cache_dir = None with TempDirectory( options.build_dir, delete=build_delete, kind="download" ) as directory: requirement_set = RequirementSet( require_hashes=options.require_hashes, ) self.populate_requirement_set( requirement_set, args, options, finder, session, self.name, None ) preparer = RequirementPreparer( build_dir=directory.path, src_dir=options.src_dir, download_dir=options.download_dir, wheel_download_dir=None, progress_bar=options.progress_bar, ) resolver = Resolver( preparer=preparer, finder=finder, session=session, wheel_cache=None, use_user_site=False, upgrade_strategy="to-satisfy-only", force_reinstall=False, ignore_dependencies=options.ignore_dependencies, ignore_requires_python=False, ignore_installed=True, isolated=options.isolated_mode, ) resolver.resolve(requirement_set) downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info( 'Successfully downloaded %s', downloaded ) # Clean up if not options.no_clean: requirement_set.cleanup_files() return requirement_set
def create_file(*args): fpath = os.path.join(*args) ensure_dir(os.path.dirname(fpath)) with open(fpath, "w") as f: f.write("Holla!")
def build(self, autobuilding=False): """Build wheels. :param unpack: If True, replace the sdist we built from the with the newly built wheel, in preparation for installation. :return: True if all the wheels built correctly. """ assert self._wheel_dir or (autobuilding and self._cache_root) # unpack sdists and constructs req set self.requirement_set.prepare_files(self.finder) reqset = self.requirement_set.requirements.values() buildset = [] for req in reqset: if req.constraint: continue if req.is_wheel: if not autobuilding: logger.info( 'Skipping %s, due to already being wheel.', req.name) elif req.editable: if not autobuilding: logger.info( 'Skipping bdist_wheel for %s, due to being editable', req.name) elif autobuilding and req.link and not req.link.is_artifact: pass elif autobuilding and not req.source_dir: pass else: if autobuilding: link = req.link base, ext = link.splitext() if pip.index.egg_info_matches(base, None, link) is None: # Doesn't look like a package - don't autobuild a wheel # because we'll have no way to lookup the result sanely continue if "binary" not in pip.index.fmt_ctl_formats( self.finder.format_control, canonicalize_name(req.name)): logger.info( "Skipping bdist_wheel for %s, due to binaries " "being disabled for it.", req.name) continue buildset.append(req) if not buildset: return True # Build the wheels. logger.info( 'Building wheels for collected packages: %s', ', '.join([req.name for req in buildset]), ) with indent_log(): build_success, build_failure = [], [] for req in buildset: if autobuilding: output_dir = _cache_for_link(self._cache_root, req.link) try: ensure_dir(output_dir) except OSError as e: logger.warn("Building wheel for %s failed: %s", req.name, e) build_failure.append(req) continue else: output_dir = self._wheel_dir wheel_file = self._build_one(req, output_dir) if wheel_file: build_success.append(req) if autobuilding: # XXX: This is mildly duplicative with prepare_files, # but not close enough to pull out to a single common # method. # The code below assumes temporary source dirs - # prevent it doing bad things. if req.source_dir and not os.path.exists(os.path.join( req.source_dir, PIP_DELETE_MARKER_FILENAME)): raise AssertionError( "bad source dir - missing marker") # Delete the source we built the wheel from req.remove_temporary_source() # set the build directory again - name is known from # the work prepare_files did. req.source_dir = req.build_location( self.requirement_set.build_dir) # Update the link for this. req.link = pip.index.Link( path_to_url(wheel_file)) assert req.link.is_wheel # extract the wheel into the dir unpack_url( req.link, req.source_dir, None, False, session=self.requirement_set.session) else: build_failure.append(req) # notify success/failure if build_success: logger.info( 'Successfully built %s', ' '.join([req.name for req in build_success]), ) if build_failure: logger.info( 'Failed to build %s', ' '.join([req.name for req in build_failure]), ) # Return True if all builds were successful return len(build_failure) == 0
def run_egg_info(self): assert self.source_dir if self.name: logger.debug( 'Running setup.py (path:%s) egg_info for package %s', self.setup_py, self.name, ) else: logger.debug( 'Running setup.py (path:%s) egg_info for package from %s', self.setup_py, self.link, ) with indent_log(): script = SETUPTOOLS_SHIM % self.setup_py base_cmd = [sys.executable, '-c', script] if self.isolated: base_cmd += ["--no-user-cfg"] egg_info_cmd = base_cmd + ['egg_info'] # We can't put the .egg-info files at the root, because then the # source code will be mistaken for an installed egg, causing # problems if self.editable: egg_base_option = [] else: egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info') ensure_dir(egg_info_dir) egg_base_option = ['--egg-base', 'pip-egg-info'] cwd = self.source_dir if self.editable_options and \ 'subdirectory' in self.editable_options: cwd = os.path.join(cwd, self.editable_options['subdirectory']) call_subprocess(egg_info_cmd + egg_base_option, cwd=cwd, show_stdout=False, command_level=logging.DEBUG, command_desc='python setup.py egg_info') if not self.req: if isinstance( pkg_resources.parse_version(self.pkg_info()["Version"]), Version): op = "==" else: op = "===" self.req = pkg_resources.Requirement.parse("".join([ self.pkg_info()["Name"], op, self.pkg_info()["Version"], ])) self._correct_build_location() else: metadata_name = canonicalize_name(self.pkg_info()["Name"]) if canonicalize_name(self.req.project_name) != metadata_name: logger.warning( 'Running setup.py (path:%s) egg_info for package %s ' 'produced metadata for project name %s. Fix your ' '#egg=%s fragments.', self.setup_py, self.name, metadata_name, self.name) self.req = pkg_resources.Requirement.parse(metadata_name)
def run(self, options, args): options.ignore_installed = True options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) with self._build_session(options) as session: finder = self._build_package_finder(options, session) build_delete = (not (options.no_clean or options.build_dir)) if options.cache_dir and not check_path_owner(options.cache_dir): logger.warning( "The directory '%s' or its parent directory is not owned " "by the current user and caching wheels has been " "disabled. check the permissions and owner of that " "directory. If executing pip with sudo, you may want " "sudo's -H flag.", options.cache_dir, ) options.cache_dir = None with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=options.download_dir, ignore_installed=True, ignore_dependencies=options.ignore_dependencies, session=session, isolated=options.isolated_mode, require_hashes=options.require_hashes ) self.populate_requirement_set( requirement_set, args, options, finder, session, self.name, None ) if not requirement_set.has_requirements: return requirement_set.prepare_files(finder) downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info( 'Successfully downloaded %s', downloaded ) # Clean up if not options.no_clean: requirement_set.cleanup_files() return requirement_set
def install(self, install_options, global_options=[], root=None, prefix=None): if self.editable: self.install_editable(install_options, global_options, prefix=prefix) return if self.is_wheel: version = pip.wheel.wheel_version(self.source_dir) pip.wheel.check_compatibility(version, self.name) self.move_wheel_files(self.source_dir, root=root, prefix=prefix) self.install_succeeded = True return # Extend the list of global and install options passed on to # the setup.py call with the ones from the requirements file. # Options specified in requirements file override those # specified on the command line, since the last option given # to setup.py is the one that is used. global_options += self.options.get('global_options', []) install_options += self.options.get('install_options', []) if self.isolated: global_options = list(global_options) + ["--no-user-cfg"] with TempDirectory(kind="record") as temp_dir: record_filename = os.path.join(temp_dir.path, 'install-record.txt') install_args = self.get_install_args(global_options, record_filename, root, prefix) msg = 'Running setup.py install for %s' % (self.name, ) with open_spinner(msg) as spinner: with indent_log(): call_subprocess( install_args + install_options, cwd=self.setup_py_dir, show_stdout=False, spinner=spinner, ) if not os.path.exists(record_filename): logger.debug('Record file %s not found', record_filename) return self.install_succeeded = True def prepend_root(path): if root is None or not os.path.isabs(path): return path else: return change_root(root, path) with open(record_filename) as f: for line in f: directory = os.path.dirname(line) if directory.endswith('.egg-info'): egg_info_dir = prepend_root(directory) break else: logger.warning( 'Could not find .egg-info directory in install record' ' for %s', self, ) # FIXME: put the record somewhere # FIXME: should this be an error? return new_lines = [] with open(record_filename) as f: for line in f: filename = line.strip() if os.path.isdir(filename): filename += os.path.sep new_lines.append( os.path.relpath(prepend_root(filename), egg_info_dir)) ensure_dir(egg_info_dir) inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') with open(inst_files_path, 'w') as f: f.write('\n'.join(new_lines) + '\n')
def run(self, options, args): options.ignore_installed = True if options.python_version: python_versions = [options.python_version] else: python_versions = None dist_restriction_set = any([ options.python_version, options.platform, options.abi, options.implementation, ]) binary_only = FormatControl(set(), set([':all:'])) if dist_restriction_set and options.format_control != binary_only: raise CommandError( "--only-binary=:all: must be set and --no-binary must not " "be set (or must be set to :none:) when restricting platform " "and interpreter constraints using --python-version, " "--platform, --abi, or --implementation.") options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) with self._build_session(options) as session: finder = self._build_package_finder( options=options, session=session, platform=options.platform, python_versions=python_versions, abi=options.abi, implementation=options.implementation, ) build_delete = (not (options.no_clean or options.build_dir)) if options.cache_dir and not check_path_owner(options.cache_dir): logger.warning( "The directory '%s' or its parent directory is not owned " "by the current user and caching wheels has been " "disabled. check the permissions and owner of that " "directory. If executing pip with sudo, you may want " "sudo's -H flag.", options.cache_dir, ) options.cache_dir = None with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=options.download_dir, ignore_installed=True, ignore_dependencies=options.ignore_dependencies, session=session, isolated=options.isolated_mode, require_hashes=options.require_hashes) self.populate_requirement_set(requirement_set, args, options, finder, session, self.name, None) if not requirement_set.has_requirements: return requirement_set.prepare_files(finder) downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info('Successfully downloaded %s', downloaded) # Clean up if not options.no_clean: requirement_set.cleanup_files() return requirement_set