def _clean_pip_requires(self, requires_files):
    """Rewrite pip 'requires' files so forced package versions win.

    For every file in ``requires_files``, each requirement line whose key
    matches one of ``self.forced_packages`` is replaced with the forced
    requirement's string form.  Changed files are backed up before being
    overwritten, and the pip helper caches are dropped so later reads see
    the updated contents.

    :param requires_files: iterable of requirement-file paths to adjust.
    """
    # Fixup incompatible dependencies
    if not (requires_files and self.forced_packages):
        return
    utils.log_iterable(sorted(requires_files), logger=LOG,
                       header="Adjusting %s pip 'requires' files"
                              % (len(requires_files)))
    forced_by_key = {pkg.key: pkg for pkg in self.forced_packages}
    mutated = False
    for fn in requires_files:
        old_lines = sh.load_file(fn).splitlines()
        new_lines = []
        for line in old_lines:
            # NOTE(review): narrowed from a bare ``except Exception`` so
            # real programming errors are no longer silently swallowed;
            # only the expected parse failures are tolerated here.
            try:
                req = pip_helper.extract_requirement(line)
            except (ValueError, TypeError):
                req = None
            if req is not None and req.key in forced_by_key:
                new_lines.append(str(forced_by_key[req.key]))
            else:
                # We don't force this package (or the line isn't a
                # requirement at all); keep it untouched.
                new_lines.append(line)
        # Only rewrite (and back up) files whose contents actually changed;
        # previously every file was rewritten unconditionally.
        if new_lines != old_lines:
            mutated = True
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(new_lines))
            sh.write_file_and_backup(fn, contents)
    # NOTE(imelnikov): after updating requirement lists we should re-fetch
    # data from them again, so we drop pip helper caches here:
    if mutated:
        pip_helper.drop_caches()
def _scan_pip_requires(self, requires_files):
    """Scan pip 'requires' files, validating and applying forced pins.

    Each requirement found in ``requires_files`` is first validated
    against the python eggs this instance will build (raising a
    ``DependencyException`` on a version conflict) and then, when a
    matching entry exists in ``self.forced_pips``, rewritten to that
    forced requirement.  Altered files are backed up before writing and
    the pip helper caches are dropped when anything changed.

    :param requires_files: iterable of requirement-file paths to scan.
    :raises exc.DependencyException: when a requirement cannot be
        satisfied by the version of an egg we are about to install.
    """
    own_eggs = self._python_eggs(False)

    def validate_requirement(filename, source_req):
        # Locate the egg (if any) that we ourselves will install for
        # this requirement's key.
        install_egg = next((egg for egg in own_eggs
                            if egg['name'] == source_req.key), None)
        if install_egg is None:
            return
        # Ensure what we are about to install/create will actually work
        # with the desired version. If it is not compatible then we should
        # abort and someone should update the tag/branch in the origin
        # file (or fix it via some other mechanism).
        if install_egg['version'] not in source_req:
            raise exc.DependencyException(
                "Can not satisfy '%s' with '%s', version"
                " conflict found in %s"
                % (source_req, install_egg['req'], filename))

    def replace_forced_requirements(filename, forced_by_key):
        original = sh.load_file(filename).splitlines()
        rewritten = []
        alterations = []
        for line in original:
            try:
                parsed = pip_helper.extract_requirement(line)
            except (ValueError, TypeError):
                parsed = None  # not a requirement line; keep verbatim
            if parsed:
                validate_requirement(filename, parsed)
                forced = forced_by_key.get(parsed.key)
                if forced is not None:
                    forced_text = str(forced)
                    parsed_text = str(parsed)
                    if forced_text != parsed_text:
                        line = forced_text
                        alterations.append(
                            "%s => %s" % (colorizer.quote(parsed_text),
                                          colorizer.quote(forced_text)))
            rewritten.append(line)
        if alterations:
            # Back up the original before replacing its contents.
            body = "\n".join(rewritten)
            sh.write_file_and_backup(
                filename,
                "# Cleaned on %s\n\n%s\n" % (utils.iso8601(), body))
            utils.log_iterable(
                alterations, logger=LOG,
                header="Replaced %s requirements in %s"
                       % (len(alterations), filename),
                color=None)
        return len(alterations)

    def on_replace_done(filename, time_taken):
        LOG.debug("Replacing potential forced requirements in %s"
                  " took %s seconds",
                  colorizer.quote(filename), time_taken)

    if not requires_files:
        return
    requires_files = sorted(requires_files)
    utils.log_iterable(requires_files, logger=LOG,
                       header="Scanning %s pip 'requires' files"
                              % (len(requires_files)))
    forced_by_key = {pkg.key: pkg for pkg in self.forced_pips}
    mutations = 0
    for fn in requires_files:
        LOG.debug("Replacing any potential forced requirements in %s",
                  colorizer.quote(fn))
        mutations += utils.time_it(functools.partial(on_replace_done, fn),
                                   replace_forced_requirements,
                                   fn, forced_by_key)
    # NOTE(imelnikov): after updating requirement lists we should re-fetch
    # data from them again, so we drop pip helper caches here.
    if mutations > 0:
        pip_helper.drop_caches()
def _scan_pip_requires(self, requires_files):
    """Scan pip 'requires' files, validating and applying forced pins.

    Each requirement line is validated against the python eggs we will
    install ourselves (raising on a version conflict) and, when a
    matching package exists in ``self.forced_packages``, rewritten to
    that forced requirement.  Files that changed are backed up before
    being overwritten, and the pip helper caches are dropped when any
    mutation occurred.

    :param requires_files: iterable of requirement-file paths to scan.
    :raises exc.DependencyException: when a requirement cannot be
        satisfied by the version of an egg we are about to install.
    """
    def validate_requirement(filename, source_req):
        install_egg = None
        for egg_info in self._python_eggs:
            if egg_info['name'] == source_req.key:
                install_egg = egg_info
                break
        if not install_egg:
            return
        # Ensure what we are about to install/create will actually work
        # with the desired version. If it is not compatible then we should
        # abort and someone should update the tag/branch in the origin
        # file (or fix it via some other mechanism).
        if install_egg['version'] not in source_req:
            msg = ("Can not satisfy '%s' with '%s', version"
                   " conflict found in %s")
            raise exc.DependencyException(msg % (source_req,
                                                 install_egg['req'],
                                                 filename))

    if not requires_files:
        return
    # Sort once and reuse; previously the list was sorted twice (once for
    # logging and again for iteration).
    requires_files = sorted(requires_files)
    utils.log_iterable(requires_files, logger=LOG,
                       header="Scanning %s pip 'requires' files"
                              % (len(requires_files)))
    forced_by_key = {pkg.key: pkg for pkg in self.forced_packages}
    mutations = 0
    for fn in requires_files:
        old_lines = sh.load_file(fn).splitlines()
        new_lines = []
        alterations = []
        for line in old_lines:
            try:
                source_req = pip_helper.extract_requirement(line)
            except (ValueError, TypeError):
                # Not a parsable requirement line; keep it verbatim.
                pass
            else:
                if source_req:
                    validate_requirement(fn, source_req)
                    try:
                        replace_req = forced_by_key[source_req.key]
                    except KeyError:
                        # Package is not forced; leave the line alone.
                        pass
                    else:
                        replace_req = str(replace_req)
                        source_req = str(source_req)
                        if replace_req != source_req:
                            line = replace_req
                            alterations.append(
                                "%s => %s"
                                % (colorizer.quote(source_req),
                                   colorizer.quote(replace_req)))
            new_lines.append(line)
        if alterations:
            # Back up the original before replacing its contents.
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(new_lines))
            sh.write_file_and_backup(fn, contents)
            mutations += len(alterations)
            utils.log_iterable(alterations, logger=LOG,
                               header="Replaced %s requirements in %s"
                                      % (len(alterations), fn),
                               color=None)
    # NOTE(imelnikov): after updating requirement lists we should re-fetch
    # data from them again, so we drop pip helper caches here.
    if mutations > 0:
        pip_helper.drop_caches()