def _filter_download_requires(self):
    """Split ``self.pips_to_install`` into downloads vs yum-satisfied pips.

    Requirements that are already available as RPMs are logged (grouped
    by yum repository) and dropped; the remaining original requirement
    lines are returned for downloading.

    :returns: list of raw requirement lines that must be downloaded
    """
    yum_map = self._get_known_yum_packages()
    # Parse each raw requirement line exactly once (previously each line
    # was parsed twice); remember the original line so url-style
    # requirements can be handed back verbatim.
    pip_origins = {}
    req_to_install = []
    for line in self.pips_to_install:
        req = pip_helper.extract_requirement(line)
        pip_origins[req.key] = line
        req_to_install.append(req)
    requested_names = [req.key for req in req_to_install]
    rpm_to_install = self._convert_names_python2rpm(requested_names)
    pips_to_download = []
    satisfied_list = []
    for (req, rpm_name) in zip(req_to_install, rpm_to_install):
        (version, repo) = self._find_yum_match(yum_map, req, rpm_name)
        if not repo:
            # We need the source requirement in case it's a url.
            pips_to_download.append(pip_origins[req.key])
        else:
            satisfied_list.append((req, rpm_name, version, repo))
    if satisfied_list:
        # Organize by repo so each repository's matches log together.
        repos = collections.defaultdict(list)
        for (req, rpm_name, version, repo) in satisfied_list:
            repos[repo].append("%s as %s-%s" % (req, rpm_name, version))
        for r in sorted(repos.keys()):
            header = ("%s Python packages are already available "
                      "as RPMs from repository %s")
            header = header % (len(repos[r]), colorizer.quote(r))
            utils.log_iterable(sorted(repos[r]), logger=LOG, header=header)
    return pips_to_download
def _filter_download_requires(self):
    """Determine which pips need downloading vs already-packaged RPMs.

    Pips satisfied by an existing RPM are logged per-repository and
    excluded; everything else is returned as its original requirement
    line (important when the line is a url).
    """
    yum_map = self._get_known_yum_packages()
    # BUGFIX: parse each line once instead of twice; keep the original
    # line around for url-style requirements.
    pip_origins = {}
    req_to_install = []
    for line in self.pips_to_install:
        req = pip_helper.extract_requirement(line)
        pip_origins[req.key] = line
        req_to_install.append(req)
    requested_names = [req.key for req in req_to_install]
    rpm_to_install = self._convert_names_python2rpm(requested_names)
    pips_to_download = []
    satisfied_list = []
    for (req, rpm_name) in zip(req_to_install, rpm_to_install):
        (version, repo) = self._find_yum_match(yum_map, req, rpm_name)
        if not repo:
            # We need the source requirement in case it's a url.
            pips_to_download.append(pip_origins[req.key])
        else:
            satisfied_list.append((req, rpm_name, version, repo))
    if satisfied_list:
        # Organize by repo
        repos = collections.defaultdict(list)
        for (req, rpm_name, version, repo) in satisfied_list:
            repos[repo].append("%s as %s-%s" % (req, rpm_name, version))
        for r in sorted(repos.keys()):
            header = ("%s Python packages are already available "
                      "as RPMs from repository %s")
            header = header % (len(repos[r]), colorizer.quote(r))
            utils.log_iterable(sorted(repos[r]), logger=LOG, header=header)
    return pips_to_download
def package_instance(self, instance):
    """Install a python instance and its requirements into the venv.

    Non-python instances are skipped with a warning. Requirements come
    from `_filter_download_requires()` plus the instance's egg-info
    dependency lists (deduplicated by requirement key, first seen wins).
    """
    # Skip things that aren't python...
    if self._is_buildable(instance):
        requires_what = self._filter_download_requires()
        # Track requirement keys already present so egg-info deps that
        # duplicate them are not added twice.
        requires_keys = set()
        for req in requires_what:
            if isinstance(req, six.string_types):
                req = pip_helper.extract_requirement(req)
            requires_keys.add(req.key)
        egg_info = getattr(instance, 'egg_info', None)
        if egg_info is not None:
            # Ensure we have gotten all the things...
            # Test requirements are included only when the instance's
            # 'use_tests_requires' option allows it (default: on).
            test_dependencies = (egg_info.get('test_dependencies', [])
                                 if instance.get_bool_option(
                                     'use_tests_requires', default_value=True)
                                 else [])
            for req in itertools.chain(egg_info.get('dependencies', []),
                                       test_dependencies):
                if isinstance(req, six.string_types):
                    req = pip_helper.extract_requirement(req)
                if req.key not in requires_keys:
                    requires_what.append(req)
                    requires_keys.add(req.key)
        # Install the dependencies first, then the application itself.
        self._install_into_venv(instance, requires_what)
        self._install_into_venv(instance, [instance.get_option('app_dir')])
    else:
        LOG.warn("Skipping building %s (not python)",
                 colorizer.quote(instance.name, quote_color='red'))
def test_example(self, _name, example):
    """Run multipip over one example and verify its normalized output."""
    (stdout, stderr) = self._run_multipip(example['requirements'])
    expected_normalized = [
        pip_helper.extract_requirement(entry)
        for entry in example['expected'].strip().splitlines()
    ]
    parsed_normalized = [
        pip_helper.extract_requirement(entry)
        for entry in stdout.strip().splitlines()
    ]
    self.assertEquivalentRequirements(expected_normalized, parsed_normalized)
    if 'conflicts' in example:
        self.assertEqual(example['conflicts'],
                         self._extract_conflicts(stderr))
def _filter_download_requires(self):
    """Split ``self.pips_to_install`` into downloads vs yum-satisfied pips.

    Matches against known yum packages are logged per-repository and
    recorded (one JSON object per line) into
    ``self.yum_satisfies_filename``; unmatched requirements are returned
    as their original lines for downloading.
    """
    yum_map = self._get_known_yum_packages()
    # BUGFIX: parse each requirement line once (previously parsed twice);
    # keep the original line so url-style requirements survive.
    pip_origins = {}
    req_to_install = []
    for line in self.pips_to_install:
        req = pip_helper.extract_requirement(line)
        pip_origins[req.key] = line
        req_to_install.append(req)
    requested_names = [req.key for req in req_to_install]
    rpm_names = self.py2rpm_helper.names_to_rpm_names(requested_names)
    pips_to_download = []
    satisfied_list = []
    for req in req_to_install:
        rpm_name = rpm_names[req.key]
        rpm_info = self._find_yum_match(yum_map, req, rpm_name)
        if not rpm_info:
            # We need the source requirement in case it's a url.
            pips_to_download.append(pip_origins[req.key])
        else:
            satisfied_list.append((req, rpm_name, rpm_info))
    yum_buff = six.StringIO()
    if satisfied_list:
        # Organize by repo
        repos = collections.defaultdict(list)
        for (req, rpm_name, rpm_info) in satisfied_list:
            repo = rpm_info['repo']
            rpm_found = '%s-%s' % (rpm_name, rpm_info['version'])
            repos[repo].append(
                "%s as %s" % (colorizer.quote(req),
                              colorizer.quote(rpm_found)))
            dep_info = {
                'requirement': str(req),
                'rpm': rpm_info,
            }
            yum_buff.write(json.dumps(dep_info))
            yum_buff.write("\n")
        for r in sorted(repos.keys()):
            header = ("%s Python packages are already available "
                      "as RPMs from repository %s")
            header = header % (len(repos[r]), colorizer.quote(r))
            utils.log_iterable(sorted(repos[r]), logger=LOG,
                               header=header, color=None)
    sh.write_file(self.yum_satisfies_filename, yum_buff.getvalue())
    return pips_to_download
def _fetch_missing_extra(python_names_in, python_names_out):
    """Return (missing, extra) requirement-string sets between two lists.

    "Missing" are requirements present only in the input list; "extra"
    are requirements present only in the output list.
    """
    reqs_in = set(pip_helper.extract_requirement(name)
                  for name in python_names_in)
    reqs_out = set(pip_helper.extract_requirement(name)
                   for name in python_names_out)
    # Set difference gives the same membership result as the original
    # per-requirement containment checks.
    python_missing_names = set(str(req) for req in (reqs_in - reqs_out))
    python_extra_names = set(str(req) for req in (reqs_out - reqs_in))
    return (python_missing_names, python_extra_names)
def download_dependencies(self): """Download dependencies from `$deps_dir/download-requires`.""" # NOTE(aababilov): do not drop download_dir - it can be reused sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter) pips_to_download = self._filter_download_requires() sh.write_file(self.download_requires_filename, "\n".join([str(req) for req in pips_to_download])) if not pips_to_download: return ([], []) # NOTE(aababilov): user could have changed persona, so, # check that all requirements are downloaded.... if self._requirements_satisfied(pips_to_download, self.download_dir): LOG.info("All python dependencies have been already downloaded") else: def on_download_finish(time_taken): LOG.info("Took %0.2f seconds to download...", time_taken) def try_download(attempt): LOG.info("Downloading %s dependencies with pip (attempt %s)...", len(pips_to_download), attempt) output_filename = sh.joinpths(self.log_dir, "pip-download-attempt-%s.log" % (attempt)) LOG.info("Please wait this may take a while...") LOG.info("Check %s for download activity details...", colorizer.quote(output_filename)) utils.time_it(on_download_finish, pip_helper.download_dependencies, self.download_dir, pips_to_download, output_filename) utils.retry(self.MAX_PIP_DOWNLOAD_ATTEMPTS, self.PIP_DOWNLOAD_DELAY, try_download) pips_downloaded = [pip_helper.extract_requirement(p) for p in pips_to_download] what_downloaded = self._examine_download_dir(pips_downloaded, self.download_dir) return (pips_downloaded, what_downloaded)
def replace_forced_requirements(fn, forced_by_key):
    """Replace requirement lines in file *fn* using *forced_by_key*.

    Lines that parse as requirements and whose key is in
    ``forced_by_key`` are rewritten to the forced requirement; all other
    lines pass through unchanged. The file is only rewritten (with a
    backup) when something actually changed.

    :returns: number of requirement lines replaced
    """
    old_lines = sh.load_file(fn).splitlines()
    new_lines = []
    alterations = []
    for line in old_lines:
        try:
            source_req = pip_helper.extract_requirement(line)
        except (ValueError, TypeError):
            # Not a parsable requirement (comment/blank/etc) - keep as-is.
            pass
        else:
            if source_req:
                validate_requirement(fn, source_req)
                try:
                    replace_req = forced_by_key[source_req.key]
                except KeyError:
                    # This requirement is not forced - keep as-is.
                    pass
                else:
                    replace_req = str(replace_req)
                    source_req = str(source_req)
                    if replace_req != source_req:
                        line = replace_req
                        alterations.append("%s => %s"
                                           % (colorizer.quote(source_req),
                                              colorizer.quote(replace_req)))
        new_lines.append(line)
    if alterations:
        contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                "\n".join(new_lines))
        sh.write_file_and_backup(fn, contents)
        utils.log_iterable(alterations, logger=LOG,
                           header="Replaced %s requirements in %s"
                                  % (len(alterations), fn),
                           color=None)
    return len(alterations)
def download_dependencies(self):
    """Download dependencies from `$deps_dir/download-requires`."""
    # NOTE(aababilov): do not drop download_dir - it can be reused
    sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
    pips_to_download = self._filter_download_requires()
    requires_text = "\n".join([str(req) for req in pips_to_download])
    sh.write_file(self.download_requires_filename, requires_text)
    if not pips_to_download:
        return ([], [])
    # NOTE(aababilov): user could have changed persona, so,
    # check that all requirements are downloaded
    already_done = (sh.isfile(self.downloaded_flag_file) and
                    self._requirements_satisfied(pips_to_download,
                                                 self.download_dir))
    if already_done:
        LOG.info("All python dependencies have been already downloaded")
    else:
        def run_attempt(attempt):
            # Single retryable download attempt, logging to its own file.
            LOG.info("Downloading %s dependencies with pip (attempt %s)...",
                     len(pips_to_download), attempt)
            log_path = sh.joinpths(self.log_dir,
                                   "pip-download-attempt-%s.log" % (attempt))
            pip_helper.download_dependencies(self.download_dir,
                                             pips_to_download, log_path)

        utils.retry(self.MAX_PIP_DOWNLOAD_ATTEMPTS,
                    self.PIP_DOWNLOAD_DELAY, run_attempt)
        # NOTE(harlowja): Mark that we completed downloading successfully
        sh.touch_file(self.downloaded_flag_file, die_if_there=False,
                      quiet=True, tracewriter=self.tracewriter)
    pips_downloaded = [pip_helper.extract_requirement(p)
                       for p in pips_to_download]
    what_downloaded = self._examine_download_dir(pips_downloaded,
                                                 self.download_dir)
    return (pips_downloaded, what_downloaded)
def _filter_download_requires(self):
    """Filter pips into those needing download vs yum-satisfiable ones.

    Yum-satisfied requirements are logged per-repository and written as
    JSON lines into ``self.yum_satisfies_filename``; the rest are
    returned as their original requirement lines.
    """
    yum_map = self._get_known_yum_packages()
    # BUGFIX: each line was parsed twice by extract_requirement; parse
    # once and reuse. Original lines are kept for url requirements.
    pip_origins = {}
    req_to_install = []
    for line in self.pips_to_install:
        req = pip_helper.extract_requirement(line)
        pip_origins[req.key] = line
        req_to_install.append(req)
    requested_names = [req.key for req in req_to_install]
    rpm_names = self.py2rpm_helper.names_to_rpm_names(requested_names)
    pips_to_download = []
    satisfied_list = []
    for req in req_to_install:
        rpm_name = rpm_names[req.key]
        rpm_info = self._find_yum_match(yum_map, req, rpm_name)
        if not rpm_info:
            # We need the source requirement in case it's a url.
            pips_to_download.append(pip_origins[req.key])
        else:
            satisfied_list.append((req, rpm_name, rpm_info))
    yum_buff = six.StringIO()
    if satisfied_list:
        # Organize by repo
        repos = collections.defaultdict(list)
        for (req, rpm_name, rpm_info) in satisfied_list:
            repo = rpm_info['repo']
            rpm_found = '%s-%s' % (rpm_name, rpm_info['version'])
            repos[repo].append("%s as %s" % (colorizer.quote(req),
                                             colorizer.quote(rpm_found)))
            dep_info = {
                'requirement': str(req),
                'rpm': rpm_info,
            }
            yum_buff.write(json.dumps(dep_info))
            yum_buff.write("\n")
        for r in sorted(repos.keys()):
            header = ("%s Python packages are already available "
                      "as RPMs from repository %s")
            header = header % (len(repos[r]), colorizer.quote(r))
            utils.log_iterable(sorted(repos[r]), logger=LOG,
                               header=header, color=None)
    sh.write_file(self.yum_satisfies_filename, yum_buff.getvalue())
    return pips_to_download
def download_dependencies(self):
    """Download dependencies from `$deps_dir/download-requires`. """
    # NOTE(aababilov): do not drop download_dir - it can be reused
    sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
    download_requires_filename = sh.joinpths(self.deps_dir,
                                             "download-requires")
    raw_pips_to_download = self.filter_download_requires()
    # Persist the final download list for later inspection.
    sh.write_file(download_requires_filename,
                  "\n".join(str(req) for req in raw_pips_to_download))
    if not raw_pips_to_download:
        return ([], [])
    # Marker file created only after a fully successful download pass.
    downloaded_flag_file = sh.joinpths(self.deps_dir, "pip-downloaded")
    # NOTE(aababilov): user could have changed persona, so,
    # check that all requirements are downloaded
    if sh.isfile(downloaded_flag_file) and self._requirements_satisfied(
            raw_pips_to_download, self.download_dir):
        LOG.info("All python dependencies have been already downloaded")
    else:
        # Staging layout: downloads land in pip/download and are moved
        # into self.download_dir only after the attempts finish.
        pip_dir = sh.joinpths(self.deps_dir, "pip")
        pip_download_dir = sh.joinpths(pip_dir, "download")
        pip_build_dir = sh.joinpths(pip_dir, "build")
        # NOTE(aababilov): do not clean the cache, it is always useful
        pip_cache_dir = sh.joinpths(self.deps_dir, "pip-cache")
        pip_failures = []
        for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
            # NOTE(aababilov): pip has issues with already downloaded files
            sh.deldir(pip_dir)
            sh.mkdir(pip_download_dir, recurse=True)
            header = "Downloading %s python dependencies (attempt %s)"
            header = header % (len(raw_pips_to_download), attempt)
            utils.log_iterable(sorted(raw_pips_to_download), logger=LOG,
                               header=header)
            failed = False
            try:
                self._try_download_dependencies(attempt,
                                                raw_pips_to_download,
                                                pip_download_dir,
                                                pip_cache_dir, pip_build_dir)
                # A clean attempt clears any earlier recorded failures.
                pip_failures = []
            except exc.ProcessExecutionError as e:
                LOG.exception("Failed downloading python dependencies")
                pip_failures.append(e)
                failed = True
            if not failed:
                break
        # Move whatever was fetched into the reusable download dir.
        for filename in sh.listdir(pip_download_dir, files_only=True):
            sh.move(filename, self.download_dir, force=True)
        sh.deldir(pip_dir)
        if pip_failures:
            # Every attempt failed; surface the last error seen.
            raise pip_failures[-1]
        # Touch the marker so future runs can skip re-downloading.
        with open(downloaded_flag_file, "w"):
            pass
    pips_downloaded = [pip_helper.extract_requirement(p)
                       for p in raw_pips_to_download]
    self._examine_download_dir(pips_downloaded, self.download_dir)
    what_downloaded = sh.listdir(self.download_dir, files_only=True)
    return (pips_downloaded, what_downloaded)
def _gather_pips_to_install(self, requires_files, extra_pips=None):
    """Analyze requires_files and extra_pips.

    Updates `self.forced_packages` and `self.pips_to_install`.
    Writes requirements to `self.gathered_requires_filename`.
    """
    extra_pips = extra_pips or []
    # Multipip merges all requirement sources; names with incompatible
    # version constraints are reported on stderr.
    cmdline = [
        self.multipip_executable,
        "--skip-requirements-regex", "python.*client",
        "--pip", self.pip_executable,
    ]
    cmdline = cmdline + extra_pips + ["-r"] + requires_files
    cmdline.extend(["--ignore-package"])
    cmdline.extend(OPENSTACK_PACKAGES)
    cmdline.extend(SKIP_PACKAGE_NAMES)
    cmdline.extend(self.python_names)
    stdout, stderr = sh.execute(cmdline, check_exit_code=False)
    self.pips_to_install = list(utils.splitlines_not_empty(stdout))
    sh.write_file(self.gathered_requires_filename,
                  "\n".join(self.pips_to_install))
    utils.log_iterable(sorted(self.pips_to_install), logger=LOG,
                       header="Full known python dependency list")
    # Parse stderr into {package name: detail lines} for incompatible
    # requirement reports.
    incompatibles = collections.defaultdict(list)
    if stderr:
        current_name = ''
        for line in stderr.strip().splitlines():
            if line.endswith(": incompatible requirements"):
                current_name = line.split(":", 1)[0].lower().strip()
                if current_name not in incompatibles:
                    incompatibles[current_name] = []
            else:
                incompatibles[current_name].append(line)
    for (name, lines) in incompatibles.items():
        if not name:
            continue
        LOG.warn("Incompatible requirements found for %s",
                 colorizer.quote(name, quote_color='red'))
        for line in lines:
            LOG.warn(line)
    if not self.pips_to_install:
        # BUGFIX: the two concatenated literals previously joined as
        # "found.Something" with no separating space.
        LOG.error("No dependencies for OpenStack found. "
                  "Something went wrong. Please check:")
        LOG.error("'%s'" % "' '".join(cmdline))
        raise exc.DependencyException("No dependencies for OpenStack found")
    # Translate those that we altered requirements for into a set of forced
    # requirements file (and associated list).
    self.forced_packages = []
    for req in [pip_helper.extract_requirement(line)
                for line in self.pips_to_install]:
        if req.key in incompatibles:
            self.forced_packages.append(req)
    sh.write_file(self.forced_requires_filename,
                  "\n".join([str(req) for req in self.forced_packages]))
def _gather_pips_to_install(self, requires_files, extra_pips=None):
    """Analyze requires_files and extra_pips.

    Updates `self.forced_pips` and `self.pips_to_install`.
    Writes requirements to `self.gathered_requires_filename`.
    """
    # Names that should never be resolved (provided some other way).
    ignore_pips = set(self.python_names)
    ignore_pips.update(self.ignore_pips)
    # Distro configuration may pin/force specific pips.
    forced_pips = set()
    forced_distro_pips = self.distro.get_dependency_config("forced_pips",
                                                           quiet=True)
    if forced_distro_pips:
        forced_pips.update(forced_distro_pips)
    compatibles, incompatibles = self.multipip.resolve(
        extra_pips, requires_files, ignore_pips, forced_pips)
    self.pips_to_install = compatibles
    sh.write_file(self.gathered_requires_filename,
                  "\n".join(self.pips_to_install))
    # Re-read what was just written so logging shows parsed requirements.
    pip_requirements, raw_requirements = pip_helper.read_requirement_files(
        [self.gathered_requires_filename])
    pips_to_install = sorted(raw_requirements, cmp=sort_req)
    utils.log_iterable(pips_to_install, logger=LOG,
                       header="Full known python dependency list")
    for (name, lines) in incompatibles.items():
        LOG.warn("Incompatible requirements found for %s",
                 colorizer.quote(name, quote_color='red'))
        for line in lines:
            LOG.warn(line)
    if not self.pips_to_install:
        LOG.error("No valid dependencies found. Something went wrong.")
        raise exc.DependencyException("No valid dependencies found")
    # Translate those that we altered requirements for into a set of forced
    # requirements file (and associated list). Deduplicated by key,
    # preserving first occurrence, then re-sorted.
    self.forced_pips = []
    forced_pip_keys = []
    for req in [pip_helper.extract_requirement(line)
                for line in self.pips_to_install]:
        if req.key in incompatibles and req.key not in forced_pip_keys:
            self.forced_pips.append(req)
            forced_pip_keys.append(req.key)
    self.forced_pips = sorted(self.forced_pips, cmp=sort_req)
    forced_pips = [str(req) for req in self.forced_pips]
    utils.log_iterable(forced_pips, logger=LOG,
                       header="Automatically forced python dependencies")
    sh.write_file(self.forced_requires_filename, "\n".join(forced_pips))
def download_dependencies(self): """Download dependencies from `$deps_dir/download-requires`.""" # NOTE(aababilov): do not drop download_dir - it can be reused sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter) pips_to_download = self._filter_download_requires() sh.write_file(self.download_requires_filename, "\n".join([str(req) for req in pips_to_download])) if not pips_to_download: return ([], []) # NOTE(aababilov): user could have changed persona, so, # check that all requirements are downloaded if (sh.isfile(self.downloaded_flag_file) and self._requirements_satisfied(pips_to_download, self.download_dir)): LOG.info("All python dependencies have been already downloaded") else: pip_failures = [] for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS): # NOTE(aababilov): pip has issues with already downloaded files for filename in sh.listdir(self.download_dir, files_only=True): sh.unlink(filename) header = "Downloading %s python dependencies (attempt %s)" header = header % (len(pips_to_download), attempt + 1) utils.log_iterable(sorted(pips_to_download), logger=LOG, header=header) failed = False try: self._try_download_dependencies(attempt + 1, pips_to_download, self.download_dir) pip_failures = [] except exc.ProcessExecutionError as e: LOG.exception("Failed downloading python dependencies") pip_failures.append(e) failed = True if not failed: break if pip_failures: raise pip_failures[-1] # NOTE(harlowja): Mark that we completed downloading successfully sh.touch_file(self.downloaded_flag_file, die_if_there=False, quiet=True, tracewriter=self.tracewriter) pips_downloaded = [ pip_helper.extract_requirement(p) for p in pips_to_download ] self._examine_download_dir(pips_downloaded, self.download_dir) return (pips_downloaded, sh.listdir(self.download_dir, files_only=True))
def _requirements_satisfied(pips_list, download_dir):
    """Report whether every pip in pips_list has a matching downloaded
    archive already present in download_dir."""
    # Map requirement key -> version for every archive on disk.
    available = {}
    for filename in sh.listdir(download_dir, files_only=True):
        archive_req = pip_helper.get_archive_details(filename)["req"]
        available[archive_req.key] = archive_req.specs[0][1]
    for req_str in pips_list:
        wanted = pip_helper.extract_requirement(req_str)
        if wanted.key not in available:
            return False
        if available[wanted.key] not in wanted:
            return False
    return True
def gather_pips_to_install(self, requires_files, extra_pips=None):
    """Analyze requires_files and extra_pips.

    Updates `self.forced_packages` and `self.pips_to_install`.
    Writes requirements to `self.gathered_requires_filename`.

    :raises RuntimeError: when multipip produced no dependencies at all
    """
    extra_pips = extra_pips or []
    # Multipip merges all requirement sources; incompatible version
    # reports arrive on stderr.
    cmdline = [
        self.multipip_executable,
        "--skip-requirements-regex", "python.*client",
        "--pip", self.pip_executable,
    ]
    cmdline = cmdline + extra_pips + ["-r"] + requires_files
    cmdline.extend(["--ignore-package"])
    cmdline.extend(OPENSTACK_PACKAGES)
    cmdline.extend(self.python_names)
    output = sh.execute(cmdline, check_exit_code=False)
    self.pips_to_install = list(utils.splitlines_not_empty(output[0]))
    conflict_descr = output[1].strip()
    forced_keys = set()
    if conflict_descr:
        for line in conflict_descr.splitlines():
            LOG.warning(line)
            if line.endswith(": incompatible requirements"):
                forced_keys.add(line.split(":", 1)[0].lower())
    sh.write_file(self.gathered_requires_filename,
                  "\n".join(self.pips_to_install))
    if not self.pips_to_install:
        # BUGFIX: the concatenated literals previously rendered as
        # "found.Something" with no separating space.
        LOG.error("No dependencies for OpenStack found. "
                  "Something went wrong. Please check:")
        LOG.error("'%s'" % "' '".join(cmdline))
        raise RuntimeError("No dependencies for OpenStack found")
    utils.log_iterable(sorted(self.pips_to_install), logger=LOG,
                       header="Full known python dependency list")
    # Requirements multipip flagged as conflicting become "forced".
    self.forced_packages = []
    for line in self.pips_to_install:
        req = pip_helper.extract_requirement(line)
        if req.key in forced_keys:
            self.forced_packages.append(req)
    sh.write_file(self.forced_requires_filename,
                  "\n".join(str(req) for req in self.forced_packages))
def _all_rpm_names(self):
    """Collect every rpm name needed: gathered python requirements,
    base requires and per-instance package names."""
    # This file should have all the requirements (including test ones)
    # that we need to install (and which should have been built as rpms
    # in the previous build stages).
    raw_lines = sh.load_file(self.gathered_requires_filename).splitlines()
    req_names = [
        pip_helper.extract_requirement(line.strip()).key
        for line in raw_lines if line.strip()
    ]
    rpm_names = set(self._convert_names_python2rpm(req_names))
    rpm_names |= self.requirements["requires"]
    for inst in self.instances:
        rpm_names |= inst.package_names()
    return list(rpm_names)
def _requirements_satisfied(pips_list, download_dir):
    """Check that each requirement in pips_list is already covered by an
    archive sitting in download_dir."""
    archives = [pip_helper.get_archive_details(f)["req"]
                for f in sh.listdir(download_dir, files_only=True)]
    # key -> downloaded version (later archives overwrite earlier ones,
    # matching the original dict() construction).
    by_key = {}
    for archive_req in archives:
        by_key[archive_req.key] = archive_req.specs[0][1]
    for entry in pips_list:
        needed = pip_helper.extract_requirement(entry)
        if needed.key not in by_key:
            return False
        if by_key[needed.key] not in needed:
            return False
    return True
def _gather_pips_to_install(self, requires_files, extra_pips=None):
    """Analyze requires_files and extra_pips.

    Updates `self.forced_pips` and `self.pips_to_install`.
    Writes requirements to `self.gathered_requires_filename`.
    """
    ignore_pips = set(self.python_names)
    ignore_pips.update(self.ignore_pips)
    forced_pips = set()
    forced_distro_pips = self.distro.get_dependency_config("forced_pips",
                                                           quiet=True)
    if forced_distro_pips:
        forced_pips.update(forced_distro_pips)
    compatibles, incompatibles = self.multipip.resolve(
        extra_pips, requires_files, ignore_pips, forced_pips)
    self.pips_to_install = compatibles
    sh.write_file(self.gathered_requires_filename,
                  "\n".join(self.pips_to_install))
    pip_requirements, raw_requirements = pip_helper.read_requirement_files(
        [self.gathered_requires_filename])
    pips_to_install = sorted(raw_requirements, cmp=sort_req)
    utils.log_iterable(pips_to_install, logger=LOG,
                       header="Full known python dependency list")
    for (name, lines) in incompatibles.items():
        LOG.warn("Incompatible requirements found for %s",
                 colorizer.quote(name, quote_color='red'))
        for line in lines:
            LOG.warn(line)
    if not self.pips_to_install:
        LOG.error("No valid dependencies found. Something went wrong.")
        raise exc.DependencyException("No valid dependencies found")
    # Translate those that we altered requirements for into a set of forced
    # requirements file (and associated list).
    seen_keys = set()
    forced = []
    for line in self.pips_to_install:
        req = pip_helper.extract_requirement(line)
        if req.key in incompatibles and req.key not in seen_keys:
            forced.append(req)
            seen_keys.add(req.key)
    self.forced_pips = sorted(forced, cmp=sort_req)
    forced_pips = [str(req) for req in self.forced_pips]
    utils.log_iterable(forced_pips, logger=LOG,
                       header="Automatically forced python dependencies")
    sh.write_file(self.forced_requires_filename, "\n".join(forced_pips))
def _get_rpm_names(self, from_deps=True, from_instances=True):
    """Build the sorted list of rpm names (optionally version-pinned)
    needed, drawn from instances and/or the gathered requirements file.

    :param from_deps: include requirements from the gathered file
    :param from_instances: include instance packages and egg-info reqs
    :returns: sorted list of formatted "rpm-name[,specs]" strings
    """
    desired_rpms = []
    py_reqs = set()
    if from_instances:
        inst_packages = list(self.requirements["requires"])
        for inst in self.instances:
            inst_packages.extend(inst.package_names())
            if sh.isdir(inst.get_option("app_dir")):
                try:
                    py_req = inst.egg_info['req']
                except AttributeError:
                    # Instance has no egg_info - nothing python to add.
                    pass
                else:
                    # Prefer the instance's own rpm name (if a template
                    # was found); otherwise fall back to the generic
                    # python->rpm name translation below.
                    rpm_name, _ = self._find_template_and_rpm_name(
                        inst,
                        inst.get_option('build_name',
                                        default_value=inst.name))
                    if rpm_name is not None:
                        desired_rpms.append((rpm_name, py_req))
                    else:
                        py_reqs.add(py_req)
        for rpm_name in inst_packages:
            desired_rpms.append((rpm_name, None))
    if from_deps:
        # This file should have all the requirements (including test ones)
        # that we need to install (and which should have been built as rpms
        # in the previous build stages).
        requires = sh.load_file(
            self.gathered_requires_filename).splitlines()
        for line in [line.strip() for line in requires if line.strip()]:
            py_reqs.add(pip_helper.extract_requirement(line))
    rpm_names = self.py2rpm_helper.names_to_rpm_names(
        [req.key for req in py_reqs])
    desired_rpms.extend((rpm_names[req.key], req) for req in py_reqs)

    def _format_name(rpm_name, py_req):
        # "name" or "name<specs>" when a python requirement pins it.
        full_name = str(rpm_name).strip()
        if py_req is not None:
            full_name += ','.join(''.join(x) for x in py_req.specs)
        return full_name

    return sorted(
        _format_name(rpm_name, py_req)
        for rpm_name, py_req in desired_rpms)
def download_dependencies(self):
    """Download dependencies from `$deps_dir/download-requires`."""
    # NOTE(aababilov): do not drop download_dir - it can be reused
    sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
    pips_to_download = self._filter_download_requires()
    sh.write_file(self.download_requires_filename,
                  "\n".join(str(req) for req in pips_to_download))
    if not pips_to_download:
        return ([], [])
    # NOTE(aababilov): user could have changed persona, so,
    # check that all requirements are downloaded....
    satisfied = self._requirements_satisfied(pips_to_download,
                                             self.download_dir)
    if satisfied:
        LOG.info("All python dependencies have been already downloaded")
    else:
        utils.retry(self.retries, self.retry_delay,
                    self._try_download, pips_to_download)
    downloaded_reqs = [pip_helper.extract_requirement(p)
                       for p in pips_to_download]
    what_downloaded = self._examine_download_dir(downloaded_reqs,
                                                 self.download_dir)
    return (downloaded_reqs, what_downloaded)
def _get_rpm_names(self, from_deps=True, from_instances=True):
    """Return sorted rpm name strings required, from instance metadata
    and/or the gathered requirements file.

    Names derived from python requirements carry the requirement's
    version specs appended after a comma.
    """
    desired_rpms = []
    py_reqs = set()
    if from_instances:
        inst_packages = list(self.requirements["requires"])
        for inst in self.instances:
            inst_packages.extend(inst.package_names())
            if sh.isdir(inst.get_option("app_dir")):
                try:
                    py_req = inst.egg_info['req']
                except AttributeError:
                    # No egg_info on this instance; skip it.
                    pass
                else:
                    # Use the instance's templated rpm name when one
                    # exists; otherwise translate generically below.
                    rpm_name, _ = self._find_template_and_rpm_name(
                        inst,
                        inst.get_option('build_name',
                                        default_value=inst.name)
                    )
                    if rpm_name is not None:
                        desired_rpms.append((rpm_name, py_req))
                    else:
                        py_reqs.add(py_req)
        for rpm_name in inst_packages:
            desired_rpms.append((rpm_name, None))
    if from_deps:
        # This file should have all the requirements (including test ones)
        # that we need to install (and which should have been built as rpms
        # in the previous build stages).
        requires = sh.load_file(self.gathered_requires_filename).splitlines()
        for line in [line.strip() for line in requires if line.strip()]:
            py_reqs.add(pip_helper.extract_requirement(line))
    rpm_names = self.py2rpm_helper.names_to_rpm_names(
        [req.key for req in py_reqs])
    desired_rpms.extend((rpm_names[req.key], req) for req in py_reqs)

    def _format_name(rpm_name, py_req):
        # Format "name" or "name<specs>" for version-pinned entries.
        full_name = str(rpm_name).strip()
        if py_req is not None:
            full_name += ','.join(''.join(x) for x in py_req.specs)
        return full_name

    return sorted(_format_name(rpm_name, py_req)
                  for rpm_name, py_req in desired_rpms)
def download_dependencies(self): """Download dependencies from `$deps_dir/download-requires`.""" # NOTE(aababilov): do not drop download_dir - it can be reused sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter) pips_to_download = self._filter_download_requires() sh.write_file(self.download_requires_filename, "\n".join([str(req) for req in pips_to_download])) if not pips_to_download: return ([], []) # NOTE(aababilov): user could have changed persona, so, # check that all requirements are downloaded if (sh.isfile(self.downloaded_flag_file) and self._requirements_satisfied(pips_to_download, self.download_dir)): LOG.info("All python dependencies have been already downloaded") else: pip_failures = [] for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS): # NOTE(aababilov): pip has issues with already downloaded files for filename in sh.listdir(self.download_dir, files_only=True): sh.unlink(filename) header = "Downloading %s python dependencies (attempt %s)" header = header % (len(pips_to_download), attempt + 1) utils.log_iterable(sorted(pips_to_download), logger=LOG, header=header) failed = False try: self._try_download_dependencies(attempt + 1, pips_to_download, self.download_dir) pip_failures = [] except exc.ProcessExecutionError as e: LOG.exception("Failed downloading python dependencies") pip_failures.append(e) failed = True if not failed: break if pip_failures: raise pip_failures[-1] # NOTE(harlowja): Mark that we completed downloading successfully sh.touch_file(self.downloaded_flag_file, die_if_there=False, quiet=True, tracewriter=self.tracewriter) pips_downloaded = [pip_helper.extract_requirement(p) for p in pips_to_download] self._examine_download_dir(pips_downloaded, self.download_dir) return (pips_downloaded, sh.listdir(self.download_dir, files_only=True))
def _clean_pip_requires(self, requires_files):
    """Rewrite pip requires files so forced package versions are used.

    Any line whose parsed requirement key matches an entry in
    ``self.forced_packages`` is replaced by the forced requirement;
    every other line passes through unchanged. Originals are backed up.
    """
    # Fixup incompatible dependencies
    if not (requires_files and self.forced_packages):
        return
    utils.log_iterable(sorted(requires_files), logger=LOG,
                       header="Adjusting %s pip 'requires' files"
                              % (len(requires_files)))
    forced_by_key = dict((pkg.key, pkg) for pkg in self.forced_packages)
    for fn in requires_files:
        old_lines = sh.load_file(fn).splitlines()
        new_lines = []
        for line in old_lines:
            # NOTE(review): broad except is deliberate best-effort -
            # unparsable lines (comments, urls) and non-forced packages
            # both fall through to keeping the line as-is.
            try:
                req = pip_helper.extract_requirement(line)
                new_lines.append(str(forced_by_key[req.key]))
            except Exception:
                # we don't force the package or it has a bad format
                new_lines.append(line)
        contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                "\n".join(new_lines))
        sh.write_file_and_backup(fn, contents)
def download_dependencies(self):
    """Download dependencies from `$deps_dir/download-requires`."""
    # NOTE(aababilov): do not drop download_dir - it can be reused
    sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
    pips_to_download = self._filter_download_requires()
    sh.write_file(self.download_requires_filename,
                  "\n".join([str(req) for req in pips_to_download]))
    if not pips_to_download:
        return ([], [])
    # NOTE(aababilov): user could have changed persona, so,
    # check that all requirements are downloaded
    flag_present = sh.isfile(self.downloaded_flag_file)
    if flag_present and self._requirements_satisfied(pips_to_download,
                                                     self.download_dir):
        LOG.info("All python dependencies have been already downloaded")
    else:
        def one_attempt(attempt):
            # One retryable pass; each pass logs into its own file.
            LOG.info("Downloading %s dependencies with pip (attempt %s)...",
                     len(pips_to_download), attempt)
            attempt_log = sh.joinpths(
                self.log_dir, "pip-download-attempt-%s.log" % (attempt))
            pip_helper.download_dependencies(self.download_dir,
                                             pips_to_download,
                                             attempt_log)

        utils.retry(self.MAX_PIP_DOWNLOAD_ATTEMPTS,
                    self.PIP_DOWNLOAD_DELAY, one_attempt)
        # NOTE(harlowja): Mark that we completed downloading successfully
        sh.touch_file(self.downloaded_flag_file, die_if_there=False,
                      quiet=True, tracewriter=self.tracewriter)
    pips_downloaded = [pip_helper.extract_requirement(p)
                       for p in pips_to_download]
    what_downloaded = self._examine_download_dir(pips_downloaded,
                                                 self.download_dir)
    return (pips_downloaded, what_downloaded)
def download_dependencies(self):
    """Download dependencies from `$deps_dir/download-requires`."""
    # NOTE(aababilov): do not drop download_dir - it can be reused
    sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
    wanted_pips = self._filter_download_requires()
    sh.write_file(self.download_requires_filename,
                  "\n".join(str(req) for req in wanted_pips))
    if not wanted_pips:
        return ([], [])
    # NOTE(aababilov): user could have changed persona, so,
    # check that all requirements are downloaded....
    if self._requirements_satisfied(wanted_pips, self.download_dir):
        LOG.info("All python dependencies have been already downloaded")
    else:
        utils.retry(self.retries, self.retry_delay,
                    self._try_download, wanted_pips)
    downloaded_reqs = []
    for line in wanted_pips:
        downloaded_reqs.append(pip_helper.extract_requirement(line))
    downloaded_files = self._examine_download_dir(downloaded_reqs,
                                                  self.download_dir)
    return (downloaded_reqs, downloaded_files)
def replace_forced_requirements(fn, forced_by_key):
    """Swap forced requirement versions into the requirements file *fn*.

    Lines whose requirement key appears in ``forced_by_key`` are replaced
    with the forced requirement; all other lines are kept verbatim.  When
    at least one replacement happened the file is rewritten (with a
    backup) and the changes are logged.  Returns the replacement count.
    """
    rewritten = []
    changes = []
    for line in sh.load_file(fn).splitlines():
        try:
            found_req = pip_helper.extract_requirement(line)
        except (ValueError, TypeError):
            # Not a parseable requirement; keep the line untouched.
            found_req = None
        if found_req:
            # NOTE(review): validate_requirement is expected to be supplied
            # by the enclosing scope — confirm at integration time.
            validate_requirement(fn, found_req)
            if found_req.key in forced_by_key:
                wanted = str(forced_by_key[found_req.key])
                current = str(found_req)
                if wanted != current:
                    line = wanted
                    changes.append("%s => %s" % (colorizer.quote(current),
                                                 colorizer.quote(wanted)))
        rewritten.append(line)
    if changes:
        contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                "\n".join(rewritten))
        sh.write_file_and_backup(fn, contents)
        utils.log_iterable(changes, logger=LOG,
                           header="Replaced %s requirements in %s"
                                  % (len(changes), fn),
                           color=None)
    return len(changes)
def _all_rpm_names(self):
    """Return the sorted ``name,version`` strings of all required rpms.

    Loads the gathered requirements file (which should include test
    requirements), converts the python requirements (plus per-instance
    and distro-level package names) into rpm names, then validates via
    the yumfind helper that every rpm is still available.

    :returns: sorted list of ``"name,version"`` strings for located rpms.
    :raises excp.DependencyException: when any required rpm can not be
        located in the configured repositories.
    """
    # This file should have all the requirements (including test ones)
    # that we need to install (and which should have been built as rpms
    # in the previous build stages).
    gathered_requires = sh.load_file(
        self.gathered_requires_filename).splitlines()
    gathered_requires = [line.strip() for line in gathered_requires
                         if line.strip()]
    req_names = []
    reqs = []
    # Use a set for de-duplication; probing the list was O(n) per line.
    seen_keys = set()
    for line in gathered_requires:
        req = pip_helper.extract_requirement(line)
        if req.key in seen_keys:
            continue
        seen_keys.add(req.key)
        req_names.append(req.key)
        reqs.append(req)
    rpm_names = self._convert_names_python2rpm(req_names)
    # Ensure we select the right versions that is required and not a
    # version that doesn't match the requirements.
    desired_rpms = []
    desired_rpm_names = set()
    desired_rpms_formatted = []

    def format_name(rpm_name, py_req):
        # "name" or "name,py_requirement" — the format yumfind's -p takes.
        full_name = str(rpm_name).strip()
        if py_req is not None:
            full_name += ",%s" % (py_req)
        return full_name

    def capture_rpm(rpm_name, py_req):
        # Record each rpm at most once; empty names are skipped.
        if rpm_name in desired_rpm_names or not rpm_name:
            return
        desired_rpms_formatted.append(format_name(rpm_name, py_req))
        desired_rpms.append((rpm_name, py_req))
        desired_rpm_names.add(rpm_name)

    for (rpm_name, req) in zip(rpm_names, reqs):
        capture_rpm(rpm_name, req)
    for inst in self.instances:
        if sh.isdir(inst.get_option("app_dir")):
            req = None
            rpm_name = None
            try:
                (rpm_name, _tpl) = self._get_template_and_rpm_name(inst)
                req = inst.egg_info['req']
            except AttributeError:
                # Not every instance exposes egg/template information.
                pass
            capture_rpm(rpm_name, req)
        for rpm_name in inst.package_names():
            capture_rpm(rpm_name, None)
    for rpm_name in self.requirements["requires"]:
        capture_rpm(rpm_name, None)
    cmd = [self.yumfind_executable, '-j']
    desired_rpms_formatted = sorted(desired_rpms_formatted)
    for p in desired_rpms_formatted:
        cmd.extend(['-p', p])
    header = ("Validating %s required packages are still"
              " available" % (len(desired_rpms)))
    utils.log_iterable(desired_rpms_formatted, header=header, logger=LOG)
    rpms_located = []
    rpm_names_located = set()
    # yumfind emits one json document per line for each located package.
    for matched in sh.execute(cmd)[0].splitlines():
        matched = matched.strip()
        if matched:
            pkg = json.loads(matched)
            if isinstance(pkg, dict):
                rpm_names_located.add(pkg['name'])
                rpms_located.append(pkg)
    rpm_names_missing = desired_rpm_names - rpm_names_located
    if rpm_names_missing:
        # Include the python version required information (if applicable)
        missing_formatted = []
        for n in sorted(rpm_names_missing):
            source_found = False
            for (n2, py_req) in desired_rpms:
                if n2 == n:
                    missing_formatted.append(format_name(n2, py_req))
                    source_found = True
                    break
            if not source_found:
                missing_formatted.append(format_name(n, None))
        msg = "Could not find available rpm packages: %s"
        msg = msg % (", ".join(missing_formatted))
        raise excp.DependencyException(msg)
    LOG.info("All %s required packages are still available!",
             len(desired_rpms))
    # Use a fresh name instead of reusing desired_rpms for a second,
    # differently-shaped value; sorted() already returns a new list.
    located_formatted = []
    for pkg in rpms_located:
        LOG.debug("Found %s", pkg)
        located_formatted.append("%s,%s" % (pkg['name'], pkg['version']))
    return sorted(located_formatted)
def _package_instance(self, instance, attempt=0):
    """Install *instance* and its requirements into the venv.

    Non-python instances are skipped.  Requirements come from the
    instance options ("pips" plus optional "test_requires") and from the
    instance's egg-info dependencies; when the same key also appears in
    the globally filtered download requirements, that canonical version
    is preferred.
    """
    if not self._is_buildable(instance):
        # Skip things that aren't python...
        LOG.warn("Skipping building %s (not python)",
                 colorizer.quote(instance.name, quote_color='red'))
        return

    def gather_extras():
        # Requirements declared directly on the instance options.
        extra_reqs = []
        for p in instance.get_option("pips", default_value=[]):
            req = pip_helper.create_requirement(p['name'], p.get('version'))
            extra_reqs.append(req)
        if instance.get_bool_option('use_tests_requires',
                                    default_value=True):
            for p in instance.get_option("test_requires",
                                         default_value=[]):
                extra_reqs.append(pip_helper.create_requirement(p))
        return extra_reqs

    all_requires_what = self._filter_download_requires()
    LOG.info("Packaging %s (attempt %s)",
             colorizer.quote(instance.name), attempt)
    # Map requirement key -> globally filtered requirement so instance
    # level requirements can be swapped for the canonical version.
    all_requires_mapping = {}
    for req in all_requires_what:
        if isinstance(req, six.string_types):
            req = pip_helper.extract_requirement(req)
        all_requires_mapping[req.key] = req
    direct_requires_what = []
    direct_requires_keys = set()
    egg_info = getattr(instance, 'egg_info', None)
    if egg_info is not None:
        # Ensure we have gotten all the things...
        test_dependencies = (egg_info.get('test_dependencies', [])
                             if instance.get_bool_option(
                                 'use_tests_requires', default_value=True)
                             else [])
        for req in itertools.chain(egg_info.get('dependencies', []),
                                   test_dependencies):
            if isinstance(req, six.string_types):
                req = pip_helper.extract_requirement(req)
            # De-duplicate by requirement key.
            if req.key not in direct_requires_keys:
                direct_requires_what.append(req)
                direct_requires_keys.add(req.key)
    requires_what = []
    extra_requires_what = gather_extras()
    for req in extra_requires_what:
        if req.key in all_requires_mapping:
            req = all_requires_mapping[req.key]
        requires_what.append(req)
        # Extras win over egg-info dependencies with the same key.
        try:
            direct_requires_keys.remove(req.key)
        except KeyError:
            pass
    for req in direct_requires_what:
        if req.key not in direct_requires_keys:
            continue
        if req.key in all_requires_mapping:
            req = all_requires_mapping[req.key]
        requires_what.append(req)
    what = 'installation for %s' % colorizer.quote(instance.name)
    # Install the requirements first, then the application itself with
    # PBR_VERSION pinned so pbr does not try to derive it from git.
    utils.time_it(functools.partial(_on_finish, "Dependency %s" % what),
                  self._install_into_venv, instance,
                  requires_what)
    extra_env_overrides = {
        'PBR_VERSION': instance.egg_info['version'],
    }
    utils.time_it(functools.partial(_on_finish, "Instance %s" % what),
                  self._install_into_venv, instance,
                  [instance.get_option('app_dir')],
                  extra_env_overrides=extra_env_overrides)
def _fetch_epoch_mapping(self):
    """Build the mapping of python package name -> rpm epoch string.

    Combines epochs from previously yum-satisfied requirements with the
    configured "epoch_map", then filters out names not part of the build
    requirements; the openstack packages themselves always end up with
    their own epoch (configured or ``OPENSTACK_EPOCH``).
    """
    epoch_map = self.distro.get_dependency_config("epoch_map", quiet=True)
    if not epoch_map:
        epoch_map = {}
    epoch_skips = self.distro.get_dependency_config("epoch_skips",
                                                    quiet=True)
    if not epoch_skips:
        epoch_skips = _DEFAULT_SKIP_EPOCHS
    if not isinstance(epoch_skips, (list, tuple)):
        # Allow a comma separated string form as well.
        epoch_skips = [i.strip() for i in epoch_skips.split(",")]
    built_epochs = {}
    for name in self.python_names:
        if name in epoch_map:
            # Pop so the filtering below can not discard these entries.
            built_epochs[name] = str(epoch_map.pop(name))
        else:
            built_epochs[name] = str(self.OPENSTACK_EPOCH)
    # Ensure epochs set by a yum searching (that are not in the list of
    # epochs to provide) are correctly set when building dependent
    # packages...
    keep_names = set()
    try:
        yum_satisfies = sh.load_file(self.yum_satisfies_filename)
    except IOError as e:
        # A missing file just means nothing was satisfied via yum.
        if e.errno != errno.ENOENT:
            raise
    else:
        # One json document per line (see the writer of this file).
        for line in yum_satisfies.splitlines():
            raw_req_rpm = utils.parse_json(line)
            req = pip_helper.extract_requirement(raw_req_rpm['requirement'])
            if req.key in epoch_map:
                LOG.debug("Ensuring manually set epoch is retained for"
                          " requirement '%s' with epoch %s", req,
                          epoch_map[req.key])
                keep_names.add(req.key)
            else:
                rpm_info = raw_req_rpm['rpm']
                rpm_epoch = rpm_info.get('epoch')
                if rpm_epoch and str(rpm_epoch) not in epoch_skips:
                    LOG.debug("Adding in yum satisfiable package %s for"
                              " requirement '%s' with epoch %s from repo %s",
                              rpm_info['name'], req, rpm_epoch,
                              rpm_info['repo'])
                    keep_names.add(req.key)
                    epoch_map[req.key] = str(rpm_epoch)
    # Exclude names from the epoch map that we never downloaded in the
    # first place or that we did not just set automatically (since these
    # are not useful and should not be set in the first place).
    try:
        _pip_reqs, downloaded_reqs = pip_helper.read_requirement_files(
            [self.build_requires_filename])
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
    else:
        downloaded_names = set([req.key for req in downloaded_reqs])
        tmp_epoch_map = {}
        for (name, epoch) in six.iteritems(epoch_map):
            name = name.lower()
            if name in downloaded_names or name in keep_names:
                tmp_epoch_map[name] = str(epoch)
            else:
                LOG.debug("Discarding %s:%s from the epoch mapping since"
                          " it was not part of the downloaded (or automatically"
                          " included) build requirements", name, epoch)
        epoch_map = tmp_epoch_map
    epoch_map.update(built_epochs)
    return epoch_map
def _build_dependencies(self):
    """Download, filter and build SRPMs for python dependencies.

    Downloads everything pip resolves, ejects packages that are
    disallowed or already satisfiable via yum (recording the latter for
    later epoch handling), then builds the remaining archives into
    SRPMs.
    """
    (pips_downloaded, package_files) = self.download_dependencies()
    # Analyze what was downloaded and eject things that were downloaded
    # by pip as a dependency of a download but which we do not want to
    # build or can satisfy by other means
    no_pips = [pkg_resources.Requirement.parse(name).key
               for name in self.python_names]
    no_pips.extend(self.ignore_pips)
    yum_map = self._get_known_yum_packages()
    pips_keys = set([p.key for p in pips_downloaded])
    package_reqs = []
    for filename in package_files:
        package_details = pip_helper.get_archive_details(filename)
        package_reqs.append((filename, package_details['req']))

    def _filter_package_files():
        # Partition downloads into files to build vs yum-provided reqs.
        yum_provided = []
        req_names = [req.key for (filename, req) in package_reqs]
        package_rpm_names = self.py2rpm_helper.names_to_rpm_names(req_names)
        filtered_files = []
        for filename, req in package_reqs:
            rpm_name = package_rpm_names[req.key]
            if req.key in no_pips:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it is disallowed."), colorizer.quote(req))
                continue
            if req.key in pips_keys:
                # Explicitly requested — always build it.
                filtered_files.append(filename)
                continue
            # See if pip tried to download it but we already can satisfy
            # it via yum and avoid building it in the first place...
            rpm_info = self._find_yum_match(yum_map, req, rpm_name)
            if not rpm_info:
                filtered_files.append(filename)
            else:
                yum_provided.append((req, rpm_info))
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it can be satisfied by %s from repository "
                          "%s instead."), colorizer.quote(req),
                         colorizer.quote(rpm_name),
                         colorizer.quote(rpm_info['repo']))
        return (filtered_files, yum_provided)

    LOG.info("Filtering %s downloaded files.", len(package_files))
    filtered_package_files, yum_provided = _filter_package_files()
    if yum_provided:
        # Persist which requirements yum satisfied (one json per line) so
        # later stages (e.g. epoch mapping) can consult them.
        yum_buff = six.StringIO()
        for (req, rpm_info) in yum_provided:
            dep_info = {
                'requirement': str(req),
                'rpm': rpm_info,
            }
            yum_buff.write(json.dumps(dep_info))
            yum_buff.write("\n")
        sh.append_file(self.yum_satisfies_filename, yum_buff.getvalue())
    if not filtered_package_files:
        LOG.info("No SRPM package dependencies to build.")
        return
    # Remove the downloads that were filtered out.
    for filename in package_files:
        if filename not in filtered_package_files:
            sh.unlink(filename)
    ensure_prebuilt = self.distro.get_dependency_config("ensure_prebuilt",
                                                        quiet=True)
    if not ensure_prebuilt:
        ensure_prebuilt = {}
    build_requires = six.StringIO()
    rpm_build_requires = six.StringIO()
    for (filename, req) in package_reqs:
        if filename in filtered_package_files:
            build_requires.write("%s\n" % (req))
            # Any configured pre-build requirements for this package get
            # recorded (as rpm names) in the rpm build requires file.
            prebuilt_reqs = []
            for line in ensure_prebuilt.get(req.key, []):
                prebuilt_reqs.append(pip_helper.extract_requirement(line))
            if prebuilt_reqs:
                rpm_build_requires.write("# %s from %s\n"
                                         % (req, sh.basename(filename)))
                rpm_names = self.py2rpm_helper.names_to_rpm_names(
                    [r.key for r in prebuilt_reqs])
                for r in prebuilt_reqs:
                    rpm_name = rpm_names[r.key]
                    LOG.info("Adding %s (%s) as a pre-build time"
                             " requirement of %s (%s)",
                             r, rpm_name, req, sh.basename(filename))
                    rpm_build_requires.write("%s\n" % (rpm_name))
                rpm_build_requires.write("\n")
    sh.append_file(self.rpm_build_requires_filename,
                   rpm_build_requires.getvalue())
    sh.write_file(self.build_requires_filename, build_requires.getvalue())
    # Now build them into SRPM rpm files.
    package_files = sorted(filtered_package_files)
    self.py2rpm_helper.build_all_srpms(package_files=package_files,
                                       tracewriter=self.tracewriter,
                                       jobs=self.jobs)
def _package_instance(self, instance, attempt=0):
    """Install *instance* and its merged requirement set into the venv.

    Skips non-python instances.  The final requirement list merges the
    instance option requirements ("pips" + optional "test_requires")
    with egg-info dependencies, replacing both with the globally
    filtered requirement when one exists for the same key.
    """
    if not self._is_buildable(instance):
        # Skip things that aren't python...
        LOG.warn("Skipping building %s (not python)",
                 colorizer.quote(instance.name, quote_color='red'))
        return

    def gather_extras():
        # Requirements declared directly on the instance options.
        extra_reqs = []
        for p in instance.get_option("pips", default_value=[]):
            req = pip_helper.create_requirement(p['name'], p.get('version'))
            extra_reqs.append(req)
        if instance.get_bool_option('use_tests_requires',
                                    default_value=True):
            for p in instance.get_option("test_requires",
                                         default_value=[]):
                extra_reqs.append(pip_helper.create_requirement(p))
        return extra_reqs

    all_requires_what = self._filter_download_requires()
    LOG.info("Packaging %s (attempt %s)",
             colorizer.quote(instance.name), attempt)
    # Map requirement key -> globally filtered requirement so instance
    # level requirements can be swapped for the canonical version.
    all_requires_mapping = {}
    for req in all_requires_what:
        if isinstance(req, six.string_types):
            req = pip_helper.extract_requirement(req)
        all_requires_mapping[req.key] = req
    direct_requires_what = []
    direct_requires_keys = set()
    egg_info = getattr(instance, 'egg_info', None)
    if egg_info is not None:
        # Ensure we have gotten all the things...
        test_dependencies = (egg_info.get('test_dependencies', [])
                             if instance.get_bool_option(
                                 'use_tests_requires', default_value=True)
                             else [])
        for req in itertools.chain(egg_info.get('dependencies', []),
                                   test_dependencies):
            if isinstance(req, six.string_types):
                req = pip_helper.extract_requirement(req)
            # De-duplicate by requirement key.
            if req.key not in direct_requires_keys:
                direct_requires_what.append(req)
                direct_requires_keys.add(req.key)
    requires_what = []
    extra_requires_what = gather_extras()
    for req in extra_requires_what:
        if req.key in all_requires_mapping:
            req = all_requires_mapping[req.key]
        requires_what.append(req)
        # Extras win over egg-info dependencies with the same key.
        try:
            direct_requires_keys.remove(req.key)
        except KeyError:
            pass
    for req in direct_requires_what:
        if req.key not in direct_requires_keys:
            continue
        if req.key in all_requires_mapping:
            req = all_requires_mapping[req.key]
        requires_what.append(req)
    what = 'installation for %s' % colorizer.quote(instance.name)
    # Requirements first, then the application itself with PBR_VERSION
    # pinned so pbr does not try to derive the version from git.
    utils.time_it(functools.partial(_on_finish, "Dependency %s" % what),
                  self._install_into_venv, instance,
                  requires_what)
    extra_env_overrides = {
        'PBR_VERSION': instance.egg_info['version'],
    }
    utils.time_it(functools.partial(_on_finish, "Instance %s" % what),
                  self._install_into_venv, instance,
                  [instance.get_option('app_dir')],
                  extra_env_overrides=extra_env_overrides)
def _scan_pip_requires(self, requires_files):
    """Scan pip 'requires' files, applying forced package replacements.

    Each requirement line matching a forced package is replaced with the
    forced version; altered files are rewritten (with a backup) and the
    pip helper caches dropped so the new contents get re-read.

    :raises exc.DependencyException: when a requirement conflicts with a
        python egg that is about to be installed.
    """

    def validate_requirement(filename, source_req):
        install_egg = None
        for egg_info in self._python_eggs:
            if egg_info['name'] == source_req.key:
                install_egg = egg_info
                break
        if not install_egg:
            return
        # Ensure what we are about to install/create will actually work
        # with the desired version. If it is not compatible then we should
        # abort and someone should update the tag/branch in the origin
        # file (or fix it via some other mechanism).
        if install_egg['version'] not in source_req:
            msg = ("Can not satisfy '%s' with '%s', version"
                   " conflict found in %s")
            raise exc.DependencyException(
                msg % (source_req, install_egg['req'], filename))

    if not requires_files:
        return
    utils.log_iterable(sorted(requires_files), logger=LOG,
                       header="Scanning %s pip 'requires' files"
                              % (len(requires_files)))
    forced_by_key = dict((pkg.key, pkg) for pkg in self.forced_packages)
    mutations = 0
    for fn in sorted(requires_files):
        old_lines = sh.load_file(fn).splitlines()
        new_lines = []
        alterations = []
        for line in old_lines:
            try:
                source_req = pip_helper.extract_requirement(line)
            except (ValueError, TypeError):
                # Not a parseable requirement; keep the line untouched.
                pass
            else:
                if source_req:
                    validate_requirement(fn, source_req)
                    try:
                        replace_req = forced_by_key[source_req.key]
                    except KeyError:
                        # Not a forced package; keep the original line.
                        pass
                    else:
                        replace_req = str(replace_req)
                        source_req = str(source_req)
                        if replace_req != source_req:
                            line = replace_req
                            alterations.append(
                                "%s => %s"
                                % (colorizer.quote(source_req),
                                   colorizer.quote(replace_req)))
            new_lines.append(line)
        if alterations:
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(new_lines))
            sh.write_file_and_backup(fn, contents)
            mutations += len(alterations)
            utils.log_iterable(alterations, logger=LOG,
                               header="Replaced %s requirements in %s"
                                      % (len(alterations), fn),
                               color=None)
    # NOTE(imelnikov): after updating requirement lists we should re-fetch
    # data from them again, so we drop pip helper caches here.
    if mutations > 0:
        pip_helper.drop_caches()
def _gather_pips_to_install(self, requires_files, extra_pips=None):
    """Analyze requires_files and extra_pips.

    Updates `self.forced_packages` and `self.pips_to_install`.
    Writes requirements to `self.gathered_requires_filename`.

    :raises exc.DependencyException: when multipip produced no
        dependencies at all (an empty result means something is broken).
    """
    extra_pips = extra_pips or []
    cmdline = [
        self.multipip_executable,
        "--skip-requirements-regex", "python.*client",
        "--pip", self.pip_executable,
    ]
    cmdline = cmdline + extra_pips + ["-r"] + requires_files
    # A single flag — append, not extend.
    cmdline.append("--ignore-package")
    cmdline.extend(OPENSTACK_PACKAGES)
    cmdline.extend(SKIP_PACKAGE_NAMES)
    cmdline.extend(self.python_names)
    stdout, stderr = sh.execute(cmdline, check_exit_code=False)
    self.pips_to_install = list(utils.splitlines_not_empty(stdout))
    sh.write_file(self.gathered_requires_filename,
                  "\n".join(self.pips_to_install))
    utils.log_iterable(sorted(self.pips_to_install), logger=LOG,
                       header="Full known python dependency list")
    # multipip reports incompatible requirement groups on stderr; group
    # the detail lines under the (lowercased) package name owning them.
    incompatibles = collections.defaultdict(list)
    if stderr:
        current_name = ''
        for line in stderr.strip().splitlines():
            if line.endswith(": incompatible requirements"):
                current_name = line.split(":", 1)[0].lower().strip()
                if current_name not in incompatibles:
                    incompatibles[current_name] = []
            else:
                incompatibles[current_name].append(line)
    for (name, lines) in incompatibles.items():
        if not name:
            continue
        LOG.warn("Incompatible requirements found for %s",
                 colorizer.quote(name, quote_color='red'))
        for line in lines:
            LOG.warn(line)
    if not self.pips_to_install:
        # BUG FIX: the adjacent literals previously joined without a
        # space, producing "found.Something went wrong".
        LOG.error("No dependencies for OpenStack found."
                  " Something went wrong. Please check:")
        LOG.error("'%s'" % "' '".join(cmdline))
        raise exc.DependencyException(
            "No dependencies for OpenStack found")
    # Translate those that we altered requirements for into a set of forced
    # requirements file (and associated list).
    self.forced_packages = []
    for line in self.pips_to_install:
        req = pip_helper.extract_requirement(line)
        if req.key in incompatibles:
            self.forced_packages.append(req)
    sh.write_file(self.forced_requires_filename,
                  "\n".join(str(req) for req in self.forced_packages))
def _all_rpm_names(self):
    """Return the sorted ``name,version`` strings of all required rpms.

    Converts the gathered python requirements (plus per-instance and
    distro-level package names) into rpm names and validates, via the
    yumfind helper, that every rpm is still available.

    :returns: sorted list of ``"name,version"`` strings for located rpms.
    :raises excp.DependencyException: when any required rpm can not be
        located in the configured repositories.
    """
    # This file should have all the requirements (including test ones)
    # that we need to install (and which should have been built as rpms
    # in the previous build stages).
    gathered_requires = sh.load_file(
        self.gathered_requires_filename).splitlines()
    gathered_requires = [line.strip() for line in gathered_requires
                         if line.strip()]
    req_names = []
    reqs = []
    # De-duplicate via a set; the list-membership probe was O(n) per line.
    seen_keys = set()
    for line in gathered_requires:
        req = pip_helper.extract_requirement(line)
        if req.key in seen_keys:
            continue
        seen_keys.add(req.key)
        req_names.append(req.key)
        reqs.append(req)
    rpm_names = self._convert_names_python2rpm(req_names)
    # Ensure we select the right versions that is required and not a
    # version that doesn't match the requirements.
    desired_rpms = []
    desired_rpm_names = set()
    desired_rpms_formatted = []

    def format_name(rpm_name, py_req):
        # "name" or "name,py_requirement" — the format yumfind's -p takes.
        full_name = str(rpm_name).strip()
        if py_req is not None:
            full_name += ",%s" % (py_req)
        return full_name

    def capture_rpm(rpm_name, py_req):
        # Record each rpm at most once; empty names are skipped.
        if rpm_name in desired_rpm_names or not rpm_name:
            return
        desired_rpms_formatted.append(format_name(rpm_name, py_req))
        desired_rpms.append((rpm_name, py_req))
        desired_rpm_names.add(rpm_name)

    for (rpm_name, req) in zip(rpm_names, reqs):
        capture_rpm(rpm_name, req)
    for inst in self.instances:
        if sh.isdir(inst.get_option("app_dir")):
            req = None
            rpm_name = None
            try:
                (rpm_name, _tpl) = self._get_template_and_rpm_name(inst)
                req = inst.egg_info['req']
            except AttributeError:
                # Not every instance exposes egg/template information.
                pass
            capture_rpm(rpm_name, req)
        for rpm_name in inst.package_names():
            capture_rpm(rpm_name, None)
    for rpm_name in self.requirements["requires"]:
        capture_rpm(rpm_name, None)
    cmd = [self.yumfind_executable, '-j']
    desired_rpms_formatted = sorted(desired_rpms_formatted)
    for p in desired_rpms_formatted:
        cmd.extend(['-p', p])
    header = ("Validating %s required packages are still"
              " available" % (len(desired_rpms)))
    utils.log_iterable(desired_rpms_formatted, header=header, logger=LOG)
    rpms_located = []
    rpm_names_located = set()
    # yumfind emits one json document per line for each located package.
    for matched in sh.execute(cmd)[0].splitlines():
        matched = matched.strip()
        if matched:
            pkg = json.loads(matched)
            if isinstance(pkg, dict):
                rpm_names_located.add(pkg['name'])
                rpms_located.append(pkg)
    rpm_names_missing = desired_rpm_names - rpm_names_located
    if rpm_names_missing:
        # Include the python version required information (if applicable)
        missing_formatted = []
        for n in sorted(rpm_names_missing):
            source_found = False
            for (n2, py_req) in desired_rpms:
                if n2 == n:
                    missing_formatted.append(format_name(n2, py_req))
                    source_found = True
                    break
            if not source_found:
                missing_formatted.append(format_name(n, None))
        msg = "Could not find available rpm packages: %s"
        msg = msg % (", ".join(missing_formatted))
        raise excp.DependencyException(msg)
    LOG.info("All %s required packages are still available!",
             len(desired_rpms))
    # Fresh name instead of reusing desired_rpms for a different value
    # shape; sorted() already returns a new list (list() was redundant).
    located_formatted = []
    for pkg in rpms_located:
        LOG.debug("Found %s", pkg)
        located_formatted.append("%s,%s" % (pkg['name'], pkg['version']))
    return sorted(located_formatted)
def _scan_pip_requires(self, requires_files):
    """Scan pip 'requires' files, forcing replacement requirements in."""

    def validate_requirement(filename, source_req):
        # Ensure what we are about to install/create will actually work
        # with the desired version. If it is not compatible then we should
        # abort and someone should update the tag/branch in the origin
        # file (or fix it via some other mechanism).
        for egg_info in self._python_eggs:
            if egg_info['name'] == source_req.key:
                if egg_info['version'] not in source_req:
                    msg = ("Can not satisfy '%s' with '%s', version"
                           " conflict found in %s")
                    raise exc.DependencyException(
                        msg % (source_req, egg_info['req'], filename))
                return

    if not requires_files:
        return
    utils.log_iterable(sorted(requires_files), logger=LOG,
                       header=("Scanning %s pip 'requires' files"
                               % (len(requires_files))))
    forced_by_key = {}
    for pkg in self.forced_packages:
        forced_by_key[pkg.key] = pkg
    total_changes = 0
    for filename in sorted(requires_files):
        replaced = []
        output_lines = []
        for line in sh.load_file(filename).splitlines():
            source_req = None
            try:
                source_req = pip_helper.extract_requirement(line)
            except (ValueError, TypeError):
                # Unparseable lines pass through untouched.
                pass
            if source_req:
                validate_requirement(filename, source_req)
                replacement = forced_by_key.get(source_req.key)
                if replacement is not None:
                    new_text = str(replacement)
                    old_text = str(source_req)
                    if new_text != old_text:
                        line = new_text
                        replaced.append("%s => %s"
                                        % (colorizer.quote(old_text),
                                           colorizer.quote(new_text)))
            output_lines.append(line)
        if replaced:
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(output_lines))
            sh.write_file_and_backup(filename, contents)
            total_changes += len(replaced)
            utils.log_iterable(replaced, logger=LOG,
                               header=("Replaced %s requirements in %s"
                                       % (len(replaced), filename)),
                               color=None)
    # NOTE(imelnikov): after updating requirement lists we should re-fetch
    # data from them again, so we drop pip helper caches here.
    if total_changes > 0:
        pip_helper.drop_caches()