def __init__(self, *args, **kargs):
    """Set up runtime paths and parse the initialization template.

    Computes the binary directory under the application directory, the
    minimum service wait time (never less than one second), the trace
    file used to record that initialization happened, and the parsed
    yaml describing what initialization should do.
    """
    comp.PythonRuntime.__init__(self, *args, **kargs)
    self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
    # Clamp to at least one second so polling loops always make progress.
    self.wait_time = max(self.cfg.getint('DEFAULT', 'service_wait_seconds'), 1)
    self.init_fn = sh.joinpths(self.get_option('trace_dir'), INIT_WHAT_HAPPENED)
    (fn, contents) = utils.load_template(self.name, INIT_WHAT_FN)
    # NOTE(review): use safe_load here; plain yaml.load without an explicit
    # loader can construct arbitrary python objects from the template text.
    self.init_what = yaml.safe_load(contents)
def build_all_srpms(self, package_files, tracewriter, jobs):
    """Render the deps makefile and build all SRPM packages via make."""
    (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                         "source.mk")
    scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR,
                                        "packaging", "scripts"))
    # Everything after the executable itself becomes py2rpm flags.
    py2rpm_flags = list(self._start_cmdline(escape_values=True)[1:])
    py2rpm_flags.extend([
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self._rpmbuild_dir,
        "--debug",
    ])
    executable = " ".join(self._start_cmdline()[0:1])
    params = {
        "DOWNLOADS_DIR": self._download_dir,
        "LOGS_DIR": self._log_dir,
        "PY2RPM": executable,
        "PY2RPM_FLAGS": " ".join(py2rpm_flags),
    }
    marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=tracewriter)
    makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
    sh.write_file(makefile_path, utils.expand_template(content, params),
                  tracewriter=tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir, jobs)
def build_all_srpms(self, package_files, tracewriter, jobs):
    """Write a deps makefile and drive make to build every SRPM package."""
    (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                         "source.mk")
    scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR,
                                        "packaging", "scripts"))
    extra_flags = [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self._rpmbuild_dir,
    ]
    cmdline = self._start_cmdline(escape_values=True)[1:] + extra_flags
    executable = " ".join(self._start_cmdline()[0:1])
    params = {
        "DOWNLOADS_DIR": self._download_dir,
        "LOGS_DIR": self._log_dir,
        "PY2RPM": executable,
        "PY2RPM_FLAGS": " ".join(cmdline),
    }
    marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=tracewriter)
    makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
    sh.write_file(makefile_path,
                  utils.expand_template(content, params),
                  tracewriter=tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir, jobs)
def post_start(self):
    """Initialize keystone once (skipped when the init trace file exists)."""
    if not sh.isfile(self.init_fn) and self.get_bool_option('do-init'):
        self.wait_active()
        LOG.info("Running commands to initialize keystone.")
        (fn, contents) = utils.load_template(self.name, INIT_WHAT_FN)
        LOG.debug("Initializing with contents of %s", fn)
        params = {}
        params['keystone'] = khelper.get_shared_params(
            **utils.merge_dicts(self.options, khelper.get_shared_passwords(self)))
        # The remaining services all share the same parameter pattern.
        my_ip = self.get_option('ip')
        for (svc, helper) in (('glance', ghelper),
                              ('nova', nhelper),
                              ('neutron', net_helper),
                              ('cinder', chelper)):
            params[svc] = helper.get_shared_params(ip=my_ip,
                                                   **self.get_option(svc))
        # Both keystone endpoints must be answering before we continue.
        wait_urls = [
            params['keystone']['endpoints']['admin']['uri'],
            params['keystone']['endpoints']['public']['uri'],
        ]
        for url in wait_urls:
            utils.wait_for_url(url)
        init_what = utils.load_yaml_text(contents)
        init_what = utils.expand_template_deep(init_what, params)
        try:
            init_how = khelper.Initializer(
                params['keystone']['service_token'],
                params['keystone']['endpoints']['admin']['uri'])
            init_how.initialize(**init_what)
        except RuntimeError:
            LOG.exception("Failed to initialize keystone, is the keystone client library available?")
        else:
            # Writing this makes sure that we don't init again
            sh.write_file(self.init_fn, utils.prettify_yaml(init_what))
            LOG.info("If you wish to re-run initialization, delete %s",
                     colorizer.quote(self.init_fn))
def _create_repo(self, repo_name):
    """Run createrepo over the binary/source dirs and install the .repo file."""
    repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
    src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
    for a_dir in (repo_dir, src_repo_dir):
        if not sh.isdir(a_dir):
            sh.mkdirslist(a_dir, tracewriter=self.tracewriter)
        LOG.info("Creating repo at %s", a_dir)
        sh.execute(["createrepo", a_dir])
    repo_filename = sh.joinpths(self.anvil_repo_dir, "%s.repo" % repo_name)
    LOG.info("Writing %s", repo_filename)
    (_fn, content) = utils.load_template("packaging", "common.repo")
    params = {
        "repo_name": repo_name,
        "baseurl_bin": "file://%s" % repo_dir,
        "baseurl_src": "file://%s" % src_repo_dir,
    }
    sh.write_file(repo_filename,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    # Install *.repo file so that anvil deps will be available
    # when building OpenStack
    system_repo_filename = sh.joinpths(self.YUM_REPO_DIR, "%s.repo" % repo_name)
    sh.copy(repo_filename, system_repo_filename)
    LOG.info("Copying to %s", system_repo_filename)
    self.tracewriter.file_touched(system_repo_filename)
def source_config(self, config_fn):
    """Return (path, contents) for a config, preferring the config dir.

    Aliases found in ``self.source_configs`` are resolved first; when no
    config directory is set, the installer's template is used instead.
    """
    if not self.config_dir:
        return utils.load_template(self.installer.name, config_fn)
    # Resolve a possible alias before building the final path.
    real_fn = self.source_configs.get(config_fn, config_fn)
    fn = sh.joinpths(self.config_dir, real_fn)
    return (fn, sh.load_file(fn))
def _copy_startup_scripts(self, spec_filename):
    """Generate init scripts named in the rpm spec from the common template."""
    common_init_content = utils.load_template("packaging", "common.init")[1]
    # Quantum daemons need explicit config-file arguments; map them here
    # instead of an if/elif chain (everything else gets no daemon args).
    quantum_daemon_args = {
        "quantum-server": ("'--config-file=/etc/quantum/plugin.ini"
                           " --config-file=/etc/quantum/quantum.conf'"),
        "quantum-l3-agent": ("'--config-file=/etc/quantum/l3_agent.ini"
                             " --config-file=/etc/quantum/quantum.conf'"),
        "quantum-dhcp-agent": ("'--config-file=/etc/quantum/dhcp_agent.ini"
                               " --config-file=/etc/quantum/quantum.conf'"),
    }
    for src in rpm.spec(spec_filename).sources:
        script = sh.basename(src[0])
        if not script.endswith(".init"):
            continue
        target_filename = sh.joinpths(self.rpm_sources_dir, script)
        if sh.isfile(target_filename):
            # Already generated previously; leave it alone.
            continue
        bin_name = utils.strip_prefix_suffix(script, "openstack-", ".init")
        params = {
            "bin": bin_name,
            "package": bin_name.split("-", 1)[0],
            "daemon_args": quantum_daemon_args.get(bin_name, ""),
        }
        sh.write_file(target_filename,
                      utils.expand_template(common_init_content, params))
def _create_package(self):
    """Render the rpm spec from its template and tar it into the sources dir."""
    files = self._gather_files()
    params = {
        "files": files,
        "requires": self._requirements(),
        "obsoletes": self._obsoletes(),
        "conflicts": self._conflicts(),
        "defines": self._defines(),
        "undefines": self._undefines(),
        "build": self._build_details(),
        "who": sh.getuser(),
        "date": utils.iso8601(),
        "patches": self._patches(),
        "details": self.details,
    }
    (_fn, content) = utils.load_template("packaging", "spec.tmpl")
    spec_base = self._make_fn("spec")
    spec_fn = sh.joinpths(self.build_paths["specs"], spec_base)
    LOG.debug("Creating spec file %s with params:", spec_fn)
    # The spec tarball is itself one of the package sources.
    files["sources"].append("%s.tar.gz" % (spec_base))
    utils.log_object(params, logger=LOG, level=logging.DEBUG)
    sh.write_file(spec_fn, utils.expand_template(content, params))
    tarball_fn = sh.joinpths(self.build_paths["sources"],
                             "%s.tar.gz" % (spec_base))
    tar_it(tarball_fn, spec_base, wkdir=self.build_paths["specs"])
def post_start(self):
    """Initialize keystone once (skipped when the init trace file exists)."""
    if not sh.isfile(self.init_fn) and self.get_bool_option('do-init'):
        self.wait_active()
        LOG.info("Running commands to initialize keystone.")
        (fn, contents) = utils.load_template(self.name, INIT_WHAT_FN)
        LOG.debug("Initializing with contents of %s", fn)
        params = {}
        params['keystone'] = khelper.get_shared_params(
            **utils.merge_dicts(self.options, khelper.get_shared_passwords(self)))
        # The other services all take the same (ip + options) pattern.
        my_ip = self.get_option('ip')
        for (svc, helper) in (('glance', ghelper),
                              ('nova', nhelper),
                              ('quantum', qhelper),
                              ('cinder', chelper)):
            params[svc] = helper.get_shared_params(ip=my_ip,
                                                   **self.get_option(svc))
        # Wait until both keystone endpoints respond before initializing.
        wait_urls = [
            params['keystone']['endpoints']['admin']['uri'],
            params['keystone']['endpoints']['public']['uri'],
        ]
        for url in wait_urls:
            utils.wait_for_url(url)
        init_what = utils.load_yaml_text(contents)
        init_what = utils.expand_template_deep(self._filter_init(init_what), params)
        initializer = khelper.Initializer(
            params['keystone']['service_token'],
            params['keystone']['endpoints']['admin']['uri'])
        initializer.initialize(**init_what)
        # Writing this makes sure that we don't init again
        sh.write_file(self.init_fn, utils.prettify_yaml(init_what))
        LOG.info("If you wish to re-run initialization, delete %s",
                 colorizer.quote(self.init_fn))
def _write_spec_file(self, instance, rpm_name, template_name, params):
    """Render and write an rpm spec file; returns the written path."""
    requires_what = params.get('requires', [])
    test_requires_what = params.get('test_requires', [])
    egg_info = getattr(instance, 'egg_info', None)
    if egg_info:
        def ei_names(key):
            # Missing egg-info sections simply contribute no requirements.
            try:
                reqs = egg_info[key]
            except KeyError:
                return []
            py_reqs = [str(req) for req in reqs]
            return self.py2rpm_helper.names_to_rpm_requires(py_reqs)
        requires_what.extend(ei_names('dependencies'))
        test_requires_what.extend(ei_names('test_dependencies'))
    params["requires"] = requires_what
    params["test_requires"] = test_requires_what
    params["epoch"] = self.OPENSTACK_EPOCH
    params["part_fn"] = lambda filename: sh.joinpths(settings.TEMPLATE_DIR,
                                                     self.SPEC_TEMPLATE_DIR,
                                                     filename)
    parsed_version = pkg_resources.parse_version(params["version"])
    params.update(self._make_spec_functors(parsed_version))
    content = utils.load_template(self.SPEC_TEMPLATE_DIR, template_name)[1]
    spec_filename = sh.joinpths(self.rpmbuild_dir, "SPECS",
                                "%s.spec" % rpm_name)
    sh.write_file(spec_filename, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    return spec_filename
def _create_repo(self, repo_name):
    """Create yum repos (binary + source) and install the system .repo file."""
    repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
    src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
    for a_dir in (repo_dir, src_repo_dir):
        if not sh.isdir(a_dir):
            sh.mkdirslist(a_dir, tracewriter=self.tracewriter)
        LOG.info("Creating repo at %s", a_dir)
        sh.execute(["createrepo", a_dir])
    repo_filename = sh.joinpths(self.anvil_repo_dir, "%s.repo" % repo_name)
    LOG.info("Writing %s", repo_filename)
    (_fn, content) = utils.load_template("packaging", "common.repo")
    params = {
        "repo_name": repo_name,
        "baseurl_bin": "file://%s" % repo_dir,
        "baseurl_src": "file://%s" % src_repo_dir,
    }
    sh.write_file(repo_filename,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    # NOTE(harlowja): Install *.repo file so that anvil deps will be available
    # when building openstack core project packages.
    system_repo_filename = sh.joinpths(self.YUM_REPO_DIR, "%s.repo" % repo_name)
    sh.copy(repo_filename, system_repo_filename,
            tracewriter=self.tracewriter)
    LOG.info("Copied to %s", system_repo_filename)
def _create_package(self):
    """Expand the rpm spec template and archive it alongside the sources."""
    files = self._gather_files()
    params = {
        'files': files,
        'requires': self._requirements(),
        'obsoletes': self._obsoletes(),
        'conflicts': self._conflicts(),
        'defines': self._defines(),
        'undefines': self._undefines(),
        'build': self._build_details(),
        'who': sh.getuser(),
        'date': utils.iso8601(),
        'details': self.details,
    }
    (_fn, content) = utils.load_template('packaging', 'spec.tmpl')
    spec_base = self._make_fn("spec")
    spec_fn = sh.joinpths(self.build_paths['specs'], spec_base)
    LOG.debug("Creating spec file %s with params:", spec_fn)
    # Register the spec tarball itself as one of the sources.
    files['sources'].append("%s.tar.gz" % (spec_base))
    utils.log_object(params, logger=LOG, level=logging.DEBUG)
    sh.write_file(spec_fn, utils.expand_template(content, params))
    tarball = sh.joinpths(self.build_paths['sources'], "%s.tar.gz" % (spec_base))
    tar_it(tarball, spec_base, wkdir=self.build_paths['specs'])
def _write_spec_file(self, instance, rpm_name, template_name, params):
    """Write the rendered spec for ``rpm_name`` and return its filename."""
    requires_what = params.get('requires', [])
    test_requires_what = params.get('test_requires', [])
    egg_info = getattr(instance, 'egg_info', None)
    if egg_info:
        def ei_names(key):
            # A section absent from egg-info contributes nothing.
            try:
                py_reqs = [str(req) for req in egg_info[key]]
            except KeyError:
                return []
            return self.py2rpm_helper.names_to_rpm_requires(py_reqs)
        requires_what.extend(ei_names('dependencies'))
        test_requires_what.extend(ei_names('test_dependencies'))
    params["requires"] = requires_what
    params["test_requires"] = test_requires_what
    params["epoch"] = self.OPENSTACK_EPOCH
    params["part_fn"] = lambda filename: sh.joinpths(settings.TEMPLATE_DIR,
                                                     self.SPEC_TEMPLATE_DIR,
                                                     filename)
    parsed_version = pkg_resources.parse_version(params["version"])
    params.update(self._make_spec_functors(parsed_version))
    content = utils.load_template(self.SPEC_TEMPLATE_DIR, template_name)[1]
    spec_filename = sh.joinpths(self.rpmbuild_dir, "SPECS",
                                "%s.spec" % rpm_name)
    sh.write_file(spec_filename, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    return spec_filename
def post_start(self):
    """Run one-time keystone initialization unless already recorded."""
    if sh.isfile(self.init_fn) or not self.get_bool_option("do-init"):
        return
    self.wait_active()
    LOG.info("Running commands to initialize keystone.")
    (fn, contents) = utils.load_template(self.name, INIT_WHAT_FN)
    LOG.debug("Initializing with contents of %s", fn)
    params = {}
    params["keystone"] = khelper.get_shared_params(
        **utils.merge_dicts(self.options, khelper.get_shared_passwords(self)))
    my_ip = self.get_option("ip")
    params["glance"] = ghelper.get_shared_params(ip=my_ip,
                                                 **self.get_option("glance"))
    params["nova"] = nhelper.get_shared_params(ip=my_ip,
                                               **self.get_option("nova"))
    # Both keystone endpoints must answer before initialization starts.
    endpoints = params["keystone"]["endpoints"]
    for url in (endpoints["admin"]["uri"], endpoints["public"]["uri"]):
        utils.wait_for_url(url)
    init_what = utils.load_yaml_text(contents)
    init_what = utils.expand_template_deep(self._filter_init(init_what), params)
    khelper.Initializer(params["keystone"]["service_token"],
                        endpoints["admin"]["uri"]).initialize(**init_what)
    # Writing this makes sure that we don't init again
    sh.write_file(self.init_fn, utils.prettify_yaml(init_what))
    LOG.info("If you wish to re-run initialization, delete %s",
             colorizer.quote(self.init_fn))
def _setup_cleaner(self): LOG.info("Configuring cleaner template: %s", colorizer.quote(CLEANER_DATA_CONF)) (_fn, contents) = utils.load_template(self.name, CLEANER_DATA_CONF) # FIXME(harlowja), stop placing in checkout dir... cleaner_fn = sh.joinpths(sh.joinpths(self.get_option('app_dir'), BIN_DIR), CLEANER_DATA_CONF) sh.write_file(cleaner_fn, contents) sh.chmod(cleaner_fn, 0755) self.tracewriter.file_touched(cleaner_fn)
def build_binary(self):
    """Rebuild binary RPMs from previously built SRPMs for each repo."""

    def _is_srpm(path):
        # Only real files ending in .src.rpm qualify.
        return bool(path) and sh.isfile(path) and path.lower().endswith('.src.rpm')

    def _find_srpms(path):
        if not sh.isdir(path):
            return []
        return sorted(sh.listdir(path, filter_func=_is_srpm))

    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    for repo_name in self.REPOS:
        src_repo_dir = sh.joinpths(self.anvil_repo_dir,
                                   self.SRC_REPOS[repo_name])
        src_repo_files = _find_srpms(src_repo_dir)
        if not src_repo_files:
            continue
        header = (('Building %s RPM packages from their'
                   ' SRPMs for repo %s using %s jobs') %
                  (len(src_repo_files), self.SRC_REPOS[repo_name], self._jobs))
        utils.log_iterable(src_repo_files, header=header, logger=LOG)
        makefile_path = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
        marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
        if not sh.isdir(marks_dir):
            sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
        rpmbuild_flags = "--rebuild"
        if self.opts.get("usr_only", False):
            rpmbuild_flags += " --define 'usr_only 1'"
        params = {
            "SRC_REPO_DIR": src_repo_dir,
            "RPMBUILD_FLAGS": rpmbuild_flags,
            "LOGS_DIR": self.log_dir,
            'RPMTOP_DIR': self.rpmbuild_dir,
        }
        (_fn, content) = utils.load_template(
            sh.joinpths("packaging", "makefiles"), "binary.mk")
        sh.write_file(makefile_path,
                      utils.expand_template(content, params),
                      tracewriter=self.tracewriter)
        # Build inside a throwaway rpmbuild tree, then collect the results.
        with sh.remove_before_after(self.rpmbuild_dir):
            self._create_rpmbuild_subdirs()
            self._execute_make(makefile_path, marks_dir)
            repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
            for d in sh.listdir(self.rpmbuild_dir, dirs_only=True):
                self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir)
            self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                 repo_dir)
        self._create_repo(repo_name)
def _setup_cleaner(self): LOG.info("Configuring cleaner template: %s", colorizer.quote(CLEANER_DATA_CONF)) (_fn, contents) = utils.load_template(self.name, CLEANER_DATA_CONF) # FIXME(harlowja), stop placing in checkout dir... cleaner_fn = sh.joinpths( sh.joinpths(self.get_option('app_dir'), BIN_DIR), CLEANER_DATA_CONF) sh.write_file(cleaner_fn, contents) sh.chmod(cleaner_fn, 0755) self.tracewriter.file_touched(cleaner_fn)
def build_binary(self):
    """Build binary RPMs from SRPMs for every configured repo.

    Installs any declared build requirements first, then for each repo
    writes a binary makefile, runs it inside a temporary rpmbuild tree,
    moves the produced RPMs into the repo directory and re-creates the
    yum repo metadata.
    """
    def _install_build_requirements():
        build_requires = self.requirements["build-requires"]
        if build_requires:
            utils.log_iterable(sorted(build_requires),
                               header=("Installing %s build requirements" %
                                       len(build_requires)),
                               logger=LOG)
            cmdline = ["yum", "install", "-y"] + list(build_requires)
            sh.execute(cmdline)

    def _is_src_rpm(filename):
        return filename.endswith('.src.rpm')

    _install_build_requirements()
    for repo_name in self.REPOS:
        repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
        sh.mkdirslist(repo_dir, tracewriter=self.tracewriter)
        src_repo_dir = sh.joinpths(self.anvil_repo_dir,
                                   self.SRC_REPOS[repo_name])
        if sh.isdir(src_repo_dir):
            src_repo_files = sh.listdir(src_repo_dir, files_only=True)
            src_repo_files = sorted([f for f in src_repo_files
                                     if _is_src_rpm(f)])
        else:
            src_repo_files = []
        if not src_repo_files:
            continue
        src_repo_base_files = [sh.basename(f) for f in src_repo_files]
        LOG.info('Building %s RPM packages from their SRPMs for repo %s using %s jobs',
                 len(src_repo_files), self.SRC_REPOS[repo_name], self.jobs)
        makefile_name = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
        marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
        sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
        (_fn, content) = utils.load_template("packaging/makefiles", "binary.mk")
        rpmbuild_flags = ("--rebuild --define '_topdir %s'" %
                          self.rpmbuild_dir)
        if self.opts.get("usr_only", False):
            # BUGFIX: a separating space is required, otherwise the flag
            # string becomes "...'_topdir X'--define 'usr_only 1'" which
            # rpmbuild cannot parse.
            rpmbuild_flags += " --define 'usr_only 1'"
        params = {
            "SRC_REPO_DIR": src_repo_dir,
            "RPMBUILD_FLAGS": rpmbuild_flags,
            "LOGS_DIR": self.log_dir,
        }
        sh.write_file(makefile_name,
                      utils.expand_template(content, params),
                      tracewriter=self.tracewriter)
        # Build inside a throwaway rpmbuild tree, then collect the results.
        with sh.remove_before_after(self.rpmbuild_dir):
            self._create_rpmbuild_subdirs()
            self._execute_make(makefile_name, marks_dir)
            self._move_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                             repo_dir)
        self._create_repo(repo_name)
def build_all_binaries(self, repo_name, src_repo_dir, rpmbuild_flags,
                       tracewriter, jobs):
    """Render the binary makefile for ``repo_name`` and run make over it."""
    makefile_path = sh.joinpths(self._deps_dir, "binary-%s.mk" % repo_name)
    marks_dir = sh.joinpths(self._deps_dir, "marks-binary")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=tracewriter)
    (_fn, content) = utils.load_template(
        sh.joinpths("packaging", "makefiles"), "binary.mk")
    params = {
        "SRC_REPO_DIR": src_repo_dir,
        "RPMBUILD_FLAGS": rpmbuild_flags,
        "LOGS_DIR": self._log_dir,
        "RPMTOP_DIR": self._rpmbuild_dir,
    }
    sh.write_file(makefile_path,
                  utils.expand_template(content, params),
                  tracewriter=tracewriter)
    self._execute_make(makefile_path, marks_dir, jobs)
def _do_upstart_configure(self, app_name, app_pth, app_dir, program_args): # TODO FIXME symlinks won't work. Need to copy the files there. # https://bugs.launchpad.net/upstart/+bug/665022 cfg_fn = sh.joinpths(CONF_ROOT, app_name + CONF_EXT) if sh.isfile(cfg_fn): LOG.debug("Upstart config file already exists: %r" % (cfg_fn)) return LOG.debug("Loading upstart template to be used by: %r" % (cfg_fn)) (_, contents) = utils.load_template('general', UPSTART_CONF_TMPL) params = self._get_upstart_conf_params(app_pth, app_name, *program_args) adjusted_contents = utils.param_replace(contents, params) LOG.debug("Generated up start config for %r: %s" % (app_name, adjusted_contents)) with sh.Rooted(True): sh.write_file(cfg_fn, adjusted_contents) sh.chmod(cfg_fn, 0666)
def _copy_startup_scripts(self, spec_filename):
    """Create any missing .init scripts referenced by the rpm spec."""
    common_init_content = utils.load_template("packaging", "common.init")[1]
    for src in rpm.spec(spec_filename).sources:
        script = sh.basename(src[0])
        if not script.endswith(".init"):
            continue
        target_filename = sh.joinpths(self.rpm_sources_dir, script)
        if sh.isfile(target_filename):
            # Leave previously generated scripts untouched.
            continue
        bin_name = utils.strip_prefix_suffix(script, "openstack-", ".init")
        params = {
            "bin": bin_name,
            "package": bin_name.split("-", 1)[0],
        }
        sh.write_file(target_filename,
                      utils.expand_template(common_init_content, params))
def source_config(self, config_fn):
    """Return (path, contents) for a config file.

    Aliases in ``self.source_configs`` may be either a replacement name
    or a (name, allow_missing) pair; when ``allow_missing`` is set a
    nonexistent file yields empty contents instead of raising.
    """
    if not self.config_dir:
        return utils.load_template(self.installer.name, config_fn)
    allow_missing = False
    if config_fn in self.source_configs:
        config_data = self.source_configs.get(config_fn)
        if isinstance(config_data, (tuple, list)):
            config_fn, allow_missing = config_data
        else:
            config_fn = config_data
    fn = sh.joinpths(self.config_dir, config_fn)
    try:
        return (fn, sh.load_file(fn))
    except IOError as e:
        # Only a tolerated "file not found" is converted to empty contents.
        if e.errno == errno.ENOENT and allow_missing:
            return (fn, '')
        raise
def _write_spec_file(self, instance, rpm_name, template_name, params):
    """Render and write the rpm spec; returns the spec filename."""
    requires_what = params.get('requires') or []
    requires_python = []
    # Instances without egg_info contribute no python requirements.
    try:
        requires_python.extend(instance.egg_info['dependencies'])
    except AttributeError:
        pass
    if requires_python:
        requires_what.extend(
            self._convert_names_python2rpm(requires_python, False))
    params['requires'] = requires_what
    params["epoch"] = self.OPENSTACK_EPOCH
    content = utils.load_template(self.SPEC_TEMPLATE_DIR, template_name)[1]
    spec_filename = sh.joinpths(self.rpmbuild_dir, "SPECS",
                                "%s.spec" % rpm_name)
    sh.write_file(spec_filename, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    return spec_filename
def _create_deps_repo(self):
    """Collect built RPMs/SRPMs into the deps repos and write the repo file.

    Moves everything under the rpmbuild RPMS/SRPMS trees into the binary
    and source repo directories, runs createrepo over both, and expands
    the anvil repo template pointing at them.
    """
    for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                               recursive=True, files_only=True):
        sh.move(filename, self.deps_repo_dir, force=True)
    for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "SRPMS"),
                               recursive=True, files_only=True):
        sh.move(filename, self.deps_src_repo_dir, force=True)
    for repo_dir in self.deps_repo_dir, self.deps_src_repo_dir:
        cmdline = ["createrepo", repo_dir]
        # Use lazy logging arguments instead of eager % formatting.
        LOG.info("Creating repo at %s", repo_dir)
        sh.execute(cmdline)
    LOG.info("Writing %s to %s", self.REPO_FN, self.anvil_repo_filename)
    (_fn, content) = utils.load_template('packaging', self.REPO_FN)
    params = {"baseurl_bin": "file://%s" % self.deps_repo_dir,
              "baseurl_src": "file://%s" % self.deps_src_repo_dir}
    sh.write_file(self.anvil_repo_filename,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
def _copy_startup_scripts(self, instance, spec_details):
    """Generate missing .init scripts listed in the parsed spec details."""
    common_init_content = utils.load_template("packaging", "common.init")[1]
    daemon_args = instance.get_option('daemon_args', default_value={})
    for src in spec_details.get('sources', []):
        script = sh.basename(src)
        if not script.endswith(".init"):
            continue
        target_filename = sh.joinpths(self.rpm_sources_dir, script)
        if sh.isfile(target_filename):
            # Keep previously generated scripts as-is.
            continue
        bin_name = utils.strip_prefix_suffix(script, "openstack-", ".init")
        params = {
            "bin": bin_name,
            "package": bin_name.split("-", 1)[0],
            "daemon_args": daemon_args.get(bin_name, ''),
        }
        sh.write_file(target_filename,
                      utils.expand_template(common_init_content, params))
def _write_spec_file(self, instance, rpm_name, template_name, params):
    """Expand the spec template for ``rpm_name`` and write it out."""
    requires_what = params.get('requires') or []
    requires_python = []
    # An instance lacking egg_info simply adds no python requirements.
    try:
        requires_python.extend(instance.egg_info['dependencies'])
    except AttributeError:
        pass
    if requires_python:
        rpm_requires = self._convert_names_python2rpm(requires_python, False)
        requires_what.extend(rpm_requires)
    params['requires'] = requires_what
    params["epoch"] = self.OPENSTACK_EPOCH
    content = utils.load_template(self.SPEC_TEMPLATE_DIR, template_name)[1]
    spec_filename = sh.joinpths(self.rpmbuild_dir, "SPECS",
                                "%s.spec" % rpm_name)
    sh.write_file(spec_filename,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    return spec_filename
def _create_deps_repo(self):
    """Assemble the deps binary/source repos and write the anvil repo file.

    Moves built RPMs and SRPMs out of the rpmbuild tree, runs createrepo
    over each target directory, then renders the repo template with
    file:// base urls for both.
    """
    for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                               recursive=True, files_only=True):
        sh.move(filename, self.deps_repo_dir, force=True)
    for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "SRPMS"),
                               recursive=True, files_only=True):
        sh.move(filename, self.deps_src_repo_dir, force=True)
    for repo_dir in self.deps_repo_dir, self.deps_src_repo_dir:
        cmdline = ["createrepo", repo_dir]
        # Use lazy logging arguments instead of eager % formatting.
        LOG.info("Creating repo at %s", repo_dir)
        sh.execute(cmdline)
    LOG.info("Writing %s to %s", self.REPO_FN, self.anvil_repo_filename)
    (_fn, content) = utils.load_template('packaging', self.REPO_FN)
    params = {
        "baseurl_bin": "file://%s" % self.deps_repo_dir,
        "baseurl_src": "file://%s" % self.deps_src_repo_dir
    }
    sh.write_file(self.anvil_repo_filename,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
def _copy_startup_scripts(self, instance, spec_filename):
    """Generate missing .init scripts for sources found via specprint."""
    common_init_content = utils.load_template("packaging", "common.init")[1]
    # Ask the specprint helper to dump the spec as json so its sources
    # can be inspected without parsing rpm spec syntax ourselves.
    cmd = [self.specprint_executable, '-f', spec_filename]
    daemon_args = instance.get_option('daemon_args', default_value={})
    spec_details = json.loads(sh.execute(cmd)[0])
    for src in spec_details.get('sources', []):
        script = sh.basename(src)
        if not script.endswith(".init"):
            continue
        target_filename = sh.joinpths(self.rpm_sources_dir, script)
        if sh.isfile(target_filename):
            continue
        bin_name = utils.strip_prefix_suffix(script, "openstack-", ".init")
        params = {
            "bin": bin_name,
            "package": bin_name.split("-", 1)[0],
            "daemon_args": daemon_args.get(bin_name, ''),
        }
        sh.write_file(target_filename,
                      utils.expand_template(common_init_content, params))
def _write_spec_file(self, instance, rpm_name, template_name, params):
    """Render the spec template with requirement info and write it out."""
    requires_what = params.get('requires', [])
    test_requires_what = params.get('test_requires', [])
    egg_info = getattr(instance, 'egg_info', None)
    if egg_info:
        def ei_names(key):
            py_reqs = [str(req) for req in egg_info[key]]
            return self.py2rpm_helper.convert_names_to_rpm(py_reqs, False)
        requires_what.extend(ei_names('dependencies'))
        test_requires_what.extend(ei_names('test_dependencies'))
    params["requires"] = requires_what
    params["test_requires"] = test_requires_what
    params["epoch"] = self.OPENSTACK_EPOCH
    params["part_fn"] = lambda filename: sh.joinpths(settings.TEMPLATE_DIR,
                                                     self.SPEC_TEMPLATE_DIR,
                                                     filename)
    content = utils.load_template(self.SPEC_TEMPLATE_DIR, template_name)[1]
    spec_filename = sh.joinpths(self.rpmbuild_dir, "SPECS",
                                "%s.spec" % rpm_name)
    sh.write_file(spec_filename,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    return spec_filename
def source_config(self, config_fn):
    """Load the named config template for this component.

    Returns the (filename, contents) pair produced by the template loader.
    """
    loaded = utils.load_template(self.name, config_fn)
    return loaded
def _build_dependencies(self):
    """Download python dependencies, filter the unneeded ones, build SRPMs."""
    (pips_downloaded, package_files) = self.download_dependencies()
    # Analyze what was downloaded and eject things that were downloaded
    # by pip as a dependency of a download but which we do not want to
    # build or can satisfy by other means
    no_pips = [pkg_resources.Requirement.parse(name).key
               for name in self.python_names]
    yum_map = self._get_known_yum_packages()
    pips_keys = set(p.key for p in pips_downloaded)

    def _filter_package_files(package_files):
        package_reqs = []
        package_keys = []
        for filename in package_files:
            details = pip_helper.get_archive_details(filename)
            package_reqs.append(details['req'])
            package_keys.append(details['req'].key)
        package_rpm_names = self._convert_names_python2rpm(package_keys)
        keepers = []
        for (filename, req, rpm_name) in zip(package_files,
                                             package_reqs,
                                             package_rpm_names):
            if req.key in no_pips:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it is disallowed."), colorizer.quote(req))
                continue
            if req.key in pips_keys:
                keepers.append(filename)
                continue
            # See if pip tried to download it but we already can satisfy
            # it via yum and avoid building it in the first place...
            (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
            if not repo:
                keepers.append(filename)
            else:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it can be satisfied by %s from repository "
                          "%s instead."), colorizer.quote(req),
                         colorizer.quote(rpm_name), colorizer.quote(repo))
        return keepers

    LOG.info("Filtering %s downloaded files.", len(package_files))
    filtered_package_files = _filter_package_files(package_files)
    if not filtered_package_files:
        LOG.info("No SRPM package dependencies to build.")
        return
    # Remove anything that was filtered out so it is not built later.
    for filename in package_files:
        if filename not in filtered_package_files:
            sh.unlink(filename)
    package_files = sorted(filtered_package_files)
    # Now build them into SRPM rpm files.
    (_fn, content) = utils.load_template(
        sh.joinpths("packaging", "makefiles"), "source.mk")
    scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR,
                                        "packaging", "scripts"))
    py2rpm_options = self._py2rpm_start_cmdline()[1:] + [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self.rpmbuild_dir,
    ]
    params = {
        "DOWNLOADS_DIR": self.download_dir,
        "LOGS_DIR": self.log_dir,
        "PY2RPM": self.py2rpm_executable,
        "PY2RPM_FLAGS": " ".join(py2rpm_options),
    }
    marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
    makefile_path = sh.joinpths(self.deps_dir, "deps.mk")
    sh.write_file(makefile_path,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), self._jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir)
def build_binary(self):
    """Build binary RPMs for each repo from its SRPM directory."""

    def _srpm_pred(path):
        # Accept only existing files whose name ends in .src.rpm.
        if not path or not sh.isfile(path):
            return False
        return path.lower().endswith('.src.rpm')

    def _collect_srpms(path):
        if sh.isdir(path):
            return sorted(sh.listdir(path, filter_func=_srpm_pred))
        return []

    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    for repo_name in self.REPOS:
        src_repo_dir = sh.joinpths(self.anvil_repo_dir,
                                   self.SRC_REPOS[repo_name])
        src_repo_files = _collect_srpms(src_repo_dir)
        if not src_repo_files:
            continue
        utils.log_iterable(
            src_repo_files,
            header=('Building %s RPM packages from their'
                    ' SRPMs for repo %s using %s jobs') %
                   (len(src_repo_files), self.SRC_REPOS[repo_name],
                    self._jobs),
            logger=LOG)
        makefile_path = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
        marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
        if not sh.isdir(marks_dir):
            sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
        rpmbuild_flags = "--rebuild"
        if self.opts.get("usr_only", False):
            rpmbuild_flags += " --define 'usr_only 1'"
        params = {
            "SRC_REPO_DIR": src_repo_dir,
            "RPMBUILD_FLAGS": rpmbuild_flags,
            "LOGS_DIR": self.log_dir,
            'RPMTOP_DIR': self.rpmbuild_dir,
        }
        (_fn, content) = utils.load_template(
            sh.joinpths("packaging", "makefiles"), "binary.mk")
        sh.write_file(makefile_path,
                      utils.expand_template(content, params),
                      tracewriter=self.tracewriter)
        # Build inside a temporary rpmbuild tree, then harvest the RPMs.
        with sh.remove_before_after(self.rpmbuild_dir):
            self._create_rpmbuild_subdirs()
            self._execute_make(makefile_path, marks_dir)
            repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
            for d in sh.listdir(self.rpmbuild_dir, dirs_only=True):
                self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir)
            self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                 repo_dir)
        self._create_repo(repo_name)
def _build_dependencies(self):
    """Download pip dependencies, drop redundant ones, then build SRPMs."""
    (pips_downloaded, package_files) = self.download_dependencies()
    # Analyze what was downloaded and eject things that were downloaded
    # by pip as a dependency of a download but which we do not want to
    # build or can satisfy by other means
    no_pips = [pkg_resources.Requirement.parse(name).key
               for name in self.python_names]
    yum_map = self._get_known_yum_packages()
    pips_keys = set(p.key for p in pips_downloaded)

    def _filter_package_files(package_files):
        reqs = []
        keys = []
        for filename in package_files:
            archive = pip_helper.get_archive_details(filename)
            reqs.append(archive['req'])
            keys.append(archive['req'].key)
        rpm_names = self._convert_names_python2rpm(keys)
        wanted = []
        for (filename, req, rpm_name) in zip(package_files, reqs, rpm_names):
            if req.key in no_pips:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it is disallowed."), colorizer.quote(req))
                continue
            if req.key in pips_keys:
                wanted.append(filename)
                continue
            # See if pip tried to download it but we already can satisfy
            # it via yum and avoid building it in the first place...
            (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
            if not repo:
                wanted.append(filename)
            else:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it can be satisfied by %s from repository "
                          "%s instead."), colorizer.quote(req),
                         colorizer.quote(rpm_name), colorizer.quote(repo))
        return wanted

    LOG.info("Filtering %s downloaded files.", len(package_files))
    filtered_package_files = _filter_package_files(package_files)
    if not filtered_package_files:
        LOG.info("No SRPM package dependencies to build.")
        return
    # Delete whatever got filtered out so it will not be built.
    for filename in package_files:
        if filename not in filtered_package_files:
            sh.unlink(filename)
    package_files = sorted(filtered_package_files)
    # Now build them into SRPM rpm files.
    (_fn, content) = utils.load_template(
        sh.joinpths("packaging", "makefiles"), "source.mk")
    scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR,
                                        "packaging", "scripts"))
    py2rpm_options = self._py2rpm_start_cmdline()[1:] + [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self.rpmbuild_dir,
    ]
    params = {
        "DOWNLOADS_DIR": self.download_dir,
        "LOGS_DIR": self.log_dir,
        "PY2RPM": self.py2rpm_executable,
        "PY2RPM_FLAGS": " ".join(py2rpm_options),
    }
    marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
    makefile_path = sh.joinpths(self.deps_dir, "deps.mk")
    sh.write_file(makefile_path,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), self._jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir)