def _create_repo(self, repo_name):
    """Create binary + source yum repos for ``repo_name`` and activate them.

    Runs ``createrepo`` over both the binary and the source package
    directories (creating them if needed), renders a ``common.repo``
    template that points at them, and installs the resulting *.repo file
    into the system yum repository directory.
    """
    repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
    src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
    for a_dir in (repo_dir, src_repo_dir):
        if not sh.isdir(a_dir):
            sh.mkdirslist(a_dir, tracewriter=self.tracewriter)
        cmdline = ["createrepo", a_dir]
        LOG.info("Creating repo at %s", a_dir)
        sh.execute(cmdline)
    repo_filename = sh.joinpths(self.anvil_repo_dir, "%s.repo" % repo_name)
    LOG.info("Writing %s", repo_filename)
    (_fn, content) = utils.load_template("packaging", "common.repo")
    params = {
        "repo_name": repo_name,
        "baseurl_bin": "file://%s" % repo_dir,
        "baseurl_src": "file://%s" % src_repo_dir,
    }
    sh.write_file(repo_filename, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    # Install *.repo file so that anvil deps will be available
    # when building OpenStack
    system_repo_filename = sh.joinpths(self.YUM_REPO_DIR, "%s.repo" % repo_name)
    # Let sh.copy record the created file with the tracewriter directly
    # (matches the sibling implementation) instead of touching it after
    # the fact with file_touched().
    sh.copy(repo_filename, system_repo_filename,
            tracewriter=self.tracewriter)
    LOG.info("Copying to %s", system_repo_filename)
def _create_repo(self, repo_name):
    """Build yum metadata for a repo's binary/source dirs and install its *.repo file."""
    bin_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
    src_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
    for target_dir in (bin_dir, src_dir):
        if not sh.isdir(target_dir):
            sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
        LOG.info("Creating repo at %s", target_dir)
        sh.execute(["createrepo", target_dir])
    repo_filename = sh.joinpths(self.anvil_repo_dir, "%s.repo" % repo_name)
    LOG.info("Writing %s", repo_filename)
    (_fn, template) = utils.load_template("packaging", "common.repo")
    rendered = utils.expand_template(template, {
        "repo_name": repo_name,
        "baseurl_bin": "file://%s" % bin_dir,
        "baseurl_src": "file://%s" % src_dir,
    })
    sh.write_file(repo_filename, rendered, tracewriter=self.tracewriter)
    # NOTE(harlowja): drop the *.repo file into the system yum directory
    # so that anvil's dependency packages are resolvable while the
    # openstack core project packages are being built.
    system_repo_filename = sh.joinpths(self.YUM_REPO_DIR, "%s.repo" % repo_name)
    sh.copy(repo_filename, system_repo_filename,
            tracewriter=self.tracewriter)
    LOG.info("Copied to %s", system_repo_filename)
def _patches(self):
    """Copy the expanded package patches into the sources dir; return their basenames."""
    copied = []
    for src_path in patcher.expand_patches(self.get_option('patches', 'package')):
        dest_path = sh.joinpths(self.build_paths['sources'],
                                sh.basename(src_path))
        sh.copy(src_path, dest_path)
        copied.append(sh.basename(dest_path))
    return copied
def _patches(self):
    """Gather patch files for this package into the sources build dir.

    Each configured patch path may be a single file or a directory of
    patch files; every resulting file is copied into
    ``self.build_paths['sources']``.

    :returns: list of the copied patch file basenames.
    """
    your_patches = []
    in_patches = self.get_option('patches', 'package')
    if in_patches:
        for path in in_patches:
            path = sh.abspth(path)
            # A directory expands into all the plain files it contains;
            # a file stands for itself (deduplicates the previously
            # copy-pasted copy/append logic of the two branches).
            if sh.isdir(path):
                patch_files = sh.listdir(path, files_only=True)
            else:
                patch_files = [path]
            for patch_file in patch_files:
                tgt_fn = sh.joinpths(self.build_paths['sources'],
                                     sh.basename(patch_file))
                sh.copy(patch_file, tgt_fn)
                your_patches.append(sh.basename(tgt_fn))
    return your_patches
def pre_build():
    # Install any declared build requirements, then prebuild (srpm -> rpm)
    # every source rpm whose package name matches one of the requirements
    # listed in the rpm-build-requires file.  Returns a tuple of
    # (requirements with no matching srpm built, list of files built).
    # NOTE(review): nested closure — relies on self, _get_lines,
    # list_src_rpms and build from the enclosing scope.
    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    # Reused name: from here on build_requirements holds the lines read
    # from the rpm-build-requires file, not the packages installed above.
    build_requirements = []
    try:
        build_requirements.extend(_get_lines(self.rpm_build_requires_filename))
    except IOError as e:
        # A missing requirements file just means there is nothing to
        # prebuild; any other I/O failure is a real error.
        if e.errno != errno.ENOENT:
            raise
    built_files = []
    built_requirements = []
    for repo_name in self.REPOS:
        repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
        matched_paths = []
        available_paths = list_src_rpms(repo_dir)
        # Explode srpm paths into envra details so we can match on the
        # bare package name.
        envra_path_details = self.envra_helper.explode(*available_paths)
        for (path, envra_detail) in zip(available_paths, envra_path_details):
            package_name = envra_detail.get('name')
            if package_name in build_requirements:
                matched_paths.append(path)
                built_requirements.append(package_name)
        if matched_paths:
            # Stage the matched srpms into a scratch prebuild dir (wiped
            # before use) and build them from there.
            with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                for path in matched_paths:
                    sh.copy(path,
                            sh.joinpths(prebuild_dir, sh.basename(path)))
                built_files.extend(
                    build(prebuild_dir, repo_name,
                          'Prebuilding %s RPM packages from their'
                          ' SRPMs for repo %s using %s jobs',
                          "%s-prebuild" % self.group, built_files))
    # Whatever was required but never matched an available srpm is left
    # for the caller to deal with.
    leftover_requirements = set()
    for req in build_requirements:
        if req not in built_requirements:
            leftover_requirements.add(req)
    return (leftover_requirements, built_files)
def build(repo_dir, repo_name, header_tpl, group, built_files):
    # Rebuild (via rpmbuild --rebuild) every source rpm in repo_dir that
    # is not already listed in built_files, and publish any produced rpms
    # into the target repo.  header_tpl is a %-format string taking three
    # values: file count, source repo name, job count.  Returns the list
    # of srpm paths attempted (empty if nothing needed building).
    # NOTE(review): nested closure — relies on self, list_src_rpms and
    # move_rpms from the enclosing scope.
    repo_files = []
    for srpm in list_src_rpms(repo_dir):
        if srpm not in built_files:
            repo_files.append(srpm)
    if not repo_files:
        return []
    utils.log_iterable(repo_files,
                       header=header_tpl % (len(repo_files),
                                            self.SRC_REPOS[repo_name],
                                            self.jobs),
                       logger=LOG)
    rpmbuild_flags = "--rebuild"
    if self.opts.get("usr_only", False):
        rpmbuild_flags += " --define 'usr_only 1'"
    if self.opts.get("overwrite_configs", False):
        rpmbuild_flags += " --define 'overwrite_configs 1'"
    # Work in a freshly wiped rpmbuild tree so stale artifacts from a
    # previous run can't leak into this build.
    with sh.remove_before(self.rpmbuild_dir):
        self._create_rpmbuild_subdirs()
        # This is needed so that make correctly identifies the right
        # files and the right *.mark files and so-on; instead of
        # grabbing all the files (including ones we don't want to
        # build just yet...)
        files_dirname = '%s-%s-build' % (repo_name, group)
        files_dir = sh.joinpths(self.deps_dir, files_dirname)
        sh.mkdirslist(files_dir)
        for srpm in repo_files:
            sh.copy(srpm, sh.joinpths(files_dir, sh.basename(srpm)))
        try:
            self.py2rpm_helper.build_all_binaries(repo_name, files_dir,
                                                  rpmbuild_flags,
                                                  self.tracewriter,
                                                  self.jobs)
        finally:
            # If we made any rpms (even if a failure happened, make
            # sure that we move them to the right target repo).
            moved_rpms = move_rpms(repo_name)
            if len(moved_rpms) > 0:
                self._create_repo(repo_name)
    return repo_files
def get_archive_details(filename):
    """Extract and return the egg details of an archive, caching by (basename, size)."""
    if not sh.isfile(filename):
        raise IOError("Can not detail non-existent file %s" % (filename))
    # Check if we already got the details of this file previously
    cache_key = "f:%s:%s" % (sh.basename(filename), sh.getsize(filename))
    try:
        return EGGS_DETAILED[cache_key]
    except KeyError:
        pass
    # Get pip to get us the egg-info.
    with utils.tempdir() as td:
        arch_copy = sh.copy(filename, sh.joinpths(td, sh.basename(filename)))
        extract_to = sh.mkdir(sh.joinpths(td, 'build'))
        pip_util.unpack_file(arch_copy, extract_to, content_type='', link='')
        details = get_directory_details(extract_to)
    EGGS_DETAILED[cache_key] = details
    return details
def _copy_patches(self, patches):
    """Stage every given patch file into the rpm SOURCES directory."""
    for patch_path in patches:
        sh.copy(patch_path, self.rpm_sources_dir)
def _copy_sources(self, instance):
    """Copy any packaged template sources for *instance* into the rpm SOURCES dir."""
    extra_sources_dir = sh.joinpths(settings.TEMPLATE_DIR,
                                    "packaging", "sources", instance.name)
    if not sh.isdir(extra_sources_dir):
        return
    for source_file in sh.listdir(extra_sources_dir, files_only=True):
        sh.copy(source_file, self.rpm_sources_dir)