def _move_rpm_files(source_dir, target_dir):
    """Move every ``.rpm`` file found (recursively) under ``source_dir``
    into ``target_dir``, overwriting any same-named file already there.

    A missing ``source_dir`` is treated as "nothing to move".
    """
    if not sh.isdir(source_dir):
        return
    rpm_paths = (path
                 for path in sh.listdir(source_dir, recursive=True, files_only=True)
                 if path.lower().endswith(".rpm"))
    for rpm_path in rpm_paths:
        sh.move(rpm_path, target_dir, force=True)
def pre_build(self):
    """Install build requirements and prebuild the SRPMs that satisfy them.

    Fix: the original signature was ``def pre_build():`` while the body
    reads ``self.requirements``, ``self.helper`` etc. — the missing
    ``self`` parameter made every call fail with NameError.

    Steps:
      1. Install the persona-declared "build-requires" packages.
      2. Load the rpm build-requires file (a missing file is treated as
         empty; other IO errors propagate).
      3. For each known source repo, find SRPMs whose package name matches
         an outstanding build requirement, move them into a fresh prebuild
         directory and build them.

    :returns: the set of build-requirement names for which no SRPM was
        found (i.e. still unsatisfied after prebuilding).
    :raises IOError: if the build-requires file exists but can't be read.
    """
    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    build_requirements = ''
    try:
        build_requirements = sh.load_file(self.rpm_build_requires_filename)
    except IOError as e:
        # A missing file just means "no extra build requires".
        if e.errno != errno.ENOENT:
            raise
    build_requirements = set(pkg_resources.yield_lines(build_requirements))
    for repo_name in self.REPOS:
        repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
        matched_paths = []
        paths = list_src_rpms(repo_dir)
        envra_details = self.envra_helper.explode(*paths)
        for (path, envra_detail) in zip(paths, envra_details):
            package_name = envra_detail.get('name')
            if package_name in build_requirements:
                matched_paths.append(path)
                build_requirements.discard(package_name)
        if matched_paths:
            with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                if not sh.isdir(prebuild_dir):
                    sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                for path in matched_paths:
                    sh.move(path, sh.joinpths(prebuild_dir, sh.basename(path)))
                build(prebuild_dir, repo_name,
                      'Prebuilding %s RPM packages from their SRPMs'
                      ' for repo %s using %s jobs')
    return build_requirements
def _make_source_archive(self):
    """Build a ``name-version.tar.gz`` source archive of the app dir.

    The app directory is staged into a temporary directory under a
    ``name-version`` root, tarred up, and the resulting archive is moved
    into the rpm build sources directory.

    :returns: the archive's file name (not its full path).
    """
    with utils.tempdir() as staging_dir:
        base_name = "%s-%s" % (self.details['name'], self.details['version'])
        sh.copytree(self.get_option('app_dir'),
                    sh.joinpths(staging_dir, base_name))
        archive_name = "%s.tar.gz" % (base_name)
        tmp_archive_path = sh.joinpths(staging_dir, archive_name)
        tar_it(tmp_archive_path, base_name, staging_dir)
        sh.move(tmp_archive_path, self.build_paths['sources'])
        return archive_name
def download_dependencies(self):
    """Download dependencies from `$deps_dir/download-requires`.

    :returns: a two-tuple ``(pips_downloaded, what_downloaded)`` — the
        parsed requirement objects and the files now present in the
        download directory; ``([], [])`` when there is nothing to fetch.
    :raises: the last pip failure if every download attempt failed.
    """
    # NOTE(aababilov): do not drop download_dir - it can be reused
    sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
    download_requires_filename = sh.joinpths(self.deps_dir,
                                             "download-requires")
    raw_pips_to_download = self.filter_download_requires()
    sh.write_file(download_requires_filename,
                  "\n".join(str(req) for req in raw_pips_to_download))
    if not raw_pips_to_download:
        return ([], [])
    downloaded_flag_file = sh.joinpths(self.deps_dir, "pip-downloaded")
    # NOTE(aababilov): user could have changed persona, so,
    # check that all requirements are downloaded
    already_have = (sh.isfile(downloaded_flag_file) and
                    self._requirements_satisfied(raw_pips_to_download,
                                                 self.download_dir))
    if already_have:
        LOG.info("All python dependencies have been already downloaded")
    else:
        pip_dir = sh.joinpths(self.deps_dir, "pip")
        pip_download_dir = sh.joinpths(pip_dir, "download")
        pip_build_dir = sh.joinpths(pip_dir, "build")
        # NOTE(aababilov): do not clean the cache, it is always useful
        pip_cache_dir = sh.joinpths(self.deps_dir, "pip-cache")
        pip_failures = []
        for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
            # NOTE(aababilov): pip has issues with already downloaded files
            sh.deldir(pip_dir)
            sh.mkdir(pip_download_dir, recurse=True)
            header = "Downloading %s python dependencies (attempt %s)"
            header = header % (len(raw_pips_to_download), attempt)
            utils.log_iterable(sorted(raw_pips_to_download),
                               logger=LOG, header=header)
            try:
                self._try_download_dependencies(attempt,
                                                raw_pips_to_download,
                                                pip_download_dir,
                                                pip_cache_dir,
                                                pip_build_dir)
            except exc.ProcessExecutionError as e:
                LOG.exception("Failed downloading python dependencies")
                pip_failures.append(e)
            else:
                # Success: forget earlier failures and stop retrying.
                pip_failures = []
                break
        for filename in sh.listdir(pip_download_dir, files_only=True):
            sh.move(filename, self.download_dir, force=True)
        sh.deldir(pip_dir)
        if pip_failures:
            raise pip_failures[-1]
        # Record that a complete download pass succeeded.
        with open(downloaded_flag_file, "w"):
            pass
    pips_downloaded = [pip_helper.extract_requirement(p)
                       for p in raw_pips_to_download]
    self._examine_download_dir(pips_downloaded, self.download_dir)
    what_downloaded = sh.listdir(self.download_dir, files_only=True)
    return (pips_downloaded, what_downloaded)
def _setup_binaries(self): startmain_file = sh.joinpths(self.get_option("app_dir"), BIN_DIR, SWIFT_STARTMAIN) makerings_file = sh.joinpths(self.get_option("app_dir"), BIN_DIR, SWIFT_MAKERINGS) sh.move(sh.joinpths(self.get_option("cfg_dir"), SWIFT_MAKERINGS), makerings_file) sh.chmod(makerings_file, 0777) self.tracewriter.file_touched(makerings_file) sh.move(sh.joinpths(self.get_option("cfg_dir"), SWIFT_STARTMAIN), startmain_file) sh.chmod(startmain_file, 0777) self.tracewriter.file_touched(startmain_file)
def _move_rpm_files(self, source_dir, target_dir):
    """Move all ``.rpm`` files found under ``source_dir`` into
    ``target_dir``, creating ``target_dir`` first if needed.
    """
    # NOTE(imelnikov): we should create target_dir even if we have
    # nothing to move, because later we rely on its existence
    if not sh.isdir(target_dir):
        sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
    if not sh.isdir(source_dir):
        return
    for candidate in sh.listdir(source_dir, recursive=True, files_only=True):
        if candidate.lower().endswith(".rpm"):
            sh.move(candidate, target_dir, force=True)
def download_dependencies(self, clear_cache=False):
    """Download dependencies from `$deps_dir/download-requires`.

    :param clear_cache: clear `$deps_dir/cache` dir (pip can work
        incorrectly when it has a cache)
    :returns: the list of files now present in the download directory,
        or ``[]`` when there was nothing to download.
    :raises: the last pip failure if every download attempt failed.
    """
    sh.deldir(self.download_dir)
    sh.mkdir(self.download_dir, recurse=True)
    download_requires_filename = sh.joinpths(self.deps_dir,
                                             "download-requires")
    raw_pips_to_download = self.filter_download_requires()
    pips_to_download = [pkg_resources.Requirement.parse(str(entry.strip()))
                        for entry in raw_pips_to_download if entry.strip()]
    sh.write_file(download_requires_filename,
                  "\n".join(str(req) for req in pips_to_download))
    if not pips_to_download:
        return []
    pip_dir = sh.joinpths(self.deps_dir, "pip")
    pip_download_dir = sh.joinpths(pip_dir, "download")
    pip_build_dir = sh.joinpths(pip_dir, "build")
    pip_cache_dir = sh.joinpths(pip_dir, "cache")
    if clear_cache:
        sh.deldir(pip_cache_dir)
    pip_failures = []
    how_many = len(pips_to_download)
    for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
        # NOTE(aababilov): pip has issues with already downloaded files
        sh.deldir(pip_download_dir)
        sh.mkdir(pip_download_dir, recurse=True)
        sh.deldir(pip_build_dir)
        utils.log_iterable(sorted(raw_pips_to_download),
                           logger=LOG,
                           header=("Downloading %s python dependencies "
                                   "(attempt %s)" % (how_many, attempt)))
        try:
            self._try_download_dependencies(attempt, pips_to_download,
                                            pip_download_dir,
                                            pip_cache_dir, pip_build_dir)
        except exc.ProcessExecutionError as e:
            LOG.exception("Failed downloading python dependencies")
            pip_failures.append(e)
        else:
            # Success: forget earlier failures and stop retrying.
            pip_failures = []
            break
    if pip_failures:
        raise pip_failures[-1]
    for downloaded in sh.listdir(pip_download_dir, files_only=True):
        sh.move(downloaded, self.download_dir)
    return sh.listdir(self.download_dir, files_only=True)
def download_dependencies(self, clear_cache=False):
    """Download dependencies from `$deps_dir/download-requires`.

    :param clear_cache: clear `$deps_dir/cache` dir (pip can work
        incorrectly when it has a cache)
    :returns: files present in the download directory afterwards
        (empty list when nothing needed downloading).
    :raises: the last recorded pip failure if no attempt succeeded.
    """
    # Start from a clean download directory each run.
    sh.deldir(self.download_dir)
    sh.mkdir(self.download_dir, recurse=True)
    download_requires_filename = sh.joinpths(self.deps_dir,
                                             "download-requires")
    raw_pips_to_download = self.filter_download_requires()
    pips_to_download = []
    for raw_pip in raw_pips_to_download:
        if raw_pip.strip():
            pips_to_download.append(
                pkg_resources.Requirement.parse(str(raw_pip.strip())))
    sh.write_file(download_requires_filename,
                  "\n".join(str(req) for req in pips_to_download))
    if not pips_to_download:
        return []
    pip_dir = sh.joinpths(self.deps_dir, "pip")
    pip_download_dir = sh.joinpths(pip_dir, "download")
    pip_build_dir = sh.joinpths(pip_dir, "build")
    pip_cache_dir = sh.joinpths(pip_dir, "cache")
    if clear_cache:
        sh.deldir(pip_cache_dir)
    pip_failures = []
    how_many = len(pips_to_download)
    attempt = 0
    while attempt < self.MAX_PIP_DOWNLOAD_ATTEMPTS:
        # NOTE(aababilov): pip has issues with already downloaded files
        sh.deldir(pip_download_dir)
        sh.mkdir(pip_download_dir, recurse=True)
        sh.deldir(pip_build_dir)
        utils.log_iterable(sorted(raw_pips_to_download),
                           logger=LOG,
                           header=("Downloading %s python dependencies "
                                   "(attempt %s)" % (how_many, attempt)))
        failed = False
        try:
            self._try_download_dependencies(attempt, pips_to_download,
                                            pip_download_dir,
                                            pip_cache_dir, pip_build_dir)
            pip_failures = []
        except exc.ProcessExecutionError as e:
            LOG.exception("Failed downloading python dependencies")
            pip_failures.append(e)
            failed = True
        if not failed:
            break
        attempt += 1
    if pip_failures:
        raise pip_failures[-1]
    for moved_file in sh.listdir(pip_download_dir, files_only=True):
        sh.move(moved_file, self.download_dir)
    return sh.listdir(self.download_dir, files_only=True)
def _create_deps_repo(self):
    """Publish built packages as local yum repositories.

    Moves built RPMs/SRPMs out of the rpmbuild tree into the binary and
    source repo dirs, runs ``createrepo`` on each, then writes a repo
    file pointing at both via ``file://`` urls.
    """
    move_plan = ((sh.joinpths(self.rpmbuild_dir, "RPMS"), self.deps_repo_dir),
                 (sh.joinpths(self.rpmbuild_dir, "SRPMS"), self.deps_src_repo_dir))
    for (built_dir, repo_target) in move_plan:
        for built_file in sh.listdir(built_dir, recursive=True,
                                     files_only=True):
            sh.move(built_file, repo_target, force=True)
    for repo_dir in self.deps_repo_dir, self.deps_src_repo_dir:
        LOG.info("Creating repo at %s" % repo_dir)
        sh.execute(["createrepo", repo_dir])
    LOG.info("Writing %s to %s", self.REPO_FN, self.anvil_repo_filename)
    (_fn, content) = utils.load_template('packaging', self.REPO_FN)
    params = {"baseurl_bin": "file://%s" % self.deps_repo_dir,
              "baseurl_src": "file://%s" % self.deps_src_repo_dir}
    sh.write_file(self.anvil_repo_filename,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
def _create_deps_repo(self):
    """Turn the rpmbuild output into usable binary and source yum repos.

    All files under ``RPMS`` go to the binary repo dir and all files
    under ``SRPMS`` to the source repo dir; ``createrepo`` is run over
    both, and a templated ``.repo`` file referencing them is written.
    """
    rpms_dir = sh.joinpths(self.rpmbuild_dir, "RPMS")
    for built in sh.listdir(rpms_dir, recursive=True, files_only=True):
        sh.move(built, self.deps_repo_dir, force=True)
    srpms_dir = sh.joinpths(self.rpmbuild_dir, "SRPMS")
    for built in sh.listdir(srpms_dir, recursive=True, files_only=True):
        sh.move(built, self.deps_src_repo_dir, force=True)
    for repo_dir in self.deps_repo_dir, self.deps_src_repo_dir:
        LOG.info("Creating repo at %s" % repo_dir)
        sh.execute(["createrepo", repo_dir])
    LOG.info("Writing %s to %s", self.REPO_FN, self.anvil_repo_filename)
    (_fn, content) = utils.load_template('packaging', self.REPO_FN)
    params = {
        "baseurl_bin": "file://%s" % self.deps_repo_dir,
        "baseurl_src": "file://%s" % self.deps_src_repo_dir,
    }
    sh.write_file(self.anvil_repo_filename,
                  utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
def _move_files(source_dir, target_dir):
    """Move every file found (recursively) under ``source_dir`` into
    ``target_dir``, overwriting existing files; a missing ``source_dir``
    is a no-op.
    """
    if sh.isdir(source_dir):
        for entry in sh.listdir(source_dir, recursive=True, files_only=True):
            sh.move(entry, target_dir, force=True)