def find_files(d, suffix):
    """Recursively collect paths under *d* whose filenames end with *suffix*.

    :param d: root directory to walk.
    :param suffix: filename suffix to match (e.g. ".spec").
    :return: list of joined paths; empty list when nothing matches.
    """
    files = []
    for root, dirs, filenames in os.walk(d):
        # Feed extend() a generator instead of building a throwaway list.
        files.extend(os.path.join(root, f)
                     for f in filenames if f.endswith(suffix))
    if files:
        log.info("Find out %s files: %s" % (suffix, files))
    return files
def patch_apply(patchfile, cwd):
    """Apply *patchfile* inside directory *cwd* using the ``patch`` tool.

    Does nothing (returns None) when *patchfile* is not a regular file;
    otherwise returns the result of check_call().
    """
    # Nothing to apply for a missing / non-regular patch file.
    if not os.path.isfile(patchfile):
        return
    log.info("apply patch: %s" % patchfile)
    # Equivalent shell invocation:
    #   patch -f $PATCH_ARGS --no-backup-if-mismatch < $PATCH
    # The command stays a one-element list because the local check_call
    # wrapper does " ".join(cmd) for logging.
    shell_cmd = "%s -f -p1 --no-backup-if-mismatch < %s" % (patch, patchfile)
    return check_call([shell_cmd], cwd=cwd, shell=True)
def __apply_meta_patches(self):
    """Apply the meta patches listed in <DISTRO>/meta_patches/PATCH_ORDER.

    Patches are applied in the order given by the PATCH_ORDER file, one per
    line, each via shell.patch_apply() with the build directory as cwd.
    Missing patch files are skipped by patch_apply itself.
    """
    meta_patch_dir = os.path.join(self.pkgdir, "%s/meta_patches" % self.DISTRO)
    if os.path.exists(meta_patch_dir):
        log.info('apply meta patches. DIR: %s', meta_patch_dir)
        meta_patch_order = os.path.join(meta_patch_dir, "PATCH_ORDER")
        with open(meta_patch_order) as f:
            # Iterate the file object directly instead of materializing the
            # whole file with readlines().
            for line in f:
                patchfile = os.path.join(meta_patch_dir, line.strip())
                shell.patch_apply(patchfile, cwd=self.build_dir)
def popen_communicate(cmd, input=None, timeout=None, **kwargs):
    """Run *cmd* with subprocess.Popen, feed it *input*, and log its output.

    :param cmd: argument list for Popen.
    :param input: optional data to send to the child's stdin.
    :param timeout: optional communicate() timeout in seconds.
    :param kwargs: forwarded to subprocess.Popen (stdout/stderr pipes etc.).
    :return: the child's return code.
    """
    log.info("\n%s\n%s\n......\n" % ("".ljust(100, "*"), " ".join(cmd)))
    # communicate(input=...) requires a piped stdin; wire it up unless the
    # caller already chose a stdin themselves.  Without this, passing input
    # raises AttributeError because Popen.stdin is None.
    if input is not None and "stdin" not in kwargs:
        kwargs["stdin"] = subprocess.PIPE
    ret = subprocess.Popen(cmd, **kwargs)
    out, err = ret.communicate(input=input, timeout=timeout)
    log.info("Return Code: %s", ret.returncode)
    # Streams come back as bytes when piped without text mode; decode
    # before logging.  isinstance() replaces the non-idiomatic type() == check.
    if out and isinstance(out, bytes):
        log.debug(out.decode("utf-8"))
    if err and isinstance(err, bytes):
        log.error(err.decode("utf-8"))
    return ret.returncode
def check_call(cmd, **kwargs):
    """subprocess.check_call wrapper that logs the command being run.

    Accepts an extra keyword ``stdoutfile``: a path that, when given,
    receives the banner message and (unless the caller overrides them) the
    child's stdout/stderr, appended.

    :raises subprocess.CalledProcessError: when the command exits non-zero.
    """
    msg = "\n%s\n%s\n......\n" % ("".ljust(100, "*"), " ".join(cmd))
    log.info(msg)
    stdoutfile = kwargs.pop("stdoutfile", None)
    if stdoutfile:
        # Keep the file open for the whole child run: the subprocess writes
        # into this descriptor, so closing it before check_call would hand
        # the child a dead file object.
        with open(stdoutfile, 'a') as f:
            f.write(msg)
            f.flush()
            kwargs.setdefault("stdout", f)
            kwargs.setdefault("stderr", f)
            return subprocess.check_call(cmd, **kwargs)
    return subprocess.check_call(cmd, **kwargs)
def build_iteration(self, build_list):
    """Repeatedly build *build_list* until done or no further progress.

    After each pass only the still-failing packages are retried.  When a
    pass makes no progress the worker count drops to one for a final serial
    attempt; if that also stalls, the loop stops.  Finishes by emitting the
    summary via self.output().
    """
    iteration = 0
    pending = build_list
    while pending:
        iteration += 1
        log.info("===== iteration %d start =====", iteration)
        self.start_build(pending, self.processes)
        previous = list(pending)
        pending = self.failed_list
        if len(previous) != len(pending):
            # Progress was made -- go around again at full parallelism.
            continue
        if self.processes <= 1:
            # Even a serial pass stalled; give up.
            break
        self.processes = 1
        log.info("Try again with only one process")
    self.output()
def __init__(self, source, rootdir, max_workers=1, pkglist_filename=None, pkglist=None, output=None, **kwargs):
    """Set up the build state.

    :param source: directory holding the package sources.
    :param rootdir: build root directory.
    :param max_workers: requested parallelism; capped at ABSOLUTE_MAX_WORKERS.
    :param pkglist_filename: optional file naming the packages to build.
    :param pkglist: optional explicit package list.
    :param output: output destination handed in by the caller.
    :param kwargs: extra arguments, accepted and ignored.
    """
    # Never exceed the global worker ceiling.
    self.max_workers = min(max_workers, ABSOLUTE_MAX_WORKERS)
    self.source = source
    self.rootdir = rootdir
    self.output = output
    self.pkglist_filename = pkglist_filename
    self.package_list = pkglist or []
    self.packages = context.Context()
    self.procdata = []
    self.builds = {}
    # NOTE(review): dumps the whole instance dict -- assumes context.Context
    # is JSON-serializable; confirm, otherwise this raises TypeError.
    log.info(json.dumps(self.__dict__, indent=4))
def build_iteration(self):
    """Drive build passes over self.package_list until done or stalled.

    Packages whose build record is not marked successful are retried on the
    next pass.  A pass with no progress reduces the pool to a single worker;
    if even that pass makes no progress the loop ends.  Always finishes with
    self.result().
    """
    attempt = 0
    pending = self.package_list
    while pending:
        attempt += 1
        log.info("===== iteration %d start =====", attempt)
        self.build_packages(pending)
        before = list(pending)
        # Everything not yet built successfully goes into the retry list.
        pending = [name for name, build in self.builds.items()
                   if not build.success]
        if len(before) == len(pending):
            if self.max_workers <= 1:
                break
            log.info("Try again with only one process")
            self.max_workers = 1
    self.result()
def build_srpm(self):
    """Build the source RPM for this package.

    Stages the original file (extracting a .src.rpm or copying a .spec),
    pulls in additional sources and meta patches, builds the SRPM from the
    first spec file found, then refreshes name/version/release from the
    resulting SRPM's tags.
    """
    # Stage the original input: an SRPM gets extracted, a spec gets copied.
    if self.original_file.endswith('.src.rpm'):
        rpmutil.srpm_extract(self.original_file, self.build_dir)
    if self.original_file.endswith('.spec'):
        utils.copy(self.original_file, self.build_spec_dir)
    self.__copy_additional_src()
    self.__apply_meta_patches()
    specfiles = utils.find_out_files(self.build_spec_dir, '.spec')
    log.info("##### BUILD SRPM - %s" % self.name)
    # RPM macro definitions passed through to rpmbuild.
    defines = {
        '_tis_build_type': env.type,
        'tis_patch_ver': self.TIS_PATCH_VER,
        'platform_release': env.release,
        '_tis_dist': self.TIS_DIST
    }
    # Only the first spec file found is built.
    rpmutil.build_srpm(specfiles[0], topdir=self.build_dir, **defines)
    self.srpm_file = utils.find_out_files(self.build_srpm_dir, ".src.rpm")[0]
    # Re-read identity from the freshly built SRPM so later steps use the
    # authoritative values.
    self.name = rpmutil.query_srpm_tag(self.srpm_file, 'Name')
    self.version = rpmutil.query_srpm_tag(self.srpm_file, 'Version')
    self.release = rpmutil.query_srpm_tag(self.srpm_file, 'Release')
def __copy_additional_src(self):
    """Copy extra sources and patches into rpmbuild/SOURCES.

    example:
        files/*
        centos/files/*
        ${CGCS_BASE}/downloads/XXXX.tar.gz
        centos/patches
    """
    # COPY_LIST is an optional, space-separated instance attribute.
    if 'COPY_LIST' in self.__dict__:
        copy_path_list = self.COPY_LIST.split(' ')
        log.debug('COPY_LIST: %s', copy_path_list)
        for p in copy_path_list:
            # Drop a trailing "/*" glob; copy() receives the directory itself.
            p = p[:-2] if p.endswith("/*") else p
            # Relative entries are resolved against the package directory.
            p = p if os.path.isabs(p) else os.path.join(self.pkgdir, p)
            utils.copy(p, self.build_src_dir)
    # Distro-specific patches, when present, are copied alongside the sources.
    patches_dir = os.path.join(self.pkgdir, "%s/patches" % self.DISTRO)
    if os.path.exists(patches_dir):
        log.info('copy additional patches from DIR: %s', patches_dir)
        utils.copy(patches_dir, self.build_src_dir)
def result(self):
    """Log the final summary: which packages succeeded and which failed."""
    failed = [b.pkg for b in self.builds.values() if not b.success]
    success = [b.pkg for b in self.builds.values() if b.success]
    if failed:
        log.warn(
            '''
*** Build Failed ***
following pkgs are successfully built:
%s
following pkgs could not be successfully built:
%s
*** Build Failed ***
''', '\n'.join(success), '\n'.join(failed))
    else:
        log.info(
            '''
*** Build Successfully ***
following pkgs are successfully built:
%s
*** Build Successfully ***
''', '\n'.join(success))
def build_packages(self, to_build_list):
    """Launch one child process per package, at most max_workers at a time.

    For each package a build instance is created and handed to do_build()
    in its own multiprocessing.Process; finished workers are reaped by
    monitor_procdata().  Blocks until every launched build has completed.
    """
    self.signal_handler()
    for pkg in to_build_list:
        index = self.get_free_process_index()
        # Poll until a worker slot frees up.
        while index is None:
            self.monitor_procdata()
            time.sleep(0.1)
            index = self.get_free_process_index()
        log.info("------ Start build %s in process %d ------", pkg, index)
        self.builds[pkg] = new_build_instance(pkg, source=os.path.join(
            self.source, pkg), index=index, rootdir=self.rootdir)
        p = multiprocessing.Process(target=do_build,
                                    args=(self.builds[pkg], ),
                                    name=pkg)
        # Track the process together with its build record for reaping.
        self.procdata.append({'proc': p, 'build': self.builds[pkg]})
        p.start()
    # Drain: wait for all in-flight builds to finish.
    while len(self.procdata) > 0:
        self.monitor_procdata()
        time.sleep(0.1)
def execute():
    """Entry point: fill env from os.environ, then run the build chain."""
    # Fill unset env entries from the process environment.
    for e in _ENV:
        if e not in env or env[e] is None:
            env[e] = os.environ.get(e)
    unknown_args = [
        e for e in _ENV + _fields if e not in env or env[e] is None
    ]
    if unknown_args:
        # NOTE(review): execution continues after this critical log --
        # confirm whether an early exit/raise was intended here.
        log.critical('Argument %s must be input', unknown_args)
    if env.pkg:
        # env.pkg may be either a package-list file or a single package dir.
        pkglist_path = os.path.join(env.source, env.pkg)
        if os.path.isfile(pkglist_path):
            env.pkglist_filename = env.pkg
        elif os.path.isdir(pkglist_path):
            env.pkglist = [env.pkg]
            env.max_workers = 4
    log.CONF('%s-%s' % (env.SYSTEM, env.DISTRO), logdir=env.rootdir)
    log.info(env)
    chain = BuildChain(**env)
    chain.fetch_source()
    chain.fetch_package_list()
    chain.build_iteration()
def monitor_procdata(self):
    """Reap finished build processes and record their outcomes.

    Still-running processes are left alone.  Finished ones are joined,
    their exit code mapped to success/failure/skip, and their entries
    removed from self.builds and self.procdata.
    """
    # Iterate over a snapshot: entries are removed from self.procdata below,
    # and mutating a list while iterating it skips elements.
    for pd in list(self.procdata):
        p = pd['proc']
        result = p.exitcode
        if result is None:
            continue  # still running
        p.join()
        build = pd['build']
        if result == SUCCESS:
            log.info("%s] Success Build", build.pkg.rjust(20))
            build.success = True
            build.update_repo(max_workers=self.max_workers)
        elif result == FAIL:
            # Left unsuccessful so the next iteration retries it.
            log.warn(
                "%s] Error Build. Try to build again if other packages will succeed.",
                build.pkg.rjust(17))
        elif result == SKIP:
            log.info("%s] Skipping already built pkg", build.pkg.rjust(20))
            build.success = True
        else:
            # Message fixed: "exist code" -> "exit code".
            log.error("%s] Unknown exit code %d", build.pkg.rjust(20),
                      result)
        del self.builds[build.pkg]
        self.procdata.remove(pd)
def cleanup(self):
    """Remove the SRPM and RPM build directories, when present."""
    log.info('Clean Build Environment')
    for target in (self.build_srpm_dir, self.build_rpm_dir):
        if not os.path.exists(target):
            continue
        shutil.rmtree(target)
def git_checkout(url, dist_dir, branch='master'):
    """Shallow-clone *branch* of *url* into *dist_dir*, replacing any old copy."""
    log.info("fetch repo: %s branch: %s in dir: %s" % (url, branch, dist_dir))
    # Wipe a stale checkout first so the clone lands in a clean directory.
    if os.path.exists(dist_dir):
        shutil.rmtree(dist_dir)
    clone_cmd = [git, "clone", "--depth=1", "--branch=%s" % branch,
                 url, dist_dir]
    return check_call(clone_cmd)
def do_build(build):
    """Child-process entry point: run one package's build and hand it back."""
    log.info("Start Build %s in process %d", build.pkg, build.index)
    build.do_build()
    return build
def check_output(cmd, **kwargs):
    """Run *cmd*, log it, and return its decoded, stripped stdout.

    :raises subprocess.CalledProcessError: when the command exits non-zero.
    """
    banner = "\n%s\n%s\n......\n" % ("".ljust(100, "*"), " ".join(cmd))
    log.info(banner)
    raw = subprocess.check_output(cmd, **kwargs)
    text = raw.decode("UTF-8").strip()
    log.info("Result %s", text)
    return text
def on_build_process_finished(self, builder):
    """Record one finished build: log it and track failures for retry."""
    if not builder.success:
        self.failed_list.append(builder.pkg)
        log.error("BUILD FAILED : %s", builder.pkg)
        return
    log.info("BUILD SUCCESS : %s", builder.pkg)
def output(self):
    """Emit the OUTPUT marker to the log."""
    log.info('OUTPUT')
def test_log_info(self):
    """log.info with %-style lazy arguments must not raise."""
    try:
        log.info("%s %d", "test for info", 1)
        assert True
    except Exception:
        # Any exception here means the logging call itself is broken.
        assert False
def prepare_source(self):
    """Ensure a source RPM exists, building one when none is present yet."""
    if self.srpm_file is None:
        # Locate the original artifact and build metadata, then build the
        # SRPM; build_srpm() sets self.srpm_file on success.
        self.find_build_original()
        self.find_build_data()
        self.build_srpm()
        log.info('SRPM build success: %s', self.srpm_file)
def get_free_process_index(self):
    """Return the lowest worker slot not in use, or None when all are busy."""
    occupied = {entry['build'].index for entry in self.procdata}
    for slot in range(self.max_workers):
        if slot in occupied:
            continue
        log.info('Get free process %d', slot)
        return slot
def on_build_process_finished(self, builder):
    """Log the outcome of one finished build."""
    if builder.success:
        log.info("BUILD SUCCESS : %s", builder.pkg)
        return
    log.error("BUILD FAILED : %s", builder.pkg)