    def test_dep2(self):
        specs = RpmSpecCollection([RpmSpecFile(BASIC_SPEC_CONTENT),
                                   RpmSpecFile(BASIC2_SPEC_CONTENT),
                                   RpmSpecFile(BASIC3_SPEC_CONTENT)])
        self.assertEqual(specs.compute_order(),
                         ['package', 'packageC', 'packageB'])

    def test_build_requires(self):
        spec = RpmSpecFile(DEP_SPEC_CONTENT)
        self.assertEqual(set(spec.build_requires()), set(['dep1', 'dep2']))

    def test_name_package(self):
        spec = RpmSpecFile(NAME_PKG_CONTENT)
        self.assertEqual(spec.packages(), ['package', 'package-package-toto'])

    def test_nsub_package(self):
        spec = RpmSpecFile(NSUB_PKG_CONTENT)
        self.assertEqual(spec.packages(), ['package', 'pre-serv-post'])

    def test_sub_package(self):
        spec = RpmSpecFile(SUB_PKG_CONTENT)
        self.assertEqual(spec.packages(), ['package', 'package-subpkg-toto'])

    def test_package_with_provides(self):
        spec = RpmSpecFile(PROVIDES_SPEC_CONTENT)
        self.assertEqual(spec.packages(), ['package', 'oldname'])

    def test_package_with_macro(self):
        spec = RpmSpecFile(MACRO_SPEC_CONTENT)
        self.assertEqual(spec.packages(), ['package'])

    def test_build_requires_with_operator_no_space(self):
        spec = RpmSpecFile(OPERATOR_DEP_SPEC_CONTENT2)
        self.assertEqual(set(spec.build_requires()), set(['dep1', 'dep2']))

    def test_basic_package(self):
        spec = RpmSpecFile(BASIC_SPEC_CONTENT)
        self.assertEqual(spec.packages(), ['package'])

    def test_basic(self):
        specs = RpmSpecCollection([RpmSpecFile(BASIC_SPEC_CONTENT)])
        self.assertEqual(specs.compute_order(), ['package'])
def main():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required).",
                        required=True)
    parser.add_argument('--info-repo',
                        help="Use a local rdoinfo repo instead of "
                             "fetching the default one using rdopkg.")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    parser.add_argument('--package-name',
                        help="Build a specific package name only.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset the packaging git repo, force the "
                             "build and add the public master repo for "
                             "dependencies (dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by delorean.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                             "Implies --package-name.")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Implies --head-only.")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")

    options, args = parser.parse_known_args(sys.argv[1:])

    cp = configparser.RawConfigParser(default_options)
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)

    global session
    session = getSession('sqlite:///commits.sqlite')
    packages = getpackages(local_info_repo=options.info_repo,
                           tags=cp.get("DEFAULT", "tags"))

    if options.status is True:
        if options.package_name:
            names = (options.package_name, )
        else:
            names = [p['name'] for p in packages]
        for name in names:
            commit = getLastProcessedCommit(session, name, 'invalid status')
            if commit:
                print(name, commit.status)
            else:
                print(name, 'NO_BUILD')
        sys.exit(0)

    if options.recheck is True:
        if not options.package_name:
            logger.error('Please use --package-name with --recheck.')
            sys.exit(1)
        commit = getLastProcessedCommit(session, options.package_name)
        if commit:
            if commit.status == 'SUCCESS':
                logger.error("Trying to recheck an already successful commit,"
                             " ignoring.")
                sys.exit(1)
            elif commit.status == 'RETRY':
                # In this case we are going to retry anyway, so
                # do nothing and exit.
                logger.warning("Trying to recheck a commit in RETRY state,"
                               " ignoring.")
                sys.exit(0)
            else:
                # We could set the status to RETRY here, but if we have gone
                # beyond max_retries it wouldn't work as expected. Thus, our
                # only chance is to remove the commit.
                session.delete(commit)
                session.commit()
                sys.exit(0)
        else:
            logger.error("There are no existing commits for package %s"
                         % options.package_name)
            sys.exit(1)

    # When we run a program instead of building, we don't care about
    # the commits; we just want to run once per package.
    if options.run:
        options.head_only = True

    # Build a list of commits we need to process.
    toprocess = []
    for package in packages:
        project = package["name"]
        since = "-1"
        commit = getLastProcessedCommit(session, project)
        if commit:
            # This will return all commits since the last handled commit,
            # including the last handled commit; remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        repo = package["upstream"]
        distro = package["master-distgit"]
        if (not options.package_name or
                package["name"] == options.package_name):
            project_toprocess = getinfo(cp, project, repo, distro, since,
                                        options.local, options.dev, package)
            # If since == -1, then we only want to trigger a build for the
            # most recent change.
            if since == "-1" or options.head_only:
                del project_toprocess[:-1]

            # The first entry in the list of commits is a commit we have
            # already processed. We want to process it again only if in dev
            # mode or if the distro hash has changed; we can't simply check
            # against the last commit in the db, as multiple commits can have
            # the same commit date.
            for commit_toprocess in project_toprocess:
                if ((options.dev is True) or
                    options.run or
                    (not session.query(Commit).filter(
                        Commit.project_name == project,
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.distro_hash == commit_toprocess.distro_hash,
                        Commit.status != "RETRY").all())):
                    toprocess.append(commit_toprocess)

    # If requested, sort according to build and install dependencies.
    if options.order is True and not options.package_name:
        # Collect info from all spec files.
        logger.info("Reading rpm spec files")
        projects = sorted([p['name'] for p in packages])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            specpath = os.path.join(cp.get("DEFAULT", "datadir"),
                                    project_name + "_distro",
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise.
            rawspec = open(specpath).read(-1)
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                   for spec in speclist])
        # Compute the order according to BuildRequires.
        logger.info("Computing build order")
        orders = specs.compute_order()
        # Hack, because the package name is not consistent with the directory
        # name and the spec file name.
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # Sort the commits according to the score of their project, then
        # use the timestamp of the commits as a secondary key.
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                return cmp(a.dt_commit, b.dt_commit)
            return cmp(orders.index(a.project_name),
                       orders.index(b.project_name))
        toprocess.sort(cmp=my_cmp)
    else:
        # Sort according to the timestamp of the commits.
        toprocess.sort()

    exit_code = 0
    gerrit = cp.get("DEFAULT", "gerrit")
    for commit in toprocess:
        project = commit.project_name

        project_info = session.query(Project).filter(
            Project.project_name == project).first()
        if not project_info:
            project_info = Project(project_name=project, last_email=0)

        commit_hash = commit.commit_hash

        if options.run:
            try:
                run(options.run, cp, commit, options.build_env,
                    options.dev, options.use_public, options.order,
                    do_build=False)
            except Exception:
                exit_code = 1
                if options.stop:
                    return exit_code
            continue

        logger.info("Processing %s %s" % (project, commit_hash))

        notes = ""
        try:
            built_rpms, notes = build(cp, packages, commit,
                                      options.build_env, options.dev,
                                      options.use_public, options.order)
        except Exception as e:
            exit_code = 1
            datadir = os.path.realpath(cp.get("DEFAULT", "datadir"))
            logfile = os.path.join(datadir, "repos",
                                   commit.getshardedcommitdir(),
                                   "rpmbuild.log")
            max_retries = cp.getint("DEFAULT", "maxretries")
            if (isknownerror(logfile) and
                (timesretried(project, commit_hash, commit.distro_hash) <
                 max_retries)):
                logger.exception("Known error building packages for %s,"
                                 " will retry later" % project)
                commit.status = "RETRY"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            else:
                # If the log file hasn't been created, add what we have.
                # This happens if the rpm build script didn't run.
                if not os.path.exists(logfile):
                    with open(logfile, "w") as fp:
                        fp.write(getattr(e, "message", notes))

                if not project_info.suppress_email():
                    sendnotifymail(cp, packages, commit)
                    project_info.sent_email()
                    session.add(project_info)

                # Only submit a gerrit review if the last build was
                # successful or nonexistent, to avoid creating a review
                # for the same problem multiple times.
                if gerrit is not None:
                    if options.build_env:
                        env_vars = list(options.build_env)
                    else:
                        env_vars = []
                    last_build = getLastProcessedCommit(session, project)
                    if not last_build or last_build.status == 'SUCCESS':
                        for pkg in packages:
                            if project == pkg['name']:
                                break
                        else:
                            pkg = None
                        if pkg:
                            url = (get_commit_url(commit, pkg) +
                                   commit.commit_hash)
                            env_vars.append('GERRIT_URL=%s' % url)
                            env_vars.append('GERRIT_LOG=%s/%s' %
                                            (cp.get("DEFAULT", "baseurl"),
                                             commit.getshardedcommitdir()))
                            maintainers = ','.join(pkg['maintainers'])
                            env_vars.append('GERRIT_MAINTAINERS=%s' %
                                            maintainers)
                            logger.info('Creating a gerrit review using '
                                        'GERRIT_URL=%s '
                                        'GERRIT_MAINTAINERS=%s ' %
                                        (url, maintainers))
                            try:
                                submit_review(cp, commit, env_vars)
                            except Exception:
                                logger.error('Unable to create review, '
                                             'see review.log')
                        else:
                            logger.error('Unable to find info for project %s'
                                         % project)
                    else:
                        logger.info('Last build not successful '
                                    'for %s' % project)
                commit.status = "FAILED"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            if options.stop:
                return exit_code
        else:
            commit.status = "SUCCESS"
            commit.notes = notes
            commit.rpms = ",".join(built_rpms)
            session.add(commit)
        if options.dev is False:
            session.commit()
        genreports(cp, packages, options)

    # If we were bootstrapping, set the packages that required it to RETRY.
    if options.order is True and not options.package_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(cp, packages, options)
    return exit_code
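
# Entry-point guard: not part of the original excerpt, added on the
# assumption that the module may also be executed directly rather than
# only through a packaged console script.
if __name__ == '__main__':
    sys.exit(main())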