def test_dep2(self):
    specs = RpmSpecCollection([RpmSpecFile(BASIC_SPEC_CONTENT),
                               RpmSpecFile(BASIC2_SPEC_CONTENT),
                               RpmSpecFile(BASIC3_SPEC_CONTENT)])
    self.assertEqual(specs.compute_order(),
                     ['package', 'packageC', 'packageB'])

def test_sub_package(self):
    spec = RpmSpecFile(SUB_PKG_CONTENT)
    self.assertEqual(spec.packages(), ['package', 'package-subpkg-toto'])

def test_package_with_provides(self):
    spec = RpmSpecFile(PROVIDES_SPEC_CONTENT)
    self.assertEqual(spec.packages(), ['package', 'oldname'])

def test_package_with_macro(self):
    spec = RpmSpecFile(MACRO_SPEC_CONTENT)
    self.assertEqual(spec.packages(), ['package'])

def test_basic_package(self):
    spec = RpmSpecFile(BASIC_SPEC_CONTENT)
    self.assertEqual(spec.packages(), ['package'])

def test_basic(self):
    specs = RpmSpecCollection([RpmSpecFile(BASIC_SPEC_CONTENT)])
    self.assertEqual(specs.compute_order(), ['package'])

def test_build_requires(self):
    spec = RpmSpecFile(DEP_SPEC_CONTENT)
    self.assertEqual(set(spec.build_requires()), set(['dep1', 'dep2']))

def test_multiple_names(self):
    specs = RpmSpecCollection([RpmSpecFile(MULTIPLE_NAME_CONTENT)])
    self.assertEqual(specs.compute_order(), ['package'])

def test_build_requires_with_operator_no_space(self):
    spec = RpmSpecFile(OPERATOR_DEP_SPEC_CONTENT2)
    self.assertEqual(set(spec.build_requires()), set(['dep1', 'dep2']))

def test_name_package(self):
    spec = RpmSpecFile(NAME_PKG_CONTENT)
    self.assertEqual(spec.packages(), ['package', 'package-package-toto'])

def test_nsub_package(self):
    spec = RpmSpecFile(NSUB_PKG_CONTENT)
    self.assertEqual(spec.packages(), ['package', 'pre-serv-post'])

def test_dep_sub(self):
    specs = RpmSpecCollection([RpmSpecFile(DEP_SUB_PKG_CONTENT),
                               RpmSpecFile(SUB_PKG_CONTENT)])
    self.assertEqual(specs.compute_order(), ['package', 'packageD'])
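
# A minimal sketch of the shape the *_SPEC_CONTENT fixtures above are
# assumed to take (the real fixtures are defined elsewhere in this
# module; everything below is hypothetical): just enough of an RPM spec
# for RpmSpecFile to extract the package name, sub-packages and
# BuildRequires.
EXAMPLE_SPEC_CONTENT = """
Name: package
Version: 1.0.0
Release: 1%{?dist}
BuildRequires: dep1
BuildRequires: dep2>=2.0

%package subpkg-toto
Summary: An example sub-package
"""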


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--config-override', action='append',
                        help="Override a configuration option from the"
                             " config file. Specify it as:"
                             " section.option=value. Can be used multiple"
                             " times if more than one override is needed.")
    parser.add_argument('--info-repo',
                        help="use a local distroinfo repo instead of"
                             " fetching the default one. Only applies when"
                             " pkginfo_driver is rdoinfo or downstream in"
                             " projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible. Only committed"
                             " changes in the local repo will be used in the"
                             " build.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name', action='append',
                       help="Build a specific project name only."
                            " Use multiple times to build more than one"
                            " project in a run.")
    group.add_argument('--package-name', action='append',
                       help="Build a specific package name only."
                            " Use multiple times to build more than one"
                            " package in a run.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build"
                             " and add public master repo for dependencies"
                             " (dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies"
                             " when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec"
                             " files instead of the dates of the commits."
                             " Implies --sequential.")
    parser.add_argument('--sequential', action="store_true",
                        help="Run all actions sequentially, regardless of the"
                             " number of workers specified in projects.ini.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package."
                             " Implies --package-name")
    parser.add_argument('--force-recheck', action="store_true",
                        help="Force a rebuild for a particular package, even"
                             " if its last build was successful. Requires"
                             " setting allow_force_rechecks=True in"
                             " projects.ini. Implies --package-name and"
                             " --recheck")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build."
                             " Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")
    parser.add_argument('--verbose-build', action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock', action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo', action="store_true",
                        help="Do not generate a repo with all the built"
                             " packages.")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp, overrides=options.config_override)
    if options.dev:
        _, tmpdb_path = tempfile.mkstemp()
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    if options.project_name:
        pkg_names = [p['name'] for p in packages
                     if p['project'] in options.project_name]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    if options.status is True:
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(session, name,
                                                'invalid status',
                                                type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built"
                            " commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name'
                         ' with --recheck.')
            sys.exit(1)
        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name,
                                            type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True

    # Build a list of commits we need to process
    toprocess = []
    skipped_list = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is one we have already
        # processed; we only want to process it again in dev mode, or if
        # the distro hash has changed. We can't simply check against the
        # last commit in the db, as multiple commits can have the same
        # commit date.
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
               options.run or \
               not session.query(Commit).filter(
                   Commit.commit_hash == commit_toprocess.commit_hash,
                   Commit.distro_hash == commit_toprocess.distro_hash,
                   Commit.extended_hash == commit_toprocess.extended_hash,
                   Commit.type == commit_toprocess.type,
                   Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        pool = multiprocessing.Pool()  # This will use all the system cpus
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(
            getinfo, local=options.local, dev_mode=options.dev,
            head_only=options.head_only,
            db_connection=config_options.database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg, skipped = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                if skipped:
                    skipped_list.append(updated_pkg['name'])
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _, skipped = getinfo(
                    package, local=options.local, dev_mode=options.dev,
                    head_only=options.head_only,
                    db_connection=config_options.database_connection)
                if skipped:
                    skipped_list.append(package['name'])
                add_commits(project_toprocess)
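
    # Note on the fan-out above: Pool.imap() maps the worker over a single
    # iterable, so the fixed per-run options (local, dev_mode, head_only,
    # db_connection) are bound first with functools.partial, leaving the
    # package as the only varying argument. A standalone sketch of the same
    # idiom is in _example_partial_imap() at the end of this file.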

    closeSession(session)  # Close session, will reopen during post_build

    # Store skip list
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(os.path.join(datadir, 'repos')):
        os.makedirs(os.path.join(datadir, 'repos'))
    with open(os.path.join(datadir, 'repos', 'skiplist.txt'), 'w') as fp:
        for pkg in skipped_list:
            fp.write(pkg + '\n')

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected,"
                        " please make sure the package name(s) are correct,"
                        " and that any failed commit you want to rebuild has"
                        " been removed from the database.")
        return 0

    # if requested, do a sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            filename = None
            for f in os.listdir(pkginfo.distgit_dir(project_name)):
                if f.endswith('.spec'):
                    filename = f
            if filename:
                specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                        filename)
                speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                           '-P', specpath))

                # Check if repo_bootstrap is defined in the package.
                # If so, we'll need to rebuild after the whole bootstrap
                rawspec = open(specpath).read(-1)
                if 'repo_bootstrap' in rawspec:
                    bootstraplist.append(project_name)
            else:
                logger.warning("Could not find a spec for package %s"
                               % project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                   for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the
        # directory name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                _a = (orders.index(a.project_name)
                      if a.project_name in orders else sys.maxsize)
                _b = (orders.index(b.project_name)
                      if b.project_name in orders else sys.maxsize)
            # cmp is no longer available in python3, so replace it. See
            # "Ordering Comparisons" on:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)

        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages, commit, run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order, sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    if not options.run:
                        failures = post_build(status, packages, session,
                                              build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(status, packages, session,
                                                  toprocess_copy,
                                                  dev_mode=options.dev,
                                                  run_cmd=options.run,
                                                  stop=options.stop,
                                                  build_env=options.build_env,
                                                  head_only=options.head_only,
                                                  consistent=consistent,
                                                  failures=failures)
                closeSession(session)

            if exit_value != 0:
                exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker, packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order, sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status, packages, session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status, packages, session, toprocess,
                        dev_mode=options.dev, run_cmd=options.run,
                        stop=options.stop, build_env=options.build_env,
                        head_only=options.head_only, consistent=consistent,
                        failures=failures)
                    closeSession(session)

                if exit_value != 0:
                    exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    if options.dev:
        os.remove(tmpdb_path)

    return exit_code
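

# A self-contained sketch of the comparator idiom used in my_cmp() above
# (illustration only; nothing in dlrn calls it). Python 3 removed cmp()
# and the cmp= argument to list.sort(), so a comparator instead returns
# (a > b) - (a < b) and is adapted with functools.cmp_to_key:
def _example_cmp_to_key():
    from functools import cmp_to_key

    def _cmp(a, b):
        # Returns -1, 0 or 1, exactly what Python 2's cmp(a, b) returned
        return (a > b) - (a < b)

    return sorted([3, 1, 2], key=cmp_to_key(_cmp))  # -> [1, 2, 3]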


def main():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required).", required=True)
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of"
                             " fetching the default one using rdopkg."
                             " Only applies when pkginfo_driver is rdoinfo"
                             " in projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    parser.add_argument('--package-name',
                        help="Build a specific package name only.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build"
                             " and add public master repo for dependencies"
                             " (dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies"
                             " when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec"
                             " files instead of the dates of the commits.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package."
                             " Implies --package-name")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build."
                             " Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")

    options, args = parser.parse_known_args(sys.argv[1:])

    cp = configparser.RawConfigParser(default_options)
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)

    global session
    session = getSession('sqlite:///commits.sqlite')
    global config_options
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo_object = import_object(pkginfo_driver)
    packages = pkginfo_object.getpackages(local_info_repo=options.info_repo,
                                          tags=config_options.tags)

    if options.status is True:
        if options.package_name:
            names = (options.package_name, )
        else:
            names = [p['name'] for p in packages]
        for name in names:
            commit = getLastProcessedCommit(session, name, 'invalid status')
            if commit:
                print(name, commit.status)
            else:
                print(name, 'NO_BUILD')
        sys.exit(0)

    if options.recheck is True:
        if not options.package_name:
            logger.error('Please use --package-name with --recheck.')
            sys.exit(1)
        commit = getLastProcessedCommit(session, options.package_name)
        if commit:
            if commit.status == 'SUCCESS':
                logger.error("Trying to recheck an already successful"
                             " commit, ignoring.")
                sys.exit(1)
            elif commit.status == 'RETRY':
                # In this case, we are going to retry anyway, so
                # do nothing and exit
                logger.warning("Trying to recheck a commit in RETRY state,"
                               " ignoring.")
                sys.exit(0)
            else:
                # We could set the status to RETRY here, but if we have gone
                # beyond max_retries it wouldn't work as expected. Thus, our
                # only chance is to remove the commit
                session.delete(commit)
                session.commit()
                sys.exit(0)
        else:
            logger.error("There are no existing commits for package %s"
                         % options.package_name)
            sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True

    # Build a list of commits we need to process
    toprocess = []
    for package in packages:
        project = package["name"]
        since = "-1"
        commit = getLastProcessedCommit(session, project)
        if commit:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        repo = package["upstream"]
        distro = package["master-distgit"]
        if (not options.package_name or
                package["name"] == options.package_name):
            project_toprocess = getinfo(project, repo, distro, since,
                                        options.local, options.dev, package)
            # If since == -1, then we only want to trigger a build for the
            # most recent change
            if since == "-1" or options.head_only:
                del project_toprocess[:-1]

            # The first entry in the list of commits is one we have already
            # processed; we only want to process it again in dev mode, or
            # if the distro hash has changed. We can't simply check against
            # the last commit in the db, as multiple commits can have the
            # same commit date.
            for commit_toprocess in project_toprocess:
                if ((options.dev is True) or
                    options.run or
                    (not session.query(Commit).filter(
                        Commit.project_name == project,
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.distro_hash == commit_toprocess.distro_hash,
                        Commit.status != "RETRY").all())):
                    toprocess.append(commit_toprocess)

    # if requested, do a sort according to build and install
    # dependencies
    if options.order is True and not options.package_name:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([p['name'] for p in packages])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            specpath = os.path.join(config_options.datadir,
                                    project_name + "_distro",
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            rawspec = open(specpath).read(-1)
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

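        # compute_order() returns build dependencies before their
        # dependents, so each package sorts after everything it
        # build-requires; test_dep2 at the top of this file, for example,
        # asserts the order ['package', 'packageC', 'packageB'] for three
        # interdependent specs.
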
        specs = RpmSpecCollection([RpmSpecFile(spec)
                                   for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the
        # directory name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                return cmp(a.dt_commit, b.dt_commit)
            return cmp(orders.index(a.project_name),
                       orders.index(b.project_name))

        toprocess.sort(cmp=my_cmp)
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    for commit in toprocess:
        project = commit.project_name
        project_info = session.query(Project).filter(
            Project.project_name == project).first()
        if not project_info:
            project_info = Project(project_name=project, last_email=0)
        commit_hash = commit.commit_hash

        if options.run:
            try:
                run(options.run, commit, options.build_env, options.dev,
                    options.use_public, options.order, do_build=False)
            except Exception:
                exit_code = 1
                if options.stop:
                    return exit_code
            continue

        logger.info("Processing %s %s" % (project, commit_hash))
        notes = ""
        try:
            built_rpms, notes = build(packages, commit, options.build_env,
                                      options.dev, options.use_public,
                                      options.order)
        except Exception as e:
            datadir = os.path.realpath(config_options.datadir)
            exit_code = 1
            logfile = os.path.join(datadir, "repos",
                                   commit.getshardedcommitdir(),
                                   "rpmbuild.log")
            if (isknownerror(logfile) and
                (timesretried(project, commit_hash, commit.distro_hash) <
                 config_options.maxretries)):
                logger.exception("Known error building packages for %s,"
                                 " will retry later" % project)
                commit.status = "RETRY"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            else:
                # If the log file hasn't been created, we add what we have.
                # This happens if the rpm build script didn't run.
                if not os.path.exists(logfile):
                    with open(logfile, "w") as fp:
                        fp.write(getattr(e, "message", notes))

                if not project_info.suppress_email():
                    sendnotifymail(packages, commit)
                    project_info.sent_email()
                    session.add(project_info)

                # only submit a gerrit review if the last build was
                # successful or non-existent, to avoid creating a gerrit
                # review for the same problem multiple times.
                if config_options.gerrit is not None:
                    if options.build_env:
                        env_vars = list(options.build_env)
                    else:
                        env_vars = []
                    last_build = getLastProcessedCommit(session, project)
                    if not last_build or last_build.status == 'SUCCESS':
                        for pkg in packages:
                            if project == pkg['name']:
                                break
                        else:
                            pkg = None
                        if pkg:
                            url = (get_commit_url(commit, pkg) +
                                   commit.commit_hash)
                            env_vars.append('GERRIT_URL=%s' % url)
                            env_vars.append('GERRIT_LOG=%s/%s' %
                                            (config_options.baseurl,
                                             commit.getshardedcommitdir()))
                            maintainers = ','.join(pkg['maintainers'])
                            env_vars.append('GERRIT_MAINTAINERS=%s' %
                                            maintainers)
                            logger.info('Creating a gerrit review using '
                                        'GERRIT_URL=%s '
                                        'GERRIT_MAINTAINERS=%s ' %
                                        (url, maintainers))
                            try:
                                submit_review(commit, env_vars)
                            except Exception:
                                logger.error('Unable to create review,'
                                             ' see review.log')
                        else:
                            logger.error('Unable to find info for project %s'
                                         % project)
                    else:
                        logger.info('Last build not successful for %s'
                                    % project)
                commit.status = "FAILED"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            if options.stop:
                return exit_code
        else:
            commit.status = "SUCCESS"
            commit.notes = notes
            commit.rpms = ",".join(built_rpms)
            session.add(commit)
        if options.dev is False:
            session.commit()
        genreports(packages, options)
        sync_repo(commit)

    # If we were bootstrapping, set the packages that required it to RETRY
    if options.order is True and not options.package_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()

    genreports(packages, options)
    return exit_code
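

# The functools.partial + Pool.imap fan-out used twice by the newer main()
# above, as a self-contained sketch (illustration only; nothing in dlrn
# calls these helpers, and the names are hypothetical). The fixed keyword
# arguments are bound up front, and the pool then iterates over the only
# varying argument.
def _example_scale(item, factor=1):
    # Defined at module level so multiprocessing can pickle it for the
    # worker processes.
    return item * factor


def _example_partial_imap():
    import multiprocessing
    from functools import partial

    pool = multiprocessing.Pool(2)
    try:
        worker = partial(_example_scale, factor=10)
        return list(pool.imap(worker, [1, 2, 3]))  # -> [10, 20, 30]
    finally:
        pool.close()
        pool.join()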