def test_override(self):
    """Verify that explicit overrides take precedence over config values."""
    self.config.set("DEFAULT", "fallback_to_master", "1")
    self.config.set("DEFAULT", "workers", "7")
    # Without overrides, values come straight from the parsed file.
    opts = ConfigOptions(self.config)
    self.assertEqual(opts.fallback_to_master, True)
    self.assertEqual(opts.workers, 7)
    # With overrides, the section.option=value entries win.
    override_list = ['DEFAULT.fallback_to_master=0', 'DEFAULT.workers=3']
    opts = ConfigOptions(self.config, overrides=override_list)
    self.assertEqual(opts.fallback_to_master, False)
    self.assertEqual(opts.workers, 3)
def setUp(self):
    """Build config, a sample commit, a mocked DB session and package list."""
    super(TestProcessBuildResult, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set('DEFAULT', 'datadir', tempfile.mkdtemp())
    cp.set('DEFAULT', 'scriptsdir', tempfile.mkdtemp())
    cp.set('DEFAULT', 'baseurl',
           "file://%s" % cp.get('DEFAULT', 'datadir'))
    self.config = ConfigOptions(cp)
    # A single fake commit used by the tests in this class.
    self.commit = db.Commit(
        dt_commit=123, project_name='foo', type="rpm",
        commit_hash='1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf',
        repo_dir='/home/dlrn/data/foo',
        distro_hash='c31d1b18eb5ab5aed6721fc4fad06c9bd242490f',
        dt_distro=123,
        distgit_dir='/home/dlrn/data/foo_distro',
        commit_branch='master', dt_build=1441245153)
    self.db_fd, filepath = tempfile.mkstemp()
    self.session = mocked_session("sqlite:///%s" % filepath)
    self.packages = [
        {'upstream': 'https://github.com/openstack/foo',
         'name': 'foo',
         'maintainers': '*****@*****.**'},
        {'upstream': 'https://github.com/openstack/test',
         'name': 'test',
         'maintainers': '*****@*****.**'},
    ]
def setUp(self):
    """Configure the copr build driver and fake logs/download tree."""
    super(TestDriverCopr, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "build_driver",
           "dlrn.drivers.coprdriver.CoprBuildDriver")
    cp.set('coprbuild_driver', 'coprid', 'foo/repo')
    self.config = ConfigOptions(cp)
    self.temp_dir = tempfile.mkdtemp()
    # Create fake src.rpm
    with open('%s/pkg.src.rpm' % self.temp_dir, 'a') as fh:
        fh.write('')
    # Create fake build and download logs
    with open("%s/coprbuild.log" % self.temp_dir, 'a') as fh:
        fh.write("Created builds: 1234")
    with open('%s/coprdownload.log' % self.temp_dir, 'a') as fh:
        fh.write('')
    # Create fake download file structure, mimicking a copr chroot layout.
    os.mkdir(os.path.join(self.temp_dir, '1234'))
    target_chroot = os.path.join(self.temp_dir, '1234',
                                 'fedora-rawhide-i386')
    os.mkdir(target_chroot)
    with open('%s/state.log.gz' % target_chroot, 'a') as fh:
        fh.write('')
    with open('%s/pkg.rpm' % target_chroot, 'a') as fh:
        fh.write('')
def setUp(self):
    """Select the rdoinfo package-info driver for these tests."""
    super(TestGetinfo, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "pkginfo_driver",
           "dlrn.drivers.rdoinfo.RdoInfoDriver")
    self.config = ConfigOptions(cp)
def setUp(self):
    """Prepare config, a sample commit and package metadata for notifications."""
    super(TestNotifications, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set('DEFAULT', 'datadir', tempfile.mkdtemp())
    cp.set('DEFAULT', 'scriptsdir', tempfile.mkdtemp())
    cp.set('DEFAULT', 'baseurl',
           "file://%s" % cp.get('DEFAULT', 'datadir'))
    self.config = ConfigOptions(cp)
    self.commit = db.Commit(
        dt_commit=123, project_name='foo',
        commit_hash='1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf',
        repo_dir='/home/dlrn/data/foo',
        distro_hash='c31d1b18eb5ab5aed6721fc4fad06c9bd242490f',
        dt_distro=123,
        distgit_dir='/home/dlrn/data/foo_distro',
        commit_branch='master', dt_build=1441245153)
    self.packages = [
        {'upstream': 'https://github.com/openstack/foo',
         'name': 'foo',
         'maintainers': ['*****@*****.**'],
         'master-distgit':
             'https://github.com/rdo-packages/foo-distgit.git'},
    ]
def setUp(self):
    """Select the mock build driver and create a scratch directory."""
    super(TestDriverMock, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "build_driver",
           "dlrn.drivers.mockdriver.MockBuildDriver")
    self.config = ConfigOptions(cp)
    self.temp_dir = tempfile.mkdtemp()
def user_manager():
    """CLI entry point for user management (create/delete/update subcommands).

    Parses the command line, loads the configuration file, and dispatches to
    the handler registered in ``command_funcs`` for the chosen subcommand.
    """
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")
    subparsers = parser.add_subparsers(dest='command', title='subcommands',
                                       description='available subcommands')
    subparsers.required = True
    # Subcommand create
    parser_create = subparsers.add_parser('create', help='Create a user')
    parser_create.add_argument('--username', type=str, required=True,
                               help='User name')
    parser_create.add_argument('--password', type=str, help='Password')
    # Subcommand delete
    parser_delete = subparsers.add_parser('delete', help='Delete a user')
    parser_delete.add_argument('--username', type=str, required=True,
                               help='User name')
    parser_delete.add_argument('--force', dest='force', action='store_true',
                               help='Do not request a confirmation')
    # Subcommand update
    parser_update = subparsers.add_parser('update', help='Update a user')
    parser_update.add_argument('--username', type=str, required=True,
                               help='User name')
    parser_update.add_argument('--password', type=str, required=True,
                               help='New password')
    options = parser.parse_args(sys.argv[1:])
    setup_logging(options.debug)
    cp = configparser.RawConfigParser()
    cp.read(options.config_file)
    config_options = ConfigOptions(cp)
    # Dispatch to the function registered for the chosen subcommand.
    return command_funcs[options.command](options,
                                          config_options.database_connection)
def setUp(self):
    """Configure the rdoinfo driver with a temporary data directory."""
    super(TestDriverRdoInfo, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "pkginfo_driver",
           "dlrn.drivers.rdoinfo.RdoInfoDriver")
    self.temp_dir = tempfile.mkdtemp()
    self.config = ConfigOptions(cp)
    self.config.datadir = self.temp_dir
def test_build_rpm_wrapper_mock_config(self, wm_mock, ld_mock, bp_mock,
                                       sh_mock, env_mock, rc_mock):
    """With fetch_mock_config enabled, the mock config is fetched once."""
    self.configfile.set('kojibuild_driver', 'fetch_mock_config', 'True')
    self.configfile.set('DEFAULT', 'build_driver',
                        'dlrn.drivers.kojidriver.KojiBuildDriver')
    self.config = ConfigOptions(self.configfile)
    # Use the most recent commit in the fixture database.
    commit = db.getCommits(self.session)[-1]
    build_rpm_wrapper(commit, False, False, False, None, True)
    self.assertEqual(wm_mock.call_count, 1)
def setUp(self):
    """Configure the local package-info driver with scratch directories."""
    super(TestDriverLocal, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "pkginfo_driver",
           "dlrn.drivers.local.LocalDriver")
    self.config = ConfigOptions(cp)
    self.config.datadir = tempfile.mkdtemp()
    self.base_dir = tempfile.mkdtemp()
def setUp(self):
    """Configure the gitrepo package-info driver for the tests."""
    super(TestDriverGit, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "pkginfo_driver",
           "dlrn.drivers.gitrepo.GitRepoDriver")
    self.config = ConfigOptions(cp)
    self.config.datadir = tempfile.mkdtemp()
    self.config.gitrepo_dirs = ['/openstack']
def test_build(self, ld_mock, sh_mock, env_mock, rc_mock):
    """build() must raise when the mock driver produces no rpms.

    Bug fix: the original try/except silently passed when build() did NOT
    raise, making the test vacuous; the `else` branch now fails explicitly.
    """
    self.configfile.set('DEFAULT', 'build_driver',
                        'dlrn.drivers.mockdriver.MockBuildDriver')
    self.config = ConfigOptions(self.configfile)
    commit = db.getCommits(self.session)[-1]
    try:
        build([], commit, None, False, False, False, True)
    except Exception as e:
        self.assertIn("No rpms built for", str(e))
    else:
        self.fail("build() did not raise the expected exception")
def test_driver_config(self, ld_mock, env_mock, rc_mock):
    """A '0' value in the file must override the install_after_build default."""
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "build_driver",
           "dlrn.drivers.mockdriver.MockBuildDriver")
    # default is True, test override
    cp.set('mockbuild_driver', 'install_after_build', '0')
    config = ConfigOptions(cp)
    self.assertEqual(False, config.install_after_build)
def test_build_rpm_wrapper_release_numbering(self, ld_mock, sh_mock,
                                             env_mock, rc_mock):
    """Release numbering settings must be exported into the environment."""
    self.configfile.set('DEFAULT', 'build_driver',
                        'dlrn.drivers.mockdriver.MockBuildDriver')
    self.configfile.set('DEFAULT', 'release_numbering', 'minor.date.hash')
    self.configfile.set('DEFAULT', 'release_minor', '2')
    self.config = ConfigOptions(self.configfile)
    commit = db.getCommits(self.session)[-1]
    build_rpm_wrapper(commit, False, False, False, None, True)
    # The wrapper is expected to pass the settings via os.environ.
    self.assertEqual(os.environ['RELEASE_NUMBERING'], 'minor.date.hash')
    self.assertEqual(os.environ['RELEASE_MINOR'], '2')
def test_build_rpm_wrapper(self, ld_mock, sh_mock, env_mock, rc_mock):
    """A plain mock-driver build runs the expected external commands."""
    self.configfile.set('DEFAULT', 'build_driver',
                        'dlrn.drivers.mockdriver.MockBuildDriver')
    self.config = ConfigOptions(self.configfile)
    commit = db.getCommits(self.session)[-1]
    build_rpm_wrapper(commit, False, False, False, None, True)
    # 3 sh calls:
    # 1- build_srpm.sh
    # 2- mock (handled by env_mock)
    # 3- restorecon (handled by rc_mock)
    self.assertEqual(env_mock.call_count, 2)
    self.assertEqual(rc_mock.call_count, 1)
    # The generated mock config must land in the data directory.
    self.assertTrue(
        os.path.exists(os.path.join(self.config.datadir, "dlrn-1.cfg")))
def setUp(self):
    """Configure the downstream info driver with test URLs and branches."""
    super(TestDriverDownstream, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "pkginfo_driver",
           "dlrn.drivers.downstream.DownstreamInfoDriver")
    self.temp_dir = tempfile.mkdtemp()
    self.config = ConfigOptions(cp)
    self.config.versions_url = (
        'https://trunk.rdoproject.org/centos7-master/current/versions.csv')
    self.config.downstream_distro_branch = 'testbranch'
    self.config.downstream_distgit_base = 'git://git.example.com/rpms'
    self.config.downstream_source_git_key = 'ds-patches'
    self.config.downstream_source_git_branch = 'dsbranch'
    self.config.datadir = self.temp_dir
    self.config.use_upstream_spec = False
def test_clone_no_fallback(self, sh_mock):
    """With fallback disabled, a failed checkout must propagate the error."""
    cp = configparser.RawConfigParser(default_options)
    cp.read("projects.ini")
    cp.set('DEFAULT', 'fallback_to_master', '0')
    self.config = ConfigOptions(cp)
    # We need to redefine the mock object again, to use a side effect
    # that will fail in the git checkout call. A bit convoluted, but
    # it works
    with mock.patch.object(sh.Command, '__call__') as new_mock:
        new_mock.side_effect = _aux_sh
        self.assertRaises(sh.ErrorReturnCode_1,
                          repositories.refreshrepo,
                          'url', 'path', branch='branch')
        expected = [mock.call('url', 'path'),
                    mock.call('origin'),
                    mock.call('-f', 'branch')]
        self.assertEqual(new_mock.call_args_list, expected)
def setUp(self):
    """Create a full build fixture: config, scripts, repo file and DB."""
    super(TestBuild, self).setUp()
    cp = configparser.RawConfigParser(default_options)
    cp.read("projects.ini")
    cp.set('DEFAULT', 'datadir', tempfile.mkdtemp())
    cp.set('DEFAULT', 'scriptsdir', tempfile.mkdtemp())
    cp.set('DEFAULT', 'baseurl',
           "file://%s" % cp.get('DEFAULT', 'datadir'))
    self.config = ConfigOptions(cp)
    # The build scripts expect centos.cfg in the scripts directory.
    shutil.copyfile(os.path.join("scripts", "centos.cfg"),
                    os.path.join(self.config.scriptsdir, "centos.cfg"))
    with open(os.path.join(self.config.datadir,
                           "delorean-deps.repo"), "w") as fp:
        fp.write("[test]\nname=test\nenabled=0\n")
    self.db_fd, filepath = tempfile.mkstemp()
    self.session = db.getSession("sqlite:///%s" % filepath)
    utils.loadYAML(self.session, './dlrn/tests/samples/commits_1.yaml')
def test_clone_no_fallback_default(self, sh_mock):
    """A failing checkout of the default branch must not fall back."""
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set('DEFAULT', 'fallback_to_master', '1')
    self.config = ConfigOptions(cp)
    with mock.patch.object(sh.Command, '__call__') as new_mock:
        new_mock.side_effect = _aux_sh
        self.assertRaises(sh.ErrorReturnCode_1,
                          repositories.refreshrepo,
                          'url', 'path', branch='rpm-master')
        expected = [
            mock.call('url', 'path'),
            mock.call('origin'),
            mock.call('-f', 'rpm-master')
        ]
        self.assertEqual(new_mock.call_args_list, expected)
def test_detect_dirs(self):
    """Relative dirs in the config must resolve against this file's location."""
    self.config = configparser.RawConfigParser()
    self.config.read("samples/projects.ini.detect")
    config = ConfigOptions(self.config)
    # Each directory is expected relative to the directory of this test file.
    here = os.path.dirname(os.path.abspath(__file__))
    self.assertEqual(
        config.datadir,
        os.path.realpath(os.path.join(here, "../../data")))
    self.assertEqual(
        config.templatedir,
        os.path.realpath(os.path.join(here, "../templates")))
    self.assertEqual(
        config.scriptsdir,
        os.path.realpath(os.path.join(here, "../../scripts")))
def test_clone_fallback_var(self, sh_mock):
    """Branches not matching nonfallback_branches fall back to master."""
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set('DEFAULT', 'fallback_to_master', '1')
    cp.set('DEFAULT', 'nonfallback_branches', '^foo-')
    self.config = ConfigOptions(cp)
    with mock.patch.object(sh.Command, '__call__') as new_mock:
        new_mock.side_effect = _aux_sh
        result = repositories.refreshrepo('url', 'path', branch='bar')
        self.assertEqual(result, ['master', 'None'])
        # Full git call sequence: clone, fetch, failed checkout of 'bar',
        # then checkout + hard reset to master and a final log query.
        expected = [
            mock.call('url', 'path'),
            mock.call('origin'),
            mock.call('-f', 'bar'),
            mock.call('master'),
            mock.call('--hard', 'origin/master'),
            mock.call('--pretty=format:%H %ct', '-1', '.')
        ]
        self.assertEqual(new_mock.call_args_list, expected)
def setUp(self):
    """Configure the koji build driver plus fake logs and an rhpkg stub.

    Creates the fake build log, several rhpkg-style directory layouts used
    by different test cases, and a no-op ``rhpkg`` executable on PATH.
    """
    super(TestDriverKoji, self).setUp()
    cp = configparser.RawConfigParser()
    cp.read("projects.ini")
    cp.set("DEFAULT", "build_driver",
           "dlrn.drivers.kojidriver.KojiBuildDriver")
    self.config = ConfigOptions(cp)
    self.config.koji_krb_principal = '*****@*****.**'
    self.config.koji_krb_keytab = '/home/test/test.keytab'
    self.config.koji_scratch_build = True
    self.config.koji_build_target = 'build-target'
    self.temp_dir = tempfile.mkdtemp()
    self.config.datadir = self.temp_dir
    # Create fake build log
    with open("%s/kojibuild.log" % self.temp_dir, 'a') as fh:
        fh.write("Created task: 1234")
    # In the rhpkg case, we need to create a full dir structure
    self.rhpkg_extra_dir = ("%s/repos/12/34/1234567890abcdef_1_12345678"
                            % self.temp_dir)
    os.makedirs(self.rhpkg_extra_dir)
    with open("%s/rhpkgbuild.log" % self.rhpkg_extra_dir, 'a') as fh:
        fh.write("Created task: 5678")
    # Another full-dir structure for the long extended hash test
    self.rhpkg_extra_dir_2 = (
        "%s/repos/12/34/1234567890abcdef_1_12345678_abcdefgh"
        % self.temp_dir)
    os.makedirs(self.rhpkg_extra_dir_2)
    with open("%s/rhpkgbuild.log" % self.rhpkg_extra_dir_2, 'a') as fh:
        fh.write("Created task: 5678")
    # Another full-dir structure for the long extended hash test
    # with downstream driver
    self.rhpkg_extra_dir_3 = (
        "%s/repos/12/34/1234567890abcdef_fedcba09_1_1" % self.temp_dir)
    os.makedirs(self.rhpkg_extra_dir_3)
    with open("%s/rhpkgbuild.log" % self.rhpkg_extra_dir_3, 'a') as fh:
        fh.write("Created task: 5678")
    # Create a fake rhpkg binary and put it first on PATH.
    with open("%s/rhpkg" % self.temp_dir, 'a') as fh:
        fh.write("true")
    os.chmod("%s/rhpkg" % self.temp_dir,
             stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
    os.environ['PATH'] = self.temp_dir + ':' + os.environ['PATH']
def test_build_configdir(self, ld_mock, sh_mock, env_mock, rc_mock):
    """A custom configdir must be the source of the copied mock config."""
    configdir = tempfile.mkdtemp()
    self.configfile.set('DEFAULT', 'configdir', configdir)
    self.configfile.set('DEFAULT', 'build_driver',
                        'dlrn.drivers.mockdriver.MockBuildDriver')
    self.config = ConfigOptions(self.configfile)
    shutil.copyfile(os.path.join("scripts", "centos8.cfg"),
                    os.path.join(configdir, "centos8.cfg"))
    commit = db.getCommits(self.session)[-1]
    # The wrapper first copies the template, then promotes the .new file.
    expected = [
        mock.call('%s/centos8.cfg' % configdir,
                  '%s/dlrn-1.cfg.new' % self.config.datadir),
        mock.call('%s/dlrn-1.cfg.new' % self.config.datadir,
                  '%s/dlrn-1.cfg' % self.config.datadir)
    ]
    with mock.patch('shutil.copyfile',
                    side_effect=shutil.copyfile) as cp_mock:
        build_rpm_wrapper(commit, False, False, False, None, True)
        self.assertEqual(expected, cp_mock.call_args_list)
def setUp(self):
    """Prepare rsync-related config options and a sample commit."""
    super(TestSyncRepo, self).setUp()
    cp = configparser.RawConfigParser(default_options)
    cp.read("projects.ini")
    cp.set('DEFAULT', 'datadir', tempfile.mkdtemp())
    cp.set('DEFAULT', 'scriptsdir', tempfile.mkdtemp())
    cp.set('DEFAULT', 'baseurl',
           "file://%s" % cp.get('DEFAULT', 'datadir'))
    cp.set('DEFAULT', 'rsyncport', '30000')
    cp.set('DEFAULT', 'rsyncdest', 'user@host:/directory')
    self.config = ConfigOptions(cp)
    self.commit = db.Commit(
        dt_commit=123, project_name='foo',
        commit_hash='1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf',
        repo_dir='/home/dlrn/data/foo',
        distro_hash='c31d1b18eb5ab5aed6721fc4fad06c9bd242490f',
        dt_distro=123,
        distgit_dir='/home/dlrn/data/foo_distro',
        commit_branch='master', dt_build=1441245153)
def test_get_config_option(self):
    """getConfigOptions() must return the most recently built instance."""
    opts = ConfigOptions(self.config)
    self.assertEqual(opts, getConfigOptions())
def setUp(self):
    """Load default options and point gitrepo_dir at a scratch directory."""
    super(TestDriverGit, self).setUp()
    cp = configparser.RawConfigParser(default_options)
    cp.read("projects.ini")
    self.config = ConfigOptions(cp)
    self.config.gitrepo_dir = tempfile.mkdtemp()
def test_with_rdoinfo_driver(self):
    """The rdoinfo_driver repo setting must surface as rdoinfo_repo."""
    self.config.set("rdoinfo_driver", "repo", "https://test/test.git")
    opts = ConfigOptions(self.config)
    self.assertEqual(opts.rdoinfo_repo, 'https://test/test.git')
def purge():
    """CLI entry point that removes built artifacts older than a cutoff.

    Walks commits older than ``--older-than`` days (newest first as returned
    by getCommits), deleting their repo directories.  For each project, the
    most recent successful build keeps its .rpm files because other symlinks
    may still point at them; everything else is removed entirely.  Purged
    commits are flagged with FLAG_PURGED so they are skipped next time.

    Bug fix: the confirmation prompt used ``raw_input``, which does not exist
    on Python 3; a small py2/py3 shim now selects the right builtin.
    """
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'
    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--older-than',
                        help="How old commits need to be purged "
                             "(in days).", required=True)
    parser.add_argument('-y', help="Answer \"yes\" to any questions",
                        action="store_true")
    options, args = parser.parse_known_args(sys.argv[1:])
    cp = configparser.RawConfigParser()
    cp.read(options.config_file)
    config_options = ConfigOptions(cp)
    timeparsed = datetime.now() - timedelta(days=int(options.older_than))
    if options.y is False:
        # raw_input was renamed to input in Python 3; pick whichever exists.
        try:
            prompt = raw_input  # noqa: F821 (Python 2)
        except NameError:
            prompt = input      # Python 3
        ans = prompt("Remove all data before %s, correct? [N/y] "
                     % timeparsed.ctime())
        if ans.lower() != "y":
            return
    session = getSession('sqlite:///commits.sqlite')
    # To remove builds we have to start at a point in time and move backwards
    # builds with no build date are also purged as these are legacy
    # All repositories can have the repodata directory and symlinks purged
    # But we must keep the rpms files of the most recent successful build of
    # each project as other symlinks not being purged will be pointing to them.
    topurge = getCommits(session,
                         limit=0,
                         before=int(mktime(timeparsed.timetuple()))
                         ).all()
    fullpurge = []
    for commit in topurge:
        if commit.flags & FLAG_PURGED:
            continue
        datadir = os.path.join(config_options.datadir, "repos",
                               commit.getshardedcommitdir())
        if commit.project_name not in fullpurge and \
                commit.status == "SUCCESS":
            # So we have not removed any commit from this project yet, and it
            # is successful. Is it the newest one?
            previouscommits = getCommits(session,
                                         project=commit.project_name,
                                         since=commit.dt_build,
                                         with_status='SUCCESS').count()
            if previouscommits == 0:
                logger.info("Keeping old commit for %s"
                            % commit.project_name)
                continue  # this is the newest commit for this project, keep it
            try:
                for entry in os.listdir(datadir):
                    entry = os.path.join(datadir, entry)
                    # Keep real rpm files; purge symlinks and everything else.
                    if entry.endswith(".rpm") and not os.path.islink(entry):
                        continue
                    if os.path.isdir(entry):
                        shutil.rmtree(entry)
                    else:
                        os.unlink(entry)
            except OSError:
                logger.warning("Cannot access directory %s for purge,"
                               " ignoring." % datadir)
            fullpurge.append(commit.project_name)
            commit.flags |= FLAG_PURGED
        else:
            # NOTE(review): this rmtree is unguarded — it will raise if the
            # directory is already gone; confirm whether that can happen here.
            shutil.rmtree(datadir)
            commit.flags |= FLAG_PURGED
    session.commit()
def _get_config_options(config_file):
    """Parse *config_file* and wrap the result in a ConfigOptions object."""
    parser = configparser.RawConfigParser()
    parser.read(config_file)
    return ConfigOptions(parser)
def main():
    """Main DLRN entry point: find new commits and build packages from them.

    Parses the command line, loads the configuration and package-info driver,
    gathers the commits to process, optionally orders them by spec-file build
    dependencies, and then builds them either sequentially or through a
    multiprocessing pool.  Returns the process exit code.

    Bug fix: the spec file read in the --order branch leaked its file handle
    (``open(specpath).read(-1)``); it now uses a ``with`` block.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of"
                             " fetching the default one using rdopkg. Only"
                             " applies when pkginfo_driver is rdoinfo in"
                             " projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible. Only commited"
                             " changes in the local repo will be used in the"
                             " build.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name', action='append',
                       help="Build a specific project name only."
                            " Use multiple times to build more than one "
                            "project in a run.")
    group.add_argument('--package-name', action='append',
                       help="Build a specific package name only."
                            " Use multiple times to build more than one "
                            "package in a run.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits. "
                             "Implies --sequential.")
    parser.add_argument('--sequential', action="store_true",
                        help="Run all actions sequentially, regardless of the"
                             " number of workers specified in projects.ini.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                             "Implies --package-name")
    parser.add_argument('--force-recheck', action="store_true",
                        help="Force a rebuild for a particular package, even "
                             "if its last build was successful. Requires "
                             "setting allow_force_rechecks=True in "
                             "projects.ini. "
                             "Implies --package-name and --recheck")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")
    parser.add_argument('--verbose-build', action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock', action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo', action="store_true",
                        help="Do not generate a repo with all the built "
                             "packages.")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])
    setup_logging(options.debug)

    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp)
    if options.dev:
        # Dev mode uses a throwaway database so the real one is untouched.
        _, tmpdb_path = tempfile.mkstemp()
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    if options.project_name:
        pkg_names = [p['name'] for p in packages
                     if p['project'] in options.project_name]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    if options.status is True:
        # Report the last known status for each requested package and exit.
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(
                    session, name, 'invalid status',
                    type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        # Decide whether a commit may be rebuilt; exits the process either way.
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built "
                            "commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name '
                         'with --recheck.')
            sys.exit(1)
        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name,
                                            type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True

    # Build a list of commits we need to process
    toprocess = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is a commit we have
        # already processed, we want to process it again only if in dev
        # mode or distro hash has changed, we can't simply check
        # against the last commit in the db, as multiple commits can
        # have the same commit date
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
                    options.run or \
                    not session.query(Commit).filter(
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.distro_hash == commit_toprocess.distro_hash,
                        Commit.extended_hash ==
                        commit_toprocess.extended_hash,
                        Commit.type == commit_toprocess.type,
                        Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        pool = multiprocessing.Pool()   # This will use all the system cpus
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(getinfo, local=options.local,
                                  dev_mode=options.dev,
                                  head_only=options.head_only,
                                  db_connection=config_options.
                                  database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _ = getinfo(
                    package, local=options.local,
                    dev_mode=options.dev,
                    head_only=options.head_only,
                    db_connection=config_options.database_connection)
                add_commits(project_toprocess)
    closeSession(session)   # Close session, will reopen during post_build

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected, please"
                        " make sure the package name(s) are correct, and that "
                        "any failed commit you want to rebuild has been "
                        "removed from the database.")
        return 0

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            with open(specpath) as specfile:  # fix: close the file handle
                rawspec = specfile.read()
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                   for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                _a = orders.index(a.project_name)
                _b = orders.index(b.project_name)
            # cmp is no longer available in python3 so replace it. See Ordering
            # Comparisons on:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)

        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages, commit, run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order, sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    if not options.run:
                        failures = post_build(
                            status, packages, session,
                            build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(
                    status, packages, session, toprocess_copy,
                    dev_mode=options.dev,
                    run_cmd=options.run,
                    stop=options.stop,
                    build_env=options.build_env,
                    head_only=options.head_only,
                    consistent=consistent,
                    failures=failures)
                closeSession(session)
                if exit_value != 0:
                    exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker, packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order, sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status, packages, session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status, packages,
                        session, toprocess,
                        dev_mode=options.dev,
                        run_cmd=options.run,
                        stop=options.stop,
                        build_env=options.build_env,
                        head_only=options.head_only,
                        consistent=consistent,
                        failures=failures)
                    closeSession(session)
                    if exit_value != 0:
                        exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    if options.dev:
        os.remove(tmpdb_path)

    return exit_code
def main():
    """Command-line entry point for the DLRN builder.

    Parses CLI options, loads projects.ini, computes the list of commits
    that still need a build (optionally ordered by spec-file build
    dependencies), then builds each commit sequentially, recording the
    result in the commits database and regenerating the reports/repos.

    Returns 0 on full success, non-zero if any build failed.  May also
    terminate early via sys.exit() for the --status/--recheck subcommands.
    """
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required).", required=True)
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of "
                             "fetching the default one using rdopkg. Only "
                             "applies when pkginfo_driver is rdoinfo in "
                             "projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    parser.add_argument('--package-name',
                        help="Build a specific package name only.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                             "Imply --package-name")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Imply --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")

    options, args = parser.parse_known_args(sys.argv[1:])

    cp = configparser.RawConfigParser(default_options)
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)

    # Module-level state shared with the build/report helpers.
    global session
    session = getSession('sqlite:///commits.sqlite')
    global config_options
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo_object = import_object(pkginfo_driver)
    packages = pkginfo_object.getpackages(local_info_repo=options.info_repo,
                                          tags=config_options.tags)

    if options.status is True:
        # Report the last build status per package and exit.
        if options.package_name:
            names = (options.package_name, )
        else:
            names = [p['name'] for p in packages]
        for name in names:
            commit = getLastProcessedCommit(session, name, 'invalid status')
            if commit:
                print(name, commit.status)
            else:
                print(name, 'NO_BUILD')
        sys.exit(0)

    if options.recheck is True:
        if not options.package_name:
            logger.error('Please use --package-name with --recheck.')
            sys.exit(1)
        commit = getLastProcessedCommit(session, options.package_name)
        if commit:
            if commit.status == 'SUCCESS':
                logger.error("Trying to recheck an already successful commit,"
                             " ignoring.")
                sys.exit(1)
            elif commit.status == 'RETRY':
                # In this case, we are going to retry anyway, so
                # do nothing and exit
                logger.warning("Trying to recheck a commit in RETRY state,"
                               " ignoring.")
                sys.exit(0)
            else:
                # We could set the status to RETRY here, but if we have gone
                # beyond max_retries it wouldn't work as expected. Thus, our
                # only chance is to remove the commit
                session.delete(commit)
                session.commit()
                sys.exit(0)
        else:
            logger.error("There are no existing commits for package %s"
                         % options.package_name)
            sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True

    # Build a list of commits we need to process
    toprocess = []
    for package in packages:
        project = package["name"]
        since = "-1"
        commit = getLastProcessedCommit(session, project)
        if commit:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        repo = package["upstream"]
        distro = package["master-distgit"]
        if not options.package_name or package["name"] == options.package_name:
            project_toprocess = getinfo(project, repo, distro, since,
                                        options.local, options.dev, package)
            # If since == -1, then we only want to trigger a build for the
            # most recent change
            if since == "-1" or options.head_only:
                del project_toprocess[:-1]

            # The first entry in the list of commits is a commit we have
            # already processed, we want to process it again only if in dev
            # mode or distro hash has changed, we can't simply check against
            # the last commit in the db, as multiple commits can have the same
            # commit date
            for commit_toprocess in project_toprocess:
                if ((options.dev is True) or options.run or
                    (not session.query(Commit).filter(
                        Commit.project_name == project,
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.distro_hash == commit_toprocess.distro_hash,
                        Commit.status != "RETRY")
                        .all())):
                    toprocess.append(commit_toprocess)

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True and not options.package_name:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([p['name'] for p in packages])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            specpath = os.path.join(config_options.datadir,
                                    project_name + "_distro",
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            with open(specpath) as specfp:
                rawspec = specfp.read()
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                  for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            # Python 3 removed the cmp() builtin; (x > y) - (x < y) is the
            # equivalent three-way comparison.
            # See Ordering Comparisons on:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            if a.project_name == b.project_name:
                _a, _b = a.dt_commit, b.dt_commit
            else:
                _a = orders.index(a.project_name)
                _b = orders.index(b.project_name)
            return (_a > _b) - (_a < _b)
        # list.sort(cmp=...) no longer exists in Python 3; use cmp_to_key,
        # matching the other build loop in this module.
        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    for commit in toprocess:
        project = commit.project_name

        project_info = session.query(Project).filter(
            Project.project_name == project).first()
        if not project_info:
            project_info = Project(project_name=project, last_email=0)

        commit_hash = commit.commit_hash

        if options.run:
            # --run mode: execute the program once per package, no build.
            try:
                run(options.run, commit, options.build_env,
                    options.dev, options.use_public, options.order,
                    do_build=False)
            except Exception:
                exit_code = 1
                if options.stop:
                    return exit_code
            continue

        logger.info("Processing %s %s" % (project, commit_hash))

        notes = ""
        try:
            built_rpms, notes = build(packages, commit, options.build_env,
                                      options.dev, options.use_public,
                                      options.order)
        except Exception as e:
            datadir = os.path.realpath(config_options.datadir)
            exit_code = 1
            logfile = os.path.join(datadir, "repos",
                                   commit.getshardedcommitdir(),
                                   "rpmbuild.log")
            if (isknownerror(logfile) and
                (timesretried(project, commit_hash, commit.distro_hash) <
                 config_options.maxretries)):
                # Transient/known failure: schedule a retry instead of
                # marking the commit as FAILED.
                logger.exception("Known error building packages for %s,"
                                 " will retry later" % project)
                commit.status = "RETRY"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            else:
                # If the log file hasn't been created we add what we have
                # This happens if the rpm build script didn't run.
                if not os.path.exists(logfile):
                    with open(logfile, "w") as fp:
                        fp.write(getattr(e, "message", notes))

                if not project_info.suppress_email():
                    sendnotifymail(packages, commit)
                    project_info.sent_email()
                    session.add(project_info)

                # allow to submit a gerrit review only if the last build was
                # successful or non existent to avoid creating a gerrit review
                # for the same problem multiple times.
                if config_options.gerrit is not None:
                    if options.build_env:
                        env_vars = list(options.build_env)
                    else:
                        env_vars = []
                    last_build = getLastProcessedCommit(session, project)
                    if not last_build or last_build.status == 'SUCCESS':
                        # for/else: pkg is the matching package, or None if
                        # the loop finished without a break.
                        for pkg in packages:
                            if project == pkg['name']:
                                break
                        else:
                            pkg = None
                        if pkg:
                            url = (get_commit_url(commit, pkg) +
                                   commit.commit_hash)
                            env_vars.append('GERRIT_URL=%s' % url)
                            env_vars.append('GERRIT_LOG=%s/%s' %
                                            (config_options.baseurl,
                                             commit.getshardedcommitdir()))
                            maintainers = ','.join(pkg['maintainers'])
                            env_vars.append('GERRIT_MAINTAINERS=%s' %
                                            maintainers)
                            logger.info('Creating a gerrit review using '
                                        'GERRIT_URL=%s '
                                        'GERRIT_MAINTAINERS=%s ' %
                                        (url, maintainers))
                            try:
                                submit_review(commit, env_vars)
                            except Exception:
                                logger.error('Unable to create review '
                                             'see review.log')
                        else:
                            logger.error('Unable to find info for project %s' %
                                         project)
                    else:
                        logger.info('Last build not successful '
                                    'for %s' % project)

                commit.status = "FAILED"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)

            if options.stop:
                return exit_code
        else:
            commit.status = "SUCCESS"
            commit.notes = notes
            commit.rpms = ",".join(built_rpms)
            session.add(commit)
        if options.dev is False:
            session.commit()
        genreports(packages, options)
        sync_repo(commit)

    # If we were bootstrapping, set the packages that required it to RETRY
    if options.order is True and not options.package_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
        session.commit()
    genreports(packages, options)
    return exit_code