Example #1
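Tests utils.dumpshas2file: each call appends one CSV row (project, source repo and hash, dist repo and hash, status, retry count, and the source package name with the .src.rpm suffix stripped).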
    def test_noretry(self):
        # SUCCESS
        commit = db.getLastProcessedCommit(self.session, 'python-pysaml2')
        # FAILED
        commit2 = db.getLastProcessedCommit(self.session, 'python-alembic')
        # SUCCESS, RETRY (should be ignored)
        commit3 = \
            db.getLastProcessedCommit(self.session, 'python-tripleoclient')

        mock_fp = MagicMock()
        utils.dumpshas2file(mock_fp, commit, "a", "b", commit.status, 0,
                            ['python-saml2-1.0-1.el7.src.rpm'])
        utils.dumpshas2file(mock_fp, commit2, "a", "b", commit2.status, 1,
                            ['python-alembic-1.0-2.el7.src.rpm'])
        utils.dumpshas2file(mock_fp, commit3, "a", "b", commit3.status, 2,
                            ['file1-1.2-3.el7.noarch.rpm',
                             'file2-1.2-3.el7.src.rpm'])
        expected = [
            call.write(u'python-pysaml2,a,3a9326f251b9a4162eb0dfa9f1c924ef47c'
                       '2c55a,b,024e24f0cf4366c2290c22f24e42de714d1addd1'
                       ',SUCCESS,0,python-saml2-1.0-1.el7\n'),
            call.write(u'python-alembic,a,459549c9ab7fef91b2dc8986bc0643bb2f6'
                       'ec0c8,b,885e80778edb6cbb8ee4d8909623be8062369a04'
                       ',FAILED,1,python-alembic-1.0-2.el7\n'),
            call.write(u'python-tripleoclient,a,1da7b10e55abf8c518e8f61ee7966'
                       '188f0405f59,b,0b1ce934e5b2e7d45a448f6555d24036f9aeca51'
                       ',SUCCESS,2,file2-1.2-3.el7\n')
        ]
        self.assertEqual(mock_fp.mock_calls, expected)
Example #2
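The same test against a newer dumpshas2file signature that also takes a component argument; the expected rows carry two additional columns (component and extended hash).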
    def test_noretry(self):
        # SUCCESS
        commit = db.getLastProcessedCommit(self.session, 'python-pysaml2')
        # FAILED
        commit2 = db.getLastProcessedCommit(self.session, 'python-alembic')
        # SUCCESS, RETRY (should be ignored)
        commit3 = \
            db.getLastProcessedCommit(self.session, 'python-tripleoclient')

        mock_fp = MagicMock()
        utils.dumpshas2file(mock_fp, commit, "a", "b", commit.status, 0, None,
                            ['python-saml2-1.0-1.el7.src.rpm'])
        utils.dumpshas2file(mock_fp, commit2, "a", "b", commit2.status, 1,
                            'common', ['python-alembic-1.0-2.el7.src.rpm'])
        utils.dumpshas2file(
            mock_fp, commit3, "a", "b", commit3.status, 2, 'common',
            ['file1-1.2-3.el7.noarch.rpm', 'file2-1.2-3.el7.src.rpm'])
        expected = [
            call.write(u'python-pysaml2,a,3a9326f251b9a4162eb0dfa9f1c924ef47c'
                       '2c55a,b,024e24f0cf4366c2290c22f24e42de714d1addd1'
                       ',SUCCESS,0,None,cafecafe,python-saml2-1.0-1.el7\n'),
            call.write(u'python-alembic,a,459549c9ab7fef91b2dc8986bc0643bb2f6'
                       'ec0c8,b,885e80778edb6cbb8ee4d8909623be8062369a04'
                       ',FAILED,1,common,None,python-alembic-1.0-2.el7\n'),
            call.write(u'python-tripleoclient,a,1da7b10e55abf8c518e8f61ee7966'
                       '188f0405f59,b,0b1ce934e5b2e7d45a448f6555d24036f9aeca51'
                       ',SUCCESS,2,common,None,file2-1.2-3.el7\n')
        ]
        self.assertEqual(mock_fp.mock_calls, expected)
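As a reading aid, here is a minimal sketch of the row layout these assertions imply, matching the newer signature in Example #2 (Example #1's older variant lacks the component argument and the two extra columns). The column order and the .src.rpm handling are inferred from the expected strings; the real dlrn.utils.dumpshas2file may differ in details:

def dumpshas2file_sketch(fp, commit, source_repo, dist_repo, status,
                         retry_count, component, rpms):
    # Report the last source RPM, with the ".src.rpm" suffix stripped
    # (Example #1's third call shows that noarch RPMs are skipped).
    src = [r for r in rpms if r.endswith('.src.rpm')][-1]
    src = src[:-len('.src.rpm')]
    fp.write(u'%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n' % (
        commit.project_name, source_repo, commit.commit_hash,
        dist_repo, commit.distro_hash, status, retry_count,
        component, commit.extended_hash, src))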
Example #3
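Asserts that getshardedcommitdir() prefixes the sharded path with component/<name>/ when the commit has a component set.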
    def test_commit_getshardedcommitdir_component(self):
        commit = db.getLastProcessedCommit(self.session, 'python-pysaml2')
        commit.component = 'foo'
        commit.extended_hash = None
        directory1 = commit.getshardedcommitdir()
        self.assertEqual(
            directory1, 'component/foo/3a/93/3a9326f251b9a4162eb0dfa9f1c9'
            '24ef47c2c55a_024e24f0')
Example #4
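Asserts that the extended hash is appended to the sharded directory name, truncated to its first eight characters.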
    def test_commit_getshardedcommitdir_extended_hash(self):
        commit = db.getLastProcessedCommit(self.session, 'python-pysaml2')
        commit.extended_hash = 'abc'
        directory1 = commit.getshardedcommitdir()
        self.assertEqual(
            directory1, '3a/93/3a9326f251b9a4162eb0dfa9f1c924ef47c2c55a_'
            '024e24f0_abc')
        commit.extended_hash = 'abcdef123456'
        directory2 = commit.getshardedcommitdir()
        self.assertEqual(
            directory2, '3a/93/3a9326f251b9a4162eb0dfa9f1c924ef47c2c55a_'
            '024e24f0_abcdef12')
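The asserted paths in Examples #3 and #4 suggest a sharding rule along these lines. This is a sketch inferred from the assertions, not the actual Commit.getshardedcommitdir implementation:

def sharded_commit_dir(commit_hash, distro_hash, extended_hash=None,
                       component=None):
    # <hash[0:2]>/<hash[2:4]>/<commit_hash>_<distro_hash[:8]>, optionally
    # suffixed with the first 8 characters of the extended hash and
    # prefixed with component/<name>/ when a component is set.
    path = '%s/%s/%s_%s' % (commit_hash[:2], commit_hash[2:4],
                            commit_hash, distro_hash[:8])
    if extended_hash:
        path += '_%s' % extended_hash[:8]
    if component:
        path = 'component/%s/%s' % (component, path)
    return path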
Example #5
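A getinfo wrapper that derives the git --after=<timestamp> filter from the last processed commit, falls back to the current branch's last built commit after a branch switch, and keeps only the newest commit when since is "-1" or head_only is set.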
def getinfo(package,
            local=False,
            dev_mode=False,
            head_only=False,
            db_connection=None,
            type="rpm"):
    project = package["name"]
    since = "-1"
    session = getSession(db_connection)
    commit = getLastProcessedCommit(session, project, type=type)
    if commit:
        # If we have switched source branches, we want to behave
        # as if no previous commits had been built, and only build
        # the last one
        if commit.commit_branch == getsourcebranch(package):
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        else:
            # The last processed commit belongs to a different branch. Just
            # in case, let's check if we built a previous commit from the
            # current branch
            commit = getLastBuiltCommit(session,
                                        project,
                                        getsourcebranch(package),
                                        type=type)
            if commit:
                logger.info("Last commit belongs to another branch, but"
                            " we're ok with that")
                since = "--after=%d" % (commit.dt_commit)
                # In any case, we just want to build the last commit, if any
                head_only = True

    project_toprocess, skipped = pkginfo.getinfo(project=project,
                                                 package=package,
                                                 since=since,
                                                 local=local,
                                                 dev_mode=dev_mode,
                                                 type=type)

    closeSession(session)
    # If since == -1, then we only want to trigger a build for the
    # most recent change
    if since == "-1" or head_only:
        del project_toprocess[:-1]

    return project_toprocess, package, skipped
Example #6
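An earlier variant of the same wrapper: pkginfo.getinfo returns only the commit list, and no skipped flag is propagated.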
def getinfo(package, local=False, dev_mode=False, head_only=False,
            db_connection=None, type="rpm"):
    project = package["name"]
    since = "-1"
    session = getSession(db_connection)
    commit = getLastProcessedCommit(session, project, type=type)
    if commit:
        # If we have switched source branches, we want to behave
        # as if no previous commits had been built, and only build
        # the last one
        if commit.commit_branch == getsourcebranch(package):
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        else:
            # The last processed commit belongs to a different branch. Just
            # in case, let's check if we built a previous commit from the
            # current branch
            commit = getLastBuiltCommit(session, project,
                                        getsourcebranch(package), type=type)
            if commit:
                logger.info("Last commit belongs to another branch, but"
                            " we're ok with that")
                since = "--after=%d" % (commit.dt_commit)
                # In any case, we just want to build the last commit, if any
                head_only = True

    project_toprocess = pkginfo.getinfo(project=project, package=package,
                                        since=since, local=local,
                                        dev_mode=dev_mode, type=type)

    closeSession(session)
    # If since == -1, then we only want to trigger a build for the
    # most recent change
    if since == "-1" or head_only:
        del project_toprocess[:-1]

    return project_toprocess, package
Example #7
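Assorted tests around getLastProcessedCommit and getshardedcommitdir: the sharded path contains the commit hash even without a distro hash, an unknown project returns None, and a newest commit in RETRY state is skipped in favor of the previous one.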
    def test_commit_getshardedcommitdir(self):
        commit = db.getLastProcessedCommit(self.session, 'python-pysaml2')
        self.assertIn(commit.commit_hash, commit.getshardedcommitdir())
        commit.distro_hash = None
        self.assertIn(commit.commit_hash, commit.getshardedcommitdir())

    def test_newproject(self):
        commit = db.getLastProcessedCommit(self.session, 'python-newproject')
        self.assertEqual(commit, None)

    def test_withretry(self):
        # In our sample data the most recent of these has status == RETRY
        commit = \
            db.getLastProcessedCommit(self.session, 'python-tripleoclient')
        self.assertEqual(commit.dt_build, 1444033941)

    def test_noretry(self):
        commit = db.getLastProcessedCommit(self.session, 'python-pysaml2')
        self.assertEqual(commit.dt_build, 1444139517)
Example #11
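Imports a commit built on a remote DLRN instance: it fetches commit.yaml, log files and RPM artifacts over HTTP, skips commits older than the newest already built, and replays post_build/process_build_result under the remote update lock.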
def import_commit(repo_url, config_file, db_connection=None,
                  local_info_repo=None):
    cp = configparser.RawConfigParser()
    cp.read(config_file)
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=local_info_repo,
                                   tags=config_options.tags,
                                   dev_mode=False)

    remote_yaml = repo_url + '/' + 'commit.yaml'
    r = urlopen(remote_yaml)
    contents = map(lambda x: x.decode('utf8'), r.readlines())

    osfd, tmpfilename = mkstemp()
    fp = os.fdopen(osfd, 'w')
    fp.writelines(contents)
    fp.close()

    commits = loadYAML_list(tmpfilename)
    os.remove(tmpfilename)
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    for commit in commits:
        commit.id = None
        if commit.artifacts == 'None':
            commit.artifacts = None
        commit.dt_build = int(commit.dt_build)
        commit.dt_commit = float(commit.dt_commit)
        commit.dt_distro = int(commit.dt_distro)
        # Check if the latest built commit for this project is newer
        # than this one. In that case, we should ignore it
        if db_connection:
            session = getSession(db_connection)
        else:
            session = getSession(config_options.database_connection)
        package = commit.project_name
        old_commit = getLastProcessedCommit(session, package)
        if old_commit:
            if old_commit.dt_commit >= commit.dt_commit:
                if old_commit.dt_distro >= commit.dt_distro:
                    logger.info('Skipping commit %s, a newer commit is '
                                'already built\n'
                                'Old: %s %s, new: %s %s' %
                                (commit.commit_hash, old_commit.dt_commit,
                                 old_commit.dt_distro, commit.dt_commit,
                                 commit.dt_distro))
                    continue    # Skip

        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)

        for logfile in ['build.log', 'installed', 'mock.log', 'root.log',
                        'rpmbuild.log', 'state.log']:
            logfile_url = repo_url + '/' + logfile
            try:
                r = urlopen(logfile_url)
                contents = map(lambda x: x.decode('utf8'), r.readlines())
                with open(os.path.join(yumrepodir, logfile), "w") as fp:
                    fp.writelines(contents)
            except urllib.error.HTTPError:
                # Ignore errors, if the remote build failed there may be
                # some missing files
                pass

        if commit.artifacts:
            for rpm in commit.artifacts.split(","):
                rpm_url = repo_url + '/' + rpm.split('/')[-1]
                try:
                    r = urlopen(rpm_url)
                    contents = r.read()
                    with open(os.path.join(datadir, rpm), "wb") as fp:
                        fp.write(contents)
                except urllib.error.HTTPError:
                    if rpm != 'None':
                        logger.warning("Failed to download rpm file %s"
                                       % rpm_url)
        # Get remote update lock, to prevent any other remote operation
        # while we are creating the repo and updating the database
        logger.debug("Acquiring remote update lock")
        with lock_file(os.path.join(datadir, 'remote.lck')):
            logger.debug("Acquired lock")
            if commit.status == 'SUCCESS':
                built_rpms = []
                for rpm in commit.artifacts.split(","):
                    built_rpms.append(rpm)
                status = [commit, built_rpms, commit.notes, None]
                post_build(status, packages, session)
            else:
                pkg = [p for p in packages if p['name'] == package][0]
                # Here we fire a refresh of the repositories
                # (upstream and distgit) to be sure to have them in the
                # data directory. We need that in the case the worker
                # is running on another host mainly for the
                # submit_review.sh script.
                pkginfo.getinfo(project=pkg["name"], package=pkg,
                                since='-1', local=False, dev_mode=False)
                # Paths on the worker might differ so we overwrite them
                # to reflect data path on the local API host.
                commit.distgit_dir = pkginfo.distgit_dir(pkg['name'])
                commit.repo_dir = os.path.join(
                    config_options.datadir, pkg['name'])
                status = [commit, '', '', commit.notes]
            process_build_result(status, packages, session, [])
            closeSession(session)   # Keep one session per commit
        logger.debug("Released lock")
    return 0
Example #12
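Checks that getLastProcessedCommit skips the newest commit when its status is RETRY.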
    def test_withretry(self):
        # In our sample data the most recent of these has status == RETRY
        commit = \
            db.getLastProcessedCommit(self.session, 'python-tripleoclient')
        self.assertEqual(commit.dt_build, 1444033941)
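Examples #7 and #12 only pin down observable behavior. Assuming an SQLAlchemy Commit model like the one queried in Example #13, a query with that behavior could look like the sketch below; the ordering key and the without_status parameter (called with a dummy value such as 'invalid status' in Example #13 to disable the filter) are inferred, not taken from dlrn.db:

from sqlalchemy import desc

def get_last_processed_commit(session, project_name,
                              without_status='RETRY', type='rpm'):
    # Newest commit for the project whose status differs from
    # without_status; returns None when the project was never
    # processed (see test_newproject in Example #7). "Commit" is
    # the mapped model used in the examples above.
    return (session.query(Commit)
            .filter(Commit.project_name == project_name,
                    Commit.status != without_status,
                    Commit.type == type)
            .order_by(desc(Commit.dt_build))
            .first())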
Example #13
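The full main() entry point: it parses the command line, loads packages through the pkginfo driver, handles --status and --recheck, collects the commits to process (in parallel unless specific packages were requested), optionally orders builds by spec-file dependencies, and runs the builds sequentially or through a worker pool.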
def main():
    parser = argparse.ArgumentParser()

    parser.add_argument('--config-file',
                        default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of"
                             " fetching the default one using rdopkg. Only"
                             " applies when pkginfo_driver is rdoinfo in"
                             " projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible. Only commited"
                             " changes in the local repo will be used in the"
                             " build.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name', action='append',
                       help="Build a specific project name only."
                            " Use multiple times to build more than one "
                            "project in a run.")
    group.add_argument('--package-name', action='append',
                       help="Build a specific package name only."
                            " Use multiple times to build more than one "
                            "package in a run.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits. "
                             "Implies --sequential.")
    parser.add_argument('--sequential', action="store_true",
                        help="Run all actions sequentially, regardless of the"
                             " number of workers specified in projects.ini.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                        "Implies --package-name")
    parser.add_argument('--force-recheck', action="store_true",
                        help="Force a rebuild for a particular package, even "
                        "if its last build was successful. Requires setting "
                        "allow_force_rechecks=True in projects.ini. "
                        "Implies --package-name and --recheck")
    parser.add_argument('--version',
                        action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")
    parser.add_argument('--verbose-build', action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock', action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo', action="store_true",
                        help="Do not generate a repo with all the built "
                        "packages.")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp)
    if options.dev:
        _, tmpdb_path = tempfile.mkstemp()
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    if options.project_name:
        pkg_names = [p['name'] for p in packages
                     if p['project'] in options.project_name]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    if options.status is True:
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(
                    session, name, 'invalid status',
                    type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built "
                            "commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name '
                         'with --recheck.')
            sys.exit(1)

        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name, type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)
    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True
    # Build a list of commits we need to process
    toprocess = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is a commit we have
        # already processed, we want to process it again only if in dev
        # mode or distro hash has changed, we can't simply check
        # against the last commit in the db, as multiple commits can
        # have the same commit date
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
               options.run or \
               not session.query(Commit).filter(
                   Commit.commit_hash == commit_toprocess.commit_hash,
                   Commit.distro_hash == commit_toprocess.distro_hash,
                   Commit.extended_hash == commit_toprocess.extended_hash,
                   Commit.type == commit_toprocess.type,
                   Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        pool = multiprocessing.Pool()   # This will use all the system cpus
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(getinfo, local=options.local,
                                  dev_mode=options.dev,
                                  head_only=options.head_only,
                                  db_connection=config_options.
                                  database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _ = getinfo(package, local=options.local,
                                               dev_mode=options.dev,
                                               head_only=options.head_only,
                                               db_connection=config_options.
                                               database_connection)
                add_commits(project_toprocess)
    closeSession(session)   # Close session, will reopen during post_build

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected, please"
                        " make sure the package name(s) are correct, and that "
                        "any failed commit you want to rebuild has been "
                        "removed from the database.")
        return 0

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            rawspec = open(specpath).read(-1)
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                  for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                _a = orders.index(a.project_name)
                _b = orders.index(b.project_name)
            # cmp is no longer available in python3 so replace it. See Ordering
            # Comparisons on:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)

        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages, commit, run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order, sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    if not options.run:
                        failures = post_build(status, packages, session,
                                              build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(status, packages, session,
                                                  toprocess_copy,
                                                  dev_mode=options.dev,
                                                  run_cmd=options.run,
                                                  stop=options.stop,
                                                  build_env=options.build_env,
                                                  head_only=options.head_only,
                                                  consistent=consistent,
                                                  failures=failures)
                closeSession(session)

            if exit_value != 0:
                exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker, packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order, sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status, packages, session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status, packages,
                        session, toprocess,
                        dev_mode=options.dev,
                        run_cmd=options.run,
                        stop=options.stop,
                        build_env=options.build_env,
                        head_only=options.head_only,
                        consistent=consistent,
                        failures=failures)
                    closeSession(session)
                if exit_value != 0:
                    exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    if options.dev:
        os.remove(tmpdb_path)
    return exit_code
Example #14
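Processes one build result: on success it records the artifacts; on failure it marks the commit RETRY (known error under the retry limit) or FAILED (with notification mail and an optional Gerrit review), then regenerates reports, syncs the repo, and updates the current/consistent symlinks.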
def process_build_result_rpm(
        status, packages, session, packages_to_process,
        dev_mode=False, run_cmd=False, stop=False,
        build_env=None, head_only=False, consistent=False,
        failures=0):
    config_options = getConfigOptions()
    commit = status[0]
    built_rpms = status[1]
    notes = status[2]
    exception = status[3]
    commit_hash = commit.commit_hash
    project = commit.project_name
    project_info = session.query(Project).filter(
        Project.project_name == project).first()
    if not project_info:
        project_info = Project(project_name=project, last_email=0)
    exit_code = 0

    if run_cmd:
        if exception is not None:
            exit_code = 1
            if stop:
                return exit_code
        return exit_code

    if exception is None:
        commit.status = "SUCCESS"
        commit.notes = notes
        commit.artifacts = ",".join(built_rpms)
    else:
        logger.error("Received exception %s" % exception)

        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        logfile = os.path.join(yumrepodir,
                               "rpmbuild.log")
        if (isknownerror(logfile) and
            (timesretried(project, session, commit_hash, commit.distro_hash) <
             config_options.maxretries)):
            logger.exception("Known error building packages for %s,"
                             " will retry later" % project)
            commit.status = "RETRY"
            commit.notes = str(exception)
            # do not switch from an error exit code to a retry
            # exit code
            if exit_code != 1:
                exit_code = 2
        else:
            exit_code = 1
            # If the log file hasn't been created we add what we have
            # This happens if the rpm build script didn't run.
            if not os.path.exists(yumrepodir):
                os.makedirs(yumrepodir)
            if not os.path.exists(logfile):
                with open(logfile, "w") as fp:
                    fp.write(str(exception))

            if not project_info.suppress_email():
                sendnotifymail(packages, commit)
                project_info.sent_email()
                session.add(project_info)

            # Only submit a gerrit review if the last build was successful
            # or nonexistent, to avoid creating a review for the same
            # problem multiple times.
            if config_options.gerrit is not None:
                if build_env:
                    env_vars = list(build_env)
                else:
                    env_vars = []
                last_build = getLastProcessedCommit(session, project)
                if not last_build or last_build.status == 'SUCCESS':
                    try:
                        submit_review(commit, packages, env_vars)
                    except Exception:
                        logger.error('Unable to create review '
                                     'see review.log')
                else:
                    logger.info('Last build not successful '
                                'for %s' % project)
            commit.status = "FAILED"
            commit.notes = str(exception)
        if stop:
            return exit_code
    # Add commit to the session
    session.add(commit)

    genreports(packages, head_only, session, packages_to_process)
    # Export YAML file containing commit metadata
    export_commit_yaml(commit)
    try:
        sync_repo(commit)
    except Exception as e:
        logger.error('Repo sync failed for project %s' % project)
        consistent = False  # If we were consistent before, we are not anymore
        if exit_code == 0:  # The commit was ok, so marking as failed
            exit_code = 1
            # We need to make the commit status be "failed"
            commit.status = "FAILED"
            commit.notes = str(e)
            session.add(commit)
            # And open a review if needed
            if config_options.gerrit is not None:
                if build_env:
                    env_vars = list(build_env)
                else:
                    env_vars = []
                try:
                    submit_review(commit, packages, env_vars)
                except Exception:
                    logger.error('Unable to create review '
                                 'see review.log')

    session.commit()

    # Generate the current and consistent symlinks
    if exception is None:
        dirnames = ['current']
        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        yumrepodir_abs = os.path.join(datadir, yumrepodir)
        if consistent:
            dirnames.append('consistent')
        else:
            logger.info('%d packages not built correctly: not updating'
                        ' the consistent symlink' % failures)
        for dirname in dirnames:
            target_repo_dir = os.path.join(datadir, "repos", dirname)
            os.symlink(os.path.relpath(yumrepodir_abs,
                                       os.path.join(datadir, "repos")),
                       target_repo_dir + "_")
            os.rename(target_repo_dir + "_", target_repo_dir)

        # And synchronize them
        sync_symlinks(commit)

    if dev_mode is False:
        if consistent:
            # We have a consistent repo. Let's create a CIVote entry in the DB
            vote = CIVote(commit_id=commit.id, ci_name='consistent',
                          ci_url='', ci_vote=True, ci_in_progress=False,
                          timestamp=int(commit.dt_build), notes='')
            session.add(vote)
            session.commit()
    return exit_code
Example #15
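An older, monolithic main() that performs the build loop inline instead of delegating to build_worker and process_build_result.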
def main():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required).",
                        required=True)
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of "
                             "fetching the default one using rdopkg. Only"
                             "applies when pkginfo_driver is rdoinfo in"
                             "projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    parser.add_argument('--package-name',
                        help="Build a specific package name only.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                        "Imply --package-name")
    parser.add_argument('--version',
                        action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Imply --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")

    options, args = parser.parse_known_args(sys.argv[1:])

    cp = configparser.RawConfigParser(default_options)
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)

    global session
    session = getSession('sqlite:///commits.sqlite')
    global config_options
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo_object = import_object(pkginfo_driver)
    packages = pkginfo_object.getpackages(local_info_repo=options.info_repo,
                                          tags=config_options.tags)

    if options.status is True:
        if options.package_name:
            names = (options.package_name, )
        else:
            names = [p['name'] for p in packages]
        for name in names:
            commit = getLastProcessedCommit(session, name, 'invalid status')
            if commit:
                print(name, commit.status)
            else:
                print(name, 'NO_BUILD')
        sys.exit(0)

    if options.recheck is True:
        if not options.package_name:
            logger.error('Please use --package-name with --recheck.')
            sys.exit(1)
        commit = getLastProcessedCommit(session, options.package_name)
        if commit:
            if commit.status == 'SUCCESS':
                logger.error("Trying to recheck an already successful commit,"
                             " ignoring.")
                sys.exit(1)
            elif commit.status == 'RETRY':
                # In this case, we are going to retry anyway, so
                # do nothing and exit
                logger.warning("Trying to recheck a commit in RETRY state,"
                               " ignoring.")
                sys.exit(0)
            else:
                # We could set the status to RETRY here, but if we have gone
                # beyond max_retries it wouldn't work as expected. Thus, our
                # only chance is to remove the commit
                session.delete(commit)
                session.commit()
                sys.exit(0)
        else:
                logger.error("There are no existing commits for package %s"
                             % options.package_name)
                sys.exit(1)
    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True
    # Build a list of commits we need to process
    toprocess = []
    for package in packages:
        project = package["name"]
        since = "-1"
        commit = getLastProcessedCommit(session, project)
        if commit:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        repo = package["upstream"]
        distro = package["master-distgit"]
        if not options.package_name or package["name"] == options.package_name:
            project_toprocess = getinfo(project, repo, distro, since,
                                        options.local, options.dev, package)
            # If since == -1, then we only want to trigger a build for the
            # most recent change
            if since == "-1" or options.head_only:
                del project_toprocess[:-1]

            # The first entry in the list of commits is a commit we have
            # already processed, we want to process it again only if in dev
            # mode or distro hash has changed, we can't simply check against
            # the last commit in the db, as multiple commits can have the same
            # commit date
            for commit_toprocess in project_toprocess:
                if ((options.dev is True) or
                    options.run or
                    (not session.query(Commit).filter(
                        Commit.project_name == project,
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.distro_hash == commit_toprocess.distro_hash,
                        Commit.status != "RETRY")
                        .all())):
                    toprocess.append(commit_toprocess)

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True and not options.package_name:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([p['name'] for p in packages])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            specpath = os.path.join(config_options.datadir,
                                    project_name + "_distro",
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            rawspec = open(specpath).read(-1)
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                  for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                return cmp(a.dt_commit, b.dt_commit)
            return cmp(orders.index(a.project_name),
                       orders.index(b.project_name))
        toprocess.sort(cmp=my_cmp)
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()
    exit_code = 0
    for commit in toprocess:
        project = commit.project_name

        project_info = session.query(Project).filter(
            Project.project_name == project).first()
        if not project_info:
            project_info = Project(project_name=project, last_email=0)

        commit_hash = commit.commit_hash

        if options.run:
            try:
                run(options.run, commit, options.build_env,
                    options.dev, options.use_public, options.order,
                    do_build=False)
            except Exception:
                exit_code = 1
                if options.stop:
                    return exit_code
            continue

        logger.info("Processing %s %s" % (project, commit_hash))

        notes = ""
        try:
            built_rpms, notes = build(packages,
                                      commit, options.build_env, options.dev,
                                      options.use_public, options.order)
        except Exception as e:
            datadir = os.path.realpath(config_options.datadir)
            exit_code = 1
            logfile = os.path.join(datadir, "repos",
                                   commit.getshardedcommitdir(),
                                   "rpmbuild.log")
            if (isknownerror(logfile) and
                (timesretried(project, commit_hash, commit.distro_hash) <
                 config_options.maxretries)):
                logger.exception("Known error building packages for %s,"
                                 " will retry later" % project)
                commit.status = "RETRY"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            else:
                # If the log file hasn't been created we add what we have
                # This happens if the rpm build script didn't run.
                if not os.path.exists(logfile):
                    with open(logfile, "w") as fp:
                        fp.write(getattr(e, "message", notes))

                if not project_info.suppress_email():
                    sendnotifymail(packages, commit)
                    project_info.sent_email()
                    session.add(project_info)

                # Only submit a gerrit review if the last build was
                # successful or nonexistent, to avoid creating a review
                # for the same problem multiple times.
                if config_options.gerrit is not None:
                    if options.build_env:
                        env_vars = list(options.build_env)
                    else:
                        env_vars = []
                    last_build = getLastProcessedCommit(session, project)
                    if not last_build or last_build.status == 'SUCCESS':
                        for pkg in packages:
                            if project == pkg['name']:
                                break
                        else:
                            pkg = None
                        if pkg:
                            url = (get_commit_url(commit, pkg) +
                                   commit.commit_hash)
                            env_vars.append('GERRIT_URL=%s' % url)
                            env_vars.append('GERRIT_LOG=%s/%s' %
                                            (config_options.baseurl,
                                             commit.getshardedcommitdir()))
                            maintainers = ','.join(pkg['maintainers'])
                            env_vars.append('GERRIT_MAINTAINERS=%s' %
                                            maintainers)
                            logger.info('Creating a gerrit review using '
                                        'GERRIT_URL=%s '
                                        'GERRIT_MAINTAINERS=%s ' %
                                        (url, maintainers))
                            try:
                                submit_review(commit, env_vars)
                            except Exception:
                                logger.error('Unable to create review '
                                             'see review.log')
                        else:
                            logger.error('Unable to find info for project %s' %
                                         project)
                    else:
                        logger.info('Last build not successful '
                                    'for %s' % project)
                commit.status = "FAILED"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            if options.stop:
                return exit_code
        else:
            commit.status = "SUCCESS"
            commit.notes = notes
            commit.rpms = ",".join(built_rpms)
            session.add(commit)
        if options.dev is False:
            session.commit()
        genreports(packages, options)
        sync_repo(commit)

    # If we were bootstrapping, set the packages that required it to RETRY
    if options.order is True and not options.package_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()

    genreports(packages, options)
    return exit_code
Example #16
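Builds one commit's RPMs into a sharded yum repository, writes versions.csv and the .repo file, and atomically flips the current/consistent symlinks via symlink-then-rename.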
def build(packages, commit, env_vars, dev_mode, use_public, bootstrap):
    # Set the build timestamp to now
    commit.dt_build = int(time())

    project_name = commit.project_name
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = os.path.join("repos", commit.getshardedcommitdir())
    yumrepodir_abs = os.path.join(datadir, yumrepodir)
    commit_hash = commit.commit_hash

    try:
        build_rpm_wrapper(commit, dev_mode, use_public, bootstrap,
                          env_vars)
    except Exception as e:
        raise Exception("Error in build_rpm_wrapper for %s: %s" %
                        (project_name, e))

    built_rpms = []
    for rpm in os.listdir(yumrepodir_abs):
        if rpm.endswith(".rpm"):
            built_rpms.append(os.path.join(yumrepodir, rpm))
    if not built_rpms:
        raise Exception("No rpms built for %s" % project_name)

    notes = "OK"
    if not os.path.isfile(os.path.join(yumrepodir_abs, "installed")):
        logger.error('Build failed. See logs at: %s/%s/' % (datadir,
                                                            yumrepodir))
        raise Exception("Error installing %s" % project_name)
    else:
        # Overwrite installed file, adding the repo reference
        with open(os.path.join(yumrepodir_abs, "installed"), "w") as fp:
            fp.write("%s %s %s" % (commit.project_name,
                                   commit.commit_hash,
                                   commit.distro_hash))

    shafile = open(os.path.join(yumrepodir_abs, "versions.csv"), "w")
    shafile.write("Project,Source Repo,Source Sha,Dist Repo,Dist Sha,"
                  "Status,Last Success Timestamp\n")
    failures = 0

    for otherproject in packages:
        otherprojectname = otherproject["name"]
        if otherprojectname == project_name:
            # Output sha's this project
            dumpshas2file(shafile, commit, otherproject["upstream"],
                          otherproject["master-distgit"], "SUCCESS",
                          commit.dt_build)
            continue
        # Output sha's of all other projects represented in this repo
        last_success = getCommits(session, project=otherprojectname,
                                  with_status="SUCCESS").first()
        last_processed = getLastProcessedCommit(session, otherprojectname,
                                                'INVALID STATE')
        if last_success:
            for rpm in last_success.rpms.split(","):
                rpm_link_src = os.path.join(yumrepodir_abs,
                                            os.path.split(rpm)[1])
                os.symlink(os.path.relpath(os.path.join(datadir, rpm),
                                           yumrepodir_abs), rpm_link_src)
            last = last_success
        else:
            last = last_processed
        if last:
            dumpshas2file(shafile, last, otherproject["upstream"],
                          otherproject["master-distgit"],
                          last_processed.status, last.dt_build)
            if last_processed.status != 'SUCCESS':
                failures += 1
        else:
            failures += 1
    shafile.close()

    # Use createrepo_c when available
    try:
        from sh import createrepo_c
        sh.createrepo = createrepo_c
    except ImportError:
        pass
    sh.createrepo(yumrepodir_abs)

    with open(os.path.join(
            yumrepodir_abs, "%s.repo" % config_options.reponame),
            "w") as fp:
        fp.write("[%s]\nname=%s-%s-%s\nbaseurl=%s/%s\nenabled=1\n"
                 "gpgcheck=0\npriority=1" % (config_options.reponame,
                                             config_options.reponame,
                                             project_name, commit_hash,
                                             config_options.baseurl,
                                             commit.getshardedcommitdir()))

    dirnames = ['current']
    if failures == 0:
        dirnames.append('consistent')
    else:
        logger.info('%d packages not built correctly: not updating the '
                    'consistent symlink' % failures)
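    # Create each symlink under a temporary name, then rename it over
    # the final name: os.rename() is atomic on POSIX, so the link never
    # dangles while being updated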
    for dirname in dirnames:
        target_repo_dir = os.path.join(datadir, "repos", dirname)
        os.symlink(os.path.relpath(yumrepodir_abs,
                                   os.path.join(datadir, "repos")),
                   target_repo_dir + "_")
        os.rename(target_repo_dir + "_", target_repo_dir)

    return built_rpms, notes
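
The 'current' and 'consistent' links above are flipped with a
create-then-rename trick; a minimal standalone sketch of the pattern
(the helper name is illustrative, and the atomicity guarantee assumes a
POSIX filesystem):

import os

def atomic_symlink(target, link_name):
    # Create the link under a temporary name, then rename it over the
    # final name; on POSIX, os.rename() atomically replaces an existing
    # entry, so readers never observe a missing symlink.
    tmp = link_name + "_"
    os.symlink(target, tmp)
    os.rename(tmp, link_name)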
Example #17
def process_build_result_rpm(status,
                             packages,
                             session,
                             packages_to_process,
                             dev_mode=False,
                             run_cmd=False,
                             stop=False,
                             build_env=None,
                             head_only=False,
                             consistent=False,
                             failures=0):
    config_options = getConfigOptions()
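    # status is the 4-element result produced by a build worker:
    # [commit, built_rpms, notes, exception]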
    commit = status[0]
    built_rpms = status[1]
    notes = status[2]
    exception = status[3]
    commit_hash = commit.commit_hash
    project = commit.project_name
    project_info = session.query(Project).filter(
        Project.project_name == project).first()
    if not project_info:
        project_info = Project(project_name=project, last_email=0)
    exit_code = 0
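    # Exit codes used below: 0 = success, 1 = hard failure,
    # 2 = known transient error that will be retried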

    if run_cmd:
        if exception is not None:
            exit_code = 1
            if stop:
                return exit_code
        return exit_code

    if exception is None:
        commit.status = "SUCCESS"
        commit.notes = notes
        commit.artifacts = ",".join(built_rpms)
    else:
        logger.error("Received exception %s" % exception)

        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        logfile = os.path.join(yumrepodir, "rpmbuild.log")

        # If the log file hasn't been created, write what we have.
        # This happens if the rpm build script didn't run.
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)
        if not os.path.exists(logfile):
            with open(logfile, "w") as fp:
                fp.write(str(exception))

        if (isknownerror(logfile) and
            (timesretried(project, session, commit_hash, commit.distro_hash) <
             config_options.maxretries)):
            logger.exception("Known error building packages for %s,"
                             " will retry later" % project)
            commit.status = "RETRY"
            commit.notes = str(exception)
            # do not switch from an error exit code to a retry
            # exit code
            if exit_code != 1:
                exit_code = 2
        else:
            exit_code = 1

            if not project_info.suppress_email():
                sendnotifymail(packages, commit)
                project_info.sent_email()
                session.add(project_info)

            # Only submit a gerrit review if the last build was
            # successful or non-existent, to avoid creating a review
            # for the same problem multiple times.
            if config_options.gerrit is not None:
                if build_env:
                    env_vars = list(build_env)
                else:
                    env_vars = []
                last_build = getLastProcessedCommit(session, project)
                if not last_build or last_build.status == 'SUCCESS':
                    try:
                        submit_review(commit, packages, env_vars)
                    except Exception:
                        logger.error('Unable to create review, '
                                     'see review.log')
                else:
                    logger.info('Last build not successful '
                                'for %s' % project)
            commit.status = "FAILED"
            commit.notes = str(exception)
        if stop:
            return exit_code
    # Add commit to the session
    session.add(commit)

    genreports(packages, head_only, session, packages_to_process)
    # Export YAML file containing commit metadata
    export_commit_yaml(commit)
    try:
        sync_repo(commit)
    except Exception as e:
        logger.error('Repo sync failed for project %s' % project)
        consistent = False  # If we were consistent before, we are not anymore
        if exit_code == 0:  # The build was OK, but the sync failed
            exit_code = 1
            # so we need to set the commit status to "FAILED"
            commit.status = "FAILED"
            commit.notes = str(e)
            session.add(commit)
            # And open a review if needed
            if config_options.gerrit is not None:
                if build_env:
                    env_vars = list(build_env)
                else:
                    env_vars = []
                try:
                    submit_review(commit, packages, env_vars)
                except Exception:
                    logger.error('Unable to create review, see review.log')

    session.commit()

    # Generate the current and consistent symlinks
    if exception is None:
        dirnames = ['current']
        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = os.path.join("repos", commit.getshardedcommitdir())
        yumrepodir_abs = os.path.join(datadir, yumrepodir)
        if consistent:
            dirnames.append('consistent')
        else:
            if config_options.use_components:
                logger.info('%d packages not built correctly for component'
                            ' %s: not updating the consistent symlink' %
                            (failures, commit.component))
            else:
                logger.info('%d packages not built correctly: not updating'
                            ' the consistent symlink' % failures)
        for dirname in dirnames:
            if config_options.use_components:
                target_repo_dir = os.path.join(datadir, "repos/component",
                                               commit.component, dirname)
                source_repo_dir = os.path.join(datadir, "repos/component",
                                               commit.component)
            else:
                target_repo_dir = os.path.join(datadir, "repos", dirname)
                source_repo_dir = os.path.join(datadir, "repos")
            os.symlink(os.path.relpath(yumrepodir_abs, source_repo_dir),
                       target_repo_dir + "_")
            os.rename(target_repo_dir + "_", target_repo_dir)

        # If using components, synchronize the upper-level repo files
        if config_options.use_components:
            for dirname in dirnames:
                aggregate_repo_files(dirname,
                                     datadir,
                                     session,
                                     config_options.reponame,
                                     hashed_dir=True)

        # And synchronize them
        sync_symlinks(commit)

    if dev_mode is False:
        if consistent:
            # We have a consistent repo. Let's create a CIVote entry in the DB
            vote = CIVote(commit_id=commit.id,
                          ci_name='consistent',
                          ci_url='',
                          ci_vote=True,
                          ci_in_progress=False,
                          timestamp=int(commit.dt_build),
                          notes='',
                          component=commit.component)
            session.add(vote)
            session.commit()
    return exit_code
Example #18
    def test_noretry(self):
        commit = db.getLastProcessedCommit(self.session, 'python-pysaml2')
        self.assertEqual(commit.dt_build, 1444139517)
Example #19
def main():
    parser = argparse.ArgumentParser()

    parser.add_argument('--config-file',
                        default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--config-override',
                        action='append',
                        help="Override a configuration option from the"
                        " config file. Specify it as: "
                        "section.option=value. Can be used multiple "
                        "times if more than one override is needed.")
    parser.add_argument('--info-repo',
                        help="use a local distroinfo repo instead of"
                        " fetching the default one. Only applies when"
                        " pkginfo_driver is rdoinfo or downstream in"
                        " projects.ini")
    parser.add_argument('--build-env',
                        action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local',
                        action="store_true",
                        help="Use local git repos if possible. Only commited"
                        " changes in the local repo will be used in the"
                        " build.")
    parser.add_argument('--head-only',
                        action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name',
                       action='append',
                       help="Build a specific project name only."
                       " Use multiple times to build more than one "
                       "project in a run.")
    group.add_argument('--package-name',
                       action='append',
                       help="Build a specific package name only."
                       " Use multiple times to build more than one "
                       "package in a run.")
    parser.add_argument('--dev',
                        action="store_true",
                        help="Don't reset packaging git repo, force build "
                        "and add public master repo for dependencies "
                        "(dev mode).")
    parser.add_argument('--log-commands',
                        action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public',
                        action="store_true",
                        help="Use the public master repo for dependencies "
                        "when doing install verification.")
    parser.add_argument('--order',
                        action="store_true",
                        help="Compute the build order according to the spec "
                        "files instead of the dates of the commits. "
                        "Implies --sequential.")
    parser.add_argument('--sequential',
                        action="store_true",
                        help="Run all actions sequentially, regardless of the"
                        " number of workers specified in projects.ini.")
    parser.add_argument('--status',
                        action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck',
                        action="store_true",
                        help="Force a rebuild for a particular package. "
                        "Implies --package-name")
    parser.add_argument('--force-recheck',
                        action="store_true",
                        help="Force a rebuild for a particular package, even "
                        "if its last build was successful. Requires setting "
                        "allow_force_rechecks=True in projects.ini. "
                        "Implies --package-name and --recheck")
    parser.add_argument('--version',
                        action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                        "Implies --head-only")
    parser.add_argument('--stop', action="store_true", help="Stop on error.")
    parser.add_argument('--verbose-build',
                        action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock',
                        action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo',
                        action="store_true",
                        help="Do not generate a repo with all the built "
                        "packages.")
    parser.add_argument('--debug',
                        action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp, overrides=options.config_override)
    if options.dev:
        tmpdb_fd, tmpdb_path = tempfile.mkstemp()
        os.close(tmpdb_fd)  # only the path is needed; don't leak the fd
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    if options.project_name:
        pkg_names = [
            p['name'] for p in packages if p['project'] in options.project_name
        ]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    if options.status is True:
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(session,
                                                name,
                                                'invalid status',
                                                type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built "
                            "commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name '
                         'with --recheck.')
            sys.exit(1)

        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name, type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)
    # When we run a program instead of building, we don't care about
    # the commits; we just want to run once per package
    if options.run:
        options.head_only = True
    # Build a list of commits we need to process
    toprocess = []
    skipped_list = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is one we have already
        # processed; we only want to process it again in dev mode or if
        # the distro hash has changed. We can't simply check against the
        # last commit in the db, as multiple commits can have the same
        # commit date.
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
               options.run or \
               not session.query(Commit).filter(
                   Commit.commit_hash == commit_toprocess.commit_hash,
                   Commit.distro_hash == commit_toprocess.distro_hash,
                   Commit.extended_hash == commit_toprocess.extended_hash,
                   Commit.type == commit_toprocess.type,
                   Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        pool = multiprocessing.Pool()  # This will use all the system CPUs
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(
            getinfo,
            local=options.local,
            dev_mode=options.dev,
            head_only=options.head_only,
            db_connection=config_options.database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg, skipped = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                if skipped:
                    skipped_list.append(updated_pkg['name'])
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _, skipped = getinfo(
                    package,
                    local=options.local,
                    dev_mode=options.dev,
                    head_only=options.head_only,
                    db_connection=config_options.database_connection)
                if skipped:
                    skipped_list.append(package['name'])
                add_commits(project_toprocess)
    closeSession(session)  # Close session, will reopen during post_build

    # Store skip list
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(os.path.join(datadir, 'repos')):
        os.makedirs(os.path.join(datadir, 'repos'))
    with open(os.path.join(datadir, 'repos', 'skiplist.txt'), 'w') as fp:
        for pkg in skipped_list:
            fp.write(pkg + '\n')

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected, please"
                        " make sure the package name(s) are correct, and that "
                        "any failed commit you want to rebuild has been "
                        "removed from the database.")
        return 0

    # If requested, sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            filename = None
            for f in os.listdir(pkginfo.distgit_dir(project_name)):
                if f.endswith('.spec'):
                    filename = f

            if filename:
                specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                        filename)
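                # rpmspec -P prints the parsed, macro-expanded spec;
                # -D defines the repo_bootstrap macro for this pass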
                speclist.append(
                    sh.rpmspec('-D', 'repo_bootstrap 1', '-P', specpath))
                # Check if repo_bootstrap is defined in the package.
                # If so, we'll need to rebuild after the whole bootstrap
                with open(specpath) as specfp:
                    rawspec = specfp.read()
                if 'repo_bootstrap' in rawspec:
                    bootstraplist.append(project_name)
            else:
                logger.warning("Could not find a spec for package %s" %
                               project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec) for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                _a = orders.index(a.project_name) if a.project_name in \
                    orders else sys.maxsize
                _b = orders.index(b.project_name) if b.project_name in \
                    orders else sys.maxsize
            # cmp is no longer available in Python 3, so emulate it.
            # See "Ordering Comparisons" in:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)

        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages,
                                  commit,
                                  run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order,
                                  sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
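            # post_build()/process_build_result() update the repo and the
            # database, so serialize them across workers with the shared
            # remote.lck file lock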
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    if not options.run:
                        failures = post_build(status,
                                              packages,
                                              session,
                                              build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(status,
                                                  packages,
                                                  session,
                                                  toprocess_copy,
                                                  dev_mode=options.dev,
                                                  run_cmd=options.run,
                                                  stop=options.stop,
                                                  build_env=options.build_env,
                                                  head_only=options.head_only,
                                                  consistent=consistent,
                                                  failures=failures)
                closeSession(session)

            if exit_value != 0:
                exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker,
                                       packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order,
                                       sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status,
                                packages,
                                session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status,
                        packages,
                        session,
                        toprocess,
                        dev_mode=options.dev,
                        run_cmd=options.run,
                        stop=options.stop,
                        build_env=options.build_env,
                        head_only=options.head_only,
                        consistent=consistent,
                        failures=failures)
                    closeSession(session)
                if exit_value != 0:
                    exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    if options.dev:
        os.remove(tmpdb_path)
    return exit_code
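
A hypothetical invocation exercising the options defined above (the
"dlrn" executable name is an assumption; the flags come from the
argument parser in this example):

    dlrn --config-file projects.ini --head-only \
         --package-name python-pysaml2 --dev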
Example #20
    def test_newproject(self):
        commit = db.getLastProcessedCommit(self.session, 'python-newproject')
        self.assertEqual(commit, None)
Example #21
def import_commit(repo_url, config_file, db_connection=None,
                  local_info_repo=None):
    cp = configparser.RawConfigParser()
    cp.read(config_file)
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=local_info_repo,
                                   tags=config_options.tags,
                                   dev_mode=False)

    remote_yaml = repo_url + '/' + 'commit.yaml'
    with closing(urlopen(remote_yaml)) as r:
        contents = map(lambda x: x.decode('utf8'), r.readlines())

    osfd, tmpfilename = mkstemp()
    with os.fdopen(osfd, 'w') as fp:
        fp.writelines(contents)

    commits = loadYAML_list(tmpfilename)
    os.remove(tmpfilename)
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    for commit in commits:
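        # Reset the primary key and coerce the YAML-loaded fields back
        # to their native types before re-inserting the commit locally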
        commit.id = None
        if commit.artifacts == 'None':
            commit.artifacts = None
        commit.dt_build = int(commit.dt_build)
        commit.dt_commit = float(commit.dt_commit)
        commit.dt_distro = int(commit.dt_distro)
        # Check if the latest built commit for this project is newer
        # than this one. In that case, we should ignore it
        if db_connection:
            session = getSession(db_connection)
        else:
            session = getSession(config_options.database_connection)
        package = commit.project_name
        old_commit = getLastProcessedCommit(session, package)
        if old_commit:
            if old_commit.dt_commit >= commit.dt_commit:
                if old_commit.dt_distro >= commit.dt_distro:
                    logger.info('Skipping commit %s, a newer commit is '
                                'already built\n'
                                'Old: %s %s, new: %s %s' %
                                (commit.commit_hash, old_commit.dt_commit,
                                 old_commit.dt_distro, commit.dt_commit,
                                 commit.dt_distro))
                    continue    # Skip

        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)

        for logfile in ['build.log', 'installed', 'mock.log', 'root.log',
                        'rpmbuild.log', 'state.log']:
            logfile_url = repo_url + '/' + logfile
            try:
                with closing(urlopen(logfile_url)) as r:
                    contents = map(lambda x: x.decode('utf8'), r.readlines())
                with open(os.path.join(yumrepodir, logfile), "w") as fp:
                    fp.writelines(contents)
            except urllib.error.HTTPError:
                # Ignore errors; if the remote build failed, there may be
                # some missing files
                pass

        if commit.artifacts:
            for rpm in commit.artifacts.split(","):
                rpm_url = repo_url + '/' + rpm.split('/')[-1]
                try:
                    with closing(urlopen(rpm_url)) as r:
                        contents = r.read()
                    with open(os.path.join(datadir, rpm), "wb") as fp:
                        fp.write(contents)
                except urllib.error.HTTPError:
                    if rpm != 'None':
                        logger.warning("Failed to download rpm file %s"
                                       % rpm_url)
        # Get remote update lock, to prevent any other remote operation
        # while we are creating the repo and updating the database
        logger.debug("Acquiring remote update lock")
        with lock_file(os.path.join(datadir, 'remote.lck')):
            logger.debug("Acquired lock")
            if commit.status == 'SUCCESS':
                built_rpms = []
                for rpm in commit.artifacts.split(","):
                    built_rpms.append(rpm)
                status = [commit, built_rpms, commit.notes, None]
                post_build(status, packages, session)
            else:
                pkg = [p for p in packages if p['name'] == package][0]
                # Refresh the repositories (upstream and distgit) to make
                # sure they are present in the data directory. This is
                # needed when the worker runs on another host, mainly for
                # the submit_review.sh script.
                pkginfo.getinfo(project=pkg["name"], package=pkg,
                                since='-1', local=False, dev_mode=False)
                # Paths on the worker might differ so we overwrite them
                # to reflect data path on the local API host.
                commit.distgit_dir = pkginfo.distgit_dir(pkg['name'])
                commit.repo_dir = os.path.join(
                    config_options.datadir, pkg['name'])
                status = [commit, '', '', commit.notes]
            process_build_result(status, packages, session, [])
            closeSession(session)   # Keep one session per commit
        logger.debug("Released lock")
    return 0