Example #1
    def test_successful_build_no_repo(self, sh_mock):
        packages = [{
            'upstream': 'https://github.com/openstack/foo',
            'name': 'foo',
            'maintainers': '*****@*****.**',
            'master-distgit':
                'https://github.com/rdo-packages/foo-distgit.git'
        }]
        built_rpms = [
            'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
            '_c31d1b18/foo-1.2.3.el7.centos.noarch.rpm',
            'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
            '_c31d1b18/foo-1.2.3.el7.centos.src.rpm'
        ]

        status = [self.commit, built_rpms, 'OK', None]
        # Create directory for the CSV file
        yumdir = os.path.join(self.config.datadir, "repos",
                              self.commit.getshardedcommitdir())
        os.makedirs(yumdir)
        output = shell.post_build(status,
                                  packages,
                                  self.session,
                                  build_repo=False)
        # With build_repo=False, no createrepo call is expected
        expected = []

        self.assertEqual(sh_mock.call_args_list, expected)
        self.assertEqual(output, 0)
Example #2
    def test_successful_build_no_failures_component(self, sh_mock):
        packages = [{
            'upstream': 'https://github.com/openstack/foo',
            'name': 'foo',
            'maintainers': '*****@*****.**',
            'master-distgit':
                'https://github.com/rdo-packages/foo-distgit.git'
        }]
        built_rpms = [
            'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
            '_c31d1b18/foo-1.2.3.el7.centos.noarch.rpm',
            'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
            '_c31d1b18/foo-1.2.3.el7.centos.src.rpm'
        ]

        self.config.use_components = True
        self.commit.component = 'testcomponent'
        status = [self.commit, built_rpms, 'OK', None]
        # Create directory for the CSV file
        yumdir = os.path.join(self.config.datadir, "repos",
                              self.commit.getshardedcommitdir())
        os.makedirs(yumdir)
        output = shell.post_build(status, packages, self.session)
        expected = [mock.call(yumdir)]

        self.assertEqual(sh_mock.call_args_list, expected)
        self.assertEqual(output, 0)
        with open(os.path.join(yumdir, 'delorean.repo')) as fp:
            repofile = fp.read()
        assert 'component-testcomponent' in repofile
Example #3
    def test_successful_build(self, sh_mock):
        built_rpms = ['repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.noarch.rpm',
                      'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.src.rpm']

        status = [self.commit, built_rpms, 'OK', None]
        # Create directory for the CSV file
        yumdir = os.path.join(self.config.datadir, "repos",
                              self.commit.getshardedcommitdir())
        os.makedirs(yumdir)
        output = shell.post_build(status, self.packages,
                                  self.session)

        self.assertTrue(os.path.exists(
                        os.path.join(self.config.datadir,
                                     "repos",
                                     self.commit.getshardedcommitdir(),
                                     "versions.csv")))

        expected = [mock.call(yumdir)]
        self.assertEqual(sh_mock.call_args_list, expected)
        self.assertEqual(output, 1)     # 1 non-successfully built package
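
The sh_mock argument in the three tests above is injected by a @mock.patch decorator on the test class or methods, which these snippets do not show. The sketch below is a minimal, self-contained illustration of that pattern; publish(), create_repo() and the patch target are invented for the demonstration and are not the project's real API:

import unittest
from unittest import mock


def create_repo(path):
    # Stand-in for the external createrepo call that the tests mock out.
    raise RuntimeError("should never run during tests")


def publish(path, build_repo=True):
    # Simplified analogue of shell.post_build(): only calls create_repo
    # when build_repo is True, and returns 0 on success.
    if build_repo:
        create_repo(path)
    return 0


class TestPublish(unittest.TestCase):
    @mock.patch(__name__ + '.create_repo')
    def test_no_repo(self, sh_mock):
        # With build_repo=False, no createrepo call is expected.
        self.assertEqual(publish('/tmp/repo', build_repo=False), 0)
        self.assertEqual(sh_mock.call_args_list, [])

    @mock.patch(__name__ + '.create_repo')
    def test_with_repo(self, sh_mock):
        self.assertEqual(publish('/tmp/repo'), 0)
        self.assertEqual(sh_mock.call_args_list, [mock.call('/tmp/repo')])


if __name__ == '__main__':
    unittest.main()
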
Example #4
def import_commit(repo_url, config_file, db_connection=None,
                  local_info_repo=None):
    cp = configparser.RawConfigParser()
    cp.read(config_file)
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=local_info_repo,
                                   tags=config_options.tags,
                                   dev_mode=False)

    remote_yaml = repo_url + '/' + 'commit.yaml'
    r = urlopen(remote_yaml)
    contents = map(lambda x: x.decode('utf8'), r.readlines())

    osfd, tmpfilename = mkstemp()
    fp = os.fdopen(osfd, 'w')
    fp.writelines(contents)
    fp.close()

    commits = loadYAML_list(tmpfilename)
    os.remove(tmpfilename)
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    for commit in commits:
        commit.id = None
        if commit.artifacts == 'None':
            commit.artifacts = None
        commit.dt_build = int(commit.dt_build)
        commit.dt_commit = float(commit.dt_commit)
        commit.dt_distro = int(commit.dt_distro)
        # Check if the latest built commit for this project is newer
        # than this one. In that case, we should ignore it
        if db_connection:
            session = getSession(db_connection)
        else:
            session = getSession(config_options.database_connection)
        package = commit.project_name
        old_commit = getLastProcessedCommit(session, package)
        if old_commit:
            if old_commit.dt_commit >= commit.dt_commit:
                if old_commit.dt_distro >= commit.dt_distro:
                    logger.info('Skipping commit %s, a newer commit is '
                                'already built\n'
                                'Old: %s %s, new: %s %s' %
                                (commit.commit_hash, old_commit.dt_commit,
                                 old_commit.dt_distro, commit.dt_commit,
                                 commit.dt_distro))
                    continue    # Skip

        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)

        for logfile in ['build.log', 'installed', 'mock.log', 'root.log',
                        'rpmbuild.log', 'state.log']:
            logfile_url = repo_url + '/' + logfile
            try:
                r = urlopen(logfile_url)
                contents = map(lambda x: x.decode('utf8'), r.readlines())
                with open(os.path.join(yumrepodir, logfile), "w") as fp:
                    fp.writelines(contents)
            except urllib.error.HTTPError:
                # Ignore errors; if the remote build failed, some files
                # may be missing
                pass

        if commit.artifacts:
            for rpm in commit.artifacts.split(","):
                rpm_url = repo_url + '/' + rpm.split('/')[-1]
                try:
                    r = urlopen(rpm_url)
                    contents = r.read()
                    with open(os.path.join(datadir, rpm), "wb") as fp:
                        fp.write(contents)
                except urllib.error.HTTPError:
                    if rpm != 'None':
                        logger.warning("Failed to download rpm file %s"
                                       % rpm_url)
        # Get remote update lock, to prevent any other remote operation
        # while we are creating the repo and updating the database
        logger.debug("Acquiring remote update lock")
        with lock_file(os.path.join(datadir, 'remote.lck')):
            logger.debug("Acquired lock")
            if commit.status == 'SUCCESS':
                built_rpms = []
                for rpm in commit.artifacts.split(","):
                    built_rpms.append(rpm)
                status = [commit, built_rpms, commit.notes, None]
                post_build(status, packages, session)
            else:
                pkg = [p for p in packages if p['name'] == package][0]
                # Refresh the repositories (upstream and distgit) to make
                # sure they are present in the data directory. This is
                # needed when the worker runs on another host, mainly for
                # the submit_review.sh script.
                pkginfo.getinfo(project=pkg["name"], package=pkg,
                                since='-1', local=False, dev_mode=False)
                # Paths on the worker may differ, so we overwrite them to
                # reflect the data path on the local API host.
                commit.distgit_dir = pkginfo.distgit_dir(pkg['name'])
                commit.repo_dir = os.path.join(
                    config_options.datadir, pkg['name'])
                status = [commit, '', '', commit.notes]
            process_build_result(status, packages, session, [])
            closeSession(session)   # Keep one session per commit
        logger.debug("Released lock")
    return 0
Example #5
def import_commit(repo_url, config_file, db_connection=None,
                  local_info_repo=None):
    cp = configparser.RawConfigParser()
    cp.read(config_file)
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=local_info_repo,
                                   tags=config_options.tags,
                                   dev_mode=False)

    remote_yaml = repo_url + '/' + 'commit.yaml'
    with closing(urlopen(remote_yaml)) as r:
        contents = map(lambda x: x.decode('utf8'), r.readlines())

    osfd, tmpfilename = mkstemp()
    with os.fdopen(osfd, 'w') as fp:
        fp.writelines(contents)

    commits = loadYAML_list(tmpfilename)
    os.remove(tmpfilename)
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    for commit in commits:
        commit.id = None
        if commit.artifacts == 'None':
            commit.artifacts = None
        commit.dt_build = int(commit.dt_build)
        commit.dt_commit = float(commit.dt_commit)
        commit.dt_distro = int(commit.dt_distro)
        # Check if the latest built commit for this project is newer
        # than this one. In that case, we should ignore it
        if db_connection:
            session = getSession(db_connection)
        else:
            session = getSession(config_options.database_connection)
        package = commit.project_name
        old_commit = getLastProcessedCommit(session, package)
        if old_commit:
            if old_commit.dt_commit >= commit.dt_commit:
                if old_commit.dt_distro >= commit.dt_distro:
                    logger.info('Skipping commit %s, a newer commit is '
                                'already built\n'
                                'Old: %s %s, new: %s %s' %
                                (commit.commit_hash, old_commit.dt_commit,
                                 old_commit.dt_distro, commit.dt_commit,
                                 commit.dt_distro))
                    continue    # Skip

        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)

        for logfile in ['build.log', 'installed', 'mock.log', 'root.log',
                        'rpmbuild.log', 'state.log']:
            logfile_url = repo_url + '/' + logfile
            try:
                with closing(urlopen(logfile_url)) as r:
                    contents = map(lambda x: x.decode('utf8'), r.readlines())
                with open(os.path.join(yumrepodir, logfile), "w") as fp:
                    fp.writelines(contents)
            except urllib.error.HTTPError:
                # Ignore errors; if the remote build failed, some files
                # may be missing
                pass

        if commit.artifacts:
            for rpm in commit.artifacts.split(","):
                rpm_url = repo_url + '/' + rpm.split('/')[-1]
                try:
                    with closing(urlopen(rpm_url)) as r:
                        contents = r.read()
                    with open(os.path.join(datadir, rpm), "wb") as fp:
                        fp.write(contents)
                except urllib.error.HTTPError:
                    if rpm != 'None':
                        logger.warning("Failed to download rpm file %s"
                                       % rpm_url)
        # Get remote update lock, to prevent any other remote operation
        # while we are creating the repo and updating the database
        logger.debug("Acquiring remote update lock")
        with lock_file(os.path.join(datadir, 'remote.lck')):
            logger.debug("Acquired lock")
            if commit.status == 'SUCCESS':
                built_rpms = []
                for rpm in commit.artifacts.split(","):
                    built_rpms.append(rpm)
                status = [commit, built_rpms, commit.notes, None]
                post_build(status, packages, session)
            else:
                pkg = [p for p in packages if p['name'] == package][0]
                # Refresh the repositories (upstream and distgit) to make
                # sure they are present in the data directory. This is
                # needed when the worker runs on another host, mainly for
                # the submit_review.sh script.
                pkginfo.getinfo(project=pkg["name"], package=pkg,
                                since='-1', local=False, dev_mode=False)
                # Paths on the worker may differ, so we overwrite them to
                # reflect the data path on the local API host.
                commit.distgit_dir = pkginfo.distgit_dir(pkg['name'])
                commit.repo_dir = os.path.join(
                    config_options.datadir, pkg['name'])
                status = [commit, '', '', commit.notes]
            process_build_result(status, packages, session, [])
            closeSession(session)   # Keep one session per commit
        logger.debug("Released lock")
    return 0
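
import_commit() takes the URL of a remote build, a configuration file, an optional database connection string and an optional local info-repo path, and returns 0 once every commit listed in the remote commit.yaml has been processed. A hypothetical command-line wrapper might look like the sketch below; the dlrn.shell import path and the flag names are assumptions made for illustration rather than the project's actual CLI:

import argparse

from dlrn.shell import import_commit  # assumed module path


def main():
    parser = argparse.ArgumentParser(
        description='Import a remotely built commit into the local repos')
    parser.add_argument('--repo-url', required=True,
                        help='base URL serving commit.yaml, the build logs '
                             'and the built rpms')
    parser.add_argument('--config-file', default='projects.ini',
                        help='configuration file to load')
    parser.add_argument('--info-repo', default=None,
                        help='optional local path to the package info repo')
    args = parser.parse_args()
    return import_commit(args.repo_url, args.config_file,
                         local_info_repo=args.info_repo)


if __name__ == '__main__':
    raise SystemExit(main())
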