コード例 #1
0
ファイル: rsync.py プロジェクト: hybrid-packages/DLRN
def sync_repo(commit):
    """Rsync the commit's repo directory and report files to rsyncdest.

    Uses rsync -R (--relative); the "." inserted in the source paths after
    "repos" anchors the relative part that is recreated at the destination.

    :param commit: commit object providing getshardedcommitdir()
    :raises Exception: re-raises any rsync failure so callers can treat a
        failed sync as an error.
    """
    config_options = getConfigOptions()
    rsyncdest = config_options.rsyncdest
    rsyncport = config_options.rsyncport
    datadir = os.path.realpath(config_options.datadir)

    if rsyncdest != '':
        # We are only rsyncing the current repo dir to rsyncdest
        rsyncpaths = []
        # We are inserting a dot in the path after repos, this is used by
        # rsync -R (see man rsync)
        commitdir_abs = os.path.join(datadir, "repos", ".",
                                     commit.getshardedcommitdir())
        rsyncpaths.append(commitdir_abs)
        # We also need report.html, status_report.html, queue.html,
        # styles.css and the consistent and current symlinks
        for filename in [
                'report.html', 'status_report.html', 'styles.css', 'queue.html'
        ]:
            filepath = os.path.join(datadir, "repos", ".", filename)
            rsyncpaths.append(filepath)

        rsh_command = 'ssh -p %s -o StrictHostKeyChecking=no' % rsyncport
        try:
            sh.rsync('-avzR', '--delete-delay', '-e', rsh_command, rsyncpaths,
                     rsyncdest)
        except Exception as e:
            # logger.warn() is a deprecated alias for warning()
            logger.warning('Failed to rsync content to %s, '
                           'got error %s' % (rsyncdest, e))
            # Bare raise preserves the original traceback
            raise
コード例 #2
0
ファイル: shell.py プロジェクト: openstack-packages/DLRN
def export_commit_yaml(commit):
    """Write a commit.yaml file with the commit metadata into its repo dir."""
    config_options = getConfigOptions()
    # Export YAML file containing commit metadata
    base = os.path.realpath(config_options.datadir)
    repo_path = os.path.join(base, "repos", commit.getshardedcommitdir())
    saveYAML_commit(commit, os.path.join(repo_path, 'commit.yaml'))
コード例 #3
0
ファイル: rsync.py プロジェクト: openstack-packages/DLRN
def sync_repo(commit):
    """Rsync the commit's repo directory plus the report files to rsyncdest.

    Uses rsync -R (--relative); the "." inserted in the source paths after
    "repos" anchors the relative part recreated at the destination.

    :param commit: commit object providing getshardedcommitdir()
    :raises Exception: re-raises any rsync failure so callers can treat a
        failed sync as an error.
    """
    config_options = getConfigOptions()
    rsyncdest = config_options.rsyncdest
    rsyncport = config_options.rsyncport
    datadir = os.path.realpath(config_options.datadir)

    if rsyncdest != '':
        # We are only rsyncing the current repo dir to rsyncdest
        rsyncpaths = []
        # We are inserting a dot in the path after repos, this is used by
        # rsync -R (see man rsync)
        commitdir_abs = os.path.join(datadir, "repos", ".",
                                     commit.getshardedcommitdir())
        rsyncpaths.append(commitdir_abs)
        # We also need report.html, status_report.html, queue.html,
        # styles.css, status_report.csv and the consistent/current symlinks
        for filename in ['report.html', 'status_report.html', 'styles.css',
                         'queue.html', 'status_report.csv']:
            filepath = os.path.join(datadir, "repos", ".", filename)
            rsyncpaths.append(filepath)

        rsh_command = 'ssh -p %s -o StrictHostKeyChecking=no' % rsyncport
        try:
            sh.rsync('-avzR', '--delete-delay',
                     '-e', rsh_command,
                     rsyncpaths, rsyncdest)
        except Exception as e:
            # logger.warn() is a deprecated alias for warning()
            logger.warning('Failed to rsync content to %s, '
                           'got error %s' % (rsyncdest, e))
            # Bare raise preserves the original traceback
            raise
コード例 #4
0
def run(program,
        commit,
        env_vars,
        dev_mode,
        use_public,
        bootstrap,
        do_build=True):
    """Run *program* (a build script) for *commit* via sh.env.

    :param program: path of the script to execute
    :param commit: commit object being processed
    :param env_vars: list of KEY=VALUE strings prepended to the command
    :param do_build: when True, wipe and recreate the yum repo directory;
        when False, only log the command being run
    :raises Exception: re-raises any failure from the executed command.

    Note: dev_mode, use_public and bootstrap are not referenced in this
    body; they are kept for interface compatibility with the callers.
    """
    config_options = getConfigOptions()
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = os.path.join("repos", commit.getshardedcommitdir())
    yumrepodir_abs = os.path.join(datadir, yumrepodir)

    commit_hash = commit.commit_hash
    project_name = commit.project_name
    repo_dir = commit.repo_dir

    if do_build:
        # If yum repo already exists remove it and assume we're starting fresh
        if os.path.exists(yumrepodir_abs):
            shutil.rmtree(yumrepodir_abs)
        os.makedirs(yumrepodir_abs)

    sh.git("--git-dir", "%s/.git" % repo_dir, "--work-tree=%s" % repo_dir,
           "reset", "--hard", commit_hash)

    run_cmd = []
    if env_vars:
        run_cmd.extend(env_vars)

    run_cmd.extend([
        program, config_options.target, project_name,
        os.path.join(datadir, yumrepodir), datadir, config_options.baseurl,
        os.path.realpath(commit.distgit_dir)
    ])
    if not do_build:
        logger.info('Running %s' % ' '.join(run_cmd))

    try:
        # Older sh releases expose env_ instead of env; dispatch on version
        sh_version = SemanticVersion.from_pip_string(sh.__version__)
        min_sh_version = SemanticVersion.from_pip_string('1.09')
        if sh_version > min_sh_version:
            sh.env(run_cmd, _err=process_mock_output, _out=process_mock_output)
        else:
            sh.env_(run_cmd,
                    _err=process_mock_output,
                    _out=process_mock_output)
    except Exception:
        logger.error('cmd failed. See logs at: %s/%s/' % (datadir, yumrepodir))
        # Bare raise preserves the original traceback
        raise
コード例 #5
0
def build_rpm(packages, commit, env_vars, dev_mode, use_public, bootstrap,
              sequential):
    """Build the RPMs for *commit* and verify they installed correctly.

    :returns: tuple (built_rpms, notes) where built_rpms is the list of
        produced .rpm paths relative to datadir and notes is "OK".
    :raises Exception: if the build wrapper fails, no rpms were produced,
        or the "installed" marker file is missing.
    """
    config_options = getConfigOptions()
    # Set the build timestamp to now
    commit.dt_build = int(time())

    project_name = commit.project_name
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = _get_yumrepodir(commit)
    version_from = get_version_from(packages, project_name)

    try:
        build_rpm_wrapper(commit,
                          dev_mode,
                          use_public,
                          bootstrap,
                          env_vars,
                          sequential,
                          version_from=version_from)
    except Exception as e:
        logger.error('Build failed. See logs at: %s/%s/' %
                     (datadir, yumrepodir))
        # Chain the original exception so its traceback is not lost
        raise Exception("Error in build_rpm_wrapper for %s: %s" %
                        (project_name, e)) from e

    # This *could* have changed during the build, see kojidriver.py
    yumrepodir = _get_yumrepodir(commit)
    yumrepodir_abs = os.path.join(datadir, yumrepodir)

    built_rpms = []
    for rpm in os.listdir(yumrepodir_abs):
        if rpm.endswith(".rpm"):
            built_rpms.append(os.path.join(yumrepodir, rpm))
    if not built_rpms:
        raise Exception("No rpms built for %s" % project_name)

    notes = "OK"

    if not os.path.isfile(os.path.join(yumrepodir_abs, "installed")):
        logger.error('Build failed. See logs at: %s/%s/' %
                     (datadir, yumrepodir))
        raise Exception("Error installing %s" % project_name)
    else:
        # Overwrite installed file, adding the repo reference
        with open(os.path.join(yumrepodir_abs, "installed"), "w") as fp:
            fp.write(
                "%s %s %s" %
                (commit.project_name, commit.commit_hash, commit.distro_hash))
    return built_rpms, notes
コード例 #6
0
def run(program,
        commit,
        env_vars,
        dev_mode,
        use_public,
        bootstrap,
        do_build=True,
        version_from=None):
    """Execute *program* for *commit* inside a freshly prepared repo dir.

    :param program: path of the build script to execute via sh.env
    :param env_vars: list of KEY=VALUE strings prepended to the command
    :param do_build: when True, wipe and recreate the yum repo directory;
        when False, only log the command being run
    :param version_from: optional ref whose tags are merged (with the
        "ours" strategy) to define the package version
    :raises Exception: re-raises any failure from the executed command.

    Note: dev_mode, use_public and bootstrap are not referenced in this
    body; they are kept for interface compatibility with the callers.
    """
    config_options = getConfigOptions()
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = _get_yumrepodir(commit)
    yumrepodir_abs = os.path.join(datadir, yumrepodir)
    project_name = commit.project_name
    repo_dir = commit.repo_dir

    if do_build:
        # If yum repo already exists remove it and assume we're starting fresh
        if os.path.exists(yumrepodir_abs):
            shutil.rmtree(yumrepodir_abs)
        os.makedirs(yumrepodir_abs)

    if version_from:
        logger.info('Taking tags to define version from %s' % version_from)
        git = sh.git.bake(_cwd=repo_dir, _tty_out=False)
        git.merge('-s', 'ours', '-m', '"fake merge tags"', version_from)

    run_cmd = []
    if env_vars:
        run_cmd.extend(env_vars)

    run_cmd.extend([
        program, config_options.target, project_name,
        os.path.join(datadir, yumrepodir), datadir, config_options.baseurl,
        os.path.realpath(commit.distgit_dir)
    ])
    if not do_build:
        logger.info('Running %s' % ' '.join(run_cmd))

    try:
        sh.env(run_cmd, _err=process_mock_output, _out=process_mock_output)
    except Exception:
        # This *could* have changed during the build, see kojidriver.py
        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = _get_yumrepodir(commit)
        logger.error('cmd failed. See logs at: %s/%s/' % (datadir, yumrepodir))
        # Bare raise preserves the original traceback
        raise
コード例 #7
0
ファイル: build.py プロジェクト: openstack-packages/DLRN
def build_rpm(packages, commit, env_vars, dev_mode, use_public,
              bootstrap, sequential):
    """Build the RPMs for *commit* and check the install marker file.

    :returns: tuple (built_rpms, notes): the list of produced .rpm paths
        relative to datadir and the status note "OK".
    :raises Exception: when the build wrapper fails, no rpms were
        produced, or the "installed" marker is missing.
    """
    config_options = getConfigOptions()
    # Set the build timestamp to now
    commit.dt_build = int(time())

    project_name = commit.project_name
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = _get_yumrepodir(commit)
    version_from = get_version_from(packages, project_name)

    try:
        build_rpm_wrapper(commit, dev_mode, use_public, bootstrap,
                          env_vars, sequential,
                          version_from=version_from)
    except Exception as e:
        logger.error('Build failed. See logs at: %s/%s/' % (datadir,
                                                            yumrepodir))
        # Chain the original exception so its traceback is not lost
        raise Exception("Error in build_rpm_wrapper for %s: %s" %
                        (project_name, e)) from e

    # This *could* have changed during the build, see kojidriver.py
    yumrepodir = _get_yumrepodir(commit)
    yumrepodir_abs = os.path.join(datadir, yumrepodir)

    built_rpms = []
    for rpm in os.listdir(yumrepodir_abs):
        if rpm.endswith(".rpm"):
            built_rpms.append(os.path.join(yumrepodir, rpm))
    if not built_rpms:
        raise Exception("No rpms built for %s" % project_name)

    notes = "OK"

    if not os.path.isfile(os.path.join(yumrepodir_abs, "installed")):
        logger.error('Build failed. See logs at: %s/%s/' % (datadir,
                                                            yumrepodir))
        raise Exception("Error installing %s" % project_name)
    else:
        # Overwrite installed file, adding the repo reference
        with open(os.path.join(yumrepodir_abs, "installed"), "w") as fp:
            fp.write("%s %s %s" % (commit.project_name,
                                   commit.commit_hash,
                                   commit.distro_hash))
    return built_rpms, notes
コード例 #8
0
ファイル: build.py プロジェクト: openstack-packages/DLRN
def run(program, commit, env_vars, dev_mode, use_public, bootstrap,
        do_build=True, version_from=None):
    """Execute *program* for *commit*, preparing the yum repo dir first.

    :param program: path of the build script to execute via sh.env
    :param env_vars: list of KEY=VALUE strings prepended to the command
    :param do_build: when True, wipe and recreate the yum repo directory;
        when False, only log the command being run
    :param version_from: optional ref whose tags are merged (with the
        "ours" strategy) to define the package version
    :raises Exception: re-raises any failure from the executed command.

    Note: dev_mode, use_public and bootstrap are not referenced in this
    body; they are kept for interface compatibility with the callers.
    """
    config_options = getConfigOptions()
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = _get_yumrepodir(commit)
    yumrepodir_abs = os.path.join(datadir, yumrepodir)
    project_name = commit.project_name
    repo_dir = commit.repo_dir

    if do_build:
        # If yum repo already exists remove it and assume we're starting fresh
        if os.path.exists(yumrepodir_abs):
            shutil.rmtree(yumrepodir_abs)
        os.makedirs(yumrepodir_abs)

    if version_from:
        logger.info('Taking tags to define version from %s' % version_from)
        git = sh.git.bake(_cwd=repo_dir, _tty_out=False)
        git.merge('-s', 'ours', '-m', '"fake merge tags"', version_from)

    run_cmd = []
    if env_vars:
        run_cmd.extend(env_vars)

    run_cmd.extend([program,
                    config_options.target, project_name,
                    os.path.join(datadir, yumrepodir),
                    datadir, config_options.baseurl,
                    os.path.realpath(commit.distgit_dir)])
    if not do_build:
        logger.info('Running %s' % ' '.join(run_cmd))

    try:
        sh.env(run_cmd, _err=process_mock_output, _out=process_mock_output)
    except Exception:
        # This *could* have changed during the build, see kojidriver.py
        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = _get_yumrepodir(commit)
        logger.error('cmd failed. See logs at: %s/%s/' % (datadir,
                                                          yumrepodir))
        # Bare raise preserves the original traceback
        raise
コード例 #9
0
def sendnotifymail(packages, commit):
    """Email the maintainers of *commit*'s package about a failed build.

    Skips sending (after logging) when no SMTP server is configured.
    SMTP errors are logged, not raised.
    """
    config_options = getConfigOptions()

    details = copy.copy([
        package for package in packages
        if package["name"] == commit.project_name
    ][0])

    email_to = details['maintainers']
    if not config_options.smtpserver:
        logger.info("Skipping notify email to %r" % email_to)
        return

    details["logurl"] = "%s/%s" % (config_options.baseurl,
                                   commit.getshardedcommitdir())
    # Render the notification template
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader([config_options.templatedir]))
    jinja_template = jinja_env.get_template("notification_email.j2")
    error_body = jinja_template.render(details=details)

    msg = MIMEText(error_body)
    msg['Subject'] = '[dlrn] %s master package build failed' % \
                     commit.project_name

    email_from = '*****@*****.**'
    msg['From'] = email_from
    msg['To'] = "packagers"

    logger.info("Sending notify email to %r" % email_to)
    s = None
    try:
        s = smtplib.SMTP(config_options.smtpserver)
        s.sendmail(email_from, email_to, msg.as_string())
        s.quit()
    except smtplib.SMTPException as e:
        logger.error("An issue occurred when sending "
                     "notify email to %r (%s)" % (email_to, e))
    finally:
        # Only close if the connection was created: previously a failure
        # in SMTP() itself caused a NameError on 's' here.
        if s is not None:
            s.close()
コード例 #10
0
def build_worker(packages,
                 commit,
                 run_cmd=False,
                 build_env=None,
                 dev_mode=False,
                 use_public=False,
                 order=False,
                 sequential=False):
    """Worker entry point: run a custom command or build the commit.

    Returns a 4-item list [commit, built_rpms, notes, exception]; the
    exception slot is None on success.
    """
    config_options = getConfigOptions()

    if run_cmd:
        try:
            run(run_cmd, commit, build_env, dev_mode, use_public, order,
                do_build=False)
        except Exception as exc:
            return [commit, '', '', exc]
        return [commit, '', '', None]

    if config_options.use_components:
        logger.info("Processing %s %s for component %s" %
                    (commit.project_name, commit.commit_hash,
                     commit.component))
    else:
        logger.info("Processing %s %s" %
                    (commit.project_name, commit.commit_hash))

    try:
        built_rpms, notes = build(packages, commit, build_env, dev_mode,
                                  use_public, order, sequential)
    except Exception as exc:
        return [commit, '', '', exc]
    return [commit, built_rpms, notes, None]
コード例 #11
0
def sendnotifymail(packages, commit):
    """Email the maintainers of *commit*'s package about a failed build.

    Skips sending (after logging) when no SMTP server is configured.
    SMTP errors are logged, not raised.
    """
    config_options = getConfigOptions()

    details = copy.copy(
        [package for package in packages
            if package["name"] == commit.project_name][0])

    email_to = details['maintainers']
    if not config_options.smtpserver:
        logger.info("Skipping notify email to %r" % email_to)
        return

    details["logurl"] = "%s/%s" % (config_options.baseurl,
                                   commit.getshardedcommitdir())
    # Render the notification template
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader([config_options.templatedir]))
    jinja_template = jinja_env.get_template("notification_email.j2")
    error_body = jinja_template.render(details=details)

    msg = MIMEText(error_body)
    msg['Subject'] = '[dlrn] %s master package build failed' % \
                     commit.project_name

    email_from = '*****@*****.**'
    msg['From'] = email_from
    msg['To'] = "packagers"

    logger.info("Sending notify email to %r" % email_to)
    s = None
    try:
        s = smtplib.SMTP(config_options.smtpserver)
        s.sendmail(email_from, email_to, msg.as_string())
        s.quit()
    except smtplib.SMTPException as e:
        logger.error("An issue occurred when sending "
                     "notify email to %r (%s)" % (email_to, e))
    finally:
        # Only close if the connection was created: previously a failure
        # in SMTP() itself caused a NameError on 's' here.
        if s is not None:
            s.close()
コード例 #12
0
def submit_review(commit, packages, env_vars):
    """Create a Gerrit review for a failed build of *commit*.

    Finds the package entry matching the commit's project, exports the
    GERRIT_* environment variables expected by submit_review.sh and runs
    the script with a 300 second timeout.
    """
    config_options = getConfigOptions()
    datadir = os.path.realpath(config_options.datadir)
    scriptsdir = os.path.realpath(config_options.scriptsdir)
    yumrepodir = os.path.join("repos", commit.getshardedcommitdir())

    project_name = commit.project_name

    for pkg in packages:
        if project_name == pkg['name']:
            break
    else:
        # Fixed NameError: this previously referenced undefined 'project'
        logger.error('Unable to find info for project'
                     ' %s' % project_name)
        return

    url = (get_commit_url(commit, pkg) + commit.commit_hash)
    env_vars.append('GERRIT_URL=%s' % url)
    env_vars.append('GERRIT_LOG=%s/%s' % (config_options.baseurl,
                                          commit.getshardedcommitdir()))
    maintainers = ','.join(pkg['maintainers'])
    env_vars.append('GERRIT_MAINTAINERS=%s' % maintainers)
    env_vars.append('GERRIT_TOPIC=%s' % config_options.gerrit_topic)
    logger.info('Creating a gerrit review using '
                'GERRIT_URL=%s '
                'GERRIT_MAINTAINERS=%s ' %
                (url, maintainers))

    run_cmd = []
    if env_vars:
        run_cmd.extend(env_vars)

    run_cmd.extend([os.path.join(scriptsdir, "submit_review.sh"),
                    project_name, os.path.join(datadir, yumrepodir),
                    datadir, config_options.baseurl,
                    os.path.realpath(commit.distgit_dir)])
    sh.env(run_cmd, _timeout=300)
コード例 #13
0
ファイル: rsync.py プロジェクト: hybrid-packages/DLRN
def sync_symlinks(commit):
    """Rsync the 'consistent' and 'current' symlinks to rsyncdest.

    Runs as a second pass, after the content has been copied, to avoid a
    race where the symlinks would point at not-yet-copied repos.
    Failures are only logged: a later build will fix the symlinks.
    """
    config_options = getConfigOptions()
    rsyncdest = config_options.rsyncdest
    rsyncport = config_options.rsyncport
    datadir = os.path.realpath(config_options.datadir)

    if rsyncdest != '':
        # We want to sync the symlinks in a second pass, once all content
        # has been copied, to avoid a race condition if they are copied first
        rsyncpaths = []
        for filename in ['consistent', 'current']:
            filepath = os.path.join(datadir, "repos", ".", filename)
            rsyncpaths.append(filepath)

        rsh_command = 'ssh -p %s -o StrictHostKeyChecking=no' % rsyncport
        try:
            sh.rsync('-avzR', '--delete-delay', '-e', rsh_command, rsyncpaths,
                     rsyncdest)
        except Exception as e:
            # We are not raising exceptions for symlink rsyncs, these will
            # be fixed after another build.
            # logger.warn() is a deprecated alias for warning()
            logger.warning('Failed to rsync symlinks to %s, '
                           'got error %s' % (rsyncdest, e))
コード例 #14
0
def submit_review(commit, packages, env_vars):
    """Create a Gerrit review for a failed build of *commit*.

    Finds the package entry matching the commit's project, exports the
    GERRIT_* environment variables expected by submit_review.sh and runs
    the script with a 300 second timeout.
    """
    config_options = getConfigOptions()
    datadir = os.path.realpath(config_options.datadir)
    scriptsdir = os.path.realpath(config_options.scriptsdir)
    yumrepodir = os.path.join("repos", commit.getshardedcommitdir())

    project_name = commit.project_name

    for pkg in packages:
        if project_name == pkg['name']:
            break
    else:
        # Fixed NameError: this previously referenced undefined 'project'
        logger.error('Unable to find info for project' ' %s' % project_name)
        return

    url = (get_commit_url(commit, pkg) + commit.commit_hash)
    env_vars.append('GERRIT_URL=%s' % url)
    env_vars.append('GERRIT_LOG=%s/%s' %
                    (config_options.baseurl, commit.getshardedcommitdir()))
    maintainers = ','.join(pkg['maintainers'])
    env_vars.append('GERRIT_MAINTAINERS=%s' % maintainers)
    env_vars.append('GERRIT_TOPIC=%s' % config_options.gerrit_topic)
    logger.info('Creating a gerrit review using '
                'GERRIT_URL=%s '
                'GERRIT_MAINTAINERS=%s ' % (url, maintainers))

    run_cmd = []
    if env_vars:
        run_cmd.extend(env_vars)

    run_cmd.extend([
        os.path.join(scriptsdir, "submit_review.sh"), project_name,
        os.path.join(datadir, yumrepodir), datadir, config_options.baseurl,
        os.path.realpath(commit.distgit_dir)
    ])
    sh.env(run_cmd, _timeout=300)
コード例 #15
0
ファイル: rsync.py プロジェクト: openstack-packages/DLRN
def sync_symlinks(commit):
    """Rsync the 'consistent' and 'current' symlinks to rsyncdest.

    Runs as a second pass, after the content has been copied, to avoid a
    race where the symlinks would point at not-yet-copied repos.
    Failures are only logged: a later build will fix the symlinks.
    """
    config_options = getConfigOptions()
    rsyncdest = config_options.rsyncdest
    rsyncport = config_options.rsyncport
    datadir = os.path.realpath(config_options.datadir)

    if rsyncdest != '':
        # We want to sync the symlinks in a second pass, once all content
        # has been copied, to avoid a race condition if they are copied first
        rsyncpaths = []
        for filename in ['consistent', 'current']:
            filepath = os.path.join(datadir, "repos", ".", filename)
            rsyncpaths.append(filepath)

        rsh_command = 'ssh -p %s -o StrictHostKeyChecking=no' % rsyncport
        try:
            sh.rsync('-avzR', '--delete-delay',
                     '-e', rsh_command,
                     rsyncpaths, rsyncdest)
        except Exception as e:
            # We are not raising exceptions for symlink rsyncs, these will
            # be fixed after another build.
            # logger.warn() is a deprecated alias for warning()
            logger.warning('Failed to rsync symlinks to %s, '
                           'got error %s' % (rsyncdest, e))
コード例 #16
0
def refreshrepo(url, path, branch="master", local=False, full_path=None):
    """Ensure a Git checkout of *url* exists at *path* on *branch*.

    :param url: remote URL of the Git repository
    :param path: local directory for the clone
    :param branch: branch (or tag) to check out, default "master"
    :param local: when True and the checkout already exists, skip all
        network operations (no fetch/checkout/reset)
    :param full_path: optional directory to run the checkout/reset in,
        instead of *path*
    :returns: list [branch, head_hash, head_commit_timestamp]
    :raises Exception: on clone/fetch failures, or on checkout failure
        when fallback to master is not allowed for this branch
    """
    config_options = getConfigOptions()
    logger.info("Getting %s to %s (%s)" % (url, path, branch))
    checkout_not_present = not os.path.exists(path)
    if checkout_not_present is True:
        try:
            sh.git.clone(url, path)
        except Exception as e:
            logger.error("Error cloning %s into %s: %s" % (url, path, e))
            raise

    elif local is False:
        # We need to cover a corner case here, where the repo URL has changed
        # since the last execution
        git = sh.git.bake(_cwd=path, _tty_out=False, _timeout=3600)
        try:
            remotes = git("remote", "-v").splitlines()
            fetch_url = None
            # for/else: the else branch runs only if no (fetch) remote
            # matched the expected URL (i.e. the loop never hit "break")
            for remote in remotes:
                if '(fetch)' in remote:
                    line = remote.split()
                    if line[1] == url:
                        break
                    else:
                        fetch_url = line[1]
            else:
                # URL changed, so remove directory
                logger.warning("URL for %s changed from %s to %s, "
                               "cleaning directory and cloning again"
                               % (path, fetch_url, url))
                shutil.rmtree(path, ignore_errors=True)
                try:
                    sh.git.clone(url, path)
                except Exception as e:
                    logger.error("Error cloning %s into %s: %s" % (url, path,
                                                                   e))
                    raise
        except Exception:
            # Something failed here, maybe this is a failed repo clone
            # Let's warn, remove directory and clone again
            logger.warning("Directory %s does not contain a valid Git repo, "
                           "cleaning directory and cloning again" % path)
            shutil.rmtree(path)
            sh.git.clone(url, path)

    git_path = full_path or path
    git = sh.git.bake(_cwd=git_path, _tty_out=False, _timeout=3600)

    if local is False or checkout_not_present is True:
        try:
            git.fetch("origin")
        except Exception:
            # Sometimes hg repositories get into a invalid state leaving them
            # unusable, to avoid a looping error just remove it so it will be
            # recloned.
            logger.error("Error fetching into %s, deleting." % (path))
            sh.sudo("rm", "-rf", path)
            raise
        try:
            git.checkout('-f', branch)
        except sh.ErrorReturnCode_1:
            # Checkout failed; decide whether to fall back to master
            for branch_re in config_options.nonfallback_branches:
                if re.match(branch_re, branch):
                    # Do not try fallback on selected branches
                    raise
            else:
                if config_options.fallback_to_master:
                    # Fallback to master
                    if branch.startswith("rpm-"):
                        branch = "rpm-master"
                    elif branch.endswith("-rdo"):
                        # Distgit branches can start with rpm- or end with -rdo
                        branch = "rpm-master"
                    else:
                        branch = "master"
                    logger.info("Falling back to %s" % branch)
                    git.checkout(branch)
                else:
                    logger.error("Branch %s for %s does not exist, and the "
                                 "configuration does not allow a fallback to "
                                 "master." % (branch, url))
                    raise
        try:
            git.reset("--hard", "origin/%s" % branch)
        except Exception:
            # Maybe it was a tag, not a branch
            git.reset("--hard", "%s" % branch)

    # "<hash> <committer-timestamp>" for HEAD, then prefix the branch name
    repoinfo = str(git.log("--pretty=format:%H %ct", "-1", ".")).\
        strip().split(" ")
    repoinfo.insert(0, branch)
    return repoinfo
コード例 #17
0
def getsourcebranch(package):
    """Return the source branch for *package*.

    Uses the package's own 'source-branch' entry when present, otherwise
    falls back to the globally configured source branch.
    """
    if 'source-branch' in package:
        return package['source-branch']
    return getConfigOptions().source
コード例 #18
0
 def test_get_config_option(self):
     """Check getConfigOptions() returns a ConfigOptions equal to one
     freshly built from self.config."""
     config = ConfigOptions(self.config)
     self.assertEqual(config, getConfigOptions())
コード例 #19
0
def process_build_result_rpm(status,
                             packages,
                             session,
                             packages_to_process,
                             dev_mode=False,
                             run_cmd=False,
                             stop=False,
                             build_env=None,
                             head_only=False,
                             consistent=False,
                             failures=0):
    """Record the outcome of one RPM build and update the repo state.

    :param status: 4-item list [commit, built_rpms, notes, exception]
        produced by the build worker
    :param packages: package dicts, used for reports / notify emails
    :param session: DB session used to persist commit, project and vote rows
    :param packages_to_process: forwarded to genreports()
    :param dev_mode: when False and the repo is consistent, a 'consistent'
        CIVote row is added
    :param run_cmd: when True, only compute an exit code (no DB/repo work)
    :param stop: when True, return as soon as the exit code is known
    :param build_env: env vars forwarded to submit_review() on failure
    :param head_only: forwarded to genreports()
    :param consistent: whether the repo was consistent before this result
    :param failures: count of failed packages, used in log messages
    :returns: exit code — 0 success, 1 failure, 2 retry scheduled
    """
    config_options = getConfigOptions()
    # Unpack the worker result
    commit = status[0]
    built_rpms = status[1]
    notes = status[2]
    exception = status[3]
    commit_hash = commit.commit_hash
    project = commit.project_name
    project_info = session.query(Project).filter(
        Project.project_name == project).first()
    if not project_info:
        project_info = Project(project_name=project, last_email=0)
    exit_code = 0

    # Custom-command mode: only the exit code matters
    if run_cmd:
        if exception is not None:
            exit_code = 1
            if stop:
                return exit_code
        return exit_code

    if exception is None:
        commit.status = "SUCCESS"
        commit.notes = notes
        commit.artifacts = ",".join(built_rpms)
    else:
        logger.error("Received exception %s" % exception)

        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        logfile = os.path.join(yumrepodir, "rpmbuild.log")

        # If the log file hasn't been created we add what we have
        # This happens if the rpm build script didn't run.
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)
        if not os.path.exists(logfile):
            with open(logfile, "w") as fp:
                fp.write(str(exception))

        # Known transient errors are retried up to maxretries times
        if (isknownerror(logfile) and
            (timesretried(project, session, commit_hash, commit.distro_hash) <
             config_options.maxretries)):
            logger.exception("Known error building packages for %s,"
                             " will retry later" % project)
            commit.status = "RETRY"
            commit.notes = str(exception)
            # do not switch from an error exit code to a retry
            # exit code
            if exit_code != 1:
                exit_code = 2
        else:
            exit_code = 1

            if not project_info.suppress_email():
                sendnotifymail(packages, commit)
                project_info.sent_email()
                session.add(project_info)

            # allow to submit a gerrit review only if the last build
            # was successful or non existent to avoid creating a gerrit
            # review for the same problem multiple times.
            if config_options.gerrit is not None:
                if build_env:
                    env_vars = list(build_env)
                else:
                    env_vars = []
                last_build = getLastProcessedCommit(session, project)
                if not last_build or last_build.status == 'SUCCESS':
                    try:
                        submit_review(commit, packages, env_vars)
                    except Exception:
                        logger.error('Unable to create review '
                                     'see review.log')
                else:
                    logger.info('Last build not successful '
                                'for %s' % project)
            commit.status = "FAILED"
            commit.notes = str(exception)
        if stop:
            return exit_code
    # Add commit to the session
    session.add(commit)

    genreports(packages, head_only, session, packages_to_process)
    # Export YAML file containing commit metadata
    export_commit_yaml(commit)
    # A failed repo sync also marks an otherwise successful commit as failed
    try:
        sync_repo(commit)
    except Exception as e:
        logger.error('Repo sync failed for project %s' % project)
        consistent = False  # If we were consistent before, we are not anymore
        if exit_code == 0:  # The commit was ok, so marking as failed
            exit_code = 1
            # We need to make the commit status be "failed"
            commit.status = "FAILED"
            commit.notes = str(e)
            session.add(commit)
            # And open a review if needed
            if config_options.gerrit is not None:
                if build_env:
                    env_vars = list(build_env)
                else:
                    env_vars = []
                try:
                    submit_review(commit, packages, env_vars)
                except Exception:
                    logger.error('Unable to create review ' 'see review.log')

    session.commit()

    # Generate the current and consistent symlinks
    if exception is None:
        dirnames = ['current']
        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        # NOTE(review): yumrepodir is already absolute here, so this join
        # returns yumrepodir unchanged — looks redundant, confirm intent
        yumrepodir_abs = os.path.join(datadir, yumrepodir)
        if consistent:
            dirnames.append('consistent')
        else:
            if config_options.use_components:
                logger.info('%d packages not built correctly for component'
                            ' %s: not updating the consistent symlink' %
                            (failures, commit.component))
            else:
                logger.info('%d packages not built correctly: not updating'
                            ' the consistent symlink' % failures)
        for dirname in dirnames:
            if config_options.use_components:
                target_repo_dir = os.path.join(datadir, "repos/component",
                                               commit.component, dirname)
                source_repo_dir = os.path.join(datadir, "repos/component",
                                               commit.component)
            else:
                target_repo_dir = os.path.join(datadir, "repos", dirname)
                source_repo_dir = os.path.join(datadir, "repos")
            # Create-then-rename so the symlink swap is atomic
            os.symlink(os.path.relpath(yumrepodir_abs, source_repo_dir),
                       target_repo_dir + "_")
            os.rename(target_repo_dir + "_", target_repo_dir)

        # If using components, synchronize the upper-level repo files
        if config_options.use_components:
            for dirname in dirnames:
                aggregate_repo_files(dirname,
                                     datadir,
                                     session,
                                     config_options.reponame,
                                     hashed_dir=True)

        # And synchronize them
        sync_symlinks(commit)

    if dev_mode is False:
        if consistent:
            # We have a consistent repo. Let's create a CIVote entry in the DB
            vote = CIVote(commit_id=commit.id,
                          ci_name='consistent',
                          ci_url='',
                          ci_vote=True,
                          ci_in_progress=False,
                          timestamp=int(commit.dt_build),
                          notes='',
                          component=commit.component)
            session.add(vote)
            session.commit()
    return exit_code
コード例 #20
0
def build_rpm_wrapper(commit, dev_mode, use_public, bootstrap, env_vars,
                      sequential):
    """Prepare a per-worker mock configuration and build the RPM for a commit.

    Copies the target mock template, injects the most recent local repo and
    (optionally) the delorean-deps / current delorean repos, namespaces the
    mock root per worker, exports the environment variables the build script
    expects, and finally runs build_rpm.sh through run().

    :param commit: Commit object being built.
    :param dev_mode: if True, include the current delorean repo in mock.
    :param use_public: if True, also include the current delorean repo.
    :param bootstrap: if True, pass repo_bootstrap to mock.
    :param env_vars: extra environment variables for the build script.
    :param sequential: if True, always use worker id 1 (no parallelism).
    """
    config_options = getConfigOptions()
    # Get the worker id
    if sequential is True:
        worker_id = 1
    else:
        # NOTE(review): _identity is a private multiprocessing attribute;
        # its first element is the 1-based index of this pool worker.
        worker_id = multiprocessing.current_process()._identity[0]

    # Each worker gets its own mock config file, e.g. dlrn-1.cfg
    mock_config = "dlrn-" + str(worker_id) + ".cfg"
    scriptsdir = os.path.realpath(config_options.scriptsdir)
    datadir = os.path.realpath(config_options.datadir)
    baseurl = config_options.baseurl
    templatecfg = os.path.join(scriptsdir, config_options.target + ".cfg")
    # Build the new config in a side file first; it only replaces the live
    # one at the end, and only if the contents actually changed.
    newcfg = os.path.join(datadir, mock_config + ".new")
    oldcfg = os.path.join(datadir, mock_config)
    shutil.copyfile(templatecfg, newcfg)

    # Add the most current repo, we may have dependencies in it
    if os.path.exists(os.path.join(datadir, "repos", "current", "repodata")):
        # Get the real path for the current repo, this could change during
        # parallel builds
        repolink = os.readlink(os.path.join(datadir, "repos", "current"))
        if repolink.startswith('/'):
            # absolute symlink
            repopath = repolink
        else:
            # relative symlink
            repopath = os.path.join(datadir, "repos", repolink)
        with open(newcfg, "r") as fp:
            contents = fp.readlines()
        # delete the last line which must be """
        contents = contents[:-1]
        # Append a [local] repo section pointing at the resolved current
        # repo, then restore the closing triple quote of the mock config.
        contents = contents + [
            "[local]\n", "name=local\n",
            "baseurl=file://%s\n" % repopath, "enabled=1\n", "gpgcheck=0\n",
            "priority=1\n", "\"\"\""
        ]
        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    # Set the worker id in the mock configuration, to allow multiple workers
    # for the same config
    with open(newcfg, "r") as fp:
        contents = fp.readlines()
    with open(newcfg, "w") as fp:
        for line in contents:
            if line.startswith("config_opts['root']"):
                # line[:-2] strips the trailing quote + newline ("'\n"),
                # so "...root'" becomes "...root-<worker_id>'"
                line = line[:-2] + "-" + str(worker_id) + "'\n"
            fp.write(line)

    # delete the last line which must be """
    # (kept only in memory here; it is written back below when the
    # delorean-deps repo contents are appended)
    with open(newcfg, "r") as fp:
        contents = fp.readlines()
    contents = contents[:-1]

    try:
        r = urlopen(baseurl + "/delorean-deps.repo")
        delorean_deps = True
    except Exception as e:
        # Missing delorean-deps.repo is not fatal: warn and build without it.
        logger.warning(
            "Could not open %s/delorean-deps.repo. If some dependent"
            " repositories must be included in the mock then check the"
            " baseurl value in projects.ini, and make sure the file can be"
            " downloaded." % baseurl)
        delorean_deps = False

    if delorean_deps:
        # Append the fetched repo definitions and restore the closing quotes
        contents.extend(map(lambda x: x.decode('utf8'), r.readlines()))
        contents = contents + ["\n\"\"\""]
        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    if dev_mode or use_public:
        # In dev/public mode the current delorean repo is mandatory; failing
        # to fetch it aborts the build.
        with open(newcfg, "r") as fp:
            contents = fp.readlines()

        # delete the last line which must be """
        contents = contents[:-1]
        try:
            r = urlopen(baseurl + "/current/delorean.repo")
        except Exception as e:
            logger.error("Could not open %s/current/delorean.repo. Check the "
                         "baseurl value in projects.ini, and make sure the "
                         "file can be downloaded." % baseurl)
            raise e

        contents.extend(map(lambda x: x.decode('utf8'), r.readlines()))
        contents.extend(["\n\"\"\""])

        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    # don't change dlrn.cfg if the content hasn't changed to prevent
    # mock from rebuilding its cache.
    try:
        if not filecmp.cmp(newcfg, oldcfg):
            shutil.copyfile(newcfg, oldcfg)
    except OSError:
        # oldcfg may not exist yet (first run); just install the new config
        shutil.copyfile(newcfg, oldcfg)

    # Set env variable for Copr configuration
    if config_options.coprid:
        os.environ['COPR_ID'] = config_options.coprid

    # Set release numbering option
    if config_options.release_numbering == '0.1.date.hash':
        os.environ['RELEASE_NUMBERING'] = '0.1.date.hash'
    else:
        os.environ['RELEASE_NUMBERING'] = '0.date.hash'

    # Set env variable for mock configuration
    os.environ['MOCK_CONFIG'] = mock_config

    # if bootstraping, set the appropriate mock config option
    if bootstrap is True:
        os.environ['ADDITIONAL_MOCK_OPTIONS'] = '-D repo_bootstrap 1'
    # Run any package-specific preprocessing hook before the build
    dlrn.shell.pkginfo.preprocess(package_name=commit.project_name)

    run(os.path.join(scriptsdir, "build_rpm.sh"), commit, env_vars, dev_mode,
        use_public, bootstrap)
コード例 #21
0
ファイル: repositories.py プロジェクト: hybrid-packages/DLRN
def refreshrepo(url, path, branch="master", local=False, full_path=None):
    """Clone or refresh a git checkout of *url* at *path* on *branch*.

    Handles three situations: a missing checkout (fresh clone), a checkout
    whose remote URL changed since the last run (wipe and re-clone), and a
    corrupted checkout (wipe and re-clone). Unless *local* is True, the
    checkout is fetched and hard-reset to origin/<branch> (or the tag
    <branch>). If the branch cannot be checked out and the configuration
    allows it, falls back to master/rpm-master.

    :param url: git remote URL.
    :param path: local checkout directory.
    :param branch: branch (or tag) to check out.
    :param local: if True, do not touch remotes for an existing checkout.
    :param full_path: optional subdirectory in which to run the checkout
        commands instead of *path*.
    :return: list [branch, commit_hash, commit_timestamp] of the resulting
        HEAD, as strings.
    """
    config_options = getConfigOptions()
    logger.info("Getting %s to %s (%s)" % (url, path, branch))
    checkout_not_present = not os.path.exists(path)
    if checkout_not_present is True:
        try:
            sh.git.clone(url, path)
        except Exception as e:
            logger.error("Error cloning %s into %s: %s" % (url, path, e))
            raise

    elif local is False:
        # We need to cover a corner case here, where the repo URL has changed
        # since the last execution
        git = sh.git.bake(_cwd=path, _tty_out=False, _timeout=3600)
        try:
            remotes = git("remote", "-v").splitlines()
            fetch_url = None
            # for/else: the else branch only runs if the loop completes
            # WITHOUT breaking, i.e. no fetch remote matched the wanted URL.
            for remote in remotes:
                if '(fetch)' in remote:
                    line = remote.split()
                    if line[1] == url:
                        break
                    else:
                        # remember the mismatching URL for the log message
                        fetch_url = line[1]
            else:
                # URL changed, so remove directory
                logger.warning("URL for %s changed from %s to %s, "
                               "cleaning directory and cloning again" %
                               (path, fetch_url, url))
                shutil.rmtree(path, ignore_errors=True)
                try:
                    sh.git.clone(url, path)
                except Exception as e:
                    logger.error("Error cloning %s into %s: %s" %
                                 (url, path, e))
                    raise
        except Exception:
            # Something failed here, maybe this is a failed repo clone
            # Let's warn, remove directory and clone again
            logger.warning("Directory %s does not contain a valid Git repo, "
                           "cleaning directory and cloning again" % path)
            shutil.rmtree(path)
            sh.git.clone(url, path)

    # Subsequent git commands may run in a subdirectory of the checkout
    git_path = full_path or path
    git = sh.git.bake(_cwd=git_path, _tty_out=False, _timeout=3600)

    if local is False or checkout_not_present is True:
        try:
            git.fetch("origin")
        except Exception:
            # Sometimes hg repositories get into a invalid state leaving them
            # unusable, to avoid a looping error just remove it so it will be
            # recloned.
            logger.error("Error fetching into %s, deleting." % (path))
            sh.sudo("rm", "-rf", path)
            raise
        try:
            git.checkout('-f', branch)
        except sh.ErrorReturnCode_1:
            if branch in ["master", "rpm-master"]:
                # Do not try fallback if already on master branch
                raise
            else:
                if config_options.fallback_to_master:
                    # Fallback to master
                    if branch.startswith("rpm-"):
                        branch = "rpm-master"
                    elif branch.endswith("-rdo"):
                        # Distgit branches can start with rpm- or end with -rdo
                        branch = "rpm-master"
                    else:
                        branch = "master"
                    logger.info("Falling back to %s" % branch)
                    git.checkout(branch)
                else:
                    logger.error("Branch %s for %s does not exist, and the "
                                 "configuration does not allow a fallback to "
                                 "master." % (branch, url))
                    raise
        try:
            git.reset("--hard", "origin/%s" % branch)
        except Exception:
            # Maybe it was a tag, not a branch
            git.reset("--hard", "%s" % branch)

    # "%H %ct" -> "<commit hash> <committer timestamp>"; prepend the branch
    # so the result is [branch, hash, timestamp]
    repoinfo = str(git.log("--pretty=format:%H %ct", "-1", ".")).\
        strip().split(" ")
    repoinfo.insert(0, branch)
    return repoinfo
コード例 #22
0
def sync_symlinks(commit):
    """Rsync the 'consistent' and 'current' symlinks to the remote server.

    This is run as a second pass, after the repo contents themselves have
    been synchronized, to avoid a race condition where a symlink would be
    copied before its target. Failures here are logged but never raised:
    the symlinks will be fixed by a later build.

    Fixes: logger.warn (deprecated since Python 3.3) replaced by
    logger.warning; triplicated rsync try/except boilerplate factored into
    a local helper.

    :param commit: Commit object whose component determines which symlinks
        are synchronized when components are in use.
    """
    config_options = getConfigOptions()
    rsyncdest = config_options.rsyncdest
    rsyncport = config_options.rsyncport
    datadir = os.path.realpath(config_options.datadir)
    reponame = config_options.reponame

    if rsyncdest == '':
        # No rsync destination configured, nothing to do
        return

    rsh_command = 'ssh -p %s -o StrictHostKeyChecking=no' % rsyncport

    def _rsync_best_effort(*rsync_args):
        # We are not raising exceptions for symlink rsyncs, these will
        # be fixed after another build
        try:
            sh.rsync('-avzR', '--delete-delay', '-e', rsh_command,
                     *rsync_args)
        except Exception as e:
            logger.warning('Failed to rsync symlinks to %s ,'
                           'got error %s' % (rsyncdest, e))

    # We want to sync the symlinks in a second pass, once all content
    # has been copied, to avoid a race condition it they are copied first
    rsyncpaths = []
    for filename in ['consistent', 'current']:
        if config_options.use_components:
            filepath = os.path.join(datadir, "repos", ".", "component",
                                    commit.component, filename)
        else:
            filepath = os.path.join(datadir, "repos", ".", filename)
        rsyncpaths.append(filepath)

    if config_options.use_components:
        # If using components, current and consistent on the top-level dir
        # are not symlinks, but full directories; sync them too, but keep
        # the aggregated repo metadata files for a separate, final pass.
        exclude_list = []
        extra_include_list = []
        for filename in ['consistent', 'current']:
            filepath = os.path.join(datadir, "repos", ".", filename)
            rsyncpaths.append(filepath)

            exclude_list.extend(['--exclude', '%s/%s.repo' %
                                 (filename, reponame)])
            exclude_list.extend(['--exclude', '%s/%s.repo.md5' %
                                 (filename, reponame)])
            exclude_list.extend(['--exclude', '%s/versions.csv' %
                                filename])
            extra_include_list.append('%s/%s.repo' % (filepath, reponame))
            extra_include_list.append('%s/%s.repo.md5' %
                                      (filepath, reponame))
            extra_include_list.append('%s/versions.csv' % filepath)

        # First, rsync everything except the top-level repo metadata files
        _rsync_best_effort(exclude_list, rsyncpaths, rsyncdest)
        # Then, the top-level repo metadata files themselves
        _rsync_best_effort(extra_include_list, rsyncdest)
    else:
        _rsync_best_effort(rsyncpaths, rsyncdest)
コード例 #23
0
ファイル: reporting.py プロジェクト: openstack-packages/DLRN
def genreports(packages, head_only, session, all_commits):
    """Generate the HTML/CSV reports published alongside the repos.

    Writes report.html (last 300 builds), status_report.html and
    status_report.csv (per-package failure status since the last success),
    queue.html (commits pending build), and copies styles.css into the
    repo directory.

    :param packages: list of package dicts from the packages config; the
        dicts are mutated here (last_build/first_failure/days keys added).
    :param head_only: if True, annotate the status report to indicate only
        HEAD commits were considered.
    :param session: DB session used for commit queries.
    :param all_commits: candidate commits used to compute the pending queue.
    """
    config_options = getConfigOptions()

    # Generate report of the last 300 package builds
    target = config_options.target
    src = config_options.source
    reponame = config_options.reponame
    templatedir = config_options.templatedir
    project_name = config_options.project_name
    datadir = config_options.datadir
    repodir = os.path.join(datadir, "repos")

    css_file = os.path.join(templatedir, 'stylesheets/styles.css')

    # create directories
    if not os.path.exists(repodir):
        os.makedirs(repodir)

    # configure jinja and filters
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader([templatedir]))
    jinja_env.filters["strftime"] = _jinja2_filter_strftime
    jinja_env.filters["get_commit_url"] = \
        partial(_jinja2_filter_get_commit_url, packages=packages)

    # generate build report
    commits = getCommits(session, without_status="RETRY", limit=300)
    jinja_template = jinja_env.get_template("report.j2")
    content = jinja_template.render(reponame=reponame,
                                    src=src,
                                    project_name=project_name,
                                    target=target,
                                    commits=commits)
    shutil.copy2(css_file, os.path.join(repodir, "styles.css"))
    report_file = os.path.join(repodir, "report.html")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Generate status report
    if head_only:
        msg = "(all commit not built)"
    else:
        msg = ""

    pkgs = []
    # Find the most recent successfull build
    # then report on failures since then
    for package in packages:
        name = package["name"]
        commits = getCommits(session, project=name, limit=1)

        # No builds
        if commits.count() == 0:
            continue

        pkgs.append(package)
        last_build = commits.first()
        package["last_build"] = last_build

        # last build was successul
        if last_build.status == "SUCCESS":
            continue

        # Retrieve last successful build
        commits = getCommits(session, project=name, with_status="SUCCESS",
                             limit=1)

        # No successful builds
        if commits.count() == 0:
            # days == -1 marks "never built successfully"
            commits = getCommits(session, project=name, with_status="FAILED",
                                 order="asc")
            package["first_failure"] = commits.first()
            package["days"] = -1
            continue

        last_success = commits.first()
        last_success_dt = last_success.dt_build

        # First failure strictly after the last success, and how many days
        # the package has been failing
        commits = getCommits(session, project=name, with_status="FAILED",
                             order="asc", limit=None)
        commits = commits.filter(Commit.dt_build > last_success_dt)
        package["first_failure"] = commits.first()
        package["days"] = (datetime.now() -
                           datetime.fromtimestamp(last_success_dt)).days

    pkgs = sorted(pkgs, key=itemgetter("name"))
    jinja_template = jinja_env.get_template("status_report.j2")
    content = jinja_template.render(msg=msg,
                                    reponame=reponame,
                                    src=src,
                                    project_name=project_name,
                                    target=target,
                                    pkgs=pkgs)

    report_file = os.path.join(repodir, "status_report.html")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Same data in CSV form for machine consumption
    jinja_template = jinja_env.get_template("status_report_csv.j2")
    content = jinja_template.render(msg=msg,
                                    reponame=reponame,
                                    src=src,
                                    project_name=project_name,
                                    target=target,
                                    pkgs=pkgs)

    report_file = os.path.join(repodir, "status_report.csv")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Create a report for the pending packages
    jinja_template = jinja_env.get_template("queue.j2")
    pending_commits = []
    for commit in all_commits:
        # A commit is pending if no non-RETRY build exists for the same
        # commit_hash/distro_hash pair
        old_commit = getCommits(session, project=commit.project_name,
                                without_status="RETRY", limit=None).filter(
            Commit.commit_hash == commit.commit_hash).filter(
            Commit.distro_hash == commit.distro_hash).first()
        if not old_commit:
            pending_commits.append(commit)

    content = jinja_template.render(reponame=reponame,
                                    src=src,
                                    target=target,
                                    commits=pending_commits)
    report_file = os.path.join(repodir, "queue.html")
    with open(report_file, "w") as fp:
        fp.write(content)
コード例 #24
0
def export_commit_yaml(commit):
    """Export a commit.yaml file with the commit metadata into the
    commit's sharded repo directory."""
    config_options = getConfigOptions()
    base_dir = os.path.realpath(config_options.datadir)
    repo_dir = os.path.join(base_dir, "repos", commit.getshardedcommitdir())
    yaml_path = os.path.join(repo_dir, 'commit.yaml')
    saveYAML_commit(commit, yaml_path)
コード例 #25
0
ファイル: shell.py プロジェクト: openstack-packages/DLRN
def post_build_rpm(status, packages, session, build_repo=True):
    """Finish a successful package build: write versions.csv, symlink the
    latest rpms of all other projects into this commit's repo and, if
    requested, run createrepo and write the .repo file.

    :param status: tuple (commit, built_rpms) for the just-built package.
    :param packages: list of package dicts for all projects.
    :param session: DB session used to look up other projects' builds.
    :param build_repo: if False, skip symlinking rpms and createrepo.
    :return: number of projects whose last processed build was not a
        SUCCESS (used by the caller to decide repo consistency).
    """
    config_options = getConfigOptions()
    commit = status[0]
    built_rpms = status[1]
    project_name = commit.project_name
    commit_hash = commit.commit_hash
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = os.path.join("repos", commit.getshardedcommitdir())
    yumrepodir_abs = os.path.join(datadir, yumrepodir)

    shafile = open(os.path.join(yumrepodir_abs, "versions.csv"), "w")
    shafile.write("Project,Source Repo,Source Sha,Dist Repo,Dist Sha,"
                  "Status,Last Success Timestamp,Pkg NVR\n")
    failures = 0

    for otherproject in packages:
        otherprojectname = otherproject["name"]
        if otherprojectname == project_name:
            # Output sha's this project
            dumpshas2file(shafile, commit, otherproject["upstream"],
                          otherproject["master-distgit"], "SUCCESS",
                          commit.dt_build, built_rpms)
            continue
        # Output sha's of all other projects represented in this repo
        last_success = getCommits(session, project=otherprojectname,
                                  with_status="SUCCESS",
                                  type=commit.type).first()
        last_processed = getCommits(session, project=otherprojectname,
                                    type=commit.type).first()

        if last_success:
            if build_repo:
                # Symlink each rpm of the last successful build into this
                # commit's repo dir, so the repo is self-contained
                for rpm in last_success.artifacts.split(","):
                    rpm_link_src = os.path.join(yumrepodir_abs,
                                                os.path.split(rpm)[1])
                    os.symlink(os.path.relpath(os.path.join(datadir, rpm),
                                               yumrepodir_abs), rpm_link_src)
            last = last_success
        else:
            last = last_processed
        if last:
            if last.artifacts:
                rpmlist = last.artifacts.split(",")
            else:
                rpmlist = []
            upstream = otherproject.get('upstream', '')
            # NOTE: the status written is the LAST PROCESSED build's status,
            # even when the shas come from the last successful one
            dumpshas2file(shafile, last, upstream,
                          otherproject["master-distgit"],
                          last_processed.status, last.dt_build,
                          rpmlist)
            if last_processed.status != 'SUCCESS':
                failures += 1
        else:
            # Project never processed at all counts as a failure
            failures += 1
    shafile.close()

    if build_repo:
        # Use createrepo_c when available
        try:
            from sh import createrepo_c
            sh.createrepo = createrepo_c
        except ImportError:
            pass

        if config_options.include_srpm_in_repo:
            sh.createrepo(yumrepodir_abs)
        else:
            # exclude source rpms from the repo metadata
            sh.createrepo('-x', '*.src.rpm', yumrepodir_abs)

        # Write the yum .repo file pointing back at this commit's dir
        with open(os.path.join(
                yumrepodir_abs, "%s.repo" % config_options.reponame),
                "w") as fp:
            fp.write("[%s]\nname=%s-%s-%s\nbaseurl=%s/%s\nenabled=1\n"
                     "gpgcheck=0\npriority=1\n" % (
                         config_options.reponame,
                         config_options.reponame,
                         project_name, commit_hash,
                         config_options.baseurl,
                         commit.getshardedcommitdir()))

    return failures
コード例 #26
0
ファイル: shell.py プロジェクト: openstack-packages/DLRN
def process_build_result_rpm(
        status, packages, session, packages_to_process,
        dev_mode=False, run_cmd=False, stop=False,
        build_env=None, head_only=False, consistent=False,
        failures=0):
    """Process the outcome of one RPM build and update all derived state.

    Records the build result in the DB (SUCCESS / FAILED / RETRY), sends
    failure notifications and optionally opens a Gerrit review, regenerates
    reports, exports commit.yaml, rsyncs the repo, updates the
    current/consistent symlinks and stores a 'consistent' CIVote.

    :param status: tuple (commit, built_rpms, notes, exception).
    :param packages: list of package dicts.
    :param session: DB session.
    :param packages_to_process: commits used for the pending-queue report.
    :param dev_mode: if True, skip the CIVote creation.
    :param run_cmd: if True, only report the exit code (external command
        mode), no DB/report updates.
    :param stop: if True, return as soon as an error exit code is known.
    :param build_env: extra environment variables for a Gerrit review job.
    :param head_only: forwarded to genreports.
    :param consistent: whether the repo was consistent before this build.
    :param failures: number of failed packages (for logging only).
    :return: exit code — 0 success, 1 failure, 2 known error queued
        for retry.
    """
    config_options = getConfigOptions()
    commit = status[0]
    built_rpms = status[1]
    notes = status[2]
    exception = status[3]
    commit_hash = commit.commit_hash
    project = commit.project_name
    project_info = session.query(Project).filter(
        Project.project_name == project).first()
    if not project_info:
        project_info = Project(project_name=project, last_email=0)
    exit_code = 0

    if run_cmd:
        # External-command mode: only translate exception into exit code
        if exception is not None:
            exit_code = 1
            if stop:
                return exit_code
        return exit_code

    if exception is None:
        commit.status = "SUCCESS"
        commit.notes = notes
        commit.artifacts = ",".join(built_rpms)
    else:
        logger.error("Received exception %s" % exception)

        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        logfile = os.path.join(yumrepodir,
                               "rpmbuild.log")
        # Known, transient build errors are retried up to maxretries times
        if (isknownerror(logfile) and
            (timesretried(project, session, commit_hash, commit.distro_hash) <
             config_options.maxretries)):
            logger.exception("Known error building packages for %s,"
                             " will retry later" % project)
            commit.status = "RETRY"
            commit.notes = str(exception)
            # do not switch from an error exit code to a retry
            # exit code
            if exit_code != 1:
                exit_code = 2
        else:
            exit_code = 1
            # If the log file hasn't been created we add what we have
            # This happens if the rpm build script didn't run.
            if not os.path.exists(yumrepodir):
                os.makedirs(yumrepodir)
            if not os.path.exists(logfile):
                with open(logfile, "w") as fp:
                    fp.write(str(exception))

            if not project_info.suppress_email():
                sendnotifymail(packages, commit)
                project_info.sent_email()
                session.add(project_info)

            # allow to submit a gerrit review only if the last build
            # was successful or non existent to avoid creating a gerrit
            # review for the same problem multiple times.
            if config_options.gerrit is not None:
                if build_env:
                    env_vars = list(build_env)
                else:
                    env_vars = []
                last_build = getLastProcessedCommit(session, project)
                if not last_build or last_build.status == 'SUCCESS':
                    try:
                        submit_review(commit, packages, env_vars)
                    except Exception:
                        logger.error('Unable to create review '
                                     'see review.log')
                else:
                    logger.info('Last build not successful '
                                'for %s' % project)
            commit.status = "FAILED"
            commit.notes = str(exception)
        if stop:
            return exit_code
    # Add commit to the session
    session.add(commit)

    genreports(packages, head_only, session, packages_to_process)
    # Export YAML file containing commit metadata
    export_commit_yaml(commit)
    try:
        sync_repo(commit)
    except Exception as e:
        logger.error('Repo sync failed for project %s' % project)
        consistent = False  # If we were consistent before, we are not anymore
        if exit_code == 0:  # The commit was ok, so marking as failed
            exit_code = 1
            # We need to make the commit status be "failed"
            commit.status = "FAILED"
            commit.notes = str(e)
            session.add(commit)
            # And open a review if needed
            if config_options.gerrit is not None:
                if build_env:
                    env_vars = list(build_env)
                else:
                    env_vars = []
                try:
                    submit_review(commit, packages, env_vars)
                except Exception:
                    logger.error('Unable to create review '
                                 'see review.log')

    session.commit()

    # Generate the current and consistent symlinks
    if exception is None:
        dirnames = ['current']
        datadir = os.path.realpath(config_options.datadir)
        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        yumrepodir_abs = os.path.join(datadir, yumrepodir)
        if consistent:
            dirnames.append('consistent')
        else:
            logger.info('%d packages not built correctly: not updating'
                        ' the consistent symlink' % failures)
        for dirname in dirnames:
            # Atomic symlink replacement: create <name>_ then rename over
            # the old symlink
            target_repo_dir = os.path.join(datadir, "repos", dirname)
            os.symlink(os.path.relpath(yumrepodir_abs,
                                       os.path.join(datadir, "repos")),
                       target_repo_dir + "_")
            os.rename(target_repo_dir + "_", target_repo_dir)

        # And synchronize them
        sync_symlinks(commit)

    if dev_mode is False:
        if consistent:
            # We have a consistent repo. Let's create a CIVote entry in the DB
            vote = CIVote(commit_id=commit.id, ci_name='consistent',
                          ci_url='', ci_vote=True, ci_in_progress=False,
                          timestamp=int(commit.dt_build), notes='')
            session.add(vote)
            session.commit()
    return exit_code
コード例 #27
0
def post_build_rpm(status, packages, session, build_repo=True):
    """Finish a successful package build (component-aware variant).

    Writes versions.csv (including component and extended-sha columns),
    symlinks the latest rpms of all other projects in the same component
    into this commit's repo and, if requested, runs createrepo and writes
    the .repo file.

    :param status: tuple (commit, built_rpms) for the just-built package.
    :param packages: list of package dicts for all projects.
    :param session: DB session used to look up other projects' builds.
    :param build_repo: if False, skip symlinking rpms and createrepo.
    :return: number of same-component projects whose last processed build
        was not a SUCCESS.
    """
    config_options = getConfigOptions()
    commit = status[0]
    built_rpms = status[1]
    project_name = commit.project_name
    commit_hash = commit.commit_hash
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = os.path.join("repos", commit.getshardedcommitdir())
    yumrepodir_abs = os.path.join(datadir, yumrepodir)

    shafile = open(os.path.join(yumrepodir_abs, "versions.csv"), "w")
    shafile.write("Project,Source Repo,Source Sha,Dist Repo,Dist Sha,"
                  "Status,Last Success Timestamp,Component,Extended Sha,"
                  "Pkg NVR\n")
    failures = 0

    for otherproject in packages:
        if (config_options.use_components and 'component' in otherproject
                and otherproject['component'] != commit.component):
            # Only dump information and create symlinks for the same component
            continue

        otherprojectname = otherproject["name"]
        if otherprojectname == project_name:
            # Output sha's this project
            dumpshas2file(shafile, commit, otherproject["upstream"],
                          otherproject["master-distgit"], "SUCCESS",
                          commit.dt_build, commit.component, built_rpms)
            continue
        # Output sha's of all other projects represented in this repo
        last_success = getCommits(session,
                                  project=otherprojectname,
                                  with_status="SUCCESS",
                                  type=commit.type).first()
        last_processed = getCommits(session,
                                    project=otherprojectname,
                                    type=commit.type).first()

        if last_success:
            if build_repo:
                # Symlink each rpm of the last successful build into this
                # commit's repo dir, so the repo is self-contained
                for rpm in last_success.artifacts.split(","):
                    rpm_link_src = os.path.join(yumrepodir_abs,
                                                os.path.split(rpm)[1])
                    os.symlink(
                        os.path.relpath(os.path.join(datadir, rpm),
                                        yumrepodir_abs), rpm_link_src)
            last = last_success
        else:
            last = last_processed
        if last:
            if last.artifacts:
                rpmlist = last.artifacts.split(",")
            else:
                rpmlist = []
            upstream = otherproject.get('upstream', '')
            # NOTE: the status written is the LAST PROCESSED build's status,
            # even when the shas come from the last successful one
            dumpshas2file(shafile, last, upstream,
                          otherproject["master-distgit"],
                          last_processed.status, last.dt_build,
                          commit.component, rpmlist)
            if last_processed.status != 'SUCCESS':
                failures += 1
        else:
            # Project never processed at all counts as a failure
            failures += 1
    shafile.close()

    if build_repo:
        # Use createrepo_c when available
        try:
            from sh import createrepo_c
            sh.createrepo = createrepo_c
        except ImportError:
            pass

        if config_options.include_srpm_in_repo:
            sh.createrepo(yumrepodir_abs)
        else:
            # exclude source rpms from the repo metadata
            sh.createrepo('-x', '*.src.rpm', yumrepodir_abs)

        # Write the yum .repo file; component repos get a namespaced repo id
        with open(
                os.path.join(yumrepodir_abs,
                             "%s.repo" % config_options.reponame), "w") as fp:
            if config_options.use_components:
                repo_id = "%s-component-%s" % (config_options.reponame,
                                               commit.component)
            else:
                repo_id = config_options.reponame
            fp.write(
                "[%s]\nname=%s-%s-%s\nbaseurl=%s/%s\nenabled=1\n"
                "gpgcheck=0\npriority=1\n" %
                (repo_id, config_options.reponame, project_name, commit_hash,
                 config_options.baseurl, commit.getshardedcommitdir()))

    return failures
コード例 #28
0
ファイル: test_config.py プロジェクト: hybrid-packages/DLRN
 def test_get_config_option(self):
     """getConfigOptions() must return the ConfigOptions object that was
     last constructed."""
     expected = ConfigOptions(self.config)
     actual = getConfigOptions()
     self.assertEqual(expected, actual)
コード例 #29
0
def getdistrobranch(package):
    """Return the distgit branch for *package*.

    Prefers the package-specific 'distro-branch' entry; falls back to the
    globally configured distro branch when the package does not define one.
    """
    try:
        # Package-level override wins
        return package['distro-branch']
    except KeyError:
        # No override: use the global configuration
        return getConfigOptions().distro
コード例 #30
0
ファイル: repositories.py プロジェクト: hybrid-packages/DLRN
def getdistrobranch(package):
    """Return the distgit branch for *package*.

    The package-specific 'distro-branch' entry takes precedence over the
    globally configured distro branch.
    """
    if 'distro-branch' not in package:
        # No per-package override: fall back to the global configuration
        config_options = getConfigOptions()
        return config_options.distro
    return package['distro-branch']
コード例 #31
0
def genreports(packages, head_only, session, all_commits):
    """Generate the report, status and queue pages for the repo dir.

    Renders report.html (last 300 builds), status_report.html and
    status_report.csv (per-package status plus failure ages) and
    queue.html (commits still pending a build) into <datadir>/repos,
    copying the stylesheet next to them.

    :param packages: list of package dictionaries
    :param head_only: when True, a note is added to the status report
        saying only branch heads were built
    :param session: database session used for commit queries
    :param all_commits: candidate commits checked against the DB to
        compute the pending-commits queue
    """
    config_options = getConfigOptions()

    # Generate report of the last 300 package builds
    target = config_options.target
    src = config_options.source
    reponame = config_options.reponame
    templatedir = config_options.templatedir
    project_name = config_options.project_name
    datadir = config_options.datadir
    repodir = os.path.join(datadir, "repos")

    css_file = os.path.join(templatedir, 'stylesheets/styles.css')

    # create directories
    if not os.path.exists(repodir):
        os.makedirs(repodir)

    # configure jinja and filters
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader([templatedir]))
    jinja_env.filters["strftime"] = _jinja2_filter_strftime
    jinja_env.filters["get_commit_url"] = \
        partial(_jinja2_filter_get_commit_url, packages=packages)

    # generate build report
    commits = getCommits(session, without_status="RETRY", limit=300)
    jinja_template = jinja_env.get_template("report.j2")
    content = jinja_template.render(reponame=reponame,
                                    src=src,
                                    project_name=project_name,
                                    target=target,
                                    commits=commits)
    shutil.copy2(css_file, os.path.join(repodir, "styles.css"))
    report_file = os.path.join(repodir, "report.html")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Generate status report
    if head_only:
        msg = "(all commit not built)"
    else:
        msg = ""

    pkgs = []
    # Find the most recent successful build,
    # then report on failures since then
    for package in packages:
        name = package["name"]
        commits = getCommits(session, project=name, limit=1)

        # No builds at all for this package: skip it entirely
        if commits.count() == 0:
            continue

        pkgs.append(package)
        last_build = commits.first()
        package["last_build"] = last_build

        # last build was successful, nothing more to report
        if last_build.status == "SUCCESS":
            continue

        # Retrieve last successful build
        commits = getCommits(session,
                             project=name,
                             with_status="SUCCESS",
                             limit=1)

        # No successful builds ever: report from the first failure,
        # flagging the package with days == -1
        if commits.count() == 0:
            commits = getCommits(session,
                                 project=name,
                                 with_status="FAILED",
                                 order="asc")
            package["first_failure"] = commits.first()
            package["days"] = -1
            continue

        last_success = commits.first()
        last_success_dt = last_success.dt_build

        # First failure after the last success, plus how many days the
        # package has been failing since then
        commits = getCommits(session,
                             project=name,
                             with_status="FAILED",
                             order="asc",
                             limit=None)
        commits = commits.filter(Commit.dt_build > last_success_dt)
        package["first_failure"] = commits.first()
        package["days"] = (datetime.now() -
                           datetime.fromtimestamp(last_success_dt)).days

    pkgs = sorted(pkgs, key=itemgetter("name"))
    jinja_template = jinja_env.get_template("status_report.j2")
    content = jinja_template.render(msg=msg,
                                    reponame=reponame,
                                    src=src,
                                    project_name=project_name,
                                    target=target,
                                    pkgs=pkgs)

    report_file = os.path.join(repodir, "status_report.html")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Same status data, rendered as CSV
    jinja_template = jinja_env.get_template("status_report_csv.j2")
    content = jinja_template.render(msg=msg,
                                    reponame=reponame,
                                    src=src,
                                    project_name=project_name,
                                    target=target,
                                    pkgs=pkgs)

    report_file = os.path.join(repodir, "status_report.csv")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Create a report for the pending packages
    jinja_template = jinja_env.get_template("queue.j2")
    pending_commits = []
    for commit in all_commits:
        # A commit is pending when no non-RETRY build exists for the
        # same (commit_hash, distro_hash, extended_hash) triple
        old_commit = getCommits(
            session,
            project=commit.project_name,
            without_status="RETRY",
            limit=None).filter(
                Commit.commit_hash == commit.commit_hash).filter(
                    Commit.distro_hash == commit.distro_hash).filter(
                        Commit.extended_hash == commit.extended_hash).first()
        if not old_commit:
            pending_commits.append(commit)

    content = jinja_template.render(reponame=reponame,
                                    src=src,
                                    target=target,
                                    commits=pending_commits)
    report_file = os.path.join(repodir, "queue.html")
    with open(report_file, "w") as fp:
        fp.write(content)
コード例 #32
0
ファイル: repositories.py プロジェクト: hybrid-packages/DLRN
def getsourcebranch(package):
    """Return the source git branch to follow for *package*.

    The package's own 'source-branch' entry wins over the globally
    configured source branch.
    """
    try:
        return package['source-branch']
    except KeyError:
        return getConfigOptions().source
コード例 #33
0
ファイル: build.py プロジェクト: openstack-packages/DLRN
def build_rpm_wrapper(commit, dev_mode, use_public, bootstrap, env_vars,
                      sequential, version_from=None):
    """Build the SRPM and RPM for a commit using the configured driver.

    Prepares a per-worker mock configuration (injecting the current
    repo as a local yum repo and, when reachable, the delorean-deps
    and public current repos), exports the environment variables the
    build scripts expect, runs build_srpm.sh and finally hands the
    binary build to the configured build driver.

    :param commit: Commit object to build
    :param dev_mode: when True, add the public current repo to mock
    :param use_public: when True, add the public current repo to mock
    :param bootstrap: when True, pass the repo_bootstrap macro to mock
    :param env_vars: extra environment variables for build_srpm.sh
    :param sequential: when True, always use worker id 1
    :param version_from: optional version source for build_srpm.sh
    """
    config_options = getConfigOptions()
    # Get the worker id
    if sequential is True:
        worker_id = 1
    else:
        worker_id = multiprocessing.current_process()._identity[0]

    # Retrieve build driver
    build_driver = config_options.build_driver
    buildrpm = import_object(build_driver, cfg_options=config_options)

    # FIXME(hguemar): move all the mock config logic to driver
    mock_config = "dlrn-" + str(worker_id) + ".cfg"
    scriptsdir = os.path.realpath(config_options.scriptsdir)
    configdir = os.path.realpath(config_options.configdir)
    datadir = os.path.realpath(config_options.datadir)
    baseurl = config_options.baseurl
    templatecfg = os.path.join(configdir, config_options.target + ".cfg")
    newcfg = os.path.join(datadir, mock_config + ".new")
    oldcfg = os.path.join(datadir, mock_config)
    shutil.copyfile(templatecfg, newcfg)

    if (config_options.build_driver ==
            'dlrn.drivers.kojidriver.KojiBuildDriver' and
            config_options.fetch_mock_config):
        buildrpm.write_mock_config(oldcfg)

    # Add the most current repo, we may have dependencies in it
    if os.path.exists(os.path.join(datadir, "repos", "current", "repodata")):
        # Get the real path for the current repo, this could change during
        # parallel builds
        repolink = os.readlink(os.path.join(datadir, "repos", "current"))
        if repolink.startswith('/'):
            # absolute symlink
            repopath = repolink
        else:
            # relative symlink
            repopath = os.path.join(datadir, "repos", repolink)
        with open(newcfg, "r") as fp:
            contents = fp.readlines()
        # delete the last line which must be """
        contents = contents[:-1]
        contents = contents + ["[local]\n", "name=local\n",
                               "baseurl=file://%s\n" % repopath,
                               "enabled=1\n", "gpgcheck=0\n", "priority=1\n",
                               "\"\"\""]
        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    # Set the worker id in the mock configuration, to allow multiple workers
    # for the same config
    with open(newcfg, "r") as fp:
        contents = fp.readlines()
    with open(newcfg, "w") as fp:
        for line in contents:
            if line.startswith("config_opts['root']"):
                line = line[:-2] + "-" + str(worker_id) + "'\n"
            fp.write(line)

    # delete the last line which must be """
    with open(newcfg, "r") as fp:
        contents = fp.readlines()
    contents = contents[:-1]

    try:
        if not baseurl:
            raise Exception("No baseurl defined")
        r = urlopen(baseurl + "/delorean-deps.repo")
        delorean_deps = True
    except Exception:
        # The exception details are not needed here; log and fall back
        # to a build without the deps repo
        logger.warning(
            "Could not open %s/delorean-deps.repo. If some dependent"
            " repositories must be included in the mock then check the"
            " baseurl value in projects.ini, and make sure the file can be"
            " downloaded." % baseurl)
        delorean_deps = False

    if delorean_deps:
        contents.extend(map(lambda x: x.decode('utf8'), r.readlines()))
        # Close the response once consumed to avoid leaking the socket
        r.close()
        contents = contents + ["\n\"\"\""]
        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    if dev_mode or use_public:
        with open(newcfg, "r") as fp:
            contents = fp.readlines()

        # delete the last line which must be """
        contents = contents[:-1]
        try:
            r = urlopen(baseurl + "/current/delorean.repo")
        except Exception:
            logger.error("Could not open %s/current/delorean.repo. Check the "
                         "baseurl value in projects.ini, and make sure the "
                         "file can be downloaded." % baseurl)
            # Re-raise the original exception with its traceback intact
            raise

        contents.extend(map(lambda x: x.decode('utf8'), r.readlines()))
        # Close the response once consumed to avoid leaking the socket
        r.close()
        contents.extend(["\n\"\"\""])

        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    # don't change dlrn.cfg if the content hasn't changed to prevent
    # mock from rebuilding its cache.
    try:
        if not filecmp.cmp(newcfg, oldcfg):
            if (config_options.build_driver ==
                    'dlrn.drivers.kojidriver.KojiBuildDriver'):
                # With a fetched mock config, oldcfg is authoritative
                if not config_options.fetch_mock_config:
                    shutil.copyfile(newcfg, oldcfg)
            else:
                shutil.copyfile(newcfg, oldcfg)
    except OSError:
        # oldcfg does not exist yet; create it
        shutil.copyfile(newcfg, oldcfg)

    # Set env variable for Copr configuration
    if (config_options.build_driver ==
            'dlrn.drivers.coprdriver.CoprBuildDriver' and
            config_options.coprid):
        os.environ['COPR_ID'] = config_options.coprid

    # Set release numbering option
    if config_options.release_numbering == '0.1.date.hash':
        os.environ['RELEASE_NUMBERING'] = '0.1.date.hash'
    else:
        os.environ['RELEASE_NUMBERING'] = '0.date.hash'

    # Set env variable for mock configuration
    os.environ['MOCK_CONFIG'] = mock_config

    # if bootstrapping, set the appropriate mock config option
    if bootstrap is True:
        additional_mock_options = '-D repo_bootstrap 1'
    else:
        additional_mock_options = None

    dlrn.shell.pkginfo.preprocess(package_name=commit.project_name,
                                  commit_hash=commit.commit_hash)

    if (config_options.pkginfo_driver ==
            'dlrn.drivers.gitrepo.GitRepoDriver' and
            config_options.keep_tarball):
        if commit.commit_branch == config_options.source:
            # We are following the master tarball here, use it
            os.environ['DLRN_KEEP_TARBALL'] = '1'
        else:
            if 'DLRN_KEEP_TARBALL' in os.environ:
                del os.environ['DLRN_KEEP_TARBALL']

    if config_options.keep_changelog:
        os.environ['DLRN_KEEP_CHANGELOG'] = '1'

    if (config_options.pkginfo_driver == 'dlrn.drivers.local.LocalDriver'):
        os.environ['DLRN_KEEP_SPEC_AS_IS'] = '1'

    # We may do some git repo manipulation, so we need to make sure the
    # right commit is there
    os.environ['DLRN_SOURCE_COMMIT'] = commit.commit_hash

    run(os.path.join(scriptsdir, "build_srpm.sh"), commit, env_vars,
        dev_mode, use_public, bootstrap, version_from=version_from)

    # SRPM is built, now build the RPM using the driver
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = _get_yumrepodir(commit)
    yumrepodir_abs = os.path.join(datadir, yumrepodir)

    # If we are using the downstream driver, write the reference commit
    if (config_options.pkginfo_driver ==
            'dlrn.drivers.downstream.DownstreamInfoDriver'):
        dlrn.shell.pkginfo._write_reference_commit(yumrepodir_abs)

    buildrpm.build_package(output_directory=yumrepodir_abs,
                           additional_mock_opts=additional_mock_options,
                           package_name=commit.project_name,
                           commit=commit)
コード例 #34
0
def build_rpm_wrapper(commit,
                      dev_mode,
                      use_public,
                      bootstrap,
                      env_vars,
                      sequential,
                      version_from=None):
    """Build the SRPM and RPM for a commit using the configured driver.

    Prepares a per-worker mock configuration (injecting the current
    .repo file and, when reachable, the delorean-deps and public
    current repos), exports the environment variables the build
    scripts expect, runs build_srpm.sh and finally hands the binary
    build to the configured build driver.

    :param commit: Commit object to build
    :param dev_mode: when True, add the public current repo to mock
    :param use_public: when True, add the public current repo to mock
    :param bootstrap: when True, pass the repo_bootstrap macro to mock
    :param env_vars: extra environment variables for build_srpm.sh
    :param sequential: when True, always use worker id 1
    :param version_from: optional version source for build_srpm.sh
    """
    config_options = getConfigOptions()
    # Get the worker id
    if sequential is True:
        worker_id = 1
    else:
        worker_id = multiprocessing.current_process()._identity[0]

    # Retrieve build driver
    build_driver = config_options.build_driver
    buildrpm = import_object(build_driver, cfg_options=config_options)

    # FIXME(hguemar): move all the mock config logic to driver
    mock_config = "dlrn-" + str(worker_id) + ".cfg"
    scriptsdir = os.path.realpath(config_options.scriptsdir)
    configdir = os.path.realpath(config_options.configdir)
    datadir = os.path.realpath(config_options.datadir)
    baseurl = config_options.baseurl
    templatecfg = os.path.join(configdir, config_options.target + ".cfg")
    newcfg = os.path.join(datadir, mock_config + ".new")
    oldcfg = os.path.join(datadir, mock_config)
    shutil.copyfile(templatecfg, newcfg)

    if (config_options.build_driver
            == 'dlrn.drivers.kojidriver.KojiBuildDriver'
            and config_options.fetch_mock_config):
        buildrpm.write_mock_config(oldcfg)

    # Add the most current repo, we may have dependencies in it
    current_repo = os.path.join(datadir, "repos", "current",
                                "%s.repo" % config_options.reponame)
    if os.path.exists(current_repo):
        # Read the .repo file
        with open(current_repo) as fp:
            current_repo_contents = fp.readlines()
        with open(newcfg, "r") as fp:
            contents = fp.readlines()
        # delete the last line which must be """
        contents = contents[:-1]
        contents = contents + current_repo_contents + ["\"\"\""]
        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    # Set the worker id in the mock configuration, to allow multiple workers
    # for the same config
    with open(newcfg, "r") as fp:
        contents = fp.readlines()
    with open(newcfg, "w") as fp:
        for line in contents:
            if line.startswith("config_opts['root']"):
                line = line[:-2] + "-" + str(worker_id) + "'\n"
            fp.write(line)

    # delete the last line which must be """
    with open(newcfg, "r") as fp:
        contents = fp.readlines()
    contents = contents[:-1]

    try:
        if not baseurl:
            raise Exception("No baseurl defined")
        r = urlopen(baseurl + "/delorean-deps.repo")
        delorean_deps = True
    except Exception:
        logger.warning(
            "Could not open %s/delorean-deps.repo. If some dependent"
            " repositories must be included in the mock then check the"
            " baseurl value in projects.ini, and make sure the file can be"
            " downloaded." % baseurl)
        delorean_deps = False

    if delorean_deps:
        # NOTE(review): the urlopen response r is never closed after
        # being consumed; consider contextlib.closing
        contents.extend(map(lambda x: x.decode('utf8'), r.readlines()))
        contents = contents + ["\n\"\"\""]
        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    if dev_mode or use_public:
        with open(newcfg, "r") as fp:
            contents = fp.readlines()

        # delete the last line which must be """
        contents = contents[:-1]
        try:
            r = urlopen(baseurl + "/current/delorean.repo")
        except Exception as e:
            logger.error("Could not open %s/current/delorean.repo. Check the "
                         "baseurl value in projects.ini, and make sure the "
                         "file can be downloaded." % baseurl)
            raise e

        contents.extend(map(lambda x: x.decode('utf8'), r.readlines()))
        contents.extend(["\n\"\"\""])

        with open(newcfg, "w") as fp:
            fp.writelines(contents)

    # don't change dlrn.cfg if the content hasn't changed to prevent
    # mock from rebuilding its cache.
    try:
        if not filecmp.cmp(newcfg, oldcfg):
            if (config_options.build_driver ==
                    'dlrn.drivers.kojidriver.KojiBuildDriver'):
                # With a fetched mock config, oldcfg is authoritative
                if not config_options.fetch_mock_config:
                    shutil.copyfile(newcfg, oldcfg)
            else:
                shutil.copyfile(newcfg, oldcfg)
    except OSError:
        # oldcfg does not exist yet; create it
        shutil.copyfile(newcfg, oldcfg)

    # Set env variable for Copr configuration
    if (config_options.build_driver
            == 'dlrn.drivers.coprdriver.CoprBuildDriver'
            and config_options.coprid):
        os.environ['COPR_ID'] = config_options.coprid

    # Set release numbering option
    if config_options.release_numbering == '0.1.date.hash':
        os.environ['RELEASE_NUMBERING'] = '0.1.date.hash'
    elif config_options.release_numbering == 'minor.date.hash':
        os.environ['RELEASE_NUMBERING'] = 'minor.date.hash'
        os.environ['RELEASE_MINOR'] = config_options.release_minor
    else:
        os.environ['RELEASE_NUMBERING'] = '0.date.hash'

    # Set env variable for mock configuration
    os.environ['MOCK_CONFIG'] = mock_config

    # if bootstrapping, set the appropriate mock config option
    if bootstrap is True:
        additional_mock_options = '-D repo_bootstrap 1'
    else:
        additional_mock_options = None

    dlrn.shell.pkginfo.preprocess(package_name=commit.project_name,
                                  commit_hash=commit.commit_hash)

    if (config_options.pkginfo_driver == 'dlrn.drivers.gitrepo.GitRepoDriver'
            and config_options.keep_tarball):
        if (commit.commit_branch == config_options.source
                or commit.commit_branch == 'master'):
            # We are following the master tarball here, use it
            os.environ['DLRN_KEEP_TARBALL'] = '1'
        else:
            if 'DLRN_KEEP_TARBALL' in os.environ:
                del os.environ['DLRN_KEEP_TARBALL']

    if config_options.keep_changelog:
        os.environ['DLRN_KEEP_CHANGELOG'] = '1'

    if (config_options.pkginfo_driver == 'dlrn.drivers.local.LocalDriver'):
        os.environ['DLRN_KEEP_SPEC_AS_IS'] = '1'

    # We may do some git repo manipulation, so we need to make sure the
    # right commit is there
    os.environ['DLRN_SOURCE_COMMIT'] = commit.commit_hash

    run(os.path.join(scriptsdir, "build_srpm.sh"),
        commit,
        env_vars,
        dev_mode,
        use_public,
        bootstrap,
        version_from=version_from)

    # SRPM is built, now build the RPM using the driver
    datadir = os.path.realpath(config_options.datadir)
    yumrepodir = _get_yumrepodir(commit)
    yumrepodir_abs = os.path.join(datadir, yumrepodir)

    # If we are using the downstream driver, write the reference commit
    if (config_options.pkginfo_driver ==
            'dlrn.drivers.downstream.DownstreamInfoDriver'):
        dlrn.shell.pkginfo._write_reference_commit(yumrepodir_abs)

    buildrpm.build_package(output_directory=yumrepodir_abs,
                           additional_mock_opts=additional_mock_options,
                           package_name=commit.project_name,
                           commit=commit)