Example #1
def migrate_db(source_string, dest_string):
    osfd, tmpfilename = mkstemp()
    os.close(osfd)  # only the file name is used below; don't leak the fd
    session = getSession(source_string)
    saveYAML(session, tmpfilename)
    session.close()
    session2 = getSession(dest_string)
    loadYAML(session2, tmpfilename)
    os.remove(tmpfilename)
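The helper above assumes getSession, saveYAML, loadYAML, mkstemp and os are already imported. A minimal invocation sketch, with placeholder connection strings:

# Copy every record from a local SQLite file into another database.
# Both URLs are placeholders for whatever SQLAlchemy backends you use.
migrate_db('sqlite:///commits.sqlite',
           'mysql+pymysql://user:password@dbhost/dlrn')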
Example #2
def last_tested_repo_GET():
    # max_age: Maximum age in hours, used as base for the search
    # success(optional): find repos with a successful/unsuccessful vote
    # job_id(optional): name of the CI that sent the vote
    # sequential_mode(optional): if set to true, change the search algorithm
    #                            to only use previous_job_id as CI name to
    #                            search for. Defaults to false
    # previous_job_id(optional): CI name to search for, if sequential_mode is
    #                            True
    max_age = request.json.get('max_age', None)
    job_id = request.json.get('job_id', None)
    success = request.json.get('success', None)
    sequential_mode = request.json.get('sequential_mode', None)
    previous_job_id = request.json.get('previous_job_id', None)

    if success is not None:
        success = bool(strtobool(success))

    if sequential_mode is not None:
        sequential_mode = bool(strtobool(sequential_mode))

    if sequential_mode and previous_job_id is None:
        raise InvalidUsage('Missing parameter previous_job_id',
                           status_code=400)

    if max_age is None:
        raise InvalidUsage('Missing parameters', status_code=400)

    # Calculate timestamp as now - max_age
    if int(max_age) == 0:
        timestamp = 0
    else:
        oldest_time = datetime.now() - timedelta(hours=int(max_age))
        timestamp = time.mktime(oldest_time.timetuple())

    session = getSession(app.config['DB_PATH'])
    try:
        if sequential_mode:
            # CI pipeline case
            vote = getVote(session, timestamp, success, previous_job_id,
                           fallback=False)
        else:
            # Normal case
            vote = getVote(session, timestamp, success, job_id)
    except Exception:
        # Nothing to clean up here; re-raise, preserving the traceback
        raise

    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.id == vote.commit_id).first()

    result = {'commit_hash': commit.commit_hash,
              'distro_hash': commit.distro_hash,
              'timestamp': vote.timestamp,
              'job_id': vote.ci_name,
              'success': vote.ci_vote,
              'in_progress': vote.ci_in_progress,
              'user': vote.user}
    closeSession(session)
    return jsonify(result), 200
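The handler reads its parameters from a JSON body, and success/sequential_mode go through strtobool, so they are sent as strings. A hypothetical client call (the URL and route are assumptions, not taken from the code above):

import requests

response = requests.get('http://localhost:5000/api/last_tested_repo',
                        json={'max_age': '24',       # hours
                              'job_id': 'my-ci-job',
                              'success': 'true'})
print(response.status_code, response.json())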
Example #3
def verify_pw(username, password):
    session = getSession(app.config['DB_PATH'])
    user = session.query(User).filter(User.username == username).first()
    if user is not None:
        return passlib.hash.sha512_crypt.verify(password, user.password)
    else:
        return False
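The signature matches what Flask-HTTPAuth expects from a password verifier, which is consistent with the auth.username() calls in other examples on this page. A hedged sketch of the wiring (the decorator placement is an assumption):

from flask_httpauth import HTTPBasicAuth

auth = HTTPBasicAuth()

@auth.verify_password
def verify_pw(username, password):
    ...  # body as shown above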
Example #4
def get_report():
    package_name = request.args.get('package', None)
    success = request.args.get('success', None)
    offset = request.args.get('offset', 0)

    if success is not None:
        if bool(strtobool(success)):
            with_status = "SUCCESS"
        else:
            with_status = "FAILED"
    else:
        with_status = None

    session = getSession(app.config['DB_PATH'])
    commits = getCommits(session,
                         without_status="RETRY",
                         project=package_name,
                         with_status=with_status,
                         limit=pagination_limit,
                         offset=offset)

    count = commits.count()

    config_options = _get_config_options(app.config['CONFIG_FILE'])
    closeSession(session)

    return render_template('report.j2',
                           reponame='Detailed build report',
                           target=config_options.target,
                           src=config_options.source,
                           project_name=config_options.project_name,
                           commits=commits,
                           count=count,
                           limit=pagination_limit)
Example #5
def create_user(options, db_connection):
    try:
        session = getSession(db_connection)
        olduser = session.query(User).filter(
            User.username == options.username).first()

        if olduser is None:
            if options.password is None:
                newpass = getpass("Enter password for %s: " %
                                  options.username)
            else:
                newpass = options.password

            password = passlib.hash.sha512_crypt.encrypt(newpass)
            newuser = User(username=options.username,
                           password=password)
            session.add(newuser)
            session.commit()
            closeSession(session)
            print("User %s successfully created" % options.username)
        else:
            print("User %s already exists" % options.username)
            return -1
    except Exception as e:
        print("Failed to create user %s, %s" % (options.username, e))
        return -1
    return 0
Example #6
def get_report():
    package_name = request.args.get('package', None)
    success = request.args.get('success', None)
    offset = request.args.get('offset', 0)

    if success is not None:
        if bool(strtobool(success)):
            with_status = "SUCCESS"
        else:
            with_status = "FAILED"
    else:
        with_status = None

    session = getSession(app.config['DB_PATH'])
    commits = getCommits(session, without_status="RETRY",
                         project=package_name, with_status=with_status,
                         limit=pagination_limit, offset=offset)

    count = commits.count()

    config_options = _get_config_options(app.config['CONFIG_FILE'])
    closeSession(session)

    return render_template('report.j2',
                           reponame='Detailed build report',
                           target=config_options.target,
                           src=config_options.source,
                           project_name=config_options.project_name,
                           commits=commits,
                           count=count,
                           limit=pagination_limit)
Example #8
def create_user(options, db_connection):
    try:
        session = getSession(db_connection)
        olduser = session.query(User).filter(
            User.username == options.username).first()

        if olduser is None:
            if options.password is None:
                newpass = getpass("Enter password for %s: " % options.username)
            else:
                newpass = options.password

            password = passlib.hash.sha512_crypt.encrypt(newpass)
            newuser = User(username=options.username, password=password)
            session.add(newuser)
            session.commit()
            closeSession(session)
            print("User %s successfully created" % options.username)
        else:
            print("User %s already exists" % options.username)
            return -1
    except Exception as e:
        print("Failed to create user %s, %s" % (options.username, e))
        return -1
    return 0
Example #9
def report_result():
    # job_id: name of CI
    # commit_hash: commit hash
    # distro_hash: distro hash
    # url: URL where more information can be found
    # timestamp: CI execution timestamp
    # success: boolean
    # notes(optional): notes

    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON', status_code=415)

    try:
        commit_hash = request.json['commit_hash']
        distro_hash = request.json['distro_hash']
        timestamp = request.json['timestamp']
        job_id = request.json['job_id']
        success = request.json['success']
        url = request.json['url']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)

    notes = request.json.get('notes', '')

    session = getSession(app.config['DB_PATH'])
    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS', Commit.commit_hash == commit_hash,
        Commit.distro_hash == distro_hash).first()
    if commit is None:
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)

    commit_id = commit.id

    vote = CIVote(commit_id=commit_id,
                  ci_name=job_id,
                  ci_url=url,
                  ci_vote=bool(strtobool(success)),
                  ci_in_progress=False,
                  timestamp=int(timestamp),
                  notes=notes,
                  user=auth.username())
    session.add(vote)
    session.commit()

    result = {
        'commit_hash': commit_hash,
        'distro_hash': distro_hash,
        'timestamp': timestamp,
        'job_id': job_id,
        'success': bool(strtobool(success)),
        'in_progress': False,
        'url': url,
        'notes': notes,
        'user': auth.username()
    }
    closeSession(session)
    return jsonify(result), 201
Example #10
def setUp(self):
    super(base.TestCase, self).setUp()
    self.db_fd, filepath = tempfile.mkstemp()
    self.session = db.getSession("sqlite:///%s" % filepath)
    utils.loadYAML(self.session,
                   './dlrn/tests/samples/commits_components.yaml')
    self.datadir = tempfile.mkdtemp()
    self.repodir = os.path.join(self.datadir,
                                'repos/component/tripleo/test1')
    os.makedirs(self.repodir)
    with open(os.path.join(self.repodir, "delorean.repo"), 'w') as fp:
        fp.write("TESTING ONE TWO THREE")
Example #11
    def create_db(self):
        """
        Injects the initial tables into the database using the existing
        utils offered by dlrn itself
        :return: None
        """
        self.log.debug("Injecting %s data to %s", self.db_data, self.db_file)

        session = dlrn_db.getSession("sqlite:///%s" % self.db_file)
        try:
            utils.loadYAML(session, self.db_data)
        except sql_a_exc.IntegrityError:
            self.log.info("DB is not empty, not injecting data")
Example #12
def test_getsessions(self, ce_mock, sm_mock):
    db.getSession()
    db.getSession(url="sqlite:///test.db")
    # The 2nd call shouldn't result in a new session
    db.getSession()
    self.assertEqual(len(sm_mock.call_args_list), 2)
    expected = [mock.call('sqlite://'), mock.call('sqlite:///test.db')]
    self.assertEqual(ce_mock.call_args_list, expected)
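The two extra mock arguments imply that both create_engine and sessionmaker are patched in the db module. A hedged reconstruction of the decorators (mock arguments are filled bottom-up, and the 'dlrn.db' target path is an assumption):

import mock

@mock.patch('dlrn.db.sessionmaker')
@mock.patch('dlrn.db.create_engine')
def test_getsessions(self, ce_mock, sm_mock):
    ...  # body as shown above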
Example #13
def get_metrics():
    # start_date: start date for period, in YYYY-mm-dd format (UTC)
    # end_date: end date for period, in YYYY-mm-dd format (UTC)
    # package_name (optional): return metrics for package_name
    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON', status_code=415)

    try:
        start_date = request.json['start_date']
        end_date = request.json['end_date']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)
    package_name = request.json.get('package_name', None)

    # Convert dates to timestamp
    fmt = '%Y-%m-%d'
    try:
        start_timestamp = int(calendar.timegm(time.strptime(start_date, fmt)))
        end_timestamp = int(calendar.timegm(time.strptime(end_date, fmt)))
    except ValueError:
        raise InvalidUsage('Invalid date format, it must be YYYY-mm-dd',
                           status_code=400)

    # Find the commits count for each metric
    session = getSession(app.config['DB_PATH'])
    commits = session.query(Commit).filter(Commit.status == 'SUCCESS',
                                           Commit.dt_build >= start_timestamp,
                                           Commit.dt_build < end_timestamp)

    if package_name:
        commits = commits.filter(Commit.project_name == package_name)

    successful_commits = commits.count()

    commits = session.query(Commit).filter(Commit.status == 'FAILED',
                                           Commit.dt_build >= start_timestamp,
                                           Commit.dt_build <= end_timestamp)

    if package_name:
        commits = commits.filter(Commit.project_name == package_name)

    failed_commits = commits.count()
    total_commits = successful_commits + failed_commits

    result = {
        'succeeded': successful_commits,
        'failed': failed_commits,
        'total': total_commits
    }
    closeSession(session)
    return jsonify(result), 200
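Because of the Content-Type check, a client has to send a real JSON body. A hypothetical call (the URL and route are assumptions):

import requests

r = requests.get('http://localhost:5000/api/metrics/builds',
                 json={'start_date': '2023-01-01',
                       'end_date': '2023-02-01',
                       'package_name': 'openstack-nova'})
print(r.json())  # {'failed': ..., 'succeeded': ..., 'total': ...}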
Example #14
def get_metrics():
    # start_date: start date for period, in YYYY-mm-dd format (UTC)
    # end_date: end date for period, in YYYY-mm-dd format (UTC)
    # package_name (optional): return metrics for package_name
    try:
        start_date = request.json['start_date']
        end_date = request.json['end_date']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)
    package_name = request.json.get('package_name', None)

    # Convert dates to timestamp
    fmt = '%Y-%m-%d'
    try:
        start_timestamp = int(calendar.timegm(time.strptime(start_date, fmt)))
        end_timestamp = int(calendar.timegm(time.strptime(end_date, fmt)))
    except ValueError:
        raise InvalidUsage('Invalid date format, it must be YYYY-mm-dd',
                           status_code=400)

    # Find the commits count for each metric
    session = getSession(app.config['DB_PATH'])
    commits = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.dt_build >= start_timestamp,
        Commit.dt_build < end_timestamp)

    if package_name:
        commits = commits.filter(
            Commit.project_name == package_name)

    successful_commits = commits.count()

    commits = session.query(Commit).filter(
        Commit.status == 'FAILED',
        Commit.dt_build >= start_timestamp,
        Commit.dt_build <= end_timestamp)

    if package_name:
        commits = commits.filter(
            Commit.project_name == package_name)

    failed_commits = commits.count()
    total_commits = successful_commits + failed_commits

    result = {'succeeded': successful_commits,
              'failed': failed_commits,
              'total': total_commits}
    closeSession(session)
    return jsonify(result), 200
Example #15
def test_getsessions(self, ce_mock, sm_mock):
    db.getSession()
    db.getSession(url="sqlite:///test.db")
    # The 2nd call shouldn't result in a new session
    db.getSession()
    self.assertEqual(len(sm_mock.call_args_list), 2)
    expected = [mock.call('sqlite://'),
                mock.call('sqlite:///test.db')]
    self.assertEqual(ce_mock.call_args_list, expected)
Example #16
def get_civotes_detail():
    commit_hash = request.args.get('commit_hash', None)
    distro_hash = request.args.get('distro_hash', None)
    ci_name = request.args.get('ci_name', None)
    success = request.args.get('success', None)
    offset = request.args.get('offset', 0)

    session = getSession(app.config['DB_PATH'])
    votes = session.query(CIVote)
    votes = votes.filter(CIVote.ci_name != 'consistent')

    if commit_hash and distro_hash:
        commit = session.query(Commit).filter(
            Commit.status == 'SUCCESS', Commit.commit_hash == commit_hash,
            Commit.distro_hash == distro_hash).first()
        votes = votes.from_self().filter(CIVote.commit_id == commit.id)
    elif ci_name:
        votes = votes.filter(CIVote.ci_name == ci_name)
    else:
        raise InvalidUsage(
            "Please specify either commit_hash+distro_hash or "
            "ci_name as parameters.",
            status_code=400)

    votes = votes.offset(offset).limit(pagination_limit)

    if success is not None:
        votes = votes.from_self().filter(
            CIVote.ci_vote == bool(strtobool(success)))

    votelist = votes.all()
    count = votes.count()

    for i in range(len(votelist)):
        commit = getCommits(
            session,
            limit=0).filter(Commit.id == votelist[i].commit_id).first()
        votelist[i].commit_hash = commit.commit_hash
        votelist[i].distro_hash = commit.distro_hash
        votelist[i].distro_hash_short = commit.distro_hash[:8]

    closeSession(session)
    config_options = _get_config_options(app.config['CONFIG_FILE'])

    return render_template('votes.j2',
                           target=config_options.target,
                           votes=votelist,
                           count=count,
                           limit=pagination_limit)
Example #17
def update_user(options, db_connection):
    session = getSession(db_connection)
    password = passlib.hash.sha512_crypt.encrypt(options.password)
    user = session.query(User).filter(
        User.username == options.username).first()

    if user is None:
        print("ERROR: User %s does not exist" % options.username)
        return -1
    else:
        user.password = password
        session.add(user)
        session.commit()
    closeSession(session)
    return 0
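options only needs username and password attributes, so the function is easy to drive outside its CLI; a small usage sketch (the connection string is a placeholder):

from argparse import Namespace

rc = update_user(Namespace(username='alice', password='s3cret'),
                 'sqlite:///commits.sqlite')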
Example #19
def repo_status():
    # commit_hash: commit hash
    # distro_hash: distro hash
    # success(optional): only report successful/unsuccessful votes
    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON', status_code=415)

    commit_hash = request.json.get('commit_hash', None)
    distro_hash = request.json.get('distro_hash', None)
    success = request.json.get('success', None)
    if (commit_hash is None or distro_hash is None):
        raise InvalidUsage('Missing parameters', status_code=400)

    if success is not None:
        success = bool(strtobool(success))

    # Find the commit id for commit_hash/distro_hash
    session = getSession(app.config['DB_PATH'])
    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS', Commit.commit_hash == commit_hash,
        Commit.distro_hash == distro_hash).first()
    if commit is None:
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)
    commit_id = commit.id

    # Now find every vote for this commit_hash/distro_hash combination
    votes = session.query(CIVote).filter(CIVote.commit_id == commit_id)
    if success is not None:
        votes = votes.filter(CIVote.ci_vote == int(success))

    # And format the output
    data = []
    for vote in votes:
        d = {
            'timestamp': vote.timestamp,
            'commit_hash': commit_hash,
            'distro_hash': distro_hash,
            'job_id': vote.ci_name,
            'success': bool(vote.ci_vote),
            'in_progress': vote.ci_in_progress,
            'url': vote.ci_url,
            'notes': vote.notes,
            'user': vote.user
        }
        data.append(d)
    closeSession(session)
    return jsonify(data)
Example #20
def getinfo(package,
            local=False,
            dev_mode=False,
            head_only=False,
            db_connection=None,
            type="rpm"):
    project = package["name"]
    since = "-1"
    session = getSession(db_connection)
    commit = getLastProcessedCommit(session, project, type=type)
    if commit:
        # If we have switched source branches, we want to behave
        # as if no previous commits had been built, and only build
        # the last one
        if commit.commit_branch == getsourcebranch(package):
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        else:
            # The last processed commit belongs to a different branch. Just
            # in case, let's check if we built a previous commit from the
            # current branch
            commit = getLastBuiltCommit(session,
                                        project,
                                        getsourcebranch(package),
                                        type=type)
            if commit:
                logger.info("Last commit belongs to another branch, but"
                            " we're ok with that")
                since = "--after=%d" % (commit.dt_commit)
                # In any case, we just want to build the last commit, if any
                head_only = True

    project_toprocess, skipped = pkginfo.getinfo(project=project,
                                                 package=package,
                                                 since=since,
                                                 local=local,
                                                 dev_mode=dev_mode,
                                                 type=type)

    closeSession(session)
    # If since == -1, then we only want to trigger a build for the
    # most recent change
    if since == "-1" or head_only:
        del project_toprocess[:-1]

    return project_toprocess, package, skipped
Example #21
def setUp(self):
    super(TestBuild, self).setUp()
    self.configfile = configparser.RawConfigParser()
    self.configfile.read("projects.ini")
    self.configfile.set('DEFAULT', 'datadir', tempfile.mkdtemp())
    self.configfile.set('DEFAULT', 'scriptsdir', tempfile.mkdtemp())
    self.configfile.set('DEFAULT', 'baseurl', "file://%s" %
                        self.configfile.get('DEFAULT', 'datadir'))
    self.config = ConfigOptions(self.configfile)
    shutil.copyfile(os.path.join("scripts", "centos.cfg"),
                    os.path.join(self.config.scriptsdir, "centos.cfg"))
    with open(os.path.join(self.config.datadir,
              "delorean-deps.repo"), "w") as fp:
        fp.write("[test]\nname=test\nenabled=0\n")
    self.db_fd, filepath = tempfile.mkstemp()
    self.session = db.getSession("sqlite:///%s" % filepath)
    utils.loadYAML(self.session, './dlrn/tests/samples/commits_1.yaml')
Example #22
def report_result():
    # job_id: name of CI
    # commit_hash: commit hash
    # distro_hash: distro hash
    # url: URL where more information can be found
    # timestamp: CI execution timestamp
    # success: boolean
    # notes(optional): notes
    try:
        commit_hash = request.json['commit_hash']
        distro_hash = request.json['distro_hash']
        timestamp = request.json['timestamp']
        job_id = request.json['job_id']
        success = request.json['success']
        url = request.json['url']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)

    notes = request.json.get('notes', '')

    session = getSession(app.config['DB_PATH'])
    commit = _get_commit(session, commit_hash, distro_hash)
    if commit is None:
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)

    commit_id = commit.id

    vote = CIVote(commit_id=commit_id, ci_name=job_id, ci_url=url,
                  ci_vote=bool(strtobool(success)), ci_in_progress=False,
                  timestamp=int(timestamp), notes=notes,
                  user=auth.username())
    session.add(vote)
    session.commit()

    result = {'commit_hash': commit_hash,
              'distro_hash': distro_hash,
              'timestamp': timestamp,
              'job_id': job_id,
              'success': bool(strtobool(success)),
              'in_progress': False,
              'url': url,
              'notes': notes,
              'user': auth.username()}
    closeSession(session)
    return jsonify(result), 201
Example #23
def setUp(self):
    super(TestBuild, self).setUp()
    config = configparser.RawConfigParser(default_options)
    config.read("projects.ini")
    config.set('DEFAULT', 'datadir', tempfile.mkdtemp())
    config.set('DEFAULT', 'scriptsdir', tempfile.mkdtemp())
    config.set('DEFAULT', 'baseurl', "file://%s" % config.get('DEFAULT',
                                                              'datadir'))
    self.config = ConfigOptions(config)
    shutil.copyfile(os.path.join("scripts", "centos.cfg"),
                    os.path.join(self.config.scriptsdir, "centos.cfg"))
    with open(os.path.join(self.config.datadir,
              "delorean-deps.repo"), "w") as fp:
        fp.write("[test]\nname=test\nenabled=0\n")
    self.db_fd, filepath = tempfile.mkstemp()
    self.session = db.getSession("sqlite:///%s" % filepath)
    utils.loadYAML(self.session, './dlrn/tests/samples/commits_1.yaml')
Example #24
def get_civotes():
    session = getSession(app.config['DB_PATH'])
    offset = request.args.get('offset', 0)

    votes = session.query(CIVote)
    votes = votes.filter(CIVote.ci_name != 'consistent')
    votes = votes.order_by(desc(CIVote.timestamp))
    votes = votes.offset(offset).limit(pagination_limit)
    count = votes.count()
    # Let's find all individual commit_hash + distro_hash combinations
    commit_id_list = []
    for vote in votes:
        if vote.commit_id not in commit_id_list:
            commit_id_list.append(vote.commit_id)

    # Populate list for commits
    repolist = []
    for commit_id in commit_id_list:
        commit = getCommits(session, limit=0).filter(
            Commit.id == commit_id).first()

        repodetail = RepoDetail()
        repodetail.commit_hash = commit.commit_hash
        repodetail.distro_hash = commit.distro_hash
        repodetail.distro_hash_short = commit.distro_hash[:8]
        repodetail.success = votes.from_self().filter(
            CIVote.commit_id == commit_id, CIVote.ci_vote == 1).count()
        repodetail.failure = votes.from_self().filter(
            CIVote.commit_id == commit_id, CIVote.ci_vote == 0).count()
        repodetail.timestamp = votes.from_self().filter(
            CIVote.commit_id == commit_id).order_by(desc(CIVote.timestamp)).\
            first().timestamp
        repolist.append(repodetail)

    repolist = sorted(repolist, key=lambda repo: repo.timestamp, reverse=True)

    closeSession(session)

    config_options = _get_config_options(app.config['CONFIG_FILE'])

    return render_template('votes_general.j2',
                           target=config_options.target,
                           repodetail=repolist,
                           count=count,
                           limit=pagination_limit)
Example #25
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file',
                        help="Config file (required)",
                        required=True)
    parser.add_argument('--noop',
                        help="Preview actions but do not execute them",
                        action="store_true")

    options = parser.parse_args(sys.argv[1:])

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)
    datadir = os.path.realpath(cp.get('DEFAULT', 'datadir'))
    session = getSession(cp.get('DEFAULT', 'database_connection'))

    # We need to find promotions, and for each promotion name create the
    # corresponding symlink
    promotions = session.query(Promotion.promotion_name).distinct()

    promotion_list = []
    for promotion in promotions:
        promotion_list.append(promotion.promotion_name)

    # Find latest promotions for each promotion name, and re-do the symlinks
    for name in promotion_list:
        promotion = session.query(Promotion).\
            order_by(desc(Promotion.timestamp)).\
            filter(Promotion.promotion_name == name).first()

        commit = session.query(Commit).\
            filter(Commit.id == promotion.commit_id).first()

        repo_dir = os.path.join(datadir, "repos", commit.getshardedcommitdir())
        symlink_path = os.path.join(datadir, "repos", name)
        print("Going to symlink %s to %s" % (symlink_path, repo_dir))

        if not options.noop:
            try:
                os.symlink(
                    os.path.relpath(repo_dir, os.path.join(datadir, "repos")),
                    symlink_path + "_")
                os.rename(symlink_path + "_", symlink_path)
            except Exception as e:
                print("Symlink creation failed: %s", e)
Example #26
def get_civotes():
    session = getSession(app.config['DB_PATH'])
    offset = request.args.get('offset', 0)

    votes = session.query(CIVote)
    votes = votes.filter(CIVote.ci_name != 'consistent')
    votes = votes.order_by(desc(CIVote.timestamp))
    votes = votes.offset(offset).limit(pagination_limit)
    count = votes.count()
    # Let's find all individual commit_hash + distro_hash combinations
    commit_id_list = []
    for vote in votes:
        if vote.commit_id not in commit_id_list:
            commit_id_list.append(vote.commit_id)

    # Populate list for commits
    repolist = []
    for commit_id in commit_id_list:
        commit = getCommits(session,
                            limit=0).filter(Commit.id == commit_id).first()

        repodetail = RepoDetail()
        repodetail.commit_hash = commit.commit_hash
        repodetail.distro_hash = commit.distro_hash
        repodetail.distro_hash_short = commit.distro_hash[:8]
        repodetail.success = votes.from_self().filter(
            CIVote.commit_id == commit_id, CIVote.ci_vote == 1).count()
        repodetail.failure = votes.from_self().filter(
            CIVote.commit_id == commit_id, CIVote.ci_vote == 0).count()
        repodetail.timestamp = votes.from_self().filter(
            CIVote.commit_id == commit_id).order_by(desc(CIVote.timestamp)).\
            first().timestamp
        repolist.append(repodetail)

    repolist = sorted(repolist, key=lambda repo: repo.timestamp, reverse=True)

    closeSession(session)

    config_options = _get_config_options(app.config['CONFIG_FILE'])

    return render_template('votes_general.j2',
                           target=config_options.target,
                           repodetail=repolist,
                           count=count,
                           limit=pagination_limit)
Example #27
def get_civotes_detail():
    commit_hash = request.args.get('commit_hash', None)
    distro_hash = request.args.get('distro_hash', None)
    ci_name = request.args.get('ci_name', None)
    success = request.args.get('success', None)
    offset = request.args.get('offset', 0)

    session = getSession(app.config['DB_PATH'])
    votes = session.query(CIVote)
    votes = votes.filter(CIVote.ci_name != 'consistent')

    if commit_hash and distro_hash:
        commit = _get_commit(session, commit_hash, distro_hash)
        votes = votes.from_self().filter(CIVote.commit_id == commit.id)
    elif ci_name:
        votes = votes.filter(CIVote.ci_name == ci_name)
    else:
        raise InvalidUsage("Please specify either commit_hash+distro_hash or "
                           "ci_name as parameters.", status_code=400)

    votes = votes.offset(offset).limit(pagination_limit)

    if success is not None:
        votes = votes.from_self().filter(
            CIVote.ci_vote == bool(strtobool(success)))

    votelist = votes.all()
    count = votes.count()

    for i in range(len(votelist)):
        commit = getCommits(session, limit=0).filter(
            Commit.id == votelist[i].commit_id).first()
        votelist[i].commit_hash = commit.commit_hash
        votelist[i].distro_hash = commit.distro_hash
        votelist[i].distro_hash_short = commit.distro_hash[:8]

    closeSession(session)
    config_options = _get_config_options(app.config['CONFIG_FILE'])

    return render_template('votes.j2',
                           target=config_options.target,
                           votes=votelist,
                           count=count,
                           limit=pagination_limit)
Example #28
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--noop',
                        help="Preview actions but do not execute them",
                        action="store_true")

    options = parser.parse_args(sys.argv[1:])

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)
    datadir = os.path.realpath(cp.get('DEFAULT', 'datadir'))
    session = getSession(cp.get('DEFAULT', 'database_connection'))

    # We need to find promotions, and for each promotion name create the
    # corresponding symlink
    promotions = session.query(Promotion.promotion_name).distinct()

    promotion_list = []
    for promotion in promotions:
        promotion_list.append(promotion.promotion_name)

    # Find latest promotions for each promotion name, and re-do the symlinks
    for name in promotion_list:
        promotion = session.query(Promotion).\
            order_by(desc(Promotion.timestamp)).\
            filter(Promotion.promotion_name == name).first()

        commit = session.query(Commit).\
            filter(Commit.id == promotion.commit_id).first()

        repo_dir = os.path.join(datadir, "repos", commit.getshardedcommitdir())
        symlink_path = os.path.join(datadir, "repos", name)
        print("Going to symlink %s to %s" % (symlink_path, repo_dir))

        if not options.noop:
            try:
                os.symlink(os.path.relpath(repo_dir,
                                           os.path.join(datadir, "repos")),
                           symlink_path + "_")
                os.rename(symlink_path + "_", symlink_path)
            except Exception as e:
                print("Symlink creation failed: %s", e)
Example #29
    def inject_dlrn_fixtures(self):
        """
        Injects the fixtures into the database using the existing utils
        offered by dlrn itself
        """
        session = dlrn_db.getSession(
            "sqlite:///%s" % self.config['db_filepath'])
        db_filepath = self.config['db_filepath']
        self.config['results']['inject-dlrn-fixtures'] = db_filepath
        self.config['results']['dlrn_host'] = self.config['dlrn_host']

        if self.config['dry-run']:
            return

        os.makedirs(os.path.join(self.dlrn_repo_dir, 'repos'))
        try:
            utils.loadYAML(session, self.config['db_fixtures'])
        except sqlite3.IntegrityError:
            self.log.info("DB is not empty, not injecting fixtures")
Example #30
def compare():
    parser = argparse.ArgumentParser()
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of "
                             "fetching the default one using rdopkg. Only"
                             "applies when pkginfo_driver is rdoinfo in"
                             "projects.ini")

    options, args = parser.parse_known_args(sys.argv[1:])
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo_object = import_object(pkginfo_driver)
    packages = pkginfo_object.getpackages(local_info_repo=options.info_repo,
                                          tags=config_options.tags)

    compare_details = {}
    # Each argument is a ":"-separated filename:title pair; the filename is
    # the sqlite db file and the title is what's used in the table displayed
    table_header = ["Name", "Out of Sync"]
    for dbdetail in args:
        dbfilename, dbtitle = dbdetail.split(":")
        table_header.extend((dbtitle + " upstream", dbtitle + " spec"))

        session = getSession('sqlite:///%s' % dbfilename)

        for package in packages:
            package_name = package["name"]
            compare_details.setdefault(package_name, [package_name, " "])
            last_success = getCommits(session, project=package_name,
                                      with_status="SUCCESS").first()
            if last_success:
                compare_details[package_name].extend(
                    (last_success.commit_hash[:8],
                     last_success.distro_hash[:8]))
            else:
                compare_details[package_name].extend(("None", "None"))
        session.close()

    table = PrettyTable(table_header)
    for name, compare_detail in compare_details.items():
        if len(set(compare_detail)) > 4:
            compare_detail[1] = "*"
        table.add_row(compare_detail)
    print(table)
Example #31
def repo_status():
    # commit_hash: commit hash
    # distro_hash: distro hash
    # success(optional): only report successful/unsuccessful votes
    commit_hash = request.json.get('commit_hash', None)
    distro_hash = request.json.get('distro_hash', None)
    success = request.json.get('success', None)
    if (commit_hash is None or distro_hash is None):
        raise InvalidUsage('Missing parameters', status_code=400)

    if success is not None:
        success = bool(strtobool(success))

    # Find the commit id for commit_hash/distro_hash
    session = getSession(app.config['DB_PATH'])
    commit = _get_commit(session, commit_hash, distro_hash)

    if commit is None:
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)
    commit_id = commit.id

    # Now find every vote for this commit_hash/distro_hash combination
    votes = session.query(CIVote).filter(CIVote.commit_id == commit_id)
    if success is not None:
        votes = votes.filter(CIVote.ci_vote == int(success))

    # And format the output
    data = []
    for vote in votes:
        d = {'timestamp': vote.timestamp,
             'commit_hash': commit_hash,
             'distro_hash': distro_hash,
             'job_id': vote.ci_name,
             'success': bool(vote.ci_vote),
             'in_progress': vote.ci_in_progress,
             'url': vote.ci_url,
             'notes': vote.notes,
             'user': vote.user}
        data.append(d)
    closeSession(session)
    return jsonify(data)
Example #32
def purge_promoted_hashes(config, timestamp, dry_run=True):
    session = getSession(config.get('DEFAULT', 'database_connection'))
    basedir = os.path.join(config.get('DEFAULT', 'datadir'), 'repos')
    reponame = config.get('DEFAULT', 'reponame')

    # Get list of all promote names
    all_promotions = session.query(Promotion).\
        distinct(Promotion.promotion_name).\
        group_by(Promotion.promotion_name).all()
    closeSession(session)

    promotion_list = ['current', 'consistent']
    for prom in all_promotions:
        promotion_list.append(prom.promotion_name)

    logger.debug("Promotion list: %s" % promotion_list)

    # Now go through all directories
    for prom in promotion_list:
        directory = os.path.join(basedir, prom)
        logger.info("Looking into directory: %s" % directory)
        if os.path.islink(os.path.join(directory, reponame + '.repo')):
            protected_path = os.path.dirname(
                os.path.realpath(os.path.join(directory, reponame + '.repo')))
        else:
            logger.warning('No symlinks at %s' % directory)
            protected_path = ''

        logger.debug("Setting protected path: %s" % protected_path)
        # We have to traverse a 3-level hash structure
        # Not deleting the first two levels (xx/yy), just the final level,
        # where the files are located
        for path in glob.glob('%s/??/??/*' % directory):
            if os.path.isdir(path):
                dirstats = os.stat(path)
                if timestamp > dirstats.st_mtime:
                    if os.path.realpath(path) == protected_path:
                        logger.info('Not deleting %s, it is protected' % path)
                        continue
                    logger.info("Remove %s" % path)
                    if not dry_run:
                        shutil.rmtree(path, ignore_errors=True)
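timestamp is compared against st_mtime, so it is a Unix epoch cutoff. A short driver sketch, using the projects.ini layout seen in other examples here:

import configparser
import time

config = configparser.RawConfigParser()
config.read('projects.ini')

# Preview what would be purged for anything older than 30 days
cutoff = time.time() - 30 * 24 * 3600
purge_promoted_hashes(config, cutoff, dry_run=True)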
Example #33
def delete_user(options, db_connection):
    session = getSession(db_connection)
    user = session.query(User).filter(
        User.username == options.username).first()

    if user is None:
        print("ERROR: User %s does not exist" % options.username)
        return -1
    else:
        if not options.force:
            print("Are you sure you want to delete user %s? "
                  "If so, type YES to continue." % options.username)
            confirm = input()
            if confirm != "YES":
                print("Action not confirmed, exiting")
                return -1
        session.delete(user)
        session.commit()
        print("User %s deleted" % options.username)
    closeSession(session)
    return 0
Example #35
def getinfo(package, local=False, dev_mode=False, head_only=False,
            db_connection=None, type="rpm"):
    project = package["name"]
    since = "-1"
    session = getSession(db_connection)
    commit = getLastProcessedCommit(session, project, type=type)
    if commit:
        # If we have switched source branches, we want to behave
        # as if no previous commits had been built, and only build
        # the last one
        if commit.commit_branch == getsourcebranch(package):
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        else:
            # The last processed commit belongs to a different branch. Just
            # in case, let's check if we built a previous commit from the
            # current branch
            commit = getLastBuiltCommit(session, project,
                                        getsourcebranch(package), type=type)
            if commit:
                logger.info("Last commit belongs to another branch, but"
                            " we're ok with that")
                since = "--after=%d" % (commit.dt_commit)
                # In any case, we just want to build the last commit, if any
                head_only = True

    project_toprocess = pkginfo.getinfo(project=project, package=package,
                                        since=since, local=local,
                                        dev_mode=dev_mode, type=type)

    closeSession(session)
    # If since == -1, then we only want to trigger a build for the
    # most recent change
    if since == "-1" or head_only:
        del project_toprocess[:-1]

    return project_toprocess, package
Example #36
def _get_db():
    if 'db' not in flask_g:
        flask_g.db = getSession(app.config['DB_PATH'])
    return flask_g.db
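Caching the session on flask.g yields one session per application context; the usual counterpart is a teardown hook that closes it. A hedged sketch, reusing the closeSession helper seen throughout this page:

@app.teardown_appcontext
def _close_db(exception=None):
    db = flask_g.pop('db', None)
    if db is not None:
        closeSession(db)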
Example #37
def setUp(self):
    super(TestUser, self).setUp()
    self.db_fd, filepath = tempfile.mkstemp()
    self.session = db.getSession("sqlite:///%s" % filepath)
    utils.loadYAML(self.session, './dlrn/tests/samples/commits_2.yaml')
Example #39
def setUp(self):
    super(TestsWithData, self).setUp()
    self.session = db.getSession(new=True)
    utils.loadYAML(self.session, './dlrn/tests/samples/commits_1.yaml')
Example #40
def import_commit(repo_url, config_file, db_connection=None,
                  local_info_repo=None):
    cp = configparser.RawConfigParser()
    cp.read(config_file)
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=local_info_repo,
                                   tags=config_options.tags,
                                   dev_mode=False)

    remote_yaml = repo_url + '/' + 'commit.yaml'
    r = urlopen(remote_yaml)
    contents = map(lambda x: x.decode('utf8'), r.readlines())

    osfd, tmpfilename = mkstemp()
    fp = os.fdopen(osfd, 'w')
    fp.writelines(contents)
    fp.close()

    commits = loadYAML_list(tmpfilename)
    os.remove(tmpfilename)
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    for commit in commits:
        commit.id = None
        if commit.artifacts == 'None':
            commit.artifacts = None
        commit.dt_build = int(commit.dt_build)
        commit.dt_commit = float(commit.dt_commit)
        commit.dt_distro = int(commit.dt_distro)
        # Check if the latest built commit for this project is newer
        # than this one. In that case, we should ignore it
        if db_connection:
            session = getSession(db_connection)
        else:
            session = getSession(config_options.database_connection)
        package = commit.project_name
        old_commit = getLastProcessedCommit(session, package)
        if old_commit:
            if old_commit.dt_commit >= commit.dt_commit:
                if old_commit.dt_distro >= commit.dt_distro:
                    logger.info('Skipping commit %s, a newer commit is '
                                'already built\n'
                                'Old: %s %s, new: %s %s' %
                                (commit.commit_hash, old_commit.dt_commit,
                                 old_commit.dt_distro, commit.dt_commit,
                                 commit.dt_distro))
                    continue    # Skip

        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)

        for logfile in ['build.log', 'installed', 'mock.log', 'root.log',
                        'rpmbuild.log', 'state.log']:
            logfile_url = repo_url + '/' + logfile
            try:
                r = urlopen(logfile_url)
                contents = map(lambda x: x.decode('utf8'), r.readlines())
                with open(os.path.join(yumrepodir, logfile), "w") as fp:
                    fp.writelines(contents)
            except urllib.error.HTTPError:
                # Ignore errors, if the remote build failed there may be
                # some missing files
                pass

        if commit.artifacts:
            for rpm in commit.artifacts.split(","):
                rpm_url = repo_url + '/' + rpm.split('/')[-1]
                try:
                    r = urlopen(rpm_url)
                    contents = r.read()
                    with open(os.path.join(datadir, rpm), "wb") as fp:
                        fp.write(contents)
                except urllib.error.HTTPError:
                    if rpm != 'None':
                        logger.warning("Failed to download rpm file %s"
                                       % rpm_url)
        # Get remote update lock, to prevent any other remote operation
        # while we are creating the repo and updating the database
        logger.debug("Acquiring remote update lock")
        with lock_file(os.path.join(datadir, 'remote.lck')):
            logger.debug("Acquired lock")
            if commit.status == 'SUCCESS':
                built_rpms = []
                for rpm in commit.artifacts.split(","):
                    built_rpms.append(rpm)
                status = [commit, built_rpms, commit.notes, None]
                post_build(status, packages, session)
            else:
                pkg = [p for p in packages if p['name'] == package][0]
                # Here we fire a refresh of the repositories
                # (upstream and distgit) to be sure to have them in the
                # data directory. We need that in the case the worker
                # is running on another host mainly for the
                # submit_review.sh script.
                pkginfo.getinfo(project=pkg["name"], package=pkg,
                                since='-1', local=False, dev_mode=False)
                # Paths on the worker might differ so we overwrite them
                # to reflect data path on the local API host.
                commit.distgit_dir = pkginfo.distgit_dir(pkg['name'])
                commit.repo_dir = os.path.join(
                    config_options.datadir, pkg['name'])
                status = [commit, '', '', commit.notes]
            process_build_result(status, packages, session, [])
            closeSession(session)   # Keep one session per commit
        logger.debug("Released lock")
    return 0
Example #41
def promote():
    # commit_hash: commit hash
    # distro_hash: distro hash
    # promote_name: symlink name
    try:
        commit_hash = request.json['commit_hash']
        distro_hash = request.json['distro_hash']
        promote_name = request.json['promote_name']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)

    # Check for invalid promote names
    if (promote_name == 'consistent' or promote_name == 'current'):
        raise InvalidUsage('Invalid promote_name %s' % promote_name,
                           status_code=403)

    config_options = _get_config_options(app.config['CONFIG_FILE'])

    session = getSession(app.config['DB_PATH'])
    commit = _get_commit(session, commit_hash, distro_hash)
    if commit is None:
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)

    # If the commit has been purged, do not move on
    if commit.flags & FLAG_PURGED:
        raise InvalidUsage('commit_hash+distro_hash has been purged, cannot '
                           'promote it', status_code=410)

    target_link = os.path.join(app.config['REPO_PATH'], promote_name)
    # Check for invalid target links, like ../promotename
    target_dir = os.path.dirname(os.path.abspath(target_link))
    if not os.path.samefile(target_dir, app.config['REPO_PATH']):
        raise InvalidUsage('Invalid promote_name %s' % promote_name,
                           status_code=403)

    # We should create a relative symlink
    yumrepodir = commit.getshardedcommitdir()

    # Remove symlink if it exists, so we can create it again
    if os.path.lexists(os.path.abspath(target_link)):
        os.remove(target_link)
    try:
        os.symlink(yumrepodir, target_link)
    except Exception as e:
        raise InvalidUsage("Symlink creation failed with error: %s" %
                           e, status_code=500)

    timestamp = time.mktime(datetime.now().timetuple())
    promotion = Promotion(commit_id=commit.id, promotion_name=promote_name,
                          timestamp=timestamp, user=auth.username())

    session.add(promotion)
    session.commit()

    repo_hash = _repo_hash(commit)
    repo_url = "%s/%s" % (config_options.baseurl, yumrepodir)

    result = {'commit_hash': commit_hash,
              'distro_hash': distro_hash,
              'repo_hash': repo_hash,
              'repo_url': repo_url,
              'promote_name': promote_name,
              'timestamp': timestamp,
              'user': auth.username()}
    closeSession(session)
    return jsonify(result), 201
Example #42
def genreports(packages, options):
    global session
    session = getSession('sqlite:///commits.sqlite')
    config_options = getConfigOptions()

    # Generate report of the last 300 package builds
    target = config_options.target
    src = config_options.source
    reponame = config_options.reponame
    templatedir = config_options.templatedir
    datadir = config_options.datadir

    css_file = os.path.join(templatedir, 'stylesheets/styles.css')

    # configure jinja and filters
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader([templatedir]))
    jinja_env.filters["strftime"] = _jinja2_filter_strftime
    jinja_env.filters["get_commit_url"] = \
        partial(_jinja2_filter_get_commit_url, packages=packages)

    # generate build report
    commits = getCommits(session, without_status="RETRY", limit=300)
    jinja_template = jinja_env.get_template("report.j2")
    content = jinja_template.render(reponame=reponame,
                                    src=src,
                                    target=target,
                                    commits=commits)
    shutil.copy2(css_file, os.path.join(datadir, "repos", "styles.css"))
    report_file = os.path.join(datadir, "repos", "report.html")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Generate status report
    if options.head_only:
        msg = "(all commit not built)"
    else:
        msg = ""

    pkgs = []
    # Find the most recent successful build,
    # then report on failures since then
    for package in packages:
        name = package["name"]
        commits = getCommits(session, project=name, limit=1)

        # No builds
        if commits.count() == 0:
            continue

        pkgs.append(package)
        last_build = commits.first()
        package["last_build"] = last_build

        # last build was successful
        if last_build.status == "SUCCESS":
            continue

        # Retrieve last successful build
        commits = getCommits(session, project=name, with_status="SUCCESS",
                             limit=1)

        # No successful builds
        if commits.count() == 0:
            commits = getCommits(session, project=name, with_status="FAILED",
                                 order="asc")
            package["first_failure"] = commits.first()
            package["days"] = -1
            continue

        last_success = commits.first()
        last_success_dt = last_success.dt_build

        commits = getCommits(session, project=name, with_status="FAILED",
                             order="asc", limit=None)
        commits = commits.filter(Commit.dt_build > last_success_dt)
        package["first_failure"] = commits.first()
        package["days"] = (datetime.now() -
                           datetime.fromtimestamp(last_success_dt)).days

    pkgs = sorted(pkgs, key=itemgetter("name"))
    jinja_template = jinja_env.get_template("status_report.j2")
    content = jinja_template.render(msg=msg,
                                    reponame=reponame,
                                    src=src,
                                    target=target,
                                    pkgs=pkgs)

    report_file = os.path.join(datadir, "repos", "status_report.html")
    with open(report_file, "w") as fp:
        fp.write(content)
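The templates rely on two custom Jinja filters registered above. _jinja2_filter_strftime is not shown on this page; a plausible implementation, assuming dt_build-style Unix timestamps:

from datetime import datetime

def _jinja2_filter_strftime(epoch, fmt='%Y-%m-%d %H:%M:%S'):
    # Render an epoch timestamp (e.g. Commit.dt_build) as readable text
    return datetime.fromtimestamp(float(epoch)).strftime(fmt)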
Example #43
def purge():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--older-than',
                        help="How old commits need to be purged "
                             "(in days).", required=True)
    parser.add_argument('-y', help="Answer \"yes\" to any questions",
                        action="store_true")

    options, args = parser.parse_known_args(sys.argv[1:])

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)
    config_options = ConfigOptions(cp)

    timeparsed = datetime.now() - timedelta(days=int(options.older_than))

    if options.y is False:
        ans = input("Remove all data before %s, correct? [N/y] " %
                    timeparsed.ctime())
        if ans.lower() != "y":
            return

    session = getSession('sqlite:///commits.sqlite')

    # To remove builds we have to start at a point in time and move backwards
    # builds with no build date are also purged as these are legacy
    # All repositories can have the repodata directory and symlinks purged
    # But we must keep the rpm files of the most recent successful build of
    # each project as other symlinks not being purged will be pointing to them.
    topurge = getCommits(session,
                         limit=0,
                         before=int(mktime(timeparsed.timetuple()))
                         ).all()

    fullpurge = []
    for commit in topurge:
        if commit.flags & FLAG_PURGED:
            continue
        datadir = os.path.join(config_options.datadir, "repos",
                               commit.getshardedcommitdir())
        if commit.project_name not in fullpurge and commit.status == "SUCCESS":
            # So we have not removed any commit from this project yet, and it
            # is successful. Is it the newest one?
            previouscommits = getCommits(session,
                                         project=commit.project_name,
                                         since=commit.dt_build,
                                         with_status='SUCCESS').count()

            if previouscommits == 0:
                logger.info("Keeping old commit for %s" % commit.project_name)
                continue  # this is the newest commit for this project, keep it

            try:
                for entry in os.listdir(datadir):
                    entry = os.path.join(datadir, entry)
                    if entry.endswith(".rpm") and not os.path.islink(entry):
                        continue
                    if os.path.isdir(entry):
                        shutil.rmtree(entry)
                    else:
                        os.unlink(entry)
            except OSError:
                logger.warning("Cannot access directory %s for purge,"
                               " ignoring." % datadir)
            fullpurge.append(commit.project_name)
            commit.flags |= FLAG_PURGED
        else:
            shutil.rmtree(datadir)
            commit.flags |= FLAG_PURGED
    session.commit()
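
The purge loop above marks processed commits with a bit flag so later runs skip them; a minimal sketch of the flags & FLAG_PURGED / flags |= FLAG_PURGED idiom, using a hypothetical flag value:

FLAG_PURGED = 0x2  # hypothetical value; DLRN defines the real constant

flags = 0
assert not (flags & FLAG_PURGED)  # commit not yet purged, so process it
flags |= FLAG_PURGED              # record that the commit was purged
assert flags & FLAG_PURGED        # later runs see the flag and skip it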
Example #44
 def test_getsession(self, sm_mock):
     db.getSession()
     self.assertEqual(len(sm_mock.call_args_list), 1)
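
The sm_mock argument implies the test method is decorated with a patch; a minimal sketch of the likely wiring, assuming dlrn.db builds sessions via SQLAlchemy's sessionmaker (the decorator and class are not part of the snippet):

from unittest import TestCase, mock

from dlrn import db


class TestGetSession(TestCase):
    @mock.patch('dlrn.db.sessionmaker')  # assumed patch target
    def test_getsession(self, sm_mock):
        db.getSession()
        self.assertEqual(len(sm_mock.call_args_list), 1)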
Example #45
def main():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required).",
                        required=True)
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of "
                             "fetching the default one using rdopkg. Only"
                             "applies when pkginfo_driver is rdoinfo in"
                             "projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    parser.add_argument('--package-name',
                        help="Build a specific package name only.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                        "Imply --package-name")
    parser.add_argument('--version',
                        action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Imply --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")

    options, args = parser.parse_known_args(sys.argv[1:])

    cp = configparser.RawConfigParser(default_options)
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)

    global session
    session = getSession('sqlite:///commits.sqlite')
    global config_options
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo_object = import_object(pkginfo_driver)
    packages = pkginfo_object.getpackages(local_info_repo=options.info_repo,
                                          tags=config_options.tags)

    if options.status is True:
        if options.package_name:
            names = (options.package_name, )
        else:
            names = [p['name'] for p in packages]
        for name in names:
            commit = getLastProcessedCommit(session, name, 'invalid status')
            if commit:
                print(name, commit.status)
            else:
                print(name, 'NO_BUILD')
        sys.exit(0)

    if options.recheck is True:
        if not options.package_name:
            logger.error('Please use --package-name with --recheck.')
            sys.exit(1)
        commit = getLastProcessedCommit(session, options.package_name)
        if commit:
            if commit.status == 'SUCCESS':
                logger.error("Trying to recheck an already successful commit,"
                             " ignoring.")
                sys.exit(1)
            elif commit.status == 'RETRY':
                # In this case, we are going to retry anyway, so
                # do nothing and exit
                logger.warning("Trying to recheck a commit in RETRY state,"
                               " ignoring.")
                sys.exit(0)
            else:
                # We could set the status to RETRY here, but if we have gone
                # beyond max_retries it wouldn't work as expected. Thus, our
                # only chance is to remove the commit
                session.delete(commit)
                session.commit()
                sys.exit(0)
        else:
            logger.error("There are no existing commits for package %s"
                         % options.package_name)
            sys.exit(1)
    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True
    # Build a list of commits we need to process
    toprocess = []
    for package in packages:
        project = package["name"]
        since = "-1"
        commit = getLastProcessedCommit(session, project)
        if commit:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        repo = package["upstream"]
        distro = package["master-distgit"]
        if not options.package_name or package["name"] == options.package_name:
            project_toprocess = getinfo(project, repo, distro, since,
                                        options.local, options.dev, package)
            # If since == -1, then we only want to trigger a build for the
            # most recent change
            if since == "-1" or options.head_only:
                del project_toprocess[:-1]

            # The first entry in the list of commits is a commit we have
            # already processed, we want to process it again only if in dev
            # mode or distro hash has changed, we can't simply check against
            # the last commit in the db, as multiple commits can have the same
            # commit date
            for commit_toprocess in project_toprocess:
                if ((options.dev is True) or
                    options.run or
                    (not session.query(Commit).filter(
                        Commit.project_name == project,
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.distro_hash == commit_toprocess.distro_hash,
                        Commit.status != "RETRY")
                        .all())):
                    toprocess.append(commit_toprocess)

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True and not options.package_name:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([p['name'] for p in packages])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            specpath = os.path.join(config_options.datadir,
                                    project_name + "_distro",
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            with open(specpath) as fp:
                rawspec = fp.read()
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                  for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a, _b = a.dt_commit, b.dt_commit
            else:
                _a = orders.index(a.project_name)
                _b = orders.index(b.project_name)
            # cmp() and list.sort(cmp=...) are gone in Python 3; emulate
            # cmp() with the (a > b) - (a < b) idiom and adapt via cmp_to_key
            return (_a > _b) - (_a < _b)
        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()
    exit_code = 0
    for commit in toprocess:
        project = commit.project_name

        project_info = session.query(Project).filter(
            Project.project_name == project).first()
        if not project_info:
            project_info = Project(project_name=project, last_email=0)

        commit_hash = commit.commit_hash

        if options.run:
            try:
                run(options.run, commit, options.build_env,
                    options.dev, options.use_public, options.order,
                    do_build=False)
            except Exception:
                exit_code = 1
                if options.stop:
                    return exit_code
            continue

        logger.info("Processing %s %s" % (project, commit_hash))

        notes = ""
        try:
            built_rpms, notes = build(packages,
                                      commit, options.build_env, options.dev,
                                      options.use_public, options.order)
        except Exception as e:
            datadir = os.path.realpath(config_options.datadir)
            exit_code = 1
            logfile = os.path.join(datadir, "repos",
                                   commit.getshardedcommitdir(),
                                   "rpmbuild.log")
            if (isknownerror(logfile) and
                (timesretried(project, commit_hash, commit.distro_hash) <
                 config_options.maxretries)):
                logger.exception("Known error building packages for %s,"
                                 " will retry later" % project)
                commit.status = "RETRY"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            else:
                # If the log file hasn't been created we add what we have
                # This happens if the rpm build script didn't run.
                if not os.path.exists(logfile):
                    with open(logfile, "w") as fp:
                        fp.write(getattr(e, "message", notes))

                if not project_info.suppress_email():
                    sendnotifymail(packages, commit)
                    project_info.sent_email()
                    session.add(project_info)

                # Only submit a gerrit review if the last build was
                # successful or non-existent, to avoid creating a gerrit
                # review for the same problem multiple times.
                if config_options.gerrit is not None:
                    if options.build_env:
                        env_vars = list(options.build_env)
                    else:
                        env_vars = []
                    last_build = getLastProcessedCommit(session, project)
                    if not last_build or last_build.status == 'SUCCESS':
                        for pkg in packages:
                            if project == pkg['name']:
                                break
                        else:
                            pkg = None
                        if pkg:
                            url = (get_commit_url(commit, pkg) +
                                   commit.commit_hash)
                            env_vars.append('GERRIT_URL=%s' % url)
                            env_vars.append('GERRIT_LOG=%s/%s' %
                                            (config_options.baseurl,
                                             commit.getshardedcommitdir()))
                            maintainers = ','.join(pkg['maintainers'])
                            env_vars.append('GERRIT_MAINTAINERS=%s' %
                                            maintainers)
                            logger.info('Creating a gerrit review using '
                                        'GERRIT_URL=%s '
                                        'GERRIT_MAINTAINERS=%s ' %
                                        (url, maintainers))
                            try:
                                submit_review(commit, env_vars)
                            except Exception:
                                logger.error('Unable to create review '
                                             'see review.log')
                        else:
                            logger.error('Unable to find info for project %s' %
                                         project)
                    else:
                        logger.info('Last build not successful '
                                    'for %s' % project)
                commit.status = "FAILED"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            if options.stop:
                return exit_code
        else:
            commit.status = "SUCCESS"
            commit.notes = notes
            commit.rpms = ",".join(built_rpms)
            session.add(commit)
        if options.dev is False:
            session.commit()
        genreports(packages, options)
        sync_repo(commit)

    # If we were bootstrapping, set the packages that required it to RETRY
    if options.order is True and not options.package_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()

    genreports(packages, options)
    return exit_code
Example #46
def genreports(cp, packages, options):
    global session
    session = getSession('sqlite:///commits.sqlite')

    # Generate report of the last 300 package builds
    target = cp.get("DEFAULT", "target")
    src = cp.get("DEFAULT", "source")
    reponame = cp.get("DEFAULT", "reponame")
    templatedir = cp.get("DEFAULT", "templatedir")
    datadir = cp.get("DEFAULT", "datadir")

    css_file = os.path.join(templatedir, 'stylesheets/styles.css')

    # configure jinja and filters
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader([templatedir]))
    jinja_env.filters["strftime"] = _jinja2_filter_strftime
    jinja_env.filters["get_commit_url"] = \
        partial(_jinja2_filter_get_commit_url, packages=packages)

    # generate build report
    commits = getCommits(session, without_status="RETRY", limit=300)
    jinja_template = jinja_env.get_template("report.j2")
    content = jinja_template.render(reponame=reponame,
                                    src=src,
                                    target=target,
                                    commits=commits)
    shutil.copy2(css_file, os.path.join(datadir, "repos", "styles.css"))
    report_file = os.path.join(datadir, "repos", "report.html")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Generate status report
    if options.head_only:
        msg = "(all commit not built)"
    else:
        msg = ""

    pkgs = []
    # Find the most recent successful build,
    # then report on failures since then
    for package in packages:
        name = package["name"]
        commits = getCommits(session, project=name, limit=1)

        # No builds
        if commits.count() == 0:
            continue

        pkgs.append(package)
        last_build = commits.first()
        package["last_build"] = last_build

        # last build was successful
        if last_build.status == "SUCCESS":
            continue

        # Retrieve last successful build
        commits = getCommits(session, project=name, with_status="SUCCESS",
                             limit=1)

        # No successful builds
        if commits.count() == 0:
            commits = getCommits(session, project=name, with_status="FAILED",
                                 order="asc")
            package["first_failure"] = commits.first()
            package["days"] = -1
            continue

        last_success = commits.first()
        last_success_dt = last_success.dt_build

        commits = getCommits(session, project=name, with_status="FAILED",
                             order="asc", limit=None)
        commits = commits.filter(Commit.dt_build > last_success_dt)
        package["first_failure"] = commits.first()
        package["days"] = (datetime.now() -
                           datetime.fromtimestamp(last_success_dt)).days

    pkgs = sorted(pkgs, key=itemgetter("name"))
    jinja_template = jinja_env.get_template("status_report.j2")
    content = jinja_template.render(msg=msg,
                                    reponame=reponame,
                                    src=src,
                                    target=target,
                                    pkgs=pkgs)

    report_file = os.path.join(datadir, "repos", "status_report.html")
    with open(report_file, "w") as fp:
        fp.write(content)
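
_jinja2_filter_strftime is registered above but not shown; a plausible sketch under the assumption that it formats the Unix timestamps stored on commits (the exact signature is an assumption):

import datetime


def _jinja2_filter_strftime(epoch, fmt='%Y-%m-%d %H:%M:%S'):
    # Assumed behaviour: turn a Unix timestamp into a readable date
    # for the report templates.
    return datetime.datetime.fromtimestamp(float(epoch)).strftime(fmt)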
Example #47
def main():
    parser = argparse.ArgumentParser()

    parser.add_argument('--config-file',
                        default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of"
                             " fetching the default one using rdopkg. Only"
                             " applies when pkginfo_driver is rdoinfo in"
                             " projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible. Only commited"
                             " changes in the local repo will be used in the"
                             " build.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name', action='append',
                       help="Build a specific project name only."
                            " Use multiple times to build more than one "
                            "project in a run.")
    group.add_argument('--package-name', action='append',
                       help="Build a specific package name only."
                            " Use multiple times to build more than one "
                            "package in a run.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits. "
                             "Implies --sequential.")
    parser.add_argument('--sequential', action="store_true",
                        help="Run all actions sequentially, regardless of the"
                             " number of workers specified in projects.ini.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                        "Implies --package-name")
    parser.add_argument('--force-recheck', action="store_true",
                        help="Force a rebuild for a particular package, even "
                        "if its last build was successful. Requires setting "
                        "allow_force_rechecks=True in projects.ini. "
                        "Implies --package-name and --recheck")
    parser.add_argument('--version',
                        action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")
    parser.add_argument('--verbose-build', action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock', action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo', action="store_true",
                        help="Do not generate a repo with all the built "
                        "packages.")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp)
    if options.dev:
        _, tmpdb_path = tempfile.mkstemp()
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    if options.project_name:
        pkg_names = [p['name'] for p in packages
                     if p['project'] in options.project_name]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    if options.status is True:
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(
                    session, name, 'invalid status',
                    type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built "
                            "commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name '
                         'with --recheck.')
            sys.exit(1)

        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name, type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)
    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True
    # Build a list of commits we need to process
    toprocess = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is a commit we have
        # already processed, we want to process it again only if in dev
        # mode or distro hash has changed, we can't simply check
        # against the last commit in the db, as multiple commits can
        # have the same commit date
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
               options.run or \
               not session.query(Commit).filter(
                   Commit.commit_hash == commit_toprocess.commit_hash,
                   Commit.distro_hash == commit_toprocess.distro_hash,
                   Commit.extended_hash == commit_toprocess.extended_hash,
                   Commit.type == commit_toprocess.type,
                   Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        pool = multiprocessing.Pool()   # This will use all the system cpus
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(getinfo, local=options.local,
                                  dev_mode=options.dev,
                                  head_only=options.head_only,
                                  db_connection=config_options.
                                  database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _ = getinfo(package, local=options.local,
                                               dev_mode=options.dev,
                                               head_only=options.head_only,
                                               db_connection=config_options.
                                               database_connection)
                add_commits(project_toprocess)
    closeSession(session)   # Close session, will reopen during post_build

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected, please"
                        " make sure the package name(s) are correct, and that "
                        "any failed commit you want to rebuild has been "
                        "removed from the database.")
        return 0

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            with open(specpath) as fp:
                rawspec = fp.read()
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                  for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                _a = orders.index(a.project_name)
                _b = orders.index(b.project_name)
            # cmp() is no longer available in Python 3, so replace it; see
            # "Ordering Comparisons" in:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)

        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages, commit, run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order, sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    if not options.run:
                        failures = post_build(status, packages, session,
                                              build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(status, packages, session,
                                                  toprocess_copy,
                                                  dev_mode=options.dev,
                                                  run_cmd=options.run,
                                                  stop=options.stop,
                                                  build_env=options.build_env,
                                                  head_only=options.head_only,
                                                  consistent=consistent,
                                                  failures=failures)
                closeSession(session)

            if exit_value != 0:
                exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker, packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order, sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status, packages, session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status, packages,
                        session, toprocess,
                        dev_mode=options.dev,
                        run_cmd=options.run,
                        stop=options.stop,
                        build_env=options.build_env,
                        head_only=options.head_only,
                        consistent=consistent,
                        failures=failures)
                    closeSession(session)
                if exit_value != 0:
                    exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    if options.dev:
        os.remove(tmpdb_path)
    return exit_code
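
The (_a > _b) - (_a < _b) expression in my_cmp above reproduces Python 2's cmp(); a standalone check of the idiom:

from functools import cmp_to_key


def py2_style_cmp(a, b):
    # Returns -1, 0 or 1, exactly like Python 2's cmp()
    return (a > b) - (a < b)


assert py2_style_cmp(1, 2) == -1
assert py2_style_cmp(2, 2) == 0
assert sorted([3, 1, 2], key=cmp_to_key(py2_style_cmp)) == [1, 2, 3]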
Example #48
def mocked_session(url):
    db_fd, filepath = tempfile.mkstemp()
    session = db.getSession("sqlite:///%s" % filepath)
    utils.loadYAML(session, './dlrn/tests/samples/commits_1.yaml')
    return session
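
A hedged usage sketch: test cases typically substitute this factory for the real getSession via mock.patch with side_effect (the patch target below is an assumption):

from unittest import mock


@mock.patch('dlrn.shell.getSession', side_effect=mocked_session)
def test_something(session_mock):
    ...  # code under test now receives the YAML-seeded session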
Example #49
 def setUp(self):
     super(TestUser, self).setUp()
     self.db_fd, filepath = tempfile.mkstemp()
     self.session = db.getSession("sqlite:///%s" % filepath)
     utils.loadYAML(self.session, './dlrn/tests/samples/commits_2.yaml')
Example #50
def mocked_session(url):
    db_fd, filepath = tempfile.mkstemp()
    session = db.getSession("sqlite:///%s" % filepath)
    utils.loadYAML(session, './dlrn/tests/samples/commits_2.yaml')
    return session
Example #51
def purge():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--older-than',
                        help="Purge builds older than provided value"
                             " (in days).", required=True)
    parser.add_argument('-y', help="Answer \"yes\" to any questions",
                        action="store_true")
    parser.add_argument('--dry-run', help="Do not change anything, show"
                        " what changes would be made",
                        action="store_true")
    parser.add_argument('--exclude-dirs', help="Do not remove commits whose"
                        " packages are included in one of the specifided"
                        " directories (comma-separated list).")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    timeparsed = datetime.now() - timedelta(days=int(options.older_than))

    if options.y is False:
        ans = input(("Remove all data before %s, correct? [N/y] " %
                     timeparsed.ctime()))
        if ans.lower() != "y":
            return

    session = getSession(cp.get('DEFAULT', 'database_connection'))

    # To remove builds we have to start at a point in time and move backwards
    # builds with no build date are also purged as these are legacy
    # All repositories can have the repodata directory and symlinks purged
    # But we must keep the rpm files of the most recent successful build of
    # each project as other symlinks not being purged will be pointing to them.
    topurge = getCommits(session,
                         limit=0,
                         before=int(mktime(timeparsed.timetuple()))
                         ).all()

    fullpurge = []
    for commit in topurge:
        if commit.flags & FLAG_PURGED:
            continue

        if is_commit_in_dirs(commit, options.exclude_dirs):
            # The commit RPMs are in one of the directories
            # that should not be touched.
            logger.info("Ignoring commit %s for %s, it is in one of the"
                        " excluded directories" % (commit.id,
                                                   commit.project_name))
            continue

        datadir = os.path.join(cp.get('DEFAULT', 'datadir'), "repos",
                               commit.getshardedcommitdir())
        if commit.project_name not in fullpurge and commit.status == "SUCCESS":
            # So we have not removed any commit from this project yet, and it
            # is successful. Is it the newest one?
            previouscommits = getCommits(session,
                                         project=commit.project_name,
                                         since=commit.dt_build,
                                         with_status='SUCCESS').count()

            if previouscommits == 0:
                logger.info("Keeping old commit for %s" % commit.project_name)
                continue  # this is the newest commit for this project, keep it

            try:
                for entry in os.listdir(datadir):
                    entry = os.path.join(datadir, entry)
                    if entry.endswith(".rpm") and not os.path.islink(entry):
                        continue
                    if os.path.isdir(entry):
                        logger.info("Remove %s" % entry)
                        if options.dry_run is False:
                            shutil.rmtree(entry)
                    else:
                        logger.info("Delete %s" % entry)
                        if options.dry_run is False:
                            os.unlink(entry)
            except OSError:
                logger.warning("Cannot access directory %s for purge,"
                               " ignoring." % datadir)
            fullpurge.append(commit.project_name)
            commit.flags |= FLAG_PURGED
            logger.info("Remove %s" % datadir)
            if options.dry_run is False:
                shutil.rmtree(datadir, ignore_errors=True)
        else:
            # If the commit was not successful, we need to be careful not to
            # remove the directory if there was a successful build
            if commit.status != "SUCCESS":
                othercommits = session.query(Commit).filter(
                    Commit.project_name == commit.project_name,
                    Commit.commit_hash == commit.commit_hash,
                    Commit.status == 'SUCCESS').count()

                if othercommits == 0:
                    logger.info("Remove %s" % datadir)
                    if options.dry_run is False:
                        shutil.rmtree(datadir, ignore_errors=True)
            else:
                logger.info("Remove %s" % datadir)
                if options.dry_run is False:
                    shutil.rmtree(datadir, ignore_errors=True)
            commit.flags |= FLAG_PURGED
    if options.dry_run is False:
        session.commit()
    closeSession(session)
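
is_commit_in_dirs is used above but not defined in the snippet; a minimal sketch of what such a check could look like, assuming the comma-separated commit.rpms field seen in the build examples above (not DLRN's actual implementation):

import os


def is_commit_in_dirs(commit, dirlist):
    # Hedged sketch: True when any RPM built from this commit also
    # appears in one of the excluded directories (comma-separated list).
    if not dirlist or not commit.rpms:
        return False
    rpm_names = set(os.path.basename(rpm) for rpm in commit.rpms.split(","))
    for dirname in dirlist.split(","):
        try:
            entries = set(os.listdir(dirname))
        except OSError:
            continue
        if rpm_names & entries:
            return True
    return False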
Example #52
def purge():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--older-than',
                        help="Purge builds older than provided value"
                             " (in days).", required=True)
    parser.add_argument('-y', help="Answer \"yes\" to any questions",
                        action="store_true")
    parser.add_argument('--dry-run', help="Do not change anything, show"
                        " what changes would be made",
                        action="store_true")
    parser.add_argument('--exclude-dirs', help="Do not remove commits whose"
                        " packages are included in one of the specifided"
                        " directories (comma-separated list).")

    options = parser.parse_args(sys.argv[1:])

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    timeparsed = datetime.now() - timedelta(days=int(options.older_than))

    if options.y is False:
        ans = input(("Remove all data before %s, correct? [N/y] " %
                     timeparsed.ctime()))
        if ans.lower() != "y":
            return

    session = getSession(cp.get('DEFAULT', 'database_connection'))

    # To remove builds we have to start at a point in time and move backwards
    # builds with no build date are also purged as these are legacy
    # All repositories can have the repodata directory and symlinks purged
    # But we must keep the rpm files of the most recent successful build of
    # each project as other symlinks not being purged will be pointing to them.
    topurge = getCommits(session,
                         limit=0,
                         before=int(mktime(timeparsed.timetuple()))
                         ).all()

    fullpurge = []
    for commit in topurge:
        if commit.flags & FLAG_PURGED:
            continue

        if is_commit_in_dirs(commit, options.exclude_dirs):
            # The commit RPMs are in one of the directories
            # that should not be touched.
            logger.info("Ignoring commit %s for %s, it is in one of the"
                        " excluded directories" % (commit.id,
                                                   commit.project_name))
            continue

        datadir = os.path.join(cp.get('DEFAULT', 'datadir'), "repos",
                               commit.getshardedcommitdir())
        if commit.project_name not in fullpurge and commit.status == "SUCCESS":
            # So we have not removed any commit from this project yet, and it
            # is successful. Is it the newest one?
            previouscommits = getCommits(session,
                                         project=commit.project_name,
                                         since=commit.dt_build,
                                         with_status='SUCCESS').count()

            if previouscommits == 0:
                logger.info("Keeping old commit for %s" % commit.project_name)
                continue  # this is the newest commit for this project, keep it

            try:
                for entry in os.listdir(datadir):
                    entry = os.path.join(datadir, entry)
                    if entry.endswith(".rpm") and not os.path.islink(entry):
                        continue
                    if os.path.isdir(entry):
                        logger.info("Remove %s" % entry)
                        if options.dry_run is False:
                            shutil.rmtree(entry)
                    else:
                        logger.info("Delete %s" % entry)
                        if options.dry_run is False:
                            os.unlink(entry)
            except OSError:
                logger.warning("Cannot access directory %s for purge,"
                               " ignoring." % datadir)
            fullpurge.append(commit.project_name)
            commit.flags |= FLAG_PURGED
            logger.info("Remove %s" % datadir)
            if options.dry_run is False:
                shutil.rmtree(datadir, ignore_errors=True)
        else:
            # If the commit was not successful, we need to be careful not to
            # remove the directory if there was a successful build
            if commit.status != "SUCCESS":
                othercommits = session.query(Commit).filter(
                    Commit.project_name == commit.project_name,
                    Commit.commit_hash == commit.commit_hash,
                    Commit.status == 'SUCCESS').count()

                if othercommits == 0:
                    logger.info("Remove %s" % datadir)
                    if options.dry_run is False:
                        shutil.rmtree(datadir, ignore_errors=True)
            else:
                logger.info("Remove %s" % datadir)
                if options.dry_run is False:
                    shutil.rmtree(datadir, ignore_errors=True)
            commit.flags |= FLAG_PURGED
    if options.dry_run is False:
        session.commit()
    closeSession(session)
Example #53
def promotions_GET():
    # commit_hash(optional): commit hash
    # distro_hash(optional): distro hash
    # promote_name(optional): only report promotions for promote_name
    # offset(optional): skip the first X promotions (only 100 are shown
    #                   per query)
    commit_hash = request.json.get('commit_hash', None)
    distro_hash = request.json.get('distro_hash', None)
    promote_name = request.json.get('promote_name', None)
    offset = request.json.get('offset', 0)
    limit = request.json.get('limit', 100)

    config_options = _get_config_options(app.config['CONFIG_FILE'])

    # Make sure we do not exceed the maximum allowed limit
    if limit > max_limit:
        limit = max_limit

    if ((commit_hash and not distro_hash) or
            (distro_hash and not commit_hash)):

        raise InvalidUsage('Both commit_hash and distro_hash must be '
                           'specified if any of them is.',
                           status_code=400)

    # Find the commit id for commit_hash/distro_hash
    session = getSession(app.config['DB_PATH'])
    if commit_hash and distro_hash:
        commit = _get_commit(session, commit_hash, distro_hash)
        if commit is None:
            raise InvalidUsage('commit_hash+distro_hash combination not found',
                               status_code=404)
        commit_id = commit.id
    else:
        commit_id = None

    # Now find the promotions, and filter if necessary
    promotions = session.query(Promotion)
    if commit_id is not None:
        promotions = promotions.filter(Promotion.commit_id == commit_id)
    if promote_name is not None:
        promotions = promotions.filter(
            Promotion.promotion_name == promote_name)

    promotions = promotions.order_by(desc(Promotion.timestamp)).limit(limit).\
        offset(offset)

    # And format the output
    data = []
    for promotion in promotions:
        commit = getCommits(session, limit=0).filter(
            Commit.id == promotion.commit_id).first()

        repo_hash = _repo_hash(commit)
        repo_url = "%s/%s" % (config_options.baseurl,
                              commit.getshardedcommitdir())

        d = {'timestamp': promotion.timestamp,
             'commit_hash': commit.commit_hash,
             'distro_hash': commit.distro_hash,
             'repo_hash': repo_hash,
             'repo_url': repo_url,
             'promote_name': promotion.promotion_name,
             'user': promotion.user}
        data.append(d)
    closeSession(session)
    return jsonify(data)
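
A hedged client-side example, assuming the handler is mounted at /api/promotions (the route decorator is not part of the snippet, and the URL is illustrative):

import requests

resp = requests.get('http://localhost:5000/api/promotions',  # assumed URL
                    json={'promote_name': 'current-passed-ci', 'offset': 0})
for promotion in resp.json():
    print(promotion['promote_name'], promotion['repo_url'])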
Example #54
def saveYAML(session, yamlfile):
    # Function header assumed from the saveYAML(session, yamlfile) call in
    # the __main__ block below; the snippet started mid-function.
    data = {}
    attrs = []
    for a in dir(Commit):
        if type(getattr(Commit, a)) == \
                sqlalchemy.orm.attributes.InstrumentedAttribute:
            attrs.append(a)
    data['commits'] = []
    for commit in session.query(Commit).all():
        d = {}
        for a in attrs:
            d[a] = str(getattr(commit, a))
        data['commits'].append(d)
    fp = open(yamlfile, "w")
    fp.write(yaml.dump(data, default_flow_style=False))
    fp.close()


def dumpshas2file(shafile, commit, source_repo, distgit_repo,
                  status, timestamp):
    shafile.write("%s,%s,%s,%s,%s,%s,%d\n" % (commit.project_name, source_repo,
                                              commit.commit_hash, distgit_repo,
                                              commit.distro_hash, status,
                                              timestamp)
                  )


if __name__ == '__main__':
    s = getSession('sqlite:///%s' % sys.argv[1])
    saveYAML(s, sys.argv[1] + ".yaml")
    s = getSession('sqlite://')
    loadYAML(s, sys.argv[1] + ".yaml")
    print(s.query(Commit).first().project_name)
Example #55
def mocked_session(url):
    session = db.getSession(url)
    utils.loadYAML(session, './dlrn/tests/samples/commits_1.yaml')
    return session