def resolve_packageStatus(self, info, **args): project_name = args.get("projectName", None) status = args.get("status", None) if project_name: packages = [{'name': project_name}] else: # The only canonical source of information for the package list # is rdoinfo (or whatever pkginfo driver we use) config_options = _get_config_options(app.config['CONFIG_FILE']) pkginfo_driver = config_options.pkginfo_driver pkginfo = import_object(pkginfo_driver, cfg_options=config_options) packages = pkginfo.getpackages(tags=config_options.tags) i = 0 result = [] session = _get_db() for package in packages: pkg = package['name'] commits = getCommits(session, project=pkg, limit=1) # No builds if commits.count() == 0: if not status or status == 'NO_BUILD': result.append({'id': i, 'project_name': pkg, 'status': 'NO_BUILD'}) i += 1 continue last_build = commits.first() # last build was successful if last_build.status == "SUCCESS": if not status or status == 'SUCCESS': result.append({'id': i, 'project_name': pkg, 'status': 'SUCCESS'}) else: if not status or status == last_build.status: # Retrieve last successful build commits = getCommits(session, project=pkg, with_status="SUCCESS", limit=1) # No successful builds if commits.count() == 0: last_success = datetime(1970, 1, 1, 0, 0, 0) else: last_success = datetime.fromtimestamp( commits.first().dt_build) result.append({'id': i, 'project_name': pkg, 'status': 'FAILED', 'last_success': last_success, 'first_failure_commit': last_build.commit_hash}) i += 1 return result
def test_build_rpm_wrapper(self, sh_mock): commit = db.getCommits(self.session)[-1] build_rpm_wrapper(commit, False, False, False, None, True) # git and build_rpms have been called self.assertEqual(sh_mock.call_count, 2) self.assertTrue(os.path.exists(os.path.join(self.config.datadir, "dlrn-1.cfg")))
def get_report(): package_name = request.args.get('package', None) success = request.args.get('success', None) offset = request.args.get('offset', 0) if success is not None: if bool(strtobool(success)): with_status = "SUCCESS" else: with_status = "FAILED" else: with_status = None session = getSession(app.config['DB_PATH']) commits = getCommits(session, without_status="RETRY", project=package_name, with_status=with_status, limit=pagination_limit, offset=offset) count = commits.count() config_options = _get_config_options(app.config['CONFIG_FILE']) closeSession(session) return render_template('report.j2', reponame='Detailed build report', target=config_options.target, src=config_options.source, project_name=config_options.project_name, commits=commits, count=count, limit=pagination_limit)
def test_first_failed(self): commits = db.getCommits(self.session, project="python-pysaml2", with_status="FAILED", order="asc") self.assertEqual(commits.count(), 1) self.assertEqual(commits.first().id, 5874)
def test_first_failed_since(self): commits = db.getCommits(self.session, project="python-alembic", with_status="FAILED", order="asc", since="1442487440") self.assertEqual(commits.count(), 1) self.assertEqual(commits.first().id, 6230)
def test_build(self, ld_mock, sh_mock, env_mock, rc_mock): self.configfile.set('DEFAULT', 'build_driver', 'dlrn.drivers.mockdriver.MockBuildDriver') self.config = ConfigOptions(self.configfile) commit = db.getCommits(self.session)[-1] try: build([], commit, None, False, False, False, True) except Exception as e: self.assertIn("No rpms built for", str(e))
def test_build_rpm_wrapper_mock_config(self, wm_mock, ld_mock, bp_mock, sh_mock, env_mock, rc_mock): self.configfile.set('kojibuild_driver', 'fetch_mock_config', 'True') self.configfile.set('DEFAULT', 'build_driver', 'dlrn.drivers.kojidriver.KojiBuildDriver') self.config = ConfigOptions(self.configfile) commit = db.getCommits(self.session)[-1] build_rpm_wrapper(commit, False, False, False, None, True) self.assertEqual(wm_mock.call_count, 1)
def get_report(): package_name = request.args.get('package', None) success = request.args.get('success', None) component = request.args.get('component', None) offset = request.args.get('offset', 0) if success is not None: if bool(strtobool(success)): with_status = "SUCCESS" else: with_status = "FAILED" else: with_status = None session = _get_db() commits = getCommits(session, without_status="RETRY", project=package_name, with_status=with_status, limit=pagination_limit, offset=offset, component=component) count = commits.count() config_options = _get_config_options(app.config['CONFIG_FILE']) commits_build_dir = {} for commit in commits: version = '' release = '' commit_dir = commit.getshardedcommitdir() src_package = find_in_artifacts(commit.artifacts, r'\w+.src.rpm') if src_package: splitted_name = re.split(r'-\d+\.[0-9]{14}.[0-9a-f]{7}\.', src_package.split('/')[-1]) # NOTE(dpawlik): Release is predictable, but versioning not. # So better is to take value after the split. version = splitted_name[0].replace(commit.project_name + '-', '') release = re.findall(r'\d+\.[0-9]{14}.[0-9a-f]{7}\.\w+', src_package.split('/')[-1]) key = "%s_%s_%s" % (commit.commit_hash, commit.distro_hash, commit.extended_hash) commits_build_dir[key] = { 'build_dir': "%s/%s" % (config_options.baseurl, commit_dir), 'version': version, 'release': ''.join(release) } return render_template('report.j2', reponame='Detailed build report', target=config_options.target, src=config_options.source, project_name=config_options.project_name, commits=commits, count=count, limit=pagination_limit, commits_build_dir=commits_build_dir)
def test_build_rpm_wrapper_release_numbering(self, ld_mock, sh_mock, env_mock, rc_mock): self.configfile.set('DEFAULT', 'build_driver', 'dlrn.drivers.mockdriver.MockBuildDriver') self.configfile.set('DEFAULT', 'release_numbering', 'minor.date.hash') self.configfile.set('DEFAULT', 'release_minor', '2') self.config = ConfigOptions(self.configfile) commit = db.getCommits(self.session)[-1] build_rpm_wrapper(commit, False, False, False, None, True) self.assertEqual(os.environ['RELEASE_NUMBERING'], 'minor.date.hash') self.assertEqual(os.environ['RELEASE_MINOR'], '2')
def test_build_rpm_wrapper(self, ld_mock, sh_mock, env_mock, rc_mock): self.configfile.set('DEFAULT', 'build_driver', 'dlrn.drivers.mockdriver.MockBuildDriver') self.config = ConfigOptions(self.configfile) commit = db.getCommits(self.session)[-1] build_rpm_wrapper(commit, False, False, False, None, True) # 3 sh calls: # 1- build_srpm.sh # 2- mock (handled by env_mock) # 3- restorecon (handled by rc_mock) self.assertEqual(env_mock.call_count, 2) self.assertEqual(rc_mock.call_count, 1) self.assertTrue( os.path.exists(os.path.join(self.config.datadir, "dlrn-1.cfg")))
def test_build_rpm_wrapper(self, ld_mock, sh_mock, env_mock, rc_mock): self.configfile.set('DEFAULT', 'build_driver', 'dlrn.drivers.mockdriver.MockBuildDriver') self.config = ConfigOptions(self.configfile) commit = db.getCommits(self.session)[-1] build_rpm_wrapper(commit, False, False, False, None, True) # 3 sh calls: # 1- build_srpm.sh # 2- mock (handled by env_mock) # 3- restorecon (handled by rc_mock) self.assertEqual(env_mock.call_count, 2) self.assertEqual(rc_mock.call_count, 1) self.assertTrue(os.path.exists(os.path.join(self.config.datadir, "dlrn-1.cfg")))
def get_civotes_detail(): commit_hash = request.args.get('commit_hash', None) distro_hash = request.args.get('distro_hash', None) ci_name = request.args.get('ci_name', None) success = request.args.get('success', None) offset = request.args.get('offset', 0) session = getSession(app.config['DB_PATH']) votes = session.query(CIVote) votes = votes.filter(CIVote.ci_name != 'consistent') if commit_hash and distro_hash: commit = session.query(Commit).filter( Commit.status == 'SUCCESS', Commit.commit_hash == commit_hash, Commit.distro_hash == distro_hash).first() votes = votes.from_self().filter(CIVote.commit_id == commit.id) elif ci_name: votes = votes.filter(CIVote.ci_name == ci_name) else: raise InvalidUsage( "Please specify either commit_hash+distro_hash or " "ci_name as parameters.", status_code=400) votes = votes.offset(offset).limit(pagination_limit) if success is not None: votes = votes.from_self().filter( CIVote.ci_vote == bool(strtobool(success))) votelist = votes.all() count = votes.count() for i in range(len(votelist)): commit = getCommits( session, limit=0).filter(Commit.id == votelist[i].commit_id).first() votelist[i].commit_hash = commit.commit_hash votelist[i].distro_hash = commit.distro_hash votelist[i].distro_hash_short = commit.distro_hash[:8] closeSession(session) config_options = _get_config_options(app.config['CONFIG_FILE']) return render_template('votes.j2', target=config_options.target, votes=votelist, count=count, limit=pagination_limit)
def get_civotes(): session = getSession(app.config['DB_PATH']) offset = request.args.get('offset', 0) votes = session.query(CIVote) votes = votes.filter(CIVote.ci_name != 'consistent') votes = votes.order_by(desc(CIVote.timestamp)) votes = votes.offset(offset).limit(pagination_limit) count = votes.count() # Let's find all individual commit_hash + distro_hash combinations commit_id_list = [] for vote in votes: if vote.commit_id not in commit_id_list: commit_id_list.append(vote.commit_id) # Populate list for commits repolist = [] for commit_id in commit_id_list: commit = getCommits(session, limit=0).filter(Commit.id == commit_id).first() repodetail = RepoDetail() repodetail.commit_hash = commit.commit_hash repodetail.distro_hash = commit.distro_hash repodetail.distro_hash_short = commit.distro_hash[:8] repodetail.success = votes.from_self().filter( CIVote.commit_id == commit_id, CIVote.ci_vote == 1).count() repodetail.failure = votes.from_self().filter( CIVote.commit_id == commit_id, CIVote.ci_vote == 0).count() repodetail.timestamp = votes.from_self().filter( CIVote.commit_id == commit_id).order_by(desc(CIVote.timestamp)).\ first().timestamp repolist.append(repodetail) repolist = sorted(repolist, key=lambda repo: repo.timestamp, reverse=True) closeSession(session) config_options = _get_config_options(app.config['CONFIG_FILE']) return render_template('votes_general.j2', target=config_options.target, repodetail=repolist, count=count, limit=pagination_limit)
def get_civotes(): session = getSession(app.config['DB_PATH']) offset = request.args.get('offset', 0) votes = session.query(CIVote) votes = votes.filter(CIVote.ci_name != 'consistent') votes = votes.order_by(desc(CIVote.timestamp)) votes = votes.offset(offset).limit(pagination_limit) count = votes.count() # Let's find all individual commit_hash + distro_hash combinations commit_id_list = [] for vote in votes: if vote.commit_id not in commit_id_list: commit_id_list.append(vote.commit_id) # Populate list for commits repolist = [] for commit_id in commit_id_list: commit = getCommits(session, limit=0).filter( Commit.id == commit_id).first() repodetail = RepoDetail() repodetail.commit_hash = commit.commit_hash repodetail.distro_hash = commit.distro_hash repodetail.distro_hash_short = commit.distro_hash[:8] repodetail.success = votes.from_self().filter( CIVote.commit_id == commit_id, CIVote.ci_vote == 1).count() repodetail.failure = votes.from_self().filter( CIVote.commit_id == commit_id, CIVote.ci_vote == 0).count() repodetail.timestamp = votes.from_self().filter( CIVote.commit_id == commit_id).order_by(desc(CIVote.timestamp)).\ first().timestamp repolist.append(repodetail) repolist = sorted(repolist, key=lambda repo: repo.timestamp, reverse=True) closeSession(session) config_options = _get_config_options(app.config['CONFIG_FILE']) return render_template('votes_general.j2', target=config_options.target, repodetail=repolist, count=count, limit=pagination_limit)
def get_civotes_detail(): commit_hash = request.args.get('commit_hash', None) distro_hash = request.args.get('distro_hash', None) ci_name = request.args.get('ci_name', None) success = request.args.get('success', None) offset = request.args.get('offset', 0) session = getSession(app.config['DB_PATH']) votes = session.query(CIVote) votes = votes.filter(CIVote.ci_name != 'consistent') if commit_hash and distro_hash: commit = _get_commit(session, commit_hash, distro_hash) votes = votes.from_self().filter(CIVote.commit_id == commit.id) elif ci_name: votes = votes.filter(CIVote.ci_name == ci_name) else: raise InvalidUsage("Please specify either commit_hash+distro_hash or " "ci_name as parameters.", status_code=400) votes = votes.offset(offset).limit(pagination_limit) if success is not None: votes = votes.from_self().filter( CIVote.ci_vote == bool(strtobool(success))) votelist = votes.all() count = votes.count() for i in range(len(votelist)): commit = getCommits(session, limit=0).filter( Commit.id == votelist[i].commit_id).first() votelist[i].commit_hash = commit.commit_hash votelist[i].distro_hash = commit.distro_hash votelist[i].distro_hash_short = commit.distro_hash[:8] closeSession(session) config_options = _get_config_options(app.config['CONFIG_FILE']) return render_template('votes.j2', target=config_options.target, votes=votelist, count=count, limit=pagination_limit)
def test_build_configdir(self, ld_mock, sh_mock, env_mock, rc_mock): configdir = tempfile.mkdtemp() self.configfile.set('DEFAULT', 'configdir', configdir) self.configfile.set('DEFAULT', 'build_driver', 'dlrn.drivers.mockdriver.MockBuildDriver') self.config = ConfigOptions(self.configfile) shutil.copyfile(os.path.join("scripts", "centos.cfg"), os.path.join(configdir, "centos.cfg")) commit = db.getCommits(self.session)[-1] expected = [mock.call('%s/centos.cfg' % configdir, '%s/dlrn-1.cfg.new' % self.config.datadir), mock.call('%s/dlrn-1.cfg.new' % self.config.datadir, '%s/dlrn-1.cfg' % self.config.datadir)] with mock.patch('shutil.copyfile', side_effect=shutil.copyfile) as cp_mock: build_rpm_wrapper(commit, False, False, False, None, True) self.assertEqual(expected, cp_mock.call_args_list)
def compare(): parser = argparse.ArgumentParser() parser.add_argument('--info-repo', help="use a local rdoinfo repo instead of " "fetching the default one using rdopkg. Only " "applies when pkginfo_driver is rdoinfo in " "projects.ini") options, args = parser.parse_known_args(sys.argv[1:]) pkginfo_driver = config_options.pkginfo_driver pkginfo_object = import_object(pkginfo_driver) packages = pkginfo_object.getpackages(local_info_repo=options.info_repo, tags=config_options.tags) compare_details = {} # Each argument is a ":"-separated filename:title; the filename is the # sqlite db file and the title is what is used in the table being displayed table_header = ["Name", "Out of Sync"] for dbdetail in args: dbfilename, dbtitle = dbdetail.split(":") table_header.extend((dbtitle + " upstream", dbtitle + " spec")) session = getSession('sqlite:///%s' % dbfilename) for package in packages: package_name = package["name"] compare_details.setdefault(package_name, [package_name, " "]) last_success = getCommits(session, project=package_name, with_status="SUCCESS").first() if last_success: compare_details[package_name].extend( (last_success.commit_hash[:8], last_success.distro_hash[:8])) else: compare_details[package_name].extend(("None", "None")) session.close() table = PrettyTable(table_header) for name, compare_detail in compare_details.items(): if len(set(compare_detail)) > 4: compare_detail[1] = "*" table.add_row(compare_detail) print(table)
def test_build_configdir(self, ld_mock, sh_mock, env_mock, rc_mock): configdir = tempfile.mkdtemp() self.configfile.set('DEFAULT', 'configdir', configdir) self.configfile.set('DEFAULT', 'build_driver', 'dlrn.drivers.mockdriver.MockBuildDriver') self.config = ConfigOptions(self.configfile) shutil.copyfile(os.path.join("scripts", "centos8.cfg"), os.path.join(configdir, "centos8.cfg")) commit = db.getCommits(self.session)[-1] expected = [ mock.call('%s/centos8.cfg' % configdir, '%s/dlrn-1.cfg.new' % self.config.datadir), mock.call('%s/dlrn-1.cfg.new' % self.config.datadir, '%s/dlrn-1.cfg' % self.config.datadir) ] with mock.patch('shutil.copyfile', side_effect=shutil.copyfile) as cp_mock: build_rpm_wrapper(commit, False, False, False, None, True) self.assertEqual(expected, cp_mock.call_args_list)
def test_last_without_retry(self): commits = db.getCommits(self.session, project="python-tripleoclient", without_status="RETRY") self.assertEqual(commits.count(), 1) self.assertEqual(commits.first().id, 7696)
def test_last_success(self): commits = db.getCommits(self.session, project="python-tripleoclient", with_status="SUCCESS") self.assertEqual(commits.count(), 1) self.assertEqual(commits.first().id, 7696)
def test_no_results(self): commits = db.getCommits(self.session, project="dummy") self.assertEqual(commits.count(), 0) self.assertEqual(commits.first(), None)
def test_commit_compare(self): commits = db.getCommits(self.session, project="python-tripleoclient") self.assertGreater(commits[0], commits[1]) self.assertLess(commits[1], commits[0]) self.assertEqual(commits[0], commits[-1])
def genreports(packages, head_only, session, all_commits): config_options = getConfigOptions() # Generate report of the last 300 package builds target = config_options.target src = config_options.source reponame = config_options.reponame templatedir = config_options.templatedir project_name = config_options.project_name datadir = config_options.datadir repodir = os.path.join(datadir, "repos") css_file = os.path.join(templatedir, 'stylesheets/styles.css') # create directories if not os.path.exists(repodir): os.makedirs(repodir) # configure jinja and filters jinja_env = jinja2.Environment( loader=jinja2.FileSystemLoader([templatedir])) jinja_env.filters["strftime"] = _jinja2_filter_strftime jinja_env.filters["get_commit_url"] = \ partial(_jinja2_filter_get_commit_url, packages=packages) # generate build report commits = getCommits(session, without_status="RETRY", limit=300) jinja_template = jinja_env.get_template("report.j2") content = jinja_template.render(reponame=reponame, src=src, project_name=project_name, target=target, commits=commits) shutil.copy2(css_file, os.path.join(repodir, "styles.css")) report_file = os.path.join(repodir, "report.html") with open(report_file, "w") as fp: fp.write(content) # Generate status report if head_only: msg = "(all commit not built)" else: msg = "" pkgs = [] # Find the most recent successful build # then report on failures since then for package in packages: name = package["name"] commits = getCommits(session, project=name, limit=1) # No builds if commits.count() == 0: continue pkgs.append(package) last_build = commits.first() package["last_build"] = last_build # last build was successful if last_build.status == "SUCCESS": continue # Retrieve last successful build commits = getCommits(session, project=name, with_status="SUCCESS", limit=1) # No successful builds if commits.count() == 0: commits = getCommits(session, project=name, with_status="FAILED", order="asc") package["first_failure"] = commits.first() package["days"] = -1 continue last_success = commits.first() last_success_dt = last_success.dt_build commits = getCommits(session, project=name, with_status="FAILED", order="asc", limit=None) commits = commits.filter(Commit.dt_build > last_success_dt) package["first_failure"] = commits.first() package["days"] = (datetime.now() - datetime.fromtimestamp(last_success_dt)).days pkgs = sorted(pkgs, key=itemgetter("name")) jinja_template = jinja_env.get_template("status_report.j2") content = jinja_template.render(msg=msg, reponame=reponame, src=src, project_name=project_name, target=target, pkgs=pkgs) report_file = os.path.join(repodir, "status_report.html") with open(report_file, "w") as fp: fp.write(content) jinja_template = jinja_env.get_template("status_report_csv.j2") content = jinja_template.render(msg=msg, reponame=reponame, src=src, project_name=project_name, target=target, pkgs=pkgs) report_file = os.path.join(repodir, "status_report.csv") with open(report_file, "w") as fp: fp.write(content) # Create a report for the pending packages jinja_template = jinja_env.get_template("queue.j2") pending_commits = [] for commit in all_commits: old_commit = getCommits(session, project=commit.project_name, without_status="RETRY", limit=None).filter( Commit.commit_hash == commit.commit_hash).filter( Commit.distro_hash == commit.distro_hash).first() if not old_commit: pending_commits.append(commit) content = jinja_template.render(reponame=reponame, src=src, target=target, commits=pending_commits) report_file = os.path.join(repodir, "queue.html") with open(report_file, "w") as fp: fp.write(content)
def post_build_rpm(status, packages, session, build_repo=True): config_options = getConfigOptions() commit = status[0] built_rpms = status[1] project_name = commit.project_name commit_hash = commit.commit_hash datadir = os.path.realpath(config_options.datadir) yumrepodir = os.path.join("repos", commit.getshardedcommitdir()) yumrepodir_abs = os.path.join(datadir, yumrepodir) shafile = open(os.path.join(yumrepodir_abs, "versions.csv"), "w") shafile.write("Project,Source Repo,Source Sha,Dist Repo,Dist Sha," "Status,Last Success Timestamp,Pkg NVR\n") failures = 0 for otherproject in packages: otherprojectname = otherproject["name"] if otherprojectname == project_name: # Output sha's this project dumpshas2file(shafile, commit, otherproject["upstream"], otherproject["master-distgit"], "SUCCESS", commit.dt_build, built_rpms) continue # Output sha's of all other projects represented in this repo last_success = getCommits(session, project=otherprojectname, with_status="SUCCESS", type=commit.type).first() last_processed = getCommits(session, project=otherprojectname, type=commit.type).first() if last_success: if build_repo: for rpm in last_success.artifacts.split(","): rpm_link_src = os.path.join(yumrepodir_abs, os.path.split(rpm)[1]) os.symlink(os.path.relpath(os.path.join(datadir, rpm), yumrepodir_abs), rpm_link_src) last = last_success else: last = last_processed if last: if last.artifacts: rpmlist = last.artifacts.split(",") else: rpmlist = [] upstream = otherproject.get('upstream', '') dumpshas2file(shafile, last, upstream, otherproject["master-distgit"], last_processed.status, last.dt_build, rpmlist) if last_processed.status != 'SUCCESS': failures += 1 else: failures += 1 shafile.close() if build_repo: # Use createrepo_c when available try: from sh import createrepo_c sh.createrepo = createrepo_c except ImportError: pass if config_options.include_srpm_in_repo: sh.createrepo(yumrepodir_abs) else: sh.createrepo('-x', '*.src.rpm', yumrepodir_abs) with open(os.path.join( yumrepodir_abs, "%s.repo" % config_options.reponame), "w") as fp: fp.write("[%s]\nname=%s-%s-%s\nbaseurl=%s/%s\nenabled=1\n" "gpgcheck=0\npriority=1\n" % ( config_options.reponame, config_options.reponame, project_name, commit_hash, config_options.baseurl, commit.getshardedcommitdir())) return failures
def genreports(cp, packages, options): global session session = getSession('sqlite:///commits.sqlite') # Generate report of the last 300 package builds target = cp.get("DEFAULT", "target") src = cp.get("DEFAULT", "source") reponame = cp.get("DEFAULT", "reponame") templatedir = cp.get("DEFAULT", "templatedir") datadir = cp.get("DEFAULT", "datadir") css_file = os.path.join(templatedir, 'stylesheets/styles.css') # configure jinja and filters jinja_env = jinja2.Environment( loader=jinja2.FileSystemLoader([templatedir])) jinja_env.filters["strftime"] = _jinja2_filter_strftime jinja_env.filters["get_commit_url"] = \ partial(_jinja2_filter_get_commit_url, packages=packages) # generate build report commits = getCommits(session, without_status="RETRY", limit=300) jinja_template = jinja_env.get_template("report.j2") content = jinja_template.render(reponame=reponame, src=src, target=target, commits=commits) shutil.copy2(css_file, os.path.join(datadir, "repos", "styles.css")) report_file = os.path.join(cp.get("DEFAULT", "datadir"), "repos", "report.html") with open(report_file, "w") as fp: fp.write(content) # Generate status report if options.head_only: msg = "(all commit not built)" else: msg = "" pkgs = [] # Find the most recent successful build # then report on failures since then for package in packages: name = package["name"] commits = getCommits(session, project=name, limit=1) # No builds if commits.count() == 0: continue pkgs.append(package) last_build = commits.first() package["last_build"] = last_build # last build was successful if last_build.status == "SUCCESS": continue # Retrieve last successful build commits = getCommits(session, project=name, with_status="SUCCESS", limit=1) # No successful builds if commits.count() == 0: commits = getCommits(session, project=name, with_status="FAILED", order="asc") package["first_failure"] = commits.first() package["days"] = -1 continue last_success = commits.first() last_success_dt = last_success.dt_build commits = getCommits(session, project=name, with_status="FAILED", order="asc", limit=None) commits = commits.filter(Commit.dt_build > last_success_dt) package["first_failure"] = commits.first() package["days"] = (datetime.now() - datetime.fromtimestamp(last_success_dt)).days pkgs = sorted(pkgs, key=itemgetter("name")) jinja_template = jinja_env.get_template("status_report.j2") content = jinja_template.render(msg=msg, reponame=reponame, src=src, target=target, pkgs=pkgs) report_file = os.path.join(cp.get("DEFAULT", "datadir"), "repos", "status_report.html") with open(report_file, "w") as fp: fp.write(content)
def purge(): parser = argparse.ArgumentParser() # Some of the non-positional arguments are required, so change the text # saying "optional arguments" to just "arguments": parser._optionals.title = 'arguments' parser.add_argument('--config-file', help="Config file (required)", required=True) parser.add_argument('--older-than', help="How old commits need to be purged " "(in days).", required=True) parser.add_argument('-y', help="Answer \"yes\" to any questions", action="store_true") options, args = parser.parse_known_args(sys.argv[1:]) cp = configparser.RawConfigParser() cp.read(options.config_file) config_options = ConfigOptions(cp) timeparsed = datetime.now() - timedelta(days=int(options.older_than)) if options.y is False: ans = raw_input(("Remove all data before %s, correct? [N/y] " % timeparsed.ctime())) if ans.lower() != "y": return session = getSession('sqlite:///commits.sqlite') # To remove builds we have to start at a point in time and move backwards # builds with no build date are also purged as these are legacy # All repositories can have the repodata directory and symlinks purged # But we must keep the rpm files of the most recent successful build of # each project as other symlinks not being purged will be pointing to them. topurge = getCommits(session, limit=0, before=int(mktime(timeparsed.timetuple())) ).all() fullpurge = [] for commit in topurge: if commit.flags & FLAG_PURGED: continue datadir = os.path.join(config_options.datadir, "repos", commit.getshardedcommitdir()) if commit.project_name not in fullpurge and commit.status == "SUCCESS": # So we have not removed any commit from this project yet, and it # is successful. Is it the newest one? previouscommits = getCommits(session, project=commit.project_name, since=commit.dt_build, with_status='SUCCESS').count() if previouscommits == 0: logger.info("Keeping old commit for %s" % commit.project_name) continue # this is the newest commit for this project, keep it try: for entry in os.listdir(datadir): entry = os.path.join(datadir, entry) if entry.endswith(".rpm") and not os.path.islink(entry): continue if os.path.isdir(entry): shutil.rmtree(entry) else: os.unlink(entry) except OSError: logger.warning("Cannot access directory %s for purge," " ignoring." % datadir) fullpurge.append(commit.project_name) commit.flags |= FLAG_PURGED else: shutil.rmtree(datadir) commit.flags |= FLAG_PURGED session.commit()
def test_build(self, sh_mock): commit = db.getCommits(self.session)[-1] try: build(None, commit, None, False, False, False, True) except Exception as e: self.assertIn("No rpms built for", str(e))
def post_build_rpm(status, packages, session, build_repo=True): config_options = getConfigOptions() commit = status[0] built_rpms = status[1] project_name = commit.project_name commit_hash = commit.commit_hash datadir = os.path.realpath(config_options.datadir) yumrepodir = os.path.join("repos", commit.getshardedcommitdir()) yumrepodir_abs = os.path.join(datadir, yumrepodir) shafile = open(os.path.join(yumrepodir_abs, "versions.csv"), "w") shafile.write("Project,Source Repo,Source Sha,Dist Repo,Dist Sha," "Status,Last Success Timestamp,Component,Extended Sha," "Pkg NVR\n") failures = 0 for otherproject in packages: if (config_options.use_components and 'component' in otherproject and otherproject['component'] != commit.component): # Only dump information and create symlinks for the same component continue otherprojectname = otherproject["name"] if otherprojectname == project_name: # Output sha's this project dumpshas2file(shafile, commit, otherproject["upstream"], otherproject["master-distgit"], "SUCCESS", commit.dt_build, commit.component, built_rpms) continue # Output sha's of all other projects represented in this repo last_success = getCommits(session, project=otherprojectname, with_status="SUCCESS", type=commit.type).first() last_processed = getCommits(session, project=otherprojectname, type=commit.type).first() if last_success: if build_repo: for rpm in last_success.artifacts.split(","): rpm_link_src = os.path.join(yumrepodir_abs, os.path.split(rpm)[1]) os.symlink( os.path.relpath(os.path.join(datadir, rpm), yumrepodir_abs), rpm_link_src) last = last_success else: last = last_processed if last: if last.artifacts: rpmlist = last.artifacts.split(",") else: rpmlist = [] upstream = otherproject.get('upstream', '') dumpshas2file(shafile, last, upstream, otherproject["master-distgit"], last_processed.status, last.dt_build, commit.component, rpmlist) if last_processed.status != 'SUCCESS': failures += 1 else: failures += 1 shafile.close() if build_repo: # Use createrepo_c when available try: from sh import createrepo_c sh.createrepo = createrepo_c except ImportError: pass if config_options.include_srpm_in_repo: sh.createrepo(yumrepodir_abs) else: sh.createrepo('-x', '*.src.rpm', yumrepodir_abs) with open( os.path.join(yumrepodir_abs, "%s.repo" % config_options.reponame), "w") as fp: if config_options.use_components: repo_id = "%s-component-%s" % (config_options.reponame, commit.component) else: repo_id = config_options.reponame fp.write( "[%s]\nname=%s-%s-%s\nbaseurl=%s/%s\nenabled=1\n" "gpgcheck=0\npriority=1\n" % (repo_id, config_options.reponame, project_name, commit_hash, config_options.baseurl, commit.getshardedcommitdir())) return failures
def genreports(packages, head_only, session, all_commits): config_options = getConfigOptions() # Generate report of the last 300 package builds target = config_options.target src = config_options.source reponame = config_options.reponame templatedir = config_options.templatedir project_name = config_options.project_name datadir = config_options.datadir repodir = os.path.join(datadir, "repos") css_file = os.path.join(templatedir, 'stylesheets/styles.css') # create directories if not os.path.exists(repodir): os.makedirs(repodir) # configure jinja and filters jinja_env = jinja2.Environment( loader=jinja2.FileSystemLoader([templatedir])) jinja_env.filters["strftime"] = _jinja2_filter_strftime jinja_env.filters["get_commit_url"] = \ partial(_jinja2_filter_get_commit_url, packages=packages) # generate build report commits = getCommits(session, without_status="RETRY", limit=300) jinja_template = jinja_env.get_template("report.j2") content = jinja_template.render(reponame=reponame, src=src, project_name=project_name, target=target, commits=commits) shutil.copy2(css_file, os.path.join(repodir, "styles.css")) report_file = os.path.join(repodir, "report.html") with open(report_file, "w") as fp: fp.write(content) # Generate status report if head_only: msg = "(all commit not built)" else: msg = "" pkgs = [] # Find the most recent successful build # then report on failures since then for package in packages: name = package["name"] commits = getCommits(session, project=name, limit=1) # No builds if commits.count() == 0: continue pkgs.append(package) last_build = commits.first() package["last_build"] = last_build # last build was successful if last_build.status == "SUCCESS": continue # Retrieve last successful build commits = getCommits(session, project=name, with_status="SUCCESS", limit=1) # No successful builds if commits.count() == 0: commits = getCommits(session, project=name, with_status="FAILED", order="asc") package["first_failure"] = commits.first() package["days"] = -1 continue last_success = commits.first() last_success_dt = last_success.dt_build commits = getCommits(session, project=name, with_status="FAILED", order="asc", limit=None) commits = commits.filter(Commit.dt_build > last_success_dt) package["first_failure"] = commits.first() package["days"] = (datetime.now() - datetime.fromtimestamp(last_success_dt)).days pkgs = sorted(pkgs, key=itemgetter("name")) jinja_template = jinja_env.get_template("status_report.j2") content = jinja_template.render(msg=msg, reponame=reponame, src=src, project_name=project_name, target=target, pkgs=pkgs) report_file = os.path.join(repodir, "status_report.html") with open(report_file, "w") as fp: fp.write(content) jinja_template = jinja_env.get_template("status_report_csv.j2") content = jinja_template.render(msg=msg, reponame=reponame, src=src, project_name=project_name, target=target, pkgs=pkgs) report_file = os.path.join(repodir, "status_report.csv") with open(report_file, "w") as fp: fp.write(content) # Create a report for the pending packages jinja_template = jinja_env.get_template("queue.j2") pending_commits = [] for commit in all_commits: old_commit = getCommits( session, project=commit.project_name, without_status="RETRY", limit=None).filter( Commit.commit_hash == commit.commit_hash).filter( Commit.distro_hash == commit.distro_hash).filter( Commit.extended_hash == commit.extended_hash).first() if not old_commit: pending_commits.append(commit) content = jinja_template.render(reponame=reponame, src=src, target=target, commits=pending_commits) report_file = os.path.join(repodir, "queue.html") with open(report_file, "w") as fp: fp.write(content)
def test_last_two(self): commits = db.getCommits(self.session, project="python-pysaml2", limit=2) self.assertEqual(commits.count(), 2) self.assertEqual([c.id for c in commits], [7835, 7834])
def promotions_GET(): # commit_hash(optional): commit hash # distro_hash(optional): distro hash # extended_hash(optional): extended hash # aggregate_hash(optional): aggregate hash # promote_name(optional): only report promotions for promote_name # offset(optional): skip the first X promotions (only 100 are shown # per query) # limit(optional): maximum number of entries to return # component(optional): only report promotions for this component commit_hash = request.args.get('commit_hash', None) distro_hash = request.args.get('distro_hash', None) extended_hash = request.args.get('extended_hash', None) agg_hash = request.args.get('aggregate_hash', None) promote_name = request.args.get('promote_name', None) offset = int(request.args.get('offset', 0)) limit = int(request.args.get('limit', 100)) component = request.args.get('component', None) if request.headers.get('Content-Type') == 'application/json': # This is the old, deprecated method of in-body parameters # We will keep it for backwards compatibility if commit_hash is None: commit_hash = request.json.get('commit_hash', None) if distro_hash is None: distro_hash = request.json.get('distro_hash', None) if extended_hash is None: extended_hash = request.json.get('extended_hash', None) if agg_hash is None: agg_hash = request.json.get('aggregate_hash', None) if promote_name is None: promote_name = request.json.get('promote_name', None) if offset == 0: offset = int(request.json.get('offset', 0)) if limit == 100: limit = int(request.json.get('limit', 100)) if component is None: component = request.json.get('component', None) config_options = _get_config_options(app.config['CONFIG_FILE']) # Make sure we do not exceed if limit > max_limit: limit = max_limit if ((commit_hash and not distro_hash) or (distro_hash and not commit_hash)): raise InvalidUsage('Both commit_hash and distro_hash must be ' 'specified if any of them is.', status_code=400) # Find the commit id for commit_hash/distro_hash session = _get_db() if commit_hash and distro_hash: commit = _get_commit(session, commit_hash, distro_hash, extended_hash) if commit is None: raise InvalidUsage('commit_hash+distro_hash+extended_hash ' 'combination not found', status_code=404) commit_id = commit.id else: commit_id = None # Now find the promotions, and filter if necessary promotions = session.query(Promotion) if commit_id is not None: promotions = promotions.filter(Promotion.commit_id == commit_id) if promote_name is not None: promotions = promotions.filter( Promotion.promotion_name == promote_name) if agg_hash is not None: promotions = promotions.filter(Promotion.aggregate_hash == agg_hash) if component is not None: promotions = promotions.filter(Promotion.component == component) promotions = promotions.order_by(desc(Promotion.id)).limit(limit).\ offset(offset) # And format the output data = [] for promotion in promotions: commit = getCommits(session, limit=0).filter( Commit.id == promotion.commit_id).first() repo_hash = _repo_hash(commit) repo_url = "%s/%s" % (config_options.baseurl, commit.getshardedcommitdir()) d = {'timestamp': promotion.timestamp, 'commit_hash': commit.commit_hash, 'distro_hash': commit.distro_hash, 'extended_hash': commit.extended_hash, 'aggregate_hash': promotion.aggregate_hash, 'repo_hash': repo_hash, 'repo_url': repo_url, 'promote_name': promotion.promotion_name, 'component': promotion.component, 'user': promotion.user} data.append(d) return jsonify(data)
def test_defaults(self): commits = db.getCommits(self.session) self.assertEqual(commits.count(), 1) self.assertEqual(commits.first().id, 7873)
def build(packages, commit, env_vars, dev_mode, use_public, bootstrap): # Set the build timestamp to now commit.dt_build = int(time()) project_name = commit.project_name datadir = os.path.realpath(config_options.datadir) yumrepodir = os.path.join("repos", commit.getshardedcommitdir()) yumrepodir_abs = os.path.join(datadir, yumrepodir) commit_hash = commit.commit_hash try: build_rpm_wrapper(commit, dev_mode, use_public, bootstrap, env_vars) except Exception as e: raise Exception("Error in build_rpm_wrapper for %s: %s" % (project_name, e)) built_rpms = [] for rpm in os.listdir(yumrepodir_abs): if rpm.endswith(".rpm"): built_rpms.append(os.path.join(yumrepodir, rpm)) if not built_rpms: raise Exception("No rpms built for %s" % project_name) notes = "OK" if not os.path.isfile(os.path.join(yumrepodir_abs, "installed")): logger.error('Build failed. See logs at: %s/%s/' % (datadir, yumrepodir)) raise Exception("Error installing %s" % project_name) else: # Overwrite installed file, adding the repo reference with open(os.path.join(yumrepodir_abs, "installed"), "w") as fp: fp.write("%s %s %s" % (commit.project_name, commit.commit_hash, commit.distro_hash)) shafile = open(os.path.join(yumrepodir_abs, "versions.csv"), "w") shafile.write("Project,Source Repo,Source Sha,Dist Repo,Dist Sha," "Status,Last Success Timestamp\n") failures = 0 for otherproject in packages: otherprojectname = otherproject["name"] if otherprojectname == project_name: # Output sha's this project dumpshas2file(shafile, commit, otherproject["upstream"], otherproject["master-distgit"], "SUCCESS", commit.dt_build) continue # Output sha's of all other projects represented in this repo last_success = getCommits(session, project=otherprojectname, with_status="SUCCESS").first() last_processed = getLastProcessedCommit(session, otherprojectname, 'INVALID STATE') if last_success: for rpm in last_success.rpms.split(","): rpm_link_src = os.path.join(yumrepodir_abs, os.path.split(rpm)[1]) os.symlink(os.path.relpath(os.path.join(datadir, rpm), yumrepodir_abs), rpm_link_src) last = last_success else: last = last_processed if last: dumpshas2file(shafile, last, otherproject["upstream"], otherproject["master-distgit"], last_processed.status, last.dt_build) if last_processed.status != 'SUCCESS': failures += 1 else: failures += 1 shafile.close() # Use createrepo_c when available try: from sh import createrepo_c sh.createrepo = createrepo_c except ImportError: pass sh.createrepo(yumrepodir_abs) with open(os.path.join( yumrepodir_abs, "%s.repo" % config_options.reponame), "w") as fp: fp.write("[%s]\nname=%s-%s-%s\nbaseurl=%s/%s\nenabled=1\n" "gpgcheck=0\npriority=1" % (config_options.reponame, config_options.reponame, project_name, commit_hash, config_options.baseurl, commit.getshardedcommitdir())) dirnames = ['current'] if failures == 0: dirnames.append('consistent') else: logger.info('%d packages not built correctly: not updating the ' 'consistent symlink' % failures) for dirname in dirnames: target_repo_dir = os.path.join(datadir, "repos", dirname) os.symlink(os.path.relpath(yumrepodir_abs, os.path.join(datadir, "repos")), target_repo_dir + "_") os.rename(target_repo_dir + "_", target_repo_dir) return built_rpms, notes
def promotions_GET(): # commit_hash(optional): commit hash # distro_hash(optional): distro hash # promote_name(optional): only report promotions for promote_name # offset(optional): skip the first X promotions (only 100 are shown # per query) commit_hash = request.json.get('commit_hash', None) distro_hash = request.json.get('distro_hash', None) promote_name = request.json.get('promote_name', None) offset = request.json.get('offset', 0) limit = request.json.get('limit', 100) config_options = _get_config_options(app.config['CONFIG_FILE']) # Make sure we do not exceed if limit > max_limit: limit = max_limit if ((commit_hash and not distro_hash) or (distro_hash and not commit_hash)): raise InvalidUsage('Both commit_hash and distro_hash must be ' 'specified if any of them is.', status_code=400) # Find the commit id for commit_hash/distro_hash session = getSession(app.config['DB_PATH']) if commit_hash and distro_hash: commit = _get_commit(session, commit_hash, distro_hash) if commit is None: raise InvalidUsage('commit_hash+distro_hash combination not found', status_code=404) commit_id = commit.id else: commit_id = None # Now find the promotions, and filter if necessary promotions = session.query(Promotion) if commit_id is not None: promotions = promotions.filter(Promotion.commit_id == commit_id) if promote_name is not None: promotions = promotions.filter( Promotion.promotion_name == promote_name) promotions = promotions.order_by(desc(Promotion.timestamp)).limit(limit).\ offset(offset) # And format the output data = [] for promotion in promotions: commit = getCommits(session, limit=0).filter( Commit.id == promotion.commit_id).first() repo_hash = _repo_hash(commit) repo_url = "%s/%s" % (config_options.baseurl, commit.getshardedcommitdir()) d = {'timestamp': promotion.timestamp, 'commit_hash': commit.commit_hash, 'distro_hash': commit.distro_hash, 'repo_hash': repo_hash, 'repo_url': repo_url, 'promote_name': promotion.promotion_name, 'user': promotion.user} data.append(d) closeSession(session) return jsonify(data)
def purge(): parser = argparse.ArgumentParser() # Some of the non-positional arguments are required, so change the text # saying "optional arguments" to just "arguments": parser._optionals.title = 'arguments' parser.add_argument('--config-file', help="Config file (required)", required=True) parser.add_argument('--older-than', help="Purge builds older than provided value" " (in days).", required=True) parser.add_argument('-y', help="Answer \"yes\" to any questions", action="store_true") parser.add_argument('--dry-run', help="Do not change anything, show" " what changes would be made", action="store_true") parser.add_argument('--exclude-dirs', help="Do not remove commits whose" " packages are included in one of the specified" " directories (comma-separated list).") parser.add_argument('--debug', action='store_true', help="Print debug logs") options = parser.parse_args(sys.argv[1:]) setup_logging(options.debug) cp = configparser.RawConfigParser() cp.read(options.config_file) timeparsed = datetime.now() - timedelta(days=int(options.older_than)) if options.y is False: ans = input(("Remove all data before %s, correct? [N/y] " % timeparsed.ctime())) if ans.lower() != "y": return session = getSession(cp.get('DEFAULT', 'database_connection')) # To remove builds we have to start at a point in time and move backwards # builds with no build date are also purged as these are legacy # All repositories can have the repodata directory and symlinks purged # But we must keep the rpm files of the most recent successful build of # each project as other symlinks not being purged will be pointing to them. topurge = getCommits(session, limit=0, before=int(mktime(timeparsed.timetuple())) ).all() fullpurge = [] for commit in topurge: if commit.flags & FLAG_PURGED: continue if is_commit_in_dirs(commit, options.exclude_dirs): # The commit RPMs are in one of the directories # that should not be touched. logger.info("Ignoring commit %s for %s, it is in one of the" " excluded directories" % (commit.id, commit.project_name)) continue datadir = os.path.join(cp.get('DEFAULT', 'datadir'), "repos", commit.getshardedcommitdir()) if commit.project_name not in fullpurge and commit.status == "SUCCESS": # So we have not removed any commit from this project yet, and it # is successful. Is it the newest one? previouscommits = getCommits(session, project=commit.project_name, since=commit.dt_build, with_status='SUCCESS').count() if previouscommits == 0: logger.info("Keeping old commit for %s" % commit.project_name) continue # this is the newest commit for this project, keep it try: for entry in os.listdir(datadir): entry = os.path.join(datadir, entry) if entry.endswith(".rpm") and not os.path.islink(entry): continue if os.path.isdir(entry): logger.info("Remove %s" % entry) if options.dry_run is False: shutil.rmtree(entry) else: logger.info("Delete %s" % entry) if options.dry_run is False: os.unlink(entry) except OSError: logger.warning("Cannot access directory %s for purge," " ignoring." % datadir) fullpurge.append(commit.project_name) commit.flags |= FLAG_PURGED logger.info("Remove %s" % datadir) if options.dry_run is False: shutil.rmtree(datadir, ignore_errors=True) else: # If the commit was not successful, we need to be careful not to # remove the directory if there was a successful build if commit.status != "SUCCESS": othercommits = session.query(Commit).filter( Commit.project_name == commit.project_name, Commit.commit_hash == commit.commit_hash, Commit.status == 'SUCCESS').count() if othercommits == 0: logger.info("Remove %s" % datadir) if options.dry_run is False: shutil.rmtree(datadir, ignore_errors=True) else: logger.info("Remove %s" % datadir) if options.dry_run is False: shutil.rmtree(datadir, ignore_errors=True) commit.flags |= FLAG_PURGED if options.dry_run is False: session.commit() closeSession(session)
def purge(): parser = argparse.ArgumentParser() # Some of the non-positional arguments are required, so change the text # saying "optional arguments" to just "arguments": parser._optionals.title = 'arguments' parser.add_argument('--config-file', help="Config file (required)", required=True) parser.add_argument('--older-than', help="Purge builds older than provided value" " (in days).", required=True) parser.add_argument('-y', help="Answer \"yes\" to any questions", action="store_true") parser.add_argument('--dry-run', help="Do not change anything, show" " what changes would be made", action="store_true") parser.add_argument('--exclude-dirs', help="Do not remove commits whose" " packages are included in one of the specified" " directories (comma-separated list).") options = parser.parse_args(sys.argv[1:]) cp = configparser.RawConfigParser() cp.read(options.config_file) timeparsed = datetime.now() - timedelta(days=int(options.older_than)) if options.y is False: ans = raw_input(("Remove all data before %s, correct? [N/y] " % timeparsed.ctime())) if ans.lower() != "y": return session = getSession(cp.get('DEFAULT', 'database_connection')) # To remove builds we have to start at a point in time and move backwards # builds with no build date are also purged as these are legacy # All repositories can have the repodata directory and symlinks purged # But we must keep the rpm files of the most recent successful build of # each project as other symlinks not being purged will be pointing to them. topurge = getCommits(session, limit=0, before=int(mktime(timeparsed.timetuple())) ).all() fullpurge = [] for commit in topurge: if commit.flags & FLAG_PURGED: continue if is_commit_in_dirs(commit, options.exclude_dirs): # The commit RPMs are in one of the directories # that should not be touched. logger.info("Ignoring commit %s for %s, it is in one of the" " excluded directories" % (commit.id, commit.project_name)) continue datadir = os.path.join(cp.get('DEFAULT', 'datadir'), "repos", commit.getshardedcommitdir()) if commit.project_name not in fullpurge and commit.status == "SUCCESS": # So we have not removed any commit from this project yet, and it # is successful. Is it the newest one? previouscommits = getCommits(session, project=commit.project_name, since=commit.dt_build, with_status='SUCCESS').count() if previouscommits == 0: logger.info("Keeping old commit for %s" % commit.project_name) continue # this is the newest commit for this project, keep it try: for entry in os.listdir(datadir): entry = os.path.join(datadir, entry) if entry.endswith(".rpm") and not os.path.islink(entry): continue if os.path.isdir(entry): logger.info("Remove %s" % entry) if options.dry_run is False: shutil.rmtree(entry) else: logger.info("Delete %s" % entry) if options.dry_run is False: os.unlink(entry) except OSError: logger.warning("Cannot access directory %s for purge," " ignoring." % datadir) fullpurge.append(commit.project_name) commit.flags |= FLAG_PURGED logger.info("Remove %s" % datadir) if options.dry_run is False: shutil.rmtree(datadir, ignore_errors=True) else: # If the commit was not successful, we need to be careful not to # remove the directory if there was a successful build if commit.status != "SUCCESS": othercommits = session.query(Commit).filter( Commit.project_name == commit.project_name, Commit.commit_hash == commit.commit_hash, Commit.status == 'SUCCESS').count() if othercommits == 0: logger.info("Remove %s" % datadir) if options.dry_run is False: shutil.rmtree(datadir, ignore_errors=True) else: logger.info("Remove %s" % datadir) if options.dry_run is False: shutil.rmtree(datadir, ignore_errors=True) commit.flags |= FLAG_PURGED if options.dry_run is False: session.commit() closeSession(session)
def promotions_GET(): # commit_hash(optional): commit hash # distro_hash(optional): distro hash # promote_name(optional): only report promotions for promote_name # offset(optional): skip the first X promotions (only 100 are shown # per query) if request.headers['Content-Type'] != 'application/json': raise InvalidUsage('Unsupported Media Type, use JSON', status_code=415) commit_hash = request.json.get('commit_hash', None) distro_hash = request.json.get('distro_hash', None) promote_name = request.json.get('promote_name', None) offset = request.json.get('offset', 0) limit = request.json.get('limit', 100) config_options = _get_config_options(app.config['CONFIG_FILE']) # Make sure we do not exceed if limit > max_limit: limit = max_limit if ((commit_hash and not distro_hash) or (distro_hash and not commit_hash)): raise InvalidUsage( 'Both commit_hash and distro_hash must be ' 'specified if any of them is.', status_code=400) # Find the commit id for commit_hash/distro_hash session = getSession(app.config['DB_PATH']) if commit_hash and distro_hash: commit = session.query(Commit).filter( Commit.status == 'SUCCESS', Commit.commit_hash == commit_hash, Commit.distro_hash == distro_hash).first() if commit is None: raise InvalidUsage('commit_hash+distro_hash combination not found', status_code=404) commit_id = commit.id else: commit_id = None # Now find the promotions, and filter if necessary promotions = session.query(Promotion) if commit_id is not None: promotions = promotions.filter(Promotion.commit_id == commit_id) if promote_name is not None: promotions = promotions.filter( Promotion.promotion_name == promote_name) promotions = promotions.order_by(desc(Promotion.timestamp)).limit(limit).\ offset(offset) # And format the output data = [] for promotion in promotions: commit = getCommits( session, limit=0).filter(Commit.id == promotion.commit_id).first() repo_hash = _repo_hash(commit) repo_url = "%s/%s" % (config_options.baseurl, commit.getshardedcommitdir()) d = { 'timestamp': promotion.timestamp, 'commit_hash': commit.commit_hash, 'distro_hash': commit.distro_hash, 'repo_hash': repo_hash, 'repo_url': repo_url, 'promote_name': promotion.promotion_name, 'user': promotion.user } data.append(d) closeSession(session) return jsonify(data)