def test_getsessions(self, ce_mock, sm_mock):
    """Sessions are cached per URL; repeated calls reuse the cached one."""
    db.getSession()
    db.getSession(url="sqlite:///test.db")
    # A repeated call with the default URL must not create a new session.
    db.getSession()
    self.assertEqual(2, len(sm_mock.call_args_list))
    self.assertEqual(ce_mock.call_args_list,
                     [mock.call('sqlite://'),
                      mock.call('sqlite:///test.db')])
def compare():
    """Compare package build status across several delorean databases.

    Each positional CLI argument is a "dbfilename:title" pair: the sqlite
    database file to read and the column title used for it in the output.
    Prints a PrettyTable with, per package, the most recently successful
    upstream/spec commit hashes from each database, marking the
    "Out of Sync" column with "*" when the databases disagree.
    """
    parser = argparse.ArgumentParser()
    # BUGFIX: the two adjacent string literals previously concatenated to
    # "instead offetching" because the first one lacked a trailing space.
    parser.add_argument('--info-repo',
                        help="use local rdoinfo repo instead of "
                             "fetching default one using rdopkg")
    options, args = parser.parse_known_args(sys.argv[1:])
    # NOTE(review): `cp` is read here but not defined locally — presumably a
    # module-level config parser; confirm against the full file.
    packages = getpackages(local_info_repo=options.info_repo,
                           tags=cp.get("DEFAULT", "tags"))
    compare_details = {}
    # Each argument is a ":" separated filename:title; the filename is the
    # sqlite db file and the title is what's shown in the displayed table.
    table_header = ["Name", "Out of Sync"]
    for dbdetail in args:
        dbfilename, dbtitle = dbdetail.split(":")
        table_header.extend((dbtitle + " upstream", dbtitle + " spec"))

        session = getSession('sqlite:///%s' % dbfilename)

        for package in packages:
            package_name = package["name"]
            # Row layout: [name, sync-marker, then a hash pair per database].
            compare_details.setdefault(package_name, [package_name, " "])
            last_success = getCommits(session, project=package_name,
                                      with_status="SUCCESS").first()
            if last_success:
                compare_details[package_name].extend(
                    (last_success.commit_hash[:8],
                     last_success.distro_hash[:8]))
            else:
                compare_details[package_name].extend(("None", "None"))
        session.close()

    table = PrettyTable(table_header)
    for name, compare_detail in compare_details.items():
        # When every database reports the same hash pair, the row holds at
        # most 4 distinct values (name, marker, upstream hash, spec hash);
        # more distinct values means the databases are out of sync.
        if len(set(compare_detail)) > 4:
            compare_detail[1] = "*"
        table.add_row(compare_detail)
    print(table)
def genreports(cp, packages, options):
    """Render the HTML build report and status report from the commits DB.

    :param cp: parsed config; the DEFAULT section supplies target, source,
               reponame, templatedir and datadir
    :param packages: list of package dicts; mutated in place — last_build,
                     first_failure and days are attached to failing packages
    :param options: parsed CLI options; only head_only is read here
    """
    # NOTE(review): rebinds the module-level session on every call.
    global session
    session = getSession('sqlite:///commits.sqlite')
    # Generate report of the last 300 package builds
    target = cp.get("DEFAULT", "target")
    src = cp.get("DEFAULT", "source")
    reponame = cp.get("DEFAULT", "reponame")
    templatedir = cp.get("DEFAULT", "templatedir")
    datadir = cp.get("DEFAULT", "datadir")
    css_file = os.path.join(templatedir, 'stylesheets/styles.css')

    # configure jinja and filters
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader([templatedir]))
    jinja_env.filters["strftime"] = _jinja2_filter_strftime
    jinja_env.filters["get_commit_url"] = \
        partial(_jinja2_filter_get_commit_url, packages=packages)

    # generate build report (RETRY commits are transient, so exclude them)
    commits = getCommits(session, without_status="RETRY", limit=300)
    jinja_template = jinja_env.get_template("report.j2")
    content = jinja_template.render(reponame=reponame,
                                    src=src,
                                    target=target,
                                    commits=commits)
    shutil.copy2(css_file, os.path.join(datadir, "repos", "styles.css"))
    report_file = os.path.join(cp.get("DEFAULT", "datadir"),
                               "repos", "report.html")
    with open(report_file, "w") as fp:
        fp.write(content)

    # Generate status report
    if options.head_only:
        msg = "(all commit not built)"
    else:
        msg = ""
    pkgs = []
    # Find the most recent successful build,
    # then report on failures since then
    for package in packages:
        name = package["name"]
        commits = getCommits(session, project=name, limit=1)
        # No builds at all for this package: skip it entirely
        if commits.count() == 0:
            continue
        pkgs.append(package)
        last_build = commits.first()
        package["last_build"] = last_build
        # Last build was successful: nothing further to report
        if last_build.status == "SUCCESS":
            continue
        # Retrieve last successful build
        commits = getCommits(session, project=name, with_status="SUCCESS",
                             limit=1)
        # No successful builds: first_failure is the oldest FAILED commit
        # and days == -1 flags "never succeeded"
        if commits.count() == 0:
            commits = getCommits(session, project=name, with_status="FAILED",
                                 order="asc")
            package["first_failure"] = commits.first()
            package["days"] = -1
            continue
        last_success = commits.first()
        last_success_dt = last_success.dt_build
        # First failure strictly after the last success, plus days elapsed
        commits = getCommits(session, project=name, with_status="FAILED",
                             order="asc", limit=None)
        commits = commits.filter(Commit.dt_build > last_success_dt)
        package["first_failure"] = commits.first()
        package["days"] = (datetime.now() -
                           datetime.fromtimestamp(last_success_dt)).days
    pkgs = sorted(pkgs, key=itemgetter("name"))
    jinja_template = jinja_env.get_template("status_report.j2")
    content = jinja_template.render(msg=msg,
                                    reponame=reponame,
                                    src=src,
                                    target=target,
                                    pkgs=pkgs)
    report_file = os.path.join(cp.get("DEFAULT", "datadir"),
                               "repos", "status_report.html")
    with open(report_file, "w") as fp:
        fp.write(content)
def test_getsession(self, sm_mock):
    """A single getSession() call builds exactly one sessionmaker."""
    db.getSession()
    sessionmaker_calls = sm_mock.call_args_list
    self.assertEqual(len(sessionmaker_calls), 1)
def setUp(self):
    """Give each test a fresh session preloaded with sample commit data."""
    super(TestsWithData, self).setUp()
    # new=True presumably bypasses the cached session — see db.getSession.
    session = db.getSession(new=True)
    self.session = session
    utils.loadYAML(self.session, './delorean/tests/samples/commits_1.yaml')
def main():
    """Command-line entry point.

    Parses options, then either reports status (--status), forces a rebuild
    of one package (--recheck), or walks every package building any upstream
    commits newer than the last processed one, recording results in the
    commits database and regenerating the HTML reports after each build.

    Returns the process exit code (0 on success, 1 if any build failed).
    """
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required).",
                        required=True)
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of "
                             "fetching the default one using rdopkg.")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    parser.add_argument('--package-name',
                        help="Build a specific package name only.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by delorean.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                             "Imply --package-name")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Imply --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")

    options, args = parser.parse_known_args(sys.argv[1:])

    cp = configparser.RawConfigParser(default_options)
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)

    global session
    session = getSession('sqlite:///commits.sqlite')
    packages = getpackages(local_info_repo=options.info_repo,
                           tags=cp.get("DEFAULT", "tags"))

    # --status: print per-package build status and exit.
    if options.status is True:
        if options.package_name:
            names = (options.package_name, )
        else:
            names = [p['name'] for p in packages]
        for name in names:
            # NOTE(review): 'invalid status' presumably disables the status
            # filter in getLastProcessedCommit — confirm in its definition.
            commit = getLastProcessedCommit(session, name, 'invalid status')
            if commit:
                print(name, commit.status)
            else:
                print(name, 'NO_BUILD')
        sys.exit(0)

    # --recheck: force a rebuild of one package's last failed commit.
    if options.recheck is True:
        if not options.package_name:
            logger.error('Please use --package-name with --recheck.')
            sys.exit(1)
        commit = getLastProcessedCommit(session, options.package_name)
        if commit:
            if commit.status == 'SUCCESS':
                logger.error("Trying to recheck an already successful commit,"
                             " ignoring.")
                sys.exit(1)
            elif commit.status == 'RETRY':
                # In this case, we are going to retry anyway, so
                # do nothing and exit
                logger.warning("Trying to recheck a commit in RETRY state,"
                               " ignoring.")
                sys.exit(0)
            else:
                # We could set the status to RETRY here, but if we have gone
                # beyond max_retries it wouldn't work as expected. Thus, our
                # only chance is to remove the commit
                session.delete(commit)
                session.commit()
                sys.exit(0)
        else:
            logger.error("There are no existing commits for package %s"
                         % options.package_name)
            sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True

    # Build a list of commits we need to process
    toprocess = []
    for package in packages:
        project = package["name"]
        since = "-1"
        commit = getLastProcessedCommit(session, project)
        if commit:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        repo = package["upstream"]
        distro = package["master-distgit"]
        if not options.package_name or package["name"] == options.package_name:
            project_toprocess = getinfo(cp, project, repo, distro, since,
                                        options.local, options.dev, package)
            # If since == -1, then we only want to trigger a build for the
            # most recent change
            if since == "-1" or options.head_only:
                del project_toprocess[:-1]

            # The first entry in the list of commits is a commit we have
            # already processed, we want to process it again only if in dev
            # mode or distro hash has changed, we can't simply check against
            # the last commit in the db, as multiple commits can have the same
            # commit date
            for commit_toprocess in project_toprocess:
                if ((options.dev is True) or
                        options.run or
                        (not session.query(Commit).filter(
                            Commit.project_name == project,
                            Commit.commit_hash == commit_toprocess.commit_hash,
                            Commit.distro_hash == commit_toprocess.distro_hash,
                            Commit.status != "RETRY")
                            .all())):
                    toprocess.append(commit_toprocess)

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True and not options.package_name:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([p['name'] for p in packages])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            specpath = os.path.join(cp.get("DEFAULT", "datadir"),
                                    project_name + "_distro",
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            rawspec = open(specpath).read(-1)
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                  for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                return cmp(a.dt_commit, b.dt_commit)
            return cmp(orders.index(a.project_name),
                       orders.index(b.project_name))
        toprocess.sort(cmp=my_cmp)
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    gerrit = cp.get("DEFAULT", "gerrit")
    for commit in toprocess:
        project = commit.project_name

        project_info = session.query(Project).filter(
            Project.project_name == project).first()
        if not project_info:
            project_info = Project(project_name=project, last_email=0)

        commit_hash = commit.commit_hash

        # --run: execute the program once per commit instead of building.
        if options.run:
            try:
                run(options.run, cp, commit, options.build_env,
                    options.dev, options.use_public, options.order,
                    do_build=False)
            except Exception as e:
                exit_code = 1
                if options.stop:
                    return exit_code
                pass
            continue

        logger.info("Processing %s %s" % (project, commit_hash))

        notes = ""
        try:
            built_rpms, notes = build(cp, packages,
                                      commit, options.build_env, options.dev,
                                      options.use_public, options.order)
        except Exception as e:
            exit_code = 1
            datadir = os.path.realpath(cp.get("DEFAULT", "datadir"))
            logfile = os.path.join(datadir, "repos",
                                   commit.getshardedcommitdir(),
                                   "rpmbuild.log")
            max_retries = cp.getint("DEFAULT", "maxretries")
            # Known transient failures get re-queued (RETRY) up to
            # maxretries; anything else is a hard FAILED.
            if (isknownerror(logfile) and
                (timesretried(project, commit_hash, commit.distro_hash) <
                 max_retries)):
                logger.exception("Known error building packages for %s,"
                                 " will retry later" % project)
                commit.status = "RETRY"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            else:
                # If the log file hasn't been created we add what we have
                # This happens if the rpm build script didn't run.
                if not os.path.exists(logfile):
                    with open(logfile, "w") as fp:
                        fp.write(getattr(e, "message", notes))

                if not project_info.suppress_email():
                    sendnotifymail(cp, packages, commit)
                    project_info.sent_email()
                    session.add(project_info)

                # allow to submit a gerrit review only if the last build was
                # successful or non existent to avoid creating a gerrit review
                # for the same problem multiple times.
                if gerrit is not None:
                    if options.build_env:
                        env_vars = list(options.build_env)
                    else:
                        env_vars = []
                    last_build = getLastProcessedCommit(session, project)
                    if not last_build or last_build.status == 'SUCCESS':
                        # for/else: pkg stays None when no package matches.
                        for pkg in packages:
                            if project == pkg['name']:
                                break
                        else:
                            pkg = None
                        if pkg:
                            url = (get_commit_url(commit, pkg) +
                                   commit.commit_hash)
                            env_vars.append('GERRIT_URL=%s' % url)
                            env_vars.append('GERRIT_LOG=%s/%s' %
                                            (cp.get("DEFAULT", "baseurl"),
                                             commit.getshardedcommitdir()))
                            maintainers = ','.join(pkg['maintainers'])
                            env_vars.append('GERRIT_MAINTAINERS=%s' %
                                            maintainers)
                            logger.info('Creating a gerrit review using '
                                        'GERRIT_URL=%s '
                                        'GERRIT_MAINTAINERS=%s ' %
                                        (url, maintainers))
                            try:
                                submit_review(cp, commit, env_vars)
                            except Exception:
                                logger.error('Unable to create review '
                                             'see review.log')
                        else:
                            logger.error('Unable to find info for project %s' %
                                         project)
                    else:
                        logger.info('Last build not successful '
                                    'for %s' % project)
                commit.status = "FAILED"
                commit.notes = getattr(e, "message", notes)
                session.add(commit)
            if options.stop:
                return exit_code
        else:
            # Build succeeded: record the produced rpms.
            commit.status = "SUCCESS"
            commit.notes = notes
            commit.rpms = ",".join(built_rpms)
            session.add(commit)
        # In dev mode nothing is committed to the database.
        if options.dev is False:
            session.commit()
        genreports(cp, packages, options)

    # If we were bootstrapping, set the packages that required it to RETRY
    if options.order is True and not options.package_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(cp, packages, options)
    return exit_code
# NOTE(review): the statements below, down to fp.close(), read like the tail
# of a saveYAML-style serializer whose "def" line falls outside this chunk
# (they reference data/session/yamlfile that are not defined here) — confirm
# against the full file before relying on them at module level.
attrs = []
# Collect the names of every mapped column on the Commit model.
for a in dir(Commit):
    if type(getattr(Commit, a)) == \
            sqlalchemy.orm.attributes.InstrumentedAttribute:
        attrs.append(a)
data['commits'] = []
# Serialize each commit row as a dict of stringified attribute values.
for commit in session.query(Commit).all():
    d = {}
    for a in attrs:
        d[a] = str(getattr(commit, a))
    data['commits'].append(d)
fp = open(yamlfile, "w")
fp.write(yaml.dump(data, default_flow_style=False))
fp.close()


def dumpshas2file(shafile, commit, source_repo, distgit_repo, status,
                  timestamp):
    """Append one CSV row (project, repos, hashes, status, timestamp)."""
    shafile.write("%s,%s,%s,%s,%s,%s,%d\n" % (commit.project_name,
                                              source_repo,
                                              commit.commit_hash,
                                              distgit_repo,
                                              commit.distro_hash,
                                              status,
                                              timestamp))


if __name__ == '__main__':
    # Round-trip smoke test: dump the given sqlite DB to YAML, reload the
    # YAML into an in-memory DB and print the first project name.
    s = getSession('sqlite:///%s' % sys.argv[1])
    saveYAML(s, sys.argv[1] + ".yaml")
    s = getSession('sqlite://')
    loadYAML(s, sys.argv[1] + ".yaml")
    print(s.query(Commit).first().project_name)
# NOTE(review): this chunk duplicates the previous one almost verbatim and
# begins mid-function — `attrs` is referenced without being defined here, so
# the enclosing "def" line falls outside this view. Confirm against the full
# file; likely only one copy belongs in it.
# Collect the names of every mapped column on the Commit model.
for a in dir(Commit):
    if type(getattr(Commit, a)) == \
            sqlalchemy.orm.attributes.InstrumentedAttribute:
        attrs.append(a)
data['commits'] = []
# Serialize each commit row as a dict of stringified attribute values.
for commit in session.query(Commit).all():
    d = {}
    for a in attrs:
        d[a] = str(getattr(commit, a))
    data['commits'].append(d)
fp = open(yamlfile, "w")
fp.write(yaml.dump(data, default_flow_style=False))
fp.close()


def dumpshas2file(shafile, commit, source_repo, distgit_repo, status,
                  timestamp):
    """Append one CSV row (project, repos, hashes, status, timestamp)."""
    shafile.write("%s,%s,%s,%s,%s,%s,%d\n" % (commit.project_name,
                                              source_repo,
                                              commit.commit_hash,
                                              distgit_repo,
                                              commit.distro_hash,
                                              status,
                                              timestamp)
                  )


if __name__ == '__main__':
    # Round-trip smoke test: dump the given sqlite DB to YAML, reload the
    # YAML into an in-memory DB and print the first project name.
    s = getSession('sqlite:///%s' % sys.argv[1])
    saveYAML(s, sys.argv[1] + ".yaml")
    s = getSession('sqlite://')
    loadYAML(s, sys.argv[1] + ".yaml")
    print(s.query(Commit).first().project_name)