def update_puppet_uc():
    """Refresh puppet module pins in upper-constraints.txt.

    Clones every non-OpenStack ``puppet*`` module listed in rdo.yml,
    resolves its most recent tag and appends a ``module===tag`` line to
    upper-constraints.txt, then runs ./update-uc.py for UC_RELEASE.
    Modules whose tag cannot be resolved are skipped (best effort).
    """
    # Start from a clean checkout area so stale clones cannot interfere.
    if os.path.exists(os.path.join(".", "modules")):
        shutil.rmtree("./modules")
    info = rdoinfo.parse_info_file('rdo.yml')
    puppet_info = []
    for package in info['packages']:
        if package['name'].startswith('puppet'):
            puppet_info.append([package['name'], package['upstream']])
    for package in puppet_info:
        url = package[1]
        if 'openstack' in url:
            # Do not bump OpenStack modules
            continue
        module = package[0]
        gitpath = os.path.join("modules", module)
        sh.git.clone(url, gitpath)
        git = sh.git.bake(_cwd=gitpath, _tty_out=False)
        try:
            # Newest tagged commit, then its human-readable tag name.
            rev_list = str(git('rev-list', '--tags', '--max-count=1')).strip()
            tag = str(git.describe('--tags', rev_list)).strip()
            with open('upper-constraints.txt', 'a') as fp:
                fp.write("%s===%s\n" % (module, tag))
        except Exception:
            # Best effort: a module without tags (or a git failure) is
            # skipped rather than aborting the whole run.
            continue
        finally:
            # BUGFIX: the clone was previously removed only on success,
            # leaking the checkout whenever tag resolution failed.
            shutil.rmtree(gitpath)
    update_uc = sh.Command('./update-uc.py')
    update_uc(UC_RELEASE)
def verify(fn, include_fns=None):
    """Sanity-check an rdoinfo file.

    Parses *fn* (plus any *include_fns*), dumps the parsed structure to
    stdout, validates the buildsys tags of every package and prints a
    confirmation line when everything checks out.

    :param fn: path to the rdoinfo YAML file
    :param include_fns: optional list of additional include files
    """
    # BUGFIX: avoid the shared mutable default argument (include_fns=[]);
    # build a fresh list per call instead.
    if include_fns is None:
        include_fns = []
    info = rdoinfo.parse_info_file(fn, include_fns=include_fns)
    print(yaml.dump(info))
    buildsystags = list_buildsys_tags(info)
    for pkg in info['packages']:
        verify_buildsys_tags(pkg, buildsystags)
    print("\n%s looks OK" % fn)
def update_puppet_uc():
    """Pin the latest tag of each non-OpenStack puppet module.

    Reads rdo.yml, clones every puppet-prefixed module hosted outside
    openstack, records its newest tag in upper-constraints.txt and then
    invokes ./update-uc.py for UC_RELEASE.
    """
    # Wipe any leftover checkout directory from a previous run.
    if os.path.exists(os.path.join(".", "modules")):
        shutil.rmtree("./modules")
    info = rdoinfo.parse_info_file("rdo.yml")
    modules = [
        (entry["name"], entry["upstream"])
        for entry in info["packages"]
        if entry["name"].startswith("puppet")
    ]
    for name, upstream in modules:
        if "openstack" in upstream:
            # Do not bump OpenStack modules
            continue
        checkout = os.path.join("modules", name)
        sh.git.clone(upstream, checkout)
        repo = sh.git.bake(_cwd=checkout, _tty_out=False)
        try:
            newest = str(repo("rev-list", "--tags", "--max-count=1")).strip()
            version = str(repo.describe("--tags", newest)).strip()
            with open("upper-constraints.txt", "a") as out:
                out.write("%s===%s\n" % (name, version))
        except Exception:
            # A module we cannot describe is skipped entirely.
            continue
        shutil.rmtree(checkout)
    sh.Command("./update-uc.py")(UC_RELEASE)
def find_tags_for_source_branch(package, source_branch):
    """Return the rdoinfo tags of *package* that track *source_branch*.

    :param package: package name to look up in rdo.yml
    :param source_branch: git branch name, e.g. 'master' or 'stable/xyz'
    :returns: list of matching tag names (possibly empty);
              ``[MASTER_TAG]`` when the package is still under review
    """
    info = rdoinfo.parse_info_file('/tmp/rdoinfo/rdo.yml', include_fns=[])
    tags = []
    for pkg in info['packages']:
        if pkg['name'] != package:
            continue
        # If the package is under review, always return the master tag
        if 'under-review' in pkg['tags']:
            return [MASTER_TAG]
        # A second possible case is that the source branch is either
        # master or stable/something. In that case, if the corresponding
        # tag value is None, add it
        if source_branch == 'master':
            # BUGFIX: use 'is None' instead of '== None' (PEP 8), and
            # guard against a missing master tag instead of raising
            # KeyError.
            if MASTER_TAG in pkg['tags'] and pkg['tags'][MASTER_TAG] is None:
                tags.append(MASTER_TAG)
        elif source_branch.startswith('stable/'):
            tag = source_branch.replace('stable/', '')
            if tag in pkg['tags'] and pkg['tags'][tag] is None:
                tags.append(tag)
        # Finally, let's check all tags to find the source_branch
        for tag, tag_info in pkg['tags'].items():
            if tag_info is not None and \
                    tag_info.get('source-branch') == source_branch:
                tags.append(tag)
        return tags
    # Unknown package: return an empty list so callers can iterate safely.
    return tags
def main():
    """Build new upstream commits for every package in the info file.

    Parses CLI options, loads package metadata, opens (and creates if
    needed) the local commits.sqlite database, collects every commit
    newer than the last one recorded per project, then builds each one,
    recording SUCCESS/FAILED rows and mailing on failure.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file', help="Config file")
    parser.add_argument('--info-file', help="Package info file")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repo's if possible")
    # parse_known_args: unrecognized options are tolerated, not fatal.
    options, args = parser.parse_known_args(sys.argv[1:])
    package_info = rdoinfo.parse_info_file(options.info_file)
    cp = ConfigParser.RawConfigParser()
    cp.read(options.config_file)
    # SQLite DB in the working directory holds the build history.
    engine = create_engine('sqlite:///commits.sqlite')
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    global session
    session = Session()
    # Build a list of commits we need to process
    toprocess = []
    for package in package_info["packages"]:
        project = package["name"]
        since = "-1"
        # Most recent commit already recorded for this project, if any.
        commit = session.query(Commit).filter(Commit.project_name == project).\
            order_by(desc(Commit.dt_commit)).first()
        if commit:
            # +1 second so the already-built commit is excluded.
            since = "--after=%d" % (commit.dt_commit + 1)
        repo = package["upstream"]
        spec = package["master-distgit"]
        # getinfo appends candidate commits to toprocess in place.
        getinfo(cp, project, repo, spec, toprocess, since, options.local)
    # Sort chronologically (tuples start with the commit timestamp).
    toprocess.sort()
    for dt, commit, project, repo_dir in toprocess:
        logger.info("Processing %s %s" % (project, commit))
        notes = ""
        try:
            built_rpms, notes = build(cp, package_info, dt, project,
                                      repo_dir, commit)
        except Exception as e:
            logger.exception("Error while building packages for %s" % project)
            # Record the failure; e.message is Python 2 style, fall back
            # to the accumulated notes when it is absent.
            session.add(Commit(dt_commit=dt, project_name=project,
                               commit_hash=commit, status="FAILED",
                               notes=getattr(e, "message", notes)))
            sendnotifymail(cp, package_info, project, commit)
        else:
            session.add(Commit(dt_commit=dt, project_name=project,
                               rpms=",".join(built_rpms),
                               commit_hash=commit, status="SUCCESS",
                               notes=notes))
        # Commit after every build so progress survives a crash.
        session.commit()
    genreport(cp)
def find_source_branch(package, tag):
    """Map an rdoinfo *tag* of *package* back to its git source branch.

    Returns 'master' for packages under review, the explicitly
    configured source-branch when the tag defines one, otherwise a
    branch name derived from the tag ('master' for MASTER_TAG,
    'stable/<tag>' for anything else). Returns None when the package
    is not present in rdo.yml.
    """
    info = rdoinfo.parse_info_file('/tmp/rdoinfo/rdo.yml', include_fns=[])
    for entry in info['packages']:
        if entry['name'] != package:
            continue
        pkg_tags = entry['tags']
        # Packages still under review always build from master.
        if 'under-review' in pkg_tags:
            return 'master'
        for key in pkg_tags:
            if key != tag:
                continue
            details = pkg_tags[key]
            if details is not None and 'source-branch' in details:
                # An explicit source branch wins over the derived name.
                return details['source-branch']
        # No explicit mapping: derive the branch from the tag name.
        return 'master' if tag == MASTER_TAG else "stable/%s" % tag
    return None
def main():
    """Build pending upstream commits and return a process exit code.

    Parses CLI options, loads package metadata, opens commits.sqlite,
    collects unbuilt commits per project (optionally only the newest,
    or a single package), builds each one, records SUCCESS/FAILED rows,
    writes a fallback rpmbuild.log and mails on failure, and regenerates
    reports. Returns 0 on full success, 1 if any build failed.
    """
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'
    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--info-file',
                        help="Package info file (required)", required=True)
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    parser.add_argument('--package-name',
                        help="Build a specific package name only.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    options, args = parser.parse_known_args(sys.argv[1:])
    package_info = rdoinfo.parse_info_file(options.info_file)
    cp = configparser.RawConfigParser()
    cp.read(options.config_file)
    # Local SQLite DB holds the build history.
    engine = create_engine('sqlite:///commits.sqlite')
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    global session
    session = Session()
    # Build a list of commits we need to process
    toprocess = []
    for package in package_info["packages"]:
        project = package["name"]
        since = "-1"
        # Latest recorded commit; id is the tie-breaker for equal dates.
        commit = session.query(Commit).filter(Commit.project_name == project).\
            order_by(desc(Commit.dt_commit)).\
            order_by(desc(Commit.id)).first()
        if commit:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        repo = package["upstream"]
        spec = package["master-distgit"]
        if not options.package_name or package["name"] == options.package_name:
            project_toprocess = getinfo(cp, project, repo, spec, since,
                                        options.local, options.dev, package)
            # If since == -1, then we only want to trigger a build for the
            # most recent change
            if since == "-1" or options.head_only:
                del project_toprocess[:-1]
            # The first entry in the list of commits is a commit we have
            # already processed, we want to process it again only if in dev
            # mode or spec hash has changed, we can't simply check against the
            # last commit in the db, as multiple commits can have the same
            # commit date
            for commit_toprocess in project_toprocess:
                if (options.dev is True) or \
                    (not session.query(Commit).filter(
                        Commit.project_name == project,
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.spec_hash == commit_toprocess.spec_hash).all()):
                    toprocess.append(commit_toprocess)
    toprocess.sort()
    exit_code = 0
    for commit in toprocess:
        project = commit.project_name
        # Per-project bookkeeping row, mainly for email rate limiting.
        project_info = session.query(Project).filter(
            Project.project_name == project).first()
        if not project_info:
            project_info = Project(project_name=project, last_email=0)
        commit_hash = commit.commit_hash
        logger.info("Processing %s %s" % (project, commit_hash))
        notes = ""
        try:
            built_rpms, notes = build(cp, package_info, commit,
                                      options.build_env, options.dev)
        except Exception as e:
            # Any failed build makes the whole run exit non-zero.
            exit_code = 1
            logger.exception("Error while building packages for %s" % project)
            commit.status = "FAILED"
            # e.message is Python 2 style; fall back to notes if absent.
            commit.notes = getattr(e, "message", notes)
            session.add(commit)
            # If the log file hasn't been created we add what we have
            # This happens if the rpm build script didn't run.
            datadir = os.path.realpath(cp.get("DEFAULT", "datadir"))
            logfile = os.path.join(datadir, "repos",
                                   commit.getshardedcommitdir(),
                                   "rpmbuild.log")
            if not os.path.exists(logfile):
                fp = open(logfile, "w")
                fp.write(getattr(e, "message", notes))
                fp.close()
            if not project_info.suppress_email():
                sendnotifymail(cp, package_info, commit)
                project_info.sent_email()
        else:
            commit.status = "SUCCESS"
            commit.notes = notes
            commit.rpms = ",".join(built_rpms)
            session.add(commit)
        # Dev mode never persists results to the database.
        if options.dev is False:
            session.commit()
        genreports(cp, package_info)
    genreports(cp, package_info)
    return exit_code
def verify(fn):
    """Parse rdoinfo file *fn*, dump the result and report success.

    :param fn: path to the rdoinfo YAML file to validate
    """
    info = rdoinfo.parse_info_file(fn)
    # BUGFIX: parenthesized print calls; the Python 2 print statement is
    # a SyntaxError under Python 3, while this form works on both.
    print(yaml.dump(info))
    print("\n%s looks OK" % fn)
def main():
    """Build pending upstream commits recorded in commits.sqlite.

    Python 2 variant (ConfigParser module): parses CLI options, loads
    package metadata, collects unbuilt commits per project (optionally
    only the newest, or a single package), builds each one, records
    SUCCESS/FAILED rows, writes a fallback rpmbuild.log and mails on
    failure, and regenerates reports.
    """
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'
    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--info-file',
                        help="Package info file (required)", required=True)
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    parser.add_argument('--package-name',
                        help="Build a specific package name only.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    options, args = parser.parse_known_args(sys.argv[1:])
    package_info = rdoinfo.parse_info_file(options.info_file)
    cp = ConfigParser.RawConfigParser()
    cp.read(options.config_file)
    # Local SQLite DB holds the build history.
    engine = create_engine('sqlite:///commits.sqlite')
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    global session
    session = Session()
    # Build a list of commits we need to process
    toprocess = []
    for package in package_info["packages"]:
        project = package["name"]
        since = "-1"
        # Latest recorded commit; id is the tie-breaker for equal dates.
        commit = session.query(Commit).filter(Commit.project_name == project).\
            order_by(desc(Commit.dt_commit)).\
            order_by(desc(Commit.id)).first()
        if commit:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        repo = package["upstream"]
        spec = package["master-distgit"]
        if not options.package_name or package["name"] == options.package_name:
            project_toprocess = getinfo(cp, project, repo, spec, since,
                                        options.local, options.dev)
            # If since == -1, then we only want to trigger a build for the
            # most recent change
            if since == "-1" or options.head_only:
                del project_toprocess[:-1]
            # The first entry in the list of commits is a commit we have
            # already processed, we want to process it again only if in dev
            # mode or spec hash has changed, we can't simply check against the
            # last commit in the db, as multiple commits can have the same
            # commit date
            for commit_toprocess in project_toprocess:
                if (options.dev is True) or \
                    (not session.query(Commit).filter(
                        Commit.project_name == project,
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.spec_hash == commit_toprocess.spec_hash).all()):
                    toprocess.append(commit_toprocess)
    toprocess.sort()
    for commit in toprocess:
        project = commit.project_name
        # Per-project bookkeeping row, mainly for email rate limiting.
        project_info = session.query(Project).filter(
            Project.project_name == project).first()
        if not project_info:
            project_info = Project(project_name=project, last_email=0)
        commit_hash = commit.commit_hash
        logger.info("Processing %s %s" % (project, commit_hash))
        notes = ""
        try:
            built_rpms, notes = build(cp, package_info, commit,
                                      options.build_env, options.dev)
        except Exception as e:
            logger.exception("Error while building packages for %s" % project)
            commit.status = "FAILED"
            # e.message is Python 2 style; fall back to notes if absent.
            commit.notes = getattr(e, "message", notes)
            session.add(commit)
            # If the log file hasn't been created we add what we have
            # This happens if the rpm build script didn't run.
            datadir = os.path.realpath(cp.get("DEFAULT", "datadir"))
            logfile = os.path.join(datadir, "repos",
                                   commit.getshardedcommitdir(),
                                   "rpmbuild.log")
            if not os.path.exists(logfile):
                fp = open(logfile, "w")
                fp.write(getattr(e, "message", notes))
                fp.close()
            if not project_info.suppress_email():
                sendnotifymail(cp, package_info, commit)
                project_info.sent_email()
        else:
            commit.status = "SUCCESS"
            commit.notes = notes
            commit.rpms = ",".join(built_rpms)
            session.add(commit)
        # Dev mode never persists results to the database.
        if options.dev is False:
            session.commit()
        genreports(cp, package_info)
    genreports(cp, package_info)