def create_sample_distgit(name, version='1.2.3', release='1', path=None,
                          magic_comments=None):
    """Create a sample dist-git repo with a generated .spec file.

    :param name: package name (also default directory name)
    :param version: Version tag for the spec
    :param release: Release tag; '%{?dist}' is appended when missing
    :param path: target directory (defaults to `name`); must not exist
    :param magic_comments: extra text injected into the spec template
    :returns: absolute path of the created repo
    """
    path = path or name
    assert not os.path.exists(path)
    if "%{?dist}" not in release:
        release += "%{?dist}"
    os.makedirs(path)
    magic_comments = magic_comments or ''
    with helpers.cdir(path):
        spec_text = SAMPLE_SPEC.format(name=name, version=version,
                                       release=release,
                                       magic_comments=magic_comments)
        sample_spec = specfile.Spec(fn='%s.spec' % name, txt=spec_text)
        sample_spec.set_tag('Name', name)
        sample_spec.save()
        # turn the directory into a git repo with one initial commit
        git('init')
        git('add', '.')
        git('commit', '-m', 'Initial import', isolated=True)
    return os.path.abspath(path)
def _push_pkg(upf):
    """Push all RPMs referenced by one update file into the dest repos.

    Copies each build's packages into the destination repo, records the
    pushed file list, and commits the move of the update file into the
    pushed dir.  On any failure, packages copied so far are removed.

    NOTE(review): relies on `self`, `updated_repos` and
    `updated_repo_bases` from the enclosing scope (closure) — confirm
    against the surrounding definition.
    """
    log.info("\nPushing update {t.bold}{upf}{t.normal}".format(
        t=log.term, upf=upf))
    update = self._load_update_file(upf)
    pushed_rpms = []
    try:
        _updated_repos = set()
        _updated_repo_bases = set()
        _pushed_build_tmp_paths = []
        for build in update.builds:
            src_path = self._build_tmp_path(upf, build)
            # same tmp path already processed -> skip duplicate build
            if src_path in _pushed_build_tmp_paths:
                continue
            build_rpms = helpers.find_files(src_path, ext='.rpm')
            dest_repo_base_path = self._dest_repo_base_path(build.repo)
            if not os.path.isdir(dest_repo_base_path):
                raise exception.NotADirectory(path=dest_repo_base_path)
            dest_path = self._build_dest_path(build)
            for rpm in build_rpms:
                pushed_path = copy_package(rpm, dest_path,
                                           overwrite=self.overwrite)
                pushed_rpms.append(pushed_path)
            _pushed_build_tmp_paths.append(src_path)
            _updated_repo_bases.add(dest_repo_base_path)
            _updated_repos.add(
                self._dest_repo_path(build.repo, build.dist))
        with helpers.cdir(self.update_repo_path):
            helpers.ensure_dir(self.pushed_dir)
            upf_base = os.path.basename(upf)
            pushed_upf = os.path.join(self.pushed_dir, upf_base)
            pushed_files_fn = pushed_upf + self.pushed_files_ext
            git('mv', upf, pushed_upf)
            # context manager ensures the file is closed even on error
            with open(pushed_files_fn, 'w') as pushed_files_f:
                pushed_files_f.writelines(
                    "%s\n" % rpm for rpm in pushed_rpms)
            git('add', pushed_files_fn)
            try:
                git('commit', '-m',
                    "Push %s" % rdoupdate.core.pp_update(upf))
            except Exception:
                # BUG FIX: original called git('git', 'reset', '--hard'),
                # which runs `git git reset --hard` through the wrapper
                git('reset', '--hard')
                raise
        updated_repos.update(_updated_repos)
        updated_repo_bases.update(_updated_repo_bases)
    except Exception:
        if pushed_rpms:
            # roll back: remove everything we copied before failing
            log.warn("Final push failed for %s, cleaning copied "
                     "packages" % upf)
            for rpm in pushed_rpms:
                log.info("{t.warn}remove{t.normal} {rpm}".format(
                    t=log.term, rpm=rpm))
                os.remove(rpm)
        raise
def _push_pkg(upf):
    """Push all RPMs referenced by one update file into the dest repos.

    Copies each build's packages into the destination repo, records the
    pushed file list, and commits the move of the update file into the
    pushed dir.  On any failure, packages copied so far are removed.

    NOTE(review): relies on `self`, `updated_repos` and
    `updated_repo_bases` from the enclosing scope (closure) — confirm
    against the surrounding definition.
    """
    log.info("\nPushing update {t.bold}{upf}{t.normal}".format(
        t=log.term, upf=upf))
    update = self._load_update_file(upf)
    pushed_rpms = []
    try:
        _updated_repos = set()
        _updated_repo_bases = set()
        _pushed_build_tmp_paths = []
        for build in update.builds:
            src_path = self._build_tmp_path(upf, build)
            # same tmp path already processed -> skip duplicate build
            if src_path in _pushed_build_tmp_paths:
                continue
            build_rpms = helpers.find_files(src_path, ext='.rpm')
            dest_repo_base_path = self._dest_repo_base_path(build.repo)
            if not os.path.isdir(dest_repo_base_path):
                raise exception.NotADirectory(path=dest_repo_base_path)
            dest_path = self._build_dest_path(build)
            for rpm in build_rpms:
                pushed_path = copy_package(rpm, dest_path,
                                           overwrite=self.overwrite)
                pushed_rpms.append(pushed_path)
            _pushed_build_tmp_paths.append(src_path)
            _updated_repo_bases.add(dest_repo_base_path)
            _updated_repos.add(self._dest_repo_path(build.repo, build.dist))
        with helpers.cdir(self.update_repo_path):
            helpers.ensure_dir(self.pushed_dir)
            upf_base = os.path.basename(upf)
            pushed_upf = os.path.join(self.pushed_dir, upf_base)
            pushed_files_fn = pushed_upf + self.pushed_files_ext
            git('mv', upf, pushed_upf)
            # context manager ensures the file is closed even on error
            with open(pushed_files_fn, 'w') as pushed_files_f:
                pushed_files_f.writelines(
                    "%s\n" % rpm for rpm in pushed_rpms)
            git('add', pushed_files_fn)
            try:
                git('commit', '-m',
                    "Push %s" % rdoupdate.core.pp_update(upf))
            except Exception:
                # BUG FIX: original called git('git', 'reset', '--hard'),
                # which runs `git git reset --hard` through the wrapper
                git('reset', '--hard')
                raise
        updated_repos.update(_updated_repos)
        updated_repo_bases.update(_updated_repo_bases)
    except Exception:
        if pushed_rpms:
            # roll back: remove everything we copied before failing
            log.warn("Final push failed for %s, cleaning copied "
                     "packages" % upf)
            for rpm in pushed_rpms:
                log.info("{t.warn}remove{t.normal} {rpm}".format(
                    t=log.term, rpm=rpm))
                os.remove(rpm)
        raise
def _clone(self):
    """Clone the repo at self.url into self.repo_path."""
    if self.verbose:
        # pad the second line so 'into:' aligns under the repo desc
        message = ("Cloning {desc} repo: {url}\n"
                   " {space} into: {path}").format(
            desc=self.repo_desc,
            space=len(self.repo_desc) * ' ',
            url=self.url,
            path=self.repo_path)
        log.info(message)
    with helpers.cdir(self.base_path):
        cmd.git('clone', self.url, self.repo_name, log_cmd=self.verbose)
def _clone(self):
    """Clone the repo at self.url into self.repo_path."""
    if self.verbose:
        # pad the second line so 'into:' aligns under the repo desc
        message = ("Cloning {desc} repo: {url}\n"
                   " {space} into: {path}").format(
            desc=self.repo_desc,
            space=len(self.repo_desc) * ' ',
            url=self.url,
            path=self.repo_path)
        log.info(message)
    with helpers.cdir(self.base_path):
        git('clone', self.url, self.repo_name, log_cmd=self.verbose)
def info_tags_diff(local_info, info_file=None, buildsys_tags=False):
    """Print per-package tag changes between HEAD~ and current info.

    :param local_info: path to the local distroinfo checkout
    :param info_file: info file name (defaults to rdoinfo.info_file())
    :param buildsys_tags: diff 'buildsys-tags' instead of 'tags'
    """
    info_fn = info_file or rdoinfo.info_file()
    di = DistroInfo(info_fn, local_info=local_info)
    tagsname = 'buildsys-tags' if buildsys_tags else 'tags'
    new_info = di.get_info()
    # re-read the info as it was one git revision ago for comparison
    with helpers.cdir(di.fetcher.source), git.git_revision('HEAD~'):
        old_info = di.get_info()
    tdiff = distroinfo.query.tags_diff(old_info, new_info,
                                       tagsname=tagsname)
    if not tdiff:
        sys.stderr.write("No tag changes detected.\n")
        return
    for pkg, changes in tdiff:
        print("%s %s" % (pkg, changes))
def create_sample_distgit(name, version='1.2.3', release='1', path=None,
                          magic_comments=None):
    """Generate a dist-git repo containing a sample spec file.

    The repo is created at `path` (defaults to `name`, must not exist)
    and initialized with a single 'Initial import' commit.

    :returns: absolute path of the created repo
    """
    if path is None or not path:
        path = name
    assert not os.path.exists(path)
    dist_macro = "%{?dist}"
    if dist_macro not in release:
        release = release + dist_macro
    os.makedirs(path)
    if magic_comments is None or not magic_comments:
        magic_comments = ''
    with helpers.cdir(path):
        rendered = SAMPLE_SPEC.format(name=name,
                                      version=version,
                                      release=release,
                                      magic_comments=magic_comments)
        new_spec = specfile.Spec(fn='%s.spec' % name, txt=rendered)
        new_spec.set_tag('Name', name)
        new_spec.save()
        # initialize the git repo and commit the generated content
        git('init')
        git('add', '.')
        git('commit', '-m', 'Initial import', isolated=True)
    return os.path.abspath(path)
def get_new_pinned_builds(location, release):
    """Return packages newly pinned to a source-branch for `release`.

    Diffs the distroinfo tags between HEAD~ and the current checkout at
    `location` and collects packages whose `release` tag now carries a
    'source-branch' pin.

    :param location: path to the local distroinfo checkout
    :param release: release name to inspect (e.g. 'train')
    :returns: list of dicts with 'name', 'release' and 'version'
    """
    new_pins = []
    distroinfo = info.DistroInfo(
        info_files='rdo.yml',
        local_info=location)
    info2 = distroinfo.get_info()
    # re-read the info at the previous git revision for comparison
    with helpers.cdir(location):
        with git.git_revision('HEAD~'):
            info1 = distroinfo.get_info()
    packages = query.tags_diff(info1, info2, tagsname='tags')
    for package in packages:
        name = package[0]
        tags = package[1]
        # NOTE(review): `break` aborts the whole loop as soon as one
        # package lacks this release in the old info — looks like it may
        # have been meant as `continue`; kept as-is to preserve behavior.
        if release not in query.get_package(info1, name)['tags']:
            break
        pkg_tags = query.get_package(info2, name)['tags'][release]
        # idiom fix: membership test directly on the dict (was .keys())
        if release in tags and pkg_tags and 'source-branch' in pkg_tags:
            pinned_version = pkg_tags['source-branch']
            new_pins.append({'name': name,
                             'release': release,
                             'version': pinned_version})
    return new_pins
def get_new_pinned_builds(location, release):
    """Return packages newly pinned to a source-branch for `release`.

    Diffs the distroinfo tags between HEAD~ and the current checkout at
    `location` and collects packages whose `release` tag now carries a
    'source-branch' pin.

    :param location: path to the local distroinfo checkout
    :param release: release name to inspect (e.g. 'train')
    :returns: list of dicts with 'name', 'release' and 'version'
    """
    new_pins = []
    distroinfo = info.DistroInfo(info_files='rdo.yml',
                                 local_info=location)
    info2 = distroinfo.get_info()
    # re-read the info at the previous git revision for comparison
    with helpers.cdir(location):
        with git.git_revision('HEAD~'):
            info1 = distroinfo.get_info()
    packages = query.tags_diff(info1, info2, tagsname='tags')
    for package in packages:
        name = package[0]
        tags = package[1]
        # NOTE(review): `break` aborts the whole loop as soon as one
        # package lacks this release in the old info — looks like it may
        # have been meant as `continue`; kept as-is to preserve behavior.
        if release not in query.get_package(info1, name)['tags']:
            break
        pkg_tags = query.get_package(info2, name)['tags'][release]
        # idiom fix: membership test directly on the dict (was .keys())
        if release in tags and pkg_tags and 'source-branch' in pkg_tags:
            pinned_version = pkg_tags['source-branch']
            new_pins.append({
                'name': name,
                'release': release,
                'version': pinned_version
            })
    return new_pins
def repo_dir(self):
    """Return a context manager that chdirs into this repo's directory."""
    target = self.repo_path
    return helpers.cdir(target)
if __name__ == '__main__':
    # Clone every rpmfactory-puppet package's upstream repo into a
    # temporary workdir and generate a spec file where metadata.json
    # exists.  Optional single CLI argument limits to one project.
    rdoinfo = fetch_rdoinfo(RDOINFO)
    wdir = tempfile.mkdtemp()
    if len(sys.argv) == 2:
        rdoinfo['packages'] = [r for r in rdoinfo['packages']
                               if r['project'] == sys.argv[1]]
    for pkg in rdoinfo['packages']:
        if pkg['conf'] != 'rpmfactory-puppet':
            continue
        upstream_url = pkg['upstream']
        project = pkg['project']
        # BUG FIX: converted Python-2-only print statements and
        # `except Exception, e` to Python-3-compatible syntax
        print("Attempt to clone %s from %s to %s" % (project,
                                                     upstream_url, wdir))
        pdir = os.path.join(wdir, project)
        if os.path.isdir(pdir):
            shutil.rmtree(pdir)
        try:
            with cdir(wdir):
                git('clone', upstream_url, project)
        except Exception as e:
            print("[FAILED] Clone from %s (%s)" % (upstream_url, e))
            # clone failed, so pdir does not exist -> skip spec generation
            continue
        with cdir(pdir):
            if os.path.isfile('metadata.json'):
                print("Attempt to generate spec file for %s" % project)
                generate_spec_file(wdir, project, pkg)
def clone(
        package,
        force_fetch=False,
        use_master_distgit=False,
        gerrit_remotes=False,
        review_user=None):
    """Clone a package's dist-git and set up remotes from rdoinfo.

    Clones the dist-git (or master-distgit) into ./<package>/ and adds
    'patches', 'upstream', optional gerrit remotes and gerrit review
    remotes ('review-patches', 'review-origin') based on rdoinfo data.

    :param package: rdoinfo package name to clone
    :param force_fetch: force refresh of the rdoinfo repo
    :param use_master_distgit: clone 'master-distgit' instead of 'distgit'
    :param gerrit_remotes: also add gerrit-origin/gerrit-patches remotes
    :param review_user: gerrit username (defaults from USERNAME/USER env)
    :raises exception.InvalidRDOPackage: package not found in rdoinfo
    :raises exception.InvalidUsage: master-distgit requested but missing
    """
    inforepo = rdoinfo.get_default_inforepo()
    inforepo.init(force_fetch=force_fetch)
    pkg = inforepo.get_package(package)
    if not pkg:
        raise exception.InvalidRDOPackage(package=package)
    if use_master_distgit:
        try:
            distgit = pkg['master-distgit']
            distgit_str = 'master-distgit'
        except KeyError:
            raise exception.InvalidUsage(
                msg="-m/--use-master-distgit used but 'master-distgit' "
                    "missing in rdoinfo for package: %s" % package)
    else:
        distgit = pkg['distgit']
        distgit_str = 'distgit'
    log.info("Cloning {dg} into ./{t.bold}{pkg}{t.normal}/".format(
        t=log.term, dg=distgit_str, pkg=package))
    # remote URLs from rdoinfo; any of these may be absent
    patches = pkg.get('patches')
    upstream = pkg.get('upstream')
    review_patches = pkg.get('review-patches')
    review_origin = pkg.get('review-origin')
    git('clone', distgit, package)
    with helpers.cdir(package):
        if gerrit_remotes:
            log.info('Adding gerrit-origin remote...')
            git('remote', 'add', 'gerrit-origin', distgit)
        if patches:
            log.info('Adding patches remote...')
            git('remote', 'add', 'patches', patches)
            if gerrit_remotes:
                log.info('Adding gerrit-patches remote...')
                git('remote', 'add', 'gerrit-patches', patches)
        else:
            log.warn("'patches' remote information not available in rdoinfo.")
        if upstream:
            log.info('Adding upstream remote...')
            git('remote', 'add', 'upstream', upstream)
        else:
            log.warn("'upstream' remote information not available in rdoinfo.")
        if patches or upstream:
            git('fetch', '--all')
        if not review_user:
            # USERNAME is an env var used by gerrit
            review_user = os.environ.get('USERNAME') or os.environ.get('USER')
        msg_user = ('Using {t.bold}{u}{t.normal} as gerrit username, '
                    'you can change it with '
                    '{t.cmd}git remote set-url {r} ...{t.normal}')
        if review_patches:
            log.info('Adding gerrit remote for patch chains reviews...')
            r = tidy_ssh_user(review_patches, review_user)
            log.info(msg_user.format(u=review_user, r='review-patches',
                                     t=log.term))
            git('remote', 'add', 'review-patches', r)
        else:
            log.warn("'review-patches' remote information not available"
                     " in rdoinfo.")
        if review_origin:
            log.info('Adding gerrit remote for reviews...')
            r = tidy_ssh_user(review_origin, review_user)
            log.info(msg_user.format(u=review_user, r='review-origin',
                                     t=log.term))
            git('remote', 'add', 'review-origin', r)
        else:
            log.warn("'review-origin' remote information not available"
                     " in rdoinfo.")
        # show the final remote configuration to the user
        git('remote', '-v', direct=True)
def clone(
        package,
        force_fetch=False,
        use_master_distgit=False,
        gerrit_remotes=False,
        review_user=None,
        distro='rdo'):
    """Clone a package's dist-git and set up remotes from distroinfo.

    Clones the dist-git (or master-distgit) into ./<package>/ and adds
    'patches', 'upstream', optional gerrit remotes and gerrit review
    remotes ('review-patches', 'review-origin') based on distroinfo data.

    :param package: package name to look up and clone
    :param force_fetch: kept for interface compatibility (unused here)
    :param use_master_distgit: clone 'master-distgit' instead of 'distgit'
    :param gerrit_remotes: also add gerrit-origin/gerrit-patches remotes
    :param review_user: gerrit username (defaults from USERNAME/USER env)
    :param distro: distroinfo distro to query (default 'rdo')
    :raises exception.InvalidPackage: package not found in distroinfo
    :raises exception.InvalidUsage: master-distgit requested but missing
    """
    rdo = rdoinfo.get_distroinfo(distro=distro)
    ri = rdo.get_info()
    pkg = get_package(ri, package)
    if not pkg:
        raise exception.InvalidPackage(package=package)
    if use_master_distgit:
        try:
            distgit = pkg['master-distgit']
            distgit_str = 'master-distgit'
        except KeyError:
            raise exception.InvalidUsage(
                msg="-m/--use-master-distgit used but 'master-distgit' "
                    "missing in rdoinfo for package: %s" % package)
    else:
        distgit = pkg['distgit']
        distgit_str = 'distgit'
    log.info("Cloning {dg} into ./{t.bold}{pkg}{t.normal}/".format(
        t=log.term, dg=distgit_str, pkg=package))
    # remote URLs from distroinfo; any of these may be absent
    patches = pkg.get('patches')
    upstream = pkg.get('upstream')
    review_patches = pkg.get('review-patches')
    review_origin = pkg.get('review-origin')
    git('clone', distgit, package)
    with helpers.cdir(package):
        if gerrit_remotes:
            log.info('Adding gerrit-origin remote...')
            git('remote', 'add', 'gerrit-origin', distgit)
        if patches:
            log.info('Adding patches remote...')
            git('remote', 'add', 'patches', patches)
            if gerrit_remotes:
                log.info('Adding gerrit-patches remote...')
                git('remote', 'add', 'gerrit-patches', patches)
        else:
            log.warn("'patches' remote information not available in rdoinfo.")
        if upstream:
            log.info('Adding upstream remote...')
            git('remote', 'add', 'upstream', upstream)
        else:
            log.warn("'upstream' remote information not available in rdoinfo.")
        if patches or upstream:
            git('fetch', '--all')
        if not review_user:
            # USERNAME is an env var used by gerrit
            review_user = os.environ.get('USERNAME') or os.environ.get('USER')
        msg_user = ('Using {t.bold}{u}{t.normal} as gerrit username, '
                    'you can change it with '
                    '{t.cmd}git remote set-url {r} ...{t.normal}')
        if review_patches:
            log.info('Adding gerrit remote for patch chains reviews...')
            r = tidy_ssh_user(review_patches, review_user)
            log.info(msg_user.format(u=review_user, r='review-patches',
                                     t=log.term))
            git('remote', 'add', 'review-patches', r)
        else:
            log.warn("'review-patches' remote information not available"
                     " in rdoinfo.")
        if review_origin:
            log.info('Adding gerrit remote for reviews...')
            r = tidy_ssh_user(review_origin, review_user)
            log.info(msg_user.format(u=review_user, r='review-origin',
                                     t=log.term))
            git('remote', 'add', 'review-origin', r)
        else:
            log.warn("'review-origin' remote information not available"
                     " in rdoinfo.")
        # show the final remote configuration to the user
        git('remote', '-v', direct=True)
def get_projects_status(sf_url, user, release, rdo_project=None,
                        branch_template='stable/%s'):
    """Compiles the latest tag/hash for rdo_project for <release> on branch.

    BUG FIX: converted Python-2-only print statements to Python-3
    compatible print() calls; no other behavior changed.

    :param sf_url: software-factory host for the ssh gerrit URL
    :param user: ssh username for gerrit
    :param release: release name (used for branch and deliverables dir)
    :param rdo_project: optional single project to restrict to
    :param branch_template: template for the stable branch name
    :returns: (projects status dict, dict of projects missing from rdo)
    """
    url = 'ssh://%s@%s:29418' % (user, sf_url)
    rdoinfo = rdoinfoutils.fetch_rdoinfo()
    branch = branch_template % release
    projects = {}
    missing_from_rdo = {}
    # template copied (deepcopy) per project below
    status = {'upstream': {'name': None, 'version': None, 'hash': None,
                           'timestamp': None},
              'rdo': {'hash': None, 'timestamp': None},
              'branch_is_synced': False}
    tempdir = create_and_get_tempdir()
    releases_dir = os.path.join(tempdir, 'releases')
    # checkout the releases repo
    print("Cloning releases info ... ")
    clone(releases_info, releases_dir)
    release_dir = os.path.join(releases_dir, 'deliverables/%s' % release)
    for descriptor in glob.glob(os.path.join(release_dir, '*.yaml')):
        version, infos, repos = get_repos_infos_for_project(descriptor)
        for project in infos.keys():
            try:
                (name, distgit, upstream, sfdistgit, maints, conf,
                 mdistgit, patches) = rdoinfoutils.fetch_project_infos(
                    rdoinfo, project)
            except Exception as e:
                print("Skipping %s: %s" % (project, e))
                missing_from_rdo[project] = os_git_root + repos.get(project)
                continue
            p_status = copy.deepcopy(status)
            if rdo_project and name != rdo_project:
                continue
            desc_file = descriptor.split('/')[-1]
            if rdo_project:
                print('Info for %s found in %s' % (rdo_project, desc_file))
            p_status['upstream'] = {'name': project, 'version': version,
                                    'hash': infos[project],
                                    'timestamp': None}
            if not os.path.isdir(os.path.join(tempdir, project)):
                try:
                    clone('%s/%s.git' % (url, name),
                          os.path.join(tempdir, project))
                except CommandFailed:
                    # we already get the output, so do nothing
                    p_status['rdo'] = {}
                    projects[name] = p_status
                    continue
            with cdir(os.path.join(tempdir, project)):
                if git.ref_exists('refs/remotes/origin/%s' % branch):
                    git('checkout', 'origin/%s' % branch)
                    current = git('log', '-1', '--pretty=format:%H')
                    commit = infos[project]
                    rdo_time = get_commit_time(current)
                    upstream_time = get_commit_time(commit)
                    p_status['upstream']['timestamp'] = upstream_time
                    p_status['rdo']['hash'] = current
                    p_status['rdo']['timestamp'] = rdo_time
                    if upstream_time <= rdo_time:
                        # rdo is up-to-date and more
                        p_status['branch_is_synced'] = True
            projects[name] = p_status
    nuke_dir(tempdir)
    return projects, missing_from_rdo
def create_remote_branch(sf_url, user, project=None,
                         branch_template='stable/%s', release='mitaka',
                         dry_run=True, modify_branches=False):
    """Create or update stable branches in gerrit for out-of-sync projects.

    BUG FIXES:
    - converted Python-2-only print statements to print() calls;
    - the 'obsolete branch' message has four %s placeholders but was
      printed with only (p, branch), raising TypeError at runtime; now
      also passes the current rdo hash and the expected upstream hash.

    :param sf_url: software-factory host for the ssh gerrit URL
    :param user: ssh username for gerrit
    :param project: optional single project to process
    :param branch_template: template for the stable branch name
    :param release: release name (default 'mitaka')
    :param dry_run: when True, report only and modify nothing
    :param modify_branches: allow moving existing (obsolete) branches
    :returns: dict with 'missing'/'synced'/'obsolete'/'no_branch' results
    """
    raw_results = {'missing': {}, 'synced': {},
                   'obsolete': {}, 'no_branch': {}}
    if dry_run:
        print("This is a dry run, no repository will be modified.")
    url = 'ssh://%s@%s:29418' % (user, sf_url)
    tempdir = create_and_get_tempdir()
    projects, missing = get_projects_status(sf_url, user, release,
                                            rdo_project=project,
                                            branch_template=branch_template)
    todo = projects.keys()
    if project:
        todo = [project, ]
    branch = branch_template % release
    for p in todo:
        if projects[p]['branch_is_synced']:
            msg = "Branch %s is synced for project %s, nothing to do"
            print(msg % (branch, p))
            raw_results['synced'][p] = False
        elif not projects[p].get('rdo'):
            # project is listed in rdo but does not exist on rpmfactory
            print("Project %s does not exist on %s" % (p, sf_url))
            raw_results['missing'][p] = None
        elif projects[p].get('rdo') and not projects[p]['rdo']['hash']:
            # project exists on rpmfactory, is missing the branch
            msg = "Project %s has no branch called %s"
            print(msg % (p, branch))
            if dry_run:
                raw_results['no_branch'][p] = None
            else:
                commit = projects[p]['upstream']['hash']
                print("Creating branch %s on project %s at %s .." %
                      (branch, p, commit), end=' ')
                # True if created in this run, False if already synced
                try:
                    clone('%s/%s.git' % (url, p), os.path.join(tempdir, p))
                    with cdir(os.path.join(tempdir, p)):
                        create_branch_in_gerrit(p, branch, commit)
                    print(" Done")
                    raw_results['synced'][p] = True
                except Exception as e:
                    print(" Something bad happened :( %s" % e)
                    raw_results['no_branch'][p] = None
        else:
            # project on rpmfactory has an obsolete branch
            msg = ("Project %s has obsolete branch %s, "
                   "set at %s, should be %s")
            # supply all four format arguments (was only (p, branch))
            print(msg % (p, branch, projects[p]['rdo']['hash'],
                         projects[p]['upstream']['hash']))
            if dry_run or not modify_branches:
                raw_results['obsolete'][p] = None
            else:
                commit = projects[p]['upstream']['hash']
                print("Updating branch %s on project %s at %s .." %
                      (branch, p, commit), end=' ')
                try:
                    clone('%s/%s.git' % (url, p), os.path.join(tempdir, p))
                    with cdir(os.path.join(tempdir, p)):
                        update_branch_in_gerrit(p, branch, commit)
                    print("Done")
                    raw_results['synced'][p] = True
                except Exception as e:
                    print(" Something bad happened :( %s" % e)
                    raw_results['no_branch'][p] = None
    nuke_dir(tempdir)
    return raw_results