def getProjectMeta(self, project):
    """
    getProjectMeta(project) -> string

    Get XML metadata for project
    """
    return ''.join(core.show_project_meta(self.apiurl, project))
def project_role_expand(apiurl, project, role='maintainer'):
    """
    All users with a certain role on a project, including those who have the
    role directly assigned and those who are part of a group with that role.
    """
    meta = ETL.fromstringlist(show_project_meta(apiurl, project))
    return meta_role_expand(apiurl, meta, role)
def target_archs(apiurl, project):
    meta = show_project_meta(apiurl, project)
    meta = ET.fromstring(''.join(meta))
    archs = []
    for arch in meta.findall('repository[@name="standard"]/arch'):
        archs.append(arch.text)
    return archs
def generate_all_archs(self, project):
    meta = ET.fromstringlist(show_project_meta(self.apiurl, project))
    archs = set()
    for arch in meta.findall('.//arch'):
        archs.add(arch.text)
    result = []
    for arch in archs:
        result.append(f"arch_{arch}=1")
    return '&'.join(result)
def maintainers_get(apiurl, project, package=None):
    if package is None:
        meta = ET.fromstring(''.join(show_project_meta(apiurl, project)))
        return [p.get('userid') for p in meta.findall('.//person')
                if p.get('role') == 'maintainer']

    root = owner_fallback(apiurl, project, package)
    maintainers = [p.get('name') for p in root.findall('.//person')
                   if p.get('role') == 'maintainer']
    if not maintainers:
        for group in [p.get('name') for p in root.findall('.//group')
                      if p.get('role') == 'maintainer']:
            maintainers = maintainers + group_members(apiurl, group)
    return maintainers
def maintainers_get(apiurl, project, package=None):
    if package:
        try:
            meta = show_package_meta(apiurl, project, package)
        except HTTPError as e:
            if e.code == 404:
                # Fallback to project in the case of new package.
                meta = show_project_meta(apiurl, project)
            else:
                # Re-raise anything else so meta is never used unbound.
                raise
    else:
        meta = show_project_meta(apiurl, project)
    meta = ET.fromstringlist(meta)

    userids = []
    for person in meta.findall('person[@role="maintainer"]'):
        userids.append(person.get('userid'))

    if len(userids) == 0 and package is not None:
        # Fallback to project if package has no maintainers.
        return maintainers_get(apiurl, project)

    return userids
def getTargets(self, project):
    """
    getTargets(project) -> list

    Get a list of targets for a project
    """
    targets = []
    tree = ElementTree.fromstring(''.join(
        core.show_project_meta(self.apiurl, project)))
    for repo in tree.findall('repository'):
        for arch in repo.findall('arch'):
            targets.append('%s/%s' % (repo.get('name'), arch.text))
    return targets
def maintainers_get(apiurl, project, package=None):
    if package is None:
        meta = ET.fromstring(''.join(show_project_meta(apiurl, project)))
        return [p.get('userid') for p in meta.findall('.//person')
                if p.get('role') == 'maintainer']

    root = owner_fallback(apiurl, project, package)
    maintainers = [p.get('name') for p in root.findall('.//person')
                   if p.get('role') == 'maintainer']
    if not maintainers:
        for group in [p.get('name') for p in root.findall('.//group')
                      if p.get('role') == 'maintainer']:
            url = makeurl(apiurl, ('group', group))
            root = ET.parse(http_GET(url)).getroot()
            maintainers = maintainers + [p.get('userid') for p in
                                         root.findall('./person/person')]
    return maintainers
def maintainer(args):
    if args.group is None:
        # Default is appended to rather than overridden (upstream bug).
        args.group = ['factory-maintainers', 'factory-staging']
    desired = set(args.group)

    apiurl = osc.conf.config['apiurl']
    devel_projects = devel_projects_load(args)
    for devel_project in devel_projects:
        meta = ET.fromstringlist(show_project_meta(apiurl, devel_project))
        groups = meta.xpath('group[@role="maintainer"]/@groupid')
        intersection = set(groups).intersection(desired)
        if len(intersection) != len(desired):
            print('{} missing {}'.format(
                devel_project, ', '.join(desired - intersection)))
def repository_path_expand(apiurl, project, repo):
    """Recursively list underlying projects."""

    repos = [[project, repo]]
    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    paths = meta.findall('.//repository[@name="{}"]/path'.format(repo))

    # The listed paths are taken as-is, except for the last one...
    for path in paths[:-1]:
        repos += [[path.get('project', project), path.get('repository')]]

    # ...which is expanded recursively
    if len(paths) > 0:
        repos += repository_path_expand(apiurl,
                                        paths[-1].get('project', project),
                                        paths[-1].get('repository'))
    return repos
def repository_path_search(apiurl, project, search_project, search_repository):
    queue = []

    # Initialize breadth first search queue with repositories from top project.
    root = ETL.fromstringlist(show_project_meta(apiurl, project))
    for repository in root.xpath('repository[path[@project and @repository]]/@name'):
        queue.append((repository, project, repository))

    # Perform a breadth first search and return the first repository chain with
    # a series of path elements targeting search project and repository.
    for repository_top, project, repository in queue:
        if root.get('name') != project:
            # Repositories for a single project are in a row so cache parsing.
            root = ETL.fromstringlist(show_project_meta(apiurl, project))

        paths = root.findall('repository[@name="{}"]/path'.format(repository))
        for path in paths:
            if path.get('project') == search_project and \
               path.get('repository') == search_repository:
                return repository_top

            queue.append((repository_top, path.get('project'), path.get('repository')))

    return None
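A minimal usage sketch for the breadth-first search above (not part of the original sources; the project and repository names are placeholders and osc is assumed to be configured):

import osc.conf

osc.conf.get_config()
apiurl = osc.conf.config['apiurl']

# Which repository of the (hypothetical) staging project ultimately chains,
# through its <path> elements, to openSUSE:Factory/standard?
top = repository_path_search(apiurl, 'home:example:staging',
                             'openSUSE:Factory', 'standard')
print(top)  # repository name, or None if no chain reaches the target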
def _repository_path_expand(apiurl, project, repo):
    """Recursively list underlying projects."""

    repos = OrderedDict()

    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    for path in meta.findall('.//repository[@name="{}"]/path'.format(repo)):
        rp = repository_path_expand(apiurl, path.get('project', project),
                                    path.get('repository'))
        for project, repo in rp:
            # only the last repo for a project is remembered by OBS
            if project in repos:
                del repos[project]
            repos[project] = repo

    return repos
def maintainers_get(apiurl, project, package=None):
    if package is None:
        meta = ETL.fromstringlist(show_project_meta(apiurl, project))
        maintainers = meta.xpath('//person[@role="maintainer"]/@userid')

        groups = meta.xpath('//group[@role="maintainer"]/@groupid')
        maintainers.extend(groups_members(apiurl, groups))

        return maintainers

    # Ugly reparse, but real xpath makes the rest much cleaner.
    root = owner_fallback(apiurl, project, package)
    root = ETL.fromstringlist(ET.tostringlist(root))
    maintainers = root.xpath('//person[@role="maintainer"]/@name')

    groups = root.xpath('//group[@role="maintainer"]/@name')
    maintainers.extend(groups_members(apiurl, groups))

    return maintainers
def source_has_correct_maintainers(self, source_project):
    """Checks whether the source project has the required maintainer

    If a 'required-source-maintainer' is set, it checks whether it is a
    maintainer for the source project. Inherited maintainership is
    intentionally ignored to have explicit maintainer set.

    source_project - source project name
    """
    self.logger.info(
        'Checking required maintainer from the source project (%s)'
        % self.required_maintainer
    )
    if not self.required_maintainer:
        return True

    meta = ETL.fromstringlist(show_project_meta(self.apiurl, source_project))
    maintainers = meta.xpath('//person[@role="maintainer"]/@userid')
    maintainers += ['group:' + g for g in
                    meta.xpath('//group[@role="maintainer"]/@groupid')]

    return self.required_maintainer in maintainers
def repository_path_expand(apiurl, project, repo, repos=None):
    """Recursively list underlying projects."""

    if repos is None:
        # Avoids screwy behavior where list as default shares reference for all
        # calls which effectively means the list grows even when new project.
        repos = []

    if [project, repo] in repos:
        # For some reason devel projects such as graphics include the same path
        # twice for openSUSE:Factory/snapshot. Does not hurt anything, but
        # cleaner not to include it twice.
        return repos

    repos.append([project, repo])

    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    for path in meta.findall('.//repository[@name="{}"]/path'.format(repo)):
        repository_path_expand(apiurl, path.get('project', project),
                               path.get('repository'), repos)

    return repos
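For comparison with the expansion variants above, a hedged sketch of how the result might be consumed (placeholder names; assumes one of the helpers above is importable and osc is configured):

import osc.conf

osc.conf.get_config()
apiurl = osc.conf.config['apiurl']

# The result is an ordered list of [project, repository] pairs, beginning with
# the starting pair itself and following the <path> elements recursively.
for prj, repo in repository_path_expand(apiurl, 'openSUSE:Factory', 'standard'):
    print('{}/{}'.format(prj, repo))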
def apply(self, splitter):
    super(StrategyQuick, self).apply(splitter)

    # Leaper accepted which means any extra reviews have been added.
    splitter.filter_add('./review[@by_user="******" and @state="accepted"]')

    # No @by_project reviews that are not accepted. If not first round stage
    # this should also ignore previous staging project reviews or already
    # accepted human reviews.
    splitter.filter_add('not(./review[@by_project and @state!="accepted"])')

    # Only allow reviews by whitelisted groups and users as all others will
    # be considered non-quick (like @by_group="legal-auto"). The allowed
    # groups are only those configured as reviewers on the target project.
    meta = ET.fromstringlist(show_project_meta(splitter.api.apiurl,
                                               splitter.api.project))
    allowed_groups = meta.xpath('group[@role="reviewer"]/@groupid')
    allowed_users = []
    if 'repo-checker' in splitter.config:
        allowed_users.append(splitter.config['repo-checker'])
    self.filter_review_whitelist(splitter, 'by_group', allowed_groups)
    self.filter_review_whitelist(splitter, 'by_user', allowed_users)
def project_locked(apiurl, project):
    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    return meta.find('lock/enable') is not None
def target_archs(apiurl, project, repository='standard'):
    meta = ETL.fromstringlist(show_project_meta(apiurl, project))
    return meta.xpath('repository[@name="{}"]/arch/text()'.format(repository))
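A small illustrative call of the xpath-based variant above (placeholder project and repository names, osc assumed configured):

import osc.conf

osc.conf.get_config()
apiurl = osc.conf.config['apiurl']

print(target_archs(apiurl, 'openSUSE:Factory'))           # archs of the 'standard' repository
print(target_archs(apiurl, 'openSUSE:Factory', 'ports'))  # another repository, if present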
def handle_wi(self, wid):
    """ Workitem handling function """
    wid.result = False

    f = wid.fields
    p = wid.params

    project = None
    package = None

    if f.project and f.package:
        project = f.project
        package = f.package

    if p.project and p.package:
        project = p.project
        package = p.package

    if not project or not package:
        raise RuntimeError(
            "Missing mandatory field or parameter: package, project")

    if not f.repourl and not p.repourl:
        raise RuntimeError("Missing mandatory field or parameter: repourl")

    params = {}
    if f.repourl:
        params["url"] = f.repourl
    if p.repourl:
        params["url"] = p.repourl

    params["service"], params["repo"] = find_service_repo(params["url"])

    if f.branch:
        params["branch"] = f.branch
    if p.branch:
        params["branch"] = p.branch

    params["revision"] = ""
    if f.revision:
        params["revision"] = f.revision
    if p.revision:
        params["revision"] = p.revision

    params["token"] = ""
    params["debian"] = ""
    params["dumb"] = ""
    if f.token:
        params["token"] = f.token
    if p.token:
        params["token"] = p.token

    if p.debian:
        params["debian"] = p.debian
    if f.debian:
        params["debian"] = f.debian

    if p.dumb:
        params["dumb"] = p.dumb
    if f.dumb:
        params["dumb"] = f.dumb

    if "branch" in params and params["branch"].startswith("pkg-"):
        if not "service" in params or not "repo" in params:
            raise RuntimeError(
                "Service/Repo not found in repourl %s " % p.repourl)
        service = git_pkg_service
    else:
        service = tar_git_service

    # the simple approach doesn't work with project links
    # if self.obs.isNewPackage(project, package):
    #     self.obs.getCreatePackage(str(project), str(package))
    # else:
    try:
        pkginfo = core.show_files_meta(
            self.obs.apiurl, str(project), str(package),
            expand=False, meta=True)
        if "<entry" not in pkginfo:
            # This is a link and it needs branching from the linked project
            # so grab the meta and extract the project from the link
            print "Found %s as a link in %s" % (package, project)
            x = etree.fromstring(
                "".join(core.show_project_meta(self.obs.apiurl, project)))
            l = x.find('link')
            if l is None:
                raise Exception(
                    "Expected a <link> in project %s." % project)
            print "Got a link %s" % l
            linked_project = l.get('project')
            print "Branching %s to overwrite _service" % package
            core.branch_pkg(self.obs.apiurl, linked_project, str(package),
                            target_project=str(project))
    except Exception, exc:
        print "Doing a metatype pkg add because I caught %s" % exc
        print "Creating package %s in project %s" % (package, project)
        data = core.metatypes['pkg']['template']
        data = StringIO(
            data % {"name": str(package),
                    "user": self.obs.getUserName()}).readlines()
        u = core.makeurl(
            self.obs.apiurl, ['source', str(project), str(package), "_meta"])
        x = core.http_PUT(u, data="".join(data))
        print "HTTP PUT result of pkg add : %s" % x
def handle_wi(self, wid):
    """ Workitem handling function """
    wid.result = False

    f = wid.fields
    p = wid.params

    project = None
    package = None

    if f.project and f.package:
        project = f.project
        package = f.package

    if p.project and p.package:
        project = p.project
        package = p.package

    if not project or not package:
        raise RuntimeError(
            "Missing mandatory field or parameter: package, project")

    if not f.repourl and not p.repourl:
        raise RuntimeError("Missing mandatory field or parameter: repourl")

    params = {}
    if f.repourl:
        params["url"] = f.repourl
    if p.repourl:
        params["url"] = p.repourl

    params["service"], params["repo"] = find_service_repo(params["url"])

    if f.branch:
        params["branch"] = f.branch
    if p.branch:
        params["branch"] = p.branch

    params["revision"] = ""
    if f.revision:
        params["revision"] = f.revision
    if p.revision:
        params["revision"] = p.revision

    params["token"] = ""
    params["debian"] = ""
    params["dumb"] = ""
    if f.token:
        params["token"] = f.token
    if p.token:
        params["token"] = p.token

    if p.debian:
        params["debian"] = p.debian
    if f.debian:
        params["debian"] = f.debian

    if p.dumb:
        params["dumb"] = p.dumb
    if f.dumb:
        params["dumb"] = f.dumb

    if "branch" in params and params["branch"].startswith("pkg-"):
        if "service" not in params or "repo" not in params:
            raise RuntimeError(
                "Service/Repo not found in repourl %s " % p.repourl)
        service = git_pkg_service
    else:
        service = tar_git_service

    # the simple approach doesn't work with project links
    # if self.obs.isNewPackage(project, package):
    #     self.obs.getCreatePackage(str(project), str(package))
    # else:
    try:
        pkginfo = core.show_files_meta(
            self.obs.apiurl, str(project), str(package),
            expand=False, meta=True)
        if "<entry" not in pkginfo:
            # This is a link and it needs branching from the linked project
            # so grab the meta and extract the project from the link
            print("Found %s as a link in %s" % (package, project))
            x = etree.fromstring(
                "".join(core.show_project_meta(self.obs.apiurl, project)))
            link = x.find('link')
            if link is None:
                raise Exception(
                    "Expected a <link> in project %s." % project)
            print("Got a link %s" % link)
            linked_project = link.get('project')
            print("Branching %s to overwrite _service" % package)
            core.branch_pkg(self.obs.apiurl, linked_project, str(package),
                            target_project=str(project))
    except Exception as exc:
        print("Doing a metatype pkg add because I caught %s" % exc)
        print("Creating package %s in project %s" % (package, project))
        data = core.metatypes['pkg']['template']
        data = StringIO(
            data % {
                "name": str(package),
                "user": self.obs.getUserName()}
        ).readlines()
        u = core.makeurl(
            self.obs.apiurl, ['source', str(project), str(package), "_meta"])
        x = core.http_PUT(u, data="".join(data))
        print("HTTP PUT result of pkg add : %s" % x)

    # Set any constraint before we set the service file
    constraint_xml = make_constraint(package)
    if constraint_xml:
        # obs module only exposed the putFile by filepath so
        # this is a reimplement to avoid writing a tmpfile
        u = core.makeurl(self.obs.apiurl,
                         ['source', project, package, "_constraints"])
        core.http_PUT(u, data=constraint_xml)
        print("New _constraints file:\n%s" % constraint_xml)
    else:
        print("No _constraints for %s" % package)

    # Start with an empty XML doc
    try:
        # to get any existing _service file.
        # We use expand=0 as otherwise a failed service run won't
        # return the _service file
        print("Trying to get _service file for %s/%s" % (project, package))
        services_xml = self.obs.getFile(
            project, package, "_service", expand=0)
    except urllib2.HTTPError as e:
        print("Exception %s trying to get _service file for %s/%s"
              % (e, project, package))
        if e.code == 404:
            services_xml = empty_service
        elif e.code == 400:
            # HTTP Error 400: service in progress error
            wid.result = True
            print("Service in progress, could not get _service file. "
                  "Not triggering another run.")
            return
        else:
            raise e

    services_xml = services_xml.strip() or empty_service

    # Replace the matching one:
    try:
        services = etree.fromstring(services_xml)
    except etree.XMLSyntaxError as e:
        print(e)
        raise

    # Create our new service (not services anymore)
    new_service_xml = service % params
    new_service = etree.fromstring(new_service_xml)
    svcname = new_service.find(".").get("name")
    old_service = services.find("./service[@name='%s']" % svcname)
    if old_service is not None:
        services.replace(old_service, new_service)
    else:
        services.append(new_service)

    svc_file = etree.tostring(services, pretty_print=True)
    print("New _service file:\n%s" % svc_file)

    # And send our new service file
    self.obs.setupService(project, package, svc_file)

    wid.result = True
def handle_wi(self, wid):
    """ Workitem handling function """
    wid.result = False

    f = wid.fields
    p = wid.params

    project = None
    package = None

    if f.project and f.package:
        project = f.project
        package = f.package

    if p.project and p.package:
        project = p.project
        package = p.package

    if not project or not package:
        raise RuntimeError(
            "Missing mandatory field or parameter: package, project")

    if not f.repourl and not p.repourl:
        raise RuntimeError("Missing mandatory field or parameter: repourl")

    params = {}
    if f.repourl:
        params["url"] = f.repourl
    if p.repourl:
        params["url"] = p.repourl

    params["service"], params["repo"] = find_service_repo(params["url"])

    if f.branch:
        params["branch"] = f.branch
    if p.branch:
        params["branch"] = p.branch

    params["revision"] = ""
    if f.revision:
        params["revision"] = f.revision
    if p.revision:
        params["revision"] = p.revision

    params["token"] = ""
    params["debian"] = ""
    params["dumb"] = ""
    if f.token:
        params["token"] = f.token
    if p.token:
        params["token"] = p.token

    if p.debian:
        params["debian"] = p.debian
    if f.debian:
        params["debian"] = f.debian

    if p.dumb:
        params["dumb"] = p.dumb
    if f.dumb:
        params["dumb"] = f.dumb

    if "branch" in params and params["branch"].startswith("pkg-"):
        if "service" not in params or "repo" not in params:
            raise RuntimeError("Service/Repo not found in repourl %s "
                               % p.repourl)
        service = git_pkg_service
    else:
        service = tar_git_service

    # the simple approach doesn't work with project links
    # if self.obs.isNewPackage(project, package):
    #     self.obs.getCreatePackage(str(project), str(package))
    # else:
    try:
        pkginfo = core.show_files_meta(self.obs.apiurl,
                                       str(project), str(package),
                                       expand=False, meta=True)
        if "<entry" not in pkginfo:
            # This is a link and it needs branching from the linked project
            # so grab the meta and extract the project from the link
            self.log.debug("Found %s as a link in %s" % (package, project))
            x = etree.fromstring("".join(
                core.show_project_meta(self.obs.apiurl, project)))
            link = x.find('link')
            if link is None:
                raise Exception("Expected a <link> in project %s." % project)
            self.log.debug("Got a link %s" % link)
            linked_project = link.get('project')
            self.log.debug("Branching %s to overwrite _service" % package)
            core.branch_pkg(self.obs.apiurl, linked_project, str(package),
                            target_project=str(project))
    except Exception as exc:
        self.log.warn("Doing a metatype pkg add because I caught %s" % exc)
        self.log.warn("Creating package %s in project %s" % (package, project))
        data = core.metatypes['pkg']['template']
        data = StringIO(data % {
            "name": str(package),
            "user": self.obs.getUserName()
        }).readlines()
        u = core.makeurl(
            self.obs.apiurl, ['source', str(project), str(package), "_meta"])
        x = core.http_PUT(u, data="".join(data))
        self.log.debug("HTTP PUT result of pkg add : %s" % x)

    # Set any constraint before we set the service file
    constraint_xml = self.make_constraint(package)
    if constraint_xml:
        # obs module only exposed the putFile by filepath so
        # this is a reimplement to avoid writing a tmpfile
        u = core.makeurl(self.obs.apiurl,
                         ['source', project, package, "_constraints"])
        core.http_PUT(u, data=constraint_xml)
        self.log.info("New _constraints file:\n%s" % constraint_xml)
    else:
        self.log.info("No _constraints for %s" % package)

    # Start with an empty XML doc
    try:
        # to get any existing _service file.
        # We use expand=0 as otherwise a failed service run won't
        # return the _service file
        self.log.debug("Trying to get _service file for %s/%s"
                       % (project, package))
        services_xml = self.obs.getFile(project, package, "_service", expand=0)
    except urllib2.HTTPError as e:
        self.log.debug("Exception %s trying to get _service file for %s/%s"
                       % (e, project, package))
        if e.code == 404:
            services_xml = empty_service
        elif e.code == 400:
            # HTTP Error 400: service in progress error
            wid.result = True
            self.log.warn("Service in progress, could not get _service file. "
                          "Not triggering another run.")
            return
        else:
            raise e

    services_xml = services_xml.strip() or empty_service

    # Replace the matching one:
    try:
        services = etree.fromstring(services_xml)
    except etree.XMLSyntaxError as e:
        self.log.exception("Creating services xml failed")
        raise

    # Create our new service (not services anymore)
    new_service_xml = service % params
    new_service = etree.fromstring(new_service_xml)
    svcname = new_service.find(".").get("name")
    old_service = services.find("./service[@name='%s']" % svcname)
    if old_service is not None:
        services.replace(old_service, new_service)
    else:
        services.append(new_service)

    svc_file = etree.tostring(services, pretty_print=True)
    self.log.debug("New _service file:\n%s" % svc_file)

    # And send our new service file
    self.obs.setupService(project, package, svc_file)

    wid.result = True
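All of the examples above share the same pattern: show_project_meta() returns the project's _meta XML as an iterable of lines, which is then parsed with (lxml.)etree and queried. A minimal standalone sketch of that pattern, with a placeholder project name and osc assumed to be configured:

from lxml import etree as ETL
from osc import conf
from osc.core import show_project_meta

conf.get_config()
apiurl = conf.config['apiurl']

meta = ETL.fromstringlist(show_project_meta(apiurl, 'openSUSE:Factory'))
print(meta.get('name'))                                   # project name from the root element
print(meta.xpath('repository/@name'))                     # repository names
print(meta.xpath('person[@role="maintainer"]/@userid'))   # directly assigned maintainers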