def _find_obs_pkg(bs, pkg, src_project):
    # probably OBS package name is different from src rpm name
    # use obs search api to find the owner
    from osc import core

    # api call path
    path = 'published/binary/id'
    # search predicate
    predicate = "(@name = '%s') and path/@project='%s'" % (pkg, src_project)
    kwa = {path: predicate}
    print(kwa)
    # osc search function wants keyword args
    result = core.search(bs.apiurl, **kwa)
    # obs search will return results from subprojects as well, so filter further
    filtered = result[path].findall("./binary[@project='%s']" % src_project)
    if filtered:
        # extract the first package name
        return filtered[0].attrib['package']

    # no exact match: fall back to a substring match on the binary name
    predicate = "(contains(@name, '%s')) and path/@project='%s'" % (pkg, src_project)
    kwa = {path: predicate}
    print(kwa)
    # osc search function wants keyword args
    result = core.search(bs.apiurl, **kwa)
    # obs search will return results from subprojects as well, so filter further
    filtered = result[path].findall("./binary[@project='%s']" % src_project)
    if filtered:
        return filtered[0].attrib['package']
    return None

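
# Illustrative usage sketch (not part of the original code): a minimal example
# of how _find_obs_pkg might be called. The apiurl, binary rpm name and
# project below are hypothetical placeholders; buildservice.BuildService is
# assumed to be the same wrapper used by _get_submit_reqs further down.
def _find_obs_pkg_example():
    bs = buildservice.BuildService(apiurl='https://api.example.org')  # hypothetical apiurl
    obs_pkg = _find_obs_pkg(bs, 'libfoo1', 'Example:Project')  # hypothetical rpm name and project
    if obs_pkg is None:
        print('no OBS package found for this binary rpm')
    return obs_pkg
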
def remove_obsoleted_develtag(self, project, package):
    xpath = {
        'package': "@project='%s' and devel/@project=@project and devel/@package='%s'" % (project, package),
    }
    collection = search(self.api.apiurl, **xpath)['package']
    for pkg in collection.findall('package'):
        set_devel_project(self.api.apiurl, project, pkg.attrib['name'], devprj=None)

def project_attribute_list(apiurl, attribute, value=None):
    xpath = 'attribute/@name="{}"'.format(attribute)
    if value is not None:
        xpath += '="{}"'.format(value)

    root = search(apiurl, project=xpath)['project']
    for project in root.findall('project'):
        yield project.get('name')

def project_remote_list(apiurl):
    remotes = {}

    root = search(apiurl, project='starts-with(remoteurl, "http")')['project']
    for project in root.findall('project'):
        # Strip trailing /public since the only use-case for manually checking
        # remote projects is to query them directly with an API that does not
        # work over the interconnect. As such /public would have the same problem.
        remotes[project.get('name')] = re.sub('/public$', '', project.find('remoteurl').text)

    return remotes

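
# Illustrative usage sketch (not part of the original code): how the two
# helpers above might be combined. The attribute name is a hypothetical
# placeholder; project_attribute_list() is a generator, so it is wrapped in
# list() here only to show the shape of the result.
def _project_query_example(apiurl):
    flagged = list(project_attribute_list(apiurl, 'OSRT:ExampleAttribute'))  # hypothetical attribute
    remotes = project_remote_list(apiurl)
    for name, url in remotes.items():
        print('{} -> {}'.format(name, url))
    return flagged, remotes
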
def _get_submit_reqs(new_prjs, old_prjs):
    reqs = {
        "incoming": [],
        "in progress": [],
        "outgoing": [],
    }
    for apiurl, prjs in new_prjs:
        bs = buildservice.BuildService(apiurl=str(apiurl))
        # one predicate per target project, joined with ' or ' so the xpath
        # stays balanced for any number of projects
        actions = ["action/target/@project='%s'" % prj for prj in prjs]
        result = core.search(
            bs.apiurl,
            request="(state/@name='new' or state/@name='review') and (%s)" % " or ".join(actions))
        # accumulate instead of overwriting so results from several apiurls are kept
        reqs["incoming"].extend(result['request'].findall('request'))
    for apiurl, prjs in old_prjs:
        bs = buildservice.BuildService(apiurl=str(apiurl))
        actions = ["action/source/@project='%s'" % prj for prj in prjs]
        result = core.search(
            bs.apiurl,
            request="(state/@name='new' or state/@name='review') and (%s)" % " or ".join(actions))
        reqs["outgoing"].extend(result['request'].findall('request'))
    return reqs

def check(apiurl, entity, entity_type='group', comment=False, bot=None,
          threshold=2 * 3600, threshold_require=True):
    queries = {'request': {'limit': 1000, 'withfullhistory': 1}}
    xpath = 'state[@name="new"] or state[@name="review"]'

    if entity == 'staging-bot':
        xpath = xpath_join(
            xpath, 'review[starts-with(@by_project, "openSUSE:") and @state="new"]', op='and')
        xpath = xpath_join(xpath, 'history/@who="{}"'.format(entity), op='and')

        requests = search(apiurl, queries, request=xpath)['request']
        for request in requests:
            age = request_age(request).total_seconds()
            request_debug(request, age, threshold)

            if age <= threshold:
                return True

        return False

    xpath = xpath_join(
        xpath, 'review[@by_{}="{}" and @state="new"]'.format(entity_type, entity), op='and')
    requests = search(apiurl, queries, request=xpath)['request']

    print_debug('{:,} requests'.format(len(requests)))
    if not len(requests):
        # Could check to see that a review has been performed in the last week.
        return True

    all_comment = True
    for request in requests:
        kwargs = {}
        if comment == 'project':
            # Would be a lot easier with lxml, but short of reparsing or monkey.
            for review in request.findall('review[@by_project]'):
                if review.get('by_project').startswith('openSUSE:'):
                    kwargs['project_name'] = review.get('by_project')
        # TODO repo-checker will miss stagings where delete only problem so
        # comment on request, but should be fixed by #1084.
        elif comment:
            kwargs['request_id'] = request.get('id')

        age = request_age(request).total_seconds()
        request_debug(request, age, threshold)

        comment_age = check_comment(apiurl, bot, **kwargs)
        if comment_age:
            if comment_age <= threshold:
                print_debug('comment found below threshold')
                return True
        elif age > threshold:
            print_debug('no comment found and above threshold')
            all_comment = False
            if threshold_require:
                return False
            else:
                continue
        else:
            print_debug('no comment found, but below threshold')

    print_debug('all comments: {}'.format(all_comment))
    return all_comment

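
# Illustrative usage sketch (not part of the original code): check() returns
# True when the given reviewer or bot looks active on its open reviews. The
# group and bot names below are hypothetical placeholders; threshold is in
# seconds.
def _check_example(apiurl):
    active = check(apiurl, 'example-review-team', entity_type='group',
                   comment=True, bot='example-bot', threshold=4 * 3600)
    print_debug('example-review-team active: {}'.format(active))
    return active
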