def _bugids_to_dicts(chunk_of_bugids):
    """Resolve a chunk of bugzilla ids into display-ready dictionaries.

    NOTE(review): ``self`` is used as a closure variable here, so this
    function must be defined inside a method of the connector class.
    """
    # Resolve all the ids in one bugzilla round-trip.
    bugs = self._bugzilla.getbugs(chunk_of_bugids)
    results = []
    for bug in bugs:
        last_changed = DateTimeDisplay(str(bug.last_change_time),
                                       format='%Y%m%dT%H:%M:%S')
        css_classes = []
        if self._is_security_bug(bug):
            css_classes.append('security-bug')
        # bug.version may be a scalar or a list/tuple; display the first.
        version = bug.version
        if isinstance(version, (list, tuple)):
            version = version[0]
        results.append({
            'id': bug.bug_id,
            'status': bug.bug_status.title(),
            'description': bug.summary,
            'last_modified': last_changed.age(),
            'release': '%s %s' % (bug.product, version),
            'bug_class': ' '.join(css_classes),
        })
    return results
def get_creation_time(self, filename):
    """Return a datetime object for the date a given file was created.

    Runs ``git log --reverse`` on *filename* so the oldest commit comes
    first, and parses its ``Date:`` header line.

    :param filename: path of the file, relative to the repository.
    :returns: a naive datetime of the first commit touching the file.
    :raises StopIteration: if the log output has no ``Date`` line.
    """
    data = self.repo.git.log(filename, reverse=True)
    lines = (l for l in data.split('\n') if l.startswith('Date'))
    # Fix: the builtin next() works on Python 2.6+ and 3.x, whereas the
    # original generator.next() method call is Python-2-only.
    # Drop the leading "Date:" token and the trailing timezone offset.
    date = ' '.join(next(lines).split()[1:-1])
    fmt = '%a %b %d %H:%M:%S %Y'
    return DateTimeDisplay(date, format=fmt).datetime
def query_changelogs(self, start_row=None, rows_per_page=10, order=-1,
                     sort_col=None, filters=None, **params):
    """Fetch a package's changelog entries from mdapi.

    Requires a ``package_name`` filter; ``release`` defaults to
    ``'rawhide'``.  Each returned entry is augmented with ``text``,
    ``display_date`` and, when the author field parses, ``author`` /
    ``email`` / ``version`` keys.

    :returns: ``(row_count, entries)``
    :raises HTTPBadRequest: when no package_name filter is given.
    :raises HTTPBadGateway: when mdapi is unreachable or returns an
        unrecognized payload.
    """
    if not filters:
        filters = {}
    if not filters.get('package_name'):
        raise HTTPBadRequest('"package_name" is a required filter.')
    package_name = filters['package_name']
    release = filters.get('release', 'rawhide')
    url = '/'.join([self._mdapi_url, release, 'changelog', package_name])
    response = requests.get(url)
    # A requests.Response is falsy for 4xx/5xx status codes; the
    # explicit bool() wrapper was redundant.
    if not response:
        raise HTTPBadGateway("Failed to talk to mdapi, %r %r" % (
            url, response))
    data = response.json()
    if 'files' in data:
        # This is the *old* way to do it
        data = data['files']
    elif 'changelogs' in data:
        # This is the *new* way to do it
        # https://github.com/fedora-infra/mdapi/commit/c2eafd8d05171fdcb3fd699835c0a44e02088724#commitcomment-14646204
        data = data['changelogs']
    else:
        # IMPOSSIBLE!
        raise HTTPBadGateway("Got unexpected response from mdapi.")
    # Fix: the index from enumerate() was never used; iterate directly.
    for entry in data:
        entry['text'] = entry['changelog']
        # Try to split "Name <email> - version" out of the author field.
        m = self._changelog_version_extract_re.match(entry['author'])
        if m:
            entry['author'] = escape(m.group(1))
            entry['email'] = m.group(2)
            entry['version'] = m.group(3)
        else:
            entry['author'] = escape(entry['author'])
        # convert the date to a nicer format
        obj = DateTimeDisplay(datetime.fromtimestamp(entry['date']))
        entry['display_date'] = obj.datetime.strftime("%d %b %Y")
    return len(data), data
def get_patches(self):
    """Return a list of dictionaries describing all ``*.patch`` files
    in this package's git tree.

    Each entry carries ``name``, a formatted ``date`` string, the raw
    ``datetime``, and a human-readable ``age``.  The list is sorted
    oldest-first by creation time.
    """
    patches = []
    for patch in [blob for blob in self.repo.tree().traverse()
                  if blob.name.endswith('.patch')]:
        created = self.get_creation_time(patch.name)
        patches.append({
            'name': patch.name,
            'date': created.strftime('%d %b %Y'),
            'datetime': created,
            'age': DateTimeDisplay(created).age(granularity='day',
                                                general=True),
        })
    # Fix: sorted(cmp=...) and the cmp() builtin are Python-2-only; a
    # key function is equivalent here and works on Python 2 and 3.
    return sorted(patches, key=lambda p: p['datetime'])
def group_membership_over_time(self, group_name="cla_done"):
    """Return the cumulative count of approved members of a FAS group
    over time.

    :param group_name: FAS group to query (default ``"cla_done"``).
    :returns: a list of ``(timestamp_ms, cumulative_approvals)`` tuples,
        restricted to timestamps after the trustworthy-data cutoff.
    """
    # This is the magic time (in microseconds since the UNIX Epoch) that
    # Toshio gave me where the end of the initial FAS2 import lies. Any
    # timestamps prior to this can't be trusted.
    # start_date = "2008-03-12 02:06:00"
    start_date = 1205305560000
    fas = self.create_fas_object()
    group = fas.people_query(
        constraints={'group': group_name, 'role_status': 'approved'},
        columns=['role_approval'])
    approval = {}
    for row in group:
        if not row['role_approval']:
            continue
        # Strip the timezone offset ("+HH:MM") before parsing.
        timeobject = DateTimeDisplay(row['role_approval'].split('+')[0])
        timetuple = timeobject.datetime.timetuple()
        # Truncate to midnight so approvals are bucketed per day.
        timetuple_new = (timetuple.tm_year, timetuple.tm_mon,
                         timetuple.tm_mday, 0, 0, 0, 0, 0, 0)
        timestamp = int(time.mktime(timetuple_new)) * 1000
        # Fix: dict.get replaces the O(n) 'in approval.keys()' test
        # (keys() built a fresh list on Python 2).
        approval[timestamp] = approval.get(timestamp, 0) + 1
    data = []
    approves = 0
    # Fix: sorted() works on both Python 2 and 3; calling .sort() on
    # dict.keys() only worked on Python 2 where keys() was a list.
    for thattime in sorted(approval):
        approves += approval[thattime]
        if thattime > start_date:
            data.append((thattime, approves))
    return data
def get_patch_changelog(self, patch):
    """Return a list of the changes made to this patch.

    Parses the raw ``git log`` output for *patch* into one dict per
    commit with ``author``, ``date`` (a datetime) and ``msg`` keys,
    then runs :meth:`inject_links` over the result.
    """
    commits = []
    current = {}
    for commit in self.repo.git.log(patch).split('\n'):
        chunks = commit.split()
        if chunks:
            if chunks[0] == 'commit':
                # A new commit header: flush the previous record.
                if current:
                    commits.append(current)
                current = {'msg': ''}
            elif chunks[0] == 'Author:':
                current['author'] = to_unicode(' '.join(chunks[1:]))
            elif chunks[0] == 'Date:':
                # Drop the trailing timezone offset before parsing.
                current['date'] = DateTimeDisplay(
                    ' '.join(chunks[1:-1]),
                    format='%a %b %d %H:%M:%S %Y').datetime
            else:
                # Everything else is part of the commit message.
                current['msg'] += to_unicode('%s\n' % ' '.join(chunks))
    # Fix: only append the trailing record if a commit was actually
    # parsed; the original unconditionally appended an empty dict when
    # the log output was empty.
    if current:
        commits.append(current)
    self.inject_links(commits)
    return commits
def query_userinfo(self, start_row=None, rows_per_page=None, order=-1,
                   sort_col=None, filters=None, **params):
    """Return profile information for a single user.

    When the ``profile`` filter is set, the currently-logged-in user's
    own view is returned; otherwise the ``username`` filter selects the
    user.  Returns ``(1, [view])`` on success, ``(-1, view)`` when the
    lookup produced an error view, or ``None`` when no view was found.
    """
    # Fix: the original used a mutable default argument
    # (filters=dict()), which is created once and shared across calls;
    # use None as the sentinel instead.
    if filters is None:
        filters = {}
    filters = self._query_userinfo_filter.filter(filters)
    un = filters.get('username')
    profile = filters.get('profile', False)
    current_id = self._environ.get('repoze.who.identity')
    current_user = None
    if current_id:
        current_user = current_id['repoze.who.userid']
    if profile:
        un = current_user
    # NOTE(review): if nobody is logged in and un is also None, the
    # branch below indexes current_id (None) — presumably the filters
    # guarantee a username in that case; verify against callers.
    if un == current_user:
        view = current_id['person']
    else:
        view = self.get_user_view(un)
    if 'error_type' in view:
        return (-1, view)
    if not view:
        return None
    created = DateTimeDisplay(view['creation'])
    if created.datetime:
        view['created_display'] = created.datetime.strftime("%d %b %Y")
    else:
        view['created_display'] = ''
    # there is only ever one row returned
    return (1, [view])
def query_updates(self, start_row=None, rows_per_page=None, order=-1,
                  sort_col=None, filters=None, **params):
    """Query bodhi (v1 'list' API) for updates and massage each result
    into the display shape the UI expects.

    Adds name/version/release/status/nvr keys, action-button HTML,
    human-readable dates and a karma summary to every update.

    :returns: ``(total_count, updates_list)``
    """
    if not filters:
        filters = {}
    filters = self._query_updates_filter.filter(filters, conn=self)
    group_updates = filters.get('group_updates', True)
    params.update(filters)
    # bodhi1 pagination is 1-based.
    params['tg_paginate_no'] = int(start_row / rows_per_page) + 1
    # If we're grouping updates, ask for twice as much.  This is so we can
    # handle the case where there are two updates for each package, one for
    # each release.  Yes, worst case we get twice as much data as we ask
    # for, but this allows us to do *much* more efficient database calls on
    # the server.
    if group_updates:
        params['tg_paginate_limit'] = rows_per_page * 2
    else:
        params['tg_paginate_limit'] = rows_per_page
    results = self._bodhi_client.send_request('list', req_params=params)
    total_count = results[1]['num_items']
    if group_updates:
        updates_list = self._group_updates(results[1]['updates'],
                                           num_packages=rows_per_page)
    else:
        updates_list = results[1]['updates']
    for up in updates_list:
        versions = []
        releases = []
        if group_updates:
            up['title'] = up['dist_updates'][0]['title']
            for dist_update in up['dist_updates']:
                versions.append(dist_update['version'])
                releases.append(dist_update['release_name'])
            up['name'] = up['package_name']
            up['versions'] = versions
            up['releases'] = releases
            up['status'] = up['dist_updates'][0]['status']
            up['nvr'] = up['dist_updates'][0]['title']
            # NOTE(review): dist_update here is the *last* loop value,
            # so the request_id uses the final dist update's version.
            up['request_id'] = up['package_name'] + dist_update[
                'version'].replace('.', '')
        else:
            # Ungrouped: derive name/version from the n-v-r title.
            chunks = up['title'].split('-')
            up['name'] = '-'.join(chunks[:-2])
            up['version'] = '-'.join(chunks[-2:])
            up['versions'] = chunks[-2]
            up['releases'] = up['release']['long_name']
            up['nvr'] = up['title']
            up['request_id'] = up.get('updateid') or \
                up['nvr'].replace('.', '').replace(',', '')
        # A unique id that we can use in HTML class fields.
        up['id'] = up['nvr'].split(',')[0]
        #up['request_id'] = up.get('updateid') or \
        #    up['nvr'].replace('.', '').replace(',', '')
        actions = []
        # Right now we're making the assumption that if you're logged
        # in, we query by your username, thus you should be able to
        # modify these updates.  This way, we avoid the pkgdb calls.
        # Ideally, we should get the real ACLs from the pkgdb connector's
        # cache.
        if filters.get('username'):
            # If we have multiple updates that are all in the same state,
            # then create a single set of action buttons to control all
            # of them.  If not, then supply separate ones.
            if 'dist_updates' in up and len(up['dist_updates']) > 1:
                # NOTE(review): both branches below are identical; the
                # status comparison currently has no effect.
                if up['dist_updates'][0]['status'] != \
                        up['dist_updates'][1]['status']:
                    for update in up['dist_updates']:
                        for action in self._get_update_actions(update):
                            actions.append(action)
                else:
                    for update in up['dist_updates']:
                        for action in self._get_update_actions(update):
                            actions.append(action)
            else:
                # Create a single set of action buttons
                if 'dist_updates' in up:
                    update = up['dist_updates'][0]
                else:
                    update = up
                for action in self._get_update_actions(update):
                    actions.append(action)
        up['actions'] = ''
        for action in actions:
            reqs = ''
            if group_updates:
                for u in up['dist_updates']:
                    reqs += "update_action('%s', '%s');" % (u['title'], action[0])
                title = up['dist_updates'][0]['title']
            else:
                reqs += "update_action('%s', '%s');" % (up['title'], action[0])
                title = up['title']
            # FIXME: Don't embed HTML
            up['actions'] += """ <button id="%s_%s" onclick="%s return false;">%s</button><br/> """ % (title.replace('.', ''), action[0], reqs, action[1])
        # Dates
        if group_updates:
            date_submitted = up['dist_updates'][0]['date_submitted']
            date_pushed = up['dist_updates'][0]['date_pushed']
        else:
            date_submitted = up['date_submitted']
            date_pushed = up['date_pushed']
        granularity = filters.get('granularity', 'day')
        ds = DateTimeDisplay(date_submitted)
        up['date_submitted_display'] = ds.age(granularity=granularity,
                                              general=True) + ' ago'
        if date_pushed:
            dp = DateTimeDisplay(date_pushed)
            up['date_pushed'] = dp.datetime.strftime('%d %b %Y')
            up['date_pushed_display'] = dp.age(granularity=granularity,
                                               general=True) + ' ago'
        # karma
        # FIXME: take into account karma from both updates
        if group_updates:
            k = up['dist_updates'][0]['karma']
        else:
            k = up['karma']
        if k:
            up['karma_str'] = "%+d" % k
        else:
            up['karma_str'] = " %d" % k
        up['karma_level'] = 'meh'
        if k > 0:
            up['karma_level'] = 'good'
        if k < 0:
            up['karma_level'] = 'bad'
        up['details'] = self._get_update_details(up)
    return (total_count, updates_list)
def query_builds(self, start_row=None, rows_per_page=10, order=-1,
                 sort_col=None, filters=None, **params):
    """Query koji for builds, batching the count and page queries into
    a single multicall, and attach display-friendly state and
    completion-time info to each build.

    :returns: ``(total_count, builds_list)``
    """
    if not filters:
        filters = {}
    filters = self._query_builds_filter.filter(filters, conn=self)
    username = filters.get('user', '')
    package = filters.get('package', '')
    state = filters.get('state')
    complete_before = None
    complete_after = None
    # need a better way to specify this
    # completed_filter = filters.get('completed')
    # if completed_filter:
    #     if completed_filter['op'] in ('>', 'after'):
    #         complete_after = completed_filter['value']
    #     elif completed_filter['op'] in ('<', 'before'):
    #         complete_before = completed_filter['value']
    # koji expresses descending sort with a '-' prefix on the column.
    if order < 0:
        order = '-' + sort_col
    else:
        order = sort_col
    user = None
    id = None
    if username:
        user = self._koji_client.getUser(username)
        # we need to check if this user exists
        if username and not user:
            return (0, [])
        id = user['id']
    pkg_id = None
    if package:
        pkg_id = self._koji_client.getPackageID(package)
    queryOpts = None
    if state:
        # state may be a single int or a comma-separated list of ints.
        try:
            state = int(state)
        except ValueError:
            state_list = []
            for value in state.split(','):
                state_list.append(int(value))
            state = state_list
    elif state == '':
        state = None
    qo = {}
    if not (start_row is None):
        qo['offset'] = int(start_row)
    if not (rows_per_page is None):
        qo['limit'] = int(rows_per_page)
    if order:
        qo['order'] = order
    if qo:
        queryOpts = qo
    countQueryOpts = {'countOnly': True}
    # Two listBuilds calls in one round-trip: first the total count,
    # then the actual page of results.
    self._koji_client.multicall = True
    self._koji_client.listBuilds(packageID=pkg_id, userID=id, state=state,
                                 completeBefore=complete_before,
                                 completeAfter=complete_after,
                                 queryOpts=countQueryOpts)
    self._koji_client.listBuilds(packageID=pkg_id, userID=id, state=state,
                                 completeBefore=complete_before,
                                 completeAfter=complete_after,
                                 queryOpts=queryOpts)
    results = self._koji_client.multiCall()
    builds_list = results[1][0]
    total_count = results[0][0]
    for b in builds_list:
        state = b['state']
        b['state_str'] = koji.BUILD_STATES[state].lower()
        start = DateTimeDisplay(b['creation_time'])
        complete = b['completion_time']
        completion_display = None
        if not complete:
            # Build still running: no completion time to show.
            completion_display = {
                'when': 'In progress...',
                'should_display_time': False,
                'time': '',
            }
            completion_display['elapsed'] = start.age(granularity='minute')
        else:
            completion_display = {}
            complete = DateTimeDisplay(b['completion_time'])
            completion_display['elapsed'] = start.age(complete,
                                                      granularity='minute')
            completion_display['when'] = complete.age(
                granularity='minute', general=True) + ' ago'
            # Render the completion time in the viewer's timezone when
            # they are logged in; otherwise fall back to UTC.
            ident = self._request.environ.get('repoze.who.identity')
            if ident:
                username = ident.get('repoze.who.userid')
                tz = ident['person']['timezone']
                completion_display['time'] = \
                    complete.astimezone(tz).strftime('%I:%M %p %Z')
            else:
                completion_display['time'] = \
                    complete.datetime.strftime('%I:%M %p') + ' UTC'
        b['completion_time_display'] = completion_display
    # Query the bodhi update status for each build
    if filters.get('query_updates'):
        bodhi = get_connector('bodhi')
        bodhi.add_updates_to_builds(builds_list)
    self._koji_client.multicall = False
    return (total_count, builds_list)
def get_creation_time(self, filename):
    """Return a datetime object for the date a given file was created.

    Reads the oldest commit for *filename* via ``git log --reverse``
    and parses the third header line, which holds the commit date.
    """
    log_output = self.repo.git.log(filename, reverse=True)
    # Line index 2 of the oldest commit's header is its "Date:" line.
    date_line = log_output.split('\n')[2]
    # Drop the leading "Date:" token and the trailing timezone offset.
    date_text = ' '.join(date_line.split()[1:-1])
    return DateTimeDisplay(date_text, format='%a %b %d %H:%M:%S %Y').datetime
def query_updates(self, start_row=None, rows_per_page=None, order=-1,
                  sort_col=None, filters=None, **params):
    """Query bodhi (v1 'list' API) for updates and massage each result
    into the display shape the UI expects.

    Adds name/version/release/status/nvr keys, action-button HTML,
    human-readable dates and a karma summary to every update.

    :returns: ``(total_count, updates_list)``
    """
    if not filters:
        filters = {}
    filters = self._query_updates_filter.filter(filters, conn=self)
    group_updates = filters.get('group_updates', True)
    params.update(filters)
    # bodhi1 pagination is 1-based.
    params['tg_paginate_no'] = int(start_row / rows_per_page) + 1
    # If we're grouping updates, ask for twice as much.  This is so we can
    # handle the case where there are two updates for each package, one for
    # each release.  Yes, worst case we get twice as much data as we ask
    # for, but this allows us to do *much* more efficient database calls on
    # the server.
    if group_updates:
        params['tg_paginate_limit'] = rows_per_page * 2
    else:
        params['tg_paginate_limit'] = rows_per_page
    results = self._bodhi_client.send_request('list', req_params=params)
    total_count = results[1]['num_items']
    if group_updates:
        updates_list = self._group_updates(results[1]['updates'],
                                           num_packages=rows_per_page)
    else:
        updates_list = results[1]['updates']
    for up in updates_list:
        versions = []
        releases = []
        if group_updates:
            up['title'] = up['dist_updates'][0]['title']
            for dist_update in up['dist_updates']:
                versions.append(dist_update['version'])
                releases.append(dist_update['release_name'])
            up['name'] = up['package_name']
            up['versions'] = versions
            up['releases'] = releases
            up['status'] = up['dist_updates'][0]['status']
            up['nvr'] = up['dist_updates'][0]['title']
            # NOTE(review): dist_update here is the *last* loop value,
            # so the request_id uses the final dist update's version.
            up['request_id'] = up['package_name'] + dist_update['version'].replace('.', '')
        else:
            # Ungrouped: derive name/version from the n-v-r title.
            chunks = up['title'].split('-')
            up['name'] = '-'.join(chunks[:-2])
            up['version'] = '-'.join(chunks[-2:])
            up['versions'] = chunks[-2]
            up['releases'] = up['release']['long_name']
            up['nvr'] = up['title']
            up['request_id'] = up.get('updateid') or \
                up['nvr'].replace('.', '').replace(',', '')
        # A unique id that we can use in HTML class fields.
        up['id'] = up['nvr'].split(',')[0]
        #up['request_id'] = up.get('updateid') or \
        #    up['nvr'].replace('.', '').replace(',', '')
        actions = []
        # Right now we're making the assumption that if you're logged
        # in, we query by your username, thus you should be able to
        # modify these updates.  This way, we avoid the pkgdb calls.
        # Ideally, we should get the real ACLs from the pkgdb connector's
        # cache.
        if filters.get('username'):
            # If we have multiple updates that are all in the same state,
            # then create a single set of action buttons to control all
            # of them.  If not, then supply separate ones.
            if 'dist_updates' in up and len(up['dist_updates']) > 1:
                # NOTE(review): both branches below are identical; the
                # status comparison currently has no effect.
                if up['dist_updates'][0]['status'] != \
                        up['dist_updates'][1]['status']:
                    for update in up['dist_updates']:
                        for action in self._get_update_actions(update):
                            actions.append(action)
                else:
                    for update in up['dist_updates']:
                        for action in self._get_update_actions(update):
                            actions.append(action)
            else:
                # Create a single set of action buttons
                if 'dist_updates' in up:
                    update = up['dist_updates'][0]
                else:
                    update = up
                for action in self._get_update_actions(update):
                    actions.append(action)
        up['actions'] = ''
        for action in actions:
            reqs = ''
            if group_updates:
                for u in up['dist_updates']:
                    reqs += "update_action('%s', '%s');" % (u['title'], action[0])
                title = up['dist_updates'][0]['title']
            else:
                reqs += "update_action('%s', '%s');" % (up['title'], action[0])
                title = up['title']
            # FIXME: Don't embed HTML
            up['actions'] += """ <button id="%s_%s" onclick="%s return false;">%s</button><br/> """ % (title.replace('.', ''), action[0], reqs, action[1])
        # Dates
        if group_updates:
            date_submitted = up['dist_updates'][0]['date_submitted']
            date_pushed = up['dist_updates'][0]['date_pushed']
        else:
            date_submitted = up['date_submitted']
            date_pushed = up['date_pushed']
        granularity = filters.get('granularity', 'day')
        ds = DateTimeDisplay(date_submitted)
        up['date_submitted_display'] = ds.age(granularity=granularity,
                                              general=True) + ' ago'
        if date_pushed:
            dp = DateTimeDisplay(date_pushed)
            up['date_pushed'] = dp.datetime.strftime('%d %b %Y')
            up['date_pushed_display'] = dp.age(granularity=granularity,
                                               general=True) + ' ago'
        # karma
        # FIXME: take into account karma from both updates
        if group_updates:
            k = up['dist_updates'][0]['karma']
        else:
            k = up['karma']
        if k:
            up['karma_str'] = "%+d" % k
        else:
            up['karma_str'] = " %d" % k
        up['karma_level'] = 'meh'
        if k > 0:
            up['karma_level'] = 'good'
        if k < 0:
            up['karma_level'] = 'bad'
        up['details'] = self._get_update_details(up)
    return (total_count, updates_list)
def query_builds(self, start_row=None, rows_per_page=10, order=-1,
                 sort_col=None, filters=None, **params):
    """Query koji for builds, batching the count and page queries into
    a single multicall, and attach display-friendly state and
    completion-time info to each build.

    :returns: ``(total_count, builds_list)``
    """
    if not filters:
        filters = {}
    filters = self._query_builds_filter.filter(filters, conn=self)
    username = filters.get('user', '')
    package = filters.get('package', '')
    state = filters.get('state')
    complete_before = None
    complete_after = None
    # need a better way to specify this
    # completed_filter = filters.get('completed')
    # if completed_filter:
    #     if completed_filter['op'] in ('>', 'after'):
    #         complete_after = completed_filter['value']
    #     elif completed_filter['op'] in ('<', 'before'):
    #         complete_before = completed_filter['value']
    # koji expresses descending sort with a '-' prefix on the column.
    if order < 0:
        order = '-' + sort_col
    else:
        order = sort_col
    user = None
    id = None
    if username:
        user = self._koji_client.getUser(username)
        # we need to check if this user exists
        if username and not user:
            return (0, [])
        id = user['id']
    pkg_id = None
    if package:
        pkg_id = self._koji_client.getPackageID(package)
    queryOpts = None
    if state:
        # state may be a single int or a comma-separated list of ints.
        try:
            state = int(state)
        except ValueError:
            state_list = []
            for value in state.split(','):
                state_list.append(int(value))
            state = state_list
    elif state == '':
        state = None
    qo = {}
    if not (start_row is None):
        qo['offset'] = int(start_row)
    if not (rows_per_page is None):
        qo['limit'] = int(rows_per_page)
    if order:
        qo['order'] = order
    if qo:
        queryOpts = qo
    countQueryOpts = {'countOnly': True}
    # Two listBuilds calls in one round-trip: first the total count,
    # then the actual page of results.
    self._koji_client.multicall = True
    self._koji_client.listBuilds(
        packageID=pkg_id, userID=id, state=state,
        completeBefore=complete_before,
        completeAfter=complete_after,
        queryOpts=countQueryOpts)
    self._koji_client.listBuilds(
        packageID=pkg_id, userID=id, state=state,
        completeBefore=complete_before,
        completeAfter=complete_after,
        queryOpts=queryOpts)
    results = self._koji_client.multiCall()
    builds_list = results[1][0]
    total_count = results[0][0]
    for b in builds_list:
        state = b['state']
        b['state_str'] = koji.BUILD_STATES[state].lower()
        start = DateTimeDisplay(b['creation_time'])
        complete = b['completion_time']
        completion_display = None
        if not complete:
            # Build still running: no completion time to show.
            completion_display = {
                'when': 'In progress...',
                'should_display_time': False,
                'time': '',
            }
            completion_display['elapsed'] = start.age(granularity='minute')
        else:
            completion_display = {}
            complete = DateTimeDisplay(b['completion_time'])
            completion_display['elapsed'] = start.age(
                complete, granularity='minute')
            completion_display['when'] = complete.age(
                granularity='minute', general=True) + ' ago'
            # Render the completion time in the viewer's timezone when
            # they are logged in; otherwise fall back to UTC.
            ident = self._request.environ.get('repoze.who.identity')
            if ident:
                username = ident.get('repoze.who.userid')
                tz = ident['person']['timezone']
                completion_display['time'] = \
                    complete.astimezone(tz).strftime('%I:%M %p %Z')
            else:
                completion_display['time'] = \
                    complete.datetime.strftime('%I:%M %p') + ' UTC'
        b['completion_time_display'] = completion_display
    # Query the bodhi update status for each build
    if filters.get('query_updates'):
        bodhi = get_connector('bodhi')
        bodhi.add_updates_to_builds(builds_list)
    self._koji_client.multicall = False
    return (total_count, builds_list)
def query_updates(self, start_row=None, rows_per_page=None, order=-1,
                  sort_col=None, filters=None, **params):
    """Query bodhi (v2 'updates' API) for updates and massage each
    result into the display shape the UI expects.

    Converts a few bodhi1-style parameter names to their bodhi2
    equivalents, then adds name/version/release/status/nvr keys,
    human-readable dates and a karma summary to every update.

    :returns: ``(total_count, updates_list)``
    """
    if not filters:
        filters = {}
    filters = self._query_updates_filter.filter(filters, conn=self)
    group_updates = filters.get('group_updates', True)
    params.update(filters)
    # bodhi2 pagination is 1-based.
    params['page'] = int(start_row / rows_per_page) + 1
    # If we're grouping updates, ask for twice as much.  This is so we can
    # handle the case where there are two updates for each package, one for
    # each release.  Yes, worst case we get twice as much data as we ask
    # for, but this allows us to do *much* more efficient database calls on
    # the server.
    if group_updates:
        params['rows_per_page'] = rows_per_page * 2
    else:
        params['rows_per_page'] = rows_per_page
    # Convert bodhi1 query format to bodhi2.
    if 'package' in params:
        params['packages'] = params.pop('package')
    if 'release' in params:
        params['releases'] = params.pop('release')
    results = self._bodhi_client.send_request('updates', auth=False,
                                              params=params)
    total_count = results['total']
    if group_updates:
        updates_list = self._group_updates(results['updates'],
                                           num_packages=rows_per_page)
    else:
        updates_list = results['updates']
    for up in updates_list:
        versions = []
        releases = []
        if group_updates:
            up['title'] = up['dist_updates'][0]['title']
            for dist_update in up['dist_updates']:
                versions.append(dist_update['version'])
                releases.append(dist_update['release_name'])
            up['name'] = up['package_name']
            up['versions'] = versions
            up['releases'] = releases
            up['status'] = up['dist_updates'][0]['status']
            up['nvr'] = up['dist_updates'][0]['title']
            # NOTE(review): dist_update here is the *last* loop value,
            # so the request_id uses the final dist update's version.
            up['request_id'] = up['package_name'] + \
                dist_update['version'].replace('.', '')
        else:
            # Ungrouped: derive name/version from the n-v-r title.
            chunks = up['title'].split('-')
            up['name'] = '-'.join(chunks[:-2])
            up['version'] = '-'.join(chunks[-2:])
            up['versions'] = chunks[-2]
            up['releases'] = up['release']['long_name']
            up['nvr'] = up['title']
            up['request_id'] = up.get('updateid') or \
                up['nvr'].replace('.', '').replace(',', '')
        # A unique id that we can use in HTML class fields.
        up['id'] = up['nvr'].split(',')[0]
        #up['request_id'] = up.get('updateid') or \
        #    up['nvr'].replace('.', '').replace(',', '')
        # NOTE(review): actions is always empty here (the bodhi1
        # username/ACL branch was removed), so the loop below never
        # runs; it appears to be kept from the bodhi1 version.
        actions = []
        up['actions'] = ''
        for action in actions:
            reqs = ''
            if group_updates:
                for u in up['dist_updates']:
                    reqs += "update_action('%s', '%s');" % (u['title'], action[0])
                title = up['dist_updates'][0]['title']
            else:
                reqs += "update_action('%s', '%s');" % (up['title'], action[0])
                title = up['title']
            # FIXME: Don't embed HTML
            up['actions'] += """ <button id="%s_%s" onclick="%s return false;">%s</button><br/> """ % (title.replace('.', ''), action[0], reqs, action[1])
        # Dates
        if group_updates:
            date_submitted = up['dist_updates'][0]['date_submitted']
            date_pushed = up['dist_updates'][0]['date_pushed']
        else:
            date_submitted = up['date_submitted']
            date_pushed = up['date_pushed']
        granularity = filters.get('granularity', 'day')
        ds = DateTimeDisplay(date_submitted)
        up['date_submitted_display'] = ds.age(granularity=granularity,
                                              general=True) + ' ago'
        if date_pushed:
            dp = DateTimeDisplay(date_pushed)
            up['date_pushed'] = dp.datetime.strftime('%d %b %Y')
            up['date_pushed_display'] = dp.age(granularity=granularity,
                                               general=True) + ' ago'
        # karma
        # FIXME: take into account karma from both updates
        if group_updates:
            k = up['dist_updates'][0]['karma']
        else:
            k = up['karma']
        if k:
            up['karma_str'] = "%+d" % k
        else:
            up['karma_str'] = " %d" % k
        up['karma_level'] = 'meh'
        if k > 0:
            up['karma_level'] = 'good'
        if k < 0:
            up['karma_level'] = 'bad'
        up['details'] = self._get_update_details(up)
    return (total_count, updates_list)
def query_changelogs(self, start_row=None, rows_per_page=10, order=-1,
                     sort_col=None, filters=None, **params):
    """Fetch changelog entries for a build from koji.

    Requires a ``build_id`` filter.  The count query and the page query
    are batched into a single koji multicall.  Each entry is augmented
    with ``display_date`` and, when the author field parses,
    ``author`` / ``email`` / ``version`` keys.

    :returns: ``(total_count, changelog_list)``
    """
    if not filters:
        filters = {}
    filters = self._query_changelogs_filter.filter(filters, conn=self)
    # NOTE(review): int(None) raises TypeError when 'build_id' is
    # absent — presumably the filter above guarantees it; verify.
    build_id = int(filters.get('build_id', None))
    # (The unused 'task_id' and 'state' filter lookups were removed.)
    # koji expresses descending sort with a '-' prefix on the column.
    if order < 0:
        order = '-' + sort_col
    else:
        order = sort_col
    queryOpts = None
    qo = {}
    # Fix: use identity comparison with None (PEP 8) instead of '== None'.
    if start_row is not None:
        qo['offset'] = int(start_row)
    if rows_per_page is not None:
        qo['limit'] = int(rows_per_page)
    if order:
        qo['order'] = order
    if qo:
        queryOpts = qo
    countQueryOpts = {'countOnly': True}
    # Two getChangelogEntries calls in one round-trip: count, then page.
    self._koji_client.multicall = True
    self._koji_client.getChangelogEntries(buildID=build_id,
                                          queryOpts=countQueryOpts)
    self._koji_client.getChangelogEntries(buildID=build_id,
                                          queryOpts=queryOpts)
    results = self._koji_client.multiCall()
    changelog_list = results[1][0]
    for entry in changelog_list:
        # try to extract a version and e-mail from the authors field
        m = self._changelog_version_extract_re.match(entry['author'])
        if m:
            entry['author'] = escape(m.group(1))
            entry['email'] = m.group(2)
            entry['version'] = m.group(3)
        else:
            entry['author'] = escape(entry['author'])
        # convert the date to a nicer format
        entry['display_date'] = \
            DateTimeDisplay(entry['date']).datetime.strftime("%d %b %Y")
    total_count = results[0][0]
    self._koji_client.multicall = False
    return (total_count, changelog_list)
def query_updates(self, start_row=None, rows_per_page=None, order=-1,
                  sort_col=None, filters=None, **params):
    """Query bodhi (v2 'updates' API) for updates and massage each
    result into the display shape the UI expects.

    Converts a few bodhi1-style parameter names to their bodhi2
    equivalents, then adds name/version/release/status/nvr keys,
    human-readable dates and a karma summary to every update.

    :returns: ``(total_count, updates_list)``
    """
    if not filters:
        filters = {}
    filters = self._query_updates_filter.filter(filters, conn=self)
    group_updates = filters.get('group_updates', True)
    params.update(filters)
    # bodhi2 pagination is 1-based.
    params['page'] = int(start_row/rows_per_page) + 1
    # If we're grouping updates, ask for twice as much.  This is so we can
    # handle the case where there are two updates for each package, one for
    # each release.  Yes, worst case we get twice as much data as we ask
    # for, but this allows us to do *much* more efficient database calls on
    # the server.
    if group_updates:
        params['rows_per_page'] = rows_per_page * 2
    else:
        params['rows_per_page'] = rows_per_page
    # Convert bodhi1 query format to bodhi2.
    if 'package' in params:
        params['packages'] = params.pop('package')
    if 'release' in params:
        params['releases'] = params.pop('release')
    results = self._bodhi_client.send_request('updates', auth=False,
                                              params=params)
    total_count = results['total']
    if group_updates:
        updates_list = self._group_updates(results['updates'],
                                           num_packages=rows_per_page)
    else:
        updates_list = results['updates']
    for up in updates_list:
        versions = []
        releases = []
        if group_updates:
            up['title'] = up['dist_updates'][0]['title']
            for dist_update in up['dist_updates']:
                versions.append(dist_update['version'])
                releases.append(dist_update['release_name'])
            up['name'] = up['package_name']
            up['versions'] = versions
            up['releases'] = releases
            up['status'] = up['dist_updates'][0]['status']
            up['nvr'] = up['dist_updates'][0]['title']
            # NOTE(review): dist_update here is the *last* loop value,
            # so the request_id uses the final dist update's version.
            up['request_id'] = up['package_name'] + \
                dist_update['version'].replace('.', '')
        else:
            # Ungrouped: derive name/version from the n-v-r title.
            chunks = up['title'].split('-')
            up['name'] = '-'.join(chunks[:-2])
            up['version'] = '-'.join(chunks[-2:])
            up['versions'] = chunks[-2]
            up['releases'] = up['release']['long_name']
            up['nvr'] = up['title']
            up['request_id'] = up.get('updateid') or \
                up['nvr'].replace('.', '').replace(',', '')
        # A unique id that we can use in HTML class fields.
        up['id'] = up['nvr'].split(',')[0]
        #up['request_id'] = up.get('updateid') or \
        #    up['nvr'].replace('.', '').replace(',', '')
        # NOTE(review): actions is always empty here (the bodhi1
        # username/ACL branch was removed), so the loop below never
        # runs; it appears to be kept from the bodhi1 version.
        actions = []
        up['actions'] = ''
        for action in actions:
            reqs = ''
            if group_updates:
                for u in up['dist_updates']:
                    reqs += "update_action('%s', '%s');" % (u['title'], action[0])
                title = up['dist_updates'][0]['title']
            else:
                reqs += "update_action('%s', '%s');" % (up['title'], action[0])
                title = up['title']
            # FIXME: Don't embed HTML
            up['actions'] += """ <button id="%s_%s" onclick="%s return false;">%s</button><br/> """ % (title.replace('.', ''), action[0], reqs, action[1])
        # Dates
        if group_updates:
            date_submitted = up['dist_updates'][0]['date_submitted']
            date_pushed = up['dist_updates'][0]['date_pushed']
        else:
            date_submitted = up['date_submitted']
            date_pushed = up['date_pushed']
        granularity = filters.get('granularity', 'day')
        ds = DateTimeDisplay(date_submitted)
        up['date_submitted_display'] = ds.age(granularity=granularity,
                                              general=True) + ' ago'
        if date_pushed:
            dp = DateTimeDisplay(date_pushed)
            up['date_pushed'] = dp.datetime.strftime('%d %b %Y')
            up['date_pushed_display'] = dp.age(granularity=granularity,
                                               general=True) + ' ago'
        # karma
        # FIXME: take into account karma from both updates
        if group_updates:
            k = up['dist_updates'][0]['karma']
        else:
            k = up['karma']
        if k:
            up['karma_str'] = "%+d" % k
        else:
            up['karma_str'] = " %d" % k
        up['karma_level'] = 'meh'
        if k > 0:
            up['karma_level'] = 'good'
        if k < 0:
            up['karma_level'] = 'bad'
        up['details'] = self._get_update_details(up)
    return (total_count, updates_list)