def _get_build_data(env, req, build, repos_name=None):
    """Return a template-ready dict describing *build*.

    When *repos_name* is given, a changeset URL for the build's revision
    is resolved in that repository; otherwise it is left empty.
    """
    chgset_url = ''
    if repos_name:
        chgset_resource = get_chgset_resource(env, repos_name, build.rev)
        chgset_url = get_resource_url(env, chgset_resource, req.href)
    platform = TargetPlatform.fetch(env, build.platform)
    status_label = _status_label[build.status]
    data = {
        'id': build.id,
        'name': build.slave,
        'rev': build.rev,
        'status': status_label,
        'platform': getattr(platform, 'name', 'unknown'),
        'cls': status_label.replace(' ', '-'),
        'href': req.href.build(build.config, build.id),
        'chgset_href': chgset_url,
    }
    if build.started:
        data['started'] = format_datetime(build.started)
        data['started_delta'] = pretty_timedelta(build.started)
        data['duration'] = pretty_timedelta(build.started)
    if build.stopped:
        data['stopped'] = format_datetime(build.stopped)
        data['stopped_delta'] = pretty_timedelta(build.stopped)
        # A finished build reports its real duration, overriding the
        # elapsed-so-far value set above.
        data['duration'] = pretty_timedelta(build.stopped, build.started)
    data['slave'] = {
        'name': build.slave,
        'ipnr': build.slave_info.get(Build.IP_ADDRESS),
        'os_name': build.slave_info.get(Build.OS_NAME),
        'os_family': build.slave_info.get(Build.OS_FAMILY),
        'os_version': build.slave_info.get(Build.OS_VERSION),
        'machine': build.slave_info.get(Build.MACHINE),
        'processor': build.slave_info.get(Build.PROCESSOR),
    }
    return data
def _get_build_data(env, req, build):
    """Return a template-ready dict describing *build*."""
    status_label = _status_label[build.status]
    data = {
        'id': build.id,
        'name': build.slave,
        'rev': build.rev,
        'status': status_label,
        'cls': status_label.replace(' ', '-'),
        'href': req.href.build(build.config, build.id),
        'chgset_href': req.href.changeset(build.rev),
    }
    if build.started:
        data['started'] = format_datetime(build.started)
        data['started_delta'] = pretty_timedelta(build.started)
        data['duration'] = pretty_timedelta(build.started)
    if build.stopped:
        data['stopped'] = format_datetime(build.stopped)
        data['stopped_delta'] = pretty_timedelta(build.stopped)
        # Finished builds override the elapsed-so-far duration.
        data['duration'] = pretty_timedelta(build.stopped, build.started)
    data['slave'] = {
        'name': build.slave,
        'ipnr': build.slave_info.get(Build.IP_ADDRESS),
        'os_name': build.slave_info.get(Build.OS_NAME),
        'os_family': build.slave_info.get(Build.OS_FAMILY),
        'os_version': build.slave_info.get(Build.OS_VERSION),
        'machine': build.slave_info.get(Build.MACHINE),
        'processor': build.slave_info.get(Build.PROCESSOR),
    }
    return data
def _get_build_data(env, req, build):
    """Return a template-ready dict describing *build*, including the
    name of its target platform (or ``'unknown'`` if not found)."""
    platform = TargetPlatform.fetch(env, build.platform)
    status_label = _status_label[build.status]
    data = {
        'id': build.id,
        'name': build.slave,
        'rev': build.rev,
        'status': status_label,
        'platform': getattr(platform, 'name', 'unknown'),
        'cls': status_label.replace(' ', '-'),
        'href': req.href.build(build.config, build.id),
        'chgset_href': req.href.changeset(build.rev),
    }
    if build.started:
        data['started'] = format_datetime(build.started)
        data['started_delta'] = pretty_timedelta(build.started)
        data['duration'] = pretty_timedelta(build.started)
    if build.stopped:
        data['stopped'] = format_datetime(build.stopped)
        data['stopped_delta'] = pretty_timedelta(build.stopped)
        # Finished builds override the elapsed-so-far duration.
        data['duration'] = pretty_timedelta(build.stopped, build.started)
    data['slave'] = {
        'name': build.slave,
        'ipnr': build.slave_info.get(Build.IP_ADDRESS),
        'os_name': build.slave_info.get(Build.OS_NAME),
        'os_family': build.slave_info.get(Build.OS_FAMILY),
        'os_version': build.slave_info.get(Build.OS_VERSION),
        'machine': build.slave_info.get(Build.MACHINE),
        'processor': build.slave_info.get(Build.PROCESSOR),
    }
    return data
def process_request(self, req):
    """Render the blog index from wiki pages prefixed 'Blog'.

    Entries are sorted newest-first and capped at 20; output is either
    the HTML template or an RSS feed depending on the ``format`` arg.
    """
    req.hdf['trac.href.blog'] = req.href.blog()
    entries = []
    for page_name in WikiSystem(self.env).get_pages(prefix='Blog'):
        page = WikiPage(self.env, page_name)
        title = page_name
        text = page.text
        match = title_split_match(page.text)
        if match:
            title = match.group(1)
            text = match.group(2)
        # Comments are embedded macro calls; count them and strip them
        # from the rendered body.
        comments = text.count('[[SimpleBlogComment(')
        cutoff = text.find('[[SimpleBlogComment(')
        if cutoff >= 0:
            text = text[:cutoff].rstrip()
        description = wiki_to_html(text, self.env, req)
        original = self._get_original_post_info(page_name)
        event = {
            'href': self.env.href.wiki(page_name),
            'title': title,
            'description': description,
            'escaped': Markup.escape(unicode(description)),
            'date': format_datetime(original['time']),
            'rfcdate': http_date(original['time']),
            'author': original['author'],
            'comment': original['comment'],
            'comments': comments,
        }
        if page.version > 1:
            # The page was edited after the original post.
            event['updated.version'] = page.version
            event['updated.date'] = format_datetime(page.time)
            event['updated.rfcdate'] = http_date(page.time)
            event['updated.author'] = page.author
            event['updated.comment'] = page.comment
        entries.append((original['time'], event))
    # Stable sort by post time, then reverse for newest-first order.
    entries.sort()
    entries.reverse()
    max_count = 20
    if len(entries) > max_count:
        entries = entries[:max_count]
    req.hdf['blog.events'] = [event for _date, event in entries]
    format = req.args.get('format')
    if format == 'rss':
        return 'blog_rss.cs', 'application/rss+xml'
    add_link(req, 'alternate', self.env.href.blog(format='rss'),
             'RSS Feed', 'application/rss+xml', 'rss')
    return 'blog.cs', None
def iTest_report(env, test):
    """Build an HTML summary report for *test* as a string.

    Fixes over the original: removed large swaths of commented-out code,
    removed computed-but-never-used locals (``start_t``, ``end``,
    ``timecost``), and replaced quadratic ``+=`` string building with a
    single ``join``.  The emitted markup (including the stray ``<br />``
    placeholders left by the disabled rows) is unchanged.
    """
    import itest_mmi_data

    test_type = itest_mmi_data._test_type(env, test)
    test_url = gl.url_log_testid_(test)
    passratio = itest_mmi_data._test_passratio(env, test)
    total = itest_mmi_data._test_total_num(env, test)
    result_path = itest_mmi_data._test_reportpath(env, test)
    caselist = itest_mmi_data._test_xmlfile_mmipath(env, test)

    parts = [
        "<br /><br /><br /><br />",
        web_li('Test Name', test.name), "<br />",
        web_li('Version Num', test.version_num), "<br />",
        web_li('Type', test_type), "<br />",
        web_li('Total Cases Num', str(total)), "<br />",
        web_li('Passed Cases Num', str(test.passed)), "<br />",
        web_li('Failed Cases Num', str(test.failed)), "<br />",
        web_li('Pass Ratio', passratio), "<br />",
        # Start/end/cost/status rows were disabled in the original; the
        # bare separators they left behind are kept for identical output.
        "<br />",
        "<br />",
        web_li('Result Path', result_path), "<br />",
        web_li('CaseList Path', caselist), "<br />",
        web_li('Version Path', test.versionpath), "<br />",
        web_li('Test URL', test_url), "<br /><br />",
        "<br />",
    ]
    return ''.join(parts)
def process_projmanager_request(self, req, cat, page, version):
    """Handle the version-admin panel: detail edit when *version* is
    given, otherwise the list view with add/remove actions."""
    if version:
        # Detail view of a single version.
        ver = ticket.Version(self.env, version)
        if req.method == 'POST':
            if req.args.get('save'):
                ver.name = req.args.get('name')
                if req.args.get('time'):
                    try:
                        ver.time = util.parse_date(req.args.get('time'))
                    except ValueError:
                        # Unparseable date: clear the stored time.
                        ver.time = ""
                ver.description = req.args.get('description')
                ver.update()
                req.redirect(self.env.href.projmanager(cat, page))
            elif req.args.get('cancel'):
                req.redirect(self.env.href.projmanager(cat, page))
        req.hdf['projmanager.version'] = {
            'name': ver.name,
            'time': ver.time and util.format_datetime(ver.time) or '',
            'description': ver.description,
        }
    else:
        if req.method == 'POST':
            if req.args.get('add') and req.args.get('name'):
                # Add a new version.
                ver = ticket.Version(self.env)
                ver.name = req.args.get('name')
                if req.args.get('time'):
                    ver.time = util.parse_date(req.args.get('time'))
                if req.args.get('description'):
                    ver.description = req.args.get('description')
                ver.insert()
                req.redirect(self.env.href.projmanager(cat, page))
            elif req.args.get('remove') and req.args.get('sel'):
                # Remove the selected versions in one transaction.
                sel = req.args.get('sel')
                sel = isinstance(sel, list) and sel or [sel]
                if not sel:
                    raise TracError('No version selected')
                db = self.env.get_db_cnx()
                for name in sel:
                    ticket.Version(self.env, name, db=db).delete(db=db)
                db.commit()
                req.redirect(self.env.href.projmanager(cat, page))
        req.hdf['projmanager.versions'] = [
            {'name': v.name,
             'time': v.time and util.format_datetime(v.time) or '',
             'href': self.env.href.projmanager(cat, page, v.name)}
            for v in ticket.Version.select(self.env)]
    return 'projmanager_version.cs', None
def render_discussion(self, req):
    """Dispatch a discussion request and fill the HDF for rendering.

    Resolves the requested group/forum/topic/message, performs the mode
    actions, then formats each item for the template.  Returns the
    template name and content type.
    """
    # Resolve the requested items and the mode chain.
    group, forum, topic, message = self._get_items(req)
    modes = self._get_modes(req, group, forum, topic, message)
    self.log.debug('modes: %s' % modes)

    # Moderator if listed on the forum or holding DISCUSSION_ADMIN.
    if forum:
        is_moderator = (req.authname in forum['moderators']) or \
                       req.perm.has_permission('DISCUSSION_ADMIN')
    else:
        is_moderator = req.perm.has_permission('DISCUSSION_ADMIN')

    # Perform mode actions.
    self._do_action(req, modes, group, forum, topic, message, is_moderator)

    # Stylesheets and the RSS alternate link.
    add_stylesheet(req, 'common/css/wiki.css')
    add_stylesheet(req, 'discussion/css/discussion.css')
    add_stylesheet(req, 'discussion/css/admin.css')
    add_link(req, 'alternate',
             '/timeline?discussion=on&max=50&daysback=90&format=rss',
             'POPFile forums', 'application/rss+xml')

    # Fill up HDF structure and return template.
    req.hdf['discussion.authname'] = req.authname
    req.hdf['discussion.is_moderator'] = is_moderator
    title = 'POPFile Forums'
    if group:
        group['name'] = wiki_to_oneliner(group['name'], self.env)
        group['description'] = wiki_to_oneliner(group['description'],
                                                self.env)
        req.hdf['discussion.group'] = group
    if forum:
        forum['name'] = wiki_to_oneliner(forum['name'], self.env)
        forum['description'] = wiki_to_oneliner(forum['description'],
                                                self.env)
        forum['subject'] = wiki_to_oneliner(forum['subject'], self.env)
        forum['time'] = format_datetime(forum['time'])
        req.hdf['discussion.forum'] = forum
        title = 'POPFile ' + forum['name'] + ' Forum'
    if topic:
        topic['subject'] = wiki_to_oneliner(topic['subject'], self.env)
        topic['author'] = wiki_to_oneliner(topic['author'], self.env)
        topic['body'] = wiki_to_html(topic['body'], self.env, req, None,
                                     False, True)
        topic['time'] = format_datetime(topic['time'])
        req.hdf['discussion.topic'] = topic
    if message:
        message['author'] = wiki_to_oneliner(message['author'], self.env)
        message['body'] = wiki_to_html(message['body'], self.env, req,
                                       None, False, True)
        message['time'] = format_datetime(message['time'])
        req.hdf['discussion.message'] = message
    req.hdf['discussion.mode'] = modes[-1]
    req.hdf['discussion.time'] = format_datetime(time.time())
    req.hdf['title'] = title
    return modes[-1] + '.cs', None
def render_discussion(self, req, cursor):
    """Dispatch a discussion request (DB-cursor variant) and fill the
    HDF for rendering; returns the template name and content type."""
    # Resolve the requested items and the mode chain.
    group, forum, topic, message = self._get_items(req, cursor)
    modes = self._get_modes(req, group, forum, topic, message)
    self.log.debug('modes: %s' % modes)

    # Moderator if listed on the forum or holding DISCUSSION_ADMIN.
    if forum:
        is_moderator = (req.authname in forum['moderators']) or \
                       req.perm.has_permission('DISCUSSION_ADMIN')
    else:
        is_moderator = req.perm.has_permission('DISCUSSION_ADMIN')

    # Perform mode actions.
    self._do_action(req, cursor, modes, group, forum, topic, message,
                    is_moderator)

    # Stylesheets.
    add_stylesheet(req, 'common/css/wiki.css')
    add_stylesheet(req, 'discussion/css/discussion.css')
    add_stylesheet(req, 'discussion/css/admin.css')

    # Fill up HDF structure and return template.
    req.hdf['discussion.authname'] = req.authname
    req.hdf['discussion.is_moderator'] = is_moderator
    if group:
        group['name'] = wiki_to_oneliner(group['name'], self.env)
        group['description'] = wiki_to_oneliner(group['description'],
                                                self.env)
        req.hdf['discussion.group'] = group
    if forum:
        forum['name'] = wiki_to_oneliner(forum['name'], self.env)
        forum['description'] = wiki_to_oneliner(forum['description'],
                                                self.env)
        forum['subject'] = wiki_to_oneliner(forum['subject'], self.env)
        forum['time'] = format_datetime(forum['time'])
        req.hdf['discussion.forum'] = forum
    if topic:
        topic['subject'] = wiki_to_oneliner(topic['subject'], self.env)
        topic['author'] = wiki_to_oneliner(topic['author'], self.env)
        topic['body'] = wiki_to_html(topic['body'], self.env, req)
        topic['time'] = format_datetime(topic['time'])
        req.hdf['discussion.topic'] = topic
    if message:
        message['author'] = wiki_to_oneliner(message['author'], self.env)
        message['body'] = wiki_to_html(message['body'], self.env, req)
        message['time'] = format_datetime(message['time'])
        req.hdf['discussion.message'] = message
    req.hdf['discussion.mode'] = modes[-1]
    req.hdf['discussion.time'] = format_datetime(time.time())
    return modes[-1] + '.cs', None
def formattime(self, time):
    """Return formatted time for ListOfWikiPages table."""
    time = int(time)
    # Absolute timestamp plus a "(N ago)" link into the timeline.
    timeline_href = self.href(
        'timeline', precision='seconds',
        from_=quote_plus(format_datetime(time, 'iso8601')))
    return [
        tag.span(format_datetime(time)),
        tag.span(" (",
                 tag.a(pretty_timedelta(time), href=timeline_href),
                 " ago)"),
    ]
def formattime(self, time):
    """Return formatted time for ListOfWikiPages table."""
    time = int(time)
    # Link the relative age back into the timeline at this instant.
    ago_link = tag.a(
        pretty_timedelta(time),
        href=self.href('timeline', precision='seconds',
                       from_=quote_plus(format_datetime(time, 'iso8601'))))
    return [tag.span(format_datetime(time)),
            tag.span(" (", ago_link, " ago)")]
def get_timeline_events(self, req, start, stop, filters):
    """Yield Hudson build events within [start, stop] for the timeline."""
    if 'build' not in filters:
        return
    add_stylesheet(req, 'HudsonTrac/hudsontrac.css')
    feed = feedparser.parse(self.feed_url)
    for entry in feed.entries:
        # Skip sub-job entries unless configured to show them.
        if not self.disp_sub and entry.title.find(u'»') >= 0:
            continue
        completed = calendar.timegm(entry.updated_parsed)
        if completed > stop:
            continue
        if completed < start:
            # Feed is newest-first; everything further back is too old.
            break
        if entry.title.find('SUCCESS') >= 0:
            message, kind = 'Build finished successfully', 'build-successful'
        else:
            message, kind = 'Build failed', 'build-failed'
        comment = message + ' at ' + format_datetime(completed)
        yield kind, entry.link, entry.title, completed, None, comment
def get_changes(env, repos, revs, full=None, req=None, format=None):
    """Return a dict mapping each revision in *revs* to rendered
    changeset details (date, age, author, message, files)."""
    db = env.get_db_cnx()
    changes = {}
    for rev in revs:
        changeset = repos.get_changeset(rev)
        message = changeset.message or '--'
        files = None
        if format == 'changelog':
            # Changelog format lists touched paths instead of rendering.
            files = [change[0] for change in changeset.get_changes()]
        elif message:
            if not full:
                message = wiki_to_oneliner(message, env, db, shorten=True)
            else:
                message = wiki_to_html(message, env, req, db,
                                       absurls=(format == 'rss'),
                                       escape_newlines=True)
        if not message:
            message = '--'
        changes[rev] = {
            'date_seconds': changeset.date,
            'date': format_datetime(changeset.date),
            'age': pretty_timedelta(changeset.date),
            'author': changeset.author or 'anonymous',
            'message': message,
            'shortlog': shorten_line(message),
            'files': files,
        }
    return changes
def get_messages(self, req, cursor, topic, time,
                 order_by='ORDER BY time ASC'):
    """Return the message tree of *topic*: a list of top-level messages,
    each with a ``replies`` list; messages newer than *time* get
    ``new`` set."""
    columns = ('id', 'replyto', 'time', 'author', 'body')
    sql = "SELECT id, replyto, time, author, body FROM message WHERE" \
          " topic = %s " + order_by
    self.log.debug(sql % (topic,))
    cursor.execute(sql, (topic,))

    messagemap = {}
    messages = []
    for values in cursor:
        row = dict(zip(columns, values))
        row['author'] = wiki_to_oneliner(row['author'], self.env)
        row['body'] = wiki_to_html(row['body'], self.env, req)
        if int(row['time']) > time:
            row['new'] = True
        row['time'] = format_datetime(row['time'])
        messagemap[row['id']] = row
        # replyto == -1 marks a top-level message.
        if row['replyto'] == -1:
            messages.append(row)

    # Second pass: attach replies to their parents.
    for message in messagemap.values():
        if message['replyto'] != -1:
            parent = messagemap[message['replyto']]
            parent.setdefault('replies', []).append(message)
    return messages
def get_messages(self, req, cursor, topic_id, time, order_by='time',
                 desc=False):
    """Return the message tree of *topic_id*, ordered by *order_by*
    (ascending unless *desc*); messages newer than *time* get ``new``
    set."""
    # NOTE(review): order_by is concatenated into the SQL unvalidated;
    # callers must not pass untrusted values here.
    order_by = 'm.' + order_by
    direction = " DESC" if desc else " ASC"
    columns = ('id', 'replyto', 'time', 'author', 'body')
    sql = "SELECT m.id, m.replyto, m.time, m.author, m.body FROM" \
          " message m WHERE m.topic = %s ORDER BY " + order_by + direction
    self.log.debug(sql % (topic_id,))
    cursor.execute(sql, (topic_id,))

    messagemap = {}
    messages = []
    for values in cursor:
        row = dict(zip(columns, values))
        row['author'] = wiki_to_oneliner(row['author'], self.env)
        row['body'] = wiki_to_html(row['body'], self.env, req, None,
                                   False, True)
        if int(row['time']) > time:
            row['new'] = True
        row['time'] = format_datetime(row['time'])
        messagemap[row['id']] = row
        # replyto == -1 marks a top-level message.
        if row['replyto'] == -1:
            messages.append(row)

    # Second pass: attach replies to their parents.
    for message in messagemap.values():
        if message['replyto'] != -1:
            parent = messagemap[message['replyto']]
            parent.setdefault('replies', []).append(message)
    return messages
def _render_history(self, req, db, page): """Extract the complete history for a given page and stores it in the HDF. This information is used to present a changelog/history for a given page. """ req.perm.assert_permission("WIKI_VIEW") if not page.exists: raise TracError, "Page %s does not exist" % page.name req.hdf["title"] = page.name + " (history)" history = [] for version, t, author, comment, ipnr in page.get_history(): history.append( { "url": self.env.href.wiki(page.name, version=version), "diff_url": self.env.href.wiki(page.name, version=version, action="diff"), "version": version, "time": format_datetime(t), "time_delta": pretty_timedelta(t), "author": author, "comment": wiki_to_oneliner(comment or "", self.env, db), "ipaddr": ipnr, } ) req.hdf["wiki.history"] = history
def _render_history(self, req, db, page):
    """Extract the complete history for a given page and stores it in
    the HDF.

    This information is used to present a changelog/history for a given
    page.
    """
    req.perm.assert_permission('WIKI_VIEW')
    if not page.exists:
        raise TracError("Page %s does not exist" % page.name)
    req.hdf['title'] = page.name + ' (history)'
    history = []
    for version, t, author, comment, ipnr in page.get_history():
        entry = {
            'url': self.env.href.wiki(page.name, version=version),
            'diff_url': self.env.href.wiki(page.name, version=version,
                                           action='diff'),
            'version': version,
            'time': format_datetime(t),
            'time_delta': pretty_timedelta(t),
            'author': author,
            'comment': wiki_to_oneliner(comment or '', self.env, db),
            'ipaddr': ipnr,
        }
        history.append(entry)
    req.hdf['wiki.history'] = history
def get_topics(self, req, cursor, forum_id, order_by='time', desc=False):
    """Return the topics of *forum_id* with reply counts and last-reply
    ages, ordered by *order_by* (ascending unless *desc*)."""
    # 'replies'/'lastreply' come from the joined subquery; any other
    # column is qualified with the topic alias.
    if order_by not in ('replies', 'lastreply'):
        order_by = 't.' + order_by
    direction = " DESC" if desc else " ASC"
    columns = ('id', 'forum', 'time', 'subject', 'body', 'author',
               'replies', 'lastreply')
    sql = "SELECT t.id, t.forum, t.time, t.subject, t.body, t.author," \
          " m.replies, m.lastreply FROM topic t LEFT JOIN (SELECT COUNT(id)" \
          " AS replies, MAX(time) AS lastreply, topic FROM message GROUP BY" \
          " topic) m ON t.id = m.topic WHERE t.forum = %s ORDER BY " \
          + order_by + direction
    self.log.debug(sql % (forum_id,))
    cursor.execute(sql, (forum_id,))

    topics = []
    for values in cursor:
        row = dict(zip(columns, values))
        row['author'] = wiki_to_oneliner(row['author'], self.env)
        row['body'] = wiki_to_html(row['body'], self.env, req)
        if row['lastreply']:
            row['lastreply'] = pretty_timedelta(float(row['lastreply']))
        else:
            row['lastreply'] = 'No replies'
        if not row['replies']:
            row['replies'] = 0
        row['time'] = format_datetime(row['time'])
        topics.append(row)
    return topics
def _render_stats_delete(self, req, delete=False):
    """Confirm (and, with *delete*, perform) deletion of one download
    statistics record.

    Bug fix: the original ``except TracError`` branch assigned
    ``file.name`` although ``file`` was never bound (the ``File``
    constructor is what raised), which caused a NameError instead of the
    intended fallback label.  A stand-in object now carries the
    fallback name.
    """
    href_back = self.env.href.admin(self.cat, self.page,
                                    req.args.get('page_part'),
                                    req.args.get('page'))
    if not req.args.get('get_4'):
        req.redirect(href_back)
    id = int(req.args.get('get_4'))
    try:
        dwnl = DownloadData(self.env, req, id=id)
    except TracError:
        req.redirect(href_back)
    try:
        file = File(self.env, dwnl.file_id)
    except TracError:
        # The file record is gone; show its id as the name instead.
        class _FileStub(object):
            pass
        file = _FileStub()
        file.name = 'id=' + str(dwnl.file_id)
    if not delete:
        # Render the confirmation page.
        req.hdf['dwn_record.redir'] = \
            href_back + self.get_method_string(req)
        req.hdf['dwn_record.id'] = dwnl.id
        req.hdf['dwn_record.file'] = file.name
        req.hdf['dwn_record.timest'] = util.format_datetime(dwnl.timestamp)
        req.hdf['dwn_record.items'] = dwnl.get_attr_list()
        return 'admin_downloader_stats_really_delete.cs', None
    else:
        dwnl.delete()
        if req.args.has_key('redirect_back'):
            req.redirect(req.args.get('redirect_back'))
        else:
            req.redirect(href_back)
def post_process_request(self, req, template, content_type):
    """Rewrite rendered 'Blog*' wiki pages: insert a post-info line
    (date, author, permalink) between the H1 title and the body."""
    result = (template, content_type)
    # Only touch plain 'view' renderings of wiki pages.
    if not (req.hdf and 'wiki.action' in req.hdf
            and req.hdf['wiki.action'] == 'view'):
        return result
    if not ('wiki.page_name' in req.hdf and 'wiki.page_html' in req.hdf):
        return result
    page_name = req.hdf['wiki.page_name']
    if not page_name.startswith('Blog'):
        return result
    match = h1_match(req.hdf['wiki.page_html'])
    if not match:
        return result
    original = self._get_original_post_info(page_name)
    title, body = match.groups()
    title = title.rstrip()
    body = body.lstrip()
    permalink = '<a href="%s" title="Permalink" style="%s">#</a>' % (
        '/'.join((req.hdf['base_url'], 'wiki', page_name)),
        'border-bottom-style: none;')
    post_info = '<p style="%s">%s | %s | %s</p>' % (
        'font-size: smaller; color: gray; margin: 0 0 0 -18px;',
        format_datetime(original['time']), original['author'], permalink)
    req.hdf['wiki.page_html'] = Markup(
        '\n'.join((title, post_info, body)))
    return result
def get_forums(self, req, cursor, order_by='ORDER BY subject ASC'):
    """Return all forums with topic/reply counts and last-activity ages,
    ordered by the *order_by* clause."""
    columns = ('id', 'name', 'author', 'time', 'moderators', 'group',
               'subject', 'description', 'topics', 'replies', 'lastreply',
               'lasttopic')
    sql = "SELECT id, name, author, time, moderators, forum_group," \
          " subject, description, (SELECT COUNT(id) FROM topic t WHERE" \
          " t.forum = forum.id) AS topics, (SELECT COUNT(id) FROM message m" \
          " WHERE m.forum = forum.id) AS replies, (SELECT MAX(time) FROM" \
          " message m WHERE m.forum = forum.id) AS lasttopic, (SELECT" \
          " MAX(time) FROM topic t WHERE t.forum = forum.id) AS lastreply" \
          " FROM forum " + order_by
    self.log.debug(sql)
    cursor.execute(sql)

    forums = []
    for values in cursor:
        row = dict(zip(columns, values))
        row['moderators'] = wiki_to_oneliner(row['moderators'], self.env)
        row['description'] = wiki_to_oneliner(row['description'], self.env)
        # Replace raw timestamps with human-readable ages / placeholders.
        row['lastreply'] = (pretty_timedelta(row['lastreply'])
                            if row['lastreply'] else 'No replies')
        row['lasttopic'] = (pretty_timedelta(row['lasttopic'])
                            if row['lasttopic'] else 'No topics')
        row['time'] = format_datetime(row['time'])
        forums.append(row)
    return forums
def post_process_request(self, req, template, content_type):
    """Rewrite rendered 'Blog*' wiki pages, inserting a post-info line
    (date, author, permalink) after the H1 title."""
    unchanged = (template, content_type)
    # Only 'view' renderings of wiki pages are candidates.
    if not (req.hdf and 'wiki.action' in req.hdf
            and req.hdf['wiki.action'] == 'view'):
        return unchanged
    if not ('wiki.page_name' in req.hdf and 'wiki.page_html' in req.hdf):
        return unchanged
    page_name = req.hdf['wiki.page_name']
    if not page_name.startswith('Blog'):
        return unchanged
    match = h1_match(req.hdf['wiki.page_html'])
    if not match:
        return unchanged
    original = self._get_original_post_info(page_name)
    title, body = match.groups()
    title = title.rstrip()
    body = body.lstrip()
    permalink = '<a href="%s" title="Permalink" style="%s">#</a>' % (
        '/'.join((req.hdf['base_url'], 'wiki', page_name)),
        'border-bottom-style: none;')
    post_info = '<p style="%s">%s | %s | %s</p>' % (
        'font-size: smaller; color: gray; margin: 0 0 0 -18px;',
        format_datetime(original['time']), original['author'], permalink)
    req.hdf['wiki.page_html'] = Markup('\n'.join((title, post_info, body)))
    return unchanged
def _render_stats_delete(self, req, delete=False):
    """Confirm (and, with *delete*, perform) deletion of one download
    statistics record (legacy ``req.args[...].value`` API variant).

    Bug fix: the original ``except TracError`` branch assigned
    ``file.name`` although ``file`` was never bound (the ``File``
    constructor is what raised), which caused a NameError instead of
    the intended fallback label.  A stand-in object now carries the
    fallback name.
    """
    href_back = self.env.href.admin(
        self.cat, self.page, req.args['page_part'].value,
        req.args['page'].value)
    if not req.args['get_4'].value:
        req.redirect(href_back)
    id = int(req.args['get_4'].value)
    try:
        dwnl = DownloadData(self.env, req, id=id)
    except TracError:
        req.redirect(href_back)
    try:
        file = File(self.env, dwnl.file_id)
    except TracError:
        # The file record is gone; show its id as the name instead.
        class _FileStub(object):
            pass
        file = _FileStub()
        file.name = 'id=' + str(dwnl.file_id)
    if not delete:
        # Render the confirmation page.
        req.hdf['dwn_record.redir'] = \
            href_back + self.get_method_string(req)
        req.hdf['dwn_record.id'] = dwnl.id
        req.hdf['dwn_record.file'] = file.name
        req.hdf['dwn_record.timest'] = util.format_datetime(dwnl.timestamp)
        req.hdf['dwn_record.items'] = dwnl.get_attr_list()
        return 'admin_downloader_stats_really_delete.cs', None
    else:
        dwnl.delete()
        if req.args.has_key('redirect_back'):
            req.redirect(req.args['redirect_back'].value)
        else:
            req.redirect(href_back)
def expand_macro(self, formatter, name, content):
    """Render a changelog for a repository path: up to *limit* history
    entries (default 5) at revision *rev* (default youngest)."""
    req = formatter.req
    args, kwargs = parse_args(content)
    # Pad so positional unpacking below never fails.
    args += [None, None]
    path, limit, rev = args[:3]
    limit = kwargs.pop('limit', limit)
    rev = kwargs.pop('rev', rev)

    if 'CHANGESET_VIEW' not in req.perm:
        return Markup('<i>Changelog not available</i>')

    repo = self.env.get_repository(req.authname)
    rev = repo.get_youngest_rev() if rev is None else rev
    rev = repo.normalize_rev(rev)
    path = repo.normalize_path(path)
    limit = 5 if limit is None else int(limit)

    node = repo.get_node(path, rev)
    out = StringIO()
    out.write('<div class="changelog">\n')
    for npath, nrev, nlog in node.get_history(limit):
        change = repo.get_changeset(nrev)
        datetime = format_datetime(change.date, '%Y/%m/%d %H:%M:%S',
                                   req.tz)
        header = "'''[%s] by %s on %s'''\n\n%s" % (
            nrev, change.author, datetime, change.message)
        out.write(wiki_to_html(header, self.env, req))
    out.write('</div>\n')
    return out.getvalue()
def get_timeline_events(self, req, start, stop, filters):
    """Yield Bamboo build events within [start, stop] for the timeline.

    Bug fix: the original computed ``completed`` under a "check time
    range" comment but never compared it against *start*/*stop*, so
    every feed entry was yielded regardless of the requested range.
    Entries outside the range are now skipped (mirroring the Hudson
    timeline provider).
    """
    if isinstance(start, datetime):
        # Trac >= 0.11 passes datetimes; normalize to POSIX timestamps.
        from trac.util.datefmt import to_timestamp
        start = to_timestamp(start)
        stop = to_timestamp(stop)
    if 'build' in filters:
        add_stylesheet(req, 'BambooTrac/bambootrac.css')
        feed = feedparser.parse(self.feed_url,
                                handlers=[self.bAuth, self.dAuth])
        for entry in feed.entries:
            # Check time range.
            completed = calendar.timegm(entry.date_parsed)
            if completed > stop or completed < start:
                continue
            # Create the timeline entry.
            if entry.title.find('SUCCESS') >= 0:
                message = 'Build finished successfully'
                kind = 'bamboo-successful'
            else:
                message = 'Build failed'
                kind = 'bamboo-failed'
            # Use only the plan name (before the first colon) as title.
            newtitle = entry.title.split(":")[0]
            href = entry.link
            comment = message + ' at ' + format_datetime(completed)
            yield kind, href, newtitle, completed, None, comment
def get_forums(self, req, cursor, order_by='ORDER BY subject ASC'):
    """Return all forums with topic/reply counts and last-activity
    ages, ordered by the *order_by* clause."""
    columns = ('id', 'name', 'author', 'time', 'moderators', 'group',
               'subject', 'description', 'topics', 'replies', 'lastreply',
               'lasttopic')
    sql = "SELECT id, name, author, time, moderators, forum_group," \
          " subject, description, (SELECT COUNT(id) FROM topic t WHERE" \
          " t.forum = forum.id) AS topics, (SELECT COUNT(id) FROM message m" \
          " WHERE m.forum = forum.id) AS replies, (SELECT MAX(time) FROM" \
          " message m WHERE m.forum = forum.id) AS lasttopic, (SELECT" \
          " MAX(time) FROM topic t WHERE t.forum = forum.id) AS lastreply" \
          " FROM forum " + order_by
    self.log.debug(sql)
    cursor.execute(sql)

    def prettify(row):
        # Render wiki fields and humanize timestamps in place.
        row['moderators'] = wiki_to_oneliner(row['moderators'], self.env)
        row['description'] = wiki_to_oneliner(row['description'], self.env)
        if row['lastreply']:
            row['lastreply'] = pretty_timedelta(row['lastreply'])
        else:
            row['lastreply'] = 'No replies'
        if row['lasttopic']:
            row['lasttopic'] = pretty_timedelta(row['lasttopic'])
        else:
            row['lasttopic'] = 'No topics'
        row['time'] = format_datetime(row['time'])
        return row

    return [prettify(dict(zip(columns, values))) for values in cursor]
def expand_macro(self, formatter, name, content):
    """Render a changelog for a repository path: up to *limit* history
    entries (default 5) at revision *rev* (default youngest)."""
    req = formatter.req
    args, kwargs = parse_args(content)
    args += [None, None]  # pad for safe positional unpacking
    path, limit, rev = args[:3]
    limit = kwargs.pop('limit', limit)
    rev = kwargs.pop('rev', rev)

    if 'CHANGESET_VIEW' not in req.perm:
        return Markup('<i>Changelog not available</i>')

    repo = self.env.get_repository(req.authname)
    if rev is None:
        rev = repo.get_youngest_rev()
    rev = repo.normalize_rev(rev)
    path = repo.normalize_path(path)
    limit = 5 if limit is None else int(limit)

    node = repo.get_node(path, rev)
    out = StringIO()
    out.write('<div class="changelog">\n')
    for npath, nrev, nlog in node.get_history(limit):
        change = repo.get_changeset(nrev)
        datetime = format_datetime(change.date, '%Y/%m/%d %H:%M:%S',
                                   req.tz)
        wikitext = "'''[%s] by %s on %s'''\n\n%s" % (
            nrev, change.author, datetime, change.message)
        out.write(wiki_to_html(wikitext, self.env, req))
    out.write('</div>\n')
    return out.getvalue()
def get_topics(self, req, cursor, forum_id, order_by='time', desc=False):
    """Return the topics of *forum_id* with reply counts and last-reply
    ages, ordered by *order_by* (ascending unless *desc*)."""
    # 'replies'/'lastreply' come from the joined subquery; everything
    # else is qualified with the topic alias.
    if order_by not in ('replies', 'lastreply'):
        order_by = 't.' + order_by
    columns = ('id', 'forum', 'time', 'subject', 'body', 'author',
               'replies', 'lastreply')
    sql = "SELECT t.id, t.forum, t.time, t.subject, t.body, t.author," \
          " m.replies, m.lastreply FROM topic t LEFT JOIN (SELECT COUNT(id)" \
          " AS replies, MAX(time) AS lastreply, topic FROM message GROUP BY" \
          " topic) m ON t.id = m.topic WHERE t.forum = %s ORDER BY " \
          + order_by + (" DESC" if desc else " ASC")
    self.log.debug(sql % (forum_id,))
    cursor.execute(sql, (forum_id,))

    topics = []
    for values in cursor:
        row = dict(zip(columns, values))
        row['author'] = wiki_to_oneliner(row['author'], self.env)
        row['body'] = wiki_to_html(row['body'], self.env, req)
        row['lastreply'] = (pretty_timedelta(float(row['lastreply']))
                            if row['lastreply'] else 'No replies')
        if not row['replies']:
            row['replies'] = 0
        row['time'] = format_datetime(row['time'])
        topics.append(row)
    return topics
def get_timeline_events(self, req, start, stop, filters): if "build" in filters: add_stylesheet(req, "HudsonTrac/hudsontrac.css") feed = feedparser.parse(self.feed_url) for entry in feed.entries: # Only look at top-level entries if not self.disp_sub and entry.title.find(u"»") >= 0: continue # check time range completed = calendar.timegm(entry.updated_parsed) if completed > stop: continue if completed < start: break # create timeline entry if entry.title.find("SUCCESS") >= 0: message = "Build finished successfully" kind = "build-successful" else: message = "Build failed" kind = "build-failed" href = entry.link title = entry.title comment = message + " at " + format_datetime(completed) yield kind, href, title, completed, None, comment
def _render_stats_delete(self, req, delete=False): href_back = self.env.href.admin(self.cat, self.page, req.args.get("page_part"), req.args.get("page")) if not req.args.get("get_4"): req.redirect(href_back) id = int(req.args.get("get_4")) try: dwnl = DownloadData(self.env, req, id=id) except TracError: req.redirect(href_back) try: file = File(self.env, dwnl.file_id) except TracError: file.name = "id=" + str(dwnl.file_id) if not delete: req.hdf["dwn_record.redir"] = href_back + self.get_method_string(req) req.hdf["dwn_record.id"] = dwnl.id req.hdf["dwn_record.file"] = file.name req.hdf["dwn_record.timest"] = util.format_datetime(dwnl.timestamp) req.hdf["dwn_record.items"] = dwnl.get_attr_list() return "admin_downloader_stats_really_delete.cs", None else: dwnl.delete() if req.args.has_key("redirect_back"): req.redirect(req.args.get("redirect_back")) else: req.redirect(href_back)
def get_messages(self, req, cursor, topic, time,
                 order_by='ORDER BY time ASC'):
    """Return the message tree of *topic*: top-level messages with
    nested ``replies`` lists; messages newer than *time* get ``new``
    set."""
    columns = ('id', 'replyto', 'time', 'author', 'body')
    sql = "SELECT id, replyto, time, author, body FROM message WHERE" \
          " topic = %s " + order_by
    self.log.debug(sql % (topic,))
    cursor.execute(sql, (topic,))

    by_id = {}
    toplevel = []
    for values in cursor:
        row = dict(zip(columns, values))
        row['author'] = wiki_to_oneliner(row['author'], self.env)
        row['body'] = wiki_to_html(row['body'], self.env, req)
        if int(row['time']) > time:
            row['new'] = True
        row['time'] = format_datetime(row['time'])
        by_id[row['id']] = row
        # replyto == -1 marks a top-level message.
        if row['replyto'] == -1:
            toplevel.append(row)

    # Second pass: attach replies to their parents.
    for message in by_id.values():
        if message['replyto'] != -1:
            by_id[message['replyto']].setdefault('replies',
                                                 []).append(message)
    return toplevel
def populate_hdf(hdf, env, req=None):
    """Populate the HDF data set with various information, such as
    common URLs, project information and request-related information.
    """
    from trac import __version__
    hdf['trac'] = {
        'version': __version__,
        'time': format_datetime(),
        'time.gmt': http_date(),
    }
    href = env.href
    hdf['trac.href'] = {
        'wiki': href.wiki(),
        'browser': href.browser('/'),
        'timeline': href.timeline(),
        'roadmap': href.roadmap(),
        'milestone': href.milestone(None),
        'report': href.report(),
        'query': href.query(),
        'newticket': href.newticket(),
        'search': href.search(),
        'about': href.about(),
        'about_config': href.about('config'),
        'login': href.login(),
        'logout': href.logout(),
        'settings': href.settings(),
        'homepage': 'http://trac.edgewall.com/',
    }
    config = env.config
    hdf['project'] = {
        'name': config.get('project', 'name'),
        'name_encoded': escape(config.get('project', 'name')),
        'descr': config.get('project', 'descr'),
        'footer': config.get(
            'project', 'footer',
            'Visit the Trac open source project at<br />'
            '<a href="http://trac.edgewall.com/">'
            'http://trac.edgewall.com/</a>'),
        'url': config.get('project', 'url'),
    }
    if req:
        hdf['base_url'] = req.base_url
        hdf['base_host'] = req.base_url[:req.base_url.rfind(
            req.cgi_location)]
        hdf['cgi_location'] = req.cgi_location
        hdf['trac.authname'] = escape(req.authname)
        for action in req.perm.permissions():
            req.hdf['trac.acl.' + action] = True
        # Mirror request args; multi-valued args become lists of values.
        for arg in [k for k in req.args.keys() if k]:
            value = req.args[arg]
            if isinstance(value, (list, tuple)):
                hdf['args.%s' % arg] = [v.value for v in value]
            else:
                hdf['args.%s' % arg] = value.value
def _get_messages(self, req, cursor):
    """Fetch all guestbook entries, oldest first, rendered for display."""
    cursor.execute("SELECT id, author, time, title, body FROM guestbook"
                   " ORDER BY time")
    fields = ['id', 'author', 'time', 'title', 'body']
    rendered = []
    for record in cursor:
        entry = dict(zip(fields, record))
        entry['time'] = format_datetime(entry['time'])
        entry['title'] = wiki_to_oneliner(entry['title'], self.env)
        entry['body'] = wiki_to_html(entry['body'], self.env, req)
        rendered.append(entry)
    return rendered
def _build_row(self, name, time, author, version, comment):
    """Build one table row for a wiki page: name, last-modified,
    version (with diff/history shortcuts), author and comment."""
    time = from_utimestamp(time)
    # Page name, optionally shortened to the last path segment
    if self.pagename == 'short':
        shown_name = name.rsplit('/', 1)[-1]
    else:
        shown_name = name
    name_cell = tag.td(tag.a(shown_name, href=self.href.wiki(name)),
                       class_='name')
    # Last modified: absolute date plus a "(... ago)" timeline link
    timeline_href = self.href('timeline', precision='seconds',
                              from_=format_datetime(time, 'iso8601'))
    ago = [' (', tag.a(pretty_timedelta(time), href=timeline_href), ' ago)']
    time_cell = tag.td(format_datetime(time, self.date_format), ago,
                       class_='time')
    # Version number with diff and history shortcuts
    version_link = tag.a(version,
                         href=self.href.wiki(name, version=version))
    diff_link = tag.a('d', title='diff',
                      href=self.href.wiki(name, action='diff',
                                          version=version))
    history_link = tag.a('h', title='history',
                         href=self.href.wiki(name, action='history'))
    version_cell = tag.td(version_link, ' [', diff_link, '|', history_link,
                          ']', class_='version')
    return tag.tr([name_cell, time_cell, version_cell,
                   tag.td(author, class_='author'),
                   tag.td(comment, class_='comment')])
def attachment_to_hdf(env, db, req, attachment):
    """Render an attachment's metadata into a dict of HDF template data."""
    from trac.wiki import wiki_to_oneliner
    # Fall back to a fresh connection when the caller supplied none
    if not db:
        db = env.get_db_cnx()
    description = wiki_to_oneliner(attachment.description, env, db)
    return {
        'filename': attachment.filename,
        'description': description,
        'author': util.escape(attachment.author),
        'ipnr': attachment.ipnr,
        'size': util.pretty_size(attachment.size),
        'time': util.format_datetime(attachment.time),
        'href': attachment.href()
    }
def test_can_return_utc_time(self):
    """The search result exposes the ticket's change time, formatted."""
    # arrange
    ticket = self.insert_ticket("bla")
    changed_at = ticket.time_changed
    # act
    self.req.args[RequestParameters.QUERY] = "*:*"
    data = self.process_request()
    items = data["results"].items
    # assert
    self.assertEqual(1, len(items))
    returned = items[0]["date"]
    self.env.log.debug("Ticket time: %s, Returned time: %s",
                       changed_at, returned)
    self.assertEqual(format_datetime(changed_at), items[0]["date"])
def test_can_return_utc_time(self):
    """The search result exposes the ticket's change time, formatted."""
    #arrange
    ticket = self.insert_ticket("bla")
    ticket_time = ticket.time_changed
    #act
    self.req.args[RequestParameters.QUERY] = "*:*"
    data = self.process_request()
    result_items = data["results"].items
    #assert
    self.assertEqual(1, len(result_items))
    result_datetime = result_items[0]["date"]
    # Log through the environment logger instead of a bare `print`
    # statement, matching the sibling tests and keeping the diagnostic
    # in the test log rather than on stdout.
    self.env.log.debug("Ticket time: %s, Returned time: %s",
                       ticket_time, result_datetime)
    self.assertEqual(format_datetime(ticket_time), result_items[0]["date"])
def test_can_return_user_time(self):
    """Dates in results must be formatted in the request's timezone."""
    #arrange
    ticket = self.insert_ticket("bla")
    ticket_time = ticket.time_changed
    #act
    tzinfo = FixedOffset(60, 'GMT +1:00')
    self.req.tz = tzinfo
    self.req.args[RequestParameters.QUERY] = "*:*"
    data = self.process_request()
    result_items = data["results"].items
    #assert
    self.assertEqual(1, len(result_items))
    # Pass the user's tzinfo explicitly: without it the expected value is
    # rendered in the default timezone, so the assertion could pass even
    # when the code under test ignores req.tz.
    expected_datetime = format_datetime(ticket_time, tzinfo=tzinfo)
    result_datetime = result_items[0]["date"]
    # Use the environment logger instead of a bare `print` statement
    self.env.log.debug(
        "Ticket time: %s, Formatted time: %s ,Returned time: %s",
        ticket_time, expected_datetime, result_datetime)
    self.assertEqual(expected_datetime, result_datetime)
def get_flat_messages(self, req, cursor, topic_id, time,
                      order_by='ORDER BY time ASC'):
    """Return all messages of `topic_id` as a flat list in `order_by` order."""
    fields = ('id', 'replyto', 'time', 'author', 'body')
    sql = "SELECT m.id, m.replyto, m.time, m.author, m.body FROM message m" \
          " WHERE m.topic = %s " + order_by
    self.log.debug(sql % (topic_id,))
    cursor.execute(sql, (topic_id,))
    result = []
    for record in cursor:
        message = dict(zip(fields, record))
        message['author'] = wiki_to_oneliner(message['author'], self.env)
        message['body'] = wiki_to_html(message['body'], self.env, req, None,
                                       False, True)
        # Flag messages newer than the caller's last-visit timestamp
        if int(message['time']) > time:
            message['new'] = True
        message['time'] = format_datetime(message['time'])
        result.append(message)
    return result
def get_flat_messages(self, req, cursor, topic, time,
                      order_by='ORDER BY time ASC'):
    """Return all messages of `topic` as a flat list in `order_by` order."""
    fields = ('id', 'replyto', 'time', 'author', 'body')
    sql = "SELECT id, replyto, time, author, body FROM message WHERE" \
          " topic = %s " + order_by
    self.log.debug(sql % (topic,))
    cursor.execute(sql, (topic,))
    result = []
    for record in cursor:
        message = dict(zip(fields, record))
        message['author'] = wiki_to_oneliner(message['author'], self.env)
        message['body'] = wiki_to_html(message['body'], self.env, req)
        # Flag messages newer than the caller's last-visit timestamp
        if int(message['time']) > time:
            message['new'] = True
        message['time'] = format_datetime(message['time'])
        result.append(message)
    return result
def test_can_return_user_time(self):
    """Results honour the request's timezone when formatting dates."""
    # arrange
    ticket = self.insert_ticket("bla")
    changed_at = ticket.time_changed
    # act
    tzinfo = FixedOffset(60, 'GMT +1:00')
    self.req.tz = tzinfo
    self.req.args[RequestParameters.QUERY] = "*:*"
    data = self.process_request()
    items = data["results"].items
    # assert
    self.assertEqual(1, len(items))
    expected = format_datetime(changed_at, tzinfo=tzinfo)
    returned = items[0]["date"]
    self.env.log.debug(
        "Ticket time: %s, Formatted time: %s ,Returned time: %s",
        changed_at, expected, returned)
    self.assertEqual(expected, returned)
def get_timeline_events(self, req, start, stop, filters):
    """Yield Hudson build events in [start, stop] when the 'build'
    timeline filter is enabled.

    Yields (kind, href, title, completed, author, comment) tuples as
    expected by Trac's timeline; `completed` is a Unix timestamp.
    """
    if isinstance(start, datetime):  # Trac>=0.11 passes datetimes
        from trac.util.datefmt import to_timestamp
        start = to_timestamp(start)
        stop = to_timestamp(stop)
    if 'build' not in filters:
        return
    add_stylesheet(req, 'HudsonTracPlus/hudsontracplus.css')
    hudson_api_url = self.hudson_url + 'api/python'
    # SECURITY: the Hudson "python" API response is consumed with eval(),
    # so a compromised or spoofed Hudson server can execute arbitrary
    # code here.  Consider ast.literal_eval() or the JSON API instead.
    try:
        hudson_json = eval(self.url_opener.open(hudson_api_url).readline())
    except Exception:  # narrowed from bare except: don't trap SystemExit
        self.env.log.debug("hudson_api_url='%s'" % (hudson_api_url))
        return
    for job in hudson_json['jobs']:
        # Honour the configured job whitelist, if any
        if len(self.jobs) > 0 and not job['name'] in self.jobs:
            continue
        job_api_url = get_job_url(job['url'] + 'api/python?depth=2',
                                  self.hudson_url)
        try:
            json = eval(self.url_opener.open(job_api_url).readline())
        except Exception:
            self.env.log.debug("job_api_url='%s'" % (job_api_url))
            raise
        for build in json['builds']:
            # Hudson timestamps are in milliseconds; builds come newest
            # first, so anything before `start` ends the scan.
            completed = build['timestamp'] / 1000
            if completed > stop:
                continue
            if completed < start:
                break
            # Create timeline entry
            kind = get_build_kind(build)
            href = get_build_href(build, self.hudson_url, self.nav_url)
            title = Markup(get_build_title_markup(job, build))
            comment = get_build_comment(build, format_datetime(completed))
            yield kind, href, title, completed, None, comment
def get_attr_list(self, short=False):
    """
    Gets list of attributes and it's values in download data.

    Returns a list of [label, value] pairs; when `short` is false the
    download id, file name and timestamp are prepended.
    """
    items = []
    label_list = self.get_label_list()
    if not short:
        # Renamed from `file` to avoid shadowing the builtin
        download_file = File(self.env, self.file_id)
        items.append(['Id:', self.id])
        items.append(['File:', download_file.name])
        items.append(['Timestamp:', util.format_datetime(self.timestamp)])
    for item in label_list:
        # item is a (attribute-name, label) pair
        if item[0] not in self.attr:
            continue
        # Skip attributes whose value is blank
        if strip(self.attr[item[0]]) != '':
            items.append([item[1], self.attr[item[0]]])
    return items
def _entry_to_hdf(req, entry):
    """Flatten a spam-filter log entry into template data for the monitor view."""
    clipped_path = shorten_line(entry.path, 25)
    clipped_author = shorten_line(entry.author, 25)
    data = {
        'id': entry.id,
        'time': format_datetime(entry.time),
        'timedelta': pretty_timedelta(entry.time),
        'path': entry.path,
        'url': req.abs_href(entry.path),
        'path_clipped': clipped_path,
        'href': req.href(entry.path),
        'admin_href': req.href.admin('spamfilter', 'monitor', entry.id),
        'author': entry.author,
        'author_clipped': clipped_author,
        'ipnr': entry.ipnr,
        'authenticated': entry.authenticated,
        'headers': entry.headers,
        'content': shorten_line(entry.content),
        'full_content': entry.content,
        'rejected': entry.rejected,
        'karma': entry.karma,
        'reasons': entry.reasons,
    }
    return data
def get_forums(self, req, cursor, order_by = 'subject', desc = False):
    """Return all forums with per-forum topic/reply statistics,
    sorted by `order_by` (descending when `desc` is true)."""
    # Computed columns come from the "ta" sub-select; every other sort key
    # is a real forum column and needs the "f." table prefix.
    if not order_by in ('topics', 'replies', 'lasttopic', 'lastreply'):
        order_by = 'f.' + order_by
    columns = ('id', 'name', 'author', 'time', 'moderators', 'group',
               'subject', 'description', 'topics', 'replies', 'lasttopic',
               'lastreply')
    # Nested sub-selects aggregate reply counts per topic, then topic and
    # reply totals plus latest activity per forum.
    sql = "SELECT f.id, f.name, f.author, f.time, f.moderators, " \
          "f.forum_group, f.subject, f.description, ta.topics, ta.replies, " \
          "ta.lasttopic, ta.lastreply FROM forum f LEFT JOIN (SELECT " \
          "COUNT(t.id) AS topics, MAX(t.time) AS lasttopic, SUM(ma.replies) " \
          "AS replies, MAX(ma.lastreply) AS lastreply, t.forum AS forum FROM " \
          " topic t LEFT JOIN (SELECT COUNT(m.id) AS replies, MAX(m.time) AS " \
          "lastreply, m.topic AS topic FROM message m GROUP BY m.topic) ma ON " \
          "t.id = ma.topic GROUP BY forum) ta ON f.id = ta.forum ORDER BY " + \
          order_by + (" ASC", " DESC")[bool(desc)]
    self.log.debug(sql)
    cursor.execute(sql)
    forums = []
    for row in cursor:
        row = dict(zip(columns, row))
        # Render wiki markup in the moderator list and description
        row['moderators'] = wiki_to_oneliner(row['moderators'], self.env)
        row['description'] = wiki_to_oneliner(row['description'], self.env)
        # Humanize the activity timestamps; NULL means no activity yet
        if row['lastreply']:
            row['lastreply'] = pretty_timedelta(float(row['lastreply']))
        else:
            row['lastreply'] = 'No replies'
        if row['lasttopic']:
            self.log.debug('lasttopic: %s' % row['lasttopic'])
            row['lasttopic'] = pretty_timedelta(float(row['lasttopic']))
        else:
            row['lasttopic'] = 'No topics'
        # NULL aggregates mean an empty forum
        if not row['topics']:
            row['topics'] = 0
        if not row['replies']:
            row['replies'] = 0
        else:
            # SUM on PosgreSQL returns float number.
            row['replies'] = int(row['replies'])
        row['time'] = format_datetime(row['time'])
        forums.append(row)
    return forums
def get_forums(self, req, cursor, asc=0, order_by='subject'):
    """Return all forums with per-forum topic/reply statistics,
    sorted by `order_by` (ascending when `asc` is truthy)."""
    # Computed columns come from the "ta" sub-select; every other sort key
    # is a real forum column and needs the "f." table prefix.
    if not order_by in ('topics', 'replies', 'lasttopic', 'lastreply'):
        order_by = 'f.' + order_by
    columns = ('id', 'name', 'author', 'time', 'moderators', 'group',
               'subject', 'description', 'topics', 'replies', 'lasttopic',
               'lastreply')
    # Nested sub-selects aggregate reply counts per topic, then topic and
    # reply totals plus latest activity per forum.
    sql = "SELECT f.id, f.name, f.author, f.time, f.moderators, " \
          "f.forum_group, f.subject, f.description, ta.topics, ta.replies, " \
          "ta.lasttopic, ta.lastreply FROM forum f LEFT JOIN (SELECT " \
          "COUNT(t.id) AS topics, MAX(t.time) AS lasttopic, SUM(ma.replies) " \
          "AS replies, MAX(ma.lastreply) AS lastreply, t.forum AS forum FROM " \
          " topic t LEFT JOIN (SELECT COUNT(m.id) AS replies, MAX(m.time) AS " \
          "lastreply, m.topic AS topic FROM message m GROUP BY m.topic) ma ON " \
          "t.id = ma.topic GROUP BY forum) ta ON f.id = ta.forum ORDER BY " + \
          order_by + (" DESC", " ASC")[int(asc)]
    self.log.debug(sql)
    cursor.execute(sql)
    forums = []
    for row in cursor:
        row = dict(zip(columns, row))
        # Render wiki markup in the moderator list and description
        row['moderators'] = wiki_to_oneliner(row['moderators'], self.env)
        row['description'] = wiki_to_oneliner(row['description'], self.env)
        # Humanize the activity timestamps; NULL means no activity yet
        if row['lastreply']:
            row['lastreply'] = pretty_timedelta(float(row['lastreply']))
        else:
            row['lastreply'] = 'No replies'
        if row['lasttopic']:
            self.log.debug('lasttopic: %s' % row['lasttopic'])
            row['lasttopic'] = pretty_timedelta(float(row['lasttopic']))
        else:
            row['lasttopic'] = 'No topics'
        # NULL aggregates mean an empty forum
        if not row['topics']:
            row['topics'] = 0
        if not row['replies']:
            row['replies'] = 0
        else:
            # SUM on PosgreSQL returns float number.
            row['replies'] = int(row['replies'])
        row['time'] = format_datetime(row['time'])
        forums.append(row)
    return forums
def process_downloads(self, req, cursor):
    """Dispatch the request to its download mode handler and prepare
    common template data; returns (template name, content type)."""
    # Determine which mode(s) this request maps to and act on them
    modes = self._get_modes(req)
    self.log.debug('modes: %s' % modes)
    self._do_action(req, cursor, modes)
    # Stylesheets
    for stylesheet in ('common/css/wiki.css',
                       'downloads/css/downloads.css',
                       'downloads/css/admin.css'):
        add_stylesheet(req, stylesheet)
    # Scripts
    for script in ('common/js/trac.js', 'common/js/wikitoolbar.js'):
        add_script(req, script)
    # Common template data
    req.hdf['download.authname'] = req.authname
    req.hdf['download.time'] = format_datetime(time.time())
    return modes[-1] + '.cs', None
def get_topics(self, req, cursor, forum, order_by='ORDER BY time ASC'):
    """Return the topics of `forum`, annotated with reply counts and the
    age of the newest reply."""
    fields = ('id', 'forum', 'time', 'subject', 'body', 'author', 'replies',
              'lastreply')
    sql = "SELECT id, forum, time, subject, body, author, (SELECT" \
          " COUNT(id) FROM message m WHERE m.topic = topic.id) AS replies," \
          " (SELECT MAX(time) FROM message m WHERE m.topic = topic.id) AS" \
          " lastreply FROM topic WHERE forum = %s " + order_by
    self.log.debug(sql % (forum,))
    cursor.execute(sql, (forum,))
    result = []
    for record in cursor:
        entry = dict(zip(fields, record))
        entry['author'] = wiki_to_oneliner(entry['author'], self.env)
        entry['body'] = wiki_to_html(entry['body'], self.env, req)
        # NULL lastreply means the topic has no messages yet
        if entry['lastreply']:
            entry['lastreply'] = pretty_timedelta(entry['lastreply'])
        else:
            entry['lastreply'] = 'No replies'
        entry['time'] = format_datetime(entry['time'])
        result.append(entry)
    return result
def get_topics(self, req, cursor, forum, order_by='ORDER BY time ASC'):
    """Return the topics of `forum`, annotated with reply statistics."""
    fields = ('id', 'forum', 'time', 'subject', 'body', 'author', 'replies',
              'lastreply')
    sql = "SELECT id, forum, time, subject, body, author, (SELECT" \
          " COUNT(id) FROM message m WHERE m.topic = topic.id) AS replies," \
          " (SELECT MAX(time) FROM message m WHERE m.topic = topic.id) AS" \
          " lastreply FROM topic WHERE forum = %s " + order_by
    self.log.debug(sql % (forum,))
    cursor.execute(sql, (forum,))
    topic_rows = []
    for record in cursor:
        entry = dict(zip(fields, record))
        entry['author'] = wiki_to_oneliner(entry['author'], self.env)
        entry['body'] = wiki_to_html(entry['body'], self.env, req)
        # NULL lastreply means the topic has no messages yet
        entry['lastreply'] = (pretty_timedelta(entry['lastreply'])
                              if entry['lastreply'] else 'No replies')
        entry['time'] = format_datetime(entry['time'])
        topic_rows.append(entry)
    return topic_rows
def reset_orphaned_builds(self):
    """Reset all in-progress builds to ``PENDING`` state if they've been
    running so long that the configured timeout has been reached.

    This is used to cleanup after slaves that have unexpectedly cancelled
    a build without notifying the master, or are for some other reason not
    reporting back status updates.
    """
    if not self.timeout:
        # If no timeout is set, none of the in-progress builds can be
        # considered orphaned
        return
    db = self.env.get_db_cnx()
    now = int(time.time())
    for build in Build.select(self.env, status=Build.IN_PROGRESS, db=db):
        if now - build.last_activity < self.timeout:
            # This build has not reached the timeout yet, assume it's
            # still being executed
            continue
        self.log.info('Orphaning build %d. Last activity was %s (%s)' % \
                      (build.id, format_datetime(build.last_activity),
                       pretty_timedelta(build.last_activity)))
        # Return the build to the queue and clear all slave-related state
        build.status = Build.PENDING
        build.slave = None
        build.slave_info = {}
        build.started = 0
        build.stopped = 0
        build.last_activity = 0
        # Drop step records and attachments left over from the aborted run
        for step in list(BuildStep.select(self.env, build=build.id,
                                          db=db)):
            step.delete(db=db)
        build.update(db=db)
        Attachment.delete_all(self.env, 'build', build.resource.id, db)
    # Commit all resets in one transaction
    db.commit()
def get_attr_list(self, short=False):
    """
    Gets list of attributes and it's values in download data.

    Returns a list of [label, value] pairs built from the questionnaire
    form definition; when `short` is false the download id, file name and
    timestamp are prepended.
    """
    items = []
    labels = {}
    if not short:
        # Renamed from `file` to avoid shadowing the builtin
        download_file = File(self.env, self.file_id)
        items.append(['Id:', self.id])
        items.append(['File:', download_file.name])
        items.append(['Timestamp:', util.format_datetime(self.timestamp)])
    for item in form_data.quest_form:
        # External label for radio
        # (`in` replaces the deprecated dict.has_key — identical semantics)
        if 'label_for' in item:
            labels[item['label_for']] = item['text']
        if 'name' in item and item['name'] in self.attr:
            if 'label' in item:
                label = item['label']
            else:
                label = capitalize(item['name']) + ':'
            if not label.endswith(':'):
                label += ":"
            # Radio: only the selected option contributes, using the
            # external label registered above
            if item.get('type') == 'radio' and 'value' in item:
                if self.attr[item['name']] != item['value']:
                    continue
                label = labels[item['name']]
            # Skip attributes whose value is blank
            if strip(self.attr[item['name']]) != '':
                items.append([label, self.attr[item['name']]])
    return items
def _render_stats_table(self, req):
    """Renders whole stats table (gets all its data)."""
    ## Head
    self._render_stats_table_head(req)

    ## Body
    # Page size from config, restoring the default when it is invalid
    per_page = self.config.get("downloader", "stats_per_page")
    try:
        per_page = int(per_page)
    except ValueError:
        self.config.remove("downloader", "stats_per_page")
        per_page = int(self.config.get("downloader", "stats_per_page"))
    req.hdf["stats.per_page"] = per_page

    # Requested page number, defaulting to the first page
    if not req.args.get("page"):
        page = 1
    else:
        try:
            page = int(req.args.get("page"))
        except ValueError:
            page = 1

    # "Render all" disables paging entirely
    if req.args.has_key("renderall"):
        page = 1
        per_page = None

    # Fetch list of downloads
    rec_count, downloads = DownloadData.fetch_downloads_list(
        self.env, req, self.order, self.desc, per_page, page)
    rows = []
    for dwn in downloads:
        dl_file = File(self.env, dwn.file_id)
        act_show_href = self.env.href.admin(self.cat, self.page,
            req.args.get("page_part"), page, "show", dwn.id)
        act_show = util.Markup('<a href="%s">show</a>' %
            (act_show_href + self.get_method_string(req)))
        act_del_href = self.env.href.admin(
            self.cat, self.page, req.args.get("page_part"), page,
            "really_delete", dwn.id
        )
        act_del = util.Markup('<a href="%s">delete</a>' %
            (act_del_href + self.get_method_string(req)))
        row = {
            "id": dwn.id,
            "timestamp": util.format_datetime(dwn.timestamp),
            "file_id": dwn.file_id,
            "file_name": dl_file.name_disp,
            "actions": act_show + util.Markup(" | ") + act_del,
        }
        # Questionnaire attributes become extra columns
        for key, attr in dwn.attr.iteritems():
            row[key] = attr
        rows.append(row)
    req.hdf["stats.rows"] = rows

    # Render in other formats
    if req.args.has_key("format"):
        format = req.args.get("format")
        if format == "csv":
            return self._render_stats_table_special(req, self.stats_head,
                                                    rows)
        if format == "tab":
            return self._render_stats_table_special(req, self.stats_head,
                                                    rows, "\t", "")

    # List of pages.  When paging is disabled ("renderall", per_page is
    # None) there is exactly one page; the original divided by None here
    # and raised TypeError.  Also cast ceil()'s float for range().
    if per_page:
        page_cnt = int(math.ceil(rec_count * 1.0 / per_page))
    else:
        page_cnt = 1
    pg_lst = []
    for num in range(1, page_cnt + 1):
        href = self.env.href.admin(self.cat, self.page,
                                   req.args.get("page_part"), str(num))
        href += self.get_method_string(req)
        pg_lst.append([href, num])
    req.hdf["stats.pg_lst"] = pg_lst
    req.hdf["stats.pg_act"] = page

    # Links to alternative formats of this table
    self.add_alternate_links(req)

    # Order in table head
    req.hdf["stats.order"] = self.order
    if self.desc:
        req.hdf["stats.desc"] = 1
    req.hdf["datetime_hint"] = util.get_datetime_format_hint()
    return "admin_downloader_stats.cs", None