def wiki_activity(project_id, start_date, end_date, groupsize, groupcnt, db,
                  req, authors_limit=None, ignored_authors=None):
    """
    Get query response for specified time interval, for max `authors_limit`
    most active users, excluding `ignored_authors`:
    Data: <author>: <count wiki modifications>
    """
    # Fall back to defaults when the caller passed nothing / falsy values.
    authors_limit = authors_limit or 10
    ignored_authors = ignored_authors or ()

    top_authors = _retrieve_most_active_wiki_authors(
        project_id, authors_limit, ignored_authors, start_date, end_date, db)
    pages = _retrieve_wiki_pages(project_id, top_authors, start_date,
                                 end_date, db)
    periods, per_author = aggregate_events_by_periods(
        top_authors, pages, start_date, groupsize, groupcnt)

    response = QueryResponse("wiki_activity", req.href('/chrome'))
    response.set_title(_("Wiki activity from %(start_date)s to %(end_date)s",
                         start_date=format_date(start_date, tzinfo=req.tz),
                         end_date=format_date(end_date, tzinfo=req.tz)))
    columns, rows = adapt_to_table(periods, per_author)
    response.set_columns(columns)
    response.set_results(rows)

    # Chart configuration: one line per author, periods on the x axis.
    chart = response.chart_info
    chart.type = 'Line'
    chart.x_legend = _('Time periods')
    chart.y_legend = _('Wiki modifications')
    chart.x_labels = periods
    chart.data = restructure_data(per_author)
    chart.tool_tip = "#key#<br>%s:#x_label#<br>%s:#val#" % \
                     (_('period'), _('Wiki modifications'))
    return response
def expand_macro(self, formatter, name, content):
    """Render a previous/next navigation link between `MyPage` day pages.

    `content` is "<offset>,<label>", a bare signed offset, or a bare label.
    Offset defaults to +1 (next page); a non-numeric offset falls back to 0.
    Returns a genshi `tag.a` link, or a 'missing' link at either end of the
    page list.
    """
    offset = +1
    label = None
    if content is not None:
        if ',' in content:
            offset, label = content.split(',', 1)
        elif content and content[0] in '+-':
            offset = content
        else:
            label = content
    try:
        offset = int(offset)
    except ValueError:
        offset = 0

    mp = MyPageModule(self.env)
    base = mp.get_mypage_base(formatter.perm.username)
    all_mypages = mp.get_all_mypages(base)

    # Determine the reference page: the page being rendered if it is one of
    # the user's MyPages, otherwise today's page name.
    r = formatter.resource
    if r.realm == 'wiki' and r.id.startswith(base):
        mypage = r.id
    else:
        tzinfo = getattr(formatter.context.req, 'tz', None)
        now = datetime.now(tzinfo or localtz)
        today = format_date(now, 'iso8601', tzinfo)
        mypage = '/'.join([base, today])

    selected = base
    idx = bisect(all_mypages, mypage)
    # adjust to actual position if mypage exists
    if 0 <= idx - 1 < len(all_mypages) and all_mypages[idx - 1] == mypage:
        idx -= 1
    self.log.debug("Reference is %s, pos %d in %r", mypage, idx, all_mypages)

    # Special cases: at the beginning or at the end, the
    # predecessors resp. successors are "missing"
    missing = False
    if idx >= len(all_mypages) - 1 and offset > 0:
        missing, tooltip = True, _("(at the end)")
    elif idx < 1 and offset < 0:
        missing, tooltip = True, _("(at the beginning)")
    if missing:
        if not label:
            label, tooltip = tooltip, None
        return tag.a(label, title=tooltip, class_='missing')

    # Link to the targeted `MyPage` page, clamped to the valid index range.
    idx += offset
    selected = all_mypages[max(0, min(idx, len(all_mypages) - 1))]
    self.log.debug("With offset %d, going to %d (adjusted to %d)",
                   offset, idx, max(0, min(idx, len(all_mypages) - 1)))
    selected_day = selected.split('/')[-1]
    try:
        tooltip = _("MyPage for %(day)s",
                    day=format_date(parse_date(selected_day)))
    except TracError:
        # BUG FIX: the placeholder was written "%(special)'" — missing the
        # 's' conversion character, which breaks %-formatting at runtime.
        tooltip = _("non-day page '%(special)s'", special=selected_day)
    return tag.a(label if label is not None else selected, title=tooltip,
                 href=formatter.href.wiki(selected))
def ticket_activity_user(project_id, username, start_date, end_date,
                         groupsize, groupcnt, db, req):
    """
    Get query response for specified time interval and `username`:
    Data: <event>: <count events>.
    Events: 'created', 'closed'.
    """
    # UNION of two event sources: tickets the user reported ('created') and
    # tickets the user owns that were switched to status 'closed'.
    # The same four parameters (user, project, start, end) feed both halves,
    # hence the `* 2` when executing.
    q = '''
        SELECT t.id, t.time, 'created' AS event
        FROM ticket t
        WHERE t.reporter=%s AND t.project_id=%s
            AND t.time >= %s AND t.time < %s
        UNION
        SELECT t.id, tc.time, 'closed' AS event
        FROM ticket t
        JOIN ticket_change tc ON t.id = tc.ticket
            AND tc.field='status' AND tc.newvalue='closed'
        WHERE t.owner=%s AND t.project_id=%s
            AND tc.time >= %s AND tc.time < %s
        ORDER BY event
    '''
    cursor = db.cursor()
    cursor.execute(q, (username, project_id, to_utimestamp(start_date),
                       to_utimestamp(end_date)) * 2)
    # N_() marks the event names for translation without translating here.
    etypes = (N_('created'), N_('closed'))
    # Each event tuple is (event-name, datetime, ticket-id).
    events = [(r[2], from_utimestamp(r[1]), r[0]) for r in cursor]
    # TODO: count closed once, use global closed set

    # Aggregate into per-period sets of ticket ids so duplicates within a
    # period are counted once.
    def init_set(e):
        return set()

    def add_to_set(stor, idx, event_data):
        stor[idx].add(event_data[2])

    groups_list, groups_data = aggregate_events_by_periods(
        etypes, events, start_date, groupsize, groupcnt, add_to_set, init_set)
    # Replace each id-set by its cardinality for charting.
    for etype, groups in groups_data.iteritems():
        for idx, ids in enumerate(groups):
            groups[idx] = len(ids)

    query_response = QueryResponse("ticket_activity", req.href('/chrome'))
    query_response.set_title(
        _("Ticket activity from %(start_date)s to %(end_date)s",
          start_date=format_date(start_date, tzinfo=req.tz),
          end_date=format_date(end_date, tzinfo=req.tz)))
    groups_data = translate_keys(groups_data)
    columns, rows = adapt_to_table(groups_list, groups_data)
    query_response.set_columns(columns)
    query_response.set_results(rows)

    chart = query_response.chart_info
    chart.type = 'Line'
    chart.width = 600
    chart.x_legend = _('Time periods')
    chart.y_legend = _('Tickets')
    chart.x_labels = groups_list
    chart.data = restructure_data(groups_data)
    chart.tool_tip = "#key#<br>%s:#x_label#<br>%s:#val#" % (_('period'),
                                                            _('tickets'))
    return query_response
def get_work_log(self, mode='all'):
    """Return work-log entries as a list of display-ready dicts.

    mode='user'    -- entries of `self.authname` only, newest first.
    mode='summary' -- each user's single most recent entry.
    anything else  -- every entry, newest first.

    Each dict carries user, display name, start/end timestamps, ticket id,
    summary, status, comment and a human-readable `delta` string.
    """
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    if mode == 'user':
        cursor.execute('SELECT wl.user, s.value, wl.starttime, wl.endtime, '
                       'wl.ticket, t.summary, t.status, wl.comment '
                       'FROM work_log wl '
                       'INNER JOIN ticket t ON wl.ticket=t.id '
                       'LEFT JOIN session_attribute s '
                       'ON wl.user=s.sid AND s.name=\'name\' '
                       'WHERE wl.user=%s '
                       'ORDER BY wl.lastchange DESC', (self.authname,))
    elif mode == 'summary':
        # Inner sub-select finds each user's latest change; the join pulls
        # the matching full row.
        cursor.execute('SELECT wl.user, s.value, wl.starttime, wl.endtime, '
                       'wl.ticket, t.summary, t.status, wl.comment '
                       'FROM (SELECT user,MAX(lastchange) lastchange '
                       'FROM work_log GROUP BY user) wlt '
                       'INNER JOIN work_log wl '
                       'ON wlt.user=wl.user AND wlt.lastchange=wl.lastchange '
                       'INNER JOIN ticket t ON wl.ticket=t.id '
                       'LEFT JOIN session_attribute s '
                       'ON wl.user=s.sid AND s.name=\'name\' '
                       'ORDER BY wl.lastchange DESC, wl.user')
    else:
        cursor.execute('SELECT wl.user, s.value, wl.starttime, wl.endtime, '
                       'wl.ticket, t.summary, t.status, wl.comment '
                       'FROM work_log wl '
                       'INNER JOIN ticket t ON wl.ticket=t.id '
                       'LEFT JOIN session_attribute s '
                       'ON wl.user=s.sid AND s.name=\'name\' '
                       'ORDER BY wl.lastchange DESC, wl.user')
    rv = []
    for user, name, starttime, endtime, ticket, summary, status, comment \
            in cursor:
        starttime = float(starttime)
        endtime = float(endtime)
        started = datetime.fromtimestamp(starttime)
        # Display name falls back to the plain user id when the
        # session_attribute 'name' is absent (LEFT JOIN yields NULL).
        dispname = user
        if name:
            dispname = '%s (%s)' % (name, user)
        # endtime == 0 marks work still in progress.
        if not endtime == 0:
            finished = datetime.fromtimestamp(endtime)
            delta = 'Worked for %s (between %s %s and %s %s)' % \
                    (pretty_timedelta(started, finished),
                     format_date(starttime), format_time(starttime),
                     format_date(endtime), format_time(endtime))
        else:
            delta = 'Started %s ago (%s %s)' % \
                    (pretty_timedelta(started),
                     format_date(starttime), format_time(starttime))
        rv.append({'user': user,
                   'name': name,
                   'dispname': dispname,
                   'starttime': int(starttime),
                   'endtime': int(endtime),
                   'delta': delta,
                   'ticket': ticket,
                   'summary': summary,
                   'status': status,
                   'comment': comment})
    return rv
def test_can_not_rename_sprints_to_have_slash_in_name(self):
    """Renaming a sprint to a name containing '/' must be rejected."""
    req = self.teh.mock_request(Usernames.product_owner)
    req.args = {
        'edit': 'edit',
        'save': True,
        'sprint_name': 'a/b',
        'name': 'fnord',
        'start': format_date(self.sprint.start),
        'end': format_date(self.sprint.end),
        'milestone': self.sprint.milestone,
    }
    self.assert_raises(ICommand.NotValidError, self.view.do_post, req)
    self.assert_false(Sprint(self.env, name='a/b').exists)
def test_format_compatibility(self):
    """Named babel formats fall back to the strftime equivalents."""
    tz = datefmt.timezone('GMT +2:00')
    t = datetime.datetime(2010, 8, 28, 11, 45, 56, 123456, datefmt.utc)
    tz_t = datetime.datetime(2010, 8, 28, 13, 45, 56, 123456, tz)

    # Converting babel's format to strftime format
    expect = lambda fmt: tz_t.strftime(fmt).decode('utf-8')
    self.assertEqual(expect('%x %H:%M'),
                     datefmt.format_datetime(t, 'short', tz))
    self.assertEqual(expect('%x'), datefmt.format_date(t, 'short', tz))
    self.assertEqual(expect('%H:%M'), datefmt.format_time(t, 'short', tz))
    for f in ('medium', 'long', 'full'):
        self.assertEqual(expect('%x %X'), datefmt.format_datetime(t, f, tz))
        self.assertEqual(expect('%x'), datefmt.format_date(t, f, tz))
        self.assertEqual(expect('%X'), datefmt.format_time(t, f, tz))
def _prep_session_table(env, spread_visits=False):
    """
    Populate the session table with known values.

    :return: a tuple of lists `(auth_list, anon_list, all_list)`
    :since 1.0: changed `db` input parameter to `env`
    """
    # Wipe any pre-existing session data first.
    with env.db_transaction as db:
        db("DELETE FROM session")
        db("DELETE FROM session_attribute")
    base_visit = time.mktime(datetime(2010, 1, 1).timetuple())
    # With spread_visits, each session's last visit is one day apart.
    delta = 86400 if spread_visits else 0
    auth_list = []
    anon_list = []
    with env.db_transaction as db:
        for i in xrange(20):
            sid = 'name%02d' % i
            authenticated = int(i < 10)
            last_visit = base_visit + (delta * i)
            val = 'val%02d' % i
            row = (sid, authenticated,
                   format_date(to_datetime(last_visit), console_date_format),
                   val, val, None)
            (auth_list if authenticated else anon_list).append(row)
            db("INSERT INTO session VALUES (%s, %s, %s)",
               (sid, authenticated, last_visit))
            db("INSERT INTO session_attribute VALUES (%s, %s, 'name', %s)",
               (sid, authenticated, val))
            db("INSERT INTO session_attribute VALUES (%s, %s, 'email', %s)",
               (sid, authenticated, val))
    return (auth_list, anon_list, auth_list + anon_list)
def _validate_add(self, req):
    """Validate the "add reminder" form held in `req.args`.

    Supports two reminder types: 'interval' (positive int + unit) and
    'date' (a future date).  Normalizes `req.args['interval']` /
    `req.args['date']` in place.  Returns True when valid; otherwise adds
    a warning to `req` and returns False.
    """
    ty = req.args.get('reminder_type')
    if ty == 'interval':
        try:
            req.args['interval'] = int(req.args.get('interval', '').strip())
            if req.args['interval'] <= 0:
                add_warning(req, "Nonpositive interval value.")
                return False
        except ValueError:
            add_warning(req, "Invalid or missing interval value.")
            return False
        if req.args.get('unit') not in ['day', 'week', 'month', 'year']:
            add_warning(req, "Please select interval unit.")
            return False
    elif ty == 'date':
        try:
            # Renamed from `time` to avoid shadowing the stdlib module name.
            when = clear_time(parse_date(req.args.get('date', '').strip()))
            req.args['date'] = format_date(when)
            now = to_datetime(None)
            if when <= now:
                add_warning(req, "Date value not in the future.")
                return False
        except TracError:
            add_warning(req, "Invalid or missing date value.")
            return False
    else:
        add_warning(req, "Please select type.")
        return False
    return True
def get_daily_backlog_chart(self, backlog_history):
    """Return data points based on Yahoo JSArray format.

    `backlog_history` is (dates, stats) where stats maps
    'opened'/'created'/'closed' to per-day ticket-id collections; the
    counts are of *distinct* ids (`set`) per day.
    """
    dates = backlog_history[0]
    backlog_stats = backlog_history[1]
    # Count distinct tickets per day.  The loop variable is named `ids`
    # (the original shadowed the builtin `list`).
    opened_counts = [len(set(ids)) for ids in backlog_stats['opened']]
    created_counts = [len(set(ids)) for ids in backlog_stats['created']]
    closed_counts = [len(set(ids)) for ids in backlog_stats['closed']]
    # Build with join instead of repeated += (quadratic on long histories).
    parts = []
    for idx, date_ in enumerate(dates):
        parts.append('{ date: "%s", opened: %d, closed: %d, created: %d}, '
                     % (format_date(date_, tzinfo=utc), opened_counts[idx],
                        closed_counts[idx], created_counts[idx]))
    return '[ ' + ''.join(parts) + ' ];'
def get_daily_backlog_chart(self, backlog_history):
    """Return daily backlog data points in Yahoo JSArray format.

    `backlog_history` is (matplotlib date numbers, stats dict).  'closed'
    counts are stacked on top of 'created' so the bar chart shows closed
    tickets above opened ones.
    """
    numdates = backlog_history[0]
    backlog_stats = backlog_history[1]
    # Loop variable renamed `ids`: the original shadowed the builtin `list`.
    opened_counts = [len(ids) for ids in backlog_stats['opened']]
    created_counts = [len(ids) for ids in backlog_stats['created']]
    # need to add create and closed ticket for charting purpose. We want to
    # show closed tickets on top of opened ticket in bar chart.
    closed_counts = []
    for i in range(len(created_counts)):
        closed_counts.append(created_counts[i]
                             + len(backlog_stats['closed'][i]))
    # NOTE: the original also computed a `bmi_dataset` that was never used
    # in the returned string; dropped here.
    parts = []
    for idx, numdate in enumerate(numdates):
        parts.append('{ date: "%s", opened: %d, closed: %d, created: %d}, '
                     % (format_date(num2date(numdate), tzinfo=utc),
                        opened_counts[idx], closed_counts[idx],
                        created_counts[idx]))
    return '[ ' + ''.join(parts) + ' ];'
def _get_active_milestones(self, exclude=None):
    '''Retrieve a list of milestones.  If exclude is specified, it will
    exclude that milestone from the list and add in the unscheduled
    milestone.'''
    cursor = self.env.get_db_cnx().cursor()
    results = []
    if exclude:
        # Synthetic "(unscheduled)" entry stands in for the excluded one.
        results.append(dict(name='(unscheduled)', due='--',
                            num_tickets=self._get_num_tickets(cursor, '')))
    cursor.execute(MILESTONE_QUERY)
    for row in cursor.fetchall():
        if exclude and exclude == row[0]:
            continue
        results.append(dict(
            name=row[0],
            due=(row[1] and format_date(row[1])) or '--',
            num_tickets=self._get_num_tickets(cursor, row[0])))
    return results
def _get_list(self, sids):
    """Yield (sid, authenticated, last-visit, name, email, handler) for
    every session matching `sids`.  'anonymous', 'authenticated' and '*'
    act as wildcards; other entries are explicit session ids."""
    want_anon = "anonymous" in sids or "*" in sids
    want_auth = "authenticated" in sids or "*" in sids
    explicit = set(self._split_sid(sid) for sid in sids
                   if sid not in ("anonymous", "authenticated", "*"))
    rows = self.env.db_query("""
        SELECT DISTINCT s.sid, s.authenticated, s.last_visit,
                        n.value, e.value, h.value
        FROM session AS s
          LEFT JOIN session_attribute AS n
            ON (n.sid=s.sid AND n.authenticated=s.authenticated
                AND n.name='name')
          LEFT JOIN session_attribute AS e
            ON (e.sid=s.sid AND e.authenticated=s.authenticated
                AND e.name='email')
          LEFT JOIN session_attribute AS h
            ON (h.sid=s.sid AND h.authenticated=s.authenticated
                AND h.name='default_handler')
        ORDER BY s.sid, s.authenticated
        """)
    for sid, authenticated, last_visit, name, email, handler in rows:
        wanted = ((want_anon and not authenticated)
                  or (want_auth and authenticated)
                  or (sid, authenticated) in explicit)
        if wanted:
            yield (sid, authenticated,
                   format_date(to_datetime(last_visit), console_date_format),
                   name, email, handler)
def test_can_create_sprints_for_milestones_with_slash_in_name(self):
    """Sprint creation must accept milestones whose name contains '/'."""
    self.teh.create_milestone("milestone/fnord")
    req = self.teh.mock_request(Usernames.product_owner)
    req.args = {
        'add': 'add',
        'sprint_name': 'fnord',
        'start': format_date(now()),
        'duration': 10,
        'milestone': "milestone/fnord",
    }
    self.assert_raises(RequestDone, self.view.do_post, req)
def _do_list(self):
    """Print a console table of all milestones with due/completed dates."""
    rows = [(m.name,
             m.due and format_date(m.due, console_date_format),
             m.completed and format_datetime(m.completed,
                                             console_datetime_format))
            for m in model.Milestone.select(self.env)]
    print_table(rows, [_("Name"), _("Due"), _("Completed")])
def test_absolute(self):
    """'absolute' mode renders "on <date> at <time>" in both contexts."""
    t = datetime.now(utc) - timedelta(days=1)
    date_part = format_date(t, locale=locale_en, tzinfo=utc)
    time_part = format_time(t, locale=locale_en, tzinfo=utc)
    label = "on %s at %s" % (date_part, time_part)
    self.assertEqual(label, self._format_chrome(t, "absolute", False))
    self.assertEqual(label, self._format_timeline(t, "absolute", False))
def formatter(self, col, cell_value):
    """Format a raw cell value for display by column name.

    Timestamp columns ('time', 'date', 'created', 'modified', 'datetime')
    are converted from microsecond timestamps; empty cells render as '--'.
    Other columns pass through unchanged.

    Fix: the original used the `cond and x or '--'` idiom, which wrongly
    yields '--' whenever the formatted value itself is falsy (e.g. an
    empty string); conditional expressions avoid that trap.
    """
    if col == 'time':
        return format_time(from_utimestamp(long(cell_value))) \
            if cell_value != '' else '--'
    if col in ('date', 'created', 'modified'):
        return format_date(from_utimestamp(long(cell_value))) \
            if cell_value != '' else '--'
    if col == 'datetime':
        return format_datetime(from_utimestamp(long(cell_value))) \
            if cell_value != '' else '--'
    return cell_value
def test_session_add_sid_name(self):
    """`session add` with sid and name succeeds and shows up in `list`."""
    test_name = sys._getframe().f_code.co_name
    rv, output = self._execute('session add john John')
    self.assertEqual(0, rv)
    rv, output = self._execute('session list john')
    expected = self.expected_results[test_name] % {
        'today': format_date(None, console_date_format)}
    self.assertEqual(expected, output)
def _format_developer_data(self, req, developer):
    """Render the load/capacity fields of `developer` in place for display."""
    render_time = lambda v: TimePropertyRenderer(self.env, v).render()
    if developer.load is not None:
        for load in developer.load:
            load.is_working_day = is_working_day(load.day)
            load.day = format_date(load.day, tzinfo=req.tz)
            load.remaining_time = render_time(load.remaining_time)
    # total_capacity may be absent; render None in that case.
    developer.total_capacity = render_time(
        getattr(developer, 'total_capacity', None))
def process_request(self, req): req.perm.assert_permission('TIMELINE_VIEW') format = req.args.get('format') maxrows = int(req.args.get('max', 0)) # Parse the from date and adjust the timestamp to the last second of # the day t = time.localtime() if req.args.has_key('from'): try: t = time.strptime(req.args.get('from'), '%x') except: pass fromdate = time.mktime((t[0], t[1], t[2], 23, 59, 59, t[6], t[7], t[8])) try: daysback = max(0, int(req.args.get('daysback', ''))) except ValueError: daysback = self.default_daysback req.hdf['timeline.from'] = format_date(fromdate) req.hdf['timeline.daysback'] = daysback available_filters = [] for event_provider in self.event_providers: available_filters += event_provider.get_timeline_filters(req) filters = [] # check the request or session for enabled filters, or use default for test in (lambda f: req.args.has_key(f[0]), lambda f: req.session.get('timeline.filter.%s' % f[0], '')\ == '1', lambda f: len(f) == 2 or f[2]): if filters: break filters = [f[0] for f in available_filters if test(f)] # save the results of submitting the timeline form to the session if req.args.has_key('update'): for filter in available_filters: key = 'timeline.filter.%s' % filter[0] if req.args.has_key(filter[0]): req.session[key] = '1' elif req.session.has_key(key): del req.session[key] stop = fromdate start = stop - (daysback + 1) * 86400 events = [] for event_provider in self.event_providers: try: events += event_provider.get_timeline_events(req, start, stop, filters) except Exception, e: # cope with a failure of that provider self._provider_failure(e, req, event_provider, filters, [f[0] for f in available_filters])
def test_with_iso8601(self):
    """'iso8601' format + locale produce strict ISO 8601 strings."""
    tz = datefmt.timezone('GMT +2:00')
    t = datetime.datetime(2010, 8, 28, 11, 45, 56, 123456, tz)
    cases = ((datefmt.format_date, '2010-08-28'),
             (datefmt.format_time, '11:45:56+02:00'),
             (datefmt.format_datetime, '2010-08-28T11:45:56+02:00'))
    for func, expected in cases:
        self.assertEqual(expected, func(t, 'iso8601', tz, 'iso8601'))
def _do_list(self, *sids):
    """Print a session table for `sids`; defaults to every session ('*')."""
    if not sids:
        sids = ['*']
    rows = [(r[0], r[1],
             format_date(to_datetime(r[2]), console_date_format),
             r[3], r[4])
            for r in self._get_list(sids)]
    print_table(rows, [_('SID'), _('Auth'), _('Last Visit'),
                       _('Name'), _('Email')])
def expand_macro(self, formatter, name, content):
    """Render recently changed wiki pages, grouped by modification date.

    `content` holds up to two comma-separated arguments: a page-name
    prefix filter and a maximum number of entries.  Pages the requester
    may not view (no WIKI_VIEW) are skipped.
    """
    prefix = limit = None
    if content:
        argv = [arg.strip() for arg in content.split(',')]
        if len(argv) > 0:
            prefix = argv[0]
            if len(argv) > 1:
                limit = int(argv[1])

    cursor = formatter.db.cursor()
    # Latest version and timestamp per page, newest pages first.
    sql = 'SELECT name, ' \
          ' max(version) AS max_version, ' \
          ' max(time) AS max_time ' \
          'FROM wiki'
    args = []
    if prefix:
        sql += ' WHERE name LIKE %s'
        args.append(prefix + '%')
    sql += ' GROUP BY name ORDER BY max_time DESC'
    if limit:
        sql += ' LIMIT %s'
        args.append(limit)
    cursor.execute(sql, args)

    # Group the rows by formatted day; rows arrive newest-first, so a
    # change of formatted date starts a new group.
    entries_per_date = []
    prevdate = None
    for name, version, ts in cursor:
        if not 'WIKI_VIEW' in formatter.perm('wiki', name, version):
            continue
        time = datetime.fromtimestamp(ts, utc)
        date = format_date(time)
        if date != prevdate:
            prevdate = date
            entries_per_date.append((date, []))
        version = int(version)
        # Only pages with more than one version get a "diff" link.
        diff_href = None
        if version > 1:
            diff_href = formatter.href.wiki(name, action='diff',
                                            version=version)
        page_name = formatter.wiki.format_page_name(name)
        entries_per_date[-1][1].append((page_name, name, version, diff_href))

    return tag.div(
        [tag.h3(date) +
         tag.ul([tag.li(tag.a(page_name, href=formatter.href.wiki(name)),
                        ' ',
                        diff_href and tag.small('(',
                                                tag.a('diff',
                                                      href=diff_href),
                                                ')') or None)
                 for page_name, name, version, diff_href in entries])
         for date, entries in entries_per_date])
def test_format_compatibility(self):
    """Named babel formats match the corresponding strftime output."""
    tz = datefmt.timezone('GMT +2:00')
    t = datetime.datetime(2010, 8, 28, 11, 45, 56, 123456, datefmt.utc)
    tz_t = datetime.datetime(2010, 8, 28, 13, 45, 56, 123456, tz)

    # Converting babel's format to strftime format; 'short' uses %H:%M
    # for the time part, the others use %X.
    cases = [('short', '%x %H:%M', '%x', '%H:%M'),
             ('medium', '%x %X', '%x', '%X'),
             ('long', '%x %X', '%x', '%X'),
             ('full', '%x %X', '%x', '%X')]
    for fmt_name, dt_fmt, d_fmt, t_fmt in cases:
        self.assertEqual(tz_t.strftime(dt_fmt).decode('utf-8'),
                         datefmt.format_datetime(t, fmt_name, tz))
        self.assertEqual(tz_t.strftime(d_fmt).decode('utf-8'),
                         datefmt.format_date(t, fmt_name, tz))
        self.assertEqual(tz_t.strftime(t_fmt).decode('utf-8'),
                         datefmt.format_time(t, fmt_name, tz))
def test_default(self):
    """With locale 'iso8601' and no explicit format, output is ISO 8601."""
    tz = datefmt.timezone('GMT +2:00')
    t = datetime.datetime(2010, 8, 28, 11, 45, 56, 123456, tz)
    for func, expected in ((datefmt.format_date, '2010-08-28'),
                           (datefmt.format_time, '11:45:56+02:00'),
                           (datefmt.format_datetime,
                            '2010-08-28T11:45:56+02:00')):
        self.assertEqual(expected, func(t, tzinfo=tz, locale='iso8601'))
def expand_macro(self, formatter, name, text):
    """Render a heading and a bullet list of open milestones.

    `text` holds exactly four comma-separated arguments:
    pattern, max_displayed, title, overdue_color.

    Fixes: the original ran two separate queries (names, then due dates)
    and relied on both returning rows in the same order to align them —
    now a single query fetches both columns.  The unused `option_list`
    local was dropped.
    """
    pattern, max_displayed, title, overdue_color = text.split(",")
    cursor = self.env.get_db_cnx().cursor()
    cursor.execute(
        "SELECT name, due FROM milestone "
        "WHERE name like %s AND completed = 0 ORDER BY due ASC;",
        [pattern])
    milestones = cursor.fetchall()

    out = StringIO()
    wikitext = "=== %s ===\n" % title
    cur_displayed = 0
    for m, due in milestones:
        if not max_displayed or cur_displayed < int(max_displayed):
            # Only milestones with a due date are listed, matching the
            # original behavior.
            if due:
                wikitext += """ * [milestone:\"%(milestonename)s\" %(milestonename)s]""" % {
                    "milestonename": m
                }
                if VERSION < '1.0':
                    date = "(%s)" % format_date(due)
                else:
                    date = "(%s)" % format_date(due,
                                                tzinfo=formatter.req.tz,
                                                locale=formatter.req.locale)
                if overdue_color and \
                        datetime.now(utc) > from_utimestamp(due):
                    wikitext += ' [[span(style=background-color: ' + \
                                overdue_color + ',' + date + ')]]'
                else:
                    wikitext += ' ' + date
                wikitext += '\n'
                cur_displayed += 1
    Formatter(self.env, formatter.context).format(wikitext, out)
    return Markup(out.getvalue())
def repository_activity_user(project_id, username, start_date, end_date,
                             groupsize, groupcnt, db, req):
    """
    Get query response for specified time interval and `username`:
    Data: <event>: <count events>.
    Events: 'commit'.
    """
    # Revisions by `username` in any repository whose 'project_id'
    # attribute matches the given project.
    sql = '''
        SELECT r.time
        FROM revision r
        WHERE r.repos IN (
                SELECT id FROM repository
                WHERE name='project_id' AND value=%s)
            AND r.author=%s AND r.time >= %s AND r.time < %s
    '''
    cursor = db.cursor()
    cursor.execute(sql, (str(project_id), username,
                         to_utimestamp(start_date), to_utimestamp(end_date)))
    etypes = (N_('commit'),)
    events = [('commit', from_utimestamp(row[0])) for row in cursor]
    periods, data = aggregate_events_by_periods(etypes, events, start_date,
                                                groupsize, groupcnt)

    response = QueryResponse("repository_activity", req.href('/chrome'))
    response.set_title(_("Commits from %(start_date)s to %(end_date)s",
                         start_date=format_date(start_date, tzinfo=req.tz),
                         end_date=format_date(end_date, tzinfo=req.tz)))
    data = translate_keys(data)
    columns, rows = adapt_to_table(periods, data)
    response.set_columns(columns)
    response.set_results(rows)

    chart = response.chart_info
    chart.type = 'Line'
    chart.x_legend = _('Time periods')
    # chart.y_legend = _('Number')
    chart.x_labels = periods
    chart.data = restructure_data(data)
    chart.tool_tip = "#key#<br>%s:#x_label#<br>%s:#val#" % (_('period'),
                                                            _('number'))
    return response
def test_format_compatibility(self):
    """strftime-style patterns are honored when a babel locale is given.

    Fix: the original built a `tz_t` datetime that was never used by any
    assertion (all expected values are literals); the unused local is
    removed.
    """
    tz = datefmt.timezone("GMT +2:00")
    t = datetime.datetime(2010, 8, 28, 11, 45, 56, 123456, datefmt.utc)
    en_US = Locale.parse("en_US")

    # Converting default format to babel's format
    self.assertEqual("Aug 28, 2010 1:45:56 PM",
                     datefmt.format_datetime(t, "%x %X", tz, en_US))
    self.assertEqual("Aug 28, 2010",
                     datefmt.format_datetime(t, "%x", tz, en_US))
    self.assertEqual("1:45:56 PM",
                     datefmt.format_datetime(t, "%X", tz, en_US))
    self.assertEqual("Aug 28, 2010",
                     datefmt.format_date(t, "%x", tz, en_US))
    self.assertEqual("1:45:56 PM",
                     datefmt.format_time(t, "%X", tz, en_US))
def test_with_babel_format(self):
    """Babel named formats with the 'iso8601' locale yield ISO strings."""
    tz = datefmt.timezone("GMT +2:00")
    t = datetime.datetime(2010, 8, 28, 11, 45, 56, 123456, tz)
    # The date part is format-independent; time/datetime precision grows
    # with the named format ('long'/'full' add the UTC offset).
    expectations = {
        "short": ("2010-08-28T11:45", "11:45"),
        "medium": ("2010-08-28T11:45:56", "11:45:56"),
        "long": ("2010-08-28T11:45:56+02:00", "11:45:56+02:00"),
        "full": ("2010-08-28T11:45:56+02:00", "11:45:56+02:00"),
    }
    for fmt_name, (dt_expected, t_expected) in expectations.items():
        self.assertEqual("2010-08-28",
                         datefmt.format_date(t, fmt_name, tz, "iso8601"))
        self.assertEqual(t_expected,
                         datefmt.format_time(t, fmt_name, tz, "iso8601"))
        self.assertEqual(dt_expected,
                         datefmt.format_datetime(t, fmt_name, tz, "iso8601"))
def render_macro(self, req, name, content):
    """Render recently changed wiki pages grouped by day (legacy macro API).

    `content` holds up to two comma-separated arguments: a page-name
    prefix filter and a maximum number of entries.
    """
    prefix = limit = None
    if content:
        argv = [arg.strip() for arg in content.split(",")]
        if len(argv) > 0:
            prefix = argv[0]
            if len(argv) > 1:
                limit = int(argv[1])

    db = self.env.get_db_cnx()
    cursor = db.cursor()
    # Latest version and timestamp per page, newest first.
    sql = "SELECT name, " \
          " max(version) AS max_version, " \
          " max(time) AS max_time " \
          "FROM wiki"
    args = []
    if prefix:
        sql += " WHERE name LIKE %s"
        args.append(prefix + "%")
    sql += " GROUP BY name ORDER BY max_time DESC"
    if limit:
        sql += " LIMIT %s"
        args.append(limit)
    cursor.execute(sql, args)

    # Rows arrive newest-first; a change in formatted date starts a group.
    entries_per_date = []
    prevdate = None
    for name, version, time in cursor:
        date = format_date(time)
        if date != prevdate:
            prevdate = date
            entries_per_date.append((date, []))
        entries_per_date[-1][1].append((name, int(version)))

    wiki = WikiSystem(self.env)
    return html.DIV(
        [
            html.H3(date)
            + html.UL(
                [
                    html.LI(
                        html.A(wiki.format_page_name(name),
                               href=req.href.wiki(name)),
                        " ",
                        # Pages with several versions get a "(diff)" link.
                        version > 1
                        and html.SMALL(
                            "(",
                            html.A("diff",
                                   href=req.href.wiki(name, action="diff",
                                                      version=version)),
                            ")",
                        )
                        or None,
                    )
                    for name, version in entries
                ]
            )
            for date, entries in entries_per_date
        ]
    )
def get_commit_by_date_chart(self, commit_history):
    """Return commits-per-day data points as a Yahoo JSArray string."""
    numdates = commit_history[0]
    numcommits = commit_history[1]
    entries = ['{ date: "%s", commits: %d}, '
               % (format_date(num2date(nd), tzinfo=utc), numcommits[i])
               for i, nd in enumerate(numdates)]
    return '[ ' + ''.join(entries) + ' ];'
absurls=(format == 'rss')) value['parsed'] = format == 'rss' and unicode(desc) or desc elif column == 'reporter': if cell.find('@') != -1: value['rss'] = cell elif cell in email_map: value['rss'] = email_map[cell] elif column == 'report': value['report_href'] = req.href.report(cell) elif column in ('time', 'date', 'changetime', 'created', 'modified'): if cell == 'None': value['date'] = value['time'] = cell value['datetime'] = value['gmt'] = cell else: value['date'] = format_date(cell) value['time'] = format_time(cell) value['datetime'] = format_datetime(cell) value['gmt'] = http_date(cell) prefix = 'report.items.%d.%s' % (row_idx, unicode(column)) req.hdf[prefix] = unicode(cell) for key in value.keys(): req.hdf[prefix + '.' + key] = value[key] col_idx += 1 row_idx += 1 req.hdf['report.numrows'] = row_idx if format == 'rss': return 'report_rss.cs', 'application/rss+xml' elif format == 'csv':
def expand_macro(self, formatter, name, arguments):
    """Render the WikiCalendar/WikiTicketCalendar macro as an HTML table.

    :param formatter: wiki formatter providing `req` and the rendering
                      resource context
    :param name: macro name invoked ('WikiCalendar' or
                 'WikiTicketCalendar') — selects ticket handling
    :param arguments: raw macro argument string (positional + keyword)
    :return: a Genshi/tag fragment with the month-view calendar
    """
    env = self.env
    req = formatter.req
    tz = req.tz

    # Parse arguments from macro invocation.
    args, kwargs = parse_args(arguments, strict=False)

    # Enable week number display regardless of argument position.
    week_pref = 'w' in args and args.pop(args.index('w'))
    week_pref = week_pref and week_pref or kwargs.get('w')
    week_start = None
    week_num_start = None
    # Parse per-instance week calculation rules, if available.
    if week_pref:
        if ':' not in week_pref:
            # Treat undelimited setting as week start.
            week_pref += ':'
        w_start, wn_start = week_pref.split(':')
        try:
            week_start = int(w_start)
        except ValueError:
            week_start = None
        else:
            # Valid day numbers are 0 (Mo) .. 6 (Su).  A conditional
            # expression is used instead of the old `and ... or` idiom,
            # which wrongly collapsed a valid 0 (Monday) to None.
            week_start = week_start if -1 < week_start < 7 else None
        try:
            week_num_start = int(wn_start)
        except ValueError:
            week_num_start = None
        else:
            # Only ISO-style first-week rules are supported.
            week_num_start = week_num_start \
                             if week_num_start in (1, 4, 7) else None

    # Respect user's locale, if available.
    try:
        locale = Locale.parse(str(req.locale))
    except (AttributeError, UnknownLocaleError):
        # Attribute 'req.locale' available since Trac 0.12.
        locale = None
    if has_babel:
        if locale:
            if not locale.territory:
                # Search first locale, which has the same `language` and
                # territory in preferred languages.
                for l in req.languages:
                    l = l.replace('-', '_').lower()
                    if l.startswith(locale.language.lower() + '_'):
                        try:
                            l = Locale.parse(l)
                            if l.territory:
                                locale = l
                                break  # first one rules
                        except UnknownLocaleError:
                            pass
            if not locale.territory and locale.language in LOCALE_ALIASES:
                locale = Locale.parse(LOCALE_ALIASES[locale.language])
        else:
            # Default fallback.
            locale = Locale('en', 'US')
        env.log.debug('Locale setting for wiki calendar: %s'
                      % locale.get_display_name('en'))

    # `is None` check: 0 (Monday) is a valid, explicitly chosen start.
    if week_start is None:
        if week_pref and week_pref.lower().startswith('iso'):
            week_start = 0
            week_num_start = 4
        elif has_babel:
            week_start = locale.first_week_day
        else:
            import calendar
            week_start = calendar.firstweekday()
        # ISO calendar will remain as default.
    if week_num_start is None:
        if week_start == 6:
            week_num_start = 1
        else:
            week_num_start = 4
    env.log.debug('Effective settings: first_week_day=%s, '
                  '1st_week_of_year_rule=%s' % (week_start, week_num_start))

    # Find year and month of interest.
    year = req.args.get('year')
    # Not clicked on any previous/next button, next look for macro args.
    if not year and len(args) >= 1 and args[0] != "*":
        year = args[0]
    year = year and year.isnumeric() and int(year) or None

    month = req.args.get('month')
    # Not clicked on any previous/next button, next look for macro args.
    if not month and len(args) >= 2 and args[1] != "*":
        month = args[1]
    month = month and month.isnumeric() and int(month) or None

    now = datetime.now(tz)
    # Force offset from start-of-day to avoid a false 'today' marker,
    # but use it only on request of different month/year.
    # (Fixed: `datetime.replace` returns a new object; the previous
    # bare `now.replace(second=1)` discarded its result.)
    now = now.replace(second=1)
    today = None
    if month and month != now.month:
        today = now.replace(month=month)
    if year and year != now.year:
        today = today and today.replace(year=year) or \
                now.replace(year=year)
    # Use current month and year, if nothing else has been requested.
    if not today:
        today = now.replace(hour=0, minute=0, second=0, microsecond=0)

    showbuttons = True
    if len(args) >= 3 or kwargs.has_key('nav'):
        try:
            showbuttons = kwargs['nav'] in _TRUE_VALUES
        except KeyError:
            showbuttons = args[2] in _TRUE_VALUES

    wiki_page_format = "%Y-%m-%d"
    if len(args) >= 4 and args[3] != "*" or kwargs.has_key('wiki'):
        try:
            wiki_page_format = str(kwargs['wiki'])
        except KeyError:
            wiki_page_format = str(args[3])
    # Support relative paths in macro arguments for wiki page links.
    wiki_page_format = resolve_relative_name(wiki_page_format,
                                             formatter.resource.id)

    list_condense = 0
    show_t_open_dates = True
    wiki_subpages = []

    # Read optional check plan.
    check = []
    if kwargs.has_key('check'):
        check = kwargs['check'].split('.')

    if name == 'WikiTicketCalendar':
        if len(args) >= 5 or kwargs.has_key('cdate'):
            try:
                show_t_open_dates = kwargs['cdate'] in _TRUE_VALUES
            except KeyError:
                show_t_open_dates = args[4] in _TRUE_VALUES

        # TracQuery support for ticket selection
        query_args = "id!=0"
        if len(args) >= 7 or kwargs.has_key('query'):
            # prefer query arguments provided by kwargs
            try:
                query_args = kwargs['query']
            except KeyError:
                query_args = args[6]
        provider = WikiCalendarTicketProvider(env)
        tickets = provider.harvest(req, query_args)

        # compress long ticket lists
        if len(args) >= 8 or kwargs.has_key('short'):
            # prefer query arguments provided by kwargs
            try:
                list_condense = int(kwargs['short'])
            except KeyError:
                list_condense = int(args[7])

        # control calendar display width
        cal_width = "100%;"
        if len(args) >= 9 or kwargs.has_key('width'):
            # prefer query arguments provided by kwargs
            try:
                cal_width = kwargs['width']
            except KeyError:
                cal_width = args[8]

    # multiple wiki (sub)pages per day
    if kwargs.has_key('subpages'):
        wiki_subpages = kwargs['subpages'].split('|')

    # Prepare datetime objects for previous/next navigation link creation.
    prev_year = month_offset(today, -12)
    prev_quarter = month_offset(today, -3)
    prev_month = month_offset(today, -1)
    next_month = month_offset(today, 1)
    next_quarter = month_offset(today, 3)
    next_year = month_offset(today, 12)

    # Find first and last calendar day, probably in last/next month,
    # using datetime objects exactly at start-of-day here.
    # Note: Calendar days are numbered 0 (Mo) - 6 (Su).
    first_day_month = today.replace(day=1, second=0)
    first_day = first_day_month - timedelta(
        week_index(first_day_month, week_start))
    last_day_month = next_month.replace(day=1) - timedelta(1)
    # Pad the final row out to a full week, if needed.
    if ((last_day_month - first_day).days + 1) % 7 > 0:
        last_day = last_day_month + timedelta(7 - (
            (last_day_month - first_day).days + 1) % 7)
    else:
        last_day = last_day_month

    # Finally building the output now.
    # Begin with caption and optional navigation links.
    buff = tag.tr()
    if showbuttons is True:
        # Create calendar navigation buttons.
        nx = 'next'
        pv = 'prev'
        nav_pv_y = _nav_link(req, '<<', pv, prev_year, locale)
        nav_pv_q = _nav_link(req, ' «', pv, prev_quarter, locale)
        nav_pv_m = _nav_link(req, ' <', pv, prev_month, locale)
        nav_nx_m = _nav_link(req, '> ', nx, next_month, locale)
        nav_nx_q = _nav_link(req, '» ', nx, next_quarter, locale)
        nav_nx_y = _nav_link(req, '>>', nx, next_year, locale)
        # Add buttons for going to previous months and year.
        buff(nav_pv_y, nav_pv_q, nav_pv_m)

    # The caption will always be there.
    if has_babel:
        heading = tag.td(format_datetime(today, 'MMMM y', locale=locale))
    else:
        heading = tag.td(format_date(today, '%B %Y'))
    buff = buff(heading(class_='y'))

    if showbuttons is True:
        # Add buttons for going to next months and year.
        buff(nav_nx_m, nav_nx_q, nav_nx_y)

    buff = tag.caption(tag.table(tag.tbody(buff)))
    buff = tag.table(buff)
    if name == 'WikiTicketCalendar':
        if cal_width.startswith('+') is True:
            width = ":".join(['min-width', cal_width])
            buff(class_='wikitcalendar', style=width)
        else:
            buff(class_='wikitcalendar')
    if name == 'WikiCalendar':
        buff(class_='wiki-calendar')

    heading = tag.tr()
    heading(align='center')
    if week_pref:
        # Add an empty cell matching the week number column below.
        heading(tag.th())

    day_names = [(idx, day_name) for idx, day_name in get_day_names(
        'abbreviated', 'format', locale).iteritems()]
    # Read day names after shifting into correct position.
    for idx, name_ in day_names[week_start:7] + day_names[0:week_start]:
        col = tag.th(name_)
        if has_babel:
            weekend = idx >= locale.weekend_start and \
                      idx <= locale.weekend_end
        else:
            weekend = idx > 4
        col(class_=('workday', 'weekend')[weekend], scope='col')
        heading(col)
    heading = buff(tag.thead(heading))

    # Building main calendar table body
    buff = tag.tbody()
    day = first_day
    while day.date() <= last_day.date():
        # Insert a new row for every week.
        if (day - first_day).days % 7 == 0:
            line = tag.tr()
            line(align='right')
            if week_pref:
                cell = tag.td(week_num(env, day, week_start,
                                       week_num_start))
                line(cell(class_='week'))
        if not (day < first_day_month or day > last_day_month):
            # Day belongs to the requested month.
            wiki = format_date(day, wiki_page_format)
            if day == today:
                a_class = 'day today'
                td_class = 'today'
            else:
                a_class = 'day'
                td_class = 'day'
            # Start-of-day and end-of-day timestamps (µs or s scale).
            if uts:
                day_ts = to_utimestamp(day)
                day_ts_eod = day_ts + 86399999999
            else:
                day_ts = to_timestamp(day)
                day_ts_eod = day_ts + 86399

            # Check for milestone(s) on that day.
            db = env.get_db_cnx()
            cursor = db.cursor()
            cursor.execute("""
                SELECT name
                  FROM milestone
                 WHERE due >= %s and due <= %s
                """, (day_ts, day_ts_eod))
            milestones = tag()
            for row in cursor:
                if not a_class.endswith('milestone'):
                    a_class += ' milestone'
                milestone = to_unicode(row[0])
                url = env.href.milestone(milestone)
                milestone = '* ' + milestone
                milestones = tag(milestones,
                                 tag.div(tag.a(milestone, href=url),
                                         class_='milestone'))
            label = tag.span(day.day)
            label(class_='day')
            # Generate wiki page links with name specified in
            # 'wiki_page_format', and check their existence.
            if len(wiki_subpages) > 0:
                pages = tag(label, Markup('<br />'))
                for page in wiki_subpages:
                    label = tag(' ', page[0])
                    page = '/'.join([wiki, page])
                    pages(self._wiki_link(req, args, kwargs, page, label,
                                          'subpage', check))
            else:
                pages = self._wiki_link(req, args, kwargs, wiki, label,
                                        a_class, check)
            cell = tag.td(pages)
            cell(class_=td_class, valign='top')
            if name == 'WikiCalendar':
                line(cell)
            else:
                if milestones:
                    cell(milestones)
                else:
                    cell(tag.br())

                match = []
                match_od = []
                ticket_heap = tag('')
                ticket_list = tag.div('')
                ticket_list(align='left', class_='condense')

                # Get tickets with due date set to day.
                for t in tickets:
                    due = t.get(self.tkt_due_field)
                    if due is None or due in ('', '--'):
                        continue
                    else:
                        if self.tkt_due_format == 'ts':
                            if not isinstance(due, datetime):
                                continue
                            if uts:
                                due_ts = to_utimestamp(due)
                            else:
                                due_ts = to_timestamp(due)
                            if due_ts < day_ts or due_ts > day_ts_eod:
                                continue
                        else:
                            # Beware: Format might even be unicode string,
                            # but str is required by the function.
                            duedate = format_date(day,
                                                  str(self.tkt_due_format))
                            if not due == duedate:
                                continue

                    tkt_id = t.get('id')
                    ticket, short = _ticket_links(env, formatter, t)
                    ticket_heap(ticket)
                    if not tkt_id in match:
                        if len(match) == 0:
                            ticket_list(short)
                        else:
                            ticket_list(', ', short)
                        match.append(tkt_id)

                # Optionally, get tickets created on day too.
                if show_t_open_dates is True:
                    ticket_od_list = tag.div('')
                    ticket_od_list(align='left',
                                   class_='opendate_condense')

                    for t in tickets:
                        if uts:
                            ticket_ts = to_utimestamp(t.get('time'))
                        else:
                            ticket_ts = to_timestamp(t.get('time'))
                        if ticket_ts < day_ts or ticket_ts > day_ts_eod:
                            continue

                        a_class = 'opendate_'
                        tkt_id = t.get('id')
                        ticket, short = _ticket_links(env, formatter,
                                                      t, a_class)
                        ticket_heap(ticket)
                        if not tkt_id in match:
                            if len(match_od) == 0:
                                ticket_od_list(short)
                            else:
                                ticket_od_list(', ', short)
                            match_od.append(tkt_id)

                matches = len(match) + len(match_od)
                if list_condense > 0 and matches >= list_condense:
                    # Condensed view: comma-separated short links.
                    if len(match_od) > 0:
                        if len(match) > 0:
                            ticket_list(', ')
                        ticket_list = tag(ticket_list, ticket_od_list)
                    line(cell(ticket_list))
                else:
                    line(cell(ticket_heap))
        else:
            # Day belongs to the previous/next month filling the grid.
            if name == 'WikiCalendar':
                wiki = format_date(day, wiki_page_format)
                a_class = 'day adjacent_month'
                pages = self._wiki_link(req, args, kwargs, wiki,
                                        day.day, a_class)
                cell = tag.td(pages, class_='day adjacent_month')
                line(cell)
            else:
                cell = tag.td('', class_='day adjacent_month')
                line(cell)

        # Append completed week rows.
        if (day - first_day).days % 7 == 6:
            buff(line)
        day += timedelta(1)

    buff = tag.div(heading(buff))
    if name == 'WikiTicketCalendar':
        if cal_width.startswith('+') is True:
            width = ":".join(['width', cal_width])
            buff(class_='wikitcalendar', style=width)
        else:
            buff(class_='wikitcalendar')
    if name == 'WikiCalendar':
        buff(class_='wiki-calendar')

    # Add common CSS stylesheet.
    if self.internal_css and not req.args.get('wikicalendar'):
        # Put definitions directly into the output; `with` ensures the
        # file handle is closed even if reading fails.
        with open('/'.join([self.htdocs_path, 'wikicalendar.css']),
                  'Ur') as f:
            css = tag.style(Markup('<!--\n'), '\n'.join(f.readlines()),
                            Markup('-->\n'))(type="text/css")
        # Add hint to prevent multiple inclusions.
        req.args['wikicalendar'] = True
        return tag(css, buff)
    elif not req.args.get('wikicalendar'):
        add_stylesheet(req, 'wikicalendar/wikicalendar.css')
    return buff
def process_request(self, req):
    """Serve the test-manager statistics page and its data endpoints.

    Depending on ``req.args['content']`` this either streams raw chart
    data ('piechartdata', 'chartdata', 'ticketchartdata'), streams a
    CSV export ('downloadcsv'), or renders the statistics HTML page.
    """
    testmanagersystem = TestManagerSystem(self.env)
    # Test-case statuses grouped by display color:
    # 'green' = successful, 'red' = failed, 'yellow' = still to test.
    tc_statuses = testmanagersystem.get_tc_statuses_by_color()

    if 'testmanager' in self.config:
        self.default_days_back = self.config.getint(
            'testmanager', 'default_days_back', TESTMANAGER_DEFAULT_DAYS_BACK)
        self.default_interval = self.config.getint(
            'testmanager', 'default_interval', TESTMANAGER_DEFAULT_INTERVAL)

    req_content = req.args.get('content')
    testplan = None
    catpath = None
    testplan_contains_all = True

    self.env.log.debug("Test Stats - process_request: %s" % req_content)

    # Optional test-plan filter encoded as "<planid>|<catpath>".
    grab_testplan = req.args.get('testplan')
    if grab_testplan and not grab_testplan == "__all":
        testplan = grab_testplan.partition('|')[0]
        catpath = grab_testplan.partition('|')[2]
        tp = TestPlan(self.env, testplan, catpath)
        testplan_contains_all = tp['contains_all']

    today = datetime.today()
    # NOTE(review): the two-day pad on 'today' presumably absorbs
    # timezone offsets so current-day data is never cut off — confirm.
    today = today.replace(tzinfo=req.tz) + timedelta(2)
    # Stats start from two years back
    beginning = today - timedelta(720)

    if (not req_content == None) and (req_content == "piechartdata"):
        # Pie chart: overall successful / failed / to-be-tested counts.
        num_successful = 0
        for tc_outcome in tc_statuses['green']:
            num_successful += self._get_num_tcs_by_status(
                beginning, today, tc_outcome, testplan, req)

        num_failed = 0
        for tc_outcome in tc_statuses['red']:
            num_failed += self._get_num_tcs_by_status(
                beginning, today, tc_outcome, testplan, req)

        num_to_be_tested = 0
        if testplan_contains_all:
            # Derive the remainder from the full test-case count.
            num_to_be_tested = self._get_num_testcases(
                beginning, today, catpath, req) - num_successful - num_failed
        else:
            for tc_outcome in tc_statuses['yellow']:
                num_to_be_tested += self._get_num_tcs_by_status(
                    beginning, today, tc_outcome, testplan, req)

        jsdstr = """
            [
                {"response": "%s", "count": %s},
                {"response": "%s", "count": %s},
                {"response": "%s", "count": %s}
            ]
            """ % (_("Successful"), num_successful,
                   _("Failed"), num_failed,
                   _("To be tested"), num_to_be_tested)
        jsdstr = jsdstr.strip()

        if isinstance(jsdstr, unicode):
            jsdstr = jsdstr.encode('utf-8')

        req.send_header("Content-Length", len(jsdstr))
        req.write(jsdstr)
        return

    if not None in [
            req.args.get('end_date'),
            req.args.get('start_date'),
            req.args.get('resolution')
    ]:
        # form submit
        grab_at_date = req.args.get('end_date')
        grab_from_date = req.args.get('start_date')
        grab_resolution = req.args.get('resolution')

        self.env.log.debug("Start date: %s", grab_from_date)
        self.env.log.debug("End date: %s", grab_at_date)

        # validate inputs
        if None in [grab_at_date, grab_from_date]:
            raise TracError('Please specify a valid range.')

        if None in [grab_resolution]:
            raise TracError('Please specify the graph interval.')

        if 0 in [
                len(grab_at_date),
                len(grab_from_date),
                len(grab_resolution)
        ]:
            raise TracError(
                'Please ensure that all fields have been filled in.')

        if not grab_resolution.isdigit():
            raise TracError(
                'The graph interval field must be an integer, days.')

        # 'compatibility' switches between old and new Trac date APIs.
        if compatibility:
            at_date = parse_date(grab_at_date, req.tz) + timedelta(2)
            from_date = parse_date(grab_from_date, req.tz)
        else:
            at_date = user_time(req, parse_date, grab_at_date, hint='date')
            from_date = user_time(req, parse_date, grab_from_date,
                                  hint='date')

        graph_res = int(grab_resolution)
    else:
        # default data
        todays_date = datetime.today()
        at_date = todays_date  #+ timedelta(1) # datetime.combine(todays_date,time(23,59,59,0,req.tz))
        at_date = at_date.replace(tzinfo=req.tz) + timedelta(2)
        from_date = at_date - timedelta(self.default_days_back)
        graph_res = self.default_interval

    count = []

    # Calculate 0th point
    last_date = from_date - timedelta(graph_res)

    # Calculate remaining points
    for cur_date in daterange(from_date, at_date, graph_res):
        datestr = format_date(cur_date)
        if graph_res != 1:
            datestr = "%s thru %s" % (format_date(last_date), datestr)

        if (not req_content == None) and (req_content == "ticketchartdata"):
            # Ticket-trend series: totals are cumulative from
            # 'beginning' up to each sample date.
            num_total = self._get_num_tickets_total(
                beginning, cur_date, testplan, req)
            num_closed = self._get_num_tickets_by_status(
                beginning, cur_date, 'closed', testplan, req)
            num_active = num_total - num_closed

            count.append({
                'from_date': format_date(last_date),
                'to_date': datestr,
                'date': datestr,
                'active_tickets': num_active,
                'closed_tickets': num_closed,
                'tot_tickets': num_total
            })
        else:
            # Handling custom test case outcomes here
            # Per-interval counts (last_date..cur_date) ...
            num_new = self._get_num_testcases(last_date, cur_date, catpath,
                                              req)

            num_successful = 0
            for tc_outcome in tc_statuses['green']:
                num_successful += self._get_num_tcs_by_status(
                    last_date, cur_date, tc_outcome, testplan, req)

            num_failed = 0
            for tc_outcome in tc_statuses['red']:
                num_failed += self._get_num_tcs_by_status(
                    last_date, cur_date, tc_outcome, testplan, req)

            # ... and cumulative counts (from_date..cur_date).
            num_all_successful = 0
            for tc_outcome in tc_statuses['green']:
                num_all_successful += self._get_num_tcs_by_status(
                    from_date, cur_date, tc_outcome, testplan, req)

            num_all_failed = 0
            for tc_outcome in tc_statuses['red']:
                num_all_failed += self._get_num_tcs_by_status(
                    from_date, cur_date, tc_outcome, testplan, req)

            num_all = 0
            num_all_untested = 0
            if testplan_contains_all:
                num_all = self._get_num_testcases(None, cur_date, catpath,
                                                  req)
                num_all_untested = num_all - num_all_successful - num_all_failed
            else:
                for tc_outcome in tc_statuses['yellow']:
                    num_all_untested += self._get_num_tcs_by_status(
                        from_date, cur_date, tc_outcome, testplan, req)
                num_all = num_all_untested + num_all_successful + num_all_failed

            count.append({
                'from_date': format_date(last_date),
                'to_date': datestr,
                'date': datestr,
                'new_tcs': num_new,
                'successful': num_successful,
                'failed': num_failed,
                'all_tcs': num_all,
                'all_successful': num_all_successful,
                'all_untested': num_all_untested,
                'all_failed': num_all_failed
            })

        last_date = cur_date

    # if chartdata is requested, raw text is returned rather than data object
    # for templating
    if (not req_content == None) and (req_content == "chartdata"):
        jsdstr = '{"chartdata": [\n'

        for x in count:
            jsdstr += '{"date": "%s",' % x['date']
            jsdstr += ' "new_tcs": %s,' % x['new_tcs']
            jsdstr += ' "successful": %s,' % x['successful']
            jsdstr += ' "failed": %s,' % x['failed']
            jsdstr += ' "all_tcs": %s,' % x['all_tcs']
            jsdstr += ' "all_successful": %s,' % x['all_successful']
            jsdstr += ' "all_untested": %s,' % x['all_untested']
            jsdstr += ' "all_failed": %s},\n' % x['all_failed']
        # Drop the trailing ",\n" of the last entry.
        # NOTE(review): with an empty 'count' this slices into the JSON
        # prefix itself — presumably never the case in practice; confirm.
        jsdstr = jsdstr[:-2] + '\n]}'

        if isinstance(jsdstr, unicode):
            jsdstr = jsdstr.encode('utf-8')

        req.send_header("Content-Length", len(jsdstr))
        req.write(jsdstr)
        return
    elif (not req_content == None) and (req_content == "downloadcsv"):
        csvstr = "Date from;Date to;New Test Cases;Successful;Failed;Total Test Cases;Total Successful;Total Untested;Total Failed\r\n"

        for x in count:
            csvstr += '%s;' % x['from_date']
            csvstr += '%s;' % x['to_date']
            csvstr += '%s;' % x['new_tcs']
            csvstr += '%s;' % x['successful']
            csvstr += '%s;' % x['failed']
            csvstr += '%s;' % x['all_tcs']
            csvstr += '%s;' % x['all_successful']
            csvstr += '%s;' % x['all_untested']
            csvstr += '%s\r\n' % x['all_failed']

        if isinstance(csvstr, unicode):
            csvstr = csvstr.encode('utf-8')

        req.send_header("Content-Length", len(csvstr))
        req.send_header("Content-Disposition",
                        "attachment;filename=Test_stats.csv")
        req.write(csvstr)
        return
    elif (not req_content == None) and (req_content == "ticketchartdata"):
        jsdstr = '{"ticketchartdata": [\n'

        for x in count:
            jsdstr += '{"date": "%s",' % x['date']
            jsdstr += ' "tot_tickets": %s,' % x['tot_tickets']
            jsdstr += ' "active_tickets": %s,' % x['active_tickets']
            jsdstr += ' "closed_tickets": %s},\n' % x['closed_tickets']

        jsdstr = jsdstr[:-2] + '\n]}'

        if isinstance(jsdstr, unicode):
            jsdstr = jsdstr.encode('utf-8')

        req.send_header("Content-Length", len(jsdstr))
        req.write(jsdstr)
        return
    else:
        # Normal rendering of first chart
        # NOTE(review): 'showall' is not used in the visible code —
        # possibly consumed by the template elsewhere; confirm.
        showall = req.args.get('show') == 'all'

        testplan_list = []
        for planid, catid, catpath, name, author, ts_str in \
                testmanagersystem.list_all_testplans():
            testplan_list.append({
                'planid': planid,
                'catpath': catpath,
                'name': name
            })

        data = {}
        data['testcase_data'] = count
        data['resolution'] = str(graph_res)
        data['baseurl'] = req.base_url
        data['testplans'] = testplan_list
        data['ctestplan'] = testplan
        if compatibility:
            data['start_date'] = format_date(from_date)
            data['end_date'] = format_date(at_date)
            return 'testmanagerstats_compatible.html', data, None
        else:
            data['start_date'] = from_date
            data['end_date'] = at_date
            # Wire up the jQuery-UI date pickers for the form fields.
            Chrome(self.env).add_jquery_ui(req)
            data.update({
                'date_hint': get_date_format_hint(req.lc_time),
            })
            is_iso8601 = req.lc_time == 'iso8601'
            add_script_data(req, jquery_ui={
                'month_names': get_month_names_jquery_ui(req),
                'day_names': get_day_names_jquery_ui(req),
                'date_format': get_date_format_jquery_ui(req.lc_time),
                'time_format': get_time_format_jquery_ui(req.lc_time),
                'ampm': not is_24_hours(req.lc_time),
                'first_week_day': get_first_week_day_jquery_ui(req),
                'timepicker_separator': 'T' if is_iso8601 else ' ',
                'show_timezone': is_iso8601,
                'timezone_list': get_timezone_list_jquery_ui() \
                                 if is_iso8601 else [],
                'timezone_iso8601': is_iso8601,
            })
            return 'testmanagerstats.html', data, None
def process_request(self, req):
    """Render the timeline page or its RSS feed.

    Gathers events from all registered event providers over the
    requested period, filters them by enabled filters and author
    include/exclude sets, and returns either the HTML template data
    or the RSS variant when ``format=rss`` is requested.
    """
    req.perm('timeline').require('TIMELINE_VIEW')

    format = req.args.get('format')
    maxrows = req.args.getint('max', 50 if format == 'rss' else 0)
    lastvisit = req.session.as_int('timeline.lastvisit', 0)
    # indication of new events is unchanged when form is updated by user
    revisit = any(a in req.args
                  for a in ['update', 'from', 'daysback', 'author'])
    if revisit:
        lastvisit = req.session.as_int('timeline.nextlastvisit',
                                       lastvisit)

    # Parse the from date and adjust the timestamp to the last second of
    # the day
    fromdate = datetime_now(req.tz)
    today = truncate_datetime(fromdate)
    yesterday = to_datetime(today.replace(tzinfo=None) - timedelta(days=1),
                            req.tz)
    precisedate = precision = None
    if 'from' in req.args:
        # Acquire from date only from non-blank input
        reqfromdate = req.args.get('from').strip()
        if reqfromdate:
            try:
                precisedate = user_time(req, parse_date, reqfromdate)
            except TracError as e:
                add_warning(req, e)
            else:
                fromdate = precisedate.astimezone(req.tz)
        # 'precision' controls how precisely the requested anchor date
        # is highlighted in the rendered timeline.
        precision = req.args.get('precision', '')
        if precision.startswith('second'):
            precision = timedelta(seconds=1)
        elif precision.startswith('minute'):
            precision = timedelta(minutes=1)
        elif precision.startswith('hour'):
            precision = timedelta(hours=1)
        else:
            precision = None
    # Extend to 23:59:59.999999 so the whole day is included.
    fromdate = to_datetime(datetime(fromdate.year, fromdate.month,
                                    fromdate.day, 23, 59, 59, 999999),
                           req.tz)

    pref = req.session.as_int('timeline.daysback', self.default_daysback)
    default = 90 if format == 'rss' else pref
    daysback = req.args.as_int('daysback', default,
                               min=1, max=self.max_daysback)

    authors = req.args.get('authors')
    if authors is None and format != 'rss':
        authors = req.session.get('timeline.authors')
    authors = (authors or '').strip()

    data = {'fromdate': fromdate, 'daysback': daysback,
            'authors': authors, 'today': today, 'yesterday': yesterday,
            'precisedate': precisedate, 'precision': precision,
            'events': [], 'filters': [],
            'abbreviated_messages': self.abbreviated_messages}

    # Collect the filters offered by every event provider; the guard
    # isolates failures of a single provider.
    available_filters = []
    for event_provider in self.event_providers:
        with component_guard(self.env, req, event_provider):
            available_filters += (event_provider.get_timeline_filters(req)
                                  or [])

    # check the request or session for enabled filters, or use default
    filters = [f[0] for f in available_filters if f[0] in req.args]
    if not filters and format != 'rss':
        filters = [f[0] for f in available_filters
                   if req.session.as_int('timeline.filter.' + f[0])]
    if not filters:
        filters = [f[0] for f in available_filters
                   if len(f) == 2 or f[2]]

    # save the results of submitting the timeline form to the session
    if 'update' in req.args:
        for filter_ in available_filters:
            key = 'timeline.filter.%s' % filter_[0]
            if filter_[0] in req.args:
                req.session[key] = '1'
            elif key in req.session:
                del req.session[key]

    stop = fromdate
    start = to_datetime(stop.replace(tzinfo=None) -
                        timedelta(days=daysback + 1), req.tz)

    # create author include and exclude sets
    include = set()
    exclude = set()
    for match in self._authors_pattern.finditer(authors):
        name = (match.group(2) or match.group(3) or
                match.group(4)).lower()
        if match.group(1):
            exclude.add(name)
        else:
            include.add(name)

    # gather all events for the given period of time
    events = []
    for provider in self.event_providers:
        with component_guard(self.env, req, provider):
            for event in provider.get_timeline_events(req, start, stop,
                                                      filters) or []:
                author = (event[2] or '').lower()
                if ((not include or author in include) and
                        author not in exclude):
                    events.append(
                        self._event_data(req, provider, event, lastvisit))

    # prepare sorted global list
    events = sorted(events, key=lambda e: e['datetime'], reverse=True)
    if maxrows:
        events = events[:maxrows]

    data['events'] = events

    if format == 'rss':
        rss_context = web_context(req, absurls=True)
        rss_context.set_hints(wiki_flavor='html', shorten_lines=False)
        data['context'] = rss_context
        return 'timeline.rss', data, {'content_type': 'application/rss+xml'}
    else:
        req.session.set('timeline.daysback', daysback,
                        self.default_daysback)
        req.session.set('timeline.authors', authors, '')
        # store lastvisit
        if events and not revisit:
            lastviewed = to_utimestamp(events[0]['datetime'])
            req.session['timeline.lastvisit'] = max(lastvisit, lastviewed)
            req.session['timeline.nextlastvisit'] = lastvisit
        html_context = web_context(req)
        html_context.set_hints(wiki_flavor='oneliner',
                               shorten_lines=self.abbreviated_messages)
        data['context'] = html_context

    add_stylesheet(req, 'common/css/timeline.css')
    rss_href = req.href.timeline([(f, 'on') for f in filters],
                                 daysback=90, max=50, authors=authors,
                                 format='rss')
    add_link(req, 'alternate', auth_link(req, rss_href), _('RSS Feed'),
             'application/rss+xml', 'rss')
    Chrome(self.env).add_jquery_ui(req)

    for filter_ in available_filters:
        data['filters'].append({'name': filter_[0], 'label': filter_[1],
                                'enabled': filter_[0] in filters})

    # Navigation to the previous/next period of 'daysback' days
    previous_start = fromdate.replace(tzinfo=None) - \
                     timedelta(days=daysback + 1)
    previous_start = format_date(previous_start, format='iso8601',
                                 tzinfo=req.tz)
    add_link(req, 'prev', req.href.timeline(from_=previous_start,
                                            authors=authors,
                                            daysback=daysback),
             _("Previous Period"))
    if today - fromdate > timedelta(days=0):
        next_start = fromdate.replace(tzinfo=None) + \
                     timedelta(days=daysback + 1)
        next_start = format_date(to_datetime(next_start, req.tz),
                                 format='iso8601', tzinfo=req.tz)
        add_link(req, 'next', req.href.timeline(from_=next_start,
                                                authors=authors,
                                                daysback=daysback),
                 _("Next Period"))
    prevnext_nav(req, _("Previous Period"), _("Next Period"))

    return 'timeline.html', data
def _do_list(self):
    """Print a console table of all versions and their optional dates."""
    rows = []
    for version in model.Version.select(self.env):
        when = None
        if version.time:
            when = format_date(version.time, console_date_format)
        rows.append((version.name, when))
    print_table(rows, [_("Name"), _("Time")])
def stop_work(self, stoptime=None, comment=''):
    """Close the caller's currently active work-log entry.

    ``stoptime`` is an epoch-seconds end time; when omitted, one second
    before ``self.now`` is used.  ``comment`` is an optional work-log
    comment.  Returns True on success; on failure returns False with
    the reason stored in ``self.explanation``.  Depending on
    configuration, also records the worked hours on the ticket and/or
    leaves a ticket comment.
    """
    active = self.get_active_task()
    if not active:
        self.explanation = 'You cannot stop working as you appear to be a complete slacker already!'
        return False

    # An explicit stop time must fall strictly between the entry's
    # start time and "now".
    if stoptime:
        if stoptime <= active['starttime']:
            self.explanation = 'You cannot set your stop time to that value as it is before the start time!'
            return False
        elif stoptime >= self.now:
            self.explanation = 'You cannot set your stop time to that value as it is in the future!'
            return False
    else:
        # Default: one second ago, so the entry never ends at "now".
        stoptime = self.now - 1
    stoptime = float(stoptime)

    # Close the open entry; endtime=0 marks an entry as still active.
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute(
        'UPDATE work_log '
        'SET endtime=%s, lastchange=%s, comment=%s '
        'WHERE worker=%s AND lastchange=%s AND endtime=0',
        (stoptime, stoptime, comment, self.authname, active['lastchange']))
    db.commit()

    # Plugin integration flags: Timing&Estimation ('hours' custom
    # field) and TracHours ('totalhours' custom field).  Each is truthy
    # only when both the worklog option and the custom field exist.
    plugtne = self.config.getbool(
        'worklog', 'timingandestimation') and self.config.get(
            'ticket-custom', 'hours')
    plughrs = self.config.getbool('worklog',
                                  'trachoursplugin') and self.config.get(
                                      'ticket-custom', 'totalhours')

    message = ''
    hours = '0.0'
    # Leave a comment if the user has configured this or if they have entered
    # a work log comment.
    if plugtne or plughrs:
        # Rounding granularity in minutes (at least 1).
        round_delta = float(self.config.getint('worklog', 'roundup') or 1)

        # Get the delta in minutes
        delta = float(int(stoptime) - int(active['starttime'])) / float(60)

        # Round up if needed
        delta = int(
            round((delta / round_delta) + float(0.5))) * int(round_delta)

        # This hideous hack is here because I don't yet know how to do
        # variable-DP rounding in python - sorry!
        # It's meant to round to 2 DP, so please replace it if you know
        # how. Many thanks, MK.
        hours = str(float(int(100 * float(delta) / 60) / 100.0))

        if plughrs:
            message = 'Hours recorded automatically by the worklog plugin. %s hours' % hours
        elif self.config.getbool('worklog', 'comment') or comment:
            started = datetime.fromtimestamp(active['starttime'])
            finished = datetime.fromtimestamp(stoptime)
            message = '%s worked on this ticket for %s between %s %s and %s %s.' % \
                      (self.authname, pretty_timedelta(started, finished), \
                       format_date(active['starttime']),
                       format_time(active['starttime']), \
                       format_date(stoptime), format_time(stoptime))

    if comment:
        message += "\n[[BR]]\n" + comment

    if plugtne or plughrs:
        if not message:
            message = 'Hours recorded automatically by the worklog plugin.'
        tckt = Ticket(self.env, active['ticket'])
        if plugtne:
            # Timing&Estimation accumulates time from the 'hours' field.
            tckt['hours'] = hours
        self.save_ticket(tckt, message)
        # Clear the message to avoid a duplicate save below.
        message = ''

    if message:
        tckt = Ticket(self.env, active['ticket'])
        self.save_ticket(tckt, message)

    return True
def get_work_log(self, mode='all'):
    """Return work-log records joined with ticket and display names.

    *mode* selects the query: ``'user'`` limits rows to the current
    user, ``'summary'`` keeps only each worker's most recent entry,
    and any other value returns every entry.  Each record is a dict
    with the raw timestamps plus a human-readable ``delta`` text.
    """
    fields = ('SELECT wl.worker, s.value, wl.starttime, wl.endtime, '
              'wl.ticket, t.summary, t.status, wl.comment ')
    name_join = ("LEFT JOIN session_attribute s "
                 "ON wl.worker=s.sid AND s.name='name' ")

    db = self.env.get_db_cnx()
    cursor = db.cursor()
    if mode == 'user':
        cursor.execute(
            fields +
            'FROM work_log wl '
            'INNER JOIN ticket t ON wl.ticket=t.id ' +
            name_join +
            'WHERE wl.worker=%s '
            'ORDER BY wl.lastchange DESC', (self.authname, ))
    elif mode == 'summary':
        # Inner query picks each worker's latest entry only.
        cursor.execute(
            fields +
            'FROM (SELECT worker,MAX(lastchange) AS lastchange '
            'FROM work_log GROUP BY worker) wlt '
            'INNER JOIN work_log wl ON wlt.worker=wl.worker '
            'AND wlt.lastchange=wl.lastchange '
            'INNER JOIN ticket t ON wl.ticket=t.id ' +
            name_join +
            'ORDER BY wl.lastchange DESC, wl.worker')
    else:
        cursor.execute(
            fields +
            'FROM work_log wl '
            'INNER JOIN ticket t ON wl.ticket=t.id ' +
            name_join +
            'ORDER BY wl.lastchange DESC, wl.worker')

    entries = []
    for (user, name, starttime, endtime, ticket, summary, status,
         comment) in cursor:
        starttime = float(starttime)
        endtime = float(endtime)
        started = datetime.fromtimestamp(starttime)

        # Prefer the session's full name, falling back to the login.
        dispname = '%s (%s)' % (name, user) if name else user

        if endtime != 0:
            # Completed entry: describe the full working interval.
            finished = datetime.fromtimestamp(endtime)
            delta = 'Worked for %s (between %s %s and %s %s)' % \
                    (pretty_timedelta(started, finished),
                     format_date(starttime), format_time(starttime),
                     format_date(endtime), format_time(endtime))
        else:
            # endtime of 0 marks an entry that is still open.
            delta = 'Started %s ago (%s %s)' % \
                    (pretty_timedelta(started),
                     format_date(starttime), format_time(starttime))

        entries.append({
            'user': user,
            'name': name,
            'dispname': dispname,
            'starttime': int(starttime),
            'endtime': int(endtime),
            'delta': delta,
            'ticket': ticket,
            'summary': summary,
            'status': status,
            'comment': comment
        })
    return entries
def expand_macro(self, formatter, name, arguments):
    """Render the [[WikiCalendar]] / [[WikiTicketCalendar]] macro.

    Builds an HTML month calendar; each day cell links to a wiki page
    named after `wiki_page_format` and (for WikiTicketCalendar) lists
    tickets due or opened on that day plus milestones due that day.
    `name` selects which of the two macro variants is being rendered.
    """
    self.ref = formatter
    self.tz_info = formatter.req.tz
    self.thistime = datetime.datetime.now(self.tz_info)

    # Parse arguments from macro invocation
    args, kwargs = parse_args(arguments, strict=False)

    # Find out whether use http param, current or macro param year/month
    http_param_year = formatter.req.args.get('year', '')
    http_param_month = formatter.req.args.get('month', '')

    if http_param_year == "":
        # not clicked on a prev or next button
        if len(args) >= 1 and args[0] <> "*":
            # year given in macro parameters
            year = int(args[0])
        else:
            # use current year
            year = self.thistime.year
    else:
        # year in http params (clicked by user) overrides everything
        year = int(http_param_year)

    if http_param_month == "":
        # not clicked on a prev or next button
        if len(args) >= 2 and args[1] <> "*":
            # month given in macro parameters
            month = int(args[1])
        else:
            # use current month
            month = self.thistime.month
    else:
        # month in http params (clicked by user) overrides everything
        month = int(http_param_month)

    # Positional arg 3 / kwarg 'nav': show prev/next navigation buttons.
    showbuttons = True
    if len(args) >= 3 or kwargs.has_key('nav'):
        try:
            showbuttons = kwargs['nav'] in ["True", "true", "yes", "1"]
        except KeyError:
            showbuttons = args[2] in ["True", "true", "yes", "1"]

    # Positional arg 4 / kwarg 'wiki': strftime pattern for day page names.
    wiki_page_format = "%Y-%m-%d"
    if len(args) >= 4 and args[3] != "*" or kwargs.has_key('wiki'):
        try:
            wiki_page_format = str(kwargs['wiki'])
        except KeyError:
            wiki_page_format = str(args[3])
    # Support relative paths in macro arguments for wiki page links.
    wiki_page_format = self._resolve_relative_name(wiki_page_format,
                                                   formatter.resource.id)

    list_condense = 0
    show_t_open_dates = True
    wiki_page_template = ""
    wiki_subpages = []

    # Read optional check plan.
    check = []
    if kwargs.has_key('check'):
        check = kwargs['check'].split('.')

    if name == 'WikiTicketCalendar':
        # kwarg 'cdate' / positional arg 5: also show tickets by open date.
        if len(args) >= 5 or kwargs.has_key('cdate'):
            try:
                show_t_open_dates = kwargs['cdate'] in \
                    ["True", "true", "yes", "1"]
            except KeyError:
                show_t_open_dates = args[4] in \
                    ["True", "true", "yes", "1"]

    # Optional page template to create new wiki pages.
    # The default (empty page) is used, if the template name is invalid.
    if len(args) >= 6 or kwargs.has_key('base'):
        try:
            wiki_page_template = kwargs['base']
        except KeyError:
            wiki_page_template = args[5]

    if name == 'WikiTicketCalendar':
        # TracQuery support for ticket selection
        query_args = "id!=0"
        if len(args) >= 7 or kwargs.has_key('query'):
            # prefer query arguments provided by kwargs
            try:
                query_args = kwargs['query']
            except KeyError:
                query_args = args[6]
        tickets = WikiCalendarTicketProvider(self.env)
        self.tickets = tickets.harvest(formatter.req, query_args)

        # compress long ticket lists
        if len(args) >= 8 or kwargs.has_key('short'):
            # prefer query arguments provided by kwargs
            try:
                list_condense = int(kwargs['short'])
            except KeyError:
                list_condense = int(args[7])

        # control calendar display width
        cal_width = "100%;"
        if len(args) >= 9 or kwargs.has_key('width'):
            # prefer query arguments provided by kwargs
            try:
                cal_width = kwargs['width']
            except KeyError:
                cal_width = args[8]

        # multiple wiki (sub)pages per day
        if kwargs.has_key('subpages'):
            wiki_subpages = kwargs['subpages'].split('|')

    # Can use this to change the day the week starts on,
    # but this is a system-wide setting.
    calendar.setfirstweekday(calendar.MONDAY)
    cal = calendar.monthcalendar(year, month)

    curr_day = None
    if year == self.thistime.year and month == self.thistime.month:
        curr_day = self.thistime.day

    # for prev/next navigation links
    prevMonth = month - 1
    nextMonth = month + 1
    nextYear = prevYear = year
    # check for year change (KISS version)
    if prevMonth == 0:
        prevMonth = 12
        prevYear -= 1
    if nextMonth == 13:
        nextMonth = 1
        nextYear += 1

    # for fast-forward/-rewind navigation links (+/- 3 months)
    ffYear = frYear = year
    if month < 4:
        frMonth = month + 9
        frYear -= 1
    else:
        frMonth = month - 3
    if month > 9:
        ffMonth = month - 9
        ffYear += 1
    else:
        ffMonth = month + 3

    last_week_prevMonth = calendar.monthcalendar(prevYear, prevMonth)[-1]
    first_week_nextMonth = calendar.monthcalendar(nextYear, nextMonth)[0]

    # Switch to user's locale, if available.
    try:
        loc_req = str(formatter.req.locale)
    except AttributeError:
        # Available since in Trac 0.12 .
        loc_req = None
    if loc_req:
        loc = locale.getlocale()
        loc_prop = locale.normalize(loc_req)
        try:
            locale.setlocale(locale.LC_TIME, loc_prop)
        except locale.Error:
            try:
                # Re-try with UTF-8 as last resort.
                loc_prop = '.'.join([loc_prop.split('.')[0], 'utf8'])
                locale.setlocale(locale.LC_TIME, loc_prop)
            except locale.Error:
                loc_prop = None
        self.env.log.debug('Locale setting for calendar: ' + str(loc_prop))

    # Finally building the output
    # Begin with caption and optional navigation links
    buff = tag.tr()

    if showbuttons is True:
        # calendar navigation buttons
        nx = 'next'
        pv = 'prev'
        nav_pvY = self._mknav('<<', pv, month, year - 1)
        nav_frM = self._mknav(' <', pv, frMonth, frYear)
        nav_pvM = self._mknav(' «', pv, prevMonth, prevYear)
        nav_nxM = self._mknav('» ', nx, nextMonth, nextYear)
        nav_ffM = self._mknav('> ', nx, ffMonth, ffYear)
        nav_nxY = self._mknav('>>', nx, month, year + 1)

        # add buttons for going to previous months and year
        buff(nav_pvY, nav_frM, nav_pvM)

    # The caption will always be there.
    heading = tag.td(
        to_unicode(format_date(self._mkdatetime(year, month), '%B %Y')))
    buff = buff(heading(class_='y'))

    if showbuttons is True:
        # add buttons for going to next months and year
        buff(nav_nxM, nav_ffM, nav_nxY)

    buff = tag.caption(tag.table(tag.tbody(buff)))
    buff = tag.table(buff)
    if name == 'WikiTicketCalendar':
        if cal_width.startswith('+') is True:
            width = ":".join(['min-width', cal_width])
            buff(class_='wikitcalendar', style=width)
        else:
            buff(class_='wikitcalendar')
    if name == 'WikiCalendar':
        buff(class_='wiki-calendar')

    # Weekday header row: workdays first, then the two weekend columns.
    heading = tag.tr()
    heading(align='center')
    for day in calendar.weekheader(2).split()[:-2]:
        col = tag.th(to_unicode(day))
        col(class_='workday', scope='col')
        heading(col)
    for day in calendar.weekheader(2).split()[-2:]:
        col = tag.th(to_unicode(day))
        col(class_='weekend', scope='col')
        heading(col)
    heading = buff(tag.thead(heading))

    # Building main calendar table body
    buff = tag.tbody()
    w = -1
    for week in cal:
        w = w + 1
        line = tag.tr()
        line(align='right')
        d = -1
        for day in week:
            d = d + 1
            if day:
                # check for wikipage with name specified in
                # 'wiki_page_format'
                wiki = format_date(self._mkdatetime(year, month, day),
                                   wiki_page_format)
                if day == curr_day:
                    a_class = 'day today'
                    td_class = 'today'
                else:
                    a_class = 'day'
                    td_class = 'day'

                # Day bounds as timestamps; `uts` selects microsecond
                # (Trac >= 0.12) vs second resolution.
                day_dt = self._mkdatetime(year, month, day)
                if uts:
                    day_ts = to_utimestamp(day_dt)
                    day_ts_eod = day_ts + 86399999999
                else:
                    day_ts = to_timestamp(day_dt)
                    day_ts_eod = day_ts + 86399

                # check for milestone(s) on that day
                db = self.env.get_db_cnx()
                cursor = db.cursor()
                cursor.execute("""
                    SELECT name
                      FROM milestone
                     WHERE due >= %s and due <= %s
                """, (day_ts, day_ts_eod))
                milestones = tag()
                for row in cursor:
                    if not a_class.endswith('milestone'):
                        a_class += ' milestone'
                    milestone = to_unicode(row[0])
                    url = self.env.href.milestone(milestone)
                    milestone = '* ' + milestone
                    milestones = tag(
                        milestones,
                        tag.div(tag.a(milestone, href=url),
                                class_='milestone'))
                day = tag.span(day)
                day(class_='day')

                # Link the day number to its wiki page(s).
                if len(wiki_subpages) > 0:
                    pages = tag(day, Markup('<br />'))
                    for page in wiki_subpages:
                        label = tag(' ', page[0])
                        page = '/'.join([wiki, page])
                        url = self.env.href.wiki(page)
                        pages(
                            self._gen_wiki_links(page, label, 'subpage',
                                                 url, wiki_page_template,
                                                 check))
                else:
                    url = self.env.href.wiki(wiki)
                    pages = self._gen_wiki_links(wiki, day, a_class, url,
                                                 wiki_page_template, check)

                cell = tag.td(pages)
                cell(class_=td_class, valign='top')
                if name == 'WikiCalendar':
                    line(cell)
                else:
                    if milestones:
                        cell(milestones)
                    else:
                        cell(tag.br())

                    match = []
                    match_od = []
                    ticket_heap = tag('')
                    ticket_list = tag.div('')
                    ticket_list(align='left', class_='condense')

                    # get tickets with due date set to day
                    for t in self.tickets:
                        due = t.get(self.tkt_due_field)
                        if due is None or due in ['', '--']:
                            continue
                        else:
                            if self.tkt_due_format == 'ts':
                                if not isinstance(due, datetime.datetime):
                                    continue
                                if uts:
                                    due_ts = to_utimestamp(due)
                                else:
                                    due_ts = to_timestamp(due)
                                if due_ts < day_ts or due_ts > day_ts_eod:
                                    continue
                            else:
                                # Beware: Format might even be unicode str
                                duedate = format_date(
                                    day_dt, str(self.tkt_due_format))
                                if not due == duedate:
                                    continue

                        id = t.get('id')
                        ticket, short = self._gen_ticket_entry(t)
                        ticket_heap(ticket)
                        if not id in match:
                            if len(match) == 0:
                                ticket_list(short)
                            else:
                                ticket_list(', ', short)
                            match.append(id)

                    # optionally get tickets created on day
                    if show_t_open_dates is True:
                        ticket_od_list = tag.div('')
                        ticket_od_list(align='left',
                                       class_='opendate_condense')

                        for t in self.tickets:
                            if uts:
                                ticket_ts = to_utimestamp(t.get('time'))
                            else:
                                ticket_ts = to_timestamp(t.get('time'))
                            if ticket_ts < day_ts or \
                                    ticket_ts > day_ts_eod:
                                continue

                            a_class = 'opendate_'
                            id = t.get('id')
                            ticket, short = self._gen_ticket_entry(
                                t, a_class)
                            ticket_heap(ticket)
                            # NOTE(review): checks membership in `match`
                            # but appends to `match_od` — looks like the
                            # intent was `if not id in match_od`; confirm
                            # before changing.
                            if not id in match:
                                if len(match_od) == 0:
                                    ticket_od_list(short)
                                else:
                                    ticket_od_list(', ', short)
                                match_od.append(id)

                    # Condensed rendering once the day holds too many
                    # tickets (see `list_condense` / 'short' option).
                    matches = len(match) + len(match_od)
                    if list_condense > 0 and matches >= list_condense:
                        if len(match_od) > 0:
                            if len(match) > 0:
                                ticket_list(', ')
                            ticket_list = tag(ticket_list, ticket_od_list)
                        line(cell(ticket_list))
                    else:
                        line(cell(ticket_heap))
            else:
                # Day cell belonging to the previous/next month.
                if name == 'WikiCalendar':
                    if w == 0:
                        day = last_week_prevMonth[d]
                        wiki = format_date(
                            self._mkdatetime(prevYear, prevMonth, day),
                            wiki_page_format)
                    else:
                        day = first_week_nextMonth[d]
                        wiki = format_date(
                            self._mkdatetime(nextYear, nextMonth, day),
                            wiki_page_format)
                    url = self.env.href.wiki(wiki)
                    a_class = 'day adjacent_month'
                    pages = self._gen_wiki_links(wiki, day, a_class, url,
                                                 wiki_page_template)
                    cell = tag.td(pages)
                    cell(class_='day adjacent_month')
                    line(cell)
                else:
                    cell = tag.td('')
                    cell(class_='day adjacent_month')
                    line(cell)
        buff(line)

    if loc_req and loc_prop:
        # We may have switched to users locale, resetting now.
        try:
            locale.setlocale(locale.LC_ALL, loc)
            self.env.log.debug('Locale setting restored: ' + str(loc))
        except locale.Error:
            pass

    buff = tag.div(heading(buff))
    if name == 'WikiTicketCalendar':
        if cal_width.startswith('+') is True:
            width = ":".join(['width', cal_width])
            buff(class_='wikitcalendar', style=width)
        else:
            buff(class_='wikitcalendar')
    if name == 'WikiCalendar':
        buff(class_='wiki-calendar')

    # Add common CSS stylesheet
    if self.internal_css and not self.ref.req.args.get('wikicalendar'):
        # Put definitions directly into the output.
        # NOTE(review): 'Ur' open mode is deprecated — confirm on upgrade.
        f = open('/'.join([self.htdocs_path, 'wikicalendar.css']), 'Ur')
        css = tag.style(Markup('<!--\n'), '\n'.join(f.readlines()),
                        Markup('-->\n'))(type="text/css")
        f.close()
        # Add hint to prevent multiple inclusions.
        self.ref.req.args['wikicalendar'] = True
        return tag(css, buff)
    elif not self.ref.req.args.get('wikicalendar'):
        add_stylesheet(self.ref.req, 'wikicalendar/wikicalendar.css')
    return buff
def test_session_add_sid_name(self): rv, output = self.execute('session add john John') self.assertEqual(0, rv, output) rv, output = self.execute('session list john') self.assertExpectedResult( output, {'today': format_date(None, console_date_format)})
def _render_view(self, req, db, milestone):
    """Assemble the template data for the milestone dashboard view.

    :param req: the HTTP request (grouping field read from `by` arg)
    :param db: database connection
    :param milestone: the Milestone instance being viewed
    :return: (template_name, data_dict, content_type) tuple
    """
    available_groups = []
    component_group_available = False
    ticket_fields = TicketSystem(self.env).get_ticket_fields()

    # collect fields that can be used for grouping
    for field in ticket_fields:
        if field['type'] == 'select' and field['name'] != 'milestone' \
                or field['name'] in ('owner', 'reporter'):
            available_groups.append({'name': field['name'],
                                     'label': field['label']})
            if field['name'] == 'component':
                component_group_available = True

    # determine the field currently used for grouping
    by = None
    if component_group_available:
        by = 'component'
    elif available_groups:
        by = available_groups[0]['name']
    by = req.args.get('by', by)

    tickets = get_tickets_for_milestone(self.env, db, milestone.name, by)
    stat = get_ticket_stats(self.stats_provider, tickets)
    tstat = get_ticket_stats(self.tickettype_stats_provider, tickets)

    # Parse the from date and adjust the timestamp to the last second of
    # the day
    today = to_datetime(None, req.tz)

    # Get milestone start date from session or use default day back.
    # TODO: add logic to remember the start date either in db or session.
    fromdate = today - timedelta(days=self.default_daysback + 1)
    fromdate = fromdate.replace(hour=23, minute=59, second=59)

    # Data for milestone and timeline
    data = {'fromdate': fromdate,
            'milestone': milestone,
            'tickethistory': [],
            'dates': [],
            'ticketstat': {},
            'yui_base_url': self.yui_base_url}
    data.update(milestone_stats_data(self.env, req, stat, milestone.name))

    ticketstat = {'name': 'ticket type'}
    ticketstat.update(
        milestone_stats_data(self.env, req, tstat, milestone.name))
    data['ticketstat'] = ticketstat

    # get list of ticket ids that are in the milestone
    everytickets = get_every_tickets_in_milestone(db, milestone.name)

    if everytickets:
        tkt_history = collect_tickets_status_history(
            self.env, db, everytickets, milestone)

        if tkt_history:
            # Sort history events chronologically by timestamp key.
            # (Was a Python-2-only tuple-unpacking lambda.)
            sorted_events = sorted(tkt_history.items(),
                                   key=lambda item: item[0])

            self.env.log.info("sorted_event content")
            for event in sorted_events:
                self.env.log.info("date: %s: event: %s" % (
                    format_date(to_datetime(event[0])), event[1]))

            # Get first date that ticket enter the milestone (epoch secs)
            min_time = min(sorted_events)[0]
            begin_date = to_datetime(min_time).date()
            end_date = milestone.completed or to_datetime(None).date()

            # this is array of date in numpy
            numdates = drange(begin_date, end_date + timedelta(days=1),
                              timedelta(days=1))

            tkt_history_table = make_ticket_history_table(
                self.env, numdates, sorted_events)

            # Create the data for the cumulative flow chart.
            tkt_cumulative_table = make_cumulative_data(
                self.env, tkt_history_table)

            # create list of date objects from the numpy dates
            dates = [num2date(numdate) for numdate in numdates]

            # Prepare Yahoo datasource for cumulative flow chart; build
            # with join() instead of repeated string concatenation.
            rows = ['{ date: "%s", enter: %d, leave: %d, finish: %d}, '
                    % (format_date(date, tzinfo=utc),
                       tkt_cumulative_table['Enter'][idx],
                       tkt_cumulative_table['Leave'][idx],
                       tkt_cumulative_table['Finish'][idx])
                    for idx, date in enumerate(dates)]
            dscumulative = ''.join(rows)

            data['tickethistory'] = tkt_cumulative_table
            data['dates'] = dates
            data['dscumulative'] = '[ ' + dscumulative + ' ];'

    return 'mdashboard.html', data, None
class TimelineModule(Component):
    """Legacy (ClearSilver-template, Chinese-localized) timeline module.

    Renders recent events from all registered ITimelineEventProvider
    components as an HTML page or an RSS feed.
    """
    implements(INavigationContributor, IPermissionRequestor, IRequestHandler)

    event_providers = ExtensionPoint(ITimelineEventProvider)

    default_daysback = IntOption(
        'timeline', 'default_daysback', 30,
        """Default number of days displayed in the Timeline, in days.
        (''since 0.9.'')""")

    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        return 'timeline'

    def get_navigation_items(self, req):
        if not req.perm.has_permission('TIMELINE_VIEW'):
            return
        # Label is localized Chinese for "Timeline".
        yield ('mainnav', 'timeline',
               html.A(u'时间线索', href=req.href.timeline(), accesskey=2))

    # IPermissionRequestor methods

    def get_permission_actions(self):
        return ['TIMELINE_VIEW']

    # IRequestHandler methods

    def match_request(self, req):
        return re.match(r'/timeline/?', req.path_info) is not None

    def process_request(self, req):
        req.perm.assert_permission('TIMELINE_VIEW')

        # NOTE: `format` shadows the builtin; kept for byte-compatibility.
        format = req.args.get('format')
        maxrows = int(req.args.get('max', 0))

        # Parse the from date and adjust the timestamp to the last second of
        # the day
        t = time.localtime()
        if req.args.has_key('from'):
            try:
                t = time.strptime(req.args.get('from'), '%x')
            except:
                # Unparsable input: silently fall back to "today".
                pass

        fromdate = time.mktime(
            (t[0], t[1], t[2], 23, 59, 59, t[6], t[7], t[8]))
        try:
            daysback = max(0, int(req.args.get('daysback', '')))
        except ValueError:
            daysback = self.default_daysback
        req.hdf['timeline.from'] = format_date(fromdate)
        req.hdf['timeline.daysback'] = daysback

        available_filters = []
        for event_provider in self.event_providers:
            available_filters += event_provider.get_timeline_filters(req)

        filters = []
        # check the request or session for enabled filters, or use default
        for test in (lambda f: req.args.has_key(f[0]),
                     lambda f: req.session.get('timeline.filter.%s' % f[0],
                                               '') == '1',
                     lambda f: len(f) == 2 or f[2]):
            if filters:
                break
            filters = [f[0] for f in available_filters if test(f)]

        # save the results of submitting the timeline form to the session
        if req.args.has_key('update'):
            for filter in available_filters:
                key = 'timeline.filter.%s' % filter[0]
                if req.args.has_key(filter[0]):
                    req.session[key] = '1'
                elif req.session.has_key(key):
                    del req.session[key]

        stop = fromdate
        start = stop - (daysback + 1) * 86400

        events = []
        for event_provider in self.event_providers:
            try:
                events += event_provider.get_timeline_events(
                    req, start, stop, filters)
            except Exception, e:
                # cope with a failure of that provider
                self._provider_failure(e, req, event_provider, filters,
                                       [f[0] for f in available_filters])

        # Newest first (event[3] is the timestamp).
        events.sort(lambda x, y: cmp(y[3], x[3]))
        if maxrows and len(events) > maxrows:
            del events[maxrows:]

        # Page title (translated from: 网页的标题)
        req.hdf['title'] = 'Timeline'

        # Get the email addresses of all known users
        email_map = {}
        for username, name, email in self.env.get_known_users():
            if email:
                email_map[username] = email

        idx = 0
        for kind, href, title, date, author, message in events:
            event = {'kind': kind, 'title': title, 'href': href,
                     'author': author or 'anonymous',
                     'date': format_date(date),
                     'time': format_time(date, '%H:%M'),
                     'dateuid': int(date),
                     'message': message}

            if format == 'rss':
                # Strip/escape HTML markup
                if isinstance(title, Markup):
                    title = title.plaintext(keeplinebreaks=False)
                event['title'] = title
                event['message'] = to_unicode(message)

                if author:
                    # For RSS, author must be an email address
                    if author.find('@') != -1:
                        event['author.email'] = author
                    elif email_map.has_key(author):
                        event['author.email'] = email_map[author]
                event['date'] = http_date(date)

            req.hdf['timeline.events.%s' % idx] = event
            idx += 1

        if format == 'rss':
            return 'timeline_rss.cs', 'application/rss+xml'

        add_stylesheet(req, 'common/css/timeline.css')
        rss_href = req.href.timeline([(f, 'on') for f in filters],
                                     daysback=90, max=50, format='rss')
        add_link(req, 'alternate', rss_href, 'RSS Feed',
                 'application/rss+xml', 'rss')

        # Chinese display labels for the built-in filters
        # (milestone, updated tickets, svn repository changes).
        ZhAvailable_filters = {'milestone': u'里程碑',
                               'ticket': u'被更新的传票',
                               'changeset': u'svn库更新'}
        for idx, fltr in enumerate(available_filters):
            req.hdf['timeline.filters.%d' % idx] = {
                'name': fltr[0],
                'label': ZhAvailable_filters.get(fltr[0], fltr[1]),
                'enabled': int(fltr[0] in filters)}

        return 'timeline.cs', None
def test_format_date_accepts_date_instances(self): a_date = datetime.date(2009, 8, 20) self.assertEqual('2009-08-20', datefmt.format_date(a_date, format='%Y-%m-%d'))
def write_date(name, value, params={}): params['VALUE'] = 'DATE' write_prop(name, format_date(value, '%Y%m%d', req.tz), params)
def myformat_date(dte): if dte: return format_date(dte, '%e %b %Y') return "No date set"
def process_request(self, req): req.perm.assert_permission('NARCISSUS_VIEW') img = req.args.get('img', None) if img: self._load_config() img_path = os.path.join(self.cache_dir, img) return req.send_file(img_path, mimeview.get_mimetype(img_path)) ### add_stylesheet(req, 'nar/css/narcissus.css') ### add_script(req, 'nar/js/narcissus.js') params = {} params['page'] = 'narcissus' params['href_narcissus'] = self.env.href.narcissus() params['href_configure'] = self.env.href.narcissus('configure') params['href_user_guide'] = self.env.href.narcissus('user_guide') params['error'] = None params['msg'] = "" self.db = self.env.get_db_cnx() self._settings = NarcissusSettings(self.db) # Ensure Narciussus has been configured to incluude some group members if not self._settings.members: params[ 'msg'] = '''No group members have been selected for visualisation. Please add group members using the configuration page.''' return 'narcissus.xhtml', params, None # Parse the from date and adjust the timestamp to the last second of # the day (taken from Timeline.py, (c) Edgewall Software) t = time.localtime() if req.args.has_key('from'): try: t = time.strptime(req.args.get('from'), '%x') except: pass fromdate = time.mktime( (t[0], t[1], t[2], 23, 59, 59, t[6], t[7], t[8])) try: daysback = max(0, int(req.args.get('daysback', ''))) except ValueError: daysback = 14 # Default value of one fortnight params['date_end'] = fromdate params['date_from'] = format_date(fromdate) params['date_daysback'] = daysback self._update_data(req) self._create_legend(req, params) trouble, msg = self._get_font() if trouble: params['error'] = msg.getvalue() else: view = req.args.get('view', 'group') params['view'] = view if view == 'group': self._draw_group(req, params) elif view == 'project': self._draw_project(req, params), elif view == 'ticket': self._draw_ticket(req, params) #print>>sys.stderr, 'return request' return 'narcissus.xhtml', params, None
def expand_macro(self, formatter, name, args): """ @param formatter: @param name: @param args: @return: """ args = _parse_args(args) args = _get_args_defaults(formatter.env, args) d_date_range = args["daterange"].split(";") if len(d_date_range) == 1: d_date_range.append("") from_date = _parse_relative_time(d_date_range[0] or "10y", utc) at_date = _parse_relative_time(d_date_range[1] or "now", utc) graph_res = int(args["res_days"]) if "query" in args: query = args["query"] query_object = Query.from_string(self.env, query) sql_format_string, format_string_arguments = query_object.get_sql() # Hack to remove extra columns, I don't know another way to do it sql_format_string = "SELECT t.id " + \ sql_format_string[ sql_format_string.index("FROM ticket"):] ticketFilter = "AND t.id IN (%s)" % \ (sql_format_string % tuple(format_string_arguments)) else: ticketFilter = "" chart_title = args["title"] req = formatter.req count = [] # Calculate 0th point last_date = from_date - timedelta(graph_res) last_num_open = self._get_num_open_tix(last_date, req, ticketFilter) # Calculate remaining points for cur_date in date_range(from_date, at_date, graph_res): num_open = self._get_num_open_tix(cur_date, req, ticketFilter) num_closed = self._get_num_closed_tix(last_date, cur_date, req, ticketFilter) date = format_date(cur_date) if graph_res != 1: date = "%s thru %s" % (format_date(last_date), date) count.append({ 'date': date, 'new': num_open - last_num_open + num_closed, 'closed': num_closed, 'open': num_open}) last_num_open = num_open last_date = cur_date chart_data = ", \n".join(['{date: \'%(date)s\', new_tickets: %(new)d, ' 'closed: %(closed)d, open: %(open)d}' % d for d in count]) data = { 'chart_title': chart_title, 'chart_data': chart_data, 'height': args['height'], 'column_width': args['column_width'], 'id': random.randint(1, 9999999) } template = Chrome(self.env).load_template('ticketstats_macro.html') return template.generate(**data)
class TimelineModule(Component):
    """Modern (Genshi-template) timeline module.

    Aggregates events from all registered ITimelineEventProvider
    components, supports author include/exclude filtering, date
    precision, per-session filter persistence and an RSS feed.
    """

    implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
               IRequestFilter, ITemplateProvider, IWikiSyntaxProvider)

    event_providers = ExtensionPoint(ITimelineEventProvider)

    default_daysback = IntOption(
        'timeline', 'default_daysback', 30,
        """Default number of days displayed in the Timeline, in days.
        (''since 0.9.'')""")

    max_daysback = IntOption(
        'timeline', 'max_daysback', 90,
        """Maximum number of days (-1 for unlimited) displayable in the
        Timeline. (''since 0.11'')""")

    abbreviated_messages = BoolOption(
        'timeline', 'abbreviated_messages', True,
        """Whether wiki-formatted event messages should be truncated or not.

        This only affects the default rendering, and can be overriden by
        specific event providers, see their own documentation.
        (''Since 0.11'')""")

    # Parses the `authors` string: optional leading '-' excludes; names
    # may be double-quoted, single-quoted or bare.
    _authors_pattern = re.compile(r'(-)?(?:"([^"]*)"|\'([^\']*)\'|([^\s]+))')

    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        return 'timeline'

    def get_navigation_items(self, req):
        if 'TIMELINE_VIEW' in req.perm:
            yield ('mainnav', 'timeline',
                   tag.a(_("Timeline"), href=req.href.timeline(),
                         accesskey=2))

    # IPermissionRequestor methods

    def get_permission_actions(self):
        return ['TIMELINE_VIEW']

    # IRequestHandler methods

    def match_request(self, req):
        return req.path_info == '/timeline'

    def process_request(self, req):
        req.perm.assert_permission('TIMELINE_VIEW')

        format = req.args.get('format')
        maxrows = int(req.args.get('max', 50 if format == 'rss' else 0))
        lastvisit = int(req.session.get('timeline.lastvisit', '0'))

        # indication of new events is unchanged when form is updated by user
        revisit = any(a in req.args for a in ['update', 'from', 'daysback',
                                              'author'])
        if revisit:
            lastvisit = int(
                req.session.get('timeline.nextlastvisit', lastvisit))

        # Parse the from date and adjust the timestamp to the last second of
        # the day
        fromdate = today = datetime.now(req.tz)
        precisedate = precision = None
        if 'from' in req.args:
            # Acquire from date only from non-blank input
            reqfromdate = req.args['from'].strip()
            if reqfromdate:
                precisedate = user_time(req, parse_date, reqfromdate)
                fromdate = precisedate
            precision = req.args.get('precision', '')
            if precision.startswith('second'):
                precision = timedelta(seconds=1)
            elif precision.startswith('minute'):
                precision = timedelta(minutes=1)
            elif precision.startswith('hour'):
                precision = timedelta(hours=1)
            else:
                precision = None
        fromdate = fromdate.replace(hour=23, minute=59, second=59,
                                    microsecond=999999)

        # daysback resolution order: request arg, session, config default;
        # clamped to [0, max_daysback] when a maximum is configured.
        daysback = as_int(req.args.get('daysback'),
                          90 if format == 'rss' else None)
        if daysback is None:
            daysback = as_int(req.session.get('timeline.daysback'), None)
        if daysback is None:
            daysback = self.default_daysback
        daysback = max(0, daysback)
        if self.max_daysback >= 0:
            daysback = min(self.max_daysback, daysback)

        authors = req.args.get('authors')
        if authors is None and format != 'rss':
            authors = req.session.get('timeline.authors')
        authors = (authors or '').strip()

        data = {'fromdate': fromdate,
                'daysback': daysback,
                'authors': authors,
                'today': user_time(req, format_date, today),
                'yesterday': user_time(req, format_date,
                                       today - timedelta(days=1)),
                'precisedate': precisedate,
                'precision': precision,
                'events': [],
                'filters': [],
                'abbreviated_messages': self.abbreviated_messages,
                'lastvisit': lastvisit}

        available_filters = []
        for event_provider in self.event_providers:
            available_filters += \
                event_provider.get_timeline_filters(req) or []

        # check the request or session for enabled filters, or use default
        filters = [f[0] for f in available_filters if f[0] in req.args]
        if not filters and format != 'rss':
            filters = [
                f[0] for f in available_filters
                if req.session.get('timeline.filter.' + f[0]) == '1'
            ]
        if not filters:
            filters = [f[0] for f in available_filters
                       if len(f) == 2 or f[2]]

        # save the results of submitting the timeline form to the session
        if 'update' in req.args:
            for filter in available_filters:
                key = 'timeline.filter.%s' % filter[0]
                if filter[0] in req.args:
                    req.session[key] = '1'
                elif key in req.session:
                    del req.session[key]

        stop = fromdate
        start = stop - timedelta(days=daysback + 1)

        # create author include and exclude sets
        include = set()
        exclude = set()
        for match in self._authors_pattern.finditer(authors):
            name = (match.group(2) or match.group(3)
                    or match.group(4)).lower()
            if match.group(1):
                exclude.add(name)
            else:
                include.add(name)

        # gather all events for the given period of time
        events = []
        for provider in self.event_providers:
            try:
                for event in provider.get_timeline_events(
                        req, start, stop, filters) or []:
                    # Check for 0.10 events (5-tuples vs 6-tuples; the
                    # author sits at index 2 resp. 4)
                    author = (event[2 if len(event) < 6 else 4]
                              or '').lower()
                    if (not include or author in include) \
                            and not author in exclude:
                        events.append(self._event_data(provider, event))
            except Exception, e:
                # cope with a failure of that provider
                self._provider_failure(e, req, provider, filters,
                                       [f[0] for f in available_filters])

        # prepare sorted global list
        events = sorted(events, key=lambda e: e['date'], reverse=True)
        if maxrows:
            events = events[:maxrows]

        data['events'] = events

        if format == 'rss':
            data['email_map'] = Chrome(self.env).get_email_map()
            rss_context = web_context(req, absurls=True)
            rss_context.set_hints(wiki_flavor='html', shorten_lines=False)
            data['context'] = rss_context
            return 'timeline.rss', data, 'application/rss+xml'
        else:
            req.session.set('timeline.daysback', daysback,
                            self.default_daysback)
            req.session.set('timeline.authors', authors, '')
            # store lastvisit
            if events and not revisit:
                lastviewed = to_utimestamp(events[0]['date'])
                req.session['timeline.lastvisit'] = max(lastvisit,
                                                        lastviewed)
                req.session['timeline.nextlastvisit'] = lastvisit
            html_context = web_context(req)
            html_context.set_hints(
                wiki_flavor='oneliner',
                shorten_lines=self.abbreviated_messages)
            data['context'] = html_context

        add_stylesheet(req, 'common/css/timeline.css')

        rss_href = req.href.timeline([(f, 'on') for f in filters],
                                     daysback=90, max=50, authors=authors,
                                     format='rss')
        add_link(req, 'alternate', auth_link(req, rss_href), _('RSS Feed'),
                 'application/rss+xml', 'rss')
        Chrome(self.env).add_jquery_ui(req)

        for filter_ in available_filters:
            data['filters'].append({'name': filter_[0],
                                    'label': filter_[1],
                                    'enabled': filter_[0] in filters})

        # Navigation to the previous/next period of 'daysback' days
        previous_start = format_date(
            fromdate - timedelta(days=daysback + 1),
            format='%Y-%m-%d', tzinfo=req.tz)
        add_link(req, 'prev',
                 req.href.timeline(from_=previous_start, authors=authors,
                                   daysback=daysback),
                 _('Previous Period'))
        if today - fromdate > timedelta(days=0):
            next_start = format_date(
                fromdate + timedelta(days=daysback + 1),
                format='%Y-%m-%d', tzinfo=req.tz)
            add_link(req, 'next',
                     req.href.timeline(from_=next_start, authors=authors,
                                       daysback=daysback),
                     _('Next Period'))
        prevnext_nav(req, _('Previous Period'), _('Next Period'))

        return 'timeline.html', data, None