def ticket_activity(project_id, start_date, end_date, db, req):
    """
    Get query response for specified time interval:

    Data: <ticket status>: <count tickets>
    """
    # Count tickets changed inside [start_date, end_date), grouped by status.
    sql_expr = """
        SELECT t.status, COUNT(t.id)
        FROM ticket t
        WHERE t.project_id=%s AND t.changetime >= %s AND t.changetime < %s
        GROUP BY t.status;
        """
    cursor = db.cursor()
    cursor.execute(sql_expr, (project_id,
                              to_utimestamp(start_date),
                              to_utimestamp(end_date))
                   )
    results = [(r[0], r[1]) for r in cursor]
    query_response = QueryResponse("ticket_activity", req.href('/chrome'))
    query_response.set_title(_("Ticket activity"))
    query_response.set_columns((_('ticket status'), _('tickets')))
    query_response.set_results(results)
    # Render the per-status counts as a pie chart, one slice per status.
    chart = query_response.chart_info
    chart.type = "Pie"
    chart.width = 480
    chart.height = 300
    chart.tool_tip = "%s:#x_label#<br>%s:#val#" % (_('status'), _('tickets'))
    chart.line_color = "#000000"
    chart.x_labels = [row[0] for row in results]
    chart.data = [row[1] for row in results]
    return query_response
def test_initial_sync(self):
    """A fresh cache sync copies every changeset and its node changes
    into the `revision` and `node_change` tables."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    # The lambda captures `changesets` late: it is only called during
    # sync(), after the list below has been assigned.
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
               ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changesets = [Mock(Changeset, repos, 0, '', '', t1,
                       get_changes=lambda: []),
                  Mock(Changeset, repos, 1, 'Import', 'joe', t2,
                       get_changes=lambda: iter(changes))]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT rev,time,author,message FROM revision")
    self.assertEquals(('0', to_utimestamp(t1), '', ''), cursor.fetchone())
    self.assertEquals(('1', to_utimestamp(t2), 'joe', 'Import'),
                      cursor.fetchone())
    self.assertEquals(None, cursor.fetchone())
    cursor.execute("""
        SELECT rev,path,node_type,change_type,base_path,base_rev
        FROM node_change
        """)
    self.assertEquals(('1', 'trunk', 'D', 'A', None, None),
                      cursor.fetchone())
    self.assertEquals(('1', 'trunk/README', 'F', 'A', None, None),
                      cursor.fetchone())
    self.assertEquals(None, cursor.fetchone())
def query(env, status=None, threshold=None):
    """Query crashdump rows, optionally filtered by status and age.

    :param env: Trac environment.
    :param status: `'active'` matches everything not closed; any other
                   value (e.g. `'closed'`, `'new'`) matches exactly.
    :param threshold: optional `(column_name, datetime)` pair keeping
                      only rows whose `column_name` is older than the
                      given time.  The column name is trusted (comes
                      from callers, not users); the time is bound as a
                      parameter.
    :return: list of `CrashDump` objects.
    """
    conditions = []
    args = []
    if status is not None:
        if status == 'active':
            conditions.append("status<>'closed'")
        else:
            # The original interpolated `status` straight into the SQL
            # string, which was injection-prone; bind it instead.
            conditions.append("status=%s")
            args.append(status)
    if threshold is not None:
        threshold_column, threshold_time = threshold
        conditions.append('%s < %%s' % threshold_column)
        args.append(to_utimestamp(threshold_time))
    where_clause = ' WHERE ' + ' AND '.join(conditions) if conditions else ''
    fields = CrashDumpSystem(env).get_crash_fields()
    # Only the standard (non-custom) fields live in the crashdump table.
    std_fields = [f['name'] for f in fields if not f.get('custom')]
    # Fetch the standard crashdump fields
    ret = []
    for row in env.db_query("SELECT id,%s FROM crashdump%s"
                            % (','.join(std_fields), where_clause), args):
        ret.append(CrashDump(env=env, must_exist=True, row=row))
    return ret
def test_initial_sync(self):
    """Initial cache sync (db_query API variant): both changesets and
    their node changes end up in the cache tables."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    # `changesets` is captured late by the lambda; it is only looked up
    # when sync() runs, after the list has been assigned.
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes = [
        ("trunk", Node.DIRECTORY, Changeset.ADD, None, None),
        ("trunk/README", Node.FILE, Changeset.ADD, None, None),
    ]
    changesets = [
        Mock(Changeset, repos, 0, "", "", t1, get_changes=lambda: []),
        Mock(Changeset, repos, 1, "Import", "joe", t2,
             get_changes=lambda: iter(changes)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        rows = db("SELECT rev, time, author, message FROM revision")
        self.assertEquals(len(rows), 2)
        self.assertEquals(("0", to_utimestamp(t1), "", ""), rows[0])
        self.assertEquals(("1", to_utimestamp(t2), "joe", "Import"),
                          rows[1])
        rows = db(
            """
            SELECT rev, path, node_type, change_type, base_path, base_rev
            FROM node_change"""
        )
        self.assertEquals(len(rows), 2)
        self.assertEquals(("1", "trunk", "D", "A", None, None), rows[0])
        self.assertEquals(("1", "trunk/README", "F", "A", None, None),
                          rows[1])
def modify_comment(self, cdate, author, comment, when=None):
    """Modify a ticket comment specified by its date, while keeping a
    history of edits.

    :param cdate: datetime identifying the change being edited.
    :param author: who is performing this edit.
    :param comment: the new comment text.
    :param when: time of the edit; defaults to now (UTC).
    """
    ts = to_utimestamp(cdate)
    if when is None:
        when = datetime.now(utc)
    when_ts = to_utimestamp(when)

    @self.env.with_transaction()
    def do_modify(db):
        cursor = db.cursor()
        # Find the current value of the comment
        cursor.execute("""
            SELECT newvalue FROM ticket_change
            WHERE ticket=%s AND time=%s AND field='comment'
            """, (self.id, ts))
        # `False` (as opposed to None) marks "no comment row exists",
        # so an empty comment can be told apart from a missing one.
        old_comment = False
        for old_comment, in cursor:
            break
        if comment == (old_comment or ''):
            return  # unchanged: don't record a spurious edit

        # Comment history is stored in fields named "_comment%d"
        # Find the next edit number
        cursor.execute("""
            SELECT field FROM ticket_change
            WHERE ticket=%%s AND time=%%s AND field %s
            """ % db.like(),
            (self.id, ts, db.like_escape('_comment') + '%'))
        fields = list(cursor)
        # field[8:] strips the "_comment" prefix to get the edit number
        rev = fields and max(int(field[8:]) for field, in fields) + 1 or 0
        # Archive the previous text; newvalue holds the edit timestamp
        cursor.execute("""
            INSERT INTO ticket_change
                (ticket,time,author,field,oldvalue,newvalue)
            VALUES (%s,%s,%s,%s,%s,%s)
            """, (self.id, ts, author, '_comment%d' % rev,
                  old_comment or '', str(when_ts)))
        if old_comment is False:
            # There was no comment field, add one, find the original
            # author in one of the other changed fields
            cursor.execute("""
                SELECT author FROM ticket_change
                WHERE ticket=%%s AND time=%%s AND NOT field %s LIMIT 1
                """ % db.like(),
                (self.id, ts, db.like_escape('_') + '%'))
            old_author = None
            for old_author, in cursor:
                break
            cursor.execute("""
                INSERT INTO ticket_change
                    (ticket,time,author,field,oldvalue,newvalue)
                VALUES (%s,%s,%s,'comment','',%s)
                """, (self.id, ts, old_author, comment))
        else:
            cursor.execute("""
                UPDATE ticket_change SET newvalue=%s
                WHERE ticket=%s AND time=%s AND field='comment'
                """, (comment, self.id, ts))
def test_sync_changeset(self):
    """sync_changeset(0) re-fetches only revision 0; revision 1 keeps
    its cached message ('Import') even though the repository now
    reports 'Initial Import'."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    # Pre-populate the cache with (stale) data for both revisions.
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes1 = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changesets = [
        Mock(Changeset, repos, 0, '**empty**', 'joe', t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Initial Import', 'joe', t2,
             get_changes=lambda: iter(changes1)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync_changeset(0)
    cursor = self.db.cursor()
    cursor.execute("SELECT time,author,message FROM revision ORDER BY rev")
    # Revision 0 was refreshed from the repository...
    self.assertEquals((to_utimestamp(t1), 'joe', '**empty**'),
                      cursor.fetchone())
    # ...while revision 1 still carries the cached message.
    self.assertEquals((to_utimestamp(t2), 'joe', 'Import'),
                      cursor.fetchone())
    self.assertEquals(None, cursor.fetchone())
def test_update_sync(self):
    """sync() after new commits appear only adds the missing revisions
    (db_query API variant)."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (("0", to_utimestamp(t1), "", ""), []),
        (
            ("1", to_utimestamp(t2), "joe", "Import"),
            [("trunk", "D", "A", None, None),
             ("trunk/README", "F", "A", None, None)],
        ),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes = [("trunk/README", Node.FILE, Changeset.EDIT,
                "trunk/README", 1)]
    # Entry 0 is never fetched again (already cached), hence None.
    changesets = [
        None,
        Mock(Changeset, repos, 1, "", "", t2, get_changes=lambda: []),
        Mock(Changeset, repos, 2, "Update", "joe", t3,
             get_changes=lambda: iter(changes)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        self.assertEquals(
            [(to_utimestamp(t3), "joe", "Update")],
            db("SELECT time, author, message FROM revision WHERE rev='2'")
        )
        self.assertEquals(
            [("trunk/README", "F", "E", "trunk/README", "1")],
            db(
                """SELECT path, node_type, change_type, base_path, base_rev
                   FROM node_change  WHERE rev='2'"""
            ),
        )
def test_update_sync(self):
    """An incremental sync() picks up only the new revision 2 and its
    node change."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes = [('trunk/README', Node.FILE, Changeset.EDIT,
                'trunk/README', 1)]
    # Entry 0 is never fetched again (already cached), hence None.
    changesets = [
        None,
        Mock(Changeset, repos, 1, '', '', t2, get_changes=lambda: []),
        Mock(Changeset, repos, 2, 'Update', 'joe', t3,
             get_changes=lambda: iter(changes))
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        self.assertEquals(
            [(to_utimestamp(t3), 'joe', 'Update')],
            db("SELECT time, author, message FROM revision WHERE rev='2'"))
        self.assertEquals(
            [('trunk/README', 'F', 'E', 'trunk/README', '1')],
            db("""SELECT path, node_type, change_type, base_path, base_rev
                  FROM node_change WHERE rev='2'"""))
def set_date_range(self, start_date, stop_date):
    """Load this project's changesets with commit time in
    [start_date, stop_date) and remember the range boundaries.

    Populates `self.changesets` as (rev, time, author) tuples ordered
    by time, and sets `self.first_rev` / `self.last_rev` (0 when the
    interval is empty).
    """
    db = self.env.get_read_db()
    cursor = db.cursor()
    cursor.execute('''
        SELECT rev, time, author FROM revision
        WHERE repos IN (
            SELECT id FROM repository WHERE name='project_id' AND value=%s
        )
        AND time >= %s AND time < %s
        ORDER BY time
        ''', (str(self.pid), to_utimestamp(start_date),
              to_utimestamp(stop_date)))
    self.changesets = [(rev, time, author)
                       for rev, time, author in cursor]
    self.start_date = start_date
    self.stop_date = stop_date
    if self.changesets:
        self.first_rev = self.changesets[0][0]
        self.last_rev = self.changesets[-1][0]
    else:
        self.first_rev = self.last_rev = 0
def save(self, when=None):
    """Persist this remote ticket: update the existing row if present,
    otherwise insert a new one.

    :param when: cache timestamp to record; defaults to
                 `self._cachetime` (when the remote data was fetched).
    """
    if when is None:
        when = self._cachetime
    when_ts = to_utimestamp(when)
    # Build the update/insert sequences
    # NB The ordering means the same sequences may be reused in both cases
    field_names = self.table_fields
    values_dict = dict(self.values)
    values_dict.update({'time': to_utimestamp(values_dict['time']),
                        'changetime': to_utimestamp(values_dict['changetime']),
                        'cachetime': when_ts,
                        'remote_name': self.remote_name,
                        'id': self.id})
    # NOTE(review): this relies on the last two entries of
    # `table_fields` being remote_name and id, so `values` lines up
    # with the UPDATE's SET clause followed by its WHERE clause --
    # confirm against the table definition.
    values = [values_dict[name] for name in field_names]

    @self.env.with_transaction()
    def do_save(db):
        cursor = db.cursor()
        # Update the existing entry (if any)
        sql = ('''UPDATE remote_tickets SET %s
                  WHERE remote_name=%%s and id=%%s'''
               % ','.join('%s=%%s' % name for name in field_names[:-2]))
        cursor.execute(sql, values)
        # If a row was updated then our work is done
        if cursor.rowcount > 0:
            return
        # If no rows were updated then this remote ticket is new to us
        sql = ('''INSERT INTO remote_tickets (%s) VALUES (%s)'''
               % (','.join(field_names),
                  ','.join(['%s'] * len(field_names))))
        cursor.execute(sql, values)
def test_missing_comment_edit(self):
    """Modify a comment where one edit is missing"""
    ticket = Ticket(self.env, self.id)
    # Two normal edits create the _comment0 and _comment1 history rows.
    t1 = self.created + timedelta(seconds=70)
    ticket.modify_comment(self._find_change(ticket, 1), "joe",
                          "New comment 1", t1)
    t2 = self.created + timedelta(seconds=80)
    ticket.modify_comment(self._find_change(ticket, 1), "joe",
                          "Other comment 1", t2)
    self.assertChange(
        ticket, 1, self.t1, "jack",
        comment=dict(author="jack", old="1", new="Other comment 1"),
        _comment0=dict(author="joe", old="Comment 1",
                       new=str(to_utimestamp(t1))),
        _comment1=dict(author="joe", old="New comment 1",
                       new=str(to_utimestamp(t2))),
    )
    # Simulate a lost history entry, then edit again: the next edit
    # number must still be allocated past the highest surviving one.
    cursor = self.db.cursor()
    cursor.execute("DELETE FROM ticket_change "
                   "WHERE field='_comment0'")
    self.db.commit()
    t3 = self.created + timedelta(seconds=90)
    ticket.modify_comment(self._find_change(ticket, 1), "joe",
                          "Newest comment 1", t3)
    self.assertChange(
        ticket, 1, self.t1, "jack",
        comment=dict(author="jack", old="1", new="Newest comment 1"),
        _comment1=dict(author="joe", old="New comment 1",
                       new=str(to_utimestamp(t2))),
        _comment2=dict(author="joe", old="Other comment 1",
                       new=str(to_utimestamp(t3))),
    )
def _process_add(self, req, ticket):
    """Handle the "add reminder" form: on a valid POST insert the
    reminder and redirect back to the ticket; otherwise render the
    add form.

    :return: `(template, data, content_type)` for Trac's request
             dispatcher (not reached when the redirect fires).
    """
    if req.method == "POST" and self._validate_add(req):
        if req.args.get('reminder_type') == 'interval':
            # Interval reminders fire at midnight, N units from today.
            time = clear_time(to_datetime(None))
            delta = _time_intervals[req.args.get('unit')](
                req.args.get('interval'))
            time += delta
            time = to_utimestamp(time)
        else:
            # Absolute reminders use the user-supplied date.
            time = to_utimestamp(parse_date(req.args.get('date')))
        origin = to_utimestamp(to_datetime(None))  # i.e. "now"
        self.env.db_transaction("""
            INSERT INTO ticketreminder
              (ticket, time, author, origin, reminded, description)
            VALUES (%s, %s, %s, %s, 0, %s)
            """, (ticket.id, time, get_reporter_id(req, 'author'),
                  origin, req.args.get('description')))
        add_notice(req, "Reminder has been added.")
        # redirect() raises RequestDone, so execution stops here on
        # success.
        req.redirect(get_resource_url(self.env, ticket.resource,
                                      req.href) + "#reminders")
    add_script(req, 'ticketreminder/js/ticketreminder.js')
    data = {
        'ticket': ticket,
        'date_hint': get_date_format_hint(),
    }
    return ("ticket_reminder_add.html", data, None)
def _get_num_closed_tix(self, from_date, at_date, milestone, req):
    '''
    Returns an integer of the number of close ticket events counted
    between from_date (exclusive) to at_date (inclusive), optionally
    restricted to a single milestone.
    '''
    # Only transitions *to* 'closed' count; every other status maps to 0.
    status_map = {'new': 0, 'reopened': 0, 'assigned': 0,
                  'closed': 1, 'edit': 0}
    count = 0
    # The milestone value is bound as a parameter (the original
    # interpolated it into the SQL string inside non-standard
    # double-quoted literals, which was injection-prone).
    ma_milestone_str = ""
    args = [to_utimestamp(from_date), to_utimestamp(at_date)]
    if milestone is not None:
        ma_milestone_str = " AND t.milestone = %s "
        args.append(milestone)
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    # TODO clean up this query
    cursor.execute("SELECT t.id, tc.field, tc.time, tc.oldvalue, "
                   "tc.newvalue, t.priority "
                   "FROM enum p, ticket_change tc "
                   "INNER JOIN ticket t ON t.id = tc.ticket "
                   "AND tc.time > %%s AND tc.time <= %%s "
                   "WHERE p.name = t.priority AND p.type = 'priority' %s "
                   "ORDER BY tc.time" % ma_milestone_str, args)
    for id, field, time, old, status, priority in cursor:
        if field == 'status':
            if status in ('new', 'assigned', 'reopened', 'closed', 'edit'):
                count += status_map[status]
    return count
def update(self, author=None):
    """Update the milestone.

    Renames propagate to tickets and attachments; listeners are
    notified with the attributes that actually changed.
    """
    self.name = simplify_whitespace(self.name)
    if not self.name:
        raise TracError(_("Invalid milestone name."))

    old = self._old.copy()
    with self.env.db_transaction as db:
        if self.name != old['name']:
            # Update milestone field in tickets
            self.move_tickets(self.name, author, "Milestone renamed")
            # Reparent attachments
            Attachment.reparent_all(self.env, self.realm, old['name'],
                                    self.realm, self.name)
        self.env.log.info("Updating milestone '%s'", old['name'])
        db("""UPDATE milestone
              SET name=%s, due=%s, completed=%s, description=%s
              WHERE name=%s
              """, (self.name, to_utimestamp(self.due),
                    to_utimestamp(self.completed), self.description,
                    old['name']))
        self.checkin()

    # Fields need reset if renamed or completed/due changed
    TicketSystem(self.env).reset_ticket_fields()

    # Notify listeners only about attributes that actually changed.
    old_values = dict((k, v) for k, v in old.iteritems()
                      if getattr(self, k) != v)
    for listener in TicketSystem(self.env).milestone_change_listeners:
        listener.milestone_changed(self, old_values)
def timeval(self, name, default):
    """Parse keyword argument `name` as a time offset before "now" and
    return `(utimestamp, display_text)`; fall back to `default` (a
    `(seconds, text)` pair) when the argument is absent.

    Accepts either a plain integer number of seconds, or a number with
    a one-letter unit suffix looked up in `self.tunits` (e.g. '2d').

    :raises TracError: when the value cannot be parsed at all.
    """
    if name in self.kwargs:
        try:
            val = self.kwargs[name]
            try:
                # Plain integer: interpreted as seconds.
                val = int(val)
                text = \
                    str(val) + self.tunits_name['s'] + ['s',''][val == 1]
            except:
                # Otherwise the trailing character is the unit suffix.
                unit = val[-1].lower()
                val = float(val[:-1])
                # NOTE(review): strip('.0') removes *all* leading and
                # trailing '.' and '0' characters, so e.g. 10.0 renders
                # as '1' -- looks like a bug, confirm intended display.
                text = \
                    str(val).strip('.0') + self.tunits_name[unit] \
                    + ['s',''][val == 1]
                val = int( val * self.tunits[ unit ] )
            # Offset backwards from the current unix time.
            val = int(unixtime()) - val
            # mod for trac 0.12
            nval = to_utimestamp(to_datetime(val))
        except:
            raise TracError("Invalid value '%s' for argument '%s'! "
                            % (self.kwargs[name],name) )
        return (nval,text)
    else:
        defval, deftext = default
        ndef = to_utimestamp(to_datetime(defval))
        return (ndef,deftext)
def test_missing_comment_edit(self):
    """Modify a comment where one edit is missing"""
    ticket = Ticket(self.env, self.id)
    # Two normal edits create the _comment0 and _comment1 history rows.
    t1 = self.created + timedelta(seconds=70)
    ticket.modify_comment(self._find_change(ticket, 1),
                          'joe', 'New comment 1', t1)
    t2 = self.created + timedelta(seconds=80)
    ticket.modify_comment(self._find_change(ticket, 1),
                          'joe', 'Other comment 1', t2)
    self.assertChange(ticket, 1, self.t1, 'jack',
        comment=dict(author='jack', old='1', new='Other comment 1'),
        _comment0=dict(author='joe', old='Comment 1',
                       new=str(to_utimestamp(t1))),
        _comment1=dict(author='joe', old='New comment 1',
                       new=str(to_utimestamp(t2))))
    # Simulate a lost history entry, then edit again: the next edit
    # number must still be allocated past the highest surviving one.
    self.env.db_transaction(
        "DELETE FROM ticket_change WHERE field='_comment0'")
    t3 = self.created + timedelta(seconds=90)
    ticket.modify_comment(self._find_change(ticket, 1),
                          'joe', 'Newest comment 1', t3)
    self.assertChange(ticket, 1, self.t1, 'jack',
        comment=dict(author='jack', old='1', new='Newest comment 1'),
        _comment1=dict(author='joe', old='New comment 1',
                       new=str(to_utimestamp(t2))),
        _comment2=dict(author='joe', old='Other comment 1',
                       new=str(to_utimestamp(t3))))
def test_sync_changeset(self):
    """sync_changeset(0) refreshes revision 0 only; revision 1 keeps
    the stale cached message (db_query API variant)."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    # Pre-populate the cache with (stale) data for both revisions.
    self.preset_cache(
        (("0", to_utimestamp(t1), "", ""), []),
        (
            ("1", to_utimestamp(t2), "joe", "Import"),
            [("trunk", "D", "A", None, None),
             ("trunk/README", "F", "A", None, None)],
        ),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes1 = [
        ("trunk", Node.DIRECTORY, Changeset.ADD, None, None),
        ("trunk/README", Node.FILE, Changeset.ADD, None, None),
    ]
    changesets = [
        Mock(Changeset, repos, 0, "**empty**", "joe", t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, "Initial Import", "joe", t2,
             get_changes=lambda: iter(changes1)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync_changeset(0)
    rows = self.env.db_query(
        "SELECT time, author, message FROM revision ORDER BY rev")
    self.assertEquals(2, len(rows))
    # Revision 0 was refreshed; revision 1 still shows the cached
    # 'Import' message, not the repository's 'Initial Import'.
    self.assertEquals((to_utimestamp(t1), "joe", "**empty**"), rows[0])
    self.assertEquals((to_utimestamp(t2), "joe", "Import"), rows[1])
def modify_comment(self, cdate, author, comment, when=None):
    """Modify a ticket comment specified by its date, while keeping a
    history of edits.

    :param cdate: datetime identifying the change being edited.
    :param author: who is performing this edit.
    :param comment: the new comment text.
    :param when: time of the edit; defaults to now (UTC).
    """
    ts = to_utimestamp(cdate)
    if when is None:
        when = datetime.now(utc)
    when_ts = to_utimestamp(when)

    with self.env.db_transaction as db:
        # Find the current value of the comment.  `False` (as opposed
        # to None) means "no comment row exists at all", so an empty
        # comment can be told apart from a missing one below.
        old_comment = False
        for old_comment, in db("""
                SELECT newvalue FROM ticket_change
                WHERE ticket=%s AND time=%s AND field='comment'
                """, (self.id, ts)):
            break
        if comment == (old_comment or ''):
            return  # unchanged: don't record a spurious edit

        # Comment history is stored in fields named "_comment%d"
        # Find the next edit number
        fields = db("""SELECT field FROM ticket_change
                       WHERE ticket=%%s AND time=%%s AND field %s
                       """ % db.prefix_match(),
                    (self.id, ts, db.prefix_match_value('_comment')))
        # field[8:] strips the "_comment" prefix to get the edit number
        rev = max(int(field[8:]) for field, in fields) + 1 if fields \
              else 0
        # Archive the previous text; newvalue holds the edit timestamp
        db("""INSERT INTO ticket_change
                (ticket,time,author,field,oldvalue,newvalue)
              VALUES (%s,%s,%s,%s,%s,%s)
              """, (self.id, ts, author, '_comment%d' % rev,
                    old_comment or '', str(when_ts)))
        if old_comment is False:
            # There was no comment field, add one, find the
            # original author in one of the other changed fields
            # (no insert happens if no such field exists).
            for old_author, in db("""
                    SELECT author FROM ticket_change
                    WHERE ticket=%%s AND time=%%s AND NOT field %s
                    LIMIT 1
                    """ % db.prefix_match(),
                    (self.id, ts, db.prefix_match_value('_'))):
                db("""INSERT INTO ticket_change
                        (ticket,time,author,field,oldvalue,newvalue)
                      VALUES (%s,%s,%s,'comment','',%s)
                      """, (self.id, ts, old_author, comment))
        else:
            db("""UPDATE ticket_change SET newvalue=%s
                  WHERE ticket=%s AND time=%s AND field='comment'
                  """, (comment, self.id, ts))

        # Update last changed time
        db("UPDATE ticket SET changetime=%s WHERE id=%s",
           (when_ts, self.id))

    self.values['changetime'] = when

    # Notify listeners that support comment-modification events.
    old_comment = old_comment or ''
    for listener in TicketSystem(self.env).change_listeners:
        if hasattr(listener, 'ticket_comment_modified'):
            listener.ticket_comment_modified(self, cdate, author,
                                             comment, old_comment)
def get_timeline_events(self, req, start, stop, filters):
    """Yield redacted timeline events for sensitive tickets.

    Only active when the user may view sensitive activity but not the
    sensitive content itself (SENSITIVE_ACTIVITY_VIEW granted,
    SENSITIVE_VIEW denied).  Event tuples carry 'redacted' in place of
    the author and (ticket id, comment id) as data.
    """
    if ('sensitive_activity' in filters
            and 'SENSITIVE_ACTIVITY_VIEW' in req.perm
            and 'SENSITIVE_VIEW' not in req.perm):
        ts_start = to_utimestamp(start)
        ts_stop = to_utimestamp(stop)
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        if 'ticket_details' in filters:
            # only show sensitive ticket changes (edits, closure) if
            # the 'ticket_details' filter is on:
            cursor.execute("""
                SELECT DISTINCT t.id,tc.time,tc.oldvalue
                FROM ticket_change tc
                INNER JOIN ticket t ON t.id = tc.ticket
                    AND tc.time >= %s AND tc.time <= %s
                    AND tc.field = %s
                INNER JOIN ticket_custom td ON t.id = td.ticket
                    AND td.name = %s AND td.value = %s
                ORDER BY tc.time
                """, (ts_start, ts_stop, 'comment', 'sensitive', '1'))
            for tid, t, cid in cursor:
                yield ('sensitive_activity', from_utimestamp(t),
                       'redacted', (tid, cid))
        # always show new sensitive tickets:
        cursor.execute('''
            SELECT DISTINCT id, time FROM ticket t
            INNER JOIN ticket_custom tc ON t.id = tc.ticket
                AND t.time >= %s AND t.time <= %s
                AND tc.name = %s AND tc.value = %s
            ORDER BY time
            ''', (ts_start, ts_stop, 'sensitive', '1'))
        for tid, t in cursor:
            yield ('sensitive_activity', from_utimestamp(t),
                   'redacted', (tid, None))
def test_modify_missing_cnums_and_comment(self):
    """Editing a comment when all cnums are missing and one comment
    field is missing
    """
    # Wipe the stored comment numbers and drop one comment row.
    with self.env.db_transaction as db:
        db("UPDATE ticket_change SET oldvalue='' WHERE oldvalue='1'")
        db("""DELETE FROM ticket_change
              WHERE field='comment' AND oldvalue='1.2'""")
        db("UPDATE ticket_change SET oldvalue='' WHERE oldvalue='3'")

    # Modify after missing comment
    ticket = Ticket(self.env, self.id)
    t = self.created + timedelta(seconds=50)
    ticket.modify_comment(self._find_change(ticket, 3),
                          'joe', 'New comment 3', t)
    self.assertChange(ticket, 3, self.t3, 'jim',
        keywords=dict(author='jim', old='a, b, c', new='a, b'),
        comment=dict(author='jim', old='', new='New comment 3'),
        _comment0=dict(author='joe', old='Comment 3',
                       new=str(to_utimestamp(t))))

    # Modify missing comment
    t = self.created + timedelta(seconds=60)
    ticket.modify_comment(self._find_change(ticket, 2),
                          'joe', 'New comment 2', t)
    self.assertChange(ticket, 2, self.t2, 'john',
        owner=dict(author='john', old='john', new='jack'),
        comment=dict(author='john', old='', new='New comment 2'),
        _comment0=dict(author='joe', old='',
                       new=str(to_utimestamp(t))))
def get_timeline_events(self, req, start, stop, filters):
    """Yield project-change timeline events in [start, stop].

    Each event is a ('project', datetime, who, change) tuple.  The
    timestamps are bound as SQL parameters (the original built the
    query with `%` string interpolation).
    """
    if 'project changes' in filters:
        cnx = self.env.get_db_cnx()
        cur = cnx.cursor()
        cur.execute("SELECT who,change,time FROM project_change "
                    "WHERE time>=%s AND time<=%s",
                    (to_utimestamp(start), to_utimestamp(stop)))
        for who, change, ts in cur:
            yield ('project', from_utimestamp(ts), who, change)
def delete_change(self, cnum=None, cdate=None, when=None):
    """Delete a ticket change identified by its number or date.

    Field values recorded by the deleted change are rolled back: the
    next change of the same field (if any) inherits the deleted
    change's old value, otherwise the ticket itself is reverted.

    :param cnum: comment number (used when `cdate` is None).
    :param cdate: exact datetime of the change.
    :param when: time to stamp as the ticket's new changetime;
                 defaults to now (UTC).
    """
    if cdate is None:
        row = self._find_change(cnum)
        if not row:
            return
        cdate = from_utimestamp(row[0])
    ts = to_utimestamp(cdate)
    if when is None:
        when = datetime.now(utc)
    when_ts = to_utimestamp(when)

    with self.env.db_transaction as db:
        # Find modified fields and their previous value; comment rows
        # and "_comment*" history rows are not rolled back.
        fields = [(field, old, new)
                  for field, old, new in db("""
                      SELECT field, oldvalue, newvalue FROM ticket_change
                      WHERE ticket=%s AND time=%s
                      """, (self.id, ts))
                  if field != 'comment' and not field.startswith('_')]
        for field, oldvalue, newvalue in fields:
            # Find the next change
            for next_ts, in db("""SELECT time FROM ticket_change
                                  WHERE ticket=%s AND time>%s AND field=%s
                                  LIMIT 1
                                  """, (self.id, ts, field)):
                # Modify the old value of the next change if it is
                # equal to the new value of the deleted change
                db("""UPDATE ticket_change SET oldvalue=%s
                      WHERE ticket=%s AND time=%s AND field=%s
                          AND oldvalue=%s
                      """, (oldvalue, self.id, next_ts, field, newvalue))
                break
            else:
                # No next change, edit ticket field
                if field in self.std_fields:
                    db("UPDATE ticket SET %s=%%s WHERE id=%%s" % field,
                       (oldvalue, self.id))
                else:
                    db("""UPDATE ticket_custom SET value=%s
                          WHERE ticket=%s AND name=%s
                          """, (oldvalue, self.id, field))

        # Delete the change
        db("DELETE FROM ticket_change WHERE ticket=%s AND time=%s",
           (self.id, ts))

        # Update last changed time
        db("UPDATE ticket SET changetime=%s WHERE id=%s",
           (when_ts, self.id))

    # Re-load the ticket so cached values reflect the rollback.
    self._fetch_ticket(self.id)

    changes = dict((field, (oldvalue, newvalue))
                   for field, oldvalue, newvalue in fields)
    for listener in TicketSystem(self.env).change_listeners:
        if hasattr(listener, 'ticket_change_deleted'):
            listener.ticket_change_deleted(self, cdate, changes)
def ticket_activity_user(project_id, username, start_date, end_date,
                         groupsize, groupcnt, db, req):
    """
    Get query response for specified time interval and `username`:

    Data: <event>: <count events>.
    Events: 'created', 'closed'.
    """
    # UNION of tickets reported by the user ('created') and tickets
    # owned by the user that changed status to closed ('closed').
    q = '''
        SELECT t.id, t.time, 'created' AS event
        FROM ticket t
        WHERE t.reporter=%s AND t.project_id=%s
            AND t.time >= %s AND t.time < %s
        UNION
        SELECT t.id, tc.time, 'closed' AS event
        FROM ticket t
        JOIN ticket_change tc ON t.id = tc.ticket
            AND tc.field='status' AND tc.newvalue='closed'
        WHERE t.owner=%s AND t.project_id=%s
            AND tc.time >= %s AND tc.time < %s
        ORDER BY event
        '''
    cursor = db.cursor()
    # Both halves of the UNION take the same four parameters, hence *2.
    cursor.execute(q, (username, project_id, to_utimestamp(start_date),
                       to_utimestamp(end_date))*2)
    etypes = (N_('created'), N_('closed'))
    events = [(r[2], from_utimestamp(r[1]), r[0]) for r in cursor]
    # TODO: count closed once, use global closed set
    # Collect distinct ticket ids per (event type, period) bucket...
    def init_set(e):
        return set()
    def add_to_set(stor, idx, event_data):
        stor[idx].add(event_data[2])
    groups_list, groups_data = aggregate_events_by_periods(
        etypes, events, start_date, groupsize, groupcnt,
        add_to_set, init_set)
    # ...then reduce each bucket to its cardinality.
    for etype, groups in groups_data.iteritems():
        for idx, ids in enumerate(groups):
            groups[idx] = len(ids)
    query_response = QueryResponse("ticket_activity", req.href('/chrome'))
    query_response.set_title(
        _("Ticket activity from %(start_date)s to %(end_date)s",
          start_date=format_date(start_date, tzinfo=req.tz),
          end_date=format_date(end_date, tzinfo=req.tz)))
    groups_data = translate_keys(groups_data)
    columns, rows = adapt_to_table(groups_list, groups_data)
    query_response.set_columns(columns)
    query_response.set_results(rows)
    # Render as a line chart: one line per event type, one point per
    # time period.
    chart = query_response.chart_info
    chart.type = 'Line'
    chart.width = 600
    chart.x_legend = _('Time periods')
    chart.y_legend = _('Tickets')
    chart.x_labels = groups_list
    chart.data = restructure_data(groups_data)
    chart.tool_tip = "#key#<br>%s:#x_label#<br>%s:#val#" \
                     % (_('period'), _('tickets'))
    return query_response
def do_insert(db):
    # Transaction callback: insert a brand-new milestone row.  `self`
    # is taken from the enclosing scope; `db` is supplied by the
    # transaction wrapper.
    cursor = db.cursor()
    self.env.log.debug("Creating new milestone '%s'" % self.name)
    cursor.execute("""
        INSERT INTO milestone (name,due,completed,description)
        VALUES (%s,%s,%s,%s)
        """, (self.name, to_utimestamp(self.due),
              to_utimestamp(self.completed), self.description))
    # Snapshot the saved state and refresh ticket field definitions.
    self._to_old()
    TicketSystem(self.env).reset_ticket_fields()
def get_changesets(self, start, stop):
    """Generate the changesets whose commit time falls in
    [start, stop), newest first.

    Revisions that are currently being resynced raise
    `NoSuchChangeset` and are silently skipped.
    """
    rows = self.env.db_query("""
        SELECT rev FROM revision
        WHERE repos=%s AND time >= %s AND time < %s
        ORDER BY time DESC, rev DESC
        """, (self.id, to_utimestamp(start), to_utimestamp(stop)))
    for row in rows:
        try:
            yield self.get_changeset(row[0])
        except NoSuchChangeset:
            # skip changesets currently being resync'ed
            continue
def get_changesets(self, start, stop):
    """Generate changesets with commit time in [start, stop), newest
    first, skipping revisions that are currently being resynced."""
    cursor = self.env.get_db_cnx().cursor()
    cursor.execute("SELECT rev FROM revision "
                   "WHERE repos=%s AND time >= %s AND time < %s "
                   "ORDER BY time DESC, rev DESC",
                   (self.id, to_utimestamp(start), to_utimestamp(stop)))
    for row in cursor:
        try:
            yield self.get_changeset(row[0])
        except NoSuchChangeset:
            continue  # skip changesets currently being resync'ed
def _iso8601_to_ts(s):
    """Parse ISO-8601 string to microsecond POSIX timestamp.

    Numeric strings are returned as-is (already a timestamp); anything
    unparsable falls back to "now".
    """
    try:
        s = str(s)
        if s.isnumeric():
            # Valid type, no conversion required.
            # NOTE(review): `long` is Python 2 only.
            return long(s)
        tm = time.strptime(s, '%Y-%m-%d %H:%M:%S')
        # struct_time[:6] -> (Y, M, D, h, m, s); append 0 microseconds
        # and the UTC tzinfo as positional datetime arguments.
        dt = datetime.datetime(*(tm[0:6] + (0, utc)))
        return to_utimestamp(dt)
    except (AttributeError, TypeError, ValueError):
        # Create a valid timestamp anyway.
        return to_utimestamp(datetime_now(utc))
def test_update_milestone(self):
    """Milestone.update() persists due, completed and description."""
    self.env.db_transaction(
        "INSERT INTO milestone (name) VALUES ('Test')")
    milestone = Milestone(self.env, 'Test')
    # NOTE: the `01` literals are Python 2 syntax (octal-style).
    t1 = datetime(2001, 01, 01, tzinfo=utc)
    t2 = datetime(2002, 02, 02, tzinfo=utc)
    milestone.due = t1
    milestone.completed = t2
    milestone.description = 'Foo bar'
    milestone.update()
    self.assertEqual(
        [('Test', to_utimestamp(t1), to_utimestamp(t2), 'Foo bar')],
        self.env.db_query("SELECT * FROM milestone WHERE name='Test'"))
def test_clean_sync(self):
    """sync(clean=True) discards the preset cache and rebuilds every
    revision from the repository (rev 1's message becomes 'Initial
    Import', not the cached 'Import')."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    # Pre-populate the cache with stale metadata for revisions 0 and 1.
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes1 = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changes2 = [('trunk/README', Node.FILE, Changeset.EDIT,
                 'trunk/README', 1)]
    changesets = [
        Mock(Changeset, repos, 0, '**empty**', 'joe', t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Initial Import', 'joe', t2,
             get_changes=lambda: iter(changes1)),
        Mock(Changeset, repos, 2, 'Update', 'joe', t3,
             get_changes=lambda: iter(changes2))
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync(clean=True)
    cursor = self.db.cursor()
    cursor.execute("SELECT time,author,message FROM revision")
    self.assertEquals((to_utimestamp(t1), 'joe', '**empty**'),
                      cursor.fetchone())
    self.assertEquals((to_utimestamp(t2), 'joe', 'Initial Import'),
                      cursor.fetchone())
    self.assertEquals((to_utimestamp(t3), 'joe', 'Update'),
                      cursor.fetchone())
    self.assertEquals(None, cursor.fetchone())
    cursor.execute("""
        SELECT rev,path,node_type,change_type,base_path,base_rev
        FROM node_change
        ORDER BY rev
        """)
    self.assertEquals(('1', 'trunk', 'D', 'A', None, None),
                      cursor.fetchone())
    self.assertEquals(('1', 'trunk/README', 'F', 'A', None, None),
                      cursor.fetchone())
    self.assertEquals(('2', 'trunk/README', 'F', 'E',
                       'trunk/README', '1'), cursor.fetchone())
    self.assertEquals(None, cursor.fetchone())
def test_component_change(self):
    """New ticket owner is updated when the component is changed.
    """
    self._add_component('component3', 'cowner3')
    self._add_component('component4', 'cowner4')
    ticket = Ticket(self.env)
    ticket.populate({
        'reporter': 'reporter1',
        'summary': 'the summary',
        'component': 'component3',
        'owner': 'cowner3',
        'status': 'new',
    })
    tkt_id = ticket.insert()
    # view_time must match the stored changetime or the edit would be
    # rejected as a mid-air collision.
    req = self._create_request(method='POST', args={
        'id': tkt_id,
        'field_component': 'component4',
        'submit': True,
        'action': 'leave',
        'view_time': str(to_utimestamp(ticket['changetime'])),
    })
    # process_request ends with a redirect, raising RequestDone.
    self.assertRaises(RequestDone,
                      self.ticket_module.process_request, req)
    ticket = Ticket(self.env, tkt_id)
    self.assertEqual('component4', ticket['component'])
    self.assertEqual('cowner4', ticket['owner'])
def test_new_component_has_no_owner(self):
    """Ticket is not disowned when the component is changed to a
    component with no owner.
    """
    self._add_component('component3', 'cowner3')
    self._add_component('component4', '')  # ownerless component
    ticket = Ticket(self.env)
    ticket.populate({
        'reporter': 'reporter1',
        'summary': 'the summary',
        'component': 'component3',
        'owner': 'cowner3',
        'status': 'new',
    })
    tkt_id = ticket.insert()
    # view_time must match the stored changetime or the edit would be
    # rejected as a mid-air collision.
    req = MockRequest(self.env, method='POST', args={
        'id': tkt_id,
        'field_component': 'component4',
        'submit': True,
        'action': 'leave',
        'view_time': str(to_utimestamp(ticket['changetime'])),
    })
    # process_request ends with a redirect, raising RequestDone.
    self.assertRaises(RequestDone,
                      self.ticket_module.process_request, req)
    ticket = Ticket(self.env, tkt_id)
    self.assertEqual('component4', ticket['component'])
    # Owner is kept, since the new component has no default owner.
    self.assertEqual('cowner3', ticket['owner'])
def update(self, db=None):
    """Update the version.

    :since 1.0: the `db` parameter is no longer needed and will be
                removed in version 1.1.1
    """
    assert self.exists, "Cannot update non-existent version"
    self.name = simplify_whitespace(self.name)
    if not self.name:
        raise TracError(_("Invalid version name."))

    # Keep the pre-update name so listeners can be told what changed.
    old_name = self._old_name
    with self.env.db_transaction as db:
        self.env.log.info("Updating version '%s'", self.name)
        db("""UPDATE version
              SET name=%s, time=%s, description=%s
              WHERE name=%s
              """, (self.name, to_utimestamp(self.time),
                    self.description, self._old_name))
        if self.name != self._old_name:
            # Update tickets
            db("UPDATE ticket SET version=%s WHERE version=%s",
               (self.name, self._old_name))
            self._old_name = self.name
            TicketSystem(self.env).reset_ticket_fields()

    #todo: add support of old_values for time and description fields
    old_values = dict()
    if self.name != old_name:
        old_values["name"] = old_name
    ResourceSystem(self.env).resource_changed(self, old_values)
def get_history(self, start, stop, realm):
    """Return an iterable of tuples describing changes to attachments
    on a particular object realm.

    The tuples are in the form (change, realm, id, filename, time,
    description, author). `change` can currently only be `created`.

    FIXME: no iterator
    """
    rows = self.env.db_query("""
            SELECT type, id, filename, time, description, author
            FROM attachment WHERE time > %s AND time < %s AND type = %s
            """, (to_utimestamp(start), to_utimestamp(stop), realm))
    for realm, id, filename, ts, description, author in rows:
        # A NULL timestamp is treated as the epoch.
        yield ('created', realm, id, filename, from_utimestamp(ts or 0),
               description, author)
def apply_action_side_effects(self, req, ticket, action):
    """Apply configured side effects after workflow `action` is taken
    on `ticket`.

    Two optional keys in the [ticket-workflow] section are honored:

    * ``<action>.earn_value`` -- a number, or a percentage (``N%``) of
      the ticket's `activity_earn_value` field, recorded as a row in
      the `earn_value` table.
    * ``<action>.update_time`` -- the name of a ticket time field to
      set to the current UTC time.

    Fix: the config values were looked up repeatedly (three times for
    `.earn_value`, twice for `.update_time`); each is now read once.
    """
    config = self.config['ticket-workflow']
    earn_value_def = config.get(action + '.earn_value', '')
    # Note: the emptiness check is intentionally on the raw value, as
    # before, so whitespace-only definitions still record value 0.
    if earn_value_def != '':
        value = 0
        time = to_utimestamp(datetime_now(FixedOffset(0, 'UTC')))
        try:
            evdef = earn_value_def.strip()
            if evdef.isdigit():
                value = float(evdef)
            elif evdef.endswith('%') and 'activity_earn_value' in ticket:
                value = float(ticket['activity_earn_value']) \
                        * float(evdef[:-1]) / 100
        except Exception as e:
            # Best effort: a malformed definition must not abort the
            # workflow action itself.
            self.log.warning(e)
        with self.env.db_transaction as db:
            cursor = db.cursor()
            cursor.execute("""
                INSERT INTO earn_value
                VALUES (%s, %s, %s, %s, %s, %s, %s);
                """, (ticket.id, 'workflow', action, value,
                      req.authname, time, 0))
    update_time_def = config.get(action + '.update_time', '')
    if update_time_def != '':
        field = update_time_def.strip()
        if field in ticket:
            ticket[field] = datetime_now(FixedOffset(0, 'UTC'))
            ticket.save_changes()
def _post_process_request_history(self, req, data):
    """Interleave tag-change events into the page history view.

    Tag changes at least as recent as a page version are inserted
    before that version's row, rendered as pseudo-versions labelled
    '*'.  Assumes `tags_histories` is ordered newest-first, matching
    the page history order -- TODO confirm ordering contract.
    """
    history = []
    page_histories = data.get('history', [])
    resource = data['resource']
    tags_histories = tag_changes(self.env, resource)
    for page_history in page_histories:
        # Drain every tag change not older than this page version.
        while tags_histories and \
                tags_histories[0][0] >= page_history['date']:
            tags_history = tags_histories.pop(0)
            date = tags_history[0]
            author = tags_history[1]
            comment = render_tag_changes(tags_history[2],
                                         tags_history[3])
            # Link back to the same page version, pinning the tag
            # state via the `tags_version` timestamp.
            url = req.href(resource.realm, resource.id,
                           version=page_history['version'],
                           tags_version=to_utimestamp(date))
            history.append({
                'version': '*',   # marker row, not a real version
                'url': url,
                'date': date,
                'author': author,
                'comment': comment,
                'ipnr': ''
            })
        history.append(page_history)
    data.update(
        dict(history=history,
             wiki_to_oneliner=self._wiki_to_oneliner))
def insert_changeset(self, rev, cset):
    """Create revision and node_change records for the given changeset
    instance.

    :param rev: repository-native revision identifier
    :param cset: changeset providing date, author, message and the
                 iterable of changes to cache
    """
    srev = self.db_rev(rev)
    with self.env.db_transaction as db:
        # 1. Attempt to resync the 'revision' table.  In case of
        #    concurrent syncs, only such insert into the `revision`
        #    table will succeed, the others will fail and raise an
        #    exception.
        db("""
            INSERT INTO revision (repos,rev,time,author,message)
            VALUES (%s,%s,%s,%s,%s)
            """, (self.id, srev, to_utimestamp(cset.date),
                  cset.author, cset.message))
        # 2. now *only* one process was able to get there (i.e. there
        #    *shouldn't* be any race condition here)
        for path, kind, action, bpath, brev in cset.get_changes():
            self.log.debug("Caching node change in [%s]: %r", rev,
                           (path, kind, action, bpath, brev))
            # Map API-level kind/action constants to the single-char
            # codes stored in the database.
            kind = _inverted_kindmap[kind]
            action = _inverted_actionmap[action]
            db("""
                INSERT INTO node_change
                    (repos,rev,path,node_type,change_type,base_path,
                     base_rev)
                VALUES (%s,%s,%s,%s,%s,%s,%s)
                """, (self.id, srev, path, kind, action, bpath, brev))
def test_update_milestone(self):
    """Milestone.update() persists due/completed times and the
    description.

    Fix: the datetime arguments used leading-zero integer literals
    (``01``), which are a SyntaxError in Python 3; plain decimal
    literals are equivalent and portable.
    """
    self.env.db_transaction("INSERT INTO milestone (name) VALUES ('Test')")
    milestone = Milestone(self.env, 'Test')
    t1 = datetime(2001, 1, 1, tzinfo=utc)
    t2 = datetime(2002, 2, 2, tzinfo=utc)
    milestone.due = t1
    milestone.completed = t2
    milestone.description = 'Foo bar'
    milestone.update()
    self.assertEqual(
        [('Test', to_utimestamp(t1), to_utimestamp(t2), 'Foo bar',
          self.default_product)],
        self.env.db_query("SELECT * FROM milestone WHERE name='Test'"))
def test_can_serialize_task_to_dict(self):
    """as_dict() exposes the complete wire representation of a task."""
    task = AgiloTicket(self.env, t_type=Type.TASK)
    self.assertNotEqual('fixed', task[Key.RESOLUTION])
    task[Key.SUMMARY] = 'My Summary'
    task.insert()

    # Fields required for every ticket type.
    expected = {
        Key.ID: task.id,
        Key.TYPE: Type.TASK,
        Key.SUMMARY: 'My Summary',
        Key.DESCRIPTION: '',
        Key.STATUS: '',
        Key.RESOLUTION: '',
        Key.REPORTER: '',
        Key.OWNER: '',
    }
    # Task-type-specific fields.
    expected.update({
        Key.SPRINT: '',
        Key.REMAINING_TIME: '',
        Key.RESOURCES: '',
    })
    # Key.Options is not used in order to reduce required data to
    # transfer for a backlog load.
    expected.update({
        'outgoing_links': [],
        'incoming_links': [],
        'time_of_last_change': to_timestamp(task.time_changed),
        'ts': str(task.time_changed),
    })
    if AgiloTicketSystem.is_trac_1_0():
        from trac.util.datefmt import to_utimestamp
        expected['view_time'] = str(to_utimestamp(task.time_changed))
    self.assert_equals(expected, task.as_dict())
def tag_changes(env, resource, start=None, stop=None):
    """Return tag history for one or all tagged Trac resources."""
    if resource:
        # Changelog for a single resource, newest change first.
        rows = env.db_query("""
            SELECT time,author,oldtags,newtags FROM tags_change
            WHERE tagspace=%s AND name=%s
            ORDER BY time DESC
            """, (resource.realm, to_unicode(resource.id)))
        return [(to_datetime(ts), author, old, new)
                for ts, author, old, new in rows]
    # Timeline query across every resource within (start, stop).
    rows = env.db_query("""
        SELECT time,author,tagspace,name,oldtags,newtags
        FROM tags_change
        WHERE time>%s AND time<%s
        """, (to_utimestamp(start), to_utimestamp(stop)))
    return [(to_datetime(ts), author, tagspace, name, old, new)
            for ts, author, tagspace, name, old, new in rows]
def test_existing_page(self):
    """A row inserted directly into the wiki table is fully loaded."""
    when = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    self.env.db_transaction(
        "INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
        ('TestPage', 1, to_utimestamp(when), 'joe', '::1', 'Bla bla',
         'Testing', 0))

    page = WikiPage(self.env, 'TestPage')
    self.assertTrue(page.exists)
    self.assertEqual('TestPage', page.name)
    self.assertEqual(1, page.version)
    self.assertIsNone(page.resource.version)   # FIXME: Intentional?
    self.assertEqual('Bla bla', page.text)
    self.assertEqual(0, page.readonly)
    self.assertEqual('joe', page.author)
    self.assertEqual('Testing', page.comment)
    self.assertEqual(when, page.time)

    history = list(page.get_history())
    self.assertEqual(1, len(history))
    self.assertEqual((1, when, 'joe', 'Testing', '::1'), history[0])

    # Requesting an explicit version pins page and resource versions.
    page = WikiPage(self.env, 'TestPage', 1)
    self.assertEqual(1, page.resource.version)
    self.assertEqual(1, page.version)
    resource = Resource('wiki', 'TestPage')
    page = WikiPage(self.env, resource, 1)
    self.assertEqual(1, page.version)
def test_create_page(self):
    """Saving a brand-new page creates version 1 and notifies
    change listeners."""
    page = WikiPage(self.env)
    page.name = 'TestPage'
    page.text = 'Bla bla'
    when = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    page.save('joe', 'Testing', '::1', when)

    self.assertTrue(page.exists)
    self.assertEqual(1, page.version)
    self.assertEqual(1, page.resource.version)
    self.assertEqual(0, page.readonly)
    self.assertEqual('joe', page.author)
    self.assertEqual('Testing', page.comment)
    self.assertEqual(when, page.time)

    rows = self.env.db_query("""
            SELECT version, time, author, ipnr, text, comment,
                   readonly
            FROM wiki WHERE name=%s
            """, ('TestPage', ))
    self.assertEqual(
        [(1, to_utimestamp(when), 'joe', '::1', 'Bla bla', 'Testing',
          0)], rows)

    listener = TestWikiChangeListener(self.env)
    self.assertEqual(page, listener.added[0])
def test_old_owner_not_old_component_owner(self):
    """The owner is left alone when it wasn't the old component's
    default owner in the first place.
    """
    self._add_component('component3', 'cowner3')
    self._add_component('component4', 'cowner4')
    ticket = Ticket(self.env)
    ticket.populate({
        'reporter': 'reporter1',
        'summary': 'the summary',
        'component': 'component3',
        'owner': 'owner1',
        'status': 'new',
    })
    tkt_id = ticket.insert()
    view_time = str(to_utimestamp(ticket['changetime']))
    req = MockRequest(self.env, method='POST', args={
        'id': tkt_id,
        'field_component': 'component4',
        'submit': True,
        'action': 'leave',
        'view_time': view_time,
    })
    self.assertRaises(RequestDone,
                      self.ticket_module.process_request, req)
    updated = Ticket(self.env, tkt_id)
    self.assertEqual('component4', updated['component'])
    self.assertEqual('owner1', updated['owner'])
def __setitem__(self, name, value):
    """Log crash modifications so the table crashdump_change can be
    updated
    """
    if name in self.values and self.values[name] == value:
        # No-op assignment: nothing to record.
        return
    if name not in self._old:
        # Changed field: remember the original value; time fields are
        # normalized to their stored microsecond-timestamp form.
        if name in self.time_fields:
            self._old[name] = to_utimestamp(self.values.get(name))
        else:
            self._old[name] = self.values.get(name)
    elif self._old[name] == value:
        # Change of field reverted
        del self._old[name]
    if value:
        if isinstance(value, list):
            # Single-element lists are unwrapped; true multi-value
            # assignment is not implemented yet.
            if len(value) == 1:
                value = value[0]
            else:
                raise ValueError(
                    _("Multi-values field %s not supported yet: %s")
                    % (name, value))
        field = [field for field in self.fields
                 if field['name'] == name]
        if field:
            field_type = field[0].get('type')
            if field_type == 'time':
                # Time values are stored as-is.
                pass
            elif field_type != 'textarea':
                # Scalar text fields are stripped; textareas keep
                # their whitespace.
                if isinstance(value, basestring):
                    value = value.strip()
    self.values[name] = value
def _insert_rows(self):
    """Populate notify_subscription with a fixed set of test rows.

    Every row gets distinct, predictable time/changetime values
    derived from one fixed base timestamp.
    """
    rows = [
        ('joe', 1, 'email', 'text/plain', 1, 'always', 'EmailSubscriber1'),
        ('joe', 1, 'email', 'text/html', 2, 'always', 'EmailSubscriber2'),
        ('joe', 1, 'email', 'text/plain', 3, 'always', 'EmailSubscriber3'),
        ('joe', 1, 'xmpp', 'text/html', 1, 'always', 'XmppSubscriber1'),
        ('joe', 1, 'xmpp', 'text/plain', 2, 'never', 'XmppSubscriber2'),
        ('joe', 1, 'xmpp', 'text/html', 3, 'never', 'XmppSubscriber3'),
        ('joe', 1, 'irc', 'text/plain', 1, 'never', 'IrcSubscriber1'),
        ('joe', 1, 'irc', 'text/plain', 2, 'never', 'IrcSubscriber2'),
        ('joe', 1, 'irc', 'text/plain', 3, 'never', 'IrcSubscriber3'),
        ('jes', 1, 'email', 'text/html', 1, 'always', 'EmailSubscriber1'),
        ('jes', 1, 'email', 'text/plain', 2, 'never', 'EmailSubscriber2'),
        ('jes', 1, 'email', 'text/html', 3, 'always', 'EmailSubscriber3'),
        ('jan', 1, 'xmpp', 'text/plain', 1, 'always', 'XmppSubscriber1'),
        ('jan', 1, 'xmpp', 'text/html', 2, 'never', 'XmppSubscriber2'),
        ('jan', 1, 'xmpp', 'text/plain', 3, 'never', 'XmppSubscriber3'),
        ('jim', 1, 'irc', 'text/html', 1, 'always', 'IrcSubscriber1'),
        ('jim', 1, 'irc', 'text/plain', 2, 'never', 'IrcSubscriber2'),
        ('jim', 1, 'irc', 'text/html', 3, 'always', 'IrcSubscriber3'),
    ]
    base = to_utimestamp(datetime(2016, 2, 3, 12, 34, 56, 987654, utc))
    values = [(base + idx, base + idx * 2) + row
              for idx, row in enumerate(rows)]
    with self.env.db_transaction as db:
        cursor = db.cursor()
        cursor.executemany(
            """
            INSERT INTO notify_subscription (
                time, changetime, sid, authenticated, distributor,
                format, priority, adverb, class)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)""", values)
def add_timestamp(self, ticket, parameters):
    """Add the edit-collision timestamp parameter expected by the
    running Trac version, unless the caller supplied one already.
    """
    changetime = ticket.time_changed
    if trac_version < Version(major=0, minor=12):
        # Trac 0.11 and previous versions only used timestamps without
        # microseconds.
        parameter_name = 'ts'
        timestamp = str(changetime.replace(microsecond=0))
    elif trac_version < Version(major=1, minor=0):
        # Trac 0.12 switched to millisecond precision in its
        # timestamps.
        parameter_name = 'ts'
        timestamp = str(changetime)
    else:
        # Trac 1.0 changed the parameter name and ".time_changed" now
        # returns a datetime instance.  "to_utimestamp()" was added in
        # Trac revision 9210 (Trac 1.0dev), so import it here rather
        # than at class level to avoid a hard dependency (for tests)
        # on Trac 1.x.
        from trac.util.datefmt import to_utimestamp
        parameter_name = 'view_time'
        timestamp = str(to_utimestamp(changetime))
    if parameter_name not in parameters:
        parameters[parameter_name] = timestamp
    return parameters
def test_component_change(self):
    """Changing the component hands the ticket to the new component's
    default owner."""
    self._add_component('component3', 'cowner3')
    self._add_component('component4', 'cowner4')
    ticket = insert_ticket(self.env, reporter='reporter1',
                           summary='the summary',
                           component='component3', owner='cowner3',
                           status='new')
    view_time = str(to_utimestamp(ticket['changetime']))
    req = MockRequest(self.env, method='POST', args={
        'id': ticket.id,
        'field_component': 'component4',
        'submit': True,
        'action': 'leave',
        'view_time': view_time,
    })
    self.assertRaises(RequestDone,
                      self.ticket_module.process_request, req)
    updated = Ticket(self.env, ticket.id)
    self.assertEqual('component4', updated['component'])
    self.assertEqual('cowner4', updated['owner'])
def test_create_page(self):
    """Saving a new page writes exactly one row to the wiki table."""
    page = WikiPage(self.env)
    page.name = 'TestPage'
    page.text = 'Bla bla'
    when = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    page.save('joe', 'Testing', '::1', when)

    self.assertEqual(True, page.exists)
    self.assertEqual(1, page.version)
    self.assertEqual(1, page.resource.version)
    self.assertEqual(0, page.readonly)
    self.assertEqual('joe', page.author)
    self.assertEqual('Testing', page.comment)
    self.assertEqual(when, page.time)

    cursor = self.db.cursor()
    cursor.execute(
        "SELECT version,time,author,ipnr,text,comment,"
        "readonly FROM wiki WHERE name=%s", ('TestPage', ))
    self.assertEqual(
        (1, to_utimestamp(when), 'joe', '::1', 'Bla bla', 'Testing', 0),
        cursor.fetchone())

    listener = TestWikiChangeListener(self.env)
    self.assertEqual(page, listener.added[0])
def _do_check_and_send(self):
    """Send every not-yet-sent reminder whose due time has passed."""
    cutoff = to_utimestamp(to_datetime(None))   # "now" in µs precision
    rows = self.env.db_query("""
        SELECT id, ticket, author, origin, description
        FROM ticketreminder
        WHERE reminded=0 AND %s>=time
        """, (cutoff, ))
    for row in rows:
        self._do_send(*row)
def get_change(self, cnum=None, cdate=None, db=None):
    """Return a ticket change by its number or date.

    :since 1.0: the `db` parameter is no longer needed and will be
                removed in version 1.1.1
    """
    if cdate is None:
        # Resolve the change number to its timestamp first.
        row = self._find_change(cnum)
        if not row:
            return
        cdate = from_utimestamp(row[0])
    ts = to_utimestamp(cdate)
    fields = {}
    change = {'date': cdate, 'fields': fields}
    for field, author, old, new in self.env.db_query("""
            SELECT field, author, oldvalue, newvalue
            FROM ticket_change WHERE ticket=%s AND time=%s
            """, (self.id, ts)):
        fields[field] = {'author': author, 'old': old, 'new': new}
        if field == 'comment':
            # The comment row's author is authoritative for the
            # change as a whole.
            change['author'] = author
        elif not field.startswith('_'):
            # Otherwise fall back to the first non-internal field's
            # author (setdefault keeps the earliest one).
            change.setdefault('author', author)
    # Implicitly returns None when no rows matched.
    if fields:
        return change
def make_req(authname):
    # Build a POST that re-submits ticket #1 unchanged except for the
    # (closure-provided) comment; `view_time` must equal the ticket's
    # current changetime or the edit is rejected as a collision.
    change_time = Ticket(self.env, 1)['changetime']
    return MockRequest(
        self.env, authname=authname, method='POST',
        path_info='/ticket/1',
        args={'comment': comment, 'action': 'leave', 'submit': True,
              'view_time': unicode(to_utimestamp(change_time))})
def _event_data(self, provider, event):
    """Compose the timeline event data from the event tuple and
    prepared provider methods."""
    if len(event) == 6:
        # 0.10 events: pre-rendered markup, served from a lookup.
        kind, url, title, date, author, markup = event
        data = {'url': url, 'title': title, 'description': markup}
        render = lambda field, context: data.get(field)
    else:
        # 0.11 events, optionally carrying their own provider.
        if len(event) == 5:   # with special provider
            kind, date, author, data, provider = event
        else:
            kind, date, author, data = event
        render = lambda field, context: \
            provider.render_timeline_event(context, field, event)
    if not isinstance(date, datetime):
        # Legacy providers may hand back plain UNIX timestamps.
        date = datetime.fromtimestamp(date, utc)
    return {'kind': kind, 'author': author, 'date': date,
            'dateuid': to_utimestamp(date), 'render': render,
            'event': event, 'data': data, 'provider': provider}
def do_import(db): cursor = db.cursor() # Make sure we don't insert the exact same page twice cursor.execute("SELECT text FROM wiki WHERE name=%s " "ORDER BY version DESC LIMIT 1", (title,)) old = list(cursor) if old and title in create_only: printout(_(' %(title)s already exists', title=title)) result[0] = False return if old and data == old[0][0]: printout(_(' %(title)s is already up to date', title=title)) result[0] = False return if replace and old: cursor.execute("UPDATE wiki SET text=%s WHERE name=%s " " AND version=(SELECT max(version) FROM wiki " " WHERE name=%s)", (data, title, title)) else: cursor.execute("INSERT INTO wiki(version,name,time,author," " ipnr,text) " "SELECT 1+COALESCE(max(version),0),%s,%s," " 'trac','127.0.0.1',%s FROM wiki " "WHERE name=%s", (title, to_utimestamp(datetime.now(utc)), data, title)) if not old: del WikiSystem(self.env).pages
def create_message_id(env, targetid, from_email, time, more=''):
    """Generate a predictable, but sufficiently unique message ID."""
    # The ID is derived from stable properties so repeated events for
    # the same target/time hash to the same value.
    source = '%s.%s.%d.%s' % (env.project_url.encode('utf-8'),
                              targetid, to_utimestamp(time),
                              more.encode('ascii', 'ignore'))
    digest = md5(source).hexdigest()
    host = from_email[from_email.find('@') + 1:]
    return '<%03d.%s@%s>' % (len(source), digest, host)
def create_message_id(env, targetid, from_email, time, more=None):
    """Generate a predictable, but sufficiently unique message ID.

    In case you want to set the "Message ID" header, this convenience
    function will generate one by running a hash algorithm over a
    number of properties.

    :param env: the `Environment`
    :param targetid: a string that identifies the target, like
                     `NotificationEvent.target`
    :param from_email: the email address that the message is sent from
    :param time: a Python `datetime`
    :param more: a string that contains additional information that
                 makes this message unique

    Fix: the bare ``except:`` around `hashlib.new` also swallowed
    `KeyboardInterrupt`/`SystemExit`; `hashlib.new` raises
    `ValueError` for an unsupported algorithm, so catch exactly that.
    """
    items = [env.project_url, targetid, to_utimestamp(time)]
    if more is not None:
        items.append(more.encode('ascii', 'ignore'))
    source = b'.'.join(
        item if isinstance(item, bytes) else str(item).encode('utf-8')
        for item in items)
    hash_type = NotificationSystem(env).message_id_hash
    try:
        h = hashlib.new(hash_type)
    except ValueError:
        raise ConfigurationError(
            _("Unknown hash type '%(type)s'", type=hash_type))
    h.update(source)
    host = from_email[from_email.find('@') + 1:]
    return '<%03d.%s@%s>' % (len(source), h.hexdigest(), host)
def test_component_change_and_owner_change(self):
    """An explicit owner reassignment wins over the new component's
    default owner.
    """
    self._add_component('component3', 'cowner3')
    self._add_component('component4', 'cowner4')
    ticket = Ticket(self.env)
    ticket.populate({
        'reporter': 'reporter1',
        'summary': 'the summary',
        'component': 'component3',
        'status': 'new',
    })
    tkt_id = ticket.insert()
    view_time = str(to_utimestamp(ticket['changetime']))
    req = self._create_request(method='POST', args={
        'id': tkt_id,
        'field_component': 'component4',
        'submit': True,
        'action': 'change_owner',
        'action_change_owner_reassign_owner': 'owner1',
        'view_time': view_time,
    })
    self.assertRaises(RequestDone,
                      self.ticket_module.process_request, req)
    updated = Ticket(self.env, tkt_id)
    self.assertEqual('component4', updated['component'])
    self.assertEqual('owner1', updated['owner'])
def test_submit_with_time_field(self):
    """A custom 'time' field rejects unparsable input with a warning
    and stores a valid ISO-8601 datetime as a tz-aware value.
    """
    self.env.config.set('ticket-custom', 'timefield', 'time')
    self._insert_ticket(summary='Time fields', timefield='')
    ticket = Ticket(self.env, 1)
    args_base = {'submit': '*', 'action': 'leave', 'id': '1',
                 'field_summary': ticket['summary'],
                 'field_reporter': ticket['reporter'],
                 'field_description': ticket['description'],
                 'view_time': str(to_utimestamp(ticket['changetime']))}
    for field in ticket.fields:
        name = field['name']
        args_base['field_%s' % name] = ticket[name] or ''

    # An unparsable value must warn and leave the field unset.
    args = dict(args_base, field_timefield='invalid datetime')
    req = MockRequest(self.env, method='POST', path_info='/ticket/1',
                      args=args)
    self.assertTrue(self.ticket_module.match_request(req))
    self.ticket_module.process_request(req)
    warnings = req.chrome['warnings']
    self.assertNotEqual([], warnings)
    self.assertEqual(1, len(warnings))
    self.assertIn('is an invalid date, or the date format is not known.',
                  unicode(warnings[0]))
    ticket = Ticket(self.env, 1)
    self.assertEqual(None, ticket['timefield'])

    # A valid UTC datetime is stored as a tz-aware datetime.
    args = dict(args_base, field_timefield='2016-01-02T12:34:56Z')
    req = MockRequest(self.env, method='POST', path_info='/ticket/1',
                      args=args)
    self.assertTrue(self.ticket_module.match_request(req))
    self.assertRaises(RequestDone,
                      self.ticket_module.process_request, req)
    ticket = Ticket(self.env, 1)
    self.assertEqual(datetime(2016, 1, 2, 12, 34, 56, tzinfo=utc),
                     ticket['timefield'])
def _implementation(db):
    # Apply the batched field changes to every selected ticket inside
    # one transaction.  Relies on closure variables: selectedTickets,
    # tickets, new_values, comment, modify_changetime,
    # send_notifications, req, env, log.
    for id in selectedTickets:
        if id in tickets:
            t = Ticket(env, int(id))
            new_changetime = datetime.now(utc)

            log_msg = ""
            if not modify_changetime:
                # Remember the pre-edit changetime so it can be
                # restored after saving.
                original_changetime = to_utimestamp(t.time_changed)

            _values = new_values.copy()
            # List-like fields are merged with the existing value
            # instead of being overwritten.
            for field in [f for f in new_values.keys() \
                          if f in self._fields_as_list]:
                _values[field] = self._merge_keywords(
                    t.values[field], new_values[field], log)

            t.populate(_values)
            t.save_changes(req.authname, comment, when=new_changetime)

            if send_notifications:
                tn = TicketNotifyEmail(env)
                tn.notify(t, newticket=0, modtime=new_changetime)

            if not modify_changetime:
                self._reset_changetime(env, original_changetime, t)
                log_msg = "(changetime not modified)"

            log.debug('BatchModifyPlugin: saved changes to #%s %s'
                      % (id, log_msg))
def move(cls, env, rule_id, priority, sid=None, authenticated=None):
    """Move subscription rule `rule_id` to the 1-based `priority`
    slot among the owner's rules for the same distributor,
    renumbering the others.

    When `sid`/`authenticated` are given, the rule must also belong
    to that session or the call is a no-op.
    """
    with env.db_transaction as db:
        kwargs = {'id': rule_id}
        if sid is not None or authenticated is not None:
            kwargs['sid'] = sid
            kwargs['authenticated'] = 1 if authenticated else 0
        # for/else: `else` runs only if no matching rule was found.
        for sub in cls._find(env, **kwargs):
            break
        else:
            return
        subs = cls.find_by_sid_and_distributor(env, sub['sid'],
                                               sub['authenticated'],
                                               sub['distributor'])
        if not (1 <= priority <= len(subs)):
            # Target priority out of range: no-op.
            return
        for idx, sub in enumerate(subs):
            if sub['id'] == rule_id:
                break
        else:
            return
        # Reposition the rule, then rewrite only the priorities that
        # actually changed.
        subs.insert(priority - 1, subs.pop(idx))
        now = to_utimestamp(datetime_now(utc))
        values = [(new_priority, now, sub['id'])
                  for new_priority, sub in enumerate(subs, 1)
                  if new_priority != sub['priority']]
        db.executemany("""
            UPDATE notify_subscription
            SET priority=%s, changetime=%s
            WHERE id=%s
            """, values)
def test_action_side_effects_applied(self):
    """A workflow operation's side effect runs exactly once when its
    action is submitted."""
    self.env.config.set(
        'ticket', 'workflow',
        'ConfigurableTicketWorkflow, MockTicketOperation')
    ticket = self._insert_ticket(reporter='reporter',
                                 summary='the summary', status='new')
    change_time = Ticket(self.env, ticket.id)['changetime']
    req = MockRequest(self.env, method='POST', path_info='/ticket/1',
                      args={'submit': True,
                            'action': 'mock',
                            'id': '1',
                            'view_time': str(to_utimestamp(change_time))})

    operation = self.mock_ticket_operation(self.env)
    self.assertEqual(0, operation.side_effect_count)
    self.assertTrue(self.ticket_module.match_request(req))
    with self.assertRaises(RequestDone):
        self.ticket_module.process_request(req)
    self.assertEqual(1, operation.side_effect_count)
    self.assertIn(('DEBUG', "Side effect for MockTicketOperation"),
                  self.env.log_messages)
def do_transaction(db): cursor = db.cursor() # 1.1 Attempt to resync the 'revision' table self.log.info("Trying to sync revision [%s]", next_youngest) cset = self.repos.get_changeset(next_youngest) try: cursor.execute( """ INSERT INTO revision (repos,rev,time,author,message) VALUES (%s,%s,%s,%s,%s) """, (self.id, srev, to_utimestamp( cset.date), cset.author, cset.message)) except Exception, e: # *another* 1.1. resync attempt won self.log.warning('Revision %s already cached: %r', next_youngest, e) # also potentially in progress, so keep ''previous'' # notion of 'youngest' self.repos.clear(youngest_rev=youngest) # FIXME: This aborts a containing transaction db.rollback() exit[0] = True return