def timeval(self, name, default):
    """Resolve a time-interval keyword argument to ``(utimestamp, label)``.

    When ``name`` is present in ``self.kwargs`` its value is either a bare
    integer number of seconds or a float with a one-letter unit suffix
    (looked up in ``self.tunits`` / ``self.tunits_name``, e.g. ``"1.5h"``).
    The interval is counted backwards from the current time.  When the
    argument is absent, the pre-resolved ``default`` pair is returned.

    :raises TracError: if the argument value cannot be parsed.
    """
    if name in self.kwargs:
        try:
            val = self.kwargs[name]
            try:
                # Plain integer: a number of seconds.
                val = int(val)
                text = str(val) + self.tunits_name['s'] + ['s', ''][val == 1]
            except (ValueError, TypeError):
                # Otherwise expect a float with a unit suffix, e.g. "1.5h".
                unit = val[-1].lower()
                val = float(val[:-1])
                # '%g' drops an insignificant trailing '.0' without mangling
                # other digits (the old str(val).strip('.0') turned 10.0
                # into '1' and 0.5 into '5', since strip() removes a *set*
                # of characters, not a suffix).
                text = ('%g' % val) + self.tunits_name[unit] \
                       + ['s', ''][val == 1]
                val = int(val * self.tunits[unit])
            # The interval is counted backwards from the current time.
            val = int(unixtime()) - val
            # mod for trac 0.12
            nval = to_utimestamp(to_datetime(val))
        except Exception:
            # Anything unparseable (bad unit, bad float, missing key)
            # surfaces as a single user-facing error.
            raise TracError("Invalid value '%s' for argument '%s'! "
                            % (self.kwargs[name], name))
        return (nval, text)
    else:
        defval, deftext = default
        ndef = to_utimestamp(to_datetime(defval))
        return (ndef, deftext)
def _process_add(self, req, ticket):
    """Handle the "add reminder" form for a ticket.

    On a valid POST the reminder is inserted and the request is redirected
    back to the ticket (``req.redirect`` raises, ending the request).
    Otherwise the add form template is rendered.
    """
    if req.method == "POST" and self._validate_add(req):
        if req.args.get('reminder_type') == 'interval':
            # Relative reminder: midnight today + n units.
            time = clear_time(to_datetime(None))
            delta = _time_intervals[req.args.get('unit')](req.args.get('interval'))
            time += delta
            time = to_utimestamp(time)
        else:
            # Absolute reminder: parse the user-supplied date.
            time = to_utimestamp(parse_date(req.args.get('date')))
        origin = to_utimestamp(to_datetime(None))

        self.env.db_transaction("""
            INSERT INTO ticketreminder (ticket, time, author, origin, reminded, description)
            VALUES (%s, %s, %s, %s, 0, %s)
            """, (ticket.id, time, get_reporter_id(req, 'author'), origin,
                  req.args.get('description')))

        add_notice(req, "Reminder has been added.")
        # redirect() raises RequestDone, so nothing below runs on success.
        req.redirect(get_resource_url(self.env, ticket.resource, req.href)
                     + "#reminders")

    # GET, or POST that failed validation: render the add form.
    add_script(req, 'ticketreminder/js/ticketreminder.js')
    data = {
        'ticket': ticket,
        'date_hint': get_date_format_hint(),
    }
    return ("ticket_reminder_add.html", data, None)
def expand_macro(self, formatter, name, content):
    """Render the LastVoted, TopVoted and VoteList wiki macros.

    Supported macro arguments: ``compact`` (flag), ``top=<n>`` (0 = no
    limit), ``realm=<realm>`` (TopVoted only), ``up-only`` (TopVoted only).
    Returns an HTML <ul> fragment, or None without VOTE_VIEW permission.
    """
    env = formatter.env
    req = formatter.req
    if not 'VOTE_VIEW' in req.perm:
        return
    # Simplify function calls.
    format_author = partial(Chrome(self.env).format_author, req)
    if not content:
        args = []
        compact = None
        kw = {}
        top = 5
    else:
        args, kw = parse_args(content)
        compact = 'compact' in args and True
        top = as_int(kw.get('top'), 5, min=0)

    if name == 'LastVoted':
        lst = tag.ul()
        for i in self.get_votes(req, top=top):
            # i = (realm, resource_id, vote count, author, timestamp)
            resource = Resource(i[0], i[1])
            # Annotate who and when.
            voted = ('by %s at %s'
                     % (format_author(i[3]),
                        format_datetime(to_datetime(i[4]))))
            lst(tag.li(tag.a(
                get_resource_description(env, resource,
                                         compact and 'compact' or 'default'),
                href=get_resource_url(env, resource, formatter.href),
                title=(compact and '%+i %s' % (i[2], voted) or None)),
                (not compact and Markup(' %s %s' % (tag.b('%+i' % i[2]),
                                                    voted)) or '')))
        return lst
    elif name == 'TopVoted':
        realm = kw.get('realm')
        lst = tag.ul()
        for i in self.get_top_voted(req, realm=realm, top=top):
            # Results are sorted by score, so stop at the first non-positive
            # entry when only upvoted resources are requested.
            if 'up-only' in args and i[2] < 1:
                break
            resource = Resource(i[0], i[1])
            lst(tag.li(tag.a(
                get_resource_description(env, resource,
                                         compact and 'compact' or 'default'),
                href=get_resource_url(env, resource, formatter.href),
                title=(compact and '%+i' % i[2] or None)),
                (not compact and ' (%+i)' % i[2] or '')))
        return lst
    elif name == 'VoteList':
        lst = tag.ul()
        # Votes for the resource the current page belongs to.
        resource = resource_from_path(env, req.path_info)
        for i in self.get_votes(req, resource, top=top):
            vote = ('at %s' % format_datetime(to_datetime(i[4])))
            lst(tag.li(
                compact and format_author(i[3]) or
                Markup(u'%s by %s %s' % (tag.b('%+i' % i[2]),
                                         tag(format_author(i[3])), vote)),
                title=(compact and '%+i %s' % (i[2], vote) or None)))
        return lst
def _parse_build(self, res):
    """Parse a buildbot JSON build description into a plain dict.

    :param res: a file-like HTTP response whose body is buildbot's JSON
                build document.
    :return: dict with keys ``builder``, ``status``, ``start``, ``num``,
             and optionally ``finish``, ``rev``, ``error``, ``error_log``.
    """
    data = json.loads(res.read())
    # 'results' only appears, as an integer result code, once the build
    # has finished; 0 means success.
    if 'results' not in data or not isinstance(data['results'], int):
        status = "running"
    else:
        status = "successful" if data['results'] == 0 else "failed"
    build = {
        'builder': data['builderName'],
        'status': status,
        'start': to_datetime(int(data['times'][0]), utc),
        'num': data['number'],
    }
    # The second timestamp is null (not a float) while still running.
    if len(data['times']) > 1 and isinstance(data['times'][1], float):
        build['finish'] = to_datetime(int(data['times'][1]), utc)
    for prop in data['properties']:
        if prop[0] == 'got_revision' and prop[1] != "":
            build["rev"] = prop[1]
            break
    if status == "failed":
        build['error'] = ', '.join(data['text'])
        try:
            for step in data['steps']:
                # buildbot result code 2 == FAILURE; take its first log.
                if "results" in step and step["results"][0] == 2:
                    build['error_log'] = step['logs'][0][1]
                    break
        except (IndexError, KeyError):
            # Best effort: a malformed step simply yields no error_log.
            pass
    return build
def get_search_results(self, req, keywords, filters):
    """Search doxygen/phpdoc 'search.idx' indexes for the given keywords.

    Yields Trac search tuples ``(url, name, date, author, excerpt)`` for
    matches found in each per-project documentation directory and in the
    common documentation directory.
    """
    self.log.debug("PHPDOCBUG: kw=%s f=%s" % (keywords, filters))
    if 'phpdoc' not in filters:
        return
    # We have to search for the raw bytes...
    keywords = [k.encode(self.encoding) for k in keywords]
    # Pre-bind 'doc' so the log line after the loop cannot raise a
    # NameError when base_path contains no entries.
    doc = ''
    for doc in os.listdir(self.base_path):
        # Search in documentation directories
        path = os.path.join(self.base_path, doc)
        path = os.path.join(path, self.html_output)
        self.log.debug("looking in doc (%s) dir: %s:" % (doc, path))
        if os.path.isdir(path):
            index = os.path.join(path, 'search.idx')
            if os.path.exists(index):
                creation = os.path.getctime(index)
                for result in self._search_in_documentation(doc, keywords):
                    result['url'] = req.href.phpdoc(doc) + '/' \
                                    + result['url']
                    yield result['url'], result['name'], \
                          to_datetime(creation), 'phpdoc', None
    # Search in common documentation directory
    index = os.path.join(self.base_path, self.html_output)
    index = os.path.join(index, 'search.idx')
    self.log.debug("looking in doc (%s) search.idx: %s:" % (doc, index))
    if os.path.exists(index):
        creation = os.path.getctime(index)
        for result in self._search_in_documentation('', keywords):
            result['url'] = req.href.phpdoc() + '/' + result['url']
            yield result['url'], result['name'], to_datetime(creation), \
                  'phpdoc', None
def _fetch_fields(self, version=0):
    """ Returns a dict with field/value combinations for the content
    of a specific version of a blog post, or last/current version
    if version is 0.
    Returns emtpy dict if no such post or post/version exists. """
    self.versions = self.get_versions()
    if not self.versions or (version and version not in self.versions):
        # No blog post with the name exists
        return {}
    version = version or self.versions[-1]
    db = self.env.get_db_cnx()
    cur = db.cursor()
    cur.execute("SELECT title, body, publish_time, version_time, "
                "version_comment, version_author, author, categories "
                "FROM fullblog_posts "
                "WHERE name=%s AND version=%s", (self.name, version))
    fields = {}
    for (title, body, publish_time, version_time, version_comment,
         version_author, author, categories) in cur:
        fields = {
            'version': version,
            'title': title,
            'body': body,
            'publish_time': to_datetime(publish_time, utc),
            'version_time': to_datetime(version_time, utc),
            'version_comment': version_comment,
            'version_author': version_author,
            'author': author,
            'categories': categories,
            'category_list': set(_parse_categories(categories)),
        }
    return fields
def test_to_datetime_microsecond_negative_timestamps(self):
    """Negative microsecond timestamps convert with correct sub-second part."""
    # Work around issue1646728 in Python 2.4
    expected = datetime.datetime.fromtimestamp(-2345, datefmt.localtz) \
               - datetime.timedelta(seconds=0.678912)
    self.assertEqual(datefmt.to_datetime(-2345678912).microsecond,
                     321088)  # 1000000 - 678912
    self.assertEqual(datefmt.to_datetime(-2345678912), expected)
    # int, long and float inputs must all agree (Python 2 long literal).
    self.assertEqual(datefmt.to_datetime(-2345678912L), expected)
    self.assertEqual(datefmt.to_datetime(-2345678912.0), expected)
def get_work_log(self, pid, username=None, mode='all'):
    """Return work-log entries for a project as a list of dicts.

    :param pid: project id
    :param username: required when ``mode == 'user'``
    :param mode: 'user' (one worker's entries), 'latest' (most recent
                 entry per worker), or anything else for all entries
    :return: list of dicts with keys user, starttime, endtime, delta,
             ticket, summary, status, comment
    """
    db = self.env.get_read_db()
    cursor = db.cursor()
    if mode == 'user':
        assert username is not None
        cursor.execute('SELECT wl.worker, wl.starttime, wl.endtime, wl.ticket, t.summary, t.status, wl.comment '
                       'FROM work_log wl '
                       'JOIN ticket t ON wl.ticket=t.id '
                       'WHERE t.project_id=%s AND wl.worker=%s '
                       'ORDER BY wl.lastchange DESC', (pid, username))
    elif mode == 'latest':
        # Window function picks each worker's most recent entry.
        cursor.execute('''
            SELECT worker, starttime, endtime, ticket, summary, status, comment
            FROM (
                SELECT wl.worker, wl.starttime, wl.endtime, wl.ticket, wl.comment, wl.lastchange,
                       MAX(wl.lastchange) OVER (PARTITION BY wl.worker) latest,
                       t.summary, t.status
                FROM work_log wl
                JOIN ticket t ON wl.ticket=t.id AND project_id=%s
            ) wll
            WHERE lastchange=latest
            ORDER BY lastchange DESC, worker
            ''', (pid,))
    else:
        cursor.execute('SELECT wl.worker, wl.starttime, wl.endtime, wl.ticket, t.summary, t.status, wl.comment '
                       'FROM work_log wl '
                       'JOIN ticket t ON wl.ticket=t.id '
                       'WHERE t.project_id=%s '
                       'ORDER BY wl.lastchange DESC, wl.worker', (pid,))
    rv = []
    for user, starttime, endtime, ticket, summary, status, comment in cursor:
        started = to_datetime(starttime)
        # endtime == 0 marks work that is still in progress.
        if endtime != 0:
            finished = to_datetime(endtime)
            delta = 'Worked for %s (between %s and %s)' % (
                pretty_timedelta(started, finished),
                format_datetime(started), format_datetime(finished))
        else:
            finished = 0
            delta = 'Started %s ago (%s)' % (
                pretty_timedelta(started), format_datetime(started))
        rv.append({'user': user,
                   'starttime': started,
                   'endtime': finished,
                   'delta': delta,
                   'ticket': ticket,
                   'summary': summary,
                   'status': status,
                   'comment': comment})
    return rv
def format_date(self, content, propname, d, force_date=False, tzinfo=None):
    """Write an iCalendar date or date-time property line to ``content``.

    datetime values are converted to the target timezone (``tzinfo`` or
    ``localtz``) and written as a full DATE-TIME, or as a DATE when
    ``force_date`` is true.  Plain date objects are always written as a
    DATE with no timezone conversion.
    """
    if isinstance(d, datetime.datetime):
        tz = tzinfo or localtz
        t = to_datetime(d, tzinfo).astimezone(tz)
        # Previously a truthy-but-not-True force_date fell through to the
        # date-only branch without timezone conversion; any truthy value
        # now forces the DATE form consistently.
        if force_date:
            content.write("%s;VALUE=DATE:%s\r\n"
                          % (propname, t.strftime("%Y%m%d")))
        else:
            content.write("%s:%s\r\n"
                          % (propname, t.strftime("%Y%m%dT%H%M%S")))
    else:
        # date objects carry no time of day, so no conversion applies.
        content.write("%s;VALUE=DATE:%s\r\n"
                      % (propname, d.strftime("%Y%m%d")))
def better_parse_date(text, tzinfo=None):
    """Parse a user-entered date/time string into a timezone-aware datetime.

    Accepts the literal "now", ISO 8601 strings (with optional UTC offset,
    which overrides ``tzinfo``), and several locale-dependent strptime
    formats.  DST is resolved through the tzinfo's ``localize`` method
    when available (e.g. pytz timezones).

    :raises TracError: when the text cannot be parsed, or the resulting
                       date cannot round-trip through a timestamp.
    """
    tzinfo = tzinfo or localtz
    if text == "now":  # TODO: today, yesterday, etc.
        return datetime.now(utc)
    tm = None
    text = text.strip()
    # normalize ISO time
    match = datefmt._ISO_8601_RE.match(text)
    if match:
        try:
            g = match.groups()
            years = g[0]
            months = g[1] or "01"
            days = g[2] or "01"
            hours, minutes, seconds = [x or "00" for x in g[3:6]]
            z, tzsign, tzhours, tzminutes = g[6:10]
            if z:
                # An explicit offset in the string overrides tzinfo.
                tz = timedelta(hours=int(tzhours or "0"),
                               minutes=int(tzminutes or "0")).seconds / 60
                if tz == 0:
                    tzinfo = utc
                else:
                    tzinfo = datefmt.FixedOffset(
                        tzsign == "-" and -tz or tz,
                        "%s%s:%s" % (tzsign, tzhours, tzminutes))
            tm = strptime("%s " * 6 % (years, months, days,
                                       hours, minutes, seconds),
                          "%Y %m %d %H %M %S ")
        except ValueError:
            pass
    else:
        # Fall back to locale-dependent and fixed formats, first hit wins.
        for format in ["%x %X", "%x, %X", "%X %x", "%X, %x", "%x", "%c",
                       "%b %d, %Y"]:
            try:
                tm = strptime(text, format)
                break
            except ValueError:
                continue
    if tm == None:
        hint = datefmt.get_date_format_hint()
        raise TracError(
            '"%s" is an invalid date, or the date format '
            'is not known. Try "%s" instead.' % (text, hint),
            "Invalid Date",
        )
    if not hasattr(tzinfo, "localize"):
        # This is a tzinfo define by trac which don't have to deal with dst
        dt = datetime(*(tm[0:6] + (0, tzinfo)))
    else:
        # We need to detect daylight saving correctly - see #...
        dt = tzinfo.localize(datetime(*tm[0:6]))
    # Make sure we can convert it to a timestamp and back - fromtimestamp()
    # may raise ValueError if larger than platform C localtime() or gmtime()
    try:
        datefmt.to_datetime(datefmt.to_timestamp(dt), tzinfo)
    except ValueError:
        raise TracError(
            'The date "%s" is outside valid range. '
            "Try a date closer to present time." % (text,),
            "Invalid Date"
        )
    return dt
def set_status_dt(env, ticket_id, new_status=None, new_time=None, db=None):
    """Map a ticket's status-change history onto custom date fields.

    The [querychart] 'order' option lists statuses (optionally as
    'status:customfield') in priority order.  For each status change the
    matching custom field gets the change date; lower-priority fields are
    reset so each field reflects the latest relevant transition.
    Returns a dict of {custom_field: formatted date or None}.
    """
    order_lst = env.config.getlist('querychart', 'order')
    order = []
    custom_fields = {}
    for m in order_lst:
        ms = m.split(':')
        if len(ms) >= 2:
            # 'status:field' — remember which custom field tracks it.
            order.append(ms[0])
            custom_fields[ms[0]] = ':'.join(ms[1:])
        else:
            order.append(m)
    if not db:
        db = env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute("SELECT newvalue,time,ticket ,field from ticket_change where ticket=%s"
                   " and field=%s"
                   " order by time", (ticket_id, 'status'))
    history = [(row[0], to_datetime(row[1])) for row in cursor]
    # Optionally include a pending (not yet saved) status change.
    if new_status:
        history.append((new_status, new_time))
    result = {}
    for new_status, time in history:
        #set date by priority of 'order'
        #if status date (higher priority than next status) is none, set date to higher priority.
        #and set none to lower priority status date.
        if not new_status in order:
            continue
        idx = order.index(new_status)
        formated_date = format_date(to_datetime(time))
        # Walk fields from lowest to highest priority, updating each
        # relative to the position of the current status.
        for m_idx in range(len(order)-1, -1, -1):
            if not order[m_idx] in custom_fields:
                continue
            m_field = custom_fields[order[m_idx]]
            if not m_field in result:
                result[m_field] = None
            if idx == m_idx:
                result[m_field] = formated_date
            elif idx < m_idx:
                # Status moved back above this field: clear it.
                result[m_field] = None
            else:
                if result[m_field] == None:
                    result[m_field] = formated_date
                else:
                    # Keep the existing date and propagate it upward.
                    formated_date = result[m_field]
    return result
def format_cell(self, name, value):
    """Render a single report cell according to its column type.

    'time' columns become YYYY-MM-DD strings, 'date' columns a full
    datetime string; byte strings pass through str(), unicode is encoded
    to ASCII with XML character references, anything else is returned
    unchanged.
    """
    kind = self.determine_type(name)
    if kind == 'time':
        return to_datetime(value).strftime('%Y-%m-%d')
    elif kind == 'date':
        return str(to_datetime(value))
    elif type(value) == StringType:
        return str(value)
    elif type(value) == UnicodeType:
        return value.encode('ascii', 'xmlcharrefreplace')
    return value
def get_timeline_events(self, req, start, stop, filters, pid, syllabus_id):
    """Yield work-log start/stop events for the Trac timeline.

    Produces ('workstart', ...) and ('workstop', ...) events within
    [start, stop] for the given project id.  Multi-project requests are
    not supported yet and yield nothing.
    """
    if pid is None:
        return
    is_multi = isinstance(pid, (list, tuple))
    if is_multi:
        # TODO: support timelines spanning several projects.
        return
    # Worklog changes
    show_starts = 'workstart' in filters
    show_stops = 'workstop' in filters
    if show_starts or show_stops:
        add_stylesheet(req, "worklog/worklogplugin.css")
        ts_start = to_timestamp(start)
        ts_stop = to_timestamp(stop)
        ticket_realm = Resource('ticket')
        db = self.env.get_read_db()
        cursor = db.cursor()
        # Each work_log row is split into a 'start' and a 'stop' pseudo-row
        # via the UNION so both ends of an interval can appear separately.
        cursor.execute("""
            SELECT wl.worker,wl.ticket,wl.time,wl.starttime,wl.comment,wl.kind,t.summary,t.status,t.resolution,t.type
            FROM (
                SELECT worker, ticket, starttime AS time, starttime, comment, 'start' AS kind
                FROM work_log
                UNION
                SELECT worker, ticket, endtime AS time, starttime, comment, 'stop' AS kind
                FROM work_log
            ) AS wl
            JOIN ticket t ON t.id = wl.ticket AND project_id=%s
            AND wl.time>=%s AND wl.time<=%s
            ORDER BY wl.time""", (pid, ts_start, ts_stop))
        for worker, tid, ts, ts_start, comment, kind, summary, status, resolution, type in cursor:
            ticket = ticket_realm(id=tid)
            time = to_datetime(ts)
            started = None
            if kind == 'start':
                if not show_starts:
                    continue
                yield ('workstart', pid, time, worker,
                       (ticket, summary, status, resolution, type, started, ""))
            else:
                if not show_stops:
                    continue
                started = to_datetime(ts_start)
                # Prefix the comment with the time spent on the interval.
                if comment:
                    comment = "(Time spent: %s)\n\n%s" % (pretty_timedelta(started, time), comment)
                else:
                    comment = '(Time spent: %s)' % pretty_timedelta(started, time)
                yield ('workstop', pid, time, worker,
                       (ticket, summary, status, resolution, type, started, comment))
def _format_reminder(self, req, ticket, id, time, author, origin,
                     description, delete_button=True):
    """Render one reminder entry as a tag fragment.

    Shows the relative due time ("Right now (pending)" or "In ..."), who
    added it and how long ago, plus an optional wiki-formatted
    description and, by default, a delete button.
    """
    now = to_datetime(None)
    time = to_datetime(time)
    if now >= time:
        when = tag(tag.strong("Right now"), " (pending)")
    else:
        when = tag("In ", tag.strong(pretty_timedelta(time)),
                   " (", format_date(time), ")")
    if description:
        context = Context.from_request(req, ticket.resource)
        desc = tag.div(format_to_oneliner(self.env, context, description),
                       class_="description")
    else:
        desc = tag()
    return tag(self._reminder_delete_form(req, id) if delete_button else None,
               when, " - added by ",
               tag.em(Chrome(self.env).authorinfo(req, author)), " ",
               # Tooltip shows the exact creation time in the user's
               # preferred format and timezone.
               tag.span(pretty_timedelta(origin),
                        title=format_datetime(origin,
                                              req.session.get('datefmt',
                                                              'iso8601'),
                                              req.tz)),
               " ago.", desc)
def extend(self):
    '''
    Check for all Changetimes and Return the highest Changetime as Int
    '''
    latest = to_datetime(0, utc)
    now = to_datetime(datetime.datetime.now(utc))
    for key in self.__ts:
        ticket = self.__ts[key]
        changed = to_datetime(ticket.getfielddef('changetime', now))
        if changed > latest:
            latest = changed
        # Nothing can be newer than "now", so stop scanning early.
        if latest == now:
            break
    return latest
def get_search_results(self, req, terms, filters):
    """Search discussion topics and messages for the given terms.

    Yields Trac search result tuples: (href, title, time, author,
    excerpt).  Topics link to '#-1', messages to their own anchor.
    """
    if not 'discussion' in filters:
        return
    # Create context.
    context = Context.from_request(req)
    context.realm = 'discussion-core'
    # Get database access.
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    # Search in topics.
    query, args = search_to_sql(db, ['author', 'subject', 'body'], terms)
    columns = ('id', 'forum', 'time', 'subject', 'body', 'author')
    sql = ("SELECT id, forum, time, subject, body, author "
           "FROM topic "
           " WHERE %s" % (query,))
    self.log.debug(sql)
    cursor.execute(sql, args)
    for row in cursor:
        row = dict(zip(columns, row))
        row['time'] = to_datetime(row['time'], utc)
        yield (req.href.discussion('topic', row['id']) + '#-1',
               "Topic #%d: %s" % (row['id'], shorten_line(row['subject'])),
               row['time'], row['author'],
               shorten_result(row['body'], [query]))
    # Search in messages
    query, args = search_to_sql(db, ['m.author', 'm.body', 't.subject'],
                                terms)
    columns = ('id', 'forum', 'topic', 'time', 'author', 'body', 'subject')
    sql = ("SELECT m.id, m.forum, m.topic, m.time, m.author, m.body, "
           "t.subject "
           "FROM message m "
           "LEFT JOIN "
           "(SELECT subject, id "
           "FROM topic) t "
           "ON t.id = m.topic "
           "WHERE %s" % (query))
    self.log.debug(sql)
    cursor.execute(sql, args)
    for row in cursor:
        row = dict(zip(columns, row))
        row['time'] = to_datetime(row['time'], utc)
        yield (req.href.discussion('message', row['id']) + '#%s' % (
               row['id']),
               "Message #%d: %s" % (row['id'], shorten_line(
               row['subject'])),
               row['time'], row['author'],
               shorten_result(row['body'], [query]))
def get_timeline_events(self, req, start, stop, filters):
    """Yield discussion events (new forums, topics, replies) for the
    Trac timeline, when the 'discussion' filter is active and the user
    has DISCUSSION_VIEW permission."""
    self.log.debug("start: %s, stop: %s, filters: %s"
                   % (start, stop, filters))
    if ('discussion' in filters) and 'DISCUSSION_VIEW' in req.perm:
        # Create request context.
        context = Context.from_request(req)
        context.realm = 'discussion-core'
        # Get database access.
        db = self.env.get_db_cnx()
        context.cursor = db.cursor()
        # Get API component.
        api = self.env[DiscussionApi]
        # Add CSS styles and scripts.
        add_stylesheet(context.req, 'discussion/css/discussion.css')
        # Get forum events.
        for forum in api.get_changed_forums(context, start, stop):
            # Return event.
            title = 'New forum %s created' % (forum['name'],)
            description = tag(format_to_oneliner(self.env, context,
                                                 forum['subject']),
                              ' - ',
                              format_to_oneliner(self.env, context,
                                                 forum['description']))
            ids = ('forum', forum['id'])
            yield ('discussion unsolved', to_datetime(forum['time'], utc),
                   forum['author'], (title, description, ids))
        # Get topic events.
        for topic in api.get_changed_topics(context, start, stop):
            title = 'New topic on %s created' % (topic['forum_name'],)
            description = format_to_oneliner(self.env, context,
                                             topic['subject'])
            ids = ('topic', topic['id'])
            # Solved topics get a distinct event kind (different icon).
            yield ('discussion solved' if 'solved' in topic['status']
                   else 'discussion unsolved',
                   to_datetime(topic['time'], utc),
                   topic['author'], (title, description, ids))
        # Get message events.
        for message in api.get_changed_messages(context, start, stop):
            title = 'New reply on %s created' % (message['forum_name'],)
            description = format_to_oneliner(self.env, context,
                                             message['topic_subject'])
            ids = (('topic', message['topic']), 'message', message['id'])
            yield ('discussion unsolved',
                   to_datetime(message['time'], utc),
                   message['author'], (title, description, ids))
def modify_comment(self, req, id, comment, cnum, author='', when=None):
    """Modify a ticket comment via RPC.

    :param req: current request, used for permission checks
    :param id: ticket id
    :param comment: new comment text
    :param cnum: number of the comment to modify
    :param author: optional author override (TICKET_ADMIN only)
    :param when: optional timestamp override (TICKET_ADMIN only)
    :return: the updated change dict for the comment
    """
    ticket = model.Ticket(self.env, id)

    # custom author?
    if author and not (req.authname == 'anonymous' \
            or 'TICKET_ADMIN' in req.perm(ticket.resource)):
        # only allow custom author if anonymous is permitted or user is admin
        self.log.warn("RPC ticket.update: %r not allowed to change author "
                      "to %r for comment on #%d", req.authname, author, id)
        author = ''
    author = author or req.authname

    # custom change timestamp?
    if when and not 'TICKET_ADMIN' in req.perm(ticket.resource):
        self.log.warn("RPC ticket.update: %r not allowed to update #%d with "
                      "non-current timestamp (%r)", author, id, when)
        when = None
    when = when or to_datetime(None, utc)

    change = ticket.get_change(cnum)
    # Editing someone else's comment (or anonymous edits) requires the
    # explicit TICKET_EDIT_COMMENT permission.
    if not (req.authname and req.authname != 'anonymous'
            and change and change['author'] == req.authname):
        req.perm(ticket.resource).require('TICKET_EDIT_COMMENT')
    ticket.modify_comment(change['date'], author, comment, when)
    return ticket.get_change(cnum)
def user_locked(self, user):
    """Returns whether the user account is currently locked.

    Expect True, if locked, False, if not and None otherwise.
    """
    # None: guard disabled by configuration, or user is unknown/anonymous.
    if self.login_attempt_max_count < 1 or not user or \
            not user_known(self.env, user):
        self.log.debug(
            "AccountGuard.user_locked(%s) = None (%s)"
            % (user, self.login_attempt_max_count < 1 and
               'disabled by configuration' or 'anonymous user'))
        return None
    # reset=None only reads the counter without logging a new attempt.
    count = self.failed_count(user, reset=None)
    if count < self.login_attempt_max_count:
        self.log.debug(
            "AccountGuard.user_locked(%s) = False (try left)" % user)
        return False
    ts_release = self.release_time(user)
    if ts_release == 0:
        # Account locked permanently.
        self.log.debug(
            "AccountGuard.user_locked(%s) = True (permanently)" % user)
        return True
    # Time-locked or time-lock expired.
    ts_now = to_timestamp(to_datetime(None))
    locked = ts_release - ts_now > 0
    self.log.debug(
        "AccountGuard.user_locked(%s) = %s (%s)"
        % (user, locked, locked and 'time-lock' or 'lock expired'))
    return locked
def make_ticket_history_table(env, dates, sorted_events):
    """Build per-day ticket counts for a milestone.

    Given the numeric UTC day values in ``dates`` and the sorted ticket
    events of the milestone, return a dict mapping each event class
    ('Enter', 'Leave', 'Finish') to a list of daily ticket counts.
    """
    # One zero-initialized slot per day for every event class.
    tkt_counts = {'Enter': [0] * len(dates),
                  'Leave': [0] * len(dates),
                  'Finish': [0] * len(dates)}

    for event in sorted_events:
        # event[0] is an epoch timestamp; find its day slot.
        event_date = to_datetime(event[0])
        slot = bisect(dates, date2num(event_date)) - 1
        for key in tkt_counts:
            tkt_counts[key][slot] += len(event[1][key])

    return tkt_counts
def pretty_release_time(self, req, user):
    """Convenience method for formatting lock time to string.

    Returns None when no release time exists for the user.
    """
    # Fetch once and reuse: the original called release_time() twice,
    # so a lock released between the calls could hand None to
    # to_datetime(), which silently means "now".
    ts_release = self.release_time(user)
    if ts_release is None:
        return None
    return format_datetime(to_datetime(ts_release), tzinfo=req.tz)
def _get_job_done(self, mil_names, tkt_type=None, db=None):
    """Return completed work per (completion time, ticket type).

    Counts tickets per type when burndown is measured by 'quantity',
    otherwise sums the numeric custom field named by
    ``self.count_burndown_on``.  Optionally restricted to one ticket
    type or a sequence of types.

    :return: list of (completed datetime, ticket type, amount) tuples
    """
    db = db or self.env.get_db_cnx()
    cursor = db.cursor()
    base_sql = None
    # Milestone names are always the leading bind parameters.
    params = list(mil_names)
    if self.count_burndown_on == 'quantity':
        base_sql = "SELECT m.completed, t.type, count(t.id) FROM ticket t, milestone m" + \
                   " WHERE m.name IN (%s)" % ("%s," * len(mil_names))[:-1] + \
                   " AND m.completed IS NOT NULL AND m.completed>0" + \
                   " AND m.name=t.milestone"
    else:
        # Sum a numeric custom field; CAST(COALESCE-style concat) guards
        # against empty values.
        base_sql = "SELECT m.completed, t.type, sum(" + db.cast(db.concat('0', 'tc.value'), 'int') + ")" + \
                   " FROM ticket t, ticket_custom tc, milestone m " + \
                   " WHERE m.name IN (%s)" % ("%s," * len(mil_names))[:-1] + \
                   " AND m.completed IS NOT NULL AND m.completed>0" + \
                   " AND m.name=t.milestone AND t.id=tc.ticket AND tc.name=%s"
        params += [self.count_burndown_on]
    if tkt_type:  # we have ticket type limitations
        if isinstance(tkt_type, basestring):
            base_sql += " AND t.type=%s"
            params += [tkt_type]
        else:
            base_sql += " AND t.type IN (%s)" % ("%s," * len(tkt_type))[:-1]
            params += list(tkt_type)
    cursor.execute(base_sql + " GROUP BY t.type, m.completed ORDER BY 1",
                   params)
    data = [(to_datetime(dt), ttype, sum) for dt, ttype, sum in cursor]
    return data
def _render_editor(self, req, milestone):
    """Prepare the milestone edit/create view.

    Checks MILESTONE_MODIFY or MILESTONE_CREATE as appropriate and
    returns the template/data tuple for 'milestone_edit.html'.
    """
    # Suggest a default due time of 18:00 in the user's timezone
    now = datetime.now(req.tz)
    default_due = datetime(now.year, now.month, now.day, 18)
    # NOTE(review): with '>' the suggestion between 18:00 and 18:59 is
    # already in the past — confirm '>= 18' was not intended.
    if now.hour > 18:
        default_due += timedelta(days=1)
    default_due = to_datetime(default_due, req.tz)

    data = {
        'milestone': milestone,
        'datetime_hint': get_datetime_format_hint(req.lc_time),
        'default_due': default_due,
        'milestone_groups': [],
    }

    if milestone.exists:
        req.perm(milestone.resource).require('MILESTONE_MODIFY')
        # Other visible milestones, offered as retarget destinations.
        milestones = [m for m in Milestone.select(self.env)
                      if m.name != milestone.name
                      and 'MILESTONE_VIEW' in req.perm(m.resource)]
        data['milestone_groups'] = group_milestones(milestones,
            'TICKET_ADMIN' in req.perm)
    else:
        req.perm(milestone.resource).require('MILESTONE_CREATE')

    chrome = Chrome(self.env)
    chrome.add_jquery_ui(req)
    chrome.add_wiki_toolbars(req)
    return 'milestone_edit.html', data, None
def events(self, req):
    """Build the data for the event edit form template.

    For a new event (no ``obj_id``) the calendar, all-day flag, optional
    linked ticket and a default one-hour time slot are pre-filled from
    request arguments; for an existing event its calendar ownership and
    time tracking are loaded instead.
    """
    user = req.authname
    event_id = req.args.get("obj_id") or None
    event = Event(self.env, event_id)
    cal_id = event_id and event.calendar or req.args.get("calendar")
    own = True
    if not event_id:
        # Creating a new event: seed it from the request arguments.
        event.calendar = cal_id
        event.allday = req.args.get("allDay") == "true" and 1 or 0
        ticket = req.args.get("ticket")
        ticket = ticket and Ticket(self.env, int(ticket)) or None
        if ticket and ticket.exists and \
                "TICKET_VIEW" in req.perm(ticket.resource):
            event.ticket = ticket.id
            event.title = ticket["summary"]
        event.time_track = TimeTrack(self.env)
        # assumes the 'date' argument is an epoch value in seconds —
        # TODO confirm against the client-side calendar widget.
        getdate = lambda x: to_datetime(long(req.args[x]), utc)
        event.dtstart = getdate("date")
        event.dtend = event.dtstart + timedelta(minutes=60)
    else:
        # Editing an existing event: check whether the viewer owns it.
        cal = Calendar(self.env, event.calendar)
        own = cal.owner == user
        tt = TimeTrack(self.env, event.id, user)
        event.time_track = tt
    data = {
        "event": event and event_as_dict(event, own) or None,
        "tickets": TicketConfigRPC(self.env).my_active_tickets(req),
        "calendars": [
            cal_as_dict(cal, user)
            for cal in Calendar.select(self.env, owner=user)
            if cal.type != CalendarType.Reference
        ],
    }
    return "itteco_event_form.html", data, None
def test_converted_doctest(self):
    """Coverage annotations apply only to files under the built path."""
    self.repos.get_changeset = lambda x: Mock(date=to_datetime(12345, utc))

    BuildConfig(self.env, name='trunk', path='trunk').insert()
    Build(self.env, rev=123, config='trunk', rev_time=12345, platform=1
          ).insert()
    rpt = Report(self.env, build=1, step='test', category='coverage')
    rpt.items.append({'file': 'foo.py', 'line_hits': '5 - 0'})
    rpt.insert()

    ann = TestCoverageAnnotator(self.env)
    req = Mock(href=Href('/'), perm=MockPerm(), chrome={'warnings': []},
               args={})

    # Version in the branch should not match:
    context = Context.from_request(req, 'source', '/branches/blah/foo.py',
                                   123)
    self.assertEquals(ann.get_annotation_data(context), [])

    # Version in the trunk should match:
    context = Context.from_request(req, 'source', '/trunk/foo.py', 123)
    data = ann.get_annotation_data(context)
    self.assertEquals(data, [u'5', u'-', u'0'])

    def annotate_row(lineno, line):
        # Helper: render one annotated source row to HTML.
        row = tag.tr()
        ann.annotate_row(context, row, lineno, line, data)
        return unicode(row.generate().render('html'))

    self.assertEquals(annotate_row(1, 'x = 1'),
                      u'<tr><th class="covered">5</th></tr>')
    self.assertEquals(annotate_row(2, ''),
                      u'<tr><th></th></tr>')
    self.assertEquals(annotate_row(3, 'y = x'),
                      u'<tr><th class="uncovered">0</th></tr>')
def get_all_user_tasks(self, username, projects):
    """
    Get tasks that are assigned or owned by user with 'username'
    in the context of certain projects
    """
    from trac.util.datefmt import to_datetime

    # Index values (just to keep this understandable :)
    URL, SUMMARY, DESCRIPTION, PRIORITY, TIME, PRIORITY_SORT = range(6)

    tasks = []
    # Find tasks for given projects
    for project in projects:
        for row in project.get_user_tasks(username):
            timestamp = row[TIME]
            # Timestamps larger than ten digits are microsecond based.
            if row[TIME] > 9999999999:
                timestamp = row[TIME] / 1000000
            tasks.append([project, row[URL], row[SUMMARY],
                          row[DESCRIPTION], row[PRIORITY],
                          to_datetime(timestamp), row[PRIORITY_SORT],
                          project.project_name])
    return tasks
def failed_count(self, user, ipnr=None, reset=False):
    """Report number of previously logged failed login attempts.

    Enforce login policy with regards to tracking of login attempts
    and user account lock behavior.

    Default `False` for reset value causes logging of another attempt.
    `None` value for reset just reads failed login attempts count.
    `True` value for reset triggers final log deletion.
    """
    value = get_user_attribute(self.env, user, 1, 'failed_logins_count')
    count = value and int(value[user][1].get('failed_logins_count')) or 0
    if reset is None:
        # Report failed attempts count only.
        return count
    if not reset:
        # Trigger the failed attempt logger.
        attempts = self.get_failed_log(user)
        log_length = len(attempts)
        if log_length > self.login_attempt_max_count:
            # Truncate attempts list preserving most recent events.
            # NOTE(review): truncating to max_count *before* appending
            # leaves max_count + 1 entries — confirm this is intended.
            del attempts[:(log_length - self.login_attempt_max_count)]
        attempts.append({'ipnr': ipnr,
                         'time': to_utimestamp(to_datetime(None))})
        count += 1
        # Update or create attempts counter and list.
        set_user_attribute(self.env, user, 'failed_logins', str(attempts))
        set_user_attribute(self.env, user, 'failed_logins_count', count)
        self.log.debug("AcctMgr:failed_count(%s): %s" % (user, count))
    else:
        # Delete existing attempts counter and list.
        del_user_attribute(self.env, user, 1, 'failed_logins')
        del_user_attribute(self.env, user, 1, 'failed_logins_count')
        # Delete the lock count too.
        self.lock_count(user, 'reset')
    return count
def user_locked(self, user):
    """Returns whether the user account is currently locked.

    Expect True, if locked, False, if not and None otherwise.
    """
    if not self.login_attempt_max_count > 0:
        # Account locking turned off by configuration.
        return None
    # reset=None only reads the counter without logging a new attempt.
    count = self.failed_count(user, reset=None)
    ts_release = self.release_time(user)
    if count < self.login_attempt_max_count:
        self.log.debug(
            "AcctMgr:user_locked(%s): False (try left)" % user)
        return False
    else:
        if ts_release is None:
            # Account locked permanently.
            self.log.debug(
                "AcctMgr:user_locked(%s): True (permanently)" % user)
            return True
    # Time-locked or time-lock expired.
    ts_now = to_utimestamp(to_datetime(None))
    self.log.debug(
        "AcctMgr:user_locked(%s): %s"
        % (user, (ts_release - ts_now > 0)))
    return (ts_release - ts_now > 0)
def prepare_to_cumulate(sorted_events): dhist = {} for date, date_events in groupby(sorted_events, lambda (t, events): to_datetime(t).date()): evset = {'Enter': set(), 'Leave': set(), 'Finish': set()} dhist[date] = evset date_events_list = list(date_events) for (t, events) in date_events_list: for k, ids in events.iteritems(): evset[k] |= ids # resolve Enter / Leave conflicts enter_leave_ids = evset['Enter'] & evset['Leave'] if enter_leave_ids: evs = {'Enter': None, 'Leave': None} last = {'Enter': None, 'Leave': None} for k in ('Enter', 'Leave'): evs[k] = sorted([(t, evs['Enter']) for (t, evs) in date_events_list], key=lambda (t, ids): t) for id in enter_leave_ids: for k in ('Enter', 'Leave'): last[k] = 0 for t, ids in reversed(evs[k]): if id in ids: last[k] = t break to_del = (last['Enter'] > last['Leave']) and 'Leave' or 'Enter' evset[to_del].remove(id)
def get_blog_comments(env, post_name='', from_dt=None, to_dt=None):
    """ Returns comments as a list of tuples from search based on
    AND input for post_name, and datetime span (from_dt and to_dt):
        (post_name, number, comment, author, time)
    Instantiate BlogComment objects to get further details of each.
    Example of sorting the output by time, newest first:
        from trac.util.compat import sorted, itemgetter
        comments = get_blog_comments(env)
        sorted(comments, key=itemgetter(4), reverse=True) """
    # Build the list of WHERE restrictions
    conditions = []
    if post_name:
        conditions.append(("name=%s", post_name))
    if from_dt:
        conditions.append(("time>%s", to_timestamp(from_dt)))
    if to_dt:
        conditions.append(("time<%s", to_timestamp(to_dt)))
    where_clause = ""
    where_values = None
    if conditions:
        where_clause = "WHERE " + " AND ".join(
            cond for cond, _ in conditions)
        where_values = tuple(val for _, val in conditions)
    # Do the SELECT
    cnx = env.get_db_cnx()
    cursor = cnx.cursor()
    sql = "SELECT name, number, comment, author, time " \
          "FROM fullblog_comments " + where_clause
    env.log.debug("get_blog_comments() SQL: %r (%r)" % (sql, where_values))
    cursor.execute(sql, where_values or None)
    # Return the items we have found
    return [(name, number, comment, author, to_datetime(ts, utc))
            for name, number, comment, author, ts in cursor]
def _do_check_and_send(self):
    """Send every reminder whose time has arrived and is still unsent."""
    now = to_utimestamp(to_datetime(None))
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute(
        "SELECT id, ticket, author, origin, description FROM ticketreminder WHERE reminded=0 AND %s>=time",
        (now,))
    for reminder in cursor:
        self._do_send(*reminder)
def _format_screenshot(self, context, screenshot):
    """Normalize a screenshot row for display: wiki-render the text
    fields, coerce dimensions to int and replace the timestamp with a
    pretty age string.  Mutates and returns the same dict."""
    for field in ('author', 'name', 'description'):
        screenshot[field] = format_to_oneliner(self.env, context,
                                               screenshot[field])
    for field in ('width', 'height'):
        screenshot[field] = int(screenshot[field])
    screenshot['time'] = pretty_timedelta(
        to_datetime(screenshot['time'], utc))
    return screenshot
def test_create_build(self):
    """POSTing a slave description to /builds creates a pending build."""
    BuildConfig(self.env, 'test', path='somepath', active=True).insert()
    platform = TargetPlatform(self.env, config='test', name="Unix")
    platform.rules.append(('family', 'posix'))
    platform.insert()

    # Repository stub: three revisions on 'somepath'.
    self.repos = Mock(
        get_node=lambda path, rev=None: Mock(
            get_entries=lambda: [Mock(), Mock()],
            get_history=lambda: [('somepath', 123, 'edit'),
                                 ('somepath', 121, 'edit'),
                                 ('somepath', 120, 'edit')]),
        get_changeset=lambda rev: Mock(date=to_datetime(42, utc)),
        normalize_path=lambda path: path,
        rev_older_than=lambda rev1, rev2: rev1 < rev2)

    inheaders = {'Content-Type': 'application/x-bitten+xml'}
    inbody = StringIO("""<slave name="hal" version="%d">
  <platform>Power Macintosh</platform>
  <os family="posix" version="8.1.0">Darwin</os>
  <package name="java" version="2.4.3"/>
</slave>""" % PROTOCOL_VERSION)
    outheaders = {}
    outbody = StringIO()
    req = Mock(method='POST', base_path='', path_info='/builds',
               href=Href('/trac'),
               abs_href=Href('http://example.org/trac'),
               remote_addr='127.0.0.1', args={},
               perm=PermissionCache(self.env, 'hal'),
               get_header=lambda x: inheaders.get(x),
               read=inbody.read,
               send_response=lambda x: outheaders.setdefault('Status', x),
               send_header=lambda x, y: outheaders.setdefault(x, y),
               write=outbody.write,
               incookie=Cookie('trac_auth='))

    module = BuildMaster(self.env)
    assert module.match_request(req)
    # process_request ends the request by raising RequestDone.
    self.assertRaises(RequestDone, module.process_request, req)
    self.assertEqual(201, outheaders['Status'])
    self.assertEqual('text/plain', outheaders['Content-Type'])
    location = outheaders['Location']
    mo = re.match('http://example.org/trac/builds/(\d+)', location)
    assert mo, 'Location was %r' % location
    self.assertEqual('Build pending', outbody.getvalue())

    # The created build is assigned to the slave and in progress.
    build = Build.fetch(self.env, int(mo.group(1)))
    self.assertEqual(Build.IN_PROGRESS, build.status)
    self.assertEqual('hal', build.slave)
def test_populate_thread_race_condition(self):
    """Two threads populating the build queue concurrently must not
    create duplicate builds; the losing inserts are logged instead."""
    messages = []
    # Capture log.info() calls so the duplicate-insert messages can be
    # counted at the end.
    self.env.log = Mock(info=lambda msg, *args: messages.append(msg))

    def get_history():
        # Generator so both threads interleave while consuming history.
        yield ('somepath', 123, 'edit')
        yield ('somepath', 121, 'edit')
        yield ('somepath', 120, 'edit')
        time.sleep(1)  # sleep to make sure both threads collect

    self.repos.get_changeset = lambda rev: Mock(
        date=to_datetime(rev * 1000, utc))
    self.repos.get_node = lambda path, rev=None: Mock(
        get_entries=lambda: [Mock(), Mock()],
        get_history=get_history)
    self.repos.normalize_path = lambda path: path
    self.repos.rev_older_than = lambda rev1, rev2: rev1 < rev2
    BuildConfig(self.env, 'test', path='somepath', active=True).insert()
    platform1 = TargetPlatform(self.env, config='test', name='P1')
    platform1.insert()
    platform2 = TargetPlatform(self.env, config='test', name='P2')
    platform2.insert()

    def build_populator():
        queue = BuildQueue(self.env, build_all=True)
        queue.populate()

    thread1 = threading.Thread(target=build_populator)
    thread2 = threading.Thread(target=build_populator)
    # tiny sleep is to avoid odd segementation faults
    # (on Linux) and bus errors (on Mac OS X)
    thread1.start(); time.sleep(0.01); thread2.start()
    thread1.join(); thread2.join()
    # check builds got added: exactly one build per (platform, rev)
    # pair despite both threads trying to insert each of them.
    builds = list(Build.select(self.env, config='test'))
    builds.sort(lambda a, b: cmp(a.platform, b.platform))
    self.assertEqual(6, len(builds))
    self.assertEqual(platform1.id, builds[0].platform)
    self.assertEqual('123', builds[0].rev)
    self.assertEqual(platform1.id, builds[1].platform)
    self.assertEqual('121', builds[1].rev)
    self.assertEqual(platform1.id, builds[2].platform)
    self.assertEqual('120', builds[2].rev)
    self.assertEqual(platform2.id, builds[3].platform)
    self.assertEqual('123', builds[3].rev)
    self.assertEqual(platform2.id, builds[4].platform)
    self.assertEqual('121', builds[4].rev)
    self.assertEqual(platform2.id, builds[5].platform)
    self.assertEqual('120', builds[5].rev)
    # check attempts at duplicate inserts were logged.
    failure_messages = [x for x in messages
                        if x.startswith('Failed to insert build')]
    self.assertEqual(6, len(failure_messages))
def _do_list(self, *sids):
    """Print a table of session records for the given SIDs.

    With no arguments, lists every session (wildcard '*').
    """
    sids = sids or ['*']
    rows = [(record[0], record[1],
             format_date(to_datetime(record[2]), console_date_format),
             record[3], record[4])
            for record in self._get_list(sids)]
    headers = [_('SID'), _('Auth'), _('Last Visit'), _('Name'),
               _('Email')]
    print_table(rows, headers)
def testCalendarForTeamMember(self):
    """Tests the calendar for a TeamMember"""
    today = to_datetime(datetime(2008, 9, 1)).date()
    tomorrow = today + one_day
    # Override the hours for one day; the next day keeps the default.
    self.tmc.set_hours_for_day(4, today)
    self.assert_equals(self.tmc.get_hours_for_day(today), 4)
    self.assert_equals(self.tmc.get_hours_for_day(tomorrow), 6)
    # Persist and reload to verify the override survives a round-trip.
    self.assert_true(self.tmc.save())
    reloaded = TeamMemberCalendar(self.teh.get_env(), self.tm)
    self.assert_equals(reloaded.get_hours_for_day(today), 4)
    self.assert_equals(reloaded.get_hours_for_day(tomorrow), 6)
def _get_job_done(self, mil_names, tkt_type=None, db=None):
    """Collect 'job done' burndown events for the given milestones.

    For every ticket in the milestones that reached a final status,
    returns a list of ``(datetime, ticket_type, amount)`` tuples where
    *amount* is 1 when counting by quantity, or the numeric value of
    the configured custom field otherwise. Timestamps earlier than the
    milestone start are clamped to the start date.

    :param mil_names: non-empty list of milestone names; the first one
                      determines the burndown start date
    :param tkt_type: optional ticket type (string) or iterable of types
                     to restrict the query to
    :param db: optional database connection (defaults to a new one)
    """
    # Milestone start, truncated to midnight local time.
    started_at = to_timestamp(datetime(tzinfo=localtz,
        *(StructuredMilestone(self.env,
                              mil_names[0]).started.timetuple()[:3])))
    db = db or self.env.get_db_cnx()
    cursor = db.cursor()
    base_sql = None
    params = list(mil_names)
    group_by = " GROUP BY t.id, t.type"
    final_statuses = IttecoEvnSetup(self.env).final_statuses
    status_params = ("%s," * len(final_statuses))[:-1]
    # Placeholder order: [custom field name,] statuses (join),
    # statuses (where), milestone names [, ticket type(s)].
    params = final_statuses + final_statuses + params
    if self.count_burndown_on == 'quantity':
        # BUG FIX: the WHERE clause was missing the 't.status' column
        # ("WHERE IN (%s)"), which is invalid SQL; the else-branch
        # below shows the intended form.
        base_sql = """SELECT MAX(c.time), t.id, t.type, 1
            FROM ticket t
            LEFT JOIN milestone m ON m.name=t.milestone
            LEFT OUTER JOIN ticket_change c ON t.id=c.ticket
                AND c.field='status' AND c.newvalue IN (%s)
            WHERE t.status IN (%s) AND m.name IN (%s)""" % \
            (status_params, status_params,
             ("%s," * len(mil_names))[:-1])
    else:
        # Count by the numeric value of a custom field; '0'||value is
        # cast to int so empty values count as 0.
        base_sql = ("SELECT MAX(c.time), t.id, t.type, " +
            db.cast(db.concat('0', 'tc.value'), 'int') +
            """ FROM ticket t
            LEFT JOIN milestone m ON m.name=t.milestone
            LEFT JOIN ticket_custom tc ON t.id=tc.ticket
                AND tc.name=%%s
            LEFT OUTER JOIN ticket_change c ON t.id=c.ticket
                AND c.field='status' AND c.newvalue IN (%s)
            WHERE t.status IN (%s) AND m.name IN (%s)""" % \
            (status_params, status_params,
             ("%s," * len(mil_names))[:-1]))
        params = [self.count_burndown_on] + params
        group_by += ", tc.value"
    if tkt_type:
        if isinstance(tkt_type, basestring):
            base_sql += " AND t.type=%s"
            params += [tkt_type]
        else:
            base_sql += " AND t.type IN (%s)" % \
                ("%s," * len(tkt_type))[:-1]
            params += list(tkt_type)
    cursor.execute(base_sql + group_by + " ORDER BY 1", params)
    # Clamp pre-start completions to the milestone start; 'amount'
    # renamed from 'sum' to avoid shadowing the builtin.
    data = [(to_datetime((dt < started_at) and started_at or dt),
             ttype, amount or 0)
            for dt, tkt_id, ttype, amount in cursor]
    return data
def get_timeline_events(self, req, start, stop, filters):
    """Yield one 'build' timeline event per BuildBot build finished
    within [start, stop]."""
    try:
        master = BuildBotSystem(self.buildbot_url)
    except Exception as e:
        print('Error hitting BuildBot', e)
        return
    window = (to_timestamp(start), to_timestamp(stop))
    # BuildBot builds are reported as
    # (builder_name, num, end, branch, rev, results, text)
    for build in master.getAllBuildsInInterval(*window):
        print('Reporting build', build)
        yield ('build', to_datetime(build[2]), '', build)
def _delete_change(self, req, id, ts, field):
    """Delete the change to a field on the specified ticket at the
    specified timestamp.

    :param req: the request (used for the confirmation notice)
    :param id: ticket id
    :param ts: change timestamp (string or int, POSIX seconds)
    :param field: changed field name, or the pseudo-fields 'change'
                  (delete every field changed at *ts*) and
                  'attachment' (delete the attachment added at *ts*)
    :raises TracError: when no change exists at *ts*
    """
    ticket = Ticket(self.env, id)
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    dt = to_datetime(int(ts))
    changelog = ticket.get_changelog(dt)
    if changelog:
        if field == 'change':
            # Iterate over all the fields that have changed
            for change in changelog:
                self._delete_change(req, id, ts, change[2])
        elif field == 'attachment':
            # Delete the attachment
            cursor.execute("""
                DELETE FROM attachment
                WHERE type = 'ticket' AND id = %s AND time = %s""",
                (id, ts))
        else:
            # Revert the field to its old value if it's the newest
            # change to that field
            exists_newer = [True for change in ticket.get_changelog()
                            if to_timestamp(change[0]) > int(ts)
                            and field == change[2]]
            if field != 'comment' and not exists_newer:
                oldval = [change[3] for change in changelog
                          if change[2] == field]
                if oldval:
                    # Custom fields live in ticket_custom, built-in
                    # fields are columns of the ticket table.
                    custom_fields = [f['name'] for f in ticket.fields
                                     if f.get('custom')]
                    if field in custom_fields:
                        cursor.execute("""
                            UPDATE ticket_custom SET value=%s
                            WHERE ticket=%s AND name=%s""",
                            (oldval[0], id, field))
                    else:
                        # Field name is interpolated into the SQL (it
                        # names a column); the value stays a bind param.
                        cursor.execute("""
                            UPDATE ticket SET %s=%%s
                            WHERE id=%%s""" % field, (oldval[0], id))
            # Delete the ticket change
            cursor.execute("""
                DELETE FROM ticket_change
                WHERE ticket=%s AND time=%s AND field=%s
                """, (id, ts, field))
    else:
        raise TracError("""
            Ticket change with timestamp %s (datetime: %s) not found
            in ticket #%s changelog.
            """ % (ts, dt, id))
    db.commit()
    msg = "Change to field \"%s\" of ticket #%s at %s has been deleted." \
        % (field, id, dt)
    add_notice(req, msg)
    self.log.debug("TicketDelete: " + msg)
def insert(self, filename, fileobj, size, t=None):
    """Create a new Attachment record and save the file content.

    :param filename: suggested file name; may be uniquified on disk
    :param fileobj: readable file-like object with the content
    :param size: content size in bytes (falsy values become 0)
    :param t: attachment datetime; ``None`` means now, and non-datetime
              values are converted for 0.11 compatibility
    :raises ResourceNotFound: when the parent resource does not exist
    :raises TracError: when the parent realm/id would place the file
                       outside the environment's attachments directory
    """
    self.size = int(size) if size else 0
    self.filename = None
    if t is None:
        t = datetime_now(utc)
    elif not isinstance(t, datetime):
        # Compatibility with 0.11
        t = to_datetime(t, utc)
    self.date = t
    parent_resource = Resource(self.parent_realm, self.parent_id)
    if not resource_exists(self.env, parent_resource):
        raise ResourceNotFound(
            _("%(parent)s doesn't exist, can't create attachment",
              parent=get_resource_name(self.env, parent_resource)))
    # Make sure the path to the attachment is inside the environment
    # attachments directory
    attachments_dir = os.path.join(os.path.normpath(self.env.path),
                                   'files', 'attachments')
    dir = self.path
    commonprefix = os.path.commonprefix([attachments_dir, dir])
    if commonprefix != attachments_dir:
        raise TracError(_('Cannot create attachment "%(att)s" as '
                          '%(realm)s:%(id)s is invalid',
                          att=filename, realm=self.parent_realm,
                          id=self.parent_id))
    if not os.access(dir, os.F_OK):
        os.makedirs(dir)
    filename, targetfile = self._create_unique_file(dir, filename)
    with targetfile:
        # DB row is written inside the same 'with' so a failed copy
        # rolls back the transaction before the file handle closes.
        with self.env.db_transaction as db:
            db("INSERT INTO attachment VALUES (%s,%s,%s,%s,%s,%s,%s,%s)",
               (self.parent_realm, self.parent_id, filename,
                self.size, to_utimestamp(t), self.description,
                self.author, self.ipnr))
            shutil.copyfileobj(fileobj, targetfile)
            self.filename = filename
        self.env.log.info("New attachment: %s by %s", self.title,
                          self.author)
        for listener in AttachmentModule(self.env).change_listeners:
            listener.attachment_added(self)
def test_to_and_from_datetime(self):
    """Round-trip a datetime through the XML-RPC DateTime conversion
    helpers and verify value, type and timezone are preserved.

    Uses ``assertEqual``/``assertTrue`` instead of the deprecated
    ``assertEquals`` alias.
    """
    from datetime import datetime
    from trac.util.datefmt import to_datetime, utc
    from tracrpc.xml_rpc import to_xmlrpc_datetime, from_xmlrpc_datetime
    now = to_datetime(None, utc)
    # Compare only down to seconds; DateTime has no sub-second part.
    now_timetuple = now.timetuple()[:6]
    xmlrpc_now = to_xmlrpc_datetime(now)
    self.assertTrue(isinstance(xmlrpc_now, xmlrpclib.DateTime),
                    "Expected xmlprc_now to be an xmlrpclib.DateTime")
    self.assertEqual(str(xmlrpc_now),
                     now.strftime("%Y%m%dT%H:%M:%S"))
    now_from_xmlrpc = from_xmlrpc_datetime(xmlrpc_now)
    self.assertTrue(isinstance(now_from_xmlrpc, datetime),
                    "Expected now_from_xmlrpc to be a datetime")
    self.assertEqual(now_from_xmlrpc.timetuple()[:6], now_timetuple)
    self.assertEqual(now_from_xmlrpc.tzinfo, utc)
def _get_day_key(self, day):
    """Returns the key and the date for the given day"""
    # Normalize the many accepted representations of 'day' to a
    # datetime.date: datetime, date, parseable string, or timestamp.
    d_day = None
    if isinstance(day, datetime):
        d_day = day.date()
    elif isinstance(day, date):
        d_day = day
    elif isinstance(day, basestring):
        d_day = parse_date(day).date()
    elif isinstance(day, (int, long)):
        try:
            d_day = to_datetime(day).date()
        except TypeError, e:
            # Best effort: log a warning and leave d_day as None.
            warning(self, _("Unable to covert %s to a date: %s" % \
                (day, to_unicode(e))))
    # NOTE(review): despite the docstring, no value is returned here --
    # the computed d_day is dropped. Either this chunk is truncated or
    # a return statement is missing; confirm against the full source.
def _get_default_postname(self, user=''):
    """Parse and return the setting for default_postname.

    The option value may contain ``strftime()`` directives (expanded
    with the current UTC time) and the literal token ``$USER``, which
    is replaced by *user*. Returns ``''`` when the option is unset or
    cannot be expanded.
    """
    opt = self.env.config.get('fullblog', 'default_postname')
    if not opt:
        return ''
    # Perform substitutions
    try:
        now = to_datetime(None, utc).timetuple()
        name = strftime(opt, now)
        name = name.replace('$USER', user)
        return name
    except Exception:
        # Narrowed from a bare 'except:': still best-effort, but no
        # longer swallows SystemExit/KeyboardInterrupt.
        self.env.log.debug(
            "FullBlog: Error parsing default_postname option: %s" % opt)
        return ''
def _load_comment(self, number):
    """Load comment *number* for this post from the database.

    Populates number/comment/author/time and returns True when the
    comment exists, False otherwise.
    """
    self.log.debug("Fetching blog comment number %d for %r",
                   number, self.post_name)
    rows = self.env.db_query("""
            SELECT comment, author, time
            FROM fullblog_comments
            WHERE name=%s AND number=%s
            """, (self.post_name, number))
    for comment, author, time in rows:
        self.number = number
        self.comment = comment
        self.author = author
        self.time = to_datetime(time, utc)
        return True
    return False
def search_blog_comments(env, terms):
    """ Free text search for content of blog posts.
    Input is a list of terms.
    Returns a list of tuples with:
        (post_name, comment_number, comment, comment_author, comment_time)
    """
    assert terms
    columns = ['author', 'comment']
    with env.db_query as db:
        search_clause, args = search_to_sql(db, columns, terms)
        sql = """
                SELECT name, number, comment, author, time
                FROM fullblog_comments
                WHERE %s """ % search_clause
        return [(name, number, comment, author, to_datetime(when, utc))
                for name, number, comment, author, when
                in db(sql, args)]
def get_commit_by_date(self):
    """Bucket the changesets into per-day commit counts.

    Returns ``(numdates, numcommits)`` where *numdates* spans
    [start_date, stop_date) in one-day steps and *numcommits* holds
    the number of commits falling on each day.
    """
    numdates = drange(self.start_date, self.stop_date,
                      timedelta(days=1))
    numcommits = [0] * len(numdates)
    for rev, time, author in self.changesets:
        commit_day = to_datetime(time, utc).date()
        # get index of day in the dates list
        bucket = bisect(numdates, date2num(commit_day)) - 1
        numcommits[bucket] += 1
    return (numdates, numcommits)
def _get_keyword_values(self, keywords):
    """Compute the expansion values for the given svn:keywords string.

    Returns a dict mapping each active keyword to its UTF-8 encoded
    expansion, or None when no keyword applies.
    """
    keywords = self._split_keywords(keywords)
    if not keywords:
        return None
    node = self.node
    mtime = to_datetime(node.last_modified, utc)
    shortdate = mtime.strftime('%Y-%m-%d %H:%M:%SZ')
    longdate = mtime.strftime('%Y-%m-%d %H:%M:%S +0000 (%a, %d %b %Y)')
    created_rev = unicode(node.created_rev)
    # Note that the `to_unicode` has a small probability to mess-up binary
    # properties, see #4321.
    author = to_unicode(self._get_revprop(core.SVN_PROP_REVISION_AUTHOR))
    path = node.path.lstrip('/')
    url = node.repos.get_path_url(path, node.rev) or path
    root_url = node.repos.get_path_url('', node.rev) or '/'
    id_ = ' '.join((node.name, created_rev, shortdate, author))
    # Expansion table: plain names are used by the standard keyword
    # groups; '%x' entries feed the custom-definition placeholders.
    data = {
        'rev': created_rev, 'author': author, 'url': url,
        'date': longdate, 'id': id_,
        'header': ' '.join((url, created_rev, shortdate, author)),
        '%a': author, '%b': node.name, '%d': shortdate,
        '%D': longdate, '%P': path, '%r': created_rev,
        '%R': root_url, '%u': url, '%_': ' ', '%%': '%',
        '%I': id_,
        '%H': ' '.join((path, created_rev, shortdate, author)),
    }

    def expand(match):
        # Replace a '%x' placeholder with its value; unknown
        # placeholders pass through unchanged.
        match = match.group(0)
        return data.get(match, match)

    values = {}
    # One matching alias activates the value for every alias in the
    # same group.
    for name, aliases in self.KEYWORD_GROUPS.iteritems():
        if any(kw in keywords for kw in aliases):
            values.update((kw, data[name]) for kw in aliases)
    # Custom definitions of the form 'Name=format' (only for names
    # that are not standard keywords).
    for keyword in keywords:
        if '=' not in keyword:
            continue
        name, definition = keyword.split('=', 1)
        if name not in self.KEYWORDS:
            values[name] = self.KEYWORD_EXPAND_RE.sub(expand, definition)
    if values:
        return dict((key, value.encode('utf-8'))
                    for key, value in values.iteritems())
    else:
        return None
def get_timeline_events(self, req, start, stop, filters):
    """Yield timeline events for finished builds in [start, stop].

    Emits attachment events first, then one 'successbuild' or
    'failedbuild' event per completed build the user may see.
    """
    if 'build' not in filters:
        return
    # Attachments (will be rendered by attachment module)
    for event in AttachmentModule(self.env).get_timeline_events(
            req, Resource('build'), start, stop):
        yield event
    start = to_timestamp(start)
    stop = to_timestamp(stop)
    add_stylesheet(req, 'bitten/bitten.css')
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    # Only finished builds: status restricted to SUCCESS/FAILURE.
    cursor.execute(
        "SELECT b.id,b.config,c.label,c.path, b.rev,p.name,"
        "b.stopped,b.status FROM bitten_build AS b"
        "  INNER JOIN bitten_config AS c ON (c.name=b.config) "
        "  INNER JOIN bitten_platform AS p ON (p.id=b.platform) "
        "WHERE b.stopped>=%s AND b.stopped<=%s "
        "AND b.status IN (%s, %s) ORDER BY b.stopped",
        (start, stop, Build.SUCCESS, Build.FAILURE))
    repos = self.env.get_repository(authname=req.authname)
    assert repos, 'No "(default)" Repository: Add a repository or alias ' \
                  'named "(default)" to Trac.'
    event_kinds = {Build.SUCCESS: 'successbuild',
                   Build.FAILURE: 'failedbuild'}
    for id_, config, label, path, rev, platform, stopped, status in cursor:
        # Skip builds whose source path/revision the user may not view.
        if not _has_permission(req.perm, repos, path, rev=rev):
            continue
        errors = []
        if status == Build.FAILURE:
            # Collect the per-step error messages of failed steps.
            for step in BuildStep.select(self.env, build=id_,
                                         status=BuildStep.FAILURE,
                                         db=db):
                errors += [(step.name, error) for error in step.errors]
        display_rev = repos.normalize_rev(rev)
        yield (event_kinds[status], to_datetime(stopped, utc), None,
               (id_, config, label, display_rev, platform, status,
                errors))
def _load_comment(self, number):
    """Fetch comment *number* for this blog post.

    On success, stores number/comment/author/time on the instance and
    returns True; returns False when no such comment exists.
    """
    cnx = self.env.get_db_cnx()
    cursor = cnx.cursor()
    self.env.log.debug("Fetching blog comment number %d for %r" % (
            number, self.post_name))
    cursor.execute(
        "SELECT comment, author, time "
        "FROM fullblog_comments "
        "WHERE name=%s AND number=%s", (self.post_name, number))
    for comment, author, when in cursor:
        self.number = number
        self.comment = comment
        self.author = author
        self.time = to_datetime(when, utc)
        return True
    return False
def _event_data(self, req, provider, event, lastvisit):
    """Compose the timeline event data from the event tuple and
    prepared provider methods.

    :param event: 4-tuple ``(kind, datetime, author, data)`` or a
                  5-tuple with a trailing special provider
    :param lastvisit: last-visit timestamp used to flag unread events
    """
    if len(event) == 5:
        # with special provider
        kind, timestamp, author, data, provider = event
    else:
        kind, timestamp, author, data = event
    # Renamed local from 'datetime' to 'timestamp' so it no longer
    # shadows the datetime module/class.
    render = lambda field, context: \
        provider.render_timeline_event(context, field, event)
    localized_datetime = to_datetime(timestamp, tzinfo=req.tz)
    localized_date = truncate_datetime(localized_datetime)
    datetime_uid = to_utimestamp(localized_datetime)
    return {'kind': kind, 'author': author,
            'date': localized_date,
            'datetime': localized_datetime,
            'datetime_uid': datetime_uid,
            'render': render,
            # Unread when the event is newer than the last visit.
            'unread': lastvisit and lastvisit < datetime_uid,
            'event': event, 'data': data, 'provider': provider}
def _render_editor(self, req, milestone):
    """Prepare the milestone add/edit form data.

    Returns the ``(template, data, metadata)`` triple for the
    milestone editor; requires MILESTONE_MODIFY for existing
    milestones and MILESTONE_CREATE for new ones.
    """
    # Suggest a default due time of 18:00 in the user's timezone
    now = datetime.now(req.tz)
    default_due = datetime(now.year, now.month, now.day, 18)
    if now.hour > 18:
        # Past today's suggested time: move the suggestion to tomorrow.
        default_due += timedelta(days=1)
    default_due = to_datetime(default_due, req.tz)
    data = {
        'milestone': milestone,
        'datetime_hint': get_datetime_format_hint(req.lc_time),
        'default_due': default_due,
        'milestone_groups': [],
    }
    if milestone.exists:
        req.perm(milestone.resource).require('MILESTONE_MODIFY')
        # Retarget candidates: every other milestone the user may view.
        milestones = [m for m in Milestone.select(self.env)
                      if m.name != milestone.name
                      and 'MILESTONE_VIEW' in req.perm(m.resource)]
        num_tickets = self.env.db_query("""
            SELECT COUNT(*) FROM ticket WHERE milestone=%s""",
            (milestone.name, ))[0][0]
        data['milestone_groups'] = group_milestones(
            milestones, 'TICKET_ADMIN' in req.perm)
        data['num_tickets'] = num_tickets
        data['retarget_to'] = self.default_retarget_to
    else:
        req.perm(milestone.resource).require('MILESTONE_CREATE')
        if milestone.name:
            add_notice(req, _("Milestone %(name)s does not exist. You"
                              " can create it here.",
                              name=milestone.name))
    chrome = Chrome(self.env)
    chrome.add_jquery_ui(req)
    chrome.add_wiki_toolbars(req)
    add_stylesheet(req, 'common/css/roadmap.css')
    return 'milestone_edit.html', data, None
def search_testcases(env, terms):
    """Free text search over test cases.

    Input is a list of terms.
    Returns a list of tuples with:
        ( ... )

    NOTE(review): the original docstring said "blog posts" -- this
    function appears to be copied from the blog search helper.
    NOTE(review): the SELECT fetches only three columns (author,
    title, description) but the result tuples index row[0]..row[5]
    and pass row[2] to to_datetime(); any match will raise
    IndexError. The intended column list must be confirmed against
    the qa_testcase schema before this can be fixed.
    """
    cnx = env.get_db_cnx()
    cursor = cnx.cursor()
    columns = ['title', 'description', 'author', 'acceptance']
    search_clause, args = search_to_sql(cnx, columns, terms)
    sql = """ SELECT author, title, description
        FROM qa_testcase
        WHERE """ + search_clause
    env.log.debug("search_testcases() SQL: %r" % sql)
    cursor.execute(sql, args)
    return [(row[0], row[1], to_datetime(row[2], utc), row[3], \
             row[4], row[5]) for row in cursor]
def search_blog_comments(env, terms):
    """ Free text search for content of blog posts.
    Input is a list of terms.
    Returns a list of tuples with:
        (post_name, comment_number, comment, comment_author, comment_time)
    """
    assert terms
    cnx = env.get_db_cnx()
    cursor = cnx.cursor()
    # SQL
    search_clause, args = search_to_sql(cnx, ['author', 'comment'],
                                        terms)
    sql = ("SELECT name, number, comment, author, time "
           "FROM fullblog_comments WHERE " + search_clause)
    env.log.debug("search_blog_comments() SQL: %r" % sql)
    cursor.execute(sql, args)
    # Return the items we have found
    return [(name, number, comment, author, to_datetime(when, utc))
            for name, number, comment, author, when in cursor]
class TracBuildBotWatcher(Component): implements(ITimelineEventProvider, IRequestHandler, ITemplateProvider, INavigationContributor) buildbot_url = Option( 'bbwatcher', 'buildmaster', '127.0.0.1:8010', 'The location of the BuildBot webserver. Do not include the /xmlrpc') BUILDER_REGEX = r'/buildbot/builder(?:/(.+))?$' BUILDER_RE = re.compile(BUILDER_REGEX) # Template Provider def get_htdocs_dirs(self): return [] def get_templates_dirs(self): return [pkg_resources.resource_filename('bbwatcher', 'templates')] # Nav Contributor def get_active_navigation_item(self, req): return 'buildbot' def get_navigation_items(self, req): yield 'mainnav', 'buildbot', tag.a('BuildBot', href=req.href.buildbot()) # Timeline Methods def get_timeline_filters(self, req): yield ('bbwatcher', 'Builds', False) def get_timeline_events(self, req, start, stop, filters): #if not 'bbwatcher' in filters: # return try: master = BuildBotSystem(self.buildbot_url) except Exception, e: print 'Error hitting BuildBot', e return # This was a comprehension: the loop is clearer for build in master.getAllBuildsInInterval(to_timestamp(start), to_timestamp(stop)): # BuildBot builds are reported as # (builder_name, num, end, branch, rev, results, text) print 'Reporting build', build yield ('build', to_datetime(build[2]), '', build)
def get_search_results(self, req, terms, filters):
    """Yield Trac search results for forms matching *terms*.

    Each result is the standard search tuple
    ``(href, title, date, author, excerpt)``; forms the user lacks
    FORM_VIEW on are skipped.
    """
    if 'form' not in filters:
        return
    env = self.env
    results = self.search_tracforms(env, terms)
    # Loop variable renamed from 'id' to avoid shadowing the builtin.
    for form_id, realm, parent, subctxt, state, author, \
            updated_on in results:
        # DEVEL: support for handling form revisions not implemented yet
        #form = Form(env, realm, parent, subctxt, form_id, version)
        form = Form(env, realm, parent, subctxt, form_id)
        if 'FORM_VIEW' in req.perm(form):
            form = form.resource
            # build a more human-readable form values representation,
            # especially with unicode character escapes removed
            state = _render_values(state)
            yield (get_resource_url(env, form, req.href),
                   get_resource_description(env, form),
                   to_datetime(updated_on), author,
                   shorten_result(state, terms))
def _get_posts(self, user, projects):
    #project_name, subject, body, createtime
    """Get the forum posts authored by *user* across *projects*.

    Returns a list of dicts (project, id, forum_id, topic_id, time,
    subject, body, type) sorted newest-first; empty for the 'home'
    project. Query failures for individual projects are logged and
    skipped.
    """
    import operator
    posts = []
    query = None
    if self.env.project_identifier == "home":
        return posts
    with trac_db_query(self.env) as cursor:
        for prj in projects:
            # Database/user names are interpolated into the SQL, but
            # both pass through safe_string() first; keep it that way
            # if this query is ever touched.
            query = ("SELECT id, forum, 0, time, subject, body "
                     "FROM `%(dbname)s`.`topic` "
                     "WHERE author = '%(user)s' "
                     "UNION ALL "
                     "SELECT m.id, m.forum, m.topic, m.time, "
                     "t.subject, m.body FROM "
                     "`%(dbname)s`.`message` m, `%(dbname)s`.`topic` t "
                     "WHERE m.author = '%(user)s' AND m.topic = t.id"
                     % {'dbname': safe_string(prj.env_name),
                        'user': safe_string(user)})
            try:
                cursor.execute(query)
                for row in cursor:
                    posts.append({
                        'project': prj,
                        'id': row[0],
                        'forum_id': row[1],
                        'topic_id': row[2],
                        'time': to_datetime(row[3]),
                        'subject': unicode((row[4] != '') and row[4]
                                           or '<no subject>'),
                        'body': unicode(row[5]),
                        # topic id 0 marks the row from the first
                        # SELECT, i.e. a newly opened topic.
                        'type': (row[2] == 0) and 'NEWTOPIC' or 'POST'
                    })
            except Exception:
                # Narrowed from a bare 'except:'; still best-effort
                # per project, but no longer traps SystemExit etc.
                self.log.exception(
                    "MyProjectsModule._get_posts query failed for "
                    "project %s with query: '''%s'''"
                    % (prj.env_name, query))
    posts.sort(key=operator.itemgetter('time'), reverse=True)
    return posts
def _mock_failed_attempt(self, requests=1):
    """Record *requests* fake failed-login attempts in the session.

    Maintains the same session keys the production code uses
    ('failed_logins', 'failed_logins_count', 'lock_count') and
    returns the timestamp written into each attempt.
    """
    ipnr = '127.0.0.1'
    ts = to_timestamp(to_datetime(None))
    attempts = eval(self.session.get('failed_logins', '[]'))
    count = int(self.session.get('failed_logins_count', 0))
    lock_count = int(self.session.get('lock_count', 0))
    # Renamed from 'max' to avoid shadowing the builtin.
    max_count = self.env.config.getint('account-manager',
                                       'login_attempt_max_count')
    for r in range(requests):
        attempts.append(dict(ipnr=ipnr, time=ts))
        count += 1
        # Assume, that every lock is enforced.
        if count >= max_count:
            lock_count += 1
    self.session['failed_logins'] = str(attempts)
    self.session['failed_logins_count'] = count
    self.session['lock_count'] = lock_count
    self.session.save()
    return ts
def failed_count(self, user, ipnr=None, reset=False):
    """Report number of previously logged failed login attempts.

    Enforce login policy with regards to tracking of login attempts
    and user account lock behavior.

    Default `False` for reset value causes logging of another attempt.
    `None` value for reset just reads failed login attempts count.
    `True` value for reset triggers final log deletion.
    """
    # Unknown or empty users are never tracked.
    if not user or not user_known(self.env, user):
        return 0
    key = 'failed_logins_count'
    value = get_user_attribute(self.env, user, 1, key)
    count = value and user in value and int(value[user][1].get(key)) or 0
    if reset is None:
        # Report failed attempts count only.
        return count
    if not reset:
        # Trigger the failed attempt logger.
        attempts = self.get_failed_log(user)
        log_length = len(attempts)
        if log_length > self.login_attempt_max_count:
            # Truncate attempts list preserving most recent events.
            del attempts[:(log_length - self.login_attempt_max_count)]
        attempts.append({'ipnr': ipnr,
                         'time': to_timestamp(to_datetime(None))})
        count += 1
        # Update or create attempts counter and list.
        set_user_attribute(self.env, user, 'failed_logins',
                           str(attempts))
        set_user_attribute(self.env, user, key, count)
        self.log.debug("AccountGuard.failed_count(%s) = %s"
                       % (user, count))
    else:
        # Delete existing attempts counter and list.
        del_user_attribute(self.env, user, 1, 'failed_logins')
        del_user_attribute(self.env, user, 1, key)
        # Delete the lock count too.
        self.lock_count(user, 'reset')
    return count
def _load_comment(self, number):
    """Load comment *number* for this blog post.

    Works with both the modern ``env.db_query`` API and the legacy
    ``get_db_cnx`` cursor API. Returns True and fills in
    number/comment/author/time when the comment exists, else False.
    """
    self.env.log.debug("Fetching blog comment number %d for %r" % (
            number, self.post_name))
    sql = ("SELECT comment, author, time "
           "FROM fullblog_comments "
           "WHERE name=%s AND number=%s")
    args = (self.post_name, number)
    if hasattr(self.env, 'db_query'):
        rows = self.env.db_query(sql, args)
    else:
        cursor = self.env.get_db_cnx().cursor()
        cursor.execute(sql, args)
        rows = cursor
    for comment, author, when in rows:
        self.number = number
        self.comment = comment
        self.author = author
        self.time = to_datetime(when, utc)
        return True
    return False
def _load_index(self, book):
    """Build an index of ids for a book's SGML source file.

    Scans the book's main SGML file with ``self._regex`` and returns a
    dict mapping each matched id (dashes replaced by underscores) to a
    ``(path_within_book, mtime, owner)`` tuple, where mtime/owner come
    from the file's stat info.
    """
    values = self._get_values(book)
    book_path = values[0]
    book_sgml = values[3]
    full_path = os.path.join(book_path, book_sgml)
    stats = os.stat(full_path)
    date = datefmt.to_datetime(stats.st_mtime)
    user = pwd.getpwuid(stats.st_uid)[0]
    ids = {}
    # 'with' guarantees the file handle is closed even if matching
    # raises, unlike the previous open()/close() pair.
    with open(full_path) as fp:
        for line in fp:
            match = self._regex.match(line)
            if match:
                ids[match.group(1).replace('-', '_')] = (
                    os.path.join(book, match.group(2)), date, user)
    return ids