Example #1
def get_num_closed_tix(db, from_date, at_date, milestone):
    """Returns an integer of the number of close ticket events counted
    between from_date to at_date."""

    cursor = db.cursor()

    args = [to_timestamp(from_date), to_timestamp(at_date)]
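    # Optionally restrict the count to a single milestone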
    milestone_str = ''
    if milestone:
        args.append(milestone)
        milestone_str += 'AND t.milestone = %s'

    # Count tickets between two dates (note: does not account for tickets
    # that were closed and then reopened between the two dates)
    cursor.execute("""
        SELECT newvalue
        FROM ticket_change tc
        INNER JOIN ticket t ON t.id = tc.ticket AND tc.time > %%s
          AND tc.time <= %%s AND tc.field = 'status' %s
        ORDER BY tc.time""" % milestone_str, args)

    closed_count = 0
    for (status,) in cursor:
        if status == 'closed':
            closed_count += 1

    return closed_count
Example #2
    def test_get_changes(self):
        t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
        t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
        cursor = self.db.cursor()
        cursor.execute("INSERT INTO revision (rev,time,author,message) "
                       "VALUES (0,%s,'','')", (to_timestamp(t1),))
        cursor.execute("INSERT INTO revision (rev,time,author,message) "
                       "VALUES (1,%s,'joe','Import')", (to_timestamp(t2),))
        cursor.executemany("INSERT INTO node_change (rev,path,node_type,"
                           "change_type,base_path,base_rev) "
                           "VALUES ('1',%s,%s,%s,%s,%s)",
                           [('trunk', 'D', 'A', None, None),
                            ('trunk/README', 'F', 'A', None, None)])
        cursor.execute("UPDATE system SET value='1' WHERE name='youngest_rev'")

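        # Stub repository whose metadata mirrors the rows inserted above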
        repos = Mock(Repository, 'test-repos', None, self.log,
                     get_changeset=lambda x: None,
                     get_youngest_rev=lambda: 1,
                     get_oldest_rev=lambda: 0,
                     next_rev=lambda x: None,
                     normalize_rev=lambda rev: rev)
        cache = CachedRepository(self.db, repos, None, self.log)
        self.assertEqual('1', cache.youngest_rev)
        changeset = cache.get_changeset(1)
        self.assertEqual('joe', changeset.author)
        self.assertEqual('Import', changeset.message)
        self.assertEqual(t2, changeset.date)
        changes = changeset.get_changes()
        self.assertEqual(('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                         changes.next())
        self.assertEqual(('trunk/README', Node.FILE, Changeset.ADD, None, None),
                         changes.next())
        self.assertRaises(StopIteration, changes.next)
Example #3
    def _get_num_closed_tix(self, from_date, at_date, req, ticketFilter=""):
        """Returns an integer of the number of close ticket events counted
        between from_date to at_date."""

        status_map = {
            'new': 0,
            'reopened': 0,
            'assigned': 0,
            'closed': 1,
            'edit': 0
        }

        count = 0

        db = self.env.get_db_cnx()
        cursor = db.cursor()

        cursor.execute(
            """
            SELECT t.id, tc.field, tc.time, tc.oldvalue, tc.newvalue,
              t.priority
            FROM ticket_change tc
              INNER JOIN ticket t ON t.id = tc.ticket
              INNER JOIN enum p ON p.name = t.priority AND p.type = 'priority'
            WHERE tc.time > %s AND tc.time <= %s %s
            ORDER BY tc.time
            """ %
            (to_timestamp(from_date), to_timestamp(at_date), ticketFilter))

        for tid, field, time, old, status, priority in cursor:
            if field == 'status':
                if status in ('new', 'assigned', 'reopened', 'closed', 'edit'):
                    count += status_map[status]

        return count
Example #4
    def test_initial_sync(self):
        t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
        t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
        changes = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                   ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
        changesets = [Mock(Changeset, 0, '', '', t1,
                           get_changes=lambda: []),
                      Mock(Changeset, 1, 'Import', 'joe', t2,
                           get_changes=lambda: iter(changes))]
        repos = Mock(Repository, 'test-repos', None, self.log,
                     get_changeset=lambda x: changesets[int(x)],
                     get_oldest_rev=lambda: 0,
                     get_youngest_rev=lambda: 1,
                     normalize_rev=lambda x: x,
                     next_rev=lambda x: int(x) == 0 and 1 or None)
        cache = CachedRepository(self.db, repos, None, self.log)
        cache.sync()

        cursor = self.db.cursor()
        cursor.execute("SELECT rev,time,author,message FROM revision")
        self.assertEquals(('0', to_timestamp(t1), '', ''), cursor.fetchone())
        self.assertEquals(('1', to_timestamp(t2), 'joe', 'Import'), cursor.fetchone())
        self.assertEquals(None, cursor.fetchone())
        cursor.execute("SELECT rev,path,node_type,change_type,base_path,"
                       "base_rev FROM node_change")
        self.assertEquals(('1', 'trunk', 'D', 'A', None, None),
                          cursor.fetchone())
        self.assertEquals(('1', 'trunk/README', 'F', 'A', None, None),
                          cursor.fetchone())
        self.assertEquals(None, cursor.fetchone())
Example #5
 def get_timeline_events(self, req, start, stop, filters):
     if 'codereview' in filters:
         crp = CodeReviewPool(self.env)
         for t, author, text, cr_id, status, version, message in \
             crp.get_codereviews_by_time(to_timestamp(start), to_timestamp(stop)):
             if status == str_status["NoNeedToReview"]:
                 continue
             elif status == str_status["CompletelyReview"]:
                 title = Markup(
                     'CodeReview : [ <em title="%s" >%s</em> ] completed by %s'
                     % (message, cr_id, author))
             elif version == 1:
                 title = Markup(
                     'CodeReview : [ <em title="%s" >%s</em> ] created by %s'
                     % (message, cr_id, author))
             else:
                 title = Markup(
                     'CodeReview : [ <em title="%s" >%s</em> ] edited by %s'
                     % (message, cr_id, author))
             href = "%s/%s" % (self.env.href.CodeReview(), cr_id)
             text = wiki_to_oneliner(text,
                                     self.env,
                                     self.env.get_db_cnx(),
                                     shorten=True,
                                     req=req)
             yield 'codereview', href, title, t, author, text
Example #6
    def get_timeline_events(self, req, start, stop, filters):
        if isinstance(start, datetime): # Trac>=0.11
            from trac.util.datefmt import to_timestamp
            start = to_timestamp(start)
            stop = to_timestamp(stop)

        if 'build' in filters:
            add_stylesheet(req, 'BambooTrac/bambootrac.css')

            feed = feedparser.parse(self.feed_url, handlers=[self.bAuth, self.dAuth])

            for entry in feed.entries:

                # check time range
                completed = calendar.timegm(entry.date_parsed)

                # create timeline entry
                if entry.title.find('SUCCESS') >= 0:
                    message = 'Build finished successfully'
                    kind = 'bamboo-successful'
                else:
                    message = 'Build failed'
                    kind = 'bamboo-failed'

                fulltitle = entry.title.split(":")
                newtitle = fulltitle[0]

                href = entry.link
                title = entry.title

                comment = message + ' at ' + format_datetime(completed)

                yield kind, href, newtitle, completed, None, comment
Example #7
    def get_timeline_events(self, req, start, stop, filters):
        if 'mailarchive' in filters:
            add_stylesheet(req, 'mailarchive/css/mailarchive.css')

            db = self.env.get_db_cnx()
            mailarchive_realm = Resource('mailarchive')
            cursor = db.cursor()

            cursor.execute("SELECT id,category as mlname,utcdate as localdate,"
                           "fromname,fromaddr , subject  FROM mailarc "
                           "WHERE utcdate>=%s AND utcdate<=%s ",
                           (to_timestamp(start), to_timestamp(stop)))
            for id,category,localdate, fromname, fromaddr,subject in cursor:
                #if 'WIKI_VIEW' not in req.perm('wiki', name):
                #    continue
                author = get_author(fromname,fromaddr)
                #ctx = context('mailarchive', id)
                
                resource = mailarchive_realm(id=id,version=None)
                if 'MAILARCHIVE_VIEW' not in req.perm(resource):
                    continue
                yield ('mailarchive',
                       datetime.fromtimestamp(localdate, utc),
                       author or '--',
                       (resource,(category,author,subject)))
Example #8
    def get_timeline_events(self, req, start, stop, filters):
        if isinstance(start, datetime):  # Trac>=0.11
            from trac.util.datefmt import to_timestamp
            start = to_timestamp(start)
            stop = to_timestamp(stop)

        if 'build' in filters:
            add_stylesheet(req, 'BambooTrac/bambootrac.css')

            feed = feedparser.parse(self.feed_url,
                                    handlers=[self.bAuth, self.dAuth])

            for entry in feed.entries:

                # check time range
                completed = calendar.timegm(entry.date_parsed)

                # create timeline entry
                if entry.title.find('SUCCESS') >= 0:
                    message = 'Build finished successfully'
                    kind = 'bamboo-successful'
                else:
                    message = 'Build failed'
                    kind = 'bamboo-failed'

                fulltitle = entry.title.split(":")
                newtitle = fulltitle[0]

                href = entry.link
                title = entry.title

                comment = message + ' at ' + format_datetime(completed)

                yield kind, href, newtitle, completed, None, comment
Example #9
def get_blog_comments(env, post_name='', from_dt=None, to_dt=None):
    """ Returns comments as a list of tuples from search based on
    AND input for post_name, and datetime span (from_dt and to_dt):
        (post_name, number, comment, author, time) 
    Instantiate BlogComment objects to get further details of each.
    Example of sorting the output by time, newest first:
        from trac.util.compat import sorted, itemgetter
        comments = get_blog_comments(env)
        sorted(comments, key=itemgetter(4), reverse=True) """

    # Build the list of WHERE restrictions
    args = [post_name and ("name=%s", post_name) or None,
            from_dt and ("time>%s", to_timestamp(from_dt)) or None,
            to_dt and ("time<%s", to_timestamp(to_dt)) or None]
    args = [arg for arg in args if arg]
    where_clause = ""
    where_values = None
    if args:
        where_clause = "WHERE " + " AND ".join([arg[0] for arg in args])
        where_values = tuple([arg[1] for arg in args])

    # Do the SELECT
    cnx = env.get_db_cnx()
    cursor = cnx.cursor()
    sql = "SELECT name, number, comment, author, time " \
            "FROM fullblog_comments " + where_clause
    env.log.debug("get_blog_comments() SQL: %r (%r)" % (sql, where_values))
    cursor.execute(sql, where_values or None)

    # Return the items we have found
    return [(row[0], row[1], row[2], row[3], to_datetime(row[4], utc))
            for row in cursor]
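
A minimal usage sketch of the sorting hint from the docstring above (assuming a Trac `env` object; on current Python the builtin sorted() plus operator.itemgetter replace the trac.util.compat imports):

    from operator import itemgetter

    comments = get_blog_comments(env)  # all comments, unfiltered
    newest_first = sorted(comments, key=itemgetter(4), reverse=True)  # index 4 is the datetime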
Example #10
def get_blog_comments(env, post_name='', from_dt=None, to_dt=None):
    """ Returns comments as a list of tuples from search based on
    AND input for post_name, and datetime span (from_dt and to_dt):
        (post_name, number, comment, author, time)
    Instantiate BlogComment objects to get further details of each.
    Example of sorting the output by time, newest first:
        from trac.util.compat import sorted, itemgetter
        comments = get_blog_comments(env)
        sorted(comments, key=itemgetter(4), reverse=True) """

    # Build the list of WHERE restrictions
    clauses = [
        post_name and ("name=%s", post_name) or None,
        from_dt and ("time>%s", to_timestamp(from_dt)) or None,
        to_dt and ("time<%s", to_timestamp(to_dt)) or None
    ]
    clauses = [arg for arg in clauses if arg]
    where_clause = ""
    args = None
    if clauses:
        where_clause = "WHERE " + " AND ".join([arg[0] for arg in clauses])
        args = tuple([arg[1] for arg in clauses])

    # Return the items we have found
    return [(row[0], row[1], row[2], row[3], to_datetime(row[4], utc))
            for row in env.db_query(
                """
                SELECT name, number, comment, author, time
                FROM fullblog_comments
                """ + where_clause, args)]
Example #11
    def get_timeline_events(self, req, start, stop, filters):
        """
        Return a list of events in the time range given by the `start` and
        `stop` parameters.

        The `filters` parameter is a list of the enabled filters, each item
        being the name of one of the tuples returned by `get_timeline_filters`.

        Since 0.11, the events are `(kind, date, author, data)` tuples,
        where `kind` is a string used for categorizing the event, `date`
        is a `datetime` object, `author` is a string and `data` is some
        private data that the component will reuse when rendering the event.

        When the event has been created indirectly by another module, as
        happens when calling `AttachmentModule.get_timeline_events()`, the
        tuple can also specify the provider explicitly by taking the form
        `(kind, date, author, data, provider)`.
        """
        if 'main_git_repository' in filters or \
            'cloned_git_repository' in filters:
            
            for event in GitHubEvent.get_commit_by_date(
                self.env, to_timestamp(start), to_timestamp(stop), git_url=self.github_url):
                
                if event.is_clone() and 'cloned_git_repository' in filters:
                    yield ('cloned_git_repository',
                        datetime.fromtimestamp(event.time, utc),
                        event.author,
                        event)
                elif not event.is_clone() and 'main_git_repository' in filters:
                    yield ('main_git_repository',
                        datetime.fromtimestamp(event.time, utc),
                        event.author,
                        event) # TODO: only send needed data
Example #12
    def save(self, db=None):
        """Save changes or add a new paste."""
        if db:
            handle_ta = False
        else:
            handle_ta = True
            db = self.env.get_db_cnx()
        cursor = db.cursor()

        if self.time is None:
            self.time = datetime.now(utc)

        if self.id is None:
            cursor.execute(
                'INSERT INTO pastes (title, author, mimetype, '
                'data, time) VALUES (%s, %s, %s, %s, %s)',
                (self.title, self.author, self.mimetype, self.data,
                 to_timestamp(self.time)))
            self.id = db.get_last_id(cursor, 'pastes')
        else:
            cursor.execute(
                'UPDATE pastes SET title=%s, author=%s, mimetype=%s,'
                'data=%s, time=%s WHERE id = %s',
                (self.title, self.author, self.mimetype, self.data,
                 to_timestamp(self.time), self.id))

        if handle_ta:
            db.commit()
Example #13
    def _get_num_closed_tix(self, from_date, at_date, req, ticketFilter=""):
        """Returns an integer of the number of close ticket events counted
        between from_date to at_date."""

        status_map = {
            'new': 0,
            'reopened': 0,
            'assigned': 0,
            'closed': 1,
            'edit': 0
        }

        count = 0

        db = self.env.get_db_cnx()
        cursor = db.cursor()

        cursor.execute("""
            SELECT t.id, tc.field, tc.time, tc.oldvalue, tc.newvalue,
              t.priority
            FROM ticket_change tc
              INNER JOIN ticket t ON t.id = tc.ticket
              INNER JOIN enum p ON p.name = t.priority AND p.type = 'priority'
            WHERE tc.time > %s AND tc.time <= %s %s
            ORDER BY tc.time
            """ % (to_timestamp(from_date), to_timestamp(at_date),
                   ticketFilter))

        for tid, field, time, old, status, priority in cursor:
            if field == 'status':
                if status in ('new', 'assigned', 'reopened', 'closed', 'edit'):
                    count += status_map[status]

        return count
Example #14
def get_blog_comments(env, post_name='', from_dt=None, to_dt=None):
    """ Returns comments as a list of tuples from search based on
    AND input for post_name, and datetime span (from_dt and to_dt):
        (post_name, number, comment, author, time) 
    Instantiate BlogComment objects to get further details of each.
    Example of sorting the output by time, newest first:
        from trac.util.compat import sorted, itemgetter
        comments = get_blog_comments(env)
        sorted(comments, key=itemgetter(4), reverse=True) """

    # Build the list of WHERE restrictions
    args = [
        post_name and ("name=%s", post_name) or None,
        from_dt and ("time>%s", to_timestamp(from_dt)) or None,
        to_dt and ("time<%s", to_timestamp(to_dt)) or None
    ]
    args = [arg for arg in args if arg]
    where_clause = ""
    where_values = None
    if args:
        where_clause = "WHERE " + " AND ".join([arg[0] for arg in args])
        where_values = tuple([arg[1] for arg in args])

    # Do the SELECT
    cnx = env.get_db_cnx()
    cursor = cnx.cursor()
    sql = "SELECT name, number, comment, author, time " \
            "FROM fullblog_comments " + where_clause
    env.log.debug("get_blog_comments() SQL: %r (%r)" % (sql, where_values))
    cursor.execute(sql, where_values or None)

    # Return the items we have found
    return [(row[0], row[1], row[2], row[3], to_datetime(row[4], utc))
            for row in cursor]
Example #15
    def get_timeline_events(self, req, start, stop, filters):
        if 'mentions' not in filters:
            return
        ts_start = to_timestamp(start)
        ts_stop = to_timestamp(stop)

        def make_event(mention):
            ts = mention[4]
            return (
                'mention-%s' % mention[1],
                datetime.fromtimestamp(ts, utc),
                None,
                mention,
            )

        db = self.env.get_db_cnx()
        cursor = db.cursor()
        try:
            cursor.execute("SELECT mentioned, location, uri, text, at FROM mentions WHERE at>=%s AND at<=%s",
                           (ts_start, ts_stop,))
            for r in cursor:
                yield make_event(r)
        except sqlite.OperationalError, e:
            # db lock, table doesn't exist, or something else that's hopefully transient
            self.env.log.info("Failed to fetch mentions: %s" % str(e))
Example #16
def get_num_closed_tix(db, from_date, at_date, milestone):
    """Returns an integer of the number of close ticket events counted
    between from_date to at_date."""

    cursor = db.cursor()

    args = [to_timestamp(from_date), to_timestamp(at_date)]
    milestone_str = ''
    if milestone:
        args.append(milestone)
        milestone_str += 'AND t.milestone = %s'

    # Count tickets between two dates (note: does not account for tickets
    # that were closed and then reopened between the two dates)
    cursor.execute(
        """
        SELECT newvalue
        FROM ticket_change tc
        INNER JOIN ticket t ON t.id = tc.ticket AND tc.time > %%s
          AND tc.time <= %%s AND tc.field = 'status' %s
        ORDER BY tc.time""" % milestone_str, args)

    closed_count = 0
    for (status, ) in cursor:
        if status == 'closed':
            closed_count += 1

    return closed_count
Example #17
    def test_update_page(self):
        cursor = self.db.cursor()
        t = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
        t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
        cursor.execute("INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
                       ('TestPage', 1, to_timestamp(t), 'joe', '::1', 'Bla bla',
                        'Testing', 0))

        page = WikiPage(self.env, 'TestPage')
        page.text = 'Bla'
        page.save('kate', 'Changing', '192.168.0.101', t2)
        self.assertEqual(2, page.resource.version)

        cursor.execute("SELECT version,time,author,ipnr,text,comment,"
                       "readonly FROM wiki WHERE name=%s", ('TestPage',))
        self.assertEqual((1, to_timestamp(t), 'joe', '::1', 'Bla bla', 'Testing', 0),
                         cursor.fetchone())
        self.assertEqual((2, to_timestamp(t2), 'kate', '192.168.0.101', 'Bla',
                          'Changing', 0), cursor.fetchone())

        listener = TestWikiChangeListener(self.env)
        self.assertEqual((page, 2, t2, 'Changing', 'kate', '192.168.0.101'),
                         listener.changed[0])

        page = WikiPage(self.env, 'TestPage')
        history = list(page.get_history())
        self.assertEqual(2, len(history))
        self.assertEqual((2, t2, 'kate', 'Changing', '192.168.0.101'),
                         history[0])
        self.assertEqual((1, t, 'joe', 'Testing', '::1'), history[1])
Example #18
    def update(self, db=None):
        assert self.name, 'Cannot update milestone with no name'
        if not db:
            db = self.env.get_db_cnx()
            handle_ta = True
        else:
            handle_ta = False

        self.name = simplify_whitespace(self.name)
        cursor = db.cursor()
        self.env.log.info('Updating milestone "%s"' % self.name)
        cursor.execute("UPDATE milestone SET name=%s,due=%s,"
                       "completed=%s,description=%s WHERE name=%s",
                       (self.name, to_timestamp(self.due), to_timestamp(self.completed),
                        self.description,
                        self._old_name))
        self.env.log.info('Updating milestone field of all tickets '
                          'associated with milestone "%s"' % self.name)
        cursor.execute("UPDATE ticket SET milestone=%s WHERE milestone=%s",
                       (self.name, self._old_name))
        self._old_name = self.name

        if handle_ta:
            db.commit()
        TicketSystem(self.env).reset_ticket_fields()
Example #19
    def save(self, db=None):
        """Save changes or add a new paste."""
        if db:
            handle_ta = False
        else:
            handle_ta = True
            db = self.env.get_db_cnx()
        cursor = db.cursor()

        if self.time is None:
            self.time = datetime.now(utc)

        if self.id is None:
            cursor.execute('INSERT INTO pastes (title, author, mimetype, '
                           'data, time) VALUES (%s, %s, %s, %s, %s)',
                           (self.title, self.author, self.mimetype, self.data,
                            to_timestamp(self.time)))
            self.id = db.get_last_id(cursor, 'pastes')
        else:
            cursor.execute('UPDATE pastes SET title=%s, author=%s, mimetype=%s,'
                           'data=%s, time=%s WHERE id = %s', (
                self.title, self.author, self.mimetype, self.data,
                to_timestamp(self.time), self.id
            ))

        if handle_ta:
            db.commit()
Example #20
def get_pastes(env,
               number=None,
               offset=None,
               from_dt=None,
               to_dt=None,
               db=None):
    """Returns a list of pastes as dicts without data.

    One or more filters need to be set:
     * number - maximum number of items that may be returned
     * offset - number of items to skip in returned results
     * from_dt - pasted on or after the given time (datetime object)
     * to_dt - pasted before or on the given time (datetime object)

    Each item in the returned list is a dict with the keys:
        (id, title, author, time)
    where time is in UTC.

    To get the paste data, use id to instantiate a Paste object."""

    db = db or env.get_db_cnx()
    cursor = db.cursor()

    sql = "SELECT id, title, author, time FROM pastes"
    order_clause = " ORDER BY id DESC"
    limit_clause = ""
    if number:
        limit_clause += " LIMIT %s" % number
    if offset:
        limit_clause += " OFFSET %s" % offset

    where_clause = ""
    where_values = None
    args = [
        from_dt and ("time>%s", to_timestamp(from_dt)) or None,
        to_dt and ("time<%s", to_timestamp(to_dt)) or None
    ]
    args = [arg for arg in args if arg]  # Get rid of the None values
    if args:
        where_clause = " WHERE " + " AND ".join([arg[0] for arg in args])
        where_values = tuple([arg[1] for arg in args])

    sql += where_clause + order_clause + limit_clause

    env.log.debug("get_pastes() SQL: %r (%r)" % (sql, where_values))
    cursor.execute(sql, where_values)

    result = []
    for row in cursor:
        result.append({
            'id': row[0],
            'title': row[1],
            'author': row[2],
            'time': datetime.fromtimestamp(row[3], utc)
        })
    return result
Example #21
    def fetch_tickets(self, tickets, ids, detailed):
        for id in ids:
            t = { 'id': id }
            try:
                ticket = model.Ticket(self.env, id)
            except:
                self.log.error('Failed to fetch ticket %d' % id)
                if str(id) in tickets:
                    del tickets[str(id)]
                continue

            # Get mandatory fields
            for field_name in self.mandatory_fields:
                t[field_name] = ticket.get_value_or_default(field_name)

            if id in detailed:
                # Get fields that are always shown in the detail dialog
                for field_name in self.always_shown_fields:
                    if field_name not in t:
                        t[field_name] = ticket.get_value_or_default(field_name)

                # Get user specified extra fields
                for field_name in self.fields:
                    if field_name not in self.mandatory_fields:
                        t[field_name] = ticket.get_value_or_default(field_name)

                # Convert DateTimes to (millisecond) timestamps
                if 'time' in t:
                    t['time'] = to_timestamp(t['time']) * 1000
                if 'changetime' in t:
                    t['changetime'] = to_timestamp(t['changetime']) * 1000

                # Get changes and comments and group changes from same action together
                t['changelog'] = []
                changelog = ticket.get_changelog()
                time_entry = None
                for log_item in changelog:
                    current_time = to_timestamp(log_item[0]) * 1000
                    if time_entry is None or time_entry['time'] < current_time:
                        if time_entry is not None:
                            t['changelog'].append(time_entry)
                        time_entry = {}
                        time_entry['time'] = current_time
                        time_entry['author'] = log_item[1]
                        time_entry['changes'] = []

                    change_entry = {}
                    change_entry['field'] = log_item[2]
                    change_entry['oldValue'] = log_item[3]
                    change_entry['newValue'] = log_item[4]
                    time_entry['changes'].append(change_entry)

                if time_entry is not None:
                    t['changelog'].append(time_entry)

            tickets[str(id)] = t
Example #22
    def fetch_tickets(self, tickets, ids, detailed):
        for id in ids:
            t = { 'id': id }
            try:
                ticket = model.Ticket(self.env, id)
            except:
                self.log.error('Failed to fetch ticket %d' % id)
                if str(id) in tickets:
                    del tickets[str(id)]
                continue

            # Get mandatory fields
            for field_name in self.mandatory_fields:
                t[field_name] = ticket.get_value_or_default(field_name)

            if id in detailed:
                # Get fields that are always shown in the detail dialog
                for field_name in self.always_shown_fields:
                    if field_name not in t:
                        t[field_name] = ticket.get_value_or_default(field_name)

                # Get user specified extra fields
                for field_name in self.fields:
                    if field_name not in self.mandatory_fields:
                        t[field_name] = ticket.get_value_or_default(field_name)

                # Convert DateTimes to (millisecond) timestamps
                if 'time' in t:
                    t['time'] = to_timestamp(t['time']) * 1000
                if 'changetime' in t:
                    t['changetime'] = to_timestamp(t['changetime']) * 1000

                # Get changes and comments and group changes from same action together
                t['changelog'] = []
                changelog = ticket.get_changelog()
                time_entry = None
                for log_item in changelog:
                    current_time = to_timestamp(log_item[0]) * 1000
                    if time_entry is None or time_entry['time'] < current_time:
                        if time_entry is not None:
                            t['changelog'].append(time_entry)
                        time_entry = {}
                        time_entry['time'] = current_time
                        time_entry['author'] = log_item[1]
                        time_entry['changes'] = []

                    change_entry = {}
                    change_entry['field'] = log_item[2]
                    change_entry['oldValue'] = log_item[3]
                    change_entry['newValue'] = log_item[4]
                    time_entry['changes'].append(change_entry)

                if time_entry is not None:
                    t['changelog'].append(time_entry)

            tickets[str(id)] = t
Example #23
    def get_timeline_events(self, req, start, stop, filters):
        self.log.debug("start: %s, stop: %s, filters: %s", start, stop,
                       filters)
        if ('downloads' in filters) and ('DOWNLOADS_VIEW' in req.perm):
            api = self.env[DownloadsApi]

            # Get message events
            for download in api.get_new_downloads(to_timestamp(start),
                                                  to_timestamp(stop)):
                yield ('newticket', to_datetime(download['time'], utc),
                       download['author'], download['id'])
Example #24
    def get_annotation_data(self, context):
        add_stylesheet(context.req, 'bitten/bitten_coverage.css')

        resource = context.resource

        # attempt to use the version passed in with the request,
        # otherwise fall back to the latest version of this file.
        version = context.req.args.get('rev', resource.version)
        # get the last change revision for the file so that we can
        # pick coverage data as latest(version >= file_revision)
        created = context.req.args.get('created', resource.version)

        repos = self.env.get_repository()
        version_time = to_timestamp(repos.get_changeset(version).date)
        if version != created:
            created_time = to_timestamp(repos.get_changeset(created).date)
        else:
            created_time = version_time

        self.log.debug("Looking for coverage report for %s@%s [%s:%s]..." %
                       (resource.id, str(resource.version), created, version))

        db = self.env.get_db_cnx()
        cursor = db.cursor()
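        # The i1/i2 self-join pairs each report's 'file' item with its 'line_hits' value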
        cursor.execute(
            """
                SELECT b.id, b.rev, i2.value
                FROM bitten_config AS c
                    INNER JOIN bitten_build AS b ON c.name=b.config
                    INNER JOIN bitten_report AS r ON b.id=r.build
                    INNER JOIN bitten_report_item AS i1 ON r.id=i1.report
                    INNER JOIN bitten_report_item AS i2 ON (i1.item=i2.item
                                                    AND i1.report=i2.report)
                WHERE i2.name='line_hits'
                    AND b.rev_time>=%s
                    AND b.rev_time<=%s
                    AND i1.name='file'
                    AND """ + db.concat('c.path', "'/'", 'i1.value') + """=%s
                ORDER BY b.rev_time DESC LIMIT 1""",
            (created_time, version_time, resource.id.lstrip('/')))

        row = cursor.fetchone()
        if row:
            build_id, build_rev, line_hits = row
            coverage = line_hits.split()
            self.log.debug("Coverage annotate for %s@%s using build %d: %s",
                           resource.id, build_rev, build_id, coverage)
            return coverage
        add_warning(
            context.req, "No coverage annotation found for "
            "/%s for revision range [%s:%s]." %
            (resource.id.lstrip('/'), version, created))
        return []
Example #25
 def get_timeline_events(self, req, start, stop, filters):
     try:
         master = BuildBotSystem(self.buildbot_url)
     except Exception as e:
         print('Error hitting BuildBot', e)
         return
     # This was a comprehension: the loop is clearer
     for build in master.getAllBuildsInInterval(to_timestamp(start), to_timestamp(stop)):
         # BuildBot builds are reported as
         # (builder_name, num, end, branch, rev, results, text)
         print('Reporting build', build)
         yield ('build', to_datetime(build[2]), '', build)
Example #26
 def get_timeline_events(self, req, start, stop, filters):
     self.log.debug("Monit: get_timeline_events() called")
     conn = self.get_db_cnx()
     
     myfilter = [f for f in filters if f.startswith('monit_')]
     event_filter = [k for k,v in srv_types.items() if v in [f.split('_')[1] for f in myfilter]]
     self.log.debug("Input: %s, filtered: %s, Types: %s" % (filters, myfilter, event_filter))
     
     if event_filter:
         #monit_realm = Resource('monit')
         cur = conn.cursor()
         sql = "SELECT COUNT(*) AS events FROM event WHERE \
                 collected_sec >=? AND collected_sec <=? AND type IN (%s)" % ','.join(['?' for e in event_filter])
         cur.execute(sql, (to_timestamp(start), to_timestamp(stop))+tuple(event_filter))
         
         self.log.debug("There are currently %s events for the range from %s to %s" % (
                     cur.fetchall(), start, stop))
         
         sql = "SELECT * FROM event WHERE collected_sec >=? \
                 AND collected_sec <=? AND type IN (%s)" % ','.join(['?' for e in event_filter])
         cur.execute(sql, (to_timestamp(start), to_timestamp(stop))+tuple(event_filter))
         events = cur.fetchall()
         
         for evt in events:
             self.log.debug("Found monit event  %s" % evt)
             srv_table = "%s_service" % srv_types[evt['type']]
             
             #self.log.debug("Selecting service entry with %s" )
             cur.execute("SELECT * FROM %s WHERE id=? LIMIT 1" % srv_table, (evt['service_id'],))
             srv = cur.fetchone()
             #self.log.debug("Found service entry %s" % srv)
             
             if srv:
                 cur.execute("SELECT * FROM monit WHERE id=?", (srv['monit_id'],))
                 monit = cur.fetchone()
                 #self.log.debug("Found monit instance %s" % monit)
                 
                 if monit:
                     msg = ('monit', datetime.fromtimestamp(evt['collected_sec'], utc), 
                         'monit@%s' % monit['localhostname'], (evt, srv, monit))
                 else:        
                     self.log.warning("No monit entry with id '%s' found while rendering event '%s'." % (
                          srv['monit_id'], evt['id']))
                     msg = ('monit', datetime.fromtimestamp(evt['collected_sec'], utc), 
                         'monit@unknown', (evt, srv, None))
             else:            
                 self.log.warning("No service entry with id '%s' found while rendering event '%s'." % (
                          evt['service_id'], evt['id']))            
                 msg = ('monit', datetime.fromtimestamp(evt['collected_sec'], utc), 
                         'monit@unknown', (evt, None, None))
             yield msg
     conn.close()
Example #27
    def get_annotation_data(self, context):
        add_stylesheet(context.req, 'bitten/bitten_coverage.css')

        resource = context.resource

        # attempt to use the version passed in with the request,
        # otherwise fall back to the latest version of this file.
        version = context.req.args.get('rev') or resource.version
        # get the last change revision for the file so that we can
        # pick coverage data as latest(version >= file_revision)
        created = context.req.args.get('created') or resource.version

        full_path = get_resource_path(resource)
        _name, repos, _path = get_repos(self.env, full_path, None)
        version_time = to_timestamp(repos.get_changeset(version).date)
        if version != created:
            created_time = to_timestamp(repos.get_changeset(created).date)
        else:
            created_time = version_time

        self.log.debug("Looking for coverage report for %s@%s [%s:%s]..." % (
                        full_path, str(resource.version), created, version))

        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("""
                SELECT b.id, b.rev, i2.value
                FROM bitten_config AS c
                    INNER JOIN bitten_build AS b ON c.name=b.config
                    INNER JOIN bitten_report AS r ON b.id=r.build
                    INNER JOIN bitten_report_item AS i1 ON r.id=i1.report
                    INNER JOIN bitten_report_item AS i2 ON (i1.item=i2.item
                                                    AND i1.report=i2.report)
                WHERE i2.name='line_hits'
                    AND b.rev_time>=%s
                    AND b.rev_time<=%s
                    AND i1.name='file'
                    AND """ + db.concat('c.path', "'/'", 'i1.value') + """=%s
                ORDER BY b.rev_time DESC LIMIT 1""" ,
            (created_time, version_time, full_path))

        row = cursor.fetchone()
        if row:
            build_id, build_rev, line_hits = row
            coverage = line_hits.split()
            self.log.debug("Coverage annotate for %s@%s using build %d: %s",
                            resource.id, build_rev, build_id, coverage)
            return coverage
        add_warning(context.req, "No coverage annotation found for "
                                 "/%s for revision range [%s:%s]." % (
                                 resource.id.lstrip('/'), version, created))
        return []
Example #28
 def get_timeline_events(self, req, start, stop, filters):
     if Key.SPRINT not in filters:
         return
     # prepare select criteria
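      # NOTE: the duplicate 'start' key means only the '<=' bound actually takes effect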
     criteria = {'start': '>=%d' % to_timestamp(start),
                 'start': '<=%d' % to_timestamp(stop)}
     for sprint in self.sp_manager.select(criteria=criteria):
         # the first value of the data tuple tells if we're showing
         # a start or an end date (True=start, False=end), see next function
         if sprint.is_currently_running:
             yield(Key.SPRINT, sprint.start, '', (True, sprint))
         if sprint.is_closed:
             yield(Key.SPRINT, sprint.end, '', (False, sprint))
Example #29
 def get_timeline_events(self, req, start, stop, filters):
     try:
         master = BuildBotSystem(self.buildbot_url)
     except Exception as e:
         print('Error hitting BuildBot', e)
         return
     # This was a comprehension: the loop is clearer
     for build in master.getAllBuildsInInterval(to_timestamp(start),
                                                to_timestamp(stop)):
         # BuildBot builds are reported as
         # (builder_name, num, end, branch, rev, results, text)
         print('Reporting build', build)
         yield ('build', to_datetime(build[2]), '', build)
Example #30
 def get_changesets(self, start, stop):
     db = self.getdb()
     cursor = db.cursor()
     cursor.execute("SELECT rev FROM revision "
                    "WHERE time >= %s AND time < %s "
                    "ORDER BY time DESC, rev DESC",
                    (to_timestamp(start), to_timestamp(stop)))
     for rev, in cursor:
         try:
             if self.authz.has_permission_for_changeset(rev):
                 yield self.get_changeset(rev)
         except NoSuchChangeset:
             pass # skip changesets currently being resync'ed
Example #31
 def test_can_use_simple_sqlalchemy_queries(self):
     yesterday = DateTime.now(utc) - TimeDelta(days=4)
     self._create_user(u'foo', last_visit=yesterday)
     self.session.commit()
     
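      # last_visit is stored as a Unix timestamp, hence the to_timestamp() comparisons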
     user = User.query(self.session).filter(and_(
         User.username == u'foo',
         User._authenticated == True,
         User.last_visit == to_timestamp(yesterday),
     )).one()
     assert_equals(u'foo', user.username)
     assert_true(user._authenticated)
     assert_equals(to_timestamp(yesterday), user.last_visit)
Example #32
    def get_timeline_events(self, req, start, stop, filters, pid, syllabus_id):
        if pid is None:
            return
        is_multi = isinstance(pid, (list, tuple))
        if is_multi:
            # TODO:
            return

        # Worklog changes
        show_starts = 'workstart' in filters
        show_stops = 'workstop' in filters
        if show_starts or show_stops:
            add_stylesheet(req, "worklog/worklogplugin.css")

            ts_start = to_timestamp(start)
            ts_stop = to_timestamp(stop)

            ticket_realm = Resource('ticket')
            db = self.env.get_read_db()
            cursor = db.cursor()

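            # The UNION yields one 'start' and one 'stop' event per work_log row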
            cursor.execute("""
                SELECT wl.worker,wl.ticket,wl.time,wl.starttime,wl.comment,wl.kind,t.summary,t.status,t.resolution,t.type
                FROM (
                    SELECT worker, ticket, starttime AS time, starttime, comment, 'start' AS kind
                    FROM work_log
                    UNION
                    SELECT worker, ticket, endtime AS time, starttime, comment, 'stop' AS kind
                    FROM work_log
                ) AS wl
                JOIN ticket t ON t.id = wl.ticket AND project_id=%s AND wl.time>=%s AND wl.time<=%s 
                ORDER BY wl.time""", (pid, ts_start, ts_stop))

            for worker,tid,ts,ts_start,comment,kind,summary,status,resolution,type in cursor:
                ticket = ticket_realm(id=tid)
                time = to_datetime(ts)
                started = None
                if kind == 'start':
                    if not show_starts:
                        continue
                    yield ('workstart', pid, time, worker, (ticket,summary,status,resolution,type, started, ""))
                else:
                    if not show_stops:
                        continue
                    started = to_datetime(ts_start)
                    if comment:
                        comment = "(Time spent: %s)\n\n%s" % (pretty_timedelta(started, time), comment)
                    else:
                        comment = '(Time spent: %s)' % pretty_timedelta(started, time)
                    yield ('workstop', pid, time, worker, (ticket,summary,status,resolution,type, started, comment))
Example #33
def get_pastes(env, number=None, offset=None, from_dt=None, to_dt=None, db=None):
    """Returns a list of pastes as dicts without data.

    One or more filters need to be set:
     * number - maximum number of items that may be returned
     * offset - number of items to skip in returned results
     * from_dt - pasted on or after the given time (datetime object)
     * to_dt - pasted before or on the given time (datetime object)

    Each item in the returned list is a dict with the keys:
        (id, title, author, time)
    where time is in UTC.

    To get the paste data, use id to instantiate a Paste object."""

    db = db or env.get_db_cnx()
    cursor = db.cursor()

    sql = "SELECT id, title, author, time FROM pastes"
    order_clause = " ORDER BY id DESC"
    limit_clause = ""
    if number:
        limit_clause += " LIMIT %s" % number
    if offset:
        limit_clause += " OFFSET %s" % offset

    where_clause = ""
    where_values = None
    args = [from_dt and ("time>%s", to_timestamp(from_dt)) or None,
            to_dt and ("time<%s", to_timestamp(to_dt)) or None]
    args = [arg for arg in args if arg]  # Get rid of the None values
    if args:
        where_clause = " WHERE " + " AND ".join([arg[0] for arg in args])
        where_values = tuple([arg[1] for arg in args])

    sql += where_clause + order_clause + limit_clause

    env.log.debug("get_pastes() SQL: %r (%r)" % (sql, where_values))
    cursor.execute(sql, where_values)

    result = []
    for row in cursor:
        result.append({
            'id':           row[0],
            'title':        row[1],
            'author':       row[2],
            'time':         datetime.fromtimestamp(row[3], utc)
        })
    return result
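
A minimal usage sketch of the filters described in the docstring above (assuming a Trac `env` object and a timezone-aware datetime; the db argument is optional):

    from datetime import datetime, timedelta
    from trac.util.datefmt import utc

    a_week_ago = datetime.now(utc) - timedelta(days=7)
    recent = get_pastes(env, number=10, from_dt=a_week_ago)  # newest ten pastes from the last week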
Example #34
 def get_timeline_events(self, req, start, stop, filters):
     if Key.SPRINT not in filters:
         return
     # prepare select criteria
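      # NOTE: the duplicate 'start' key means only the '<=' bound actually takes effect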
     criteria = {
         'start': '>=%d' % to_timestamp(start),
         'start': '<=%d' % to_timestamp(stop)
     }
     for sprint in self.sp_manager.select(criteria=criteria):
         # the first value of the data tuple tells if we're showing
         # a start or an end date (True=start, False=end), see next function
         if sprint.is_currently_running:
             yield (Key.SPRINT, sprint.start, '', (True, sprint))
         if sprint.is_closed:
             yield (Key.SPRINT, sprint.end, '', (False, sprint))
Example #35
 def get_timeline_events(self, req, start, stop, filters):
     if ('downloads' in filters) and ('DOWNLOADS_VIEW' in req.perm):
         # Create context.
         context = Context.from_request(req)('downloads-timeline')
         db = self.env.get_db_cnx()
         context.cursor = db.cursor()
         try:
             # Get API component.
             api = self.env[DownloadsApi]
             # Get message events
             for download in api.get_new_downloads(context, to_timestamp(start), to_timestamp(stop)):
                 yield ('newdownload', download['time'], download['author'], download['id'])
         except:
              # The API may be disabled or the tables may not yet exist in the db
             pass
Example #36
 def get_timeline_events(self, req, start, stop, filters):
     if ("downloads" in filters) and ("DOWNLOADS_VIEW" in req.perm):
         # Create context.
         context = Context.from_request(req)("downloads-timeline")
         db = self.env.get_db_cnx()
         context.cursor = db.cursor()
         try:
             #  Get API component.
             api = self.env[DownloadsApi]
             # Get message events
             for download in api.get_new_downloads(context, to_timestamp(start), to_timestamp(stop)):
                 yield ("newdownload", download["time"], download["author"], download["id"])
         except:
              # The API may be disabled or the tables may not yet exist in the db
             pass
Example #37
    def expand_macro(self, formatter, name, content):

        arg, kwarg = parse_args(content)

        includepattern = kwarg.get('include', '')
        #excludepattern = kwarg.get('exclude', '')
        length = int(kwarg.get('max', -1))
        ignorenoduedate = kwarg.get('ignore') == 'noduedate' or None

        if length == -1:
            length = None

        out = StringIO()

        include = re.compile(includepattern)
        #exclude = re.compile(excludepattern)

        milestones = []

        for milestone in Milestone.select(self.env, include_completed=False):
            if include.match(
                    milestone.name):  # and not exclude.match(milestone.name):
                milestones.append(milestone)

        out.write('<ul>\n')
        for milestone in milestones[0:length]:

            if milestone.due:
                #TODO: add one day to tdelta
                tdelta = (to_timestamp(milestone.due) -
                          to_timestamp(datetime.now(formatter.req.tz)))
                if tdelta > 0:
                    date = format_date(milestone.due, '%Y-%m-%d',
                                       formatter.req.tz)
                else:
                    date = None
            elif not ignorenoduedate:
                date = Markup('<i>(Unspecified)</i>')
            else:
                date = None

            if date:
                out.write('<li>%s - <a href="%s">%s</a></li>\n' %
                          (date, self.env.href.milestone(
                              milestone.name), milestone.name))

        out.write('</ul>\n')
        return Markup(out.getvalue())
Example #38
    def get_timeline_events(self, req, start, stop, filters):
        # Worklog changes
        show_starts = 'workstart' in filters
        show_stops = 'workstop' in filters
        if show_starts or show_stops:
            add_stylesheet(req, "worklog/worklogplugin.css")
            
            ts_start = to_timestamp(start)
            ts_stop = to_timestamp(stop)

            ticket_realm = Resource('ticket')
            db = self.env.get_db_cnx()
            cursor = db.cursor()

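            # Each work_log row contributes both a 'start' and a 'stop' event via the UNION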
            cursor.execute("""SELECT wl.worker,wl.ticket,wl.time,wl.starttime,wl.comment,wl.kind,t.summary,t.status,t.resolution,t.type
                             FROM (
                             
                             SELECT worker, ticket, starttime AS time, starttime, comment, 'start' AS kind
                             FROM work_log

                             UNION

                             SELECT worker, ticket, endtime AS time, starttime, comment, 'stop' AS kind
                             FROM work_log

                             ) AS wl
                             INNER JOIN ticket t ON t.id = wl.ticket 
                                 AND wl.time>=%s AND wl.time<=%s 
                           ORDER BY wl.time"""
                           % (ts_start, ts_stop))
            previous_update = None
            for worker,tid,ts,ts_start,comment,kind,summary,status,resolution,type in cursor:
                ticket = ticket_realm(id=tid)
                time = datetime.fromtimestamp(ts, utc)
                started = None
                if kind == 'start':
                    if not show_starts:
                        continue
                    yield ('workstart', time, worker, (ticket,summary,status,resolution,type, started, ""))
                else:
                    if not show_stops:
                        continue
                    started = datetime.fromtimestamp(ts_start, utc)
                    if comment:
                        comment = "(Time spent: %s)[[BR]]%s" % (pretty_timedelta(started, time), comment)
                    else:
                        comment = '(Time spent: %s)' % pretty_timedelta(started, time)
                    yield ('workstop', time, worker, (ticket,summary,status,resolution,type, started, comment))
Example #39
 def _event_data(self, provider, event):
     """Compose the timeline event date from the event tuple and prepared
     provider methods"""
     if len(event) == 6:  # 0.10 events
         kind, url, title, date, author, markup = event
         data = {"url": url, "title": title, "description": markup}
         render = lambda field, context: data.get(field)
     else:  # 0.11 events
         if len(event) == 5:  # with special provider
             kind, date, author, data, provider = event
         else:
             kind, date, author, data = event
         render = lambda field, context: provider.render_timeline_event(context, field, event)
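     # Accept either a datetime or a raw Unix timestamp and expose both forms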
     if isinstance(date, datetime):
         dateuid = to_timestamp(date)
     else:
         dateuid = date
         date = datetime.fromtimestamp(date, utc)
     return {
         "kind": kind,
         "author": author,
         "date": date,
         "dateuid": dateuid,
         "render": render,
         "event": event,
         "data": data,
         "provider": provider,
     }
Example #40
    def _save_ticket_changes(self, req, env, log, selectedTickets, tickets,
                             values, comment, modify_changetime,
                             send_notifications):
        for id in selectedTickets:
            if id in tickets:
                t = Ticket(env, int(id))
                new_changetime = datetime.now(utc)

                log_msg = ""
                if not modify_changetime:
                    original_changetime = to_timestamp(t.time_changed)

                _values = values.copy()
                for field in [f for f in values.keys() \
                              if f in self._fields_as_list]:
                    _values[field] = self._merge_keywords(
                        t.values[field], values[field], log)

                t.populate(_values)
                t.save_changes(req.authname, comment, when=new_changetime)

                if send_notifications:
                    tn = TicketNotifyEmail(env)
                    tn.notify(t, newticket=0, modtime=new_changetime)

                if not modify_changetime:
                    self._reset_changetime(env, original_changetime, t)
                    log_msg = "(changetime not modified)"

                log.debug('BatchModifyPlugin: saved changes to #%s %s' %
                          (id, log_msg))
Example #41
    def user_locked(self, user):
        """Returns whether the user account is currently locked.

        Returns True if locked, False if not, and None otherwise.
        """
        if self.login_attempt_max_count < 1 or not user or \
                not user_known(self.env, user):
            self.log.debug(
                "AccountGuard.user_locked(%s) = None (%s)"
                % (user, self.login_attempt_max_count < 1 and \
                   'disabled by configuration' or 'anonymous user'))
            return None
        count = self.failed_count(user, reset=None)
        if count < self.login_attempt_max_count:
            self.log.debug(
                "AccountGuard.user_locked(%s) = False (try left)" % user)
            return False
        ts_release = self.release_time(user)
        if ts_release == 0:
            # Account locked permanently.
            self.log.debug(
                "AccountGuard.user_locked(%s) = True (permanently)" % user)
            return True
        # Time-locked or time-lock expired.
        ts_now = to_timestamp(to_datetime(None))
        locked = ts_release - ts_now > 0
        self.log.debug(
            "AccountGuard.user_locked(%s) = %s (%s)"
            % (user, locked, locked and 'time-lock' or 'lock expired'))
        return locked
Example #42
 def get_remaining_time(self, day=None):
     """Returns the remaining time on a specific day, passed as a date or
     ordinal value. If none, returns remaining time for today"""
     if day is None:
         # if is today, just return the current remaining time
         return float(self.task[Key.REMAINING_TIME] or 0)
     timestamp = self._get_timestamp(day)
     
     available_timestamps = sorted(self.history)
     remaining_time = None
     if len(available_timestamps) > 0:
         if timestamp < available_timestamps[0]:
             return 0.0
         elif timestamp >= available_timestamps[-1]:
             return self.history[available_timestamps[-1]]
         else:
             last_timestamp = available_timestamps[0]
             for a_timestamp in available_timestamps:
                 if a_timestamp > timestamp:
                     # the last one was the good one
                     remaining_time = self.history[last_timestamp]
                     break
                 last_timestamp = a_timestamp
     else:
         # In case timestamp was built from an ordinal, we must use 0:00 to 
         # check instead of the current time
         if timestamp >= to_timestamp(midnight(today(), tz=localtz)):
             remaining_time = float(self.task[Key.REMAINING_TIME] or 0)
         else:
             remaining_time = 0.0
     return remaining_time
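
The history lookup above is easier to follow in isolation. A stand-alone rendering of the non-empty-history branch, with a tiny worked example (illustrative only; `history` maps day timestamps to remaining hours, as above):

    def _remaining_from_history(history, timestamp):
        stamps = sorted(history)
        if timestamp < stamps[0]:
            return 0.0
        if timestamp >= stamps[-1]:
            return history[stamps[-1]]
        last = stamps[0]
        for ts in stamps:
            if ts > timestamp:
                return history[last]   # the last snapshot before `timestamp`
            last = ts

    # _remaining_from_history({100: 8.0, 200: 5.0, 300: 2.0}, 150) == 8.0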
Ejemplo n.º 43
0
def _create_sprints_for_milestones(env, cursor, sprint_table):
    team_name = _create_team(cursor)
    column_names = [c.name for c in sprint_table.columns]
    new_sprint_query = ("INSERT INTO sprint (%s) " % ", ".join(column_names)) + \
                       "VALUES ('%(milestone)s', NULL, %(start)s, %(end)s, '%(milestone)s', '%(team)s')"

    tickets_by_milestone = _fetch_tasks_and_stories(cursor)
    cursor.execute(
        'select name, due, duration from milestone where due is not null and duration is not null'
    )
    for milestone_name, due, duration in cursor.fetchall():
        sprint_end = due
        end_date = to_datetime(sprint_end)
        start_date = end_date - timedelta(days=duration) + timedelta(days=2)
        sprint_start = to_timestamp(start_date)

        ticket_ids = tickets_by_milestone.get(milestone_name, [])
        if len(ticket_ids) > 0:
            parameters = dict(milestone=milestone_name,
                              start=sprint_start,
                              end=sprint_end,
                              team=team_name)
            sql = new_sprint_query % parameters
            cursor.execute(sql)

            for ticket_id in ticket_ids:
                sql = "INSERT INTO ticket_custom (ticket, name, value) values (%d, 'sprint', '%s')"
                cursor.execute(sql % (ticket_id, milestone_name))
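
The sprint start is derived as the milestone's due date minus its duration, plus two days (presumably to skip a weekend), and is then stored as a Unix timestamp. The arithmetic in isolation (the date is invented for the example):

    from datetime import timedelta
    from trac.util.datefmt import to_datetime, to_timestamp

    due = to_datetime(1276819200)            # 2010-06-18 00:00:00 UTC as a timestamp
    start = due - timedelta(days=14) + timedelta(days=2)
    start_ts = to_timestamp(start)           # value bound into the INSERT above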
Ejemplo n.º 44
0
    def _save_ticket_changes(self, req, env, log, selectedTickets, tickets,
                             values, comment, modify_changetime, send_notifications):
        for id in selectedTickets:
            if id in tickets:
                t = Ticket(env, int(id))
                new_changetime = datetime.now(utc)
                
                log_msg = ""
                if not modify_changetime:
                    original_changetime = to_timestamp(t.time_changed)
                
                _values = values.copy()
                for field in [f for f in values.keys() \
                              if f in self._fields_as_list]:
                    _values[field] = self._merge_keywords(t.values[field], 
                                                          values[field],
                                                          log)
                
                t.populate(_values)
                t.save_changes(req.authname, comment, when=new_changetime)
  
                if send_notifications:
                    tn = TicketNotifyEmail(env)
                    tn.notify(t, newticket=0, modtime=new_changetime)

                if not modify_changetime:
                    self._reset_changetime(env, original_changetime, t)
                    log_msg = "(changetime not modified)"

                log.debug('BatchModifyPlugin: saved changes to #%s %s' % 
                          (id, log_msg))
Ejemplo n.º 45
0
 def get_changelog(self, when=None, db=None):
     """Return a iterable of the form:
     (time, author, key, oldvalue, newvalue)
     """
     db = db or self.env.get_db_cnx()
     cursor = db.cursor()
     when_ts = when and to_timestamp(when) or 0
     if when_ts:
         cursor.execute('SELECT time, author, key, oldvalue, newvalue '
                        'FROM boxdb_changes '
                        'WHERE document=%s AND time=%s '
                        'ORDER BY time',
                        (self.name, when_ts))
     else:
         cursor.execute('SELECT time, author, key, oldvalue, newvalue '
                        'FROM boxdb_changes '
                        'WHERE document=%s '
                        'ORDER BY time',
                        (self.name,))
     for t, author, key, oldvalue, newvalue in cursor:
         t = datetime.fromtimestamp(int(t), utc)
         
         if oldvalue:
             oldvalue = simplejson.loads(oldvalue)
         else:
             oldvalue = None
         
         if newvalue:
             newvalue = simplejson.loads(newvalue)
         else:
             newvalue = None
         
         yield t, author, key, oldvalue, newvalue
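
A minimal usage sketch for the changelog iterator (the `page` variable is illustrative; any object exposing `get_changelog()` in this way behaves the same):

    for time, author, key, oldvalue, newvalue in page.get_changelog():
        print('%s changed %s at %s: %r -> %r'
              % (author, key, time, oldvalue, newvalue))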
Ejemplo n.º 46
0
    def populate(self):
        """Add a build for the next change on each build configuration to the
        queue.

        The next change is the latest repository check-in for which there isn't
        a corresponding build on each target platform. Repeatedly calling this
        method will eventually result in the entire change history of the build
        configuration being in the build queue.
        """
        repos = self.env.get_repository()
        assert repos, 'No "(default)" Repository: Add a repository or alias ' \
                      'named "(default)" to Trac.'

        db = self.env.get_db_cnx()
        builds = []

        for config in BuildConfig.select(self.env, db=db):
            platforms = []
            for platform, rev, build in collect_changes(repos, config, db):

                if not self.build_all and platform.id in platforms:
                    # We've seen this platform already, so these are older
                    # builds that should only be built if build_all=True.
                    self.log.debug('Ignoring older revisions for configuration '
                                   '%r on %r', config.name, platform.name)
                    break

                platforms.append(platform.id)

                if build is None:
                    self.log.info('Enqueuing build of configuration "%s" at '
                                  'revision [%s] on %s', config.name, rev,
                                  platform.name)

                    rev_time = to_timestamp(repos.get_changeset(rev).date)
                    age = int(time.time()) - rev_time
                    if self.stabilize_wait and age < self.stabilize_wait:
                        self.log.info('Delaying build of revision %s until %s '
                                      'seconds pass. Current age is: %s '
                                      'seconds' % (rev, self.stabilize_wait,
                                      age))
                        continue

                    build = Build(self.env, config=config.name,
                                  platform=platform.id, rev=str(rev),
                                  rev_time=rev_time)
                    builds.append(build)

        for build in builds:
            try:
                build.insert(db=db)
                db.commit()
            except Exception, e:
                # really only want to catch IntegrityErrors raised when
                # a second slave attempts to add builds with the same
                # (config, platform, rev) as an existing build.
                self.log.info('Failed to insert build of configuration "%s" '
                    'at revision [%s] on platform [%s]: %s',
                    build.config, build.rev, build.platform, e)
                db.rollback()
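
The stabilize_wait handling is just a comparison of the changeset's age in seconds against a configured threshold. Extracted on its own it would look roughly like this (the function name is invented):

    import time
    from trac.util.datefmt import to_timestamp

    def old_enough(changeset, stabilize_wait):
        # True once the changeset has been in the repository for at least
        # `stabilize_wait` seconds; mirrors the age check in populate().
        age = int(time.time()) - to_timestamp(changeset.date)
        return age >= stabilize_wait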
Ejemplo n.º 47
0
    def test_can_serialize_task_to_dict(self):
        task = AgiloTicket(self.env, t_type=Type.TASK)
        self.assertNotEqual('fixed', task[Key.RESOLUTION])
        task[Key.SUMMARY] = 'My Summary'
        task.insert()
        expected = {
            # required
            Key.ID: task.id,
            Key.TYPE: Type.TASK,
            Key.SUMMARY: 'My Summary',
            Key.DESCRIPTION: '',
            Key.STATUS: '',
            Key.RESOLUTION: '',
            Key.REPORTER: '',
            Key.OWNER: '',
            # type specific
            Key.SPRINT: '',
            Key.REMAINING_TIME: '',
            Key.RESOURCES: '',

            # Key.Options is not used in order to reduce required data to
            # transfer for a backlog load.
            'outgoing_links': [],
            'incoming_links': [],
            'time_of_last_change': to_timestamp(task.time_changed),
            'ts': str(task.time_changed),
        }
        if AgiloTicketSystem.is_trac_1_0():
            from trac.util.datefmt import to_utimestamp
            expected.update(
                {'view_time': str(to_utimestamp(task.time_changed))})

        self.assert_equals(expected, task.as_dict())
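
The test touches both timestamp helpers: `to_timestamp` returns whole seconds, while `to_utimestamp` (available in newer Trac versions, as imported above) returns microseconds. For comparison, using 2012-01-01 00:00:00 UTC:

    from datetime import datetime
    from trac.util.datefmt import to_timestamp, to_utimestamp, utc

    dt = datetime(2012, 1, 1, tzinfo=utc)
    to_timestamp(dt)     # 1325376000         (seconds)
    to_utimestamp(dt)    # 1325376000000000   (microseconds)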
Ejemplo n.º 48
0
    def _version_edit(self, data):
        if data.get('milestone'):
            milestone = data.get('milestone').name
        else:
            milestone = ''
        for version, in self.env.db_query(
                """
                SELECT version FROM milestone_version WHERE milestone=%s
                """, (milestone, )):
            break
        else:
            version = None

        return tag.div(
            tag.label(
                'Version:', tag.br(),
                tag.select(
                    tag.option(),
                    [tag.option(name, selected=(version == name or None))
                     for name, in self.env.db_query("""
                         SELECT name FROM version
                         WHERE time IS NULL OR time = 0 OR time > %s
                           OR name = %s
                         ORDER BY name""", (to_timestamp(None), version))],
                    name="version")),
            class_="field")
Ejemplo n.º 49
0
 def _event_data(self, provider, event):
     """Compose the timeline event date from the event tuple and prepared
     provider methods"""
     if len(event) == 6:  # 0.10 events
         kind, url, title, date, author, markup = event
         data = {'url': url, 'title': title, 'description': markup}
         render = lambda field, context: data.get(field)
     else:  # 0.11 events
         if len(event) == 5:  # with special provider
             kind, date, author, data, provider = event
         else:
             kind, date, author, data = event
         render = lambda field, context: \
                 provider.render_timeline_event(context, field, event)
     if isinstance(date, datetime):
         dateuid = to_timestamp(date)
     else:
         dateuid = date
         date = datetime.fromtimestamp(date, utc)
     return {
         'kind': kind,
         'author': author,
         'date': date,
         'dateuid': dateuid,
         'render': render,
         'event': event,
         'data': data,
         'provider': provider
     }
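
For reference, the two event tuple shapes this method accepts look roughly like this (all values are illustrative, using the datetime/utc names already imported in this module):

    # 0.10-style event: (kind, url, title, date, author, markup)
    old_style = ('wiki', '/wiki/Start', 'Start edited',
                 1199145600, 'joe', 'Some <em>markup</em>')

    # 0.11-style event: (kind, date, author, data), optionally followed by a
    # special provider as a fifth element
    new_style = ('wiki', datetime(2008, 1, 1, tzinfo=utc), 'joe', ('Start',))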
Ejemplo n.º 50
0
    def test_can_serialize_task_to_dict(self):
        task = AgiloTicket(self.env, t_type=Type.TASK)
        self.assertNotEqual('fixed', task[Key.RESOLUTION])
        task[Key.SUMMARY] = 'My Summary'
        task.insert()
        expected = {
            # required
            Key.ID: task.id,
            Key.TYPE: Type.TASK,
            Key.SUMMARY: 'My Summary',
            Key.DESCRIPTION: '',
            Key.STATUS: '',
            Key.RESOLUTION: '',
            Key.REPORTER: '',
            Key.OWNER: '',
            # type specific
            Key.SPRINT: '',
            Key.REMAINING_TIME: '',
            Key.RESOURCES: '',
            
            # Key.Options is not used in order to reduce required data to 
            # transfer for a backlog load.
            
            'outgoing_links': [],
            'incoming_links': [],
            'time_of_last_change': to_timestamp(task.time_changed),
            'ts': str(task.time_changed),
        }
        if AgiloTicketSystem.is_trac_1_0():
            from trac.util.datefmt import to_utimestamp
            expected.update({'view_time': str(to_utimestamp(task.time_changed))})

        self.assert_equals(expected, task.as_dict())
Ejemplo n.º 51
0
 def _resolve_milestone(self, name, include_kids, show_completed):
     def _flatten_and_get_names(mil, include_kids, show_completed):
         names = []
         if mil:
             mil = isinstance(mil, StructuredMilestone) and [mil] or mil
             for m in mil:
                 if show_completed or not m.completed:
                     names.append(m.name)
                     if include_kids:
                         names.extend(_flatten_and_get_names(
                             m.kids, include_kids, show_completed))
         return names
     if name == 'nearest':
         db = self.env.get_db_cnx()
         cursor = db.cursor()
         cursor.execute(
             'SELECT name FROM milestone WHERE due>%s ORDER BY due LIMIT 1',
             (to_timestamp(datetime.now(utc)),))
         row = cursor.fetchone()
         name = row and row[0] or 'none'
     elif name == 'not_completed_milestones':
         return _flatten_and_get_names(
             StructuredMilestone.select(self.env, False),
             include_kids, show_completed)
     if name == 'none':
         return ''
     try:
         mil = StructuredMilestone(self.env, name)
         names = _flatten_and_get_names(mil, include_kids, show_completed)
         if not names:
             names = mil.name
         return names
     except ResourceNotFound:
         return ''
Ejemplo n.º 52
0
    def notify_old_tickets(self, req, id, addMessage, changer, new_text):
        try:
            estimate_rs = getEstimateResultSet(self.env, id)
            tickets = estimate_rs.value('tickets', 0)
            old_text = estimate_rs.value('diffcomment', 0)
            tickets = intlist(tickets)
            self.log.debug('About to render the diffs for tickets: %s ' % (tickets, ))
            comment = """{{{
#!html
%s
}}} """ % self.get_diffs(req, old_text, new_text, id)
            self.log.debug('Notifying old tickets of estimate change: %s \n %s' % (tickets, comment))
            return [(estimateChangeTicketComment,
                     [t,
                    # there were problems if we update the same ticket's comment in the same tick,
                    # so we subtract an arbitrary tick to get around this
                      to_timestamp(datetime.datetime.now(utc)) - 1,
                      req.authname,
                      comment
                      ])
                    for t in tickets]
        except Exception, e:
            self.log.error("Error saving old ticket changes: %s" % e)
            addMessage("Tickets must be numbers")
            return None
Ejemplo n.º 53
0
    def start_process_row(self, row_idx, ticket_id):
        from ticket import PatchedTicket
        if ticket_id > 0:
            # existing ticket
            self.ticket = PatchedTicket(self.env, tkt_id=ticket_id, db=self.db)

            # 'Ticket.time_changed' is a datetime in 0.11, and an int in 0.10.
            # if we have trac.util.datefmt.to_datetime, we're likely with 0.11
            try:
                from trac.util.datefmt import to_timestamp
                time_changed = to_timestamp(self.ticket.time_changed)
            except ImportError:
                time_changed = int(self.ticket.time_changed)

            if time_changed > self.tickettime:
                # Just in case, check whether the ticket appears to have been
                # modified in the future (it shouldn't happen, but who knows).
                # If so, don't report it as an error.
                if time_changed < int(time.time()):
                    # TODO: this is not working yet...
                    #
                    #raise TracError("Sorry, can not execute the import. "
                    #"The ticket #" + str(ticket_id) + " has been modified by someone else "
                    #"since preview. You must re-upload and preview your file to avoid overwriting the other changes.")
                    pass

        else:
            self.ticket = PatchedTicket(self.env, db=self.db)
        self.comment = ''
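
The try/except around the import is a small 0.10/0.11 compatibility shim; pulled out into a helper it would look roughly like this (the helper name is invented):

    def _changetime_to_int(ticket):
        # 0.11 stores time_changed as a datetime, 0.10 as an int; normalise
        # both to an integer Unix timestamp.
        try:
            from trac.util.datefmt import to_timestamp
            return to_timestamp(ticket.time_changed)
        except ImportError:
            return int(ticket.time_changed)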
Ejemplo n.º 54
0
    def notify_old_tickets(self, req, id, addMessage, changer, new_text):
        try:
            estimate_rs = getEstimateResultSet(self.env, id)
            tickets = estimate_rs.value('tickets', 0)
            old_text = estimate_rs.value('diffcomment', 0)
            tickets = intlist(tickets)
            self.log.debug('About to render the diffs for tickets: %s ' %
                           (tickets, ))
            comment = """{{{
#!html
%s
}}} """ % self.get_diffs(req, old_text, new_text, id)
            self.log.debug('Notifying old tickets of estimate change: %s \n %s'
                           % (tickets, comment))
            return [
                (
                    estimateChangeTicketComment,
                    [
                        t,
                        # there were problems if we update the same ticket's
                        # comment in the same tick, so we subtract an
                        # arbitrary tick to get around this
                        to_timestamp(datetime.datetime.now(utc)) - 1,
                        req.authname,
                        comment
                    ]) for t in tickets
            ]
        except Exception, e:
            self.log.error("Error saving old ticket changes: %s" % e)
            addMessage("Tickets must be numbers")
            return None
Ejemplo n.º 55
0
    def _add_step_1(self, req, templateData):
        release = model.Release()
        release.version = req.args.get("selectReleaseVersion")
        if not release.version:
            return 'release_add_1.html', templateData, None

        # A version has already been selected here; use its name as the release name
        v = data.loadVersion(self, release.version)
        templateData['releaseName'] = v['name']
        release.planned_date = v['time']
        release.description = v['description']
        release.author = req.authname
        release.creation_date = datefmt.to_timestamp(datefmt.to_datetime(None))

        templateData[
            'releaseAvailableProcedures'] = data.findInstallProcedures(self)

        # Set the tickets according to the selected version
        templateData['releaseTickets'] = ""
        release.tickets = data.getVersionTickets(self, release.version)
        for ticket in release.tickets:
            templateData['releaseTickets'] = templateData[
                'releaseTickets'] + str(ticket.ticket_id) + ","

        templateData['release'] = release

        return ('release_add_2.html', templateData, None)
Ejemplo n.º 56
0
def create_zipinfo(filename, mtime=None, dir=False, executable=False, symlink=False,
                   comment=None):
    """Create a instance of `ZipInfo`.

    :param filename: file name of the entry
    :param mtime: modified time of the entry
    :param dir: if `True`, the entry is a directory
    :param executable: if `True`, the entry is an executable file
    :param symlink: if `True`, the entry is a symbolic link
    :param comment: comment of the entry
    """
    from zipfile import ZipInfo, ZIP_DEFLATED, ZIP_STORED
    zipinfo = ZipInfo()

    # The general purpose bit flag 11 is used to denote
    # UTF-8 encoding for path and comment. Only set it for
    # non-ascii files for increased portability.
    # See http://www.pkware.com/documents/casestudies/APPNOTE.TXT
    if any(ord(c) >= 128 for c in filename):
        zipinfo.flag_bits |= 0x0800
    zipinfo.filename = filename.encode('utf-8')

    if mtime is not None:
        mtime = to_datetime(mtime, utc)
        zipinfo.date_time = mtime.utctimetuple()[:6]
        # The "extended-timestamp" extra field is used for the
        # modified time of the entry in unix time. It avoids
        # extracting wrong modified time if non-GMT timezone.
        # See http://www.opensource.apple.com/source/zip/zip-6/unzip/unzip
        #     /proginfo/extra.fld
        zipinfo.extra += struct.pack(
            '<hhBl',
            0x5455,                 # extended-timestamp extra block type
            1 + 4,                  # size of this block
            1,                      # modification time is present
            to_timestamp(mtime))    # time of last modification

    # external_attr is 4 bytes in size. The high order two
    # bytes represent UNIX permission and file type bits,
    # while the low order two contain MS-DOS FAT file
    # attributes, most notably bit 4 marking directories.
    if dir:
        if not zipinfo.filename.endswith('/'):
            zipinfo.filename += '/'
        zipinfo.compress_type = ZIP_STORED
        zipinfo.external_attr = 040755 << 16L        # permissions drwxr-xr-x
        zipinfo.external_attr |= 0x10                # MS-DOS directory flag
    else:
        zipinfo.compress_type = ZIP_DEFLATED
        zipinfo.external_attr = 0644 << 16L          # permissions -rw-r--r--
        if executable:
            zipinfo.external_attr |= 0755 << 16L     # -rwxr-xr-x
        if symlink:
            zipinfo.compress_type = ZIP_STORED
            zipinfo.external_attr |= 0120000 << 16L  # symlink file type

    if comment:
        zipinfo.comment = comment.encode('utf-8')

    return zipinfo
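
A minimal usage sketch, writing one entry built by create_zipinfo into an archive (the file name, archive path and payload are invented):

    import zipfile
    from datetime import datetime
    from trac.util.datefmt import utc

    info = create_zipinfo(u'notes/readme.txt',
                          mtime=datetime(2012, 1, 1, tzinfo=utc))
    archive = zipfile.ZipFile('example.zip', 'w')
    try:
        archive.writestr(info, 'hello world\n')
    finally:
        archive.close()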
Ejemplo n.º 57
0
    def get_timeline_events(self, req, start, stop, filters):
        if 'build' not in filters:
            return

        # Attachments (will be rendered by attachment module)
        for event in AttachmentModule(self.env).get_timeline_events(
                req, Resource('build'), start, stop):
            yield event

        start = to_timestamp(start)
        stop = to_timestamp(stop)

        add_stylesheet(req, 'bitten/bitten.css')

        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute(
            "SELECT b.id,b.config,c.label,c.path, b.rev,p.name,"
            "b.stopped,b.status FROM bitten_build AS b"
            "  INNER JOIN bitten_config AS c ON (c.name=b.config) "
            "  INNER JOIN bitten_platform AS p ON (p.id=b.platform) "
            "WHERE b.stopped>=%s AND b.stopped<=%s "
            "AND b.status IN (%s, %s) ORDER BY b.stopped",
            (start, stop, Build.SUCCESS, Build.FAILURE))

        repos = self.env.get_repository(authname=req.authname)
        assert repos, 'No "(default)" Repository: Add a repository or alias ' \
                      'named "(default)" to Trac.'

        event_kinds = {
            Build.SUCCESS: 'successbuild',
            Build.FAILURE: 'failedbuild'
        }

        for id_, config, label, path, rev, platform, stopped, status in cursor:
            if not _has_permission(req.perm, repos, path, rev=rev):
                continue
            errors = []
            if status == Build.FAILURE:
                for step in BuildStep.select(self.env,
                                             build=id_,
                                             status=BuildStep.FAILURE,
                                             db=db):
                    errors += [(step.name, error) for error in step.errors]
            display_rev = repos.normalize_rev(rev)
            yield (event_kinds[status], to_datetime(stopped, utc), None,
                   (id_, config, label, display_rev, platform, status, errors))
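
The start/stop bounds arrive from the timeline module as datetimes, are converted to Unix timestamps only for the SQL range comparison, and each row's `stopped` value is converted back with `to_datetime` when the event is yielded. The round trip in isolation (the window size is invented):

    from datetime import datetime, timedelta
    from trac.util.datefmt import to_datetime, to_timestamp, utc

    stop = datetime.now(utc)
    start = stop - timedelta(days=14)
    bounds = (to_timestamp(start), to_timestamp(stop))  # bound into the query
    shown = to_datetime(bounds[1], utc)                 # back to a datetime for display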