Пример #1
0
def expandWithContext(chunks,
                      old_lines,
                      new_lines,
                      context_lines,
                      highlight=True,
                      minimum_gap=3):
    """Expand diff chunks with surrounding context and group them into
    macro chunks.

    Consecutive chunks whose context regions come within 'minimum_gap'
    unchanged lines of each other are merged into the same group; each
    group is rendered as one diff.MacroChunk whose lines include up to
    'context_lines' lines of unchanged context before and after the
    changed lines.

    Parameters:
      chunks        -- iterable of diff.Chunk objects ordered by offset
      old_lines     -- all lines of the old version of the file
      new_lines     -- all lines of the new version of the file
      context_lines -- number of context lines around each group
      highlight     -- if true, apply intra-line diff highlighting to
                       analysed modified lines
      minimum_gap   -- smallest gap (beyond both chunks' context regions)
                       that keeps two chunks in separate groups

    Returns a list of diff.MacroChunk objects ([] if 'chunks' is empty).
    """
    if not chunks: return []

    # Phase 1: partition the chunks into groups of nearby chunks.
    groups = []
    group = []

    chunks = iter(chunks)

    try:
        # next() built-in instead of Py2-only iterator.next(): works on
        # Python 2.6+ and Python 3.
        previousChunk = next(chunks)
        group.append(previousChunk)

        while True:
            nextChunk = next(chunks)

            # Unchanged lines between the end of the previous chunk and
            # the start of the next one.
            distance = nextChunk.delete_offset - (previousChunk.delete_offset +
                                                  previousChunk.delete_count)
            # Lines left over once both chunks' context regions are
            # subtracted from the distance.
            gap_between = distance - 2 * context_lines

            if gap_between >= minimum_gap:
                groups.append(group)
                group = []

            group.append(nextChunk)
            previousChunk = nextChunk
    except StopIteration:
        pass

    groups.append(group)

    # Phase 2: render each group of chunks into one MacroChunk.
    macro_chunks = []

    for group in groups:
        # Start at most 'context_lines' before the first chunk, clamped
        # to the top of the file.
        delete_offset = max(1, group[0].delete_offset - context_lines)
        insert_offset = max(1, group[0].insert_offset - context_lines)

        lines = []

        for chunk in group:
            # Context lines between the previous chunk (or the leading
            # context boundary) and this chunk.
            while delete_offset < chunk.delete_offset:
                lines.append(
                    diff.Line(diff.Line.CONTEXT, delete_offset,
                              old_lines[delete_offset - 1], insert_offset,
                              new_lines[insert_offset - 1]))
                delete_offset += 1
                insert_offset += 1

            if chunk.analysis:
                # The analysis maps deleted lines to inserted lines:
                # "old=new[:ops]" entries separated by ';'.
                mappings = chunk.analysis.split(';')

                for mapping in mappings:
                    if ':' in mapping:
                        mapped_lines, ops = mapping.split(':')
                    else:
                        mapped_lines = mapping
                        ops = None

                    delete_line, insert_line = mapped_lines.split('=')
                    delete_line = chunk.delete_offset + int(delete_line)
                    insert_line = chunk.insert_offset + int(insert_line)

                    # Unmapped pairs before this mapping: plain
                    # modifications without intra-line highlighting.
                    while delete_offset < delete_line and insert_offset < insert_line:
                        lines.append(
                            diff.Line(diff.Line.MODIFIED,
                                      delete_offset,
                                      old_lines[delete_offset - 1],
                                      insert_offset,
                                      new_lines[insert_offset - 1],
                                      is_whitespace=chunk.is_whitespace))
                        delete_offset += 1
                        insert_offset += 1

                    # Leftover old lines are deletions...
                    while delete_offset < delete_line:
                        lines.append(
                            diff.Line(diff.Line.DELETED, delete_offset,
                                      old_lines[delete_offset - 1],
                                      insert_offset, None))
                        delete_offset += 1

                    # ...and leftover new lines are insertions.
                    while insert_offset < insert_line:
                        lines.append(
                            diff.Line(diff.Line.INSERTED, delete_offset, None,
                                      insert_offset,
                                      new_lines[insert_offset - 1]))
                        insert_offset += 1

                    deleted_line = old_lines[delete_offset - 1]
                    inserted_line = new_lines[insert_offset - 1]

                    # The mapped pair itself, optionally with intra-line
                    # diff highlighting applied.
                    if highlight and ops:
                        deleted_line, inserted_line = diff.html.lineDiffHTML(
                            ops, deleted_line, inserted_line)

                    lines.append(
                        diff.Line(diff.Line.MODIFIED,
                                  delete_offset,
                                  deleted_line,
                                  insert_offset,
                                  inserted_line,
                                  is_whitespace=chunk.is_whitespace))

                    delete_offset += 1
                    insert_offset += 1

            # Emit whatever remains of the chunk beyond the analysed
            # mappings (or the whole chunk if it had no analysis).
            deleteStop = chunk.delete_offset + chunk.delete_count
            insertStop = chunk.insert_offset + chunk.insert_count

            while delete_offset < deleteStop and insert_offset < insertStop:
                lines.append(
                    diff.Line(diff.Line.REPLACED,
                              delete_offset,
                              old_lines[delete_offset - 1],
                              insert_offset,
                              new_lines[insert_offset - 1],
                              is_whitespace=chunk.is_whitespace))
                delete_offset += 1
                insert_offset += 1

            while delete_offset < deleteStop:
                lines.append(
                    diff.Line(diff.Line.DELETED, delete_offset,
                              old_lines[delete_offset - 1], insert_offset,
                              None))
                delete_offset += 1

            while insert_offset < insertStop:
                lines.append(
                    diff.Line(diff.Line.INSERTED, delete_offset, None,
                              insert_offset, new_lines[insert_offset - 1]))
                insert_offset += 1

        # Trailing context after the last chunk of the group, clamped to
        # the end of the old file.
        deleteStop = min(len(old_lines) + 1, delete_offset + context_lines)

        while delete_offset < deleteStop:
            lines.append(
                diff.Line(diff.Line.CONTEXT, delete_offset,
                          old_lines[delete_offset - 1], insert_offset,
                          new_lines[insert_offset - 1]))
            delete_offset += 1
            insert_offset += 1

        macro_chunks.append(diff.MacroChunk(group, lines))

    return macro_chunks
Пример #2
0
def renderCodeCommentChain(db,
                           target,
                           user,
                           review,
                           chain,
                           context_lines=3,
                           compact=False,
                           tabify=False,
                           original=False,
                           changeset=None,
                           linkify=False):
    """Render one code comment chain, embedded in its file context, into
    'target'.

    If the chain is not addressed (or 'original' is requested) and it
    spans a single commit, a context-only snippet of that file version is
    rendered.  Otherwise a diff between the relevant parent/child commits
    is generated (reusing 'changeset' when supplied) and the chain's
    lines are shown on the appropriate side of it.  The file is emitted
    through changeset_html.renderFile() together with two inline scripts
    that register the chain and set up client-side comment markers.

    Parameters:
      db            -- database connection
      target        -- HTML fragment to render into
      user          -- the viewing user
      review        -- the review the chain belongs to
      chain         -- the comment chain to render
      context_lines -- lines of context around the commented lines
      compact       -- forwarded to changeset_html.renderFile()
      tabify        -- forwarded to changeset_html.renderFile()
      original      -- if true, render the originally commented lines
                       even when the chain has been addressed
      changeset     -- optional pre-computed changeset to reuse
      linkify       -- forwarded to renderComments()
    """
    repository = review.repository

    old_sha1 = None
    new_sha1 = None

    # Markers for which side of a diff the chain's lines live on.
    old = 1
    new = 2

    cursor = db.cursor()

    file_id = chain.file_id
    file_path = dbutils.describe_file(db, file_id)

    if (chain.state != "addressed"
            or original) and chain.first_commit == chain.last_commit:
        # Simple case: the chain refers to a single version of the file,
        # so render a plain context snippet (no diff) from that version.
        sha1 = chain.first_commit.getFileSHA1(file_path)

        cursor.execute(
            "SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s",
            (chain.id, sha1))
        first_line, last_line = cursor.fetchone()

        file = diff.File(file_id,
                         file_path,
                         sha1,
                         sha1,
                         review.repository,
                         chunks=[])
        file.loadNewLines(True)

        # Clamp the context window to the file's boundaries.
        start = max(1, first_line - context_lines)
        end = min(file.newCount(), last_line + context_lines)
        count = end + 1 - start

        lines = file.newLines(True)
        # Fabricate CONTEXT lines with identical old/new content.
        lines = [
            diff.Line(diff.Line.CONTEXT, start + index,
                      lines[start + index - 1], start + index,
                      lines[start + index - 1]) for index in range(count)
        ]

        file.macro_chunks = [diff.MacroChunk([], lines)]

        use = new
        display_type = "new"
        commit_url_component = "sha1=%s" % chain.first_commit.sha1
    else:
        # Diff case: determine which parent/child commit pair to diff.
        if chain.state == "addressed" and not original and review.containsCommit(
                db, chain.addressed_by):
            # Show the diff introduced by the addressing commit; the
            # chain's lines live on the old side of that diff.
            parent = gitutils.Commit.fromSHA1(db, review.repository,
                                              chain.addressed_by.parents[0])
            child = chain.addressed_by
            use = old
        else:
            parent = chain.first_commit
            child = chain.last_commit

            if parent == child:
                # Single commit: pick an adjacent commit to diff against,
                # depending on which side the chain originated on.
                if chain.origin == "old":
                    cursor.execute(
                        """SELECT changesets.child
                                        FROM changesets, reviewchangesets
                                       WHERE changesets.parent=%s
                                         AND reviewchangesets.changeset=changesets.id
                                         AND reviewchangesets.review=%s""",
                        [child.getId(db), review.id])

                    try:
                        child = gitutils.Commit.fromId(db, repository,
                                                       cursor.fetchone()[0])
                    # NOTE(review): bare except silently falls back to
                    # diffing against the parent commit when no child
                    # changeset is found; narrow this if possible.
                    except:
                        parent = gitutils.Commit.fromSHA1(
                            db, repository, child.parents[0])
                else:
                    parent = gitutils.Commit.fromSHA1(db, repository,
                                                      child.parents[0])

            if chain.origin == "old": use = old
            else: use = new

        if parent.sha1 in child.parents and len(child.parents) == 1:
            # Plain parent->child diff of a non-merge commit.
            commit = child
            from_commit = None
            to_commit = None
        else:
            commit = None
            from_commit = parent
            to_commit = child

        if changeset:
            # A pre-computed changeset was supplied; verify it matches
            # the commits we decided on.
            assert ((changeset.parent == from_commit
                     and changeset.child == to_commit) if commit is None else
                    (changeset.parent.sha1 == commit.parents[0]
                     and changeset.child == commit))
            assert changeset.getFile(file_id)
        else:
            changeset = changeset_utils.createChangeset(
                db,
                user,
                repository,
                commit=commit,
                from_commit=from_commit,
                to_commit=to_commit,
                filtered_file_ids=set((file_id, )))[0]

        file = changeset.getFile(file_id)

        if not file:
            if chain.state == "addressed" and not original:
                # The file wasn't touched by the addressing commit; retry
                # with the originally commented version instead.
                renderCodeCommentChain(db,
                                       target,
                                       user,
                                       review,
                                       chain,
                                       context_lines,
                                       compact,
                                       tabify,
                                       original=True)
                return
            else:
                # NOTE(review): bare 'raise' without an active exception
                # is itself an error; this probably should raise an
                # explicit exception describing the missing file.
                raise

        # Commit so that the diff and its analysis, written to the database by createChangeset(),
        # can be reused later.
        db.commit()

        old_sha1 = file.old_sha1
        new_sha1 = file.new_sha1

        # An all-zero SHA-1 means the file doesn't exist on that side;
        # fall back to the side that does exist.
        if use == old and old_sha1 == '0' * 40: use = new
        elif use == new and new_sha1 == '0' * 40: use = old

        if use == old: sha1 = old_sha1
        else: sha1 = new_sha1

        cursor.execute(
            "SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s",
            [chain.id, sha1])

        first_line, last_line = cursor.fetchone()

        def readChunks():
            # Materialize diff.Chunk objects from the current cursor
            # result set.  NOTE(review): appears unused in this function.
            return [
                diff.Chunk(delete_offset,
                           delete_count,
                           insert_offset,
                           insert_count,
                           analysis=analysis,
                           is_whitespace=is_whitespace)
                for delete_offset, delete_count, insert_offset, insert_count,
                analysis, is_whitespace in cursor.fetchall()
            ]

        first_context_line = first_line - context_lines
        last_context_line = last_line + context_lines

        def includeChunk(chunk):
            # True if 'chunk' overlaps the commented region (plus
            # context) on the side selected by 'use'.
            # NOTE(review): appears unused in this function.
            if use == old:
                chunk_first_line, chunk_last_line = chunk.delete_offset, chunk.delete_offset + chunk.delete_count - 1
            else:
                chunk_first_line, chunk_last_line = chunk.insert_offset, chunk.insert_offset + chunk.insert_count - 1

            return chunk_last_line >= first_context_line and chunk_first_line <= last_context_line

        def lineFilter(line):
            # Keep only lines within the commented region (plus context),
            # measured on the side selected by 'use'; drop a one-sided
            # line from the opposite side at the leading boundary.
            if use == old:
                linenr = line.old_offset
                if linenr == first_context_line and line.type == diff.Line.INSERTED:
                    return False
            else:
                linenr = line.new_offset
                if linenr == first_context_line and line.type == diff.Line.DELETED:
                    return False

            return first_context_line <= linenr <= last_context_line

        file.loadOldLines(True)
        file.loadNewLines(True)

        context = diff.context.ContextLines(file, file.chunks,
                                            [(chain, use == old)])
        file.macro_chunks = context.getMacroChunks(context_lines,
                                                   highlight=True,
                                                   lineFilter=lineFilter)

        try:
            macro_chunk = file.macro_chunks[0]
        except:
            # NOTE(review): raising a string (repr(...)) is invalid in
            # Python 2.6+ and a TypeError at runtime; debugging aid only.
            raise repr((parent.sha1, child.sha1))

        display_type = "both"

        if chain.state != "addressed":
            # If every line of the macro chunk is of the same one-sided
            # type, collapse the display to just that side.
            first_line_type = macro_chunk.lines[0].type
            if first_line_type == diff.Line.CONTEXT or (
                    use == old and first_line_type == diff.Line.DELETED) or (
                        use == new and first_line_type == diff.Line.INSERTED):
                for line in macro_chunk.lines[1:]:
                    if first_line_type != line.type:
                        break
                else:
                    display_type = "old" if use == old else "new"

        commit_url_component = "from=%s&to=%s" % (parent.sha1, child.sha1)

    def renderHeaderLeft(db, target, file):
        # Chain title linking to the standalone comment page.
        target.span("comment-chain-title").a(href="/showcomment?chain=%d" %
                                             chain.id).text(chain.title())

    def renderHeaderRight(db, target, file):
        # File path linking to the commented line in the commit view.
        side = use == old and "o" or "n"
        uri = "showcommit?%s&review=%d&file=%d#f%d%s%d" % (
            commit_url_component, review.id, file.id, file.id, side,
            first_line)
        target.span("filename").a(href=uri).text(file.path)

    def renderCommentsLocal(db, target, **kwargs):
        # Place the comments on the side of the diff the chain refers to.
        if display_type == "both":
            if use == old: position = "left"
            else: position = "right"
        else:
            position = "center"

        renderComments(db, target, user, chain, position, linkify)

    def lineId(base):
        # Per-chain prefix for generated line element ids.
        return "c%d%s" % (chain.id, base)

    def lineCellId(base):
        # Per-chain prefix for generated line cell element ids.
        return "c%d%s" % (chain.id, base)

    # Register the chain object with the client-side script.
    target.addInternalScript("commentChainById[%d] = %s;" %
                             (chain.id, chain.getJSConstructor(sha1)),
                             here=True)

    changeset_html.renderFile(db,
                              target,
                              user,
                              review,
                              file,
                              options={
                                  "support_expand": False,
                                  "display_type": display_type,
                                  "header_left": renderHeaderLeft,
                                  "header_right": renderHeaderRight,
                                  "content_after": renderCommentsLocal,
                                  "show": True,
                                  "expand": True,
                                  "line_id": lineId,
                                  "line_cell_id": lineCellId,
                                  "compact": compact,
                                  "tabify": tabify,
                                  "include_deleted": True
                              })

    # Arguments for the comment-markers setup script below (first/last
    # line element ids, chain type/state, chain id).
    data = (chain.id, file_id, use == old and "o"
            or "n", first_line, chain.id, file_id, use == old and "o"
            or "n", last_line, htmlutils.jsify(chain.type),
            htmlutils.jsify(chain.state), chain.id)

    target.addInternalScript("""$(document).ready(function ()
  {
    var markers = new CommentMarkers(null);
    markers.setLines(document.getElementById('c%df%d%s%d'), document.getElementById('c%df%d%s%d'));
    markers.setType(%s, %s);
    commentChainById[%d].markers = markers;
  });""" % data,
                             here=True)
Пример #3
0
    def getMacroChunks(self, context_lines=3, minimum_gap=3, highlight=True, lineFilter=None):
        """Group this file's diff chunks into "macro chunks": runs of diff
        lines with up to 'context_lines' lines of context around them,
        separated by gaps of at least 'minimum_gap' additional unchanged
        lines.

        Comment chains attached via self.chains extend the set of lines
        that must appear (extra CONTEXT lines are fabricated for them).
        If 'lineFilter' is given, it is called with each candidate
        diff.Line and lines for which it returns false are dropped.

        Returns a list of diff.MacroChunk objects; without a lineFilter,
        macro chunks containing no actual chunks are filtered out.
        """
        old_lines = self.file.oldLines(highlight)
        new_lines = self.file.newLines(highlight)

        lines = []

        def addLine(line):
            # Append 'line' unless the caller's filter rejects it.
            if not lineFilter or lineFilter(line): lines.append(line)

        # Phase 1: turn each chunk (guided by its per-line analysis, when
        # present) into concrete diff.Line objects.
        for chunk in self.chunks:
            old_offset = chunk.delete_offset
            new_offset = chunk.insert_offset

            if chunk.analysis:
                # Analysis maps deleted lines to inserted lines:
                # "old=new[:ops]" entries separated by ';'.
                mappings = chunk.analysis.split(';')

                for mapping in mappings:
                    if ':' in mapping:
                        mapped_lines, ops = mapping.split(':')
                    else:
                        mapped_lines = mapping
                        ops = None

                    old_line, new_line = mapped_lines.split('=')
                    old_line = chunk.delete_offset + int(old_line)
                    new_line = chunk.insert_offset + int(new_line)

                    # Emit unmapped line pairs leading up to this mapping.
                    while old_offset < old_line and new_offset < new_line:
                        if old_lines[old_offset - 1] == new_lines[new_offset - 1]:
                            line_type = diff.Line.CONTEXT
                        else:
                            line_type = diff.Line.REPLACED

                        line = diff.Line(line_type,
                                         old_offset, old_lines[old_offset - 1],
                                         new_offset, new_lines[new_offset - 1],
                                         is_whitespace=chunk.is_whitespace)

                        if self.conflicts and line_type == diff.Line.REPLACED and line.isConflictMarker():
                            # Conflict markers are shown as pure
                            # deletions; note new_offset is NOT advanced.
                            addLine(diff.Line(diff.Line.DELETED,
                                              old_offset, old_lines[old_offset - 1],
                                              new_offset, None))
                        else:
                            addLine(line)
                            new_offset += 1

                        old_offset += 1

                    # Remaining unmatched old lines are deletions...
                    while old_offset < old_line:
                        addLine(diff.Line(diff.Line.DELETED,
                                          old_offset, old_lines[old_offset - 1],
                                          new_offset, None))
                        old_offset += 1

                    # ...and remaining unmatched new lines insertions.
                    while new_offset < new_line:
                        addLine(diff.Line(diff.Line.INSERTED,
                                          old_offset, None,
                                          new_offset, new_lines[new_offset - 1]))
                        new_offset += 1

                    try:
                        deleted_line = old_lines[old_offset - 1]
                        inserted_line = new_lines[new_offset - 1]
                    except:
                        # NOTE(review): raising a string (repr(...)) is
                        # invalid in Python 2.6+; debugging aid only.
                        raise repr((self.file.path, self.file.old_sha1, self.file.new_sha1, new_offset, len(new_lines)))

                    if deleted_line == inserted_line:
                        line_type = diff.Line.CONTEXT
                        is_whitespace = False
                    else:
                        # A "ws" prefix in the ops marks a whitespace-only
                        # change; strip it before highlighting.
                        if ops and ops.startswith("ws"):
                            is_whitespace = True
                            if ops.startswith("ws,"): ops = ops[3:]
                            else: ops = None
                        else:
                            is_whitespace = False

                        line_type = diff.Line.MODIFIED

                        if highlight and ops:
                            if ops == "eol":
                                # Only the trailing linebreak differs.
                                line_type = diff.Line.REPLACED
                                if highlight:
                                    # NOTE(review): both branches append
                                    # to deleted_line; the second likely
                                    # meant inserted_line — confirm.
                                    if not self.file.old_eof_eol: deleted_line += "<i class='eol'>[missing linebreak]</i>"
                                    if not self.file.new_eof_eol: deleted_line += "<i class='eol'>[missing linebreak]</i>"
                            else:
                                deleted_line, inserted_line = diff.html.lineDiffHTML(ops, deleted_line, inserted_line)

                    addLine(diff.Line(line_type,
                                      old_offset, deleted_line,
                                      new_offset, inserted_line,
                                      is_whitespace=chunk.is_whitespace or is_whitespace))

                    old_offset += 1
                    new_offset += 1

            # Emit whatever remains of the chunk beyond the analysed
            # mappings (or the whole chunk if it had no analysis).
            old_line = chunk.delete_offset + chunk.delete_count
            new_line = chunk.insert_offset + chunk.insert_count

            while old_offset < old_line and new_offset < new_line:
                if old_lines[old_offset - 1] == new_lines[new_offset - 1]:
                    line_type = diff.Line.CONTEXT
                else:
                    line_type = diff.Line.REPLACED

                line = diff.Line(line_type,
                                 old_offset, old_lines[old_offset - 1],
                                 new_offset, new_lines[new_offset - 1],
                                 is_whitespace=chunk.is_whitespace)

                if self.conflicts and line_type == diff.Line.REPLACED and line.isConflictMarker():
                    # Conflict markers as pure deletions (see above).
                    addLine(diff.Line(diff.Line.DELETED,
                                      old_offset, old_lines[old_offset - 1],
                                      new_offset, None))
                else:
                    addLine(line)
                    new_offset += 1

                old_offset += 1

            while old_offset < old_line:
                try:
                    addLine(diff.Line(diff.Line.DELETED,
                                      old_offset, old_lines[old_offset - 1],
                                      new_offset, None))
                except:
                    # Out-of-range offset: substitute an empty line
                    # rather than failing.
                    addLine(diff.Line(diff.Line.DELETED,
                                      old_offset, "",
                                      new_offset, None))

                old_offset += 1

            while new_offset < new_line:
                try:
                    addLine(diff.Line(diff.Line.INSERTED,
                                      old_offset, None,
                                      new_offset, new_lines[new_offset - 1]))
                except:
                    # Out-of-range offset: substitute an empty line.
                    addLine(diff.Line(diff.Line.INSERTED,
                                      old_offset, None,
                                      new_offset, ""))

                new_offset += 1

        # Index the produced lines by their old/new offsets so chain
        # handling and the grouping phase can look lines up.
        old_table = {}
        new_table = {}

        for line in lines:
            if line.old_value is not None:
                old_table[line.old_offset] = line
            if line.new_value is not None:
                new_table[line.new_offset] = line

        def translateInChunk(chunk, old_delta=None, new_delta=None):
            # Translate a line delta within 'chunk' from one side to the
            # other, using the chunk's analysis mappings when available.
            # Exactly one of old_delta/new_delta must be given.
            if chunk.analysis:
                mappings = chunk.analysis.split(';')

                previous_old_line = 0
                previous_new_line = 0

                for mapping in mappings:
                    if ':' in mapping:
                        mapped_lines, ops = mapping.split(':')
                    else:
                        mapped_lines = mapping

                    old_line, new_line = mapped_lines.split('=')
                    old_line = int(old_line)
                    new_line = int(new_line)

                    if old_delta is not None:
                        if old_line == old_delta:
                            return new_line
                        elif old_line > old_delta:
                            return previous_new_line
                    else:
                        if new_line == new_delta:
                            return old_line
                        elif new_line > new_delta:
                            return previous_old_line

                    previous_old_line = old_line
                    previous_new_line = new_line

            # No (applicable) mapping: clamp the delta to the chunk size.
            if old_delta is not None: return min(old_delta, chunk.insert_count)
            else: return min(new_delta, chunk.delete_count)

        def findMatchingOldOffset(offset):
            # Map a new-side line offset to the corresponding old-side
            # offset, accounting for preceding chunks.
            precedingChunk = None
            for chunk in self.chunks:
                if chunk.insert_offset + chunk.insert_count > offset:
                    if chunk.insert_offset <= offset:
                        delta = translateInChunk(chunk, new_delta=offset - chunk.insert_offset)
                        offset = chunk.delete_offset + delta
                        return offset
                    break
                precedingChunk = chunk
            if precedingChunk:
                offset -= precedingChunk.insert_offset + precedingChunk.insert_count
                offset += precedingChunk.delete_offset + precedingChunk.delete_count
            return offset

        def findMatchingNewOffset(offset):
            # Map an old-side line offset to the corresponding new-side
            # offset, accounting for preceding chunks.
            precedingChunk = None
            for chunk in self.chunks:
                if chunk.delete_offset + chunk.delete_count > offset:
                    if chunk.delete_offset <= offset:
                        delta = translateInChunk(chunk, old_delta=offset - chunk.delete_offset)
                        offset = chunk.insert_offset + delta
                        return offset
                    break
                precedingChunk = chunk
            if precedingChunk:
                offset -= precedingChunk.delete_offset + precedingChunk.delete_count
                offset += precedingChunk.insert_offset + precedingChunk.insert_count
            return offset

        # Fabricate CONTEXT lines for comment chains whose lines are not
        # already covered by the diff (skipped for merge changesets).
        if self.chains and not self.merge:
            for chain in self.chains:
                if chain.comments:
                    old_table_keys = old_table.keys()
                    new_table_keys = new_table.keys()

                    if self.file.new_sha1 in chain.lines_by_sha1:
                        chain_offset, chain_count = chain.lines_by_sha1[self.file.new_sha1]
                        old_offset = findMatchingOldOffset(chain_offset)
                        new_offset = chain_offset
                        first_line = new_table.get(new_offset)
                    else:
                        chain_offset, chain_count = chain.lines_by_sha1[self.file.old_sha1]
                        old_offset = chain_offset
                        new_offset = findMatchingNewOffset(chain_offset)
                        first_line = old_table.get(old_offset)

                    org_old_offset = old_offset
                    org_new_offset = new_offset

                    count = chain_count

                    while count:
                        if old_offset not in old_table and new_offset not in new_table:
                            try:
                                line = diff.Line(diff.Line.CONTEXT,
                                                 old_offset, old_lines[old_offset - 1],
                                                 new_offset, new_lines[new_offset - 1])
                            except:
                                #raise Exception, repr((count, old_offset, len(old_lines), new_offset, len(new_lines), chain.id, self.file.old_sha1, self.file.new_sha1))
                                break
                            if not lineFilter or lineFilter(line):
                                if not first_line: first_line = line
                                old_table[old_offset] = line
                                new_table[new_offset] = line

                        # Only advance past offsets covered by the tables.
                        if old_offset in old_table: old_offset += 1
                        if new_offset in new_table: new_offset += 1
                        count -= 1

        class queue:
            # Minimal FIFO view over a list; pop() advances an offset
            # instead of mutating the underlying list.
            def __init__(self, iterable):
                self.__list = list(iterable)
                self.__offset = 0

            def __getitem__(self, index): return self.__list[self.__offset + index]
            # Python 2 truth protocol: true while items remain.
            def __nonzero__(self): return self.__offset < len(self.__list)
            def __len__(self): return len(self.__list) - self.__offset
            def __str__(self): return str(self.__list[self.__offset:])
            def __repr__(self): return repr(self.__list[self.__offset:])

            def pop(self):
                self.__offset += 1
                return self.__list[self.__offset - 1]

        # All produced lines as (old_offset, new_offset, line) tuples in
        # offset order; one-sided new lines are added from new_table.
        all_lines = queue(sorted([(key, value.new_offset, value) for key, value in old_table.items()] +
                                 [(value.old_offset, key, value) for key, value in new_table.items() if value.type not in (diff.Line.CONTEXT, diff.Line.MODIFIED, diff.Line.REPLACED)]))
        all_chunks = self.chunks[:]
        all_chains = self.chains and self.chains[:] or None

        macro_chunks = []

        # Phase 2: group the lines into macro chunks with context.
        try:
            def lineOrNone(lines, index):
                # Safe indexed access: None past the end of the file.
                try: return lines[index]
                except IndexError: return None

            while all_lines:
                old_offset, new_offset, first_line = all_lines.pop()

                # Leading context before the first line of the group.
                count = min(context_lines, max(old_offset - 1, new_offset - 1))
                old_offset = max(1, old_offset - count)
                new_offset = max(1, new_offset - count)
                lines = []

                while count:
                    if old_offset <= len(old_lines) and new_offset <= len(new_lines):
                        addLine(diff.Line(diff.Line.CONTEXT,
                                          old_offset, old_lines[old_offset - 1],
                                          new_offset, new_lines[new_offset - 1]))
                        old_offset += 1
                        new_offset += 1
                    elif old_offset <= len(old_lines):
                        old_offset += 1
                    else:
                        new_offset += 1
                    count -= 1

                lines.append(first_line)
                if first_line.type != diff.Line.INSERTED: old_offset += 1
                if first_line.type != diff.Line.DELETED: new_offset += 1

                # Consume adjacent lines, bridging small gaps with
                # fabricated context so nearby changes share a group.
                while all_lines:
                    while all_lines and (old_offset == all_lines[0][0] or new_offset == all_lines[0][1]):
                        line = all_lines.pop()[2]
                        lines.append(line)
                        if line.type != diff.Line.INSERTED: old_offset += 1
                        if line.type != diff.Line.DELETED: new_offset += 1

                    if all_lines and all_lines[0][1] - new_offset <= 2 * context_lines + minimum_gap:
                        while old_offset != all_lines[0][0] and new_offset != all_lines[0][1]:
                            line = diff.Line(diff.Line.CONTEXT,
                                             old_offset, lineOrNone(old_lines, old_offset - 1),
                                             new_offset, lineOrNone(new_lines, new_offset - 1))
                            addLine(line)
                            if line.old_value is not None: old_offset += 1
                            if line.new_value is not None: new_offset += 1
                    else: break

                # Trailing context after the last line of the group.
                count = context_lines

                while count:
                    if old_offset <= len(old_lines) and new_offset <= len(new_lines):
                        addLine(diff.Line(diff.Line.CONTEXT,
                                          old_offset, old_lines[old_offset - 1],
                                          new_offset, new_lines[new_offset - 1]))
                        old_offset += 1
                        new_offset += 1
                    elif old_offset <= len(old_lines):
                        old_offset += 1
                    else:
                        new_offset += 1
                    count -= 1

                # Collect the chunks that this group covers.
                chunks = []

                while all_chunks and (all_chunks[0].delete_offset < old_offset or all_chunks[0].insert_offset < new_offset):
                    chunks.append(all_chunks.pop(0))

                # Collect the chains that this group covers.
                # NOTE(review): 'chains' is built but never used; the
                # MacroChunk below only receives chunks and lines.
                chains = []

                if all_chains:
                    index = 0
                    while index < len(all_chains):
                        chain = all_chains[index]

                        if self.file.new_sha1 in chain.lines_by_sha1:
                            chain_offset, chain_count = chain.lines_by_sha1[self.file.new_sha1]
                            compare_offset = new_offset
                        else:
                            chain_offset, chain_count = chain.lines_by_sha1[self.file.old_sha1]
                            compare_offset = old_offset

                        if chain_offset < compare_offset:
                            chains.append(chain)
                            del all_chains[index]
                        else:
                            index += 1

                macro_chunks.append(diff.MacroChunk(chunks, lines))
        except IndexError:
            # Diagnostic dump of the grouping state (Python 2 raise
            # statement syntax).
            raise Exception, "\nold_offset=%d/%d\nnew_offset=%d/%d\nlines=%r\nall_lines=%r\n\n%s" % (old_offset, len(old_lines), new_offset, len(new_lines), lines, all_lines, "".join(traceback.format_exception(*sys.exc_info())))

        if not lineFilter:
            # Without a filter, drop macro chunks with no real chunks
            # (pure fabricated context).
            return filter(lambda macro_chunk: bool(macro_chunk.chunks), macro_chunks)
        else:
            return macro_chunks