Example #1
0
 def setUp(self):
     """Build the WORD-token fixtures and the formatter shared by the tests."""
     word = lambda text: lexer.Token('WORD', text, 0, 0, 0)
     self.brief_tok = word('This is brief.')
     self.name_tok = word('var')
     self.type_tok = word('int')
     self.tok_see = word('See')
     self.tok_sig = word('payload')
     self.formatter = raw_doc.DoxFormatter()
Example #2
0
 def setUp(self):
     """Create token and RawText fixtures plus a formatter for the tests."""
     make_tok = lexer.Token
     self.tok_name = make_tok('WORD', 'name', 0, 0, 0)
     self.tok_text = make_tok('WORD', 'text', 0, 0, 0)
     self.tok_inout = make_tok('PARAM_IN_OUT', '[in,out]', 0, 0, 0)
     self.txt_name = raw_doc.RawText([self.tok_name])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Example #3
0
 def setUp(self):
     """Build WORD-token fixtures (brief/name/title/see/sig) and a formatter."""
     word = lambda text: lexer.Token('WORD', text, 0, 0, 0)
     self.brief_tok = word('This is brief.')
     self.name_tok = word('Adaption')
     self.title_tok = word('Adaption Title')
     self.tok_see = word('See')
     self.tok_sig = word('payload')
     self.formatter = raw_doc.DoxFormatter()
Example #4
0
 def setUp(self):
     """Create a path text, a three-token snippet text, and a formatter."""
     tok = lambda kind, val: lexer.Token(kind, val, 0, 0, 0)
     self.path_t = tok('WORD', 'apath')
     self.path = raw_doc.RawText([self.path_t])
     self.snippet_t0 = tok('WORD', 'The')
     self.snippet_t1 = tok('SPACE', ' ')
     self.snippet_t2 = tok('WORD', 'snippet')
     self.snippet = raw_doc.RawText(
         [self.snippet_t0, self.snippet_t1, self.snippet_t2])
     self.formatter = raw_doc.DoxFormatter()
Example #5
0
 def testGetFormatted(self):
     """A RawEntry with brief, name, title and see renders the expected dox."""
     brief = raw_doc.RawBrief(raw_doc.RawText([self.brief_tok]))
     entry = raw_doc.RawEntry([brief])
     entry.name = raw_doc.RawText([self.name_tok])
     entry.title = raw_doc.RawText([self.title_tok])
     entry.sees = [raw_doc.RawSee(raw_doc.RawText([self.tok_see]))]
     expected = ('@<entry> Concept Concept Title\n\n'
                 '@brief This is brief.\n\n'
                 '@see See\n\n')
     self.assertMultiLineEqual(
         entry.getFormatted(raw_doc.DoxFormatter()), expected)
Example #6
0
def toDox(proc_entry, line_length=110, in_comment=False):
    """Process a ProcEntry into the dox-like format.

    Renders the entry's raw_entry and the raw_entry of every subentry
    through a raw_doc.DoxFormatter and joins them with newlines.

    Args:
        proc_entry: ProcEntry whose raw_entry (and subentries' raw_entries)
            provide getFormatted().
        line_length: Currently unused.  NOTE(review): kept for interface
            compatibility; wiring it into DoxFormatter(line_length) looks
            intended -- confirm before changing.
        in_comment: If True, wrap the output in a '/*! ... */' comment block,
            prefixing every line with ' * '.

    Returns:
        The formatted documentation as a single string.
    """
    formatter = raw_doc.DoxFormatter()
    result = [proc_entry.raw_entry.getFormatted(formatter)]
    # Only the subentry lists are used, so iterate values directly;
    # .values() works on Python 2 and 3 (the original .iteritems() was
    # Python-2-only and its key was never used).
    for lst in proc_entry.subentries.values():
        for elem in lst:
            result.append(elem.raw_entry.getFormatted(formatter))
    if in_comment:
        result = [' * ' + l for line in result for l in line.splitlines(False)]
        # Drop trailing blank comment lines before closing the block.
        while result and result[-1] == ' * ':
            result.pop(-1)
        result = ['/*!'] + result + [' */']
    return '\n'.join(result)
Example #7
0
 def testGetFormatted(self):
     """A fully-populated RawCodeEntry renders the expected dox text."""
     # Fix: the original constructed a throwaway RawCodeEntry() that was
     # immediately overwritten on the next line; build it once, with the
     # brief, instead.
     b = raw_doc.RawBrief(raw_doc.RawText([self.brief_tok]))
     code_entry = raw_doc.RawCodeEntry([b])
     code_entry.name = raw_doc.RawText([self.name_tok])
     code_entry.title = raw_doc.RawText([self.title_tok])
     code_entry.sees = [raw_doc.RawSee(raw_doc.RawText([self.tok_see]))]
     s = raw_doc.RawSignature(raw_doc.RawText([self.tok_sig]))
     code_entry.addSignature(s)
     formatter = raw_doc.DoxFormatter()
     txt = ('@<code entry> Concept Concept Title\n\n'
            '@brief This is brief.\n\n'
            '@signature payload\n\n'
            '@see See\n\n')
     self.assertMultiLineEqual(code_entry.getFormatted(formatter), txt)
Example #8
0
#!/usr/bin/python
"""Code for translating a DDDoc tree and its node into raw_doc objects.
"""

import copy
import lexer
import raw_doc
import re
import sys

formatter = raw_doc.DoxFormatter(120)


class TokenTranslator(object):
    def translate(self, token_list):
        """Return token_list with link and teletype markup translated.

        Applies the link translation first, then the <tt>-style rewriting
        on its result.
        """
        return self.translateTT(self.translateLinks(token_list))

    def translateLinks(self, token_list):
        result = []
        for token in token_list:
            if '@' in token.val:
                vals = re.split('(@[^@]*@)', token.val)
                for val in vals:
                    if val.startswith('@') and val.endswith('@'):
                        t1 = copy.deepcopy(token)
                        t1.type = 'COMMAND_LINK'
                        t1.val = '@link'
                        t2 = copy.deepcopy(token)
                        if '|' in val:
Example #9
0
 def setUp(self):
     """Create a single WORD token, wrap it in a RawText, add a formatter."""
     token = lexer.Token('WORD', 'text', 0, 0, 0)
     self.tok_text = token
     self.txt_text = raw_doc.RawText([token])
     self.formatter = raw_doc.DoxFormatter()
Example #10
0
 def setUp(self):
     """One-token text fixture and a formatter for the tests."""
     word = lexer.Token('WORD', 'aword', 0, 0, 0)
     self.t = word
     self.txt = raw_doc.RawText([word])
     self.formatter = raw_doc.DoxFormatter()
Example #11
0
 def setUp(self):
     """Path token wrapped in a RawText, plus a formatter."""
     path_token = lexer.Token('WORD', 'apath', 0, 0, 0)
     self.path_t = path_token
     self.path = raw_doc.RawText([path_token])
     self.formatter = raw_doc.DoxFormatter()
Example #12
0
 def setUp(self):
     """Set up a 50-column formatter and a long sample text for wrap tests."""
     wrap_width = 50
     self.fmt = raw_doc.DoxFormatter(wrap_width)
     self.txt = ('This is a quite long string that is used to determine '
                 'whether the formatter wraps correctly.\n')