Example 1
 def setUp(self):
     self.tok = lexer.Token('COMMAND_THROW', '@throw', 0, 0, 0)
     self.tok_type = lexer.Token('WORD', 'type', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_type = raw_doc.RawText([self.tok_type])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Example 2
 def setUp(self):
     self.tok = lexer.Token('COMMAND_TPARAM', '@tparam', 0, 0, 0)
     self.tok_name = lexer.Token('WORD', 'name', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_name = raw_doc.RawText([self.tok_name])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Example 3
 def setUp(self):
     self.tok = lexer.Token('COMMAND_RETURN', '@return', 0, 0, 0)
     self.tok_name = lexer.Token('WORD', 'name', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_name = raw_doc.RawText([self.tok_name])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Example 4
 def setUp(self):
     self.brief_tok = lexer.Token('WORD', 'This is brief.', 0, 0, 0)
     self.name_tok = lexer.Token('WORD', 'var', 0, 0, 0)
     self.type_tok = lexer.Token('WORD', 'int', 0, 0, 0)
     self.tok_see = lexer.Token('WORD', 'See', 0, 0, 0)
     self.tok_sig = lexer.Token('WORD', 'payload', 0, 0, 0)
     self.formatter = raw_doc.DoxFormatter()
Example 5
 def setUp(self):
     self.brief_tok = lexer.Token('WORD', 'This is brief.', 0, 0, 0)
     self.name_tok = lexer.Token('WORD', 'Adaption', 0, 0, 0)
     self.title_tok = lexer.Token('WORD', 'Adaption Title', 0, 0, 0)
     self.tok_see = lexer.Token('WORD', 'See', 0, 0, 0)
     self.tok_sig = lexer.Token('WORD', 'payload', 0, 0, 0)
     self.formatter = raw_doc.DoxFormatter()
Example 6
 def setUp(self):
     self.tok = lexer.Token('COMMAND_PARAM', '@param', 0, 0, 0)
     self.tok_name = lexer.Token('WORD', 'name', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.tok_inout = lexer.Token('PARAM_IN_OUT', '[in,out]', 0, 0, 0)
     self.txt_name = raw_doc.RawText([self.tok_name])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Example 7
 def setUp(self):
     self.path_t = lexer.Token('WORD', 'apath', 0, 0, 0)
     self.path = raw_doc.RawText([self.path_t])
     self.snippet_t0 = lexer.Token('WORD', 'The', 0, 0, 0)
     self.snippet_t1 = lexer.Token('SPACE', ' ', 0, 0, 0)
     self.snippet_t2 = lexer.Token('WORD', 'snippet', 0, 0, 0)
     self.snippet = raw_doc.RawText(
         [self.snippet_t0, self.snippet_t1, self.snippet_t2])
     self.formatter = raw_doc.DoxFormatter()
Example 8
 def testGetFormatted(self):
     b = raw_doc.RawBrief(self.brief_tok, raw_doc.RawText([self.brief_tok]))
     entry = raw_doc.RawEntry(self.brief_tok, [b])
     entry.name = raw_doc.RawText([self.name_tok])
     entry.title = raw_doc.RawText([self.title_tok])
     entry.sees = [
         raw_doc.RawSee(self.tok_see, raw_doc.RawText([self.tok_see]))
     ]
     formatter = raw_doc.DoxFormatter()
     msg = ('@<entry> Concept Concept Title\n\n'
            '@brief This is brief.\n\n'
            '@see See\n\n')
     self.assertMultiLineEqual(entry.getFormatted(formatter), msg)
Example 9
 def testGetFormatted(self):
     b = raw_doc.RawBrief(self.brief_tok, raw_doc.RawText([self.brief_tok]))
     code_entry = raw_doc.RawCodeEntry(self.code_tok, [b])
     code_entry.name = raw_doc.RawText([self.name_tok])
     code_entry.title = raw_doc.RawText([self.title_tok])
     code_entry.sees = [
         raw_doc.RawSee(self.tok_see, raw_doc.RawText([self.tok_see]))
     ]
     s = raw_doc.RawSignature(self.tok_sig, raw_doc.RawText([self.tok_sig]))
     code_entry.addSignature(s)
     formatter = raw_doc.DoxFormatter()
     txt = ('@<code entry> Concept Concept Title\n\n'
            '@brief This is brief.\n\n'
            '@signature payload\n\n'
            '@see See\n\n')
     self.assertMultiLineEqual(code_entry.getFormatted(formatter), txt)
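Each fragment above is a method lifted out of a unittest test class that pairs a setUp fixture like these with a getFormatted comparison. For orientation, here is a minimal self-contained sketch of that pattern, using only constructors and calls that appear in Examples 8 and 9; the import lines and the loose assertIn checks (rather than an exact expected string) are assumptions, since the listing shows only the method bodies.

import unittest

# Import path is an assumption; the fragments above reference the modules
# simply as `lexer` and `raw_doc`.
import lexer
import raw_doc


class RawEntryFormattingTest(unittest.TestCase):
    def setUp(self):
        # Fixture in the same style as the examples above.
        self.brief_tok = lexer.Token('WORD', 'This is brief.', 0, 0, 0)
        self.name_tok = lexer.Token('WORD', 'Name', 0, 0, 0)
        self.title_tok = lexer.Token('WORD', 'Title', 0, 0, 0)
        self.tok_see = lexer.Token('WORD', 'See', 0, 0, 0)
        self.formatter = raw_doc.DoxFormatter()

    def testGetFormatted(self):
        # Mirrors Example 8: attach a brief and a see reference, then check
        # the formatted output.  Substring checks are used because the exact
        # first line ('@<entry> ...') is not reproduced here.
        b = raw_doc.RawBrief(self.brief_tok,
                             raw_doc.RawText([self.brief_tok]))
        entry = raw_doc.RawEntry(self.brief_tok, [b])
        entry.name = raw_doc.RawText([self.name_tok])
        entry.title = raw_doc.RawText([self.title_tok])
        entry.sees = [
            raw_doc.RawSee(self.tok_see, raw_doc.RawText([self.tok_see]))
        ]
        txt = entry.getFormatted(self.formatter)
        self.assertIn('@brief This is brief.', txt)
        self.assertIn('@see See', txt)


if __name__ == '__main__':
    unittest.main()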
Example 10
 def setUp(self):
     self.tok = lexer.Token('COMMAND_SIGNATURE', '@signature', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
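The fixture above presumably feeds a testGetFormatted method in the same class. A hedged sketch of what that body might look like follows, reusing the RawSignature(token, RawText) construction from Example 9; that RawSignature exposes getFormatted directly, and the '@signature' prefix of its output, are assumptions not shown in the listing.

 def testGetFormatted(self):
     # Hypothetical continuation of the setUp above, not part of the
     # original listing.
     sig = raw_doc.RawSignature(self.tok, self.txt_text)
     formatted = sig.getFormatted(self.formatter)
     self.assertTrue(formatted.startswith('@signature'))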
Example 11
 def setUp(self):
     self.t = lexer.Token('WORD', 'aword', 0, 0, 0)
     self.txt = raw_doc.RawText([self.t])
     self.formatter = raw_doc.DoxFormatter()
Example 12
 def setUp(self):
     self.path_t = lexer.Token('WORD', 'apath', 0, 0, 0)
     self.path = raw_doc.RawText([self.path_t])
     self.formatter = raw_doc.DoxFormatter()
Example 13
 def setUp(self):
     self.t = lexer.Token('WORD', 'aword', 0, 0, 0)
     self.p = raw_doc.RawParagraph(self.t, raw_doc.RawText([self.t]))
     self.formatter = raw_doc.DoxFormatter()
Example 14
 def setUp(self):
     self.fmt = raw_doc.DoxFormatter(50)
     self.txt = ('This is a quite long string that is used to determine '
                 'whether the formatter wraps correctly.\n')
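The 50-column formatter and the long test string above suggest a word-wrap check. A sketch of such a check follows, combining this fixture with the RawParagraph construction from Example 13; both RawParagraph.getFormatted and the assumption that DoxFormatter(50) wraps its output at 50 columns are inferred, not shown in the listing.

 def testWrapsAtFiftyColumns(self):
     # Hypothetical continuation of the setUp above, not part of the
     # original listing.  Formats the long string as a paragraph and checks
     # the line width against the 50 passed to DoxFormatter.
     tok = lexer.Token('WORD', self.txt, 0, 0, 0)
     para = raw_doc.RawParagraph(tok, raw_doc.RawText([tok]))
     for line in para.getFormatted(self.fmt).splitlines():
         self.assertLessEqual(len(line), 50)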
Example 15
 def setUp(self):
     self.tok = lexer.Token('COMMAND_DATARACE', '@datarace', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()