Code Example #1
 def setUp(self):
     self.tok = lexer.Token('COMMAND_THROW', '@throw', 0, 0, 0)
     self.tok_type = lexer.Token('WORD', 'type', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_type = raw_doc.RawText([self.tok_type])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Code Example #2
 def setUp(self):
     self.tok = lexer.Token('COMMAND_TPARAM', '@tparam', 0, 0, 0)
     self.tok_name = lexer.Token('WORD', 'name', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_name = raw_doc.RawText([self.tok_name])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Code Example #3
 def setUp(self):
     self.tok = lexer.Token('COMMAND_RETURN', '@return', 0, 0, 0)
     self.tok_name = lexer.Token('WORD', 'name', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_name = raw_doc.RawText([self.tok_name])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Code Example #4
 def setUp(self):
     self.brief_tok = lexer.Token('WORD', 'This is brief.', 0, 0, 0)
     self.name_tok = lexer.Token('WORD', 'var', 0, 0, 0)
     self.type_tok = lexer.Token('WORD', 'int', 0, 0, 0)
     self.tok_see = lexer.Token('WORD', 'See', 0, 0, 0)
     self.tok_sig = lexer.Token('WORD', 'payload', 0, 0, 0)
     self.formatter = raw_doc.DoxFormatter()
Code Example #5
 def setUp(self):
     self.brief_tok = lexer.Token('WORD', 'This is brief.', 0, 0, 0)
     self.name_tok = lexer.Token('WORD', 'Adaption', 0, 0, 0)
     self.title_tok = lexer.Token('WORD', 'Adaption Title', 0, 0, 0)
     self.tok_see = lexer.Token('WORD', 'See', 0, 0, 0)
     self.tok_sig = lexer.Token('WORD', 'payload', 0, 0, 0)
     self.formatter = raw_doc.DoxFormatter()
Code Example #6
 def setUp(self):
     self.tok = lexer.Token('COMMAND_PARAM', '@param', 0, 0, 0)
     self.tok_name = lexer.Token('WORD', 'name', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.tok_inout = lexer.Token('PARAM_IN_OUT', '[in,out]', 0, 0, 0)
     self.txt_name = raw_doc.RawText([self.tok_name])
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Code Example #7
 def setUp(self):
     self.path_t = lexer.Token('WORD', 'apath', 0, 0, 0)
     self.path = raw_doc.RawText([self.path_t])
     self.snippet_t0 = lexer.Token('WORD', 'The', 0, 0, 0)
     self.snippet_t1 = lexer.Token('SPACE', ' ', 0, 0, 0)
     self.snippet_t2 = lexer.Token('WORD', 'snippet', 0, 0, 0)
     self.snippet = raw_doc.RawText(
         [self.snippet_t0, self.snippet_t1, self.snippet_t2])
     self.formatter = raw_doc.DoxFormatter()
Code Example #8
 def testConstructionWithTokens(self):
     tokens = [
         lexer.Token('WORD', 'test', 0, 0, 0),
         lexer.Token('SPACE', ' ', 0, 0, 0),
         lexer.Token('WORD', 'foo', 0, 0, 0)
     ]
     text = raw_doc.RawText(tokens)
     self.assertEqual(text.tokens, tokens)
     self.assertFalse(text.empty)
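
As a counterpart to this construction test, a minimal sketch of the empty case could look like the following; it assumes RawText also accepts an empty token list, which the snippet above does not show:

 def testConstructionEmpty(self):
     # Hypothetical counterpart test: an empty token list should yield an empty RawText.
     text = raw_doc.RawText([])
     self.assertEqual(text.tokens, [])
     self.assertTrue(text.empty)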
Code Example #9
 def setUp(self):
     self.code_tok = lexer.Token('COMMAND_CODE', '@code', 0, 0, 0)
     self.sig_tok = lexer.Token('COMMAND_SIGNATURE', '@signature', 0, 0, 0)
     self.brief_tok = lexer.Token('WORD', 'This is brief.', 0, 0, 0)
     self.name_tok = lexer.Token('WORD', 'Concept', 0, 0, 0)
     self.title_tok = lexer.Token('WORD', 'Concept Title', 0, 0, 0)
     self.tok_see = lexer.Token('WORD', 'See', 0, 0, 0)
     self.tok_sig = lexer.Token('WORD', 'payload', 0, 0, 0)
Code Example #10
 def testMerge(self):
     self.page_token = lexer.Token('COMMAND_PAGE', '@page', 0, 0, 0)
     doc_left = raw_doc.RawDoc()
     page_left = raw_doc.RawPage(self.page_token)
     doc_left.entries.append(page_left)
     doc_right = raw_doc.RawDoc()
     page_right = raw_doc.RawPage(self.page_token)
     doc_right.entries.append(page_right)
     doc_left.merge(doc_right)

     # Merging appends the right-hand entries to the left-hand document ...
     self.assertEqual(len(doc_left.entries), 2)
     # ... while the right-hand document itself stays unchanged.
     self.assertEqual(len(doc_right.entries), 1)
     self.assertEqual(doc_left.entries[0], page_left)
     self.assertEqual(doc_left.entries[1], page_right)
Code Example #11
 def setUp(self):
     self.t = lexer.Token('WORD', 'aword', 0, 0, 0)
     self.txt = raw_doc.RawText([self.t])
     self.formatter = raw_doc.DoxFormatter()
Code Example #12
 def setUp(self):
     self.path_t = lexer.Token('WORD', 'apath', 0, 0, 0)
     self.path = raw_doc.RawText([self.path_t])
     self.formatter = raw_doc.DoxFormatter()
Code Example #13
 def setUp(self):
     self.t = lexer.Token('WORD', 'aword', 0, 0, 0)
     self.p = raw_doc.RawParagraph(self.t, raw_doc.RawText([self.t]))
     self.formatter = raw_doc.DoxFormatter()
Code Example #14
 def testAddSignature(self):
     code_entry = raw_doc.RawCodeEntry(self.code_tok)
     s = raw_doc.RawSignature(
         self.sig_tok,
         raw_doc.RawText([lexer.Token('WORD', 'payload', 0, 0, 0)]))
     code_entry.addSignature(s)
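
A hedged continuation of this test would assert that the signature was actually stored on the entry; the snippet does not show how RawCodeEntry exposes its signatures, so the attribute name used below is an assumption:

     # Hypothetical follow-up assertion; the 'signatures' attribute name is assumed,
     # not shown in the snippet above.
     self.assertEqual(code_entry.signatures, [s])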
Code Example #15
 def setUp(self):
     self.tok = lexer.Token('COMMAND_SIGNATURE', '@signature', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
Code Example #16
 def setUp(self):
     self.tok = lexer.Token('WORD', 'entry', 0, 0, 0)
     self.brief_tok = lexer.Token('WORD', 'This is brief.', 0, 0, 0)
     self.name_tok = lexer.Token('WORD', 'Concept', 0, 0, 0)
     self.title_tok = lexer.Token('WORD', 'Concept Title', 0, 0, 0)
     self.tok_see = lexer.Token('WORD', 'See', 0, 0, 0)
Code Example #17
File: test_raw_doc.py, Project: h-2/seqan-1
 def setUp(self):
     self.tok = lexer.Token('COMMAND_DATARACE', '@datarace', 0, 0, 0)
     self.tok_text = lexer.Token('WORD', 'text', 0, 0, 0)
     self.txt_text = raw_doc.RawText([self.tok_text])
     self.formatter = raw_doc.DoxFormatter()
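
All of these fixtures follow the same construction pattern: build lexer.Token objects (kind, text, and three position fields), wrap them in raw_doc.RawText, and create a raw_doc.DoxFormatter. A minimal standalone sketch of that shared pattern is shown below; the import path is an assumption, since the snippets above do not include their import statements:

# Import path assumed; the dox modules may live under a different package in the seqan repository.
from seqan.dox import lexer, raw_doc

def make_fixture():
    # Mirrors the setUp methods above: a single WORD token wrapped in a RawText,
    # plus a DoxFormatter for rendering.
    tok = lexer.Token('WORD', 'aword', 0, 0, 0)
    txt = raw_doc.RawText([tok])
    formatter = raw_doc.DoxFormatter()
    return tok, txt, formatter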