def doMain(args):
    """Entry point for the documentation build.

    First loads and migrates any legacy dddoc directories, then lexes and
    parses the dox comments of all regular input files, merging everything
    into one master RawDoc.

    @param args: Parsed command-line arguments; reads args.legacy_doc_dirs,
                 args.inputs, and args.debug.
    @return 1 on a parser error (after printing it); otherwise falls through
            (returns None) with master_doc fully merged.
            NOTE(review): master_doc is a local here — presumably this is a
            truncated view of the function and it is used/returned further
            down; confirm against the full file.
    """
    # Parse all legacy files.
    # Local import: only needed (and presumably only importable) when legacy
    # dirs are processed.
    import seqan.dddoc.core as core
    app = core.App()
    for path in args.legacy_doc_dirs:
        print 'Scanning %s...' % path
        app.loadFiles(path)
    migrated_doc = raw_doc.RawDoc()
    if args.legacy_doc_dirs:
        # loadingComplete() must run after all loadFiles() calls and before
        # migrate() reads app.dddoc_tree.
        app.loadingComplete()
        migrated_doc.entries = migration.migrate(app.dddoc_tree)
        print 'migrated_doc.entries', [e.name.text for e in migrated_doc.entries]
    # Parse all normal input files.
    fmgr = file_mgr.FileManager()
    master_doc = raw_doc.RawDoc()
    # Seed the master document with the migrated legacy entries (empty if no
    # legacy dirs were given).
    master_doc.merge(migrated_doc)
    fns = FileNameSource(args.inputs)
    for filename in fns.generate():
        if args.debug:
            print 'Processing %s' % filename
        the_file = fmgr.loadFile(filename)
        # One lexer per file; whitespace tokens are kept (skip_whitespace=False),
        # presumably because the dox parser is whitespace-sensitive — confirm.
        lex = lexer.Lexer(dox_tokens.LEXER_TOKENS, skip_whitespace=False)
        for comment in the_file.comments:
            # TODO(holtgrew): Also give offset.
            # Feed every comment of the file into the same lexer, preserving
            # original source positions for error messages.
            lex.input(comment.text, filename, comment.line, comment.col, comment.offset_col)
        parser = dox_parser.Parser()
        try:
            parser.parse(lex)
        except dox_parser.ParserError, e:
            # Report and abort on the first file that fails to parse.
            dox_parser.printParserError(e)
            return 1
        # Only merge documentation from files that parsed cleanly.
        master_doc.merge(parser.documentation)
def testMerge(self):
    """Check that RawDoc.merge appends the right doc's entries to the left.

    The right document must be left untouched, and the merged entries must
    keep their order: left's page first, then right's page.
    """
    # Build two one-page documents.
    left_doc = raw_doc.RawDoc()
    left_page = raw_doc.RawPage()
    left_doc.entries.append(left_page)

    right_doc = raw_doc.RawDoc()
    right_page = raw_doc.RawPage()
    right_doc.entries.append(right_page)

    left_doc.merge(right_doc)

    # Left gained right's entry; right still owns exactly its own entry.
    self.assertEqual(len(left_doc.entries), 2)
    self.assertEqual(len(right_doc.entries), 1)
    # Order is preserved: original entry first, merged-in entry second.
    self.assertEqual(left_doc.entries[0], left_page)
    self.assertEqual(left_doc.entries[1], right_page)
def __init__(self):
    """Set up the state machine: state stack, per-state handlers, result doc."""
    # The parser starts in (and bottoms out at) the 'top' state.
    self.states = ['top']
    # Registry of state name -> handler class; instantiated in this exact
    # order, each handler bound back to this object.
    handler_classes = [
        ('top', TopLevelState),
        ('class', ClassDocState),
        ('function', FunctionDocState),
        ('macro', MacroDocState),
        ('metafunction', MetafunctionDocState),
        ('concept', ConceptDocState),
        ('page', PageState),
        ('group', GroupState),
        ('var', VariableState),
        ('tag', TagState),
        ('enum', EnumState),
        ('adaption', AdaptionState),
        ('typedef', TypedefState),
    ]
    self.handlers = {}
    for state_name, handler_class in handler_classes:
        self.handlers[state_name] = handler_class(self)
    # Accumulates the parsed documentation entries.
    self.documentation = raw_doc.RawDoc()