def testTokenize(self):
    root = tokens.Token(None)
    grammer = lexers.Grammer()
    grammer.add('foo', re.compile('(?P<content>\w+) *'), FooBarComponent())
    grammer.add('word', re.compile('(?P<content>\w+) *'), WordComponent())
    lexer = lexers.Lexer()

    # Basic
    lexer.tokenize(root, grammer, u'foo bar')
    self.assertIsInstance(root(0), FooBar)
    self.assertEqual(root(0).content, u'foo')
    self.assertIsInstance(root(1), FooBar)
    self.assertEqual(root(1).content, u'bar')

    # Fall through
    root = tokens.Token(None)
    lexer.tokenize(root, grammer, u'foo other bar')
    self.assertIsInstance(root(0), FooBar)
    self.assertEqual(root(0).content, u'foo')
    self.assertIsInstance(root(1), tokens.Word)
    self.assertNotIsInstance(root(1), FooBar)
    self.assertEqual(root(1).content, u'other')
    self.assertIsInstance(root(2), FooBar)
    self.assertEqual(root(2).content, u'bar')
def testToken(self, mock):
    token = tokens.Token()
    self.assertEqual(token.name, 'Token')
    self.assertTrue(token.recursive)

    token = tokens.Token(recursive=False)
    self.assertFalse(token.recursive)
def testNodeTree(self):
    root = tokens.Token(None, recursive=False)
    tokens.Token(root)
    pick = do_pickle(root)

    self.assertFalse(root.recursive)
    self.assertFalse(pick.recursive)
    self.assertTrue(root(0).recursive)
    self.assertTrue(pick(0).recursive)
def _createFieldError(self, parent, token, page, modal_flag):
    """Helper for creating the error alert."""
    filename = page.local
    key = token['key']

    err = alert.AlertToken(None, brand=u'error')
    alert_title = alert.AlertTitle(err, brand=u'error',
                                   string=u'Missing Template Item: "{}"'.format(key))
    alert_content = alert.AlertContent(err, brand=u'error')
    token.copyToToken(alert_content)

    if modal_flag:
        modal_content = tokens.Token(None)
        core.Paragraph(modal_content,
                       string=u"The document must include the \"{0}\" template item, this can "
                              u"be included by adding the following to the markdown "
                              u"file ({1}):".format(key, filename))
        core.Code(modal_content,
                  content=u"!template! item key={0}\nInclude text (in MooseDocs format) "
                          u"regarding the \"{0}\" template item here.\n"
                          u"!template-end!".format(key))
        link = floats.create_modal_link(alert_title,
                                        title=u'Missing Template Item "{}"'.format(key),
                                        content=modal_content)
        materialicon.Icon(link, icon=u'help_outline', class_='small',
                          style='float:right;color:white;margin-bottom:5px;')

    self.renderer.render(parent, err, page)
def createLatexHelper(self, parent, token, page, desired):
    func = lambda p, t, u, l: latex.Command(p, 'hyperref', token=t,
                                            args=[latex.Bracket(string=l)])

    # Create optional content
    bookmark = token['bookmark']
    if desired is None:
        self._createOptionalContent(parent, token, page)
        return None

    url = str(desired.relativeDestination(page))
    head = heading.find_heading(self.translator, desired, bookmark)

    tok = tokens.Token(None)
    if head is None:
        msg = "The linked page ({}) does not contain a heading, so the filename " \
              "is being utilized.".format(desired.local)
        LOG.warning(common.report_error(msg, page.source,
                                        token.info.line if token.info else None,
                                        token.info[0] if token.info else token.text(),
                                        prefix='WARNING'))
        latex.String(parent, content=page.local)

    else:
        label = head.get('id') or re.sub(r' +', r'-', head.text().lower())
        href = func(parent, token, url, label)

        if len(token) == 0:
            head.copyToToken(tok)
        else:
            token.copyToToken(tok)

        self.renderer.render(href, tok, page)

    return None
def getRoot(self):
    """
    Create the AST root node.

    This is called by the Translator object.
    """
    return tokens.Token(None)
def createLatex(self, parent, token, page):
    ctoken = token(1)
    opts = get_listing_options(ctoken)

    cap = token(0)
    key = cap['key']
    if key:
        latex.String(opts[0], content="label={},".format(key))

    tok = tokens.Token()
    cap.copyToToken(tok)

    if key:
        latex.String(opts[0], content="caption=")
    else:
        latex.String(opts[0], content="title=")

    if not cap.children:
        latex.String(opts[0], content="\\mbox{}", escape=False)
    else:
        self.translator.renderer.render(latex.Brace(opts[0]), tok, page)

    latex.Environment(parent, 'lstlisting',
                      string=ctoken['content'].strip('\n'),
                      escape=False,
                      after_begin='\n',
                      before_end='\n',
                      args=opts,
                      info=token.info)

    token.children = list()
    return parent
def _renderField(self, parent, token, page, modal=None):
    """Helper to render tokens; the logic is the same across formats."""

    # Locate the replacement
    key = token['key']
    func = lambda n: (n.name == 'TemplateItem') and (n['key'] == key)
    replacement = anytree.search.find(token.root, filter_=func)

    if replacement:
        # Add beginning TemplateSubField
        for child in token:
            if (child.name == 'TemplateSubField') and (child['command'] == 'field-begin'):
                self.renderer.render(parent, child, page)

        # Render TemplateItem
        self.renderer.render(parent, replacement, page)

        # Add ending TemplateSubField
        for child in token:
            if (child.name == 'TemplateSubField') and (child['command'] == 'field-end'):
                self.renderer.render(parent, child, page)

        # Remove the TemplateFieldItem, otherwise the content will be rendered again
        replacement.remove()

    elif not token['required']:
        tok = tokens.Token(None)
        token.copyToToken(tok)
        self.renderer.render(parent, tok, page)

    else:
        self._createFieldError(parent, token, page, modal)
def createToken(self, info, parent):
    content = info['block'] if 'block' in info else info['inline']

    if self.settings['prefix'] is None:
        msg = "The 'prefix' option is required."
        raise exceptions.TokenizeException(msg)

    # Extract the unordered list
    self.reader.parse(parent, content, MooseDocs.BLOCK)
    ul = parent.children[-1]
    ul.parent = None

    # Check the list type
    if not isinstance(ul, tokens.UnorderedList):
        msg = "The content is required to be an unordered list (i.e., use '-')."
        raise exceptions.TokenizeException(msg)

    # Build the matrix
    prefix = self.settings['prefix']
    label = u'{}{:d}'.format(prefix, self.extension.increment(prefix))
    matrix = SQARequirementMatrix(parent)

    heading = self.settings['heading']
    if heading is not None:
        matrix.heading = tokens.Token(None) #pylint: disable=attribute-defined-outside-init
        self.reader.parse(matrix.heading, heading, MooseDocs.INLINE)

    for i, item in enumerate(ul.children):
        matrix_item = SQARequirementMatrixItem(matrix, label=u'{}.{:d}'.format(label, i))
        for child in item:
            child.parent = matrix_item

    return parent
def createHTMLHelper(self, parent, token, page, desired):
    bookmark = token['bookmark']

    # Handle 'optional' linking
    if desired is None:
        tok = tokens.Token(None)
        for child in token.copy():
            child.parent = tok
        self.renderer.render(parent, tok, page)
        return None

    url = unicode(desired.relativeDestination(page))
    if bookmark:
        url += '#{}'.format(bookmark)

    link = core.Link(None, url=url, info=token.info)
    if len(token.children) == 0:
        head = heading.find_heading(self.translator, desired, bookmark)
        if head is not None:
            for child in head:
                child.parent = link
        else:
            tokens.String(link, content=url)
    else:
        for child in token.copy():
            child.parent = link

    self.renderer.render(parent, link, page)
    return None
def _createOptionalContent(self, parent, token, page):
    """Renders the text without a link for an optional link."""
    tok = tokens.Token(None)
    token.copyToToken(tok)
    if len(tok) == 0: # Use the filename if no children exist
        tokens.String(tok, content=page.local)
    self.renderer.render(parent, tok, page)
def createMaterialize(self, token, parent):
    key = token.key
    func = lambda n: isinstance(n, SQADocumentItem) and (n.key == key)
    replacement = anytree.search.find(token.root, filter_=func, maxlevel=2)

    if replacement:
        if token.heading is not None:
            self.translator.renderer.process(parent, token.heading)
        self.translator.renderer.process(parent, replacement)

        # Remove item so it doesn't render again
        replacement.parent = None
        for child in replacement:
            child.parent = None

    else:
        filename = self.translator.current.local
        content = tokens.Token(None)
        self.translator.reader.parse(content, ERROR_CONTENT.format(key, filename))

        modal_title = tokens.String(None,
                                    content=u'Missing Template Item "{}"'.format(key))

        alert_title = tokens.Token(None)
        tokens.String(alert_title, content=u'Missing Template Item "{}"'.format(key))
        h_token = floats.ModalLink(alert_title, url=unicode(filename), content=content,
                                   title=modal_title, class_='moose-help')
        materialicon.IconToken(h_token, icon=u'help_outline')

        err = alert.AlertToken(token.parent, brand=u'error', title=alert_title)
        for child in token.children:
            child.parent = err

        self.translator.renderer.process(parent, err)
def testAlert(self):
    filename = os.path.join(MooseDocs.MOOSE_DIR, 'framework', 'doc', 'content',
                            'utilities', 'MooseDocs', 'extensions', 'alert.md')
    content = common.read(filename)
    ast = tokens.Token(None)
    self._reader.parse(ast, content)
    do_pickle(ast, timer=False)
    do_c_pickle(ast, timer=False)
def testParseExceptions(self):
    MooseDocs.LOG_LEVEL = logging.DEBUG
    reader = readers.Reader(lexers.RecursiveLexer('foo'))

    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.parse([], u'')
    self.assertIn("The argument 'root'", e.exception.message)

    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.parse(tokens.Token(), [])
    self.assertIn("The argument 'content'", e.exception.message)
def testParseExceptions(self):
    MooseDocs.LOG_LEVEL = logging.DEBUG
    reader = readers.Reader(lexers.RecursiveLexer('foo'))

    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.tokenize([], '', None)
    self.assertIn("The argument 'root'", str(e.exception))

    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.tokenize(tokens.Token(), [], None)
    self.assertIn("The argument 'content'", str(e.exception))

    MooseDocs.LOG_LEVEL = logging.INFO
def _addItems(self, parent, token, items, cls): #pylint: disable=unused-argument
    root_page = self.translator.current # token.root.page

    errors = []
    for obj in items:
        if obj.removed:
            continue

        li = html.Tag(parent, 'li', class_='{} collection-item'.format(cls))
        href = None

        #TODO: need to figure out how to get rid of 'systems' prefix:
        #      /Executioner/Adaptivity/index.md
        #      /Adaptivity/index.md
        if isinstance(obj, syntax.SyntaxNode):
            nodes = root_page.findall(os.path.join('syntax', obj.markdown()), exc=None)
        else:
            nodes = root_page.findall(obj.markdown(), exc=None)

        if len(nodes) > 1:
            msg = "Located multiple pages with the given filename:"
            for n in nodes:
                msg += '\n  {}'.format(n.fullpath)
            errors.append(msg)
        elif len(nodes) == 0:
            msg = "Failed to locate a page with the given filename: {}".format(obj.markdown())
            errors.append(msg)
        else:
            href = nodes[0].relativeDestination(root_page) # allow error

        html.Tag(li, 'a', class_='{}-name'.format(cls), string=unicode(obj.name), href=href)

        if obj.description is not None:
            desc = html.Tag(li, 'span', class_='{}-description'.format(cls))
            ast = tokens.Token(None)
            self.translator.reader.parse(ast, unicode(obj.description), group=MooseDocs.INLINE)
            self.translator.renderer.process(desc, ast)

    return errors
def testTokenizeException(self):
    root = tokens.Token(None)
    content = page.PageNodeBase(None)
    reader = readers.Reader(lexers.RecursiveLexer('foo'))
    translator = Translator(content, reader, HTMLRenderer(), [])
    translator.init('')
    reader.add('foo', WordComponent())
    reader.parse(root, u'throw bar')

    self.assertIsInstance(root(0), tokens.ExceptionToken)
    self.assertIsInstance(root(1), tokens.Word)
    self.assertEqual(root(1).content, u'bar')
def testTokenizeWithExtraContent(self, mock):
    # Extra
    root = tokens.Token(None)
    grammer = lexers.Grammer()
    grammer.add('foo', re.compile('(?P<content>\w+) *'), FooBarComponent())
    lexer = lexers.Lexer()

    lexer.tokenize(root, grammer, u'foo ???')
    self.assertIsInstance(root(0), FooBar)
    self.assertEqual(root(0).content, u'foo')
    self.assertEqual(mock.call_count, 1)
def testTokenizeWithExtraContent(self):
    # Extra
    root = tokens.Token(None)
    grammar = lexers.Grammar()
    grammar.add('foo', re.compile('(?P<content>\w+) *'), FooBarComponent())
    lexer = lexers.Lexer()

    lexer.tokenize(root, grammar, u'foo ???')
    self.assertIsInstance(root(0), FooBar)
    self.assertEqual(root(0).content, u'foo')
    self.assertIsInstance(root(1), tokens.ErrorToken)
    self.assertIn('Unprocessed', root(1).message)
def testTokenizeWithExtraContent(self):
    # Extra
    root = tokens.Token(None)
    grammar = lexers.Grammar()
    grammar.add('foo', re.compile('(?P<content>\w+) *'), FooBarComponent())
    lexer = lexers.Lexer()

    lexer.tokenize(root, 'foo ???', None, grammar)
    self.assertEqual(root(0).name, 'FooBar')
    self.assertEqual(root(0)['content'], 'foo')
    self.assertEqual(root(1).name, 'ErrorToken')
    self.assertIn('Unprocessed', root(1)['message'])
def testCreateToken(self):
    """
    Test that the createToken method is called.
    """
    class TestToken(TokenComponent):
        PARSE_SETTINGS = False
        def createToken(self, *args):
            self.count = 1

    info = mock.Mock(spec=LexerInformation)
    parent = tokens.Token()
    comp = TestToken()
    comp(info, parent)
    self.assertEqual(comp.count, 1)
def tokenize(self):
    """
    Perform tokenization of the content, using the cache if the content has not changed.
    """
    if self.modified() or (self.content is None):
        self._ast = None
        self._result = None
        self.read()

    if self._ast is None:
        self._ast = tokens.Token(None)
        self.translator.reader.parse(self._ast, self.content)

    return self._ast
def testTokenizeException(self):
    root = tokens.Token(None)
    reader = readers.Reader(lexers.RecursiveLexer('block', 'inline'))
    reader.add('block', WordComponent())
    reader.add('inline', LetterComponent())
    reader.tokenize(root, 'throw bar', pages.Page('foo', source='foo'))

    self.assertEqual(root(0).name, 'ErrorToken')
    self.assertEqual(root(1).name, 'Word')
    self.assertEqual(root(1)(0).name, 'Letter')
    self.assertEqual(root(1)(1).name, 'Letter')
    self.assertEqual(root(1)(2).name, 'Letter')
    self.assertEqual(root(1)(0)['content'], 'b')
    self.assertEqual(root(1)(1)['content'], 'a')
    self.assertEqual(root(1)(2)['content'], 'r')
def createMaterialize(self, parent, token, page):
    key = token.key
    func = lambda n: isinstance(n, SQADocumentItem) and (n.key == key)
    replacement = anytree.search.find(token.root, filter_=func, maxlevel=2)

    if replacement:
        self.renderer.render(parent, replacement, page)

        # Remove item so it doesn't render again
        replacement.parent = None
        for child in replacement:
            child.parent = None

    else:
        filename = page.local
        err = alert.AlertToken(None, brand=u'error')
        alert_title = alert.AlertTitle(err, brand=u'error',
                                       string=u'Missing Template Item "{}"'.format(key))
        alert_content = alert.AlertContent(err, brand=u'error')

        modal_content = tokens.Token(None)
        core.Paragraph(modal_content,
                       string=u"The document must include the \"{0}\" template item, this can "
                              u"be included by adding the following to the markdown "
                              u"file ({1}):".format(key, filename))
        core.Code(modal_content,
                  code=u"!sqa! item key={0}\nInclude text (in MooseDocs format) "
                       u"regarding the \"{0}\" template item here.\n"
                       u"!sqa-end!".format(key))

        link = floats.create_modal_link(alert_title,
                                        title=u'Missing Template Item "{}"'.format(key),
                                        content=modal_content)
        materialicon.IconToken(link, icon=u'help_outline',
                               class_=u'material-icons moose-help')

        for child in token.children:
            child.parent = alert_content

        self.renderer.render(parent, err, page)
def testBasic(self):
    root = tokens.Token(None)
    content = page.PageNodeBase(None)
    reader = readers.Reader(lexers.RecursiveLexer('block', 'inline'))
    translator = Translator(content, reader, HTMLRenderer(), [])
    translator.init('')
    reader.add('block', BlockComponent())
    reader.add('inline', WordComponent())
    reader.parse(root, u'foo bar')

    self.assertIsInstance(root(0), tokens.Token)
    self.assertIsInstance(root(0)(0), tokens.Word)
    self.assertEqual(root(0)(0).content, u'foo')
    self.assertIsInstance(root(0)(1), tokens.Word)
    self.assertEqual(root(0)(1).content, u'bar')
def testTokenize(self):
    root = tokens.Token(None)
    grammar = lexers.Grammar()
    grammar.add('foo', re.compile('(?P<content>\w+) *'), FooBarComponent())
    grammar.add('word', re.compile('(?P<content>\w+) *'), WordComponent())
    lexer = lexers.Lexer()

    # Basic
    lexer.tokenize(root, 'foo bar', None, grammar)
    self.assertEqual(root(0).name, 'FooBar')
    self.assertEqual(root(0)['content'], 'foo')
    self.assertEqual(root(1).name, 'FooBar')
    self.assertEqual(root(1)['content'], 'bar')

    # Fall through
    root = tokens.Token(None)
    lexer.tokenize(root, 'foo other bar', None, grammar)
    self.assertEqual(root(0).name, 'FooBar')
    self.assertEqual(root(0)['content'], 'foo')
    self.assertEqual(root(1).name, 'Word')
    self.assertEqual(root(1)['content'], 'other')
    self.assertEqual(root(2).name, 'FooBar')
    self.assertEqual(root(2)['content'], 'bar')
def createToken(self, info, parent):
    title = self.settings.pop('title', None)
    brand = info['subcommand']

    if title:
        title_root = tokens.Token(None)
        self.reader.parse(title_root, title, MooseDocs.INLINE)
    else:
        title_root = None

    if self.settings['prefix'] is not None:
        prefix = self.settings['prefix']
    else:
        prefix = self.extension.get('use-title-prefix', True)

    return AlertToken(parent, brand=brand, prefix=prefix, title=title_root)
def testTokenize(self):
    lexer = lexers.RecursiveLexer('block', 'inline')
    lexer.add('block', 'foo', re.compile('(?P<inline>\w+) *'), letters_func)
    lexer.add('inline', 'bar', re.compile('(?P<content>\w)'), letter_func)

    root = tokens.Token(None)
    lexer.tokenize(root, 'foo', None, lexer.grammar('block'))

    self.assertIsInstance(root(0), tokens.Token)
    self.assertEqual(root(0).name, 'Letters')
    self.assertEqual(root(0)(0).name, 'Letter')
    self.assertEqual(root(0)(0)['content'], 'f')
    self.assertEqual(root(0)(1).name, 'Letter')
    self.assertEqual(root(0)(1)['content'], 'o')
    self.assertEqual(root(0)(2).name, 'Letter')
    self.assertEqual(root(0)(2)['content'], 'o')
def testTokenize(self):
    lexer = lexers.RecursiveLexer('block', 'inline')
    lexer.add('block', 'foo', re.compile('(?P<inline>\w+) *'), EmptyComponent())
    lexer.add('inline', 'bar', re.compile('(?P<content>\w)'), WordComponent())

    root = tokens.Token(None)
    lexer.tokenize(root, lexer.grammer(), u'foo')

    self.assertIsInstance(root(0), tokens.Token)
    self.assertNotIsInstance(root(0), tokens.Word)
    self.assertIsInstance(root(0)(0), tokens.Word)
    self.assertEqual(root(0)(0).content, u'f')
    self.assertIsInstance(root(0)(1), tokens.Word)
    self.assertEqual(root(0)(1).content, u'o')
    self.assertIsInstance(root(0)(2), tokens.Word)
    self.assertEqual(root(0)(2).content, u'o')
def createToken(self, parent, info, page):
    marker = info['marker']
    n = len(marker)
    token = tokens.Token(self.TOKEN, parent) #pylint: disable=not-callable

    strip_regex = re.compile(r'^ {%s}(.*?)$' % n, flags=re.MULTILINE)

    for item in self.ITEM_RE.finditer(info['items']):
        content = ' '*n + item.group('item')
        indent = re.search(r'^\S', content, flags=re.MULTILINE|re.UNICODE)
        if indent:
            msg = "List item content must be indented by {} to match the list item " \
                  "characters of '{}', to end a list item you must use two empty lines."
            raise exceptions.MooseDocsException(msg, n, marker)

        content = strip_regex.sub(r'\1', content)
        self.reader.tokenize(ListItem(token), content, page, line=info.line)

    return token