def _RunConnectivityCheck(self, testCase, content):
    """Stream *content* through tokenizer and block parser and report any broken
    double-links (PreviousToken/NextToken) between consecutive tokens.

    Findings are printed in color via ``Console.Foreground``; nothing is returned.
    """
    tokenStream = Tokenizer.GetVHDLTokenizer(content, alphaCharacters=self.__ALPHA_CHARACTERS__, numberCharacters="")
    blockStream = TokenToBlockParser.Transform(tokenStream)
    try:
        blocks = iter(blockStream)
        head = next(blocks)
        if (not isinstance(head, StartOfDocumentBlock)):
            print("{RED}First block is not StartOfDocumentBlock: {block}{NOCOLOR}".format(block=head, **Console.Foreground))
        elif (not isinstance(head.StartToken, StartOfDocumentToken)):
            print("{RED}First token is not StartOfDocumentToken: {token}{NOCOLOR}".format(token=head.StartToken, **Console.Foreground))

        finalBlock = None
        previousToken = head.StartToken
        for currentBlock in blocks:
            if isinstance(currentBlock, EndOfDocumentBlock):
                finalBlock = currentBlock
                break
            # Walk the tokens of the current block and verify both link directions.
            for currentToken in iter(currentBlock):
                if (currentToken.NextToken is None):
                    print("{RED}Token has an open end.{NOCOLOR}".format(**Console.Foreground))
                elif (previousToken.NextToken is not currentToken):
                    print("{RED}Last token is not connected to the current one.{NOCOLOR}".format(**Console.Foreground))
                elif (currentToken.PreviousToken is not previousToken):
                    print("{RED}Current token is not connected to lastToken.{NOCOLOR}".format(**Console.Foreground))
                previousToken = currentToken
        else:
            # Loop ran to exhaustion without hitting the break above.
            print("{RED}No EndOfDocumentBlock found.{NOCOLOR}".format(**Console.Foreground))

        if (not isinstance(finalBlock, EndOfDocumentBlock)):
            print("{RED}Last block is not EndOfDocumentBlock: {block}{NOCOLOR}".format(block=finalBlock, **Console.Foreground))
        elif (not isinstance(finalBlock.StartToken, EndOfDocumentToken)):
            print("{RED}Last block is not EndOfDocumentToken: {token}{NOCOLOR}".format(token=finalBlock.StartToken, **Console.Foreground))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **Console.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **Console.Foreground))
def test_TokenLinking(self) -> None:  # test['name']
    """Assert that every token in the stream is correctly double-linked to its neighbors."""
    stream = iter(Tokenizer.GetVHDLTokenizer(self.code))
    first = next(stream)
    self.assertIsInstance(first, StartOfDocumentToken, msg="First token is not StartOfDocumentToken: {token}".format(token=first))
    self.assertIsNone(first.PreviousToken, msg="First token has no open start.")

    previous: Token = first
    final: Token = None
    for current in stream:
        if isinstance(current, EndOfDocumentToken):
            final = current
            break
        self.assertEqual(previous.NextToken, current, msg="Last token is not connected to the current token: {token}".format(token=current))
        self.assertEqual(previous, current.PreviousToken, msg="Current token is not connected to lastToken: {token}".format(token=current))
        previous = current
    else:
        self.fail(msg="No EndOfDocumentToken found.")

    self.assertIsInstance(final, EndOfDocumentToken, msg="End token is not EndOfDocumentToken: {token}".format(token=final))
    self.assertEqual(previous.NextToken, final, msg="Last token is not connected to the end token: {token}".format(token=current))
    self.assertEqual(previous, final.PreviousToken, msg="End token is not connected to lastToken: {token}".format(token=current))
    self.assertIsNone(final.NextToken, msg="End token has no open end: {token}".format(token=final.NextToken))
def _RunExpectedBlocks(self, testCase, content):
    """Count the block types produced for *content* and compare them against the
    test case's expected block histogram, printing PASSED/FAILED."""
    counter = testCase.GetExpectedBlocks()
    tokenStream = Tokenizer.GetVHDLTokenizer(content, alphaCharacters=self.__ALPHA_CHARACTERS__, numberCharacters="")
    blockStream = TokenToBlockParser.Transform(tokenStream)
    try:
        for currentBlock in blockStream:
            counter.Count(currentBlock.__class__)
        if counter.Check():
            print(" Expected blocks check - {GREEN}PASSED{NOCOLOR}".format(**Console.Foreground))
        else:
            print(" Expected blocks check - {RED}FAILED{NOCOLOR}".format(**Console.Foreground))
            counter.PrintReport()
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **Console.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **Console.Foreground))
def Parse(self, content=None):  # FIXME: parameter type
    """Tokenize, blockify and group the document content, then build the DOM
    starting from the StartOfDocumentGroup.

    :param content: source text; when ``None``, the file at ``self._filePath`` is read.
    :raises DOMParserException: if the file is missing or the group stream does not
        start/end with the expected sentinel groups.
    """
    if (content is None):
        # Lazily read the file only when no content was handed in.
        if (not self._filePath.exists()):
            raise DOMParserException("File '{0!s}' does not exist.".format(self._filePath))
        with self._filePath.open('r') as fileHandle:
            content = fileHandle.read()

    tokenStream = Tokenizer.GetVHDLTokenizer(content)
    blockStream = TokenToBlockParser.Transform(tokenStream)
    groupStream = BlockToGroupParser.Transform(blockStream)
    groups = list(groupStream)

    firstGroup = groups[0]
    lastGroup = groups[-1]
    if (not isinstance(firstGroup, StartOfDocumentGroup)):
        raise DOMParserException("Expected group is not a StartOfDocumentGroup.", firstGroup)
    elif (not isinstance(lastGroup, EndOfDocumentGroup)):
        raise DOMParserException("Expected group is not an EndOfDocumentGroup.", lastGroup)

    # run recursively (node, group)
    self.stateParse(self, firstGroup)
def HandleBlockStreaming(self: FrontEndProtocol, args):
    """Print every block of the given VHDL file color-coded by block category;
    verbose mode additionally dumps each block's tokens."""
    self.PrintHeadline()
    sourceFile = Path(args.Filename)
    if (not sourceFile.exists()):
        print("File '{0!s}' does not exist.".format(sourceFile))
    with sourceFile.open('r') as fileHandle:
        sourceText = fileHandle.read()

    blockStream = TokenToBlockParser.Transform(Tokenizer.GetVHDLTokenizer(sourceText))
    # Ordered (classes, color) pairs; the first match decides the color.
    # NOTE(review): the original listed Entity.NameBlock twice in the DARK_RED
    # group — possibly Entity.BeginBlock was intended; verify upstream.
    colorMap = (
        ((LinebreakBlock, IndentationBlock), "DARK_GRAY"),
        ((CommentBlock,), "DARK_GREEN"),
        ((Entity.NameBlock, Entity.EndBlock), "DARK_RED"),
        ((GenericList.OpenBlock, GenericList.DelimiterBlock, GenericList.CloseBlock), "DARK_BLUE"),
        ((PortList.OpenBlock, PortList.DelimiterBlock, PortList.CloseBlock), "DARK_CYAN"),
        ((InterfaceConstantBlock, InterfaceSignalBlock), "BLUE"),
    )
    try:
        for currentBlock in blockStream:
            color = "YELLOW"
            for classes, candidate in colorMap:
                if isinstance(currentBlock, classes):
                    color = candidate
                    break
            # Builds e.g. "{DARK_GRAY}{block!r}{NOCOLOR}" before the final formatting.
            self.WriteNormal("{{{0}}}{{block!r}}{{NOCOLOR}}".format(color).format(block=currentBlock, **self.Foreground))
            for currentToken in currentBlock:
                self.WriteVerbose(repr(currentToken))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

    self.exit()
def test_BlockLinking(self) -> None:  # test['name']
    """Verify double-linking between consecutive blocks and between the tokens
    they contain, while expecting the parser to raise a BlockParserException."""
    with self.assertRaises(BlockParserException) as ex:
        stream = iter(TokenToBlockParser.Transform(Tokenizer.GetVHDLTokenizer(self.code)))
        head = next(stream)
        self.assertIsInstance(head, StartOfDocumentBlock, msg="First block is not StartOfDocumentBlock: {block}".format(block=head))
        headToken = head.StartToken
        self.assertIsInstance(headToken, StartOfDocumentToken, msg="First token is not StartOfDocumentToken: {token}".format(token=headToken))

        previousBlock: Block = head
        finalBlock: Block = None
        previousToken: Token = headToken
        for currentBlock in stream:
            if isinstance(currentBlock, EndOfDocumentBlock):
                finalBlock = currentBlock
                break
            # Block checks
            self.assertEqual(previousBlock.NextBlock, currentBlock, msg="Last block is not connected to the current block: {block}".format(block=currentBlock))
            self.assertEqual(previousBlock, currentBlock.PreviousBlock, msg="Current block is not connected to last block: {block}".format(block=currentBlock))
            # Token checks
            for currentToken in iter(currentBlock):
                self.assertIsNotNone(currentToken.NextToken, msg="Token has an open end (token).".format(token=currentToken.NextToken))
                self.assertEqual(previousToken.NextToken, currentToken, msg="Last token is not connected to the current token.")
                self.assertIsNotNone(currentToken.PreviousToken, msg="Token has an open end (PreviousToken).")
                self.assertEqual(currentToken.PreviousToken, previousToken, msg="Current token is not connected to lastToken.")
                previousToken = currentToken
            previousBlock = currentBlock
        else:
            self.fail(msg="No EndOfDocumentBlock found.")

        # Block checks
        self.assertIsInstance(finalBlock, EndOfDocumentBlock, msg="End block is not EndOfDocumentblock: {token}".format(token=finalBlock))
        self.assertIsInstance(finalBlock.EndToken, EndOfDocumentToken, msg="End block's token is not EndOfDocumentToken: {token}".format(token=finalBlock.EndToken))
        # Token checks
        self.assertEqual(previousToken.NextToken, finalBlock.EndToken, msg="Last token is not connected to the end token.")
        self.assertEqual(previousToken, finalBlock.EndToken.PreviousToken, msg="End token is not connected to lastToken.")
        self.assertIsNone(finalBlock.EndToken.NextToken, msg="End token has no open end: {token}".format(token=finalBlock.EndToken.NextToken))
def HandleTokenize(self, args):
    """Tokenize the given VHDL file and print each token color-coded by its token class."""
    self.PrintHeadline()
    sourceFile = Path(args.Filename)
    if (not sourceFile.exists()):
        print("File '{0!s}' does not exist.".format(sourceFile))
    with sourceFile.open('r') as fileHandle:
        sourceText = fileHandle.read()

    from pyVHDLParser.Base import ParserException
    from pyVHDLParser.Token import StartOfDocumentToken, EndOfDocumentToken, CharacterToken, SpaceToken, WordToken, LinebreakToken, CommentToken, IndentationToken
    from pyVHDLParser.Token.Parser import Tokenizer

    tokenStream = Tokenizer.GetVHDLTokenizer(sourceText)
    # Ordered (classes, color) pairs; the first match decides the color.
    colorMap = (
        ((LinebreakToken, SpaceToken, IndentationToken), "DARK_GRAY"),
        ((CommentToken,), "DARK_GREEN"),
        ((CharacterToken,), "DARK_CYAN"),
        ((WordToken,), "WHITE"),
        ((StartOfDocumentToken, EndOfDocumentToken), "YELLOW"),
    )
    try:
        for currentToken in tokenStream:
            color = "RED"
            for classes, candidate in colorMap:
                if isinstance(currentToken, classes):
                    color = candidate
                    break
            # Builds e.g. "{DARK_GRAY}{block}{NOCOLOR}" before the final formatting.
            print("{{{0}}}{{block}}{{NOCOLOR}}".format(color).format(block=currentToken, **self.Foreground))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

    self.exit()
def test_KeywordToWordTokenMissmatch(self) -> None:
    """Check that constructing an EntityKeyword from a plain WordToken whose
    value is 'keyword' raises a TokenizerException."""
    tokenStream = Tokenizer.GetVHDLTokenizer(self.code)
    tokenIterator = iter(tokenStream)

    token = next(tokenIterator)
    self.assertIsInstance(
        token, StartOfDocumentToken,
        msg="Token has not expected type.\n Actual: {actual} pos={pos!s}\n Expected: {expected}".format(
            actual=token.__class__.__qualname__, pos=token.Start,
            expected=StartOfDocumentToken.__qualname__))

    token = next(tokenIterator)
    keywordToken = token
    # FIX: the failure message previously reported StartOfDocumentToken as the
    # expected type although WordToken is what is asserted here (copy-paste bug).
    self.assertIsInstance(
        token, WordToken,
        msg="Token has not expected type.\n Actual: {actual} pos={pos!s}\n Expected: {expected}".format(
            actual=token.__class__.__qualname__, pos=token.Start,
            expected=WordToken.__qualname__))
    self.assertTrue(
        token == "keyword",
        msg="The token's value does not match.\n Context: {context}\n Actual: {actual}\n Expected: {expected}".format(
            context="at {pos!s}".format(pos=token.Start),
            actual="'{token!r}' of {type}".format(token=token, type=token.__class__.__qualname__),
            expected="'{value}' of {type}".format(value="keyword", type=WordToken.__qualname__)))

    token = next(tokenIterator)
    self.assertIsInstance(
        token, EndOfDocumentToken,
        msg="Token has not expected type.\n Actual: {actual} pos={pos!s}\n Expected: {expected}".format(
            actual=token.__class__.__qualname__, pos=token.Start,
            expected=EndOfDocumentToken.__qualname__))

    # Converting a non-keyword WordToken into an EntityKeyword must fail.
    with self.assertRaises(TokenizerException) as ex:
        _ = EntityKeyword(keywordToken)
def test_BlockSequenceError(self) -> None:  # test['name']
    """Compare the produced block stream against the expected (type, value)
    sequence and expect the parser to raise a BlockParserException."""
    tokenStream = Tokenizer.GetVHDLTokenizer(self.code)
    blockStream = TokenToBlockParser.Transform(tokenStream)
    blockIterator = iter(blockStream)
    listIterator = iter(self.blockStream.blocks)
    with self.assertRaises(BlockParserException) as ex:
        try:
            while True:
                block = next(blockIterator)
                item = next(listIterator)
                self.assertIsInstance(
                    block, item[0],
                    msg="Block has not expected type.\n Actual: {actual!s}\n Expected: {expected}".format(
                        # actual=block.__class__.__qualname__,
                        actual=block,
                        expected=item[0].__qualname__
                    )
                )
                if item[1] is not None:
                    blockValue = str(block)
                    # FIX: 'failIf' was deprecated since Python 3.1 and removed in
                    # Python 3.12; 'assertFalse' is the direct replacement.
                    self.assertFalse(
                        blockValue != item[1],
                        msg="The blocks's value does not match.\n Actual: '{actual}'\n Expected: '{expected}'".format(
                            actual=CharacterTranslation(blockValue, oneLiner=True),
                            expected=CharacterTranslation(item[1], oneLiner=True)
                        )
                    )
        except TokenizerException as ex:
            self.fail(msg="Unexpected 'TokenizerException' at {pos}".format(pos=ex.Position))
        except BlockParserException:
            raise
        except StopIteration:
            pass
        except AssertionError:
            raise
        except Exception as ex:
            self.fail(msg="Unexpected exception '{exname}' := {ex!s}.".format(ex=ex, exname=ex.__class__.__qualname__))
    print(ex)
def test_TokenSequence(self) -> None:  # test['name']
    """Compare the produced token stream against the expected (type, value) sequence."""
    tokenStream = Tokenizer.GetVHDLTokenizer(self.code)
    tokenIterator = iter(tokenStream)
    listIterator = iter(self.tokenstream.tokens)
    try:
        while True:
            token = next(tokenIterator)
            item = next(listIterator)
            self.assertIsInstance(
                token, item[0],
                msg="Token has not expected type.\n Actual: {actual} pos={pos!s}\n Expected: {expected}".format(
                    actual=token.__class__.__qualname__, pos=token.Start,
                    expected=item[0].__qualname__))
            if item[1] is not None:
                # FIX: 'failIf' was deprecated since Python 3.1 and removed in
                # Python 3.12; 'assertFalse' is the direct replacement.
                self.assertFalse(
                    token != item[1],
                    msg="The token's value does not match.\n Context: {context}\n Actual: {actual}\n Expected: {expected}".format(
                        context="at {pos!s}".format(pos=token.Start),
                        actual="'{token!r}' of {type}".format(token=token, type=token.__class__.__qualname__),
                        expected="'{value}' of {type}".format(value=item[1], type=item[0].__qualname__)))
    except TokenizerException as ex:
        self.fail(msg="Unexpected 'TokenizerException' ({ex!s}) at {pos}".format(ex=ex, pos=ex.Position))
    except StopIteration:
        pass
    except AssertionError:
        raise
    except Exception as ex:
        self.fail(msg="Unexpected exception '{exname}' := {ex!s}.".format(ex=ex, exname=ex.__class__.__qualname__))
def Parse(self, content=None):  # FIXME: parameter type
    """Tokenize, blockify and group the document content, wrapping all parser
    failures into DOMParserException, then build the DOM.

    :param content: source text; when ``None``, the file at ``self._path`` is read.
    :raises DOMParserException: on a missing file, any parser failure, or when the
        group stream does not start/end with the expected sentinel groups.
    """
    if (content is None):
        if (not self._path.exists()):
            raise DOMParserException("File '{0!s}' does not exist.".format(self._path)) \
                from FileNotFoundError(str(self._path))
        with self._path.open('r') as fileHandle:
            content = fileHandle.read()

    tokenStream = Tokenizer.GetVHDLTokenizer(content)
    blockStream = TokenToBlockParser.Transform(tokenStream)
    groupStream = BlockToGroupParser.Transform(blockStream)
    # NOTE(review): BlockParserException -> ex.Group and GroupParserException ->
    # ex.Block looks swapped — verify the exception attributes upstream.
    try:
        groups = list(groupStream)
    except BlockParserException as ex:
        raise DOMParserException("Error while parsing and indexing the source code.", ex.Group) from ex
    except GroupParserException as ex:
        raise DOMParserException("Unexpected ParserException.", ex.Block) from ex
    except ParserException as ex:
        raise DOMParserException("Unexpected ParserException.", ex.Position) from ex
    except Exception as ex:
        raise DOMParserException("Unexpected exception.", None) from ex

    firstGroup = groups[0]
    lastGroup = groups[-1]
    if (not isinstance(firstGroup, StartOfDocumentGroup)):
        raise DOMParserException("Expected group is not a StartOfDocumentGroup.", firstGroup)
    elif (not isinstance(lastGroup, EndOfDocumentGroup)):
        raise DOMParserException("Expected group is not an EndOfDocumentGroup.", lastGroup)

    # run recursively (node, group)
    self.stateParse(self, firstGroup)
def HandleGroupStreaming(self, args):
    """Print every group of the given VHDL file and, below it, the group's
    elements color-coded by token class."""
    self.PrintHeadline()
    sourceFile = Path(args.Filename)
    if (not sourceFile.exists()):
        print("File '{0!s}' does not exist.".format(sourceFile))
    with sourceFile.open('r') as fileHandle:
        sourceText = fileHandle.read()

    from pyVHDLParser.Base import ParserException
    from pyVHDLParser.Token import CharacterToken, SpaceToken, WordToken, LinebreakToken, CommentToken, IndentationToken
    from pyVHDLParser.Token.Keywords import BoundaryToken, EndToken, KeywordToken, DelimiterToken
    from pyVHDLParser.Token.Parser import Tokenizer
    from pyVHDLParser.Blocks import TokenToBlockParser
    from pyVHDLParser.Groups import BlockToGroupParser

    print("{RED}{line}{NOCOLOR}".format(line="=" * 160, **self.Foreground))
    try:
        # Materialize both stages so parser errors surface here.
        tokenList = list(Tokenizer.GetVHDLTokenizer(sourceText))
        blockList = list(TokenToBlockParser.Transform(tokenList))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

    # NOTE(review): if the tokenizer/block parser above raised, the block list is
    # unbound here and the next line fails with a NameError — consider aborting
    # right after the error report.
    groupStream = BlockToGroupParser.Transform(blockList)
    try:
        for currentGroup in groupStream:
            print("{CYAN}{block}{NOCOLOR}".format(block=currentGroup, **self.Foreground))
            for element in currentGroup:
                if isinstance(element, (IndentationToken, LinebreakToken, BoundaryToken, DelimiterToken, EndToken)):
                    print("{DARK_GRAY} {block}{NOCOLOR}".format(block=element, **self.Foreground))
                elif isinstance(element, (CommentToken)):
                    print("{DARK_GREEN} {block}{NOCOLOR}".format(block=element, **self.Foreground))
                elif isinstance(element, KeywordToken):
                    print("{DARK_CYAN} {block}{NOCOLOR}".format(block=element, **self.Foreground))
                elif isinstance(element, (WordToken, SpaceToken, CharacterToken)):
                    print("{DARK_GREEN} {block}{NOCOLOR}".format(block=element, **self.Foreground))
                else:
                    print("{YELLOW} {block}{NOCOLOR}".format(block=element, **self.Foreground))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

    self.exit()
def HandleTokenize(self: FrontEndProtocol, args):
    # Tokenize the given file, print the tokens color-coded by token class, then
    # emit a Graphviz (.gv) rendering of the double-linked token stream next to
    # the input file.
    self.PrintHeadline()
    file = Path(args.Filename)
    if (not file.exists()):
        print("File '{0!s}' does not exist.".format(file))
    with file.open('r') as fileHandle:
        content = fileHandle.read()

    tokenStream = Tokenizer.GetVHDLTokenizer(content)
    tokenIterator = iter(tokenStream)
    firstToken = next(tokenIterator)
    # Drain the generator so the whole token chain exists before it is walked
    # again via the Prev/Next links below.
    try:
        while next(tokenIterator):
            pass
    except StopIteration:
        pass

    if isinstance(firstToken, StartOfDocumentToken):
        print("{YELLOW}{token!r}{NOCOLOR}".format(token=firstToken, **self.Foreground))
    try:
        tokenIterator = firstToken.GetIterator(inclusiveStopToken=False)
        for token in tokenIterator:
            if isinstance(token, (LinebreakToken, SpaceToken, IndentationToken)):
                print("{DARK_GRAY}{token!r}{NOCOLOR}".format(token=token, **self.Foreground))
            elif isinstance(token, CommentToken):
                print("{DARK_GREEN}{token!r}{NOCOLOR}".format(token=token, **self.Foreground))
            elif isinstance(token, CharacterToken):
                print("{DARK_CYAN}{token!r}{NOCOLOR}".format(token=token, **self.Foreground))
            elif isinstance(token, WordToken):
                print("{WHITE}{token!r}{NOCOLOR}".format(token=token, **self.Foreground))
            else:
                print("{RED}{token!r}{NOCOLOR}".format(token=token, **self.Foreground))

        # 'token' is the last token before the stop token; one more step along
        # the chain should reach the EndOfDocumentToken.
        tokenIterator = token.GetIterator()
        lastToken = next(tokenIterator)
        if isinstance(lastToken, EndOfDocumentToken):
            print("{YELLOW}{token!r}{NOCOLOR}".format(token=lastToken, **self.Foreground))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

    # Graphviz node names follow the pattern t_<line>_<id>.
    nodeFormat = "t_{line}_{id}"
    nodeID = 0
    line = 0
    node = nodeFormat.format(line=line, id=nodeID)
    graphvizBuffer = dedent("""\
        digraph TokenStream {{
        graph [rankdir=LR splines=ortho]
        node [shape=record];
        {node} [style=filled, fillcolor=gold, label="{caption}|{{None|None|Next}}"];
        """).format(node=node, caption=firstToken.__class__.__qualname__)

    lline = 0
    sameRanked = [node]
    lineStarts = [node]
    tokenIterator = firstToken.GetIterator(inclusiveStopToken=False)
    for token in tokenIterator:
        nodeID += 1
        nnode = nodeFormat.format(line=line, id=nodeID)
        # Link the previous node to the new one and declare the new node.
        graphvizBuffer += dedent("""\
            {lnode} -> {node};
            {node} [style=filled, fillcolor={color}, label="{caption}|{{Prev|{content}|Next}}"];
            """).format(node=nnode, lnode=node, color=translate(token), caption=token.__class__.__qualname__, content=CharacterTranslation(str(token)))
        node = nnode
        if len(sameRanked) == 0:
            lineStarts.append(node)
        sameRanked.append(node)
        if isinstance(token, (LinebreakToken, SingleLineCommentToken)):
            # graphvizBuffer += dedent("""\
            #
            #   {{ rank=same {nodes} }}
            #
            # """).format(nodes=" ".join(sameRanked))
            sameRanked = []
            line += 1
        else:
            lline = line

    tokenIterator = token.GetIterator()
    lastToken = next(tokenIterator)
    # Close the graph with the final (gold) node and rank the line starts together.
    graphvizBuffer += dedent("""\
        t_{lline}_{lid} -> t_{line}_00;
        t_{line}_00 [style=filled, fillcolor=gold, label="{caption}|{{Prev|None|None}}"];
        {{ rank=same {nodes} }}
        }}
        """).format(line=line, lline=lline, lid=nodeID - 1, caption=lastToken.__class__.__qualname__, nodes=" ".join(lineStarts))

    # Write the graph next to the source file, replacing its suffix with '.gv'.
    gvFile = file.with_suffix('.gv')
    with gvFile.open('w') as fileHandle:
        fileHandle.write(graphvizBuffer)
    self.exit()
def get_token_stream(self):
    """Read the configured file and return a VHDL token stream for its content."""
    with open(self._filename, 'r') as handle:
        text = handle.read()
    return Tokenizer.GetVHDLTokenizer(text)
else: print("File name expected.") Exit.exit(-1) if (not file.exists()): print("File '{0!s}' does not exist.".format(file)) with file.open('r') as fileHandle: content = fileHandle.read() # ============================================================================== if (mode & 6 == 2): from pyVHDLParser.Token.Parser import Tokenizer print("{RED}{line}{NOCOLOR}".format(line="=" * 160, **Console.Foreground)) vhdlTokenStream = Tokenizer.GetVHDLTokenizer(content) try: for vhdlToken in vhdlTokenStream: if isinstance(vhdlToken, (LinebreakToken, SpaceToken, IndentationToken)): print("{DARK_GRAY}{block}{NOCOLOR}".format( block=vhdlToken, **Console.Foreground)) elif isinstance(vhdlToken, CommentToken): print("{DARK_GREEN}{block}{NOCOLOR}".format( block=vhdlToken, **Console.Foreground)) elif isinstance(vhdlToken, CharacterToken): print("{DARK_CYAN}{block}{NOCOLOR}".format( block=vhdlToken, **Console.Foreground)) elif isinstance(vhdlToken, StringToken): print("{WHITE}{block}{NOCOLOR}".format(block=vhdlToken,
def HandleCheckTokenize(self, args):
    # Tokenize the given file and verify the double-linking (PreviousToken /
    # NextToken) of the resulting token chain, reporting every broken link with
    # detailed context.
    self.PrintHeadline()
    file = Path(args.Filename)
    if (not file.exists()):
        print("File '{0!s}' does not exist.".format(file))
    with file.open('r') as fileHandle:
        content = fileHandle.read()

    from pyVHDLParser.Base import ParserException
    from pyVHDLParser.Token import StartOfDocumentToken, EndOfDocumentToken
    from pyVHDLParser.Token.Parser import Tokenizer

    vhdlTokenStream = Tokenizer.GetVHDLTokenizer(content)
    try:
        tokenIterator = iter(vhdlTokenStream)
        firstToken = next(tokenIterator)
        # Exhaust the generator so all tokens are created and linked before the
        # chain is re-walked via the links themselves.
        try:
            while next(tokenIterator):
                pass
        except StopIteration:
            pass

        if (not isinstance(firstToken, StartOfDocumentToken)):
            print(
                "{RED}First token is not StartOfDocumentToken: {token}{NOCOLOR}"
                .format(token=firstToken, **self.Foreground))
        if (firstToken.NextToken is None):
            print("{RED}First token has an open end.{NOCOLOR}".format(**self.Foreground))

        # Re-walk the chain via GetIterator; 'vhdlToken' is the token under test,
        # 'lastToken' its predecessor, 'newToken' the lookahead.
        tokenIterator = firstToken.GetIterator()
        lastToken = None
        vhdlToken = firstToken
        for newToken in tokenIterator:
            if (vhdlToken.NextToken is None):
                print("{RED}Token has an open end.{NOCOLOR}".format(**self.Foreground))
                print("{RED} Token: {token}{NOCOLOR}".format(token=vhdlToken, **self.Foreground))
            elif ((vhdlToken is not firstToken) and (lastToken.NextToken is not vhdlToken)):
                # Forward link from the predecessor is broken; dump both ends.
                print(
                    "{RED}Last token is not connected to the current token.{NOCOLOR}"
                    .format(**self.Foreground))
                print("{RED} Curr: {token}{NOCOLOR}".format(token=vhdlToken, **self.Foreground))
                print("{DARK_RED} Prev: {token}{NOCOLOR}".format(token=vhdlToken.PreviousToken, **self.Foreground))
                print("{RED} Last: {token}{NOCOLOR}".format(token=lastToken, **self.Foreground))
                print("{RED} Next: {token}{NOCOLOR}".format(token=lastToken.NextToken, **self.Foreground))
                if (lastToken.NextToken is None):
                    print("{DARK_RED} Next: {token}{NOCOLOR}".format(token="--------", **self.Foreground))
                else:
                    print("{DARK_RED} Next: {token}{NOCOLOR}".format(token=lastToken.NextToken.NextToken, **self.Foreground))
                if (vhdlToken.PreviousToken is None):
                    print("{DARK_RED} Prev: {token}{NOCOLOR}".format(token="--------", **self.Foreground))
                else:
                    print("{DARK_RED} Prev: {token}{NOCOLOR}".format(token=vhdlToken.PreviousToken.PreviousToken, **self.Foreground))
            elif (vhdlToken.PreviousToken is not lastToken):
                # Backward link of the current token is broken.
                print(
                    "{RED}Current token is not connected to lastToken.{NOCOLOR}"
                    .format(**self.Foreground))
                print("{RED} Curr: {token}{NOCOLOR}".format(token=vhdlToken, **self.Foreground))
                print("{RED} Prev: {token}{NOCOLOR}".format(token=vhdlToken.PreviousToken, **self.Foreground))
                print("{RED} Last: {token}{NOCOLOR}".format(token=lastToken, **self.Foreground))
                print("{DARK_RED} Next: {token}{NOCOLOR}".format(token=lastToken.NextToken, **self.Foreground))

            lastToken = vhdlToken
            vhdlToken = newToken
            if isinstance(newToken, EndOfDocumentToken):
                print(
                    "{GREEN}No double-linking errors in token stream found.{NOCOLOR}"
                    .format(**self.Foreground))
                break
        else:
            # Loop exhausted without reaching the break above.
            print("{RED}No EndOfDocumentToken found.{NOCOLOR}".format(**self.Foreground))

        if (not isinstance(vhdlToken, EndOfDocumentToken)):
            print(
                "{RED}Last token is not EndOfDocumentToken: {token}{NOCOLOR}"
                .format(token=lastToken, **self.Foreground))
        elif (vhdlToken.PreviousToken is not lastToken):
            print(
                "{RED}EndOfDocumentToken is not connected to lastToken.{NOCOLOR}"
                .format(**self.Foreground))
            print("{RED} Curr: {token}{NOCOLOR}".format(token=vhdlToken, **self.Foreground))
            print("{RED} Prev: {token}{NOCOLOR}".format(token=vhdlToken.PreviousToken, **self.Foreground))
            print("{RED} Last: {token}{NOCOLOR}".format(token=lastToken, **self.Foreground))
            print("{DARK_RED} Next: {token}{NOCOLOR}".format(token=lastToken.NextToken, **self.Foreground))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

    self.exit()
def HandleBlockStreaming(self, args):
    """Print every block of the given VHDL file color-coded by block category;
    verbose mode additionally dumps each block's tokens."""
    self.PrintHeadline()
    sourceFile = Path(args.Filename)
    if (not sourceFile.exists()):
        print("File '{0!s}' does not exist.".format(sourceFile))
    with sourceFile.open('r') as fileHandle:
        sourceText = fileHandle.read()

    from pyVHDLParser.Token.Parser import Tokenizer
    from pyVHDLParser.Blocks import TokenToBlockParser
    from pyVHDLParser.Base import ParserException
    from pyVHDLParser.Blocks import CommentBlock
    from pyVHDLParser.Blocks.Common import LinebreakBlock, IndentationBlock
    from pyVHDLParser.Blocks.List import GenericList, PortList
    from pyVHDLParser.Blocks.InterfaceObject import InterfaceConstantBlock, InterfaceSignalBlock
    from pyVHDLParser.Blocks.Structural import Entity

    blockStream = TokenToBlockParser.Transform(Tokenizer.GetVHDLTokenizer(sourceText))
    # Ordered (classes, color) pairs; the first match decides the color.
    # NOTE(review): the original listed Entity.NameBlock twice in the DARK_RED
    # group — possibly Entity.BeginBlock was intended; verify upstream.
    colorMap = (
        ((LinebreakBlock, IndentationBlock), "DARK_GRAY"),
        ((CommentBlock,), "DARK_GREEN"),
        ((Entity.NameBlock, Entity.EndBlock), "DARK_RED"),
        ((GenericList.OpenBlock, GenericList.DelimiterBlock, GenericList.CloseBlock), "DARK_BLUE"),
        ((PortList.OpenBlock, PortList.DelimiterBlock, PortList.CloseBlock), "DARK_CYAN"),
        ((InterfaceConstantBlock, InterfaceSignalBlock), "BLUE"),
    )
    try:
        for currentBlock in blockStream:
            color = "YELLOW"
            for classes, candidate in colorMap:
                if isinstance(currentBlock, classes):
                    color = candidate
                    break
            # Builds e.g. "{DARK_GRAY}{block}{NOCOLOR}" before the final formatting.
            self.WriteNormal("{{{0}}}{{block}}{{NOCOLOR}}".format(color).format(block=currentBlock, **self.Foreground))
            for currentToken in currentBlock:
                self.WriteVerbose(str(currentToken))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

    self.exit()
def HandleCheckBlockStreaming(self, args):
    # Transform the given file into a block stream and verify the double-linking
    # of all tokens across block boundaries, reporting broken links with context.
    self.PrintHeadline()
    file = Path(args.Filename)
    if (not file.exists()):
        print("File '{0!s}' does not exist.".format(file))
    with file.open('r') as fileHandle:
        content = fileHandle.read()

    from pyVHDLParser.Base import ParserException
    from pyVHDLParser.Token import StartOfDocumentToken, EndOfDocumentToken, Token
    from pyVHDLParser.Token.Parser import Tokenizer
    from pyVHDLParser.Blocks import Block, StartOfDocumentBlock, EndOfDocumentBlock
    from pyVHDLParser.Blocks import TokenToBlockParser

    vhdlTokenStream = Tokenizer.GetVHDLTokenizer(content)
    vhdlBlockStream = TokenToBlockParser.Transform(vhdlTokenStream)
    try:
        blockIterator = iter(vhdlBlockStream)
        firstBlock = next(blockIterator)
        self.WriteVerbose(str(firstBlock))
        if (not isinstance(firstBlock, StartOfDocumentBlock)):
            self.WriteError(
                "{RED}First block is not StartOfDocumentBlock: {block}{NOCOLOR}"
                .format(block=firstBlock, **self.Foreground))
            self.WriteError("{YELLOW} Block: {block}{NOCOLOR}".format(block=firstBlock, **self.Foreground))
        startToken = firstBlock.StartToken
        self.WriteDebug(str(startToken))
        if (not isinstance(startToken, StartOfDocumentToken)):
            self.WriteError(
                "{RED}First token is not StartOfDocumentToken: {token}{NOCOLOR}"
                .format(token=startToken, **self.Foreground))
            self.WriteError("{YELLOW} Token: {token}{NOCOLOR}".format(token=startToken, **self.Foreground))

        # 'lastToken' tracks the predecessor across block boundaries so links are
        # also checked between the last token of one block and the first of the next.
        lastBlock: Block = firstBlock
        endBlock: Block = None
        lastToken: Token = startToken
        for vhdlBlock in blockIterator:
            self.WriteNormal(str(vhdlBlock))
            if isinstance(vhdlBlock, EndOfDocumentBlock):
                self.WriteDebug("{GREEN}Found EndOfDocumentBlock...{NOCOLOR}".format(**self.Foreground))
                endBlock = vhdlBlock
                break

            tokenIterator = iter(vhdlBlock)
            for token in tokenIterator:
                self.WriteVerbose(str(token))

                # if (token.NextToken is None):
                #   self.WriteError("{RED}Token has an open end (NextToken).{NOCOLOR}".format(**self.Foreground))
                #   self.WriteError("{YELLOW} Token: {token}{NOCOLOR}".format(token=token, **self.Foreground))
                # el
                if (lastToken.NextToken is not token):
                    # Forward link from the predecessor is broken.
                    self.WriteError(
                        "{RED}Last token is not connected to the current token.{NOCOLOR}"
                        .format(**self.Foreground))
                    self.WriteError("{YELLOW} Last: {token!s}{NOCOLOR}".format(token=lastToken, **self.Foreground))
                    self.WriteError("{YELLOW} Next: {token!s}{NOCOLOR}".format(token=lastToken.NextToken, **self.Foreground))
                    self.WriteError("")
                    self.WriteError("{YELLOW} Cur.: {token!s}{NOCOLOR}".format(token=token, **self.Foreground))
                    self.WriteError("")
                if (token.PreviousToken is None):
                    self.WriteError(
                        "{RED}Token has an open end (PreviousToken).{NOCOLOR}"
                        .format(**self.Foreground))
                    self.WriteError("{YELLOW} Token: {token}{NOCOLOR}".format(token=token, **self.Foreground))
                elif (token.PreviousToken is not lastToken):
                    # Backward link of the current token is broken.
                    print(
                        "{RED}Current token is not connected to lastToken.{NOCOLOR}"
                        .format(**self.Foreground))
                    # print("{RED} Block: {block}{NOCOLOR}".format(block=vhdlBlock, **self.Foreground))
                    print("{YELLOW} Cur.: {token}{NOCOLOR}".format(token=token, **self.Foreground))
                    print("{YELLOW} Prev: {token}{NOCOLOR}".format(token=token.PreviousToken, **self.Foreground))
                    self.WriteError("")
                    print("{YELLOW} Last: {token}{NOCOLOR}".format(token=lastToken, **self.Foreground))
                    print("{YELLOW} Next: {token}{NOCOLOR}".format(token=lastToken.NextToken, **self.Foreground))
                    self.WriteError("")

                lastToken = token
            lastBlock = vhdlBlock
        else:
            # Loop exhausted without reaching the break above.
            self.WriteError(
                "{RED}No EndOfDocumentBlock found.{NOCOLOR}".format(
                    **self.Foreground))

        if (not isinstance(endBlock, EndOfDocumentBlock)):
            self.WriteError(
                "{RED}Last block is not EndOfDocumentBlock: {block}{NOCOLOR}"
                .format(block=endBlock, **self.Foreground))
            self.WriteError("{YELLOW} Block: {block}{NOCOLOR}".format(block=firstBlock, **self.Foreground))
        elif (not isinstance(endBlock.EndToken, EndOfDocumentToken)):
            self.WriteError(
                "{RED}Last token is not EndOfDocumentToken: {token}{NOCOLOR}"
                .format(token=endBlock.EndToken, **self.Foreground))
            self.WriteError("{YELLOW} Token: {token}{NOCOLOR}".format(token=endBlock.EndToken, **self.Foreground))
    except ParserException as ex:
        print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
    except NotImplementedError as ex:
        print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

    self.WriteNormal("")
    self.WriteNormal("{CYAN}All checks are done.{NOCOLOR}".format(**self.Foreground))
    self.exit()