Example #1
0
    def find_imports(tokens):
        """
        Find imports

        Scan the token stream and collect the identifier value following
        each IMPORT token; malformed or truncated imports are logged as
        warnings and skipped.
        """
        imported = []
        stream = TokenStream(tokens)
        while not stream.eof:
            tok = stream.pop()
            if tok.kind != IMPORT:
                continue

            import_tok = tok
            try:
                tok = stream.pop()
            except EOFException:
                # Stream ended right after the import keyword.
                LocationException.warning(
                    "EOF reached when parsing import", location=import_tok.location
                ).log(LOGGER)
                continue

            if tok.kind == IDENTIFIER:
                imported.append(tok.value)
            else:
                LocationException.warning(
                    "import bad argument", tok.location
                ).log(LOGGER)
        return imported
Example #2
0
    def _preprocess(self,
                    tokens,
                    defines=None,
                    include_paths=None,
                    included_files=None):
        """
        Pre-process tokens while filling in defines

        Non-preprocessor tokens pass through untouched; PREPROCESSOR
        tokens are expanded via self.preprocessor, with LocationException
        failures logged rather than propagated.
        """
        if defines is None:
            defines = {}
        if include_paths is None:
            include_paths = []
        if included_files is None:
            included_files = []

        output = []
        stream = TokenStream(tokens)
        while not stream.eof:
            tok = stream.pop()
            if tok.kind == PREPROCESSOR:
                try:
                    output.extend(self.preprocessor(tok, stream, defines,
                                                    include_paths,
                                                    included_files))
                except LocationException as exc:
                    exc.log(LOGGER)
            else:
                output.append(tok)

        return output
Example #3
0
File: parser.py Project: wzab/vunit
    def find_instances(tokens):
        """
        Find module instances

        An instance is recognized as an identifier (the module name)
        followed by either a parameter list marker (HASH) or another
        identifier (the instance label).
        """
        found = []
        stream = TokenStream(tokens)
        while not stream.eof:
            tok = stream.pop()

            if tok.kind in (BEGIN, END):
                # Block labels are consumed so they are not mistaken
                # for module names.
                _parse_block_label(stream)
                continue

            if tok.kind != IDENTIFIER:
                continue
            name = tok.value

            try:
                follower = stream.pop()
            except EOFException:
                continue

            if follower.kind in (HASH, IDENTIFIER):
                found.append(name)

        return found
Example #4
0
    def find_instances(tokens):
        """
        Find module instances

        Returns the module names of all recognized instantiations: an
        identifier followed by HASH (parameters) or another identifier.
        """
        results = []
        stream = TokenStream(tokens)
        while not stream.eof:
            token = stream.pop()
            kind = token.kind

            if kind in (BEGIN, END):
                # Consume the block label so it is not treated as a
                # module name on the next iteration.
                _parse_block_label(stream)
            elif kind == IDENTIFIER:
                modulename = token.value
                try:
                    nxt = stream.pop()
                except EOFException:
                    continue
                if nxt.kind == HASH or nxt.kind == IDENTIFIER:
                    results.append(modulename)

        return results
Example #5
0
 def find_imports(tokens):
     """
     Find imports

     After each IMPORT keyword, skip ahead to the next identifier and
     record its value; a pop() returning None (end of stream) is ignored.
     """
     stream = TokenStream(tokens)
     names = []
     while not stream.eof:
         if stream.pop().kind == tokenizer.IMPORT:
             stream.skip_until(tokenizer.IDENTIFIER)
             ident = stream.pop()
             if ident is not None:
                 names.append(ident.value)
     return names
Example #6
0
def preprocess(tokens, defines=None, include_paths=None, included_files=None):
    """
    Pre-process tokens while filling in defines

    :param tokens: the token stream to expand
    :param defines: mapping of macro name -> macro object; mutated in
                    place as `define directives are encountered
    :param include_paths: directories searched, in order, for `include files
    :param included_files: accumulates full paths of every included file
                           (recursively); mutated in place
    :returns: the expanded list of tokens
    :raises FileNotFoundError: when an included file is not found on any
                               include path
    """
    stream = TokenStream(tokens)
    include_paths = [] if include_paths is None else include_paths
    included_files = [] if included_files is None else included_files
    defines = {} if defines is None else defines
    result = []

    while not stream.eof:
        token = stream.pop()
        if not token.kind == tokenizer.PREPROCESSOR:
            result.append(token)
            continue

        if token.value == "define":
            macro = define(stream)
            defines[macro.name] = macro

        if token.value == "include":
            stream.skip_until(tokenizer.STRING)
            file_name = stream.pop().value

            # Search the include paths in order; first existing hit wins.
            full_name = None
            for include_path in include_paths:
                full_name = join(include_path, file_name)
                if exists(full_name):
                    break
            else:
                # Was `assert False`, which is stripped under `python -O`
                # and would let a missing include fail later inside open()
                # with a confusing error.
                raise FileNotFoundError(
                    "Could not find %r in include paths %r"
                    % (file_name, include_paths)
                )
            included_files.append(full_name)
            with open(full_name, "r") as fptr:
                included_tokens = tokenize(fptr.read())
            # Recurse so nested includes and their defines are expanded too.
            result += preprocess(included_tokens, defines, include_paths, included_files)

        elif token.value in defines:
            macro = defines[token.value]
            # Zero-argument macros expand without consuming actuals.
            if macro.num_args == 0:
                values = []
            else:
                values = parse_macro_actuals(stream)
            result += macro.expand(values)

    return result
Example #7
0
File: parser.py Project: wzab/vunit
    def find_package_references(tokens):
        """
        Find package_references pkg::func

        Import statements are skipped wholesale; a reference is an
        identifier immediately followed by a DOUBLE_COLON token.
        """
        refs = []
        stream = TokenStream(tokens)
        while not stream.eof:
            tok = stream.pop()

            if tok.kind == IMPORT:
                # Skip past the entire import statement.
                stream.skip_until(SEMI_COLON)
                if not stream.eof:
                    stream.pop()
                continue

            if tok.kind != IDENTIFIER or stream.eof:
                continue

            if stream.pop().kind == DOUBLE_COLON:
                refs.append(tok.value)
                # Consume the rest of the scoped path (a::b::c).
                stream.skip_while(IDENTIFIER, DOUBLE_COLON)
        return refs
Example #8
0
    def _preprocess(self, tokens, defines=None, include_paths=None, included_files=None):
        """
        Pre-process tokens while filling in defines

        Ordinary tokens are copied to the output; PREPROCESSOR tokens are
        handed to self.preprocessor for expansion, and any
        LocationException raised there is logged instead of propagated.
        """
        defines = defines if defines is not None else {}
        include_paths = include_paths if include_paths is not None else []
        included_files = included_files if included_files is not None else []

        stream = TokenStream(tokens)
        result = []
        while not stream.eof:
            token = stream.pop()
            if token.kind != PREPROCESSOR:
                result.append(token)
            else:
                try:
                    expansion = self.preprocessor(
                        token, stream, defines, include_paths, included_files
                    )
                    result += expansion
                except LocationException as exc:
                    exc.log(LOGGER)

        return result
Example #9
0
    def find_package_references(tokens):
        """
        Find package_references pkg::func

        Collects every identifier that is directly followed by '::',
        skipping over import statements entirely.
        """
        results = []
        stream = TokenStream(tokens)
        while not stream.eof:
            current = stream.pop()
            if current.kind == IMPORT:
                stream.skip_until(SEMI_COLON)
                if not stream.eof:
                    stream.pop()  # consume the terminating semicolon

            elif current.kind == IDENTIFIER and not stream.eof:
                follower = stream.pop()
                if follower.kind == DOUBLE_COLON:
                    results.append(current.value)
                    # Swallow the remainder of the scoped name.
                    stream.skip_while(IDENTIFIER, DOUBLE_COLON)
        return results
Example #10
0
    def find_imports(tokens):
        """
        Find imports

        Returns the identifier following each IMPORT token; bad or
        truncated imports produce a logged warning and are skipped.
        """
        names = []
        stream = TokenStream(tokens)
        while not stream.eof:
            keyword = stream.pop()
            if keyword.kind != IMPORT:
                continue

            try:
                argument = stream.pop()
            except EOFException:
                # Stream ended immediately after the import keyword.
                LocationException.warning("EOF reached when parsing import",
                                          location=keyword.location).log(LOGGER)
                continue

            if argument.kind == IDENTIFIER:
                names.append(argument.value)
            else:
                LocationException.warning("import bad argument",
                                          argument.location).log(LOGGER)
        return names
Example #11
0
    def find_instances(tokens):
        """
        Find module instances

        An instantiation is an identifier (the module name) followed by
        either '#' (a parameter list) or another identifier (the
        instance label); pop() returning None marks end of stream.
        """
        instances = []
        stream = TokenStream(tokens)
        while not stream.eof:
            first = stream.pop()

            if first.kind != tokenizer.IDENTIFIER:
                continue

            second = stream.pop()
            if second is not None and second.kind in (tokenizer.HASH,
                                                      tokenizer.IDENTIFIER):
                instances.append(first.value)

        return instances