Example #1
    def test(self):
        for filename in os.listdir(
                os.path.realpath(
                    os.path.join(os.path.dirname(__file__),
                                 os.path.join('..', 'models')))):
            if filename.endswith(".nestml"):
                print('Start creating AST for ' + filename + ' ...')
                input_file = FileStream(
                    os.path.join(
                        os.path.dirname(__file__),
                        os.path.join(os.path.join('..', 'models'), filename)))
                lexer = PyNestMLLexer(input_file)
                lexer._errHandler = BailErrorStrategy()
                lexer._errHandler.reset(lexer)

                # create a token stream
                stream = CommonTokenStream(lexer)
                stream.fill()

                # parse the file
                parser = PyNestMLParser(stream)
                parser._errHandler = BailErrorStrategy()
                parser._errHandler.reset(parser)

                compilation_unit = parser.nestMLCompilationUnit()

                # now build the meta_model
                ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
                ast = ast_builder_visitor.visit(compilation_unit)
                assert isinstance(ast, ASTNestMLCompilationUnit)
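The snippets on this page omit their import statements. The lexer -> token stream -> parser -> AST-builder pipeline they all share can be condensed into the minimal sketch below; the pynestml module paths and the helper name parse_nestml_file are assumptions and may differ between NESTML versions.

from antlr4 import FileStream, CommonTokenStream
from antlr4.error.ErrorStrategy import BailErrorStrategy

# Assumed module paths; adjust to the NESTML version in use.
from pynestml.generated.PyNestMLLexer import PyNestMLLexer
from pynestml.generated.PyNestMLParser import PyNestMLParser
from pynestml.visitors.ast_builder_visitor import ASTBuilderVisitor
from pynestml.meta_model.ast_nestml_compilation_unit import ASTNestMLCompilationUnit


def parse_nestml_file(path):
    """Parse a single .nestml file and return its ASTNestMLCompilationUnit."""
    lexer = PyNestMLLexer(FileStream(path))
    lexer._errHandler = BailErrorStrategy()   # bail out on the first lexer error
    lexer._errHandler.reset(lexer)

    # create a token stream and fill it (the token list is later handed to the AST builder)
    stream = CommonTokenStream(lexer)
    stream.fill()

    # parse the file
    parser = PyNestMLParser(stream)
    parser._errHandler = BailErrorStrategy()  # bail out on the first parse error
    parser._errHandler.reset(parser)
    compilation_unit = parser.nestMLCompilationUnit()

    # build the meta_model (AST) from the parse tree and the token list
    ast = ASTBuilderVisitor(stream.tokens).visit(compilation_unit)
    assert isinstance(ast, ASTNestMLCompilationUnit)
    return ast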
Example #2
    def _test_single_input_path(cls, input_path):
        print('Start creating AST for ' + input_path + ' ...')
        input_file = FileStream(input_path)
        lexer = PyNestMLLexer(input_file)
        lexer._errHandler = BailErrorStrategy()
        lexer._errHandler.reset(lexer)

        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()

        # parse the file
        parser = PyNestMLParser(stream)
        parser._errHandler = BailErrorStrategy()
        parser._errHandler.reset(parser)

        # process the comments
        compilation_unit = parser.nestMLCompilationUnit()

        # now build the meta_model
        ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
        ast = ast_builder_visitor.visit(compilation_unit)
        assert isinstance(ast, ASTNestMLCompilationUnit)

        # now, do the actual test for clone()
        ast_copy = ast.clone()
        assert str(ast) == str(ast_copy)
        ast.get_neuron_list()[0].name = "foo"
        ast_copy.get_neuron_list()[0].name = "bar"
        assert str(ast) != str(ast_copy)
        ast_copy.get_neuron_list()[0].name = "foo"
        assert str(ast) == str(ast_copy)
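The assertions above rely on clone() returning a fully independent deep copy of the compilation unit. A condensed illustration of that contract, reusing the hypothetical parse_nestml_file helper sketched under Example #1 (the model path is purely illustrative):

# Illustration only; parse_nestml_file and the model path are assumptions.
ast = parse_nestml_file("models/iaf_psc_exp.nestml")
ast_copy = ast.clone()
assert str(ast) == str(ast_copy)             # the clone renders identically
ast.get_neuron_list()[0].name = "renamed"
assert str(ast) != str(ast_copy)             # mutating the original does not affect the clone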
Example #3
    def test(self):
        # print('Start creating AST for ' + filename + ' ...'),
        input_file = FileStream(
            os.path.join(
                os.path.realpath(
                    os.path.join(os.path.dirname(__file__), 'resources')),
                'CommentTest.nestml'))
        lexer = PyNestMLLexer(input_file)
        lexer._errHandler = BailErrorStrategy()
        lexer._errHandler.reset(lexer)

        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()

        # parse the file
        parser = PyNestMLParser(stream)
        parser._errHandler = BailErrorStrategy()
        parser._errHandler.reset(parser)

        # process the comments
        compilation_unit = parser.nestMLCompilationUnit()

        # now build the meta_model
        ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
        ast = ast_builder_visitor.visit(compilation_unit)
        neuron_body_elements = (
            ast.get_neuron_list()[0].get_body().get_body_elements())

        # check if init values comment is correctly detected
        assert (neuron_body_elements[0].get_comment()[0] == 'state comment ok')

        # check that all declaration comments are detected
        comments = neuron_body_elements[0].get_declarations()[0].get_comment()
        assert (comments[0] == 'pre comment 1 ok')
        assert (comments[1] == 'pre comment 2 ok')
        assert (comments[2] == 'inline comment ok')
        assert (comments[3] == 'post comment 1 ok')
        assert (comments[4] == 'post comment 2 ok')
        assert ('pre comment not ok' not in comments)
        assert ('post comment not ok' not in comments)

        # check that equation block comment is detected
        self.assertEqual(neuron_body_elements[1].get_comment()[0],
                         'equations comment ok')
        # check that parameters block comment is detected
        self.assertEqual(neuron_body_elements[2].get_comment()[0],
                         'parameters comment ok')
        # check that internals block comment is detected
        self.assertEqual(neuron_body_elements[3].get_comment()[0],
                         'internals comment ok')
        # check that input comment is detected
        self.assertEqual(neuron_body_elements[4].get_comment()[0],
                         'input comment ok')
        # check that output comment is detected
        self.assertEqual(neuron_body_elements[5].get_comment()[0],
                         'output comment ok')
        # check that update comment is detected
        self.assertEqual(neuron_body_elements[6].get_comment()[0],
                         'update comment ok')
Example #4
 def test(self):
     for filename in os.listdir(
             os.path.realpath(
                 os.path.join(os.path.dirname(__file__),
                              os.path.join('..', 'models')))):
         if filename.endswith(".nestml"):
             input_file = FileStream(
                 os.path.join(
                     os.path.dirname(__file__),
                     os.path.join(os.path.join('..', 'models'), filename)))
             lexer = PyNestMLLexer(input_file)
             # create a token stream
             stream = CommonTokenStream(lexer)
             stream.fill()
             # parse the file
             parser = PyNestMLParser(stream)
             # process the comments
             compilation_unit = parser.nestMLCompilationUnit()
             # create a new visitor and return the new AST
             ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
             ast = ast_builder_visitor.visit(compilation_unit)
             # update the corresponding symbol tables
             SymbolTable.initialize_symbol_table(ast.get_source_position())
             symbol_table_visitor = ASTSymbolTableVisitor()
             for neuron in ast.get_neuron_list():
                 neuron.accept(symbol_table_visitor)
                 SymbolTable.add_neuron_scope(name=neuron.get_name(),
                                              scope=neuron.get_scope())
             self.assertTrue(isinstance(ast, ASTNestMLCompilationUnit))
     return
Example #5
 def test(self):
     for filename in os.listdir(os.path.realpath(os.path.join(os.path.dirname(__file__),
                                                              os.path.join('..', 'models')))):
         if filename.endswith(".nestml"):
             input_file = FileStream(
                 os.path.join(os.path.dirname(__file__), os.path.join(os.path.join('..', 'models'), filename)))
             lexer = PyNestMLLexer(input_file)
             # create a token stream
             stream = CommonTokenStream(lexer)
             stream.fill()
             # parse the file
             parser = PyNestMLParser(stream)
             # process the comments
             compilation_unit = parser.nestMLCompilationUnit()
             # create a new visitor and return the new AST
             ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
             ast = ast_builder_visitor.visit(compilation_unit)
             # update the corresponding symbol tables
             SymbolTable.initialize_symbol_table(ast.get_source_position())
             symbol_table_visitor = ASTSymbolTableVisitor()
             for neuron in ast.get_neuron_list():
                 neuron.accept(symbol_table_visitor)
                 SymbolTable.add_neuron_scope(name=neuron.get_name(), scope=neuron.get_scope())
             self.assertTrue(isinstance(ast, ASTNestMLCompilationUnit))
     return
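Beyond building the AST, Examples #4 and #5 also populate the global symbol table. Stripped of the directory walk, that step reduces to the following sketch (the pynestml module paths are assumptions; ast is the compilation unit produced by the parse pipeline above):

# Assumed module paths; 'ast' is an ASTNestMLCompilationUnit built as in the examples above.
from pynestml.symbol_table.symbol_table import SymbolTable
from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor

SymbolTable.initialize_symbol_table(ast.get_source_position())
for neuron in ast.get_neuron_list():
    neuron.accept(ASTSymbolTableVisitor())
    SymbolTable.add_neuron_scope(name=neuron.get_name(), scope=neuron.get_scope())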
Example #6
 def test(self):
     # print('Start Expression Parser Test...'),
     input_file = FileStream(
         os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources')),
                      'ExpressionCollection.nestml'))
     lexer = PyNestMLLexer(input_file)
     # create a token stream
     stream = CommonTokenStream(lexer)
     stream.fill()
     # parse the file
     parser = PyNestMLParser(stream)
     compilation_unit = parser.nestMLCompilationUnit()
     # print('done')
     ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
     ast = ast_builder_visitor.visit(compilation_unit)
     # print('done')
     self.assertTrue(isinstance(ast, ASTNestMLCompilationUnit))
Example #7
 def test(cls):
     for filename in os.listdir(os.path.realpath(os.path.join(os.path.dirname(__file__),
                                                              os.path.join('..', 'models')))):
         if filename.endswith(".nestml"):
             # print('Start creating AST for ' + filename + ' ...'),
             input_file = FileStream(
                 os.path.join(os.path.dirname(__file__), os.path.join(os.path.join('..', 'models'), filename)))
             lexer = PyNestMLLexer(input_file)
             # create a token stream
             stream = CommonTokenStream(lexer)
             stream.fill()
             # parse the file
             parser = PyNestMLParser(stream)
             # process the comments
             compilation_unit = parser.nestMLCompilationUnit()
             # now build the meta_model
             ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
             ast = ast_builder_visitor.visit(compilation_unit)
             assert isinstance(ast, ASTNestMLCompilationUnit)
Example #8
 def test(self):
     # print('Start creating AST for ' + filename + ' ...'),
     input_file = FileStream(
         os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources')),
                      'CommentTest.nestml'))
     lexer = PyNestMLLexer(input_file)
     # create a token stream
     stream = CommonTokenStream(lexer)
     stream.fill()
     # parse the file
     parser = PyNestMLParser(stream)
     # process the comments
     compilation_unit = parser.nestMLCompilationUnit()
     # now build the meta_model
     ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
     ast = ast_builder_visitor.visit(compilation_unit)
     neuron_body_elements = ast.get_neuron_list()[0].get_body().get_body_elements()
     # check if init values comment is correctly detected
     assert (neuron_body_elements[0].get_comment()[0] == 'init_values comment ok')
     # check that all declaration comments are detected
     comments = neuron_body_elements[0].get_declarations()[0].get_comment()
     assert (comments[0] == 'pre comment 1 ok')
     assert (comments[1] == 'pre comment 2 ok')
     assert (comments[2] == 'inline comment ok')
     assert (comments[3] == 'post comment 1 ok')
     assert (comments[4] == 'post comment 2 ok')
     assert ('pre comment not ok' not in comments)
     assert ('post comment not ok' not in comments)
     # check that equation block comment is detected
     self.assertEqual(neuron_body_elements[1].get_comment()[0], 'equations comment ok')
     # check that parameters block comment is detected
     self.assertEqual(neuron_body_elements[2].get_comment()[0], 'parameters comment ok')
     # check that internals block comment is detected
     self.assertEqual(neuron_body_elements[3].get_comment()[0], 'internals comment ok')
     # check that input comment is detected
     self.assertEqual(neuron_body_elements[4].get_comment()[0], 'input comment ok')
     # check that output comment is detected
     self.assertEqual(neuron_body_elements[5].get_comment()[0], 'output comment ok')
     # check that update comment is detected
     self.assertEqual(neuron_body_elements[6].get_comment()[0], 'update comment ok')
Example #9
    def test(self):
        input_file = FileStream(
            os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources')),
                         'ExpressionCollection.nestml'))
        lexer = PyNestMLLexer(input_file)
        lexer._errHandler = BailErrorStrategy()
        lexer._errHandler.reset(lexer)

        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()

        # parse the file
        parser = PyNestMLParser(stream)
        parser._errHandler = BailErrorStrategy()
        parser._errHandler.reset(parser)
        compilation_unit = parser.nestMLCompilationUnit()
        assert compilation_unit is not None

        ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
        ast = ast_builder_visitor.visit(compilation_unit)
        self.assertTrue(isinstance(ast, ASTNestMLCompilationUnit))
Example #10
    def test(self):
        # print('Start special block parsing and AST-building test...'),
        input_file = FileStream(
            os.path.join(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources')),
                                      'BlockTest.nestml')))
        lexer = PyNestMLLexer(input_file)
        lexer._errHandler = BailErrorStrategy()
        lexer._errHandler.reset(lexer)

        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()

        # parse the file
        parser = PyNestMLParser(stream)
        parser._errHandler = BailErrorStrategy()
        parser._errHandler.reset(parser)

        compilation_unit = parser.nestMLCompilationUnit()
        ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
        ast = ast_builder_visitor.visit(compilation_unit)
        self.assertTrue(isinstance(ast, ASTNestMLCompilationUnit))
Example #11
    def parse_model(cls, file_path=None):
        """
        Parses the given model file and returns its meta_model representation.
        :param file_path: the path to the file to be parsed.
        :type file_path: str
        :return: a new ASTNESTMLCompilationUnit object.
        :rtype: ASTNestMLCompilationUnit
        """
        try:
            input_file = FileStream(file_path)
        except IOError:
            code, message = Messages.get_input_path_not_found(path=file_path)
            Logger.log_message(node=None, code=None, message=message,
                               error_position=None, log_level=LoggingLevel.ERROR)
            return
        code, message = Messages.get_start_processing_file(file_path)
        Logger.log_message(node=None, code=code, message=message, error_position=None, log_level=LoggingLevel.INFO)

        # create a lexer and hand over the input
        lexer = PyNestMLLexer()
        lexer.removeErrorListeners()
        lexer.addErrorListener(ConsoleErrorListener())
        lexerErrorListener = NestMLErrorListener()
        lexer.addErrorListener(lexerErrorListener)
        # lexer._errHandler = BailErrorStrategy()  # N.B. uncomment this line and the next to halt immediately on lexer errors
        # lexer._errHandler.reset(lexer)
        lexer.inputStream = input_file
        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()
        if lexerErrorListener._error_occurred:
            code, message = Messages.get_lexer_error()
            Logger.log_message(node=None, code=None, message=message,
                               error_position=None, log_level=LoggingLevel.ERROR)
            return
        # parse the file
        parser = PyNestMLParser(None)
        parser.removeErrorListeners()
        parser.addErrorListener(ConsoleErrorListener())
        parserErrorListener = NestMLErrorListener()
        parser.addErrorListener(parserErrorListener)
        # parser._errHandler = BailErrorStrategy()	# N.B. uncomment this line and the next to halt immediately on parse errors
        # parser._errHandler.reset(parser)
        parser.setTokenStream(stream)
        compilation_unit = parser.nestMLCompilationUnit()
        if parserErrorListener._error_occurred:
            code, message = Messages.get_parser_error()
            Logger.log_message(node=None, code=None, message=message,
                               error_position=None, log_level=LoggingLevel.ERROR)
            return

        # create a new visitor and return the new AST
        ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
        ast = ast_builder_visitor.visit(compilation_unit)

        # create and update the corresponding symbol tables
        SymbolTable.initialize_symbol_table(ast.get_source_position())
        for neuron in ast.get_neuron_list():
            neuron.accept(ASTSymbolTableVisitor())
            SymbolTable.add_neuron_scope(neuron.get_name(), neuron.get_scope())

        # store source paths
        for neuron in ast.get_neuron_list():
            neuron.file_path = file_path
        ast.file_path = file_path

        return ast
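The snippet above does not show its enclosing class. Assuming parse_model is exposed as a classmethod of a parser facade such as pynestml.utils.model_parser.ModelParser (an assumption, as is the model path below), a typical call site would look like:

# Hypothetical call site; ModelParser, its module path and the model path are assumptions.
from pynestml.utils.model_parser import ModelParser

ast = ModelParser.parse_model("models/iaf_psc_alpha.nestml")
if ast is None:
    print("lexer or parser error, see the log")   # parse_model returns None on errors
else:
    for neuron in ast.get_neuron_list():
        print(neuron.get_name(), neuron.file_path)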
Example #12
    def parse_model(cls, file_path=None):
        """
        Parses the given model file and returns its meta_model representation.
        :param file_path: the path to the file to be parsed.
        :type file_path: str
        :return: a new ASTNESTMLCompilationUnit object.
        :rtype: ASTNestMLCompilationUnit
        """
        try:
            input_file = FileStream(file_path)
        except IOError:
            code, message = Messages.get_input_path_not_found(path=file_path)
            Logger.log_message(neuron=None, code=None, message=message, error_position=None, log_level=LoggingLevel.ERROR)
            return
        code, message = Messages.get_start_processing_file(file_path)
        Logger.log_message(neuron=None, code=code, message=message, error_position=None, log_level=LoggingLevel.INFO)

        # create a lexer and hand over the input
        lexer = PyNestMLLexer()
        lexer.removeErrorListeners()
        lexer.addErrorListener(ConsoleErrorListener())
        lexerErrorListener = NestMLErrorListener()
        lexer.addErrorListener(lexerErrorListener)
        # lexer._errHandler = BailErrorStrategy()  # N.B. uncomment this line and the next to halt immediately on lexer errors
        # lexer._errHandler.reset(lexer)
        lexer.inputStream = input_file
        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()
        if lexerErrorListener._error_occurred:
            code, message = Messages.get_lexer_error()
            Logger.log_message(neuron=None, code=None, message=message, error_position=None, log_level=LoggingLevel.ERROR)
            return
        # parse the file
        parser = PyNestMLParser(None)
        parser.removeErrorListeners()
        parser.addErrorListener(ConsoleErrorListener())
        parserErrorListener = NestMLErrorListener()
        parser.addErrorListener(parserErrorListener)
        # parser._errHandler = BailErrorStrategy()	# N.B. uncomment this line and the next to halt immediately on parse errors
        # parser._errHandler.reset(parser)
        parser.setTokenStream(stream)
        compilation_unit = parser.nestMLCompilationUnit()
        if parserErrorListener._error_occurred:
            code, message = Messages.get_parser_error()
            Logger.log_message(neuron=None, code=None, message=message, error_position=None, log_level=LoggingLevel.ERROR)
            return

        # create a new visitor and return the new AST
        ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
        ast = ast_builder_visitor.visit(compilation_unit)

        # create and update the corresponding symbol tables
        SymbolTable.initialize_symbol_table(ast.get_source_position())
        log_to_restore = copy.deepcopy(Logger.get_log())
        counter = Logger.curr_message

        # replace all derived variables with computer-processable names, e.g. g_in''' -> g_in__ddd
        restore_differential_order = []
        for ode in ASTUtils.get_all(ast, ASTOdeEquation):
            lhs_variable = ode.get_lhs()
            if lhs_variable.get_differential_order() > 0:
                lhs_variable.differential_order = lhs_variable.get_differential_order() - 1
                restore_differential_order.append(lhs_variable)

        for shape in ASTUtils.get_all(ast, ASTOdeShape):
            lhs_variable = shape.get_variable()
            if lhs_variable.get_differential_order() > 0:
                lhs_variable.differential_order = lhs_variable.get_differential_order() - 1
                restore_differential_order.append(lhs_variable)

        # then replace the remaining variables
        for variable in ASTUtils.get_all(ast, ASTVariable):
            if variable.get_differential_order() > 0:
                variable.set_name(variable.get_name() + "__" + "d" * variable.get_differential_order())
                variable.differential_order = 0

        # the equations now have no ' on the lhs either; restore one differential order to compensate
        for ode_variable in restore_differential_order:
            ode_variable.differential_order = 1
        Logger.set_log(log_to_restore, counter)
        for neuron in ast.get_neuron_list():
            neuron.accept(ASTSymbolTableVisitor())
            SymbolTable.add_neuron_scope(neuron.get_name(), neuron.get_scope())
        return ast
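The renaming pass in this last example converts each order of differentiation into a d suffix, so a variable such as g_in''' (differential order 3) becomes g_in__ddd. A stand-alone illustration of that convention, using a plain stand-in instead of the real ASTVariable class:

# Illustration only; FakeVariable is a hypothetical stand-in, not the real ASTVariable.
class FakeVariable:
    def __init__(self, name, differential_order):
        self.name = name
        self.differential_order = differential_order

v = FakeVariable("g_in", 3)                        # corresponds to g_in'''
v.name = v.name + "__" + "d" * v.differential_order
v.differential_order = 0
assert v.name == "g_in__ddd"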