def visit_neuron(self, node):
    """
    Visit a single neuron and create the corresponding global scope,
    pre-populated with all predefined variables, functions and types.
    Also makes implicit ODEs explicit before scope construction begins.

    :param node: the neuron AST node to process.
    :type node: ast_neuron
    """
    # set current processed neuron so subsequent log messages are attributed to it
    Logger.set_current_neuron(node)
    code, message = Messages.get_start_building_symbol_table()
    Logger.log_message(neuron=node, code=code,
                       error_position=node.get_source_position(),
                       message=message, log_level=LoggingLevel.INFO)
    # before starting the work on the neuron, make everything which was implicit explicit;
    # a model without an equations block skips this step
    if node.get_equations_blocks() is not None:
        make_implicit_odes_explicit(node.get_equations_blocks())
    scope = Scope(scope_type=ScopeType.GLOBAL,
                  source_position=node.get_source_position())
    node.update_scope(scope)
    node.get_body().update_scope(scope)
    # seed the global scope with all predefined elements
    # (iterate values directly instead of keys + lookup)
    for symbol in PredefinedVariables.get_variables().values():
        node.get_scope().add_symbol(symbol)
    for symbol in PredefinedFunctions.get_function_symbols().values():
        node.get_scope().add_symbol(symbol)
    for symbol in PredefinedTypes.get_types().values():
        node.get_scope().add_symbol(symbol)
def visit_function(self, node):
    """
    Visit a single function block and set up its function-level scope.

    :param node: a function block object.
    :type node: ast_function
    """
    # entering a function body: record that we are now inside a local block
    self.block_type_stack.push(BlockType.LOCAL)
    # create the function's symbol; param_types stays empty here and is
    # presumably completed later (stack is popped in the endvisit method)
    func_symbol = FunctionSymbol(scope=node.get_scope(),
                                 element_reference=node,
                                 param_types=list(),
                                 name=node.get_name(),
                                 is_predefined=False,
                                 return_type=None)
    # put it on the stack for the endvisit method
    self.symbol_stack.push(func_symbol)
    func_symbol.set_comment(node.get_comment())
    node.get_scope().add_symbol(func_symbol)
    # open a dedicated scope for the function body
    function_scope = Scope(scope_type=ScopeType.FUNCTION,
                           enclosing_scope=node.get_scope(),
                           source_position=node.get_source_position())
    node.get_scope().add_scope(function_scope)
    # put it on the stack for the endvisit method
    self.scope_stack.push(function_scope)
    # visit each parameter's data type first to ensure that the variable
    # symbol can receive a combined data type
    for parameter in node.get_parameters():
        parameter.get_data_type().update_scope(function_scope)
    if node.has_return_type():
        node.get_return_type().update_scope(function_scope)
    node.get_block().update_scope(function_scope)
def visit_neuron(self, node):
    """
    Visit a single neuron and create the corresponding global scope,
    pre-populated with all predefined variables, functions and types.

    NOTE(review): a second definition of ``visit_neuron`` exists in this
    file; the one defined later shadows the earlier one — confirm which
    version is intended to be kept.

    :param node: the neuron AST node to process.
    :type node: ast_neuron
    """
    # set current processed neuron so subsequent log messages are attributed to it
    Logger.set_current_node(node)
    code, message = Messages.get_start_building_symbol_table()
    Logger.log_message(node=node, code=code,
                       error_position=node.get_source_position(),
                       message=message, log_level=LoggingLevel.INFO)
    scope = Scope(scope_type=ScopeType.GLOBAL,
                  source_position=node.get_source_position())
    node.update_scope(scope)
    node.get_body().update_scope(scope)
    # seed the global scope with all predefined elements
    # (iterate values directly instead of keys + lookup)
    for symbol in PredefinedVariables.get_variables().values():
        node.get_scope().add_symbol(symbol)
    for symbol in PredefinedFunctions.get_function_symbols().values():
        node.get_scope().add_symbol(symbol)
    for symbol in PredefinedTypes.get_types().values():
        node.get_scope().add_symbol(symbol)
def visit_on_receive_block(self, node):
    """
    Visit a single onReceive block and attach a fresh nested scope to it.

    :param node: an onReceive block object.
    :type node: ASTOnReceiveBlock
    """
    # mark that we are entering a local block
    self.block_type_stack.push(BlockType.LOCAL)
    # create the onReceive scope nested inside the block's current scope
    receive_scope = Scope(scope_type=ScopeType.ON_RECEIVE,
                          enclosing_scope=node.get_scope(),
                          source_position=node.get_source_position())
    node.get_scope().add_scope(receive_scope)
    node.get_block().update_scope(receive_scope)
def visit_update_block(self, node):
    """
    Visit a single update block and attach a fresh nested scope to it.

    :param node: an update block object.
    :type node: ASTDynamics
    """
    # mark that we are entering a local block
    self.block_type_stack.push(BlockType.LOCAL)
    # create the update scope nested inside the block's current scope
    update_scope = Scope(scope_type=ScopeType.UPDATE,
                         enclosing_scope=node.get_scope(),
                         source_position=node.get_source_position())
    node.get_scope().add_scope(update_scope)
    node.get_block().update_scope(update_scope)