def visit_expression(self, node):
    """
    Visits an expression which uses a binary logic operator and updates the type.
    :param node: a single expression.
    :type node: ast_expression
    """
    lhs_type = node.get_lhs().type
    rhs_type = node.get_rhs().type
    lhs_type.referenced_object = node.get_lhs()
    rhs_type.referenced_object = node.get_rhs()
    if isinstance(lhs_type, BooleanTypeSymbol) and isinstance(rhs_type, BooleanTypeSymbol):
        node.type = PredefinedTypes.get_boolean_type()
        return
    # At least one operand is not boolean. Bug fix: the offending operand is the
    # NON-boolean one (the original picked lhs when lhs *was* boolean), and the
    # error position must point at the offending operand, not always at the lhs.
    if not isinstance(lhs_type, BooleanTypeSymbol):
        offending_type = lhs_type
    else:
        offending_type = rhs_type
    code, message = Messages.get_type_different_from_expected(BooleanTypeSymbol(), offending_type)
    Logger.log_message(code=code, message=message,
                       error_position=offending_type.referenced_object.get_source_position(),
                       log_level=LoggingLevel.ERROR)
    node.type = ErrorTypeSymbol()
    return
def endvisit_assignment(self, node):
    """Log the LHS/RHS type information of an assignment and report a type mismatch."""
    scope = node.get_scope()
    lhs_name = node.get_variable().get_name()
    rhs_expr = node.get_expression()
    lhs_symbol = scope.resolve_to_symbol(lhs_name, SymbolKind.VARIABLE)
    types_equal = lhs_symbol.get_type_symbol().equals(rhs_expr.type)
    message = ('line ' + str(rhs_expr.get_source_position())
               + ' : LHS = ' + lhs_symbol.get_type_symbol().get_symbol_name()
               + ' RHS = ' + rhs_expr.type.get_symbol_name()
               + ' Equal ? ' + str(types_equal))
    if isinstance(rhs_expr.type, UnitTypeSymbol):
        message += " Neuroscience Factor: " + \
                   str(UnitConverter().get_factor(rhs_expr.type.astropy_unit))
    Logger.log_message(error_position=node.get_source_position(),
                       code=MessageCode.TYPE_MISMATCH,
                       message=message, log_level=LoggingLevel.INFO)
    if types_equal is False:
        Logger.log_message(message="Type mismatch in test!",
                           code=MessageCode.TYPE_MISMATCH,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR)
    return
def binary_operation_not_defined_error(self, _operator, _other):
    """Log that `self _operator _other` is not a defined operation and return an error type."""
    from pynestml.symbols.error_type_symbol import ErrorTypeSymbol
    code, message = Messages.get_binary_operation_not_defined(lhs=self, operator=_operator, rhs=_other)
    Logger.log_message(code=code, message=message,
                       error_position=self.referenced_object.get_source_position(),
                       log_level=LoggingLevel.ERROR)
    return ErrorTypeSymbol()
def visit_neuron(self, node):
    """
    Private method: Used to visit a single neuron and create the corresponding global as well as local scopes.
    :return: a single neuron.
    :rtype: ast_neuron
    """
    # set current processed neuron
    Logger.set_current_neuron(node)
    code, message = Messages.get_start_building_symbol_table()
    Logger.log_message(neuron=node, code=code, error_position=node.get_source_position(),
                       message=message, log_level=LoggingLevel.INFO)
    # before starting the work on the neuron, make everything which was implicit explicit
    # but if we have a model without an equations block, just skip this step
    if node.get_equations_blocks() is not None:
        make_implicit_odes_explicit(node.get_equations_blocks())
    scope = Scope(scope_type=ScopeType.GLOBAL, source_position=node.get_source_position())
    node.update_scope(scope)
    node.get_body().update_scope(scope)
    # now first, we add all predefined elements to the scope.
    # idiom fix: iterate the dict values directly instead of iterating keys and re-indexing.
    for symbol in PredefinedVariables.get_variables().values():
        node.get_scope().add_symbol(symbol)
    for symbol in PredefinedFunctions.get_function_symbols().values():
        node.get_scope().add_symbol(symbol)
    for symbol in PredefinedTypes.get_types().values():
        node.get_scope().add_symbol(symbol)
def analyse_neuron(self, neuron):
    # type: (ASTNeuron) -> None
    """
    Analyse and transform a single neuron.

    Steps (as visible below): log start of processing, rewrite convolve/sum
    calls in the equations block, make ODE functions self-contained, transform
    shapes/ODEs, apply spike buffers, and finally rebuild the symbol table.
    :param neuron: a single neuron.
    """
    code, message = Messages.get_start_processing_neuron(neuron.get_name())
    Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
    # make normalization
    # apply spikes to buffers
    # get rid of convolve, store them and apply then at the end
    equations_block = neuron.get_equations_block()
    # maps the name of each convolve's first argument (the shape) to its
    # second argument (the incoming buffer) — presumably both stringified
    # expressions; TODO confirm against OdeTransformer
    shape_to_buffers = {}
    if neuron.get_equations_block() is not None:
        # extract function names and corresponding incoming buffers
        convolve_calls = OdeTransformer.get_sum_function_calls(equations_block)
        for convolve in convolve_calls:
            shape_to_buffers[str(convolve.get_args()[0])] = str(convolve.get_args()[1])
        OdeTransformer.refactor_convolve_call(neuron.get_equations_block())
        self.make_functions_self_contained(equations_block.get_ode_functions())
        self.replace_functions_through_defining_expressions(equations_block.get_ode_equations(),
                                                            equations_block.get_ode_functions())
    # transform everything into gsl processable (e.g. no functional shapes) or exact form.
    self.transform_shapes_and_odes(neuron, shape_to_buffers)
    self.apply_spikes_from_buffers(neuron, shape_to_buffers)
    # update the symbol table
    symbol_table_visitor = ASTSymbolTableVisitor()
    symbol_table_visitor.after_ast_rewrite_ = True  # ODE block might have been removed entirely: suppress warnings
    neuron.accept(symbol_table_visitor)
def visit_simple_expression(self, node):
    """
    Visits a single variable as contained in a simple expression and derives its type.
    :param node: a single simple expression
    :type node: ASTSimpleExpression
    """
    assert isinstance(node, ASTSimpleExpression), \
        '(PyNestML.Visitor.VariableVisitor) No or wrong type of simple expression provided (%s)!' % type(node)
    assert (node.get_scope() is not None), \
        '(PyNestML.Visitor.VariableVisitor) No scope found, run symboltable creator!'
    scope = node.get_scope()
    var_name = node.get_variable().get_name()
    # first try to resolve the name as an ordinary variable
    variable_symbol = scope.resolve_to_symbol(var_name, SymbolKind.VARIABLE)
    if variable_symbol is not None:
        node.type = variable_symbol.get_type_symbol()
        node.type.referenced_object = node
        return
    # otherwise, the name might denote a type literal (e.g. "mV")
    type_symbol = scope.resolve_to_symbol(var_name, SymbolKind.TYPE)
    if type_symbol is not None:
        node.type = type_symbol
        node.type.referenced_object = node
        return
    # neither a variable nor a type literal: report and mark as error type
    message = 'Variable ' + str(node) + ' could not be resolved!'
    Logger.log_message(code=MessageCode.SYMBOL_NOT_RESOLVED,
                       error_position=node.get_source_position(),
                       message=message, log_level=LoggingLevel.ERROR)
    node.type = ErrorTypeSymbol()
    return
def visit_assignment(self, node):
    """Report an error if the assignment's target variable cannot be resolved."""
    full_name = node.get_variable().get_complete_name()
    if node.get_scope().resolve_to_symbol(full_name, SymbolKind.VARIABLE) is None:
        code, message = Messages.get_variable_not_defined(full_name)
        Logger.log_message(code=code, message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR, neuron=self.neuron)
def generate_module_code(self, neurons):
    # type: (list(ASTNeuron)) -> None
    """
    Generates code that is necessary to integrate neuron models into the NEST infrastructure.

    Renders the module header/class, the CMakeLists file and the SLI init
    script into the configured target directory.
    :param neurons: a list of neurons
    :type neurons: list(ASTNeuron)
    """
    namespace = {'neurons': neurons,
                 'moduleName': FrontendConfiguration.get_module_name(),
                 'now': datetime.datetime.utcnow()}
    target_path = FrontendConfiguration.get_target_path()
    module_name = FrontendConfiguration.get_module_name()
    if not os.path.exists(target_path):
        os.makedirs(target_path)

    def render_to(template, *path_parts):
        # helper: render `template` with the shared namespace into the joined path
        with open(str(os.path.join(*path_parts)), 'w+') as f:
            f.write(str(template.render(namespace)))

    render_to(self._template_module_header, target_path, module_name + '.h')
    render_to(self._template_module_class, target_path, module_name + '.cpp')
    render_to(self._template_cmakelists, target_path, 'CMakeLists.txt')
    sli_dir = os.path.realpath(os.path.join(target_path, 'sli'))
    if not os.path.isdir(sli_dir):
        os.makedirs(sli_dir)
    render_to(self._template_sli_init, target_path, 'sli', module_name + '-init.sli')
    code, message = Messages.get_module_generated(target_path)
    Logger.log_message(None, code, message, None, LoggingLevel.INFO)
def check_co_co(cls, _neuron=None):
    """
    Checks the coco for the handed over neuron.
    :param _neuron: a single neuron instance.
    :type _neuron: ASTNeuron
    """
    assert (_neuron is not None and isinstance(_neuron, ASTNeuron)), \
        '(PyNestML.CoCo.FunctionCallsConsistent) No or wrong type of neuron provided (%s)!' % type(_neuron)
    cls.__neuronName = _neuron.get_name()
    for user_defined_function in _neuron.get_functions():
        cls.processed_function = user_defined_function
        symbol = user_defined_function.get_scope().resolve_to_symbol(
            user_defined_function.get_name(), SymbolKind.FUNCTION)
        if symbol is None:
            continue
        stmts = user_defined_function.get_block().get_stmts()
        if len(stmts) > 0:
            # the block has statements: check that the last statement is a return
            cls.__check_return_recursively(symbol.get_return_type(), stmts, False)
        elif user_defined_function.has_return_type() and \
                not symbol.get_return_type().equals(PredefinedTypes.get_void_type()):
            # no statements at all, yet a non-void return type: missing return
            code, message = Messages.get_no_return()
            Logger.log_message(neuron=_neuron, code=code, message=message,
                               error_position=user_defined_function.get_source_position(),
                               log_level=LoggingLevel.ERROR)
    return
def visit_variable(self, node):
    """
    Visits each shape and checks if it is used correctly.

    A shape variable may only be referenced inside an ODE shape definition
    or as an argument to curr_sum / cond_sum / convolve; any other use is
    reported as an error.
    :param node: a single node.
    :type node: AST_
    """
    for shapeName in self.__shapes:
        # in order to allow shadowing by local scopes, we first check if the element has been declared locally
        symbol = node.get_scope().resolve_to_symbol(shapeName, SymbolKind.VARIABLE)
        # if it is not a shape just continue
        if symbol is None:
            code, message = Messages.get_no_variable_found(shapeName)
            Logger.log_message(neuron=self.__neuron_node, code=code, message=message,
                               log_level=LoggingLevel.ERROR)
            continue
        if not symbol.is_shape():
            continue
        if node.get_complete_name() == shapeName:
            parent = self.__neuron_node.get_parent(node)
            if parent is not None:
                # a use directly inside an ODE shape definition is legal
                if isinstance(parent, ASTOdeShape):
                    continue
                grandparent = self.__neuron_node.get_parent(parent)
                if grandparent is not None and isinstance(grandparent, ASTFunctionCall):
                    grandparent_func_name = grandparent.get_name()
                    # shapes may legally appear as arguments of these functions
                    if grandparent_func_name == 'curr_sum' or grandparent_func_name == 'cond_sum' or \
                            grandparent_func_name == 'convolve':
                        continue
                # any other context is an illegal use of the shape
                code, message = Messages.get_shape_outside_convolve(shapeName)
                Logger.log_message(error_position=node.get_source_position(),
                                   code=code, message=message,
                                   log_level=LoggingLevel.ERROR)
    return
def is_vectorized_assignment(cls, assignment):
    """
    Indicates whether the handed over assignment is vectorized, i.e., an assignment of vectors.
    :param assignment: a single assignment.
    :type assignment: ASTAssignment
    :return: True if vectorized, otherwise False.
    :rtype: bool
    """
    from pynestml.symbols.symbol import SymbolKind
    assert isinstance(assignment, ASTAssignment), \
        '(PyNestML.CodeGeneration.Assignments) No or wrong type of assignment provided (%s)!' % type(assignment)
    lhs_symbol = assignment.get_scope().resolve_to_symbol(
        assignment.get_variable().get_complete_name(), SymbolKind.VARIABLE)
    if lhs_symbol is None:
        Logger.log_message(message='No symbol could be resolved!', log_level=LoggingLevel.ERROR)
        return False
    if lhs_symbol.has_vector_parameter():
        return True
    # otherwise we have to check if one of the variables used in the rhs is a vector
    for var in assignment.get_expression().get_variables():
        rhs_symbol = var.get_scope().resolve_to_symbol(var.get_complete_name(), SymbolKind.VARIABLE)
        if rhs_symbol is not None and rhs_symbol.has_vector_parameter():
            return True
    return False
def drop_implicit_cast_warning(source_position, lhs_type_symbol, rhs_type_symbol):
    """Emit a warning that the rhs will be implicitly cast to the lhs type."""
    code, message = Messages.get_implicit_cast_rhs_to_lhs(rhs_type_symbol.print_symbol(),
                                                          lhs_type_symbol.print_symbol())
    Logger.log_message(error_position=source_position, code=code,
                       message=message, log_level=LoggingLevel.WARNING)
def test_expression_after_magnitude_conversion_in_direct_assignment(self):
    """The rhs of a direct assignment with compatible units gains an explicit factor."""
    Logger.set_logging_level(LoggingLevel.NO)
    resources_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources'))
    model = ModelParser.parse_model(
        os.path.join(resources_dir, 'DirectAssignmentWithDifferentButCompatibleUnits.nestml'))
    rhs_printed = print_rhs_of_first_assignment_in_update_block(model)
    self.assertEqual(rhs_printed, '1000.0 * (10*V)')
def __init__(self, target):
    """
    :param target: the name of the code-generation target; must be one of get_known_targets()
    :raises InvalidTargetException: if the target name is unknown
    """
    # idiom fix: `x not in y` instead of `not x in y`
    if target.upper() not in self.get_known_targets():
        code, msg = Messages.get_unknown_target(target)
        Logger.log_message(message=msg, code=code, log_level=LoggingLevel.ERROR)
        # leave the instance in a well-defined state before raising
        self._target = ""
        raise InvalidTargetException()
    self._target = target
def test_invalid_inline_expression_has_several_lhs(self):
    """An inline expression with several left-hand sides must not parse."""
    Logger.set_logging_level(LoggingLevel.INFO)
    invalid_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid'))
    model = ModelParser.parse_model(
        os.path.join(invalid_dir, 'CoCoInlineExpressionWithSeveralLhs.nestml'))
    assert model is None
def drop_missing_type_error(_assignment):
    """Log that the type of the assignment's rhs expression could not be derived."""
    rhs = _assignment.get_expression()
    code, message = Messages.get_type_could_not_be_derived(rhs)
    Logger.log_message(code=code, message=message,
                       error_position=rhs.get_source_position(),
                       log_level=LoggingLevel.ERROR)
def test(self):
    # Todo: this test is not yet complete, @ptraeder complete it
    Logger.init_logger(LoggingLevel.INFO)
    resource = os.path.join(os.path.dirname(__file__), 'resources', 'MagnitudeCompatibilityTest.nestml')
    model = ModelParser.parse_model(os.path.realpath(resource))
    # Logger.setCurrentNeuron(model.getNeuronList()[0])
    ExpressionTestVisitor().handle(model)
def test_variable_with_same_name_as_unit(self):
    """A variable shadowing a unit name parses, but produces three warnings."""
    Logger.set_logging_level(LoggingLevel.NO)
    valid_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid'))
    model = ModelParser.parse_model(os.path.join(valid_dir, 'CoCoVariableWithSameNameAsUnit.nestml'))
    warnings = Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.WARNING)
    self.assertEqual(len(warnings), 3)
def test_valid_element_not_defined_in_scope(self):
    """The valid model produces no scope-resolution errors."""
    Logger.set_logging_level(LoggingLevel.INFO)
    valid_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid'))
    model = ModelParser.parse_model(os.path.join(valid_dir, 'CoCoVariableNotDefined.nestml'))
    errors = Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)
    self.assertEqual(len(errors), 0)
def test_invalid_output_port_defined_if_emit_call(self):
    """test that an error is raised when the emit_spike() function is called by the neuron, but a spiking output port is not defined"""
    Logger.set_logging_level(LoggingLevel.INFO)
    invalid_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid'))
    model = ModelParser.parse_model(os.path.join(invalid_dir, 'CoCoOutputPortDefinedIfEmitCall-2.nestml'))
    errors = Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)
    self.assertTrue(len(errors) > 0)
def test_valid_output_port_defined_if_emit_call(self):
    """test that no error is raised when the output block is missing, but no emit_spike() functions are called"""
    Logger.set_logging_level(LoggingLevel.INFO)
    model = ModelParser.parse_model(
        os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')),
                     'CoCoOutputPortDefinedIfEmitCall.nestml'))
    # expect zero errors: emitting no spikes makes a missing output block legal
    self.assertEqual(len(
        Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0)
def test_declaration_with_same_variable_name_as_unit(self):
    """Shadowing a unit name in a declaration yields no errors but three warnings."""
    resources_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources'))
    model = ModelParser.parse_model(
        os.path.join(resources_dir, 'DeclarationWithSameVariableNameAsUnit.nestml'))
    neuron = model.get_neuron_list()[0]
    errors = Logger.get_all_messages_of_level_and_or_neuron(neuron, LoggingLevel.ERROR)
    warnings = Logger.get_all_messages_of_level_and_or_neuron(neuron, LoggingLevel.WARNING)
    self.assertEqual(len(errors), 0)
    self.assertEqual(len(warnings), 3)
def convert_name_reference(self, variable: ASTVariable, prefix: str = ''):
    """
    Converts a single name reference to a gsl processable format.
    :param variable: a single variable
    :type variable: ASTVariable
    :param prefix: prefix prepended to generated accessor expressions (e.g. an object scope)
    :return: a gsl processable format of the variable
    :rtype: str
    """
    # the predefined Euler constant maps to a NEST-provided constant
    if variable.get_name() == PredefinedVariables.E_CONSTANT:
        return 'numerics::e'
    symbol = variable.get_scope().resolve_to_symbol(
        variable.get_complete_name(), SymbolKind.VARIABLE)
    if symbol is None:
        # test if variable name can be resolved to a type
        if PredefinedUnits.is_unit(variable.get_complete_name()):
            # a physical unit literal: emit its numeric conversion factor
            return str(
                UnitConverter.get_factor(
                    PredefinedUnits.get_unit(
                        variable.get_complete_name()).get_unit()))
        # not a variable and not a unit: report and emit nothing
        code, message = Messages.get_could_not_resolve(variable.get_name())
        Logger.log_message(log_level=LoggingLevel.ERROR, code=code,
                           message=message,
                           error_position=variable.get_source_position())
        return ''
    if symbol.is_state():
        return GSLNamesConverter.name(symbol)
    if symbol.is_buffer():
        # buffers may carry units and therefore a numeric conversion factor
        if isinstance(symbol.get_type_symbol(), UnitTypeSymbol):
            units_conversion_factor = UnitConverter.get_factor(
                symbol.get_type_symbol().unit.unit)
        else:
            units_conversion_factor = 1
        s = ""
        # only wrap in "(factor * ...)" when a conversion is actually needed
        if not units_conversion_factor == 1:
            s += "(" + str(units_conversion_factor) + " * "
        s += prefix + 'B_.' + NestNamesConverter.buffer_value(symbol)
        if symbol.has_vector_parameter():
            s += '[i]'
        if not units_conversion_factor == 1:
            s += ")"
        return s
    variable_name = NestNamesConverter.convert_to_cpp_name(
        variable.get_name())
    # locals and inline expressions are referenced by plain name;
    # everything else goes through a generated getter
    if symbol.is_local() or symbol.is_inline_expression:
        return variable_name
    if symbol.has_vector_parameter():
        return prefix + 'get_' + variable_name + '()[i]'
    return prefix + 'get_' + variable_name + '()'
def generate_code(self, neurons):
    """Dispatch code generation for the given neurons according to the configured target."""
    if self._target == "NEST":
        from pynestml.codegeneration.nest_codegenerator import NESTCodeGenerator
        NESTCodeGenerator().generate_code(neurons)
    elif self._target == "":
        # dummy/null target: user requested to not generate any code
        code, message = Messages.get_no_code_generated()
        Logger.log_message(None, code, message, None, LoggingLevel.INFO)
def visit_declaration(self, node: ASTDeclaration):
    """
    Checks if the coco applies.
    :param node: a single declaration.
    """
    if not node.is_inline_expression or node.has_expression():
        return
    code, message = Messages.get_no_rhs(node.get_variables()[0].get_name())
    Logger.log_message(error_position=node.get_source_position(),
                       log_level=LoggingLevel.ERROR, code=code, message=message)
def check_model_with_cocos(cls, model_as_string):
    """Parse the handed-over model string and return the logger's JSON report as a string."""
    if not pynestml_available:
        print('PyNestML not available, no checks performed!')
        return str({})
    Logger.init_logger(LoggingLevel.NO)
    # parsing populates the logger with any coco violations
    ModelParser.parse_model(model=model_as_string, from_string=True)
    return str(Logger.get_json_format())
def drop_incompatible_types_error(containing_expression, lhs_type_symbol, rhs_type_symbol):
    """Log that the rhs type differs from the expected lhs type."""
    code, message = Messages.get_type_different_from_expected(lhs_type_symbol, rhs_type_symbol)
    Logger.log_message(error_position=containing_expression.get_source_position(),
                       code=code, message=message, log_level=LoggingLevel.ERROR)
def test_valid_co_co_resolution_legally_used(self):
    """A legal use of resolution() produces no errors."""
    Logger.set_logging_level(LoggingLevel.INFO)
    valid_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid'))
    model = ModelParser.parse_model(os.path.join(valid_dir, 'CoCoResolutionLegallyUsed.nestml'))
    errors = Logger.get_all_messages_of_level_and_or_node(model.get_synapse_list()[0], LoggingLevel.ERROR)
    self.assertEqual(len(errors), 0)
def endvisit_data_type(self, node):
    """Propagate the derived unit type symbol, or report that no type could be derived."""
    if node.is_unit_type() and node.get_unit_type().get_type_symbol() is not None:
        node.set_type_symbol(node.get_unit_type().get_type_symbol())
    if self.symbol is not None:
        self.result = self.symbol.get_symbol_name()
        return
    code, message = Messages.astdatatype_type_symbol_could_not_be_derived()
    Logger.log_message(None, code, message, node.get_source_position(), LoggingLevel.ERROR)
def test_valid_co_co_priorities_correctly_specified(self):
    """Correctly specified priorities produce no errors."""
    Logger.set_logging_level(LoggingLevel.INFO)
    valid_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid'))
    model = ModelParser.parse_model(os.path.join(valid_dir, 'CoCoPrioritiesCorrectlySpecified.nestml'))
    errors = Logger.get_all_messages_of_level_and_or_node(model.get_synapse_list()[0], LoggingLevel.ERROR)
    self.assertEqual(len(errors), 0)
def warn_implicit_cast_from_to(self, _from, _to):
    """Warn that an implicit cast from `_from` to `_to` takes place and return the target type."""
    code, message = Messages.get_implicit_cast_rhs_to_lhs(_to.print_symbol(), _from.print_symbol())
    Logger.log_message(code=code, message=message,
                       error_position=self.get_referenced_object().get_source_position(),
                       log_level=LoggingLevel.WARNING)
    return _to
def visit_assignment(self, node):
    """Report an error if a value is assigned to a spike or current input buffer."""
    symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_name(), SymbolKind.VARIABLE)
    if symbol is None:
        return
    if symbol.block_type in (BlockType.INPUT_BUFFER_SPIKE, BlockType.INPUT_BUFFER_CURRENT):
        code, message = Messages.get_value_assigned_to_buffer(node.get_variable().get_complete_name())
        Logger.log_message(code=code, message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR)
    return
def visit_simple_expression(self, node):
    """
    Visits a single function call as stored in a simple expression and derives
    the correct type of all its parameters.
    :param node: a simple expression
    :type node: ASTSimpleExpression
    :rtype void
    """
    # bug fix: '% tuple(node)' would itself raise when the assert fires; use type(node)
    assert isinstance(node, ASTSimpleExpression), \
        '(PyNestML.Visitor.FunctionCallVisitor) No or wrong type of simple expression provided (%s)!' % type(node)
    assert (node.get_scope() is not None), \
        "(PyNestML.Visitor.FunctionCallVisitor) No scope found, run symboltable creator!"
    scope = node.get_scope()
    function_name = node.get_function_call().get_name()
    method_symbol = scope.resolve_to_symbol(function_name, SymbolKind.FUNCTION)
    # check if this function exists
    if method_symbol is None:
        code, message = Messages.get_could_not_resolve(function_name)
        Logger.log_message(code=code, message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR)
        node.type = ErrorTypeSymbol()
        return
    return_type = method_symbol.get_return_type()
    return_type.referenced_object = node
    # convolve symbol does not have a return type set:
    # it returns whatever type the second parameter is.
    if function_name == PredefinedFunctions.CONVOLVE:
        # Deviations from the assumptions made here are handled in the convolveCoco
        buffer_parameter = node.get_function_call().get_args()[1]
        # bug fix: use the snake_case accessors used throughout this code base;
        # the former camelCase calls (getVariable/getName/getTypeSymbol) do not exist
        if buffer_parameter.get_variable() is not None:
            buffer_name = buffer_parameter.get_variable().get_name()
            buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE)
            if buffer_symbol_resolve is not None:
                node.type = buffer_symbol_resolve.get_type_symbol()
                return
        # getting here means there is an error with the parameters to convolve
        code, message = Messages.get_convolve_needs_buffer_parameter()
        Logger.log_message(code=code, message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR)
        node.type = ErrorTypeSymbol()
        return
    if isinstance(method_symbol.get_return_type(), VoidTypeSymbol):
        # todo by KP: the error message is not used here, @ptraeder fix this
        # error_msg = ErrorStrings.message_void_function_on_rhs(self, function_name, node.get_source_position())
        node.type = ErrorTypeSymbol()
        return
    # if nothing special is handled, just get the expression type from the return type of the function
    node.type = return_type
def process():
    """
    Parse all configured NESTML files, run semantic checks, and generate code.

    Returns
    -------
    errors_occurred : bool
        Flag indicating whether errors occurred during processing
    """
    errors_occurred = False
    # init log dir
    create_report_dir()
    # The handed over parameters seem to be correct, proceed with the main routine
    init_predefined()
    # now proceed to parse all models
    compilation_units = list()
    nestml_files = FrontendConfiguration.get_files()
    if not type(nestml_files) is list:
        nestml_files = [nestml_files]
    for nestml_file in nestml_files:
        parsed_unit = ModelParser.parse_model(nestml_file)
        if parsed_unit is not None:
            compilation_units.append(parsed_unit)
    if len(compilation_units) > 0:
        # generate a list of all neurons
        neurons = list()
        for compilation_unit in compilation_units:
            neurons.extend(compilation_unit.get_neuron_list())
        # check if across two files two neurons with same name have been defined
        CoCosManager.check_not_two_neurons_across_units(compilation_units)
        # now exclude those which are broken, i.e. have errors.
        if not FrontendConfiguration.is_dev:
            # bug fix: iterate over a snapshot; removing from the list being
            # iterated would silently skip the element following each removal
            for neuron in list(neurons):
                if Logger.has_errors(neuron):
                    code, message = Messages.get_neuron_contains_errors(
                        neuron.get_name())
                    Logger.log_message(
                        node=neuron, code=code, message=message,
                        error_position=neuron.get_source_position(),
                        log_level=LoggingLevel.INFO)
                    neurons.remove(neuron)
                    errors_occurred = True
        # perform code generation
        _codeGenerator = CodeGenerator.from_target_name(
            FrontendConfiguration.get_target(),
            options=FrontendConfiguration.get_codegen_opts())
        _codeGenerator.generate_code(neurons)
        for neuron in neurons:
            if Logger.has_errors(neuron):
                errors_occurred = True
                break
    if FrontendConfiguration.store_log:
        store_log_to_file()
    return errors_occurred
def test_invalid_coco_state_variables_initialized(self):
    """
    Test that the CoCo condition is applicable for all the variables in the state block
    not initialized
    """
    Logger.set_logging_level(LoggingLevel.INFO)
    invalid_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid'))
    model = ModelParser.parse_model(os.path.join(invalid_dir, 'CoCoStateVariablesInitialized.nestml'))
    errors = Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)
    self.assertEqual(len(errors), 2)
def test_invalid_coco_kernel_type_initial_values(self):
    """
    Test the functionality of CoCoKernelType.
    """
    Logger.set_logging_level(LoggingLevel.INFO)
    invalid_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid'))
    model = ModelParser.parse_model(os.path.join(invalid_dir, 'CoCoKernelTypeInitialValues.nestml'))
    errors = Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)
    self.assertEqual(len(errors), 4)
def setUp(self):
    """Register all predefined symbols and initialise the symbol table and logger."""
    PredefinedUnits.register_units()
    PredefinedTypes.register_types()
    PredefinedFunctions.register_functions()
    PredefinedVariables.register_variables()
    SymbolTable.initialize_symbol_table(
        ASTSourceLocation(start_line=0, start_column=0, end_line=0, end_column=0))
    Logger.init_logger(LoggingLevel.INFO)
    target = os.path.join(os.path.dirname(__file__), os.pardir, 'target')
    self.target_path = str(os.path.realpath(target))
def visit_declaration(self, node):
    """
    Collect the symbol of the first declared variable; report an error if it
    cannot be resolved in the current scope.
    :param node: a single declaration.
    """
    first_var = node.get_variables()[0]
    symbol = node.get_scope().resolve_to_symbol(first_var.get_complete_name(), SymbolKind.VARIABLE)
    if symbol is None:
        # bug fix: ASTDeclaration exposes get_variables(), not get_variable();
        # the original error path would have raised AttributeError
        code, message = Messages.get_variable_not_defined(first_var.get_complete_name())
        Logger.log_message(code=code, message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR, astnode=node)
        return
    self._variables.append(symbol)
def setUp(self) -> None:
    """Register predefined symbols, then initialise symbol table, logger and target path."""
    PredefinedUnits.register_units()
    PredefinedTypes.register_types()
    PredefinedFunctions.register_functions()
    PredefinedVariables.register_variables()
    SymbolTable.initialize_symbol_table(
        ASTSourceLocation(start_line=0, start_column=0, end_line=0, end_column=0))
    Logger.init_logger(LoggingLevel.INFO)
    target_dir = os.path.join(os.path.dirname(__file__), os.pardir, 'target')
    self.target_path = str(os.path.realpath(target_dir))
def visit_unit_type(self, node):
    """
    Check if the coco applies,
    :param node: a single unit type object.
    :type node: ast_unit_type
    """
    # only a division with an integer numerator other than 1 is illegal
    if not (node.is_div and isinstance(node.lhs, int)):
        return
    if node.lhs == 1:
        return
    code, message = Messages.get_wrong_numerator(str(node))
    Logger.log_message(code=code, message=message,
                       error_position=node.get_source_position(),
                       log_level=LoggingLevel.ERROR)
def visit_ode_equation(self, node):
    """
    Checks the coco.
    :param node: A single ode equation.
    :type node: ast_ode_equation
    """
    if node.get_lhs().get_differential_order() != 0:
        return
    code, message = Messages.get_order_not_declared(node.get_lhs().get_name())
    Logger.log_message(error_position=node.get_source_position(), code=code,
                       message=message, log_level=LoggingLevel.ERROR)
def handle_target(cls, target):
    """Validate and store the requested code-generation target; None/"NONE" maps to the empty dummy target."""
    # make sure `target` is always a string
    normalized = "" if target is None or target.upper() == "NONE" else target
    if normalized not in CodeGenerator.get_known_targets():
        code, message = Messages.get_unknown_target(normalized)
        Logger.log_message(None, code, message, None, LoggingLevel.ERROR)
        raise InvalidTargetException()
    cls.target = normalized
def visit_assignment(self, node):
    """Report an error when a value is assigned to an input buffer."""
    var = node.get_variable()
    symbol = node.get_scope().resolve_to_symbol(var.get_name(), SymbolKind.VARIABLE)
    if symbol is None or symbol.block_type != BlockType.INPUT:
        return
    code, message = Messages.get_value_assigned_to_buffer(var.get_complete_name())
    Logger.log_message(code=code, message=message,
                       error_position=node.get_source_position(),
                       log_level=LoggingLevel.ERROR)
def test(self):
    # Todo: this test is not yet complete, @ptraeder complete it
    Logger.init_logger(LoggingLevel.INFO)
    model_path = os.path.realpath(
        os.path.join(os.path.dirname(__file__), 'resources', 'MagnitudeCompatibilityTest.nestml'))
    model = ModelParser.parse_model(model_path)
    # Logger.setCurrentNeuron(model.getNeuronList()[0])
    ExpressionTestVisitor().handle(model)
def endvisit_neuron(self, node):
    """Run the post-symbol-table checks and attach ODEs to their variables before leaving the neuron."""
    # before following checks occur, we need to ensure several simple properties
    CoCosManager.post_symbol_table_builder_checks(node, after_ast_rewrite=self.after_ast_rewrite_)
    # update the equations
    equations = node.get_equations_blocks()
    if equations is not None and len(equations.get_declarations()) > 0:
        assign_ode_to_variables(equations)
    Logger.set_current_node(None)
def visit_input_port(self, node):
    """Report an error if a current input port carries any input qualifiers."""
    if not node.is_current():
        return
    if not (node.has_input_qualifiers() and len(node.get_input_qualifiers()) > 0):
        return
    qualifiers = [str(buf) for buf in node.get_input_qualifiers()]
    code, message = Messages.get_current_buffer_specified(node.get_name(), qualifiers)
    Logger.log_message(error_position=node.get_source_position(), code=code,
                       message=message, log_level=LoggingLevel.ERROR)
def visit_declaration(self, node):
    """
    Checks if the coco applies.
    :param node: a single declaration.
    :type node: ASTDeclaration.
    """
    if not node.is_function or node.has_expression():
        return
    code, message = Messages.get_no_rhs(node.get_variables()[0].get_name())
    Logger.log_message(error_position=node.get_source_position(),
                       log_level=LoggingLevel.ERROR, code=code, message=message)
def register_type(cls, symbol):
    """
    Registers a new type into the system.
    :param symbol: a single type symbol.
    :type symbol: UnitTypeSymbol
    """
    if symbol.is_primitive():
        return
    unit_name = symbol.unit.get_name()
    if unit_name in cls.name2type:
        return
    cls.name2type[unit_name] = symbol
    code, message = Messages.get_new_type_registered(unit_name)
    Logger.log_message(code=code, message=message, log_level=LoggingLevel.INFO)
def analyse_transform_neurons(self, neurons):
    # type: (list(ASTNeuron)) -> None
    """
    Analyse and transform a list of neurons.
    :param neurons: a list of neurons.
    """
    for ast_neuron in neurons:
        code, message = Messages.get_analysing_transforming_neuron(ast_neuron.get_name())
        Logger.log_message(None, code, message, None, LoggingLevel.INFO)
        self.analyse_neuron(ast_neuron)
        # now store the transformed model
        self.store_transformed_model(ast_neuron)
def visit_declaration(self, node):
    """
    Checks the coco.
    :param node: a single declaration.
    :type node: ast_declaration
    """
    if not (node.is_function and len(node.get_variables()) > 1):
        return
    names = [var.get_name() for var in node.get_variables()]
    code, message = Messages.get_several_lhs(names)
    Logger.log_message(error_position=node.get_source_position(),
                       log_level=LoggingLevel.ERROR, code=code, message=message)
def check_co_co(cls, node):
    """
    Ensures the coco for the handed over neuron.
    :param node: a single neuron instance.
    :type node: ast_neuron
    """
    for func in node.get_functions():
        if func.get_name() not in cls.nest_name_space:
            continue
        # the user-defined function collides with a name in the NEST namespace
        code, message = Messages.get_nest_collision(func.get_name())
        Logger.log_message(error_position=func.get_source_position(),
                           code=code, message=message,
                           log_level=LoggingLevel.ERROR)
    return
def visit_ode_equation(self, node):
    """
    Ensures the coco.
    :param node: a single equation object.
    :type node: ast_ode_equation
    """
    lhs_name = node.get_lhs().get_name_of_lhs()
    symbol = node.get_scope().resolve_to_symbol(lhs_name, SymbolKind.VARIABLE)
    if symbol is None or symbol.is_init_values():
        return
    code, message = Messages.get_equation_var_not_in_init_values_block(lhs_name)
    Logger.log_message(code=code, message=message,
                       error_position=node.get_source_position(),
                       log_level=LoggingLevel.ERROR)
def get_unit(cls, name):
    """
    Returns a single UnitType if the corresponding unit has been predefined.
    :param name: the name of a unit
    :type name: str
    :return: a single UnitType object, or None
    :rtype: UnitType
    """
    # EAFP: a single dict lookup instead of `in cls.name2unit.keys()` plus a second lookup
    try:
        return cls.name2unit[name]
    except KeyError:
        code, message = Messages.get_unit_does_not_exist(name)
        Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR)
        return None
def do_magnitude_conversion_rhs_to_lhs(_rhs_type_symbol, _lhs_type_symbol, _containing_expression):
    """
    Determine the conversion factor from rhs to lhs, register it with the
    relevant expression, and emit a warning about the implicit conversion.
    """
    factor = UnitTypeSymbol.get_conversion_factor(_lhs_type_symbol.astropy_unit,
                                                  _rhs_type_symbol.astropy_unit)
    _containing_expression.set_implicit_conversion_factor(factor)
    _containing_expression.type = _lhs_type_symbol
    # warn the user that a magnitude conversion was inserted implicitly
    code, message = Messages.get_implicit_magnitude_conversion(
        _lhs_type_symbol, _rhs_type_symbol,
        _containing_expression.get_implicit_conversion_factor())
    Logger.log_message(code=code, message=message,
                       error_position=_containing_expression.get_source_position(),
                       log_level=LoggingLevel.WARNING)
def visit_simple_expression(self, node):
    """
    Visits a single function call as stored in a simple expression and derives the correct type of all its
    parameters.
    :param node: a simple expression
    :type node: ASTSimpleExpression
    :rtype void
    """
    assert isinstance(node, ASTSimpleExpression), \
        '(PyNestML.Visitor.FunctionCallVisitor) No or wrong type of simple expression provided (%s)!' % tuple(node)
    assert (node.get_scope() is not None), \
        "(PyNestML.Visitor.FunctionCallVisitor) No scope found, run symboltable creator!"
    scope = node.get_scope()
    function_name = node.get_function_call().get_name()
    method_symbol = scope.resolve_to_symbol(function_name, SymbolKind.FUNCTION)
    # check if this function exists
    if method_symbol is None:
        code, message = Messages.get_could_not_resolve(function_name)
        Logger.log_message(code=code, message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR)
        node.type = ErrorTypeSymbol()
        return
    return_type = method_symbol.get_return_type()
    return_type.referenced_object = node
    # convolve symbol does not have a return type set.
    # returns whatever type the second parameter is.
    if function_name == PredefinedFunctions.CONVOLVE:
        # Deviations from the assumptions made here are handled in the convolveCoco
        buffer_parameter = node.get_function_call().get_args()[1]
        if buffer_parameter.get_variable() is not None:
            buffer_name = buffer_parameter.get_variable().get_name()
            buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE)
            if buffer_symbol_resolve is not None:
                node.type = buffer_symbol_resolve.get_type_symbol()
                return
        # getting here means there is an error with the parameters to convolve
        code, message = Messages.get_convolve_needs_buffer_parameter()
        Logger.log_message(code=code, message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR)
        node.type = ErrorTypeSymbol()
        return
    # reuse the already-resolved return type instead of re-calling get_return_type()
    if isinstance(return_type, VoidTypeSymbol):
        # a void function may not appear on the right-hand side of an expression
        # todo by KP: the error message is not used here, @ptraeder fix this
        # error_msg = ErrorStrings.message_void_function_on_rhs(self, function_name, node.get_source_position())
        node.type = ErrorTypeSymbol()
        return
    # if nothing special is handled, just get the expression type from the return type of the function
    node.type = return_type
def visit_input_line(self, node):
    """
    Private method: Used to visit a single input line, create the corresponding symbol and update the scope.
    :param node: a single input line.
    :type node: ast_input_line
    """
    if node.is_spike():
        if node.has_datatype():
            node.get_datatype().update_scope(node.get_scope())
        else:
            # a spike buffer without an explicit datatype -> warn the user
            code, message = Messages.get_buffer_type_not_defined(node.get_name())
            Logger.log_message(code=code, message=message,
                               error_position=node.get_source_position(),
                               log_level=LoggingLevel.WARNING)
    for input_type in node.get_input_types():
        input_type.update_scope(node.get_scope())
def visit_assignment(self, node):
    """
    Checks the coco on the current node: parameters may not be assigned
    outside the global scope.
    :param node: a single node.
    :type node: ast_assignment
    """
    symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_name(), SymbolKind.VARIABLE)
    if symbol is None:
        return
    if symbol.block_type != BlockType.PARAMETERS:
        return
    if node.get_scope().get_scope_type() == ScopeType.GLOBAL:
        return
    # assignment to a parameter outside the global scope -> error
    code, message = Messages.get_assignment_not_allowed(node.get_variable().get_complete_name())
    Logger.log_message(error_position=node.get_source_position(),
                       code=code, message=message,
                       log_level=LoggingLevel.ERROR)
    return
def convert_name_reference(self, variable):
    """
    Converts a single variable to nest processable format.
    :param variable: a single variable.
    :type variable: ASTVariable
    :return: a nest processable format.
    :rtype: str
    """
    from pynestml.codegeneration.nest_printer import NestPrinter
    assert (variable is not None and isinstance(variable, ASTVariable)), \
        '(PyNestML.CodeGeneration.NestReferenceConverter) No or wrong type of uses-gsl provided (%s)!' % type(
            variable)
    variable_name = NestNamesConverter.convert_to_cpp_name(variable.get_complete_name())
    # a physical unit is printed as its conversion factor
    if PredefinedUnits.is_unit(variable.get_complete_name()):
        return str(
            UnitConverter.get_factor(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit()))
    if variable_name == PredefinedVariables.E_CONSTANT:
        return 'numerics::e'
    symbol = variable.get_scope().resolve_to_symbol(variable_name, SymbolKind.VARIABLE)
    if symbol is None:
        # this should actually not happen, but an error message is better than an exception
        code, message = Messages.get_could_not_resolve(variable_name)
        Logger.log_message(log_level=LoggingLevel.ERROR, code=code, message=message,
                           error_position=variable.get_source_position())
        return ''
    # hoist the vector-access suffix, which every remaining branch appends
    vector_suffix = '[i]' if symbol.has_vector_parameter() else ''
    if symbol.is_local():
        return variable_name + vector_suffix
    if symbol.is_buffer():
        return NestPrinter.print_origin(symbol) + NestNamesConverter.buffer_value(symbol) + vector_suffix
    if symbol.is_function:
        # functions (aliases) are accessed through a generated getter
        return 'get_' + variable_name + '()' + vector_suffix
    if symbol.is_init_values():
        # initial values use GSL names when the GSL solver is in use
        names_converter = GSLNamesConverter if self.uses_gsl else NestNamesConverter
        return NestPrinter.print_origin(symbol) + names_converter.name(symbol) + vector_suffix
    return NestPrinter.print_origin(symbol) + NestNamesConverter.name(symbol) + vector_suffix
def visit_expression(self, node):
    """
    Visits a single rhs but does not execute any steps besides printing a message.
    This visitor indicates that no functionality has been implemented for this
    type of nodes.
    :param node: a single rhs
    :type node: ast_expression or ast_simple_expression
    """
    error_msg = ErrorStrings.message_no_semantics(self, str(node), node.get_source_position())
    node.type = ErrorTypeSymbol()
    # just warn though
    Logger.log_message(message=error_msg,
                       code=MessageCode.NO_SEMANTICS,
                       error_position=node.get_source_position(),
                       log_level=LoggingLevel.WARNING)
    return