def replace_variable_names_in_expressions(cls, neuron: ASTNeuron, solver_dicts: List[dict]) -> None:
    """
    Replace all occurrences of variable names in NESTML format (e.g. ``g_ex$''``)
    with the ODE-toolbox formatted variable name (e.g. ``g_ex__DOLLAR__d__d``).

    Variables aliasing convolutions should already have been covered by
    replace_convolution_aliasing_inlines().

    :param neuron: the neuron whose expressions are rewritten in place
    :param solver_dicts: ODE-toolbox solver result dictionaries used to decide
        which variables are solver-managed and hence need renaming
    """
    def replace_var(_expr=None):
        # A variable reference may appear either wrapped in an
        # ASTSimpleExpression or as a bare ASTVariable node; handle both.
        if isinstance(_expr, ASTSimpleExpression) and _expr.is_variable():
            var = _expr.get_variable()
            processed_name = cls.to_ode_toolbox_processed_name(var.get_complete_name())
            if cls.variable_in_solver(processed_name, solver_dicts):
                replacement = ASTVariable(processed_name, differential_order=0)
                replacement.set_source_position(var.get_source_position())
                _expr.set_variable(replacement)
        elif isinstance(_expr, ASTVariable):
            processed_name = cls.to_ode_toolbox_processed_name(_expr.get_complete_name())
            if cls.variable_in_solver(processed_name, solver_dicts):
                # rename in place; the differential order is folded into the
                # processed name, so reset it to zero
                _expr.set_name(processed_name)
                _expr.set_differential_order(0)

    neuron.accept(ASTHigherOrderVisitor(replace_var))
def get_cm_info(cls, neuron: ASTNeuron):
    """
    Checks if this compartmental conditions apply for the handed over neuron.
    If yes, it checks the presence of expected functions and declarations.
    In addition it organizes and builds a dictionary (cm_info)
    which describes all the relevant data that was found

    :param neuron: a single neuron instance.
    :type neuron: ASTNeuron
    """
    cm_info = cls.detectCMInlineExpressions(neuron)

    # further computation not necessary if there were no cm neurons
    if not cm_info:
        cm_info = dict()
        # NOTE(review): despite the comment above, processing continues with an
        # empty dict rather than returning early (the sibling check_co_co does
        # return early in this case) -- confirm this asymmetry is intended.

    cm_info = cls.calcExpectedFunctionNamesForChannels(cm_info)
    cm_info = cls.checkAndFindFunctions(neuron, cm_info)
    cm_info = cls.addChannelVariablesSectionAndEnforceProperVariableNames(neuron, cm_info)

    # now check for existence of expected state variables
    # and add their ASTVariable objects to cm_info
    missing_states_visitor = StateMissingVisitor(cm_info)
    neuron.accept(missing_states_visitor)

    return missing_states_visitor.cm_info
def replace_convolution_aliasing_inlines(cls, neuron: ASTNeuron) -> None:
    """
    Replace all occurrences of kernel names (e.g. ``I_dend`` and ``I_dend'``
    for a definition involving a second-order kernel
    ``inline kernel I_dend = convolve(kern_name, spike_buf)``) with the
    ODE-toolbox generated variable ``kern_name__X__spike_buf``.

    :param neuron: the neuron whose expressions are rewritten in place
    """
    def replace_var(_expr, replace_var_name: str, replace_with_var_name: str):
        # A variable reference may appear either wrapped in an
        # ASTSimpleExpression or as a bare ASTVariable node; handle both.
        if isinstance(_expr, ASTSimpleExpression) and _expr.is_variable():
            var = _expr.get_variable()
            if var.get_name() == replace_var_name:
                # one "__d" suffix per differential order; order is then reset
                ast_variable = ASTVariable(replace_with_var_name
                                           + '__d' * var.get_differential_order(),
                                           differential_order=0)
                ast_variable.set_source_position(var.get_source_position())
                _expr.set_variable(ast_variable)
        elif isinstance(_expr, ASTVariable):
            var = _expr
            if var.get_name() == replace_var_name:
                var.set_name(replace_with_var_name
                             + '__d' * var.get_differential_order())
                var.set_differential_order(0)

    for decl in neuron.get_equations_block().get_declarations():
        # BUGFIX: removed an unused ``from pynestml.utils.ast_utils import
        # ASTUtils`` that was re-executed on every loop iteration and never
        # referenced in this function.
        if isinstance(decl, ASTInlineExpression) \
                and isinstance(decl.get_expression(), ASTSimpleExpression) \
                and '__X__' in str(decl.get_expression()):
            replace_with_var_name = decl.get_expression().get_variable().get_name()
            # bind the loop-dependent values as lambda defaults so the closure
            # cannot pick up a later iteration's values (defensive; the visitor
            # runs synchronously, so behavior is unchanged)
            neuron.accept(ASTHigherOrderVisitor(
                lambda x, _old=decl.get_variable_name(), _new=replace_with_var_name:
                    replace_var(x, _old, _new)))
def check_co_co(cls, neuron: ASTNeuron):
    """
    Checks the coco for the handed over neuron.

    :param neuron: a single neuron instance.
    """
    emit_visitor = OutputPortDefinedIfEmitCalledVisitor()
    # the visitor needs a back-reference to the neuron it inspects
    emit_visitor.neuron = neuron
    neuron.accept(emit_visitor)
def check_co_co(cls, node: ASTNeuron):
    """
    Ensures the coco for the handed over neuron.

    :param node: a single neuron instance.
    :type node: ASTNeuron
    """
    visitor = KernelTypeVisitor()
    # the visitor needs a back-reference to the neuron it inspects
    visitor._neuron = node
    node.accept(visitor)
def check_co_co(cls, node: ASTNeuron):
    """
    Ensures the coco for the handed over neuron.

    :param node: a single neuron instance.
    """
    eq_visitor = EquationsDefinedVisitor()
    node.accept(eq_visitor)

    odes_visitor = IntegrateOdesCalledVisitor()
    node.accept(odes_visitor)

    # equations without a corresponding integrate_odes() call would never
    # be integrated -- report this as an error
    if eq_visitor.equations_defined() and not odes_visitor.integrate_odes_called():
        code, message = Messages.get_equations_defined_but_integrate_odes_not_called()
        Logger.log_message(code=code,
                           message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR)
def check_co_co(cls, node: ASTNeuron):
    """
    Ensures the coco for the handed over neuron.

    :param node: a single neuron instance.
    """
    # synapses are exempt from this check
    if isinstance(node, ASTSynapse):
        return

    # XXX: TODO: check that there are no equations other than the ones moved to the neuron (if any)
    eq_visitor = EquationsDefinedVisitor()
    node.accept(eq_visitor)

    odes_visitor = IntegrateOdesCalledVisitor()
    node.accept(odes_visitor)

    # equations without a corresponding integrate_odes() call would never
    # be integrated -- report this as an error
    if eq_visitor.equations_defined() and not odes_visitor.integrate_odes_called():
        code, message = Messages.get_equations_defined_but_integrate_odes_not_called()
        Logger.log_message(code=code,
                           message=message,
                           error_position=node.get_source_position(),
                           log_level=LoggingLevel.ERROR)
def check_co_co(cls, neuron: ASTNeuron):
    """
    Checks if this compartmental conditions apply for the handed over neuron.
    Models which do not have a state variable named as specified in the value
    of cm_trigger_variable_name are not relevant.

    :param neuron: a single neuron instance.
    :type neuron: ASTNeuron
    """
    cm_info = cls.detectCMInlineExpressions(neuron)

    # nothing more to check if the model defines no cm inline expressions
    # NOTE(review): this early exit returns True while the fall-through path
    # returns None -- confirm callers ignore the return value.
    if not cm_info:
        return True

    cm_info = cls.calcExpectedFunctionNamesForChannels(cm_info)
    cm_info = cls.checkAndFindFunctions(neuron, cm_info)
    cm_info = cls.addChannelVariablesSectionAndEnforceProperVariableNames(neuron, cm_info)

    # verify that the expected state variables exist and record their
    # ASTVariable objects in cm_info
    missing_states_visitor = StateMissingVisitor(cm_info)
    neuron.accept(missing_states_visitor)
def setup_generation_helpers(self, neuron: ASTNeuron) -> Dict:
    """
    Returns a standard namespace with often required functionality.

    :param neuron: a single neuron instance
    :type neuron: ASTNeuron
    :return: a map from name to functionality.
    :rtype: dict
    """
    gsl_converter = GSLReferenceConverter()
    gsl_printer = UnitlessExpressionPrinter(gsl_converter)
    # helper classes and objects
    converter = NESTReferenceConverter(False)
    unitless_pretty_printer = UnitlessExpressionPrinter(converter)

    namespace = dict()

    namespace['neuronName'] = neuron.get_name()
    namespace['neuron'] = neuron
    namespace['moduleName'] = FrontendConfiguration.get_module_name()
    namespace['printer'] = NestPrinter(unitless_pretty_printer)
    namespace['assignments'] = NestAssignmentsHelper()
    namespace['names'] = NestNamesConverter()
    namespace['declarations'] = NestDeclarationsHelper()
    namespace['utils'] = ASTUtils()
    namespace['idemPrinter'] = UnitlessExpressionPrinter()
    namespace['outputEvent'] = namespace['printer'].print_output_event(neuron.get_body())
    namespace['is_spike_input'] = ASTUtils.is_spike_input(neuron.get_body())
    namespace['is_current_input'] = ASTUtils.is_current_input(neuron.get_body())
    namespace['odeTransformer'] = OdeTransformer()
    namespace['printerGSL'] = gsl_printer
    namespace['now'] = datetime.datetime.utcnow()
    namespace['tracing'] = FrontendConfiguration.is_dev

    namespace['PredefinedUnits'] = pynestml.symbols.predefined_units.PredefinedUnits
    namespace['UnitTypeSymbol'] = pynestml.symbols.unit_type_symbol.UnitTypeSymbol

    namespace['initial_values'] = {}
    namespace['uses_analytic_solver'] = neuron.get_name() in self.analytic_solver.keys() \
        and self.analytic_solver[neuron.get_name()] is not None
    if namespace['uses_analytic_solver']:
        namespace['analytic_state_variables'] = \
            self.analytic_solver[neuron.get_name()]["state_variables"]
        namespace['analytic_variable_symbols'] = {
            sym: neuron.get_equations_block().get_scope().resolve_to_symbol(
                sym, SymbolKind.VARIABLE)
            for sym in namespace['analytic_state_variables']}
        namespace['update_expressions'] = {}
        for sym, expr in self.analytic_solver[neuron.get_name()]["initial_values"].items():
            namespace['initial_values'][sym] = expr
        for sym in namespace['analytic_state_variables']:
            expr_str = self.analytic_solver[neuron.get_name()]["update_expressions"][sym]
            expr_ast = ModelParser.parse_expression(expr_str)
            # pretend that update expressions are in "equations" block, which should always be present, as differential equations must have been defined to get here
            # NOTE(review): ``get_equations_blocks()`` (plural) is used here
            # while ``get_equations_block()`` (singular) is used above --
            # confirm both accessors exist and return the expected scope.
            expr_ast.update_scope(neuron.get_equations_blocks().get_scope())
            expr_ast.accept(ASTSymbolTableVisitor())
            namespace['update_expressions'][sym] = expr_ast
        namespace['propagators'] = self.analytic_solver[neuron.get_name()]["propagators"]

    # BUGFIX: membership was previously tested against ``self.analytic_solver``
    # (copy-paste from the analytic case above); the numeric solver has its own
    # dictionary, so test against ``self.numeric_solver`` instead.
    namespace['uses_numeric_solver'] = neuron.get_name() in self.numeric_solver.keys() \
        and self.numeric_solver[neuron.get_name()] is not None
    if namespace['uses_numeric_solver']:
        namespace['numeric_state_variables'] = \
            self.numeric_solver[neuron.get_name()]["state_variables"]
        namespace['numeric_variable_symbols'] = {
            sym: neuron.get_equations_block().get_scope().resolve_to_symbol(
                sym, SymbolKind.VARIABLE)
            for sym in namespace['numeric_state_variables']}
        # every numeric state variable must resolve to a known symbol
        assert not any([sym is None
                        for sym in namespace['numeric_variable_symbols'].values()])
        namespace['numeric_update_expressions'] = {}
        for sym, expr in self.numeric_solver[neuron.get_name()]["initial_values"].items():
            namespace['initial_values'][sym] = expr
        for sym in namespace['numeric_state_variables']:
            expr_str = self.numeric_solver[neuron.get_name()]["update_expressions"][sym]
            expr_ast = ModelParser.parse_expression(expr_str)
            # pretend that update expressions are in "equations" block, which should always be present, as differential equations must have been defined to get here
            expr_ast.update_scope(neuron.get_equations_blocks().get_scope())
            expr_ast.accept(ASTSymbolTableVisitor())
            namespace['numeric_update_expressions'][sym] = expr_ast

    namespace['useGSL'] = namespace['uses_numeric_solver']
    namespace['names'] = GSLNamesConverter()
    converter = NESTReferenceConverter(True)
    unitless_pretty_printer = UnitlessExpressionPrinter(converter)
    namespace['printer'] = NestPrinter(unitless_pretty_printer)

    namespace["spike_updates"] = neuron.spike_updates

    rng_visitor = ASTRandomNumberGeneratorVisitor()
    neuron.accept(rng_visitor)
    namespace['norm_rng'] = rng_visitor._norm_rng_is_used

    return namespace
def check_co_co(cls, node: ASTNeuron):
    """
    Checks the vector-declaration coco for the handed over neuron.

    :param node: a single neuron instance.
    """
    node.accept(VectorDeclarationVisitor())
def check_co_co(cls, node: ASTNeuron):
    """
    Ensures the coco for the handed over neuron.

    :param node: a single neuron instance.
    """
    visitor = InputPortDatatypeVisitor()
    node.accept(visitor)
def check_co_co(cls, node: ASTNeuron, after_ast_rewrite: bool = False):
    """
    Checks if this coco applies for the handed over neuron.
    Models which contain undefined variables are not correct.

    :param node: a single neuron instance.
    :param after_ast_rewrite: indicates whether this coco is checked after the
        code generator has done rewriting of the abstract syntax tree.
        If True, checks are not as rigorous. Use False where possible.
    """
    # for each variable in all expressions, check if the variable has been defined previously
    expression_collector_visitor = ASTExpressionCollectorVisitor()
    node.accept(expression_collector_visitor)
    expressions = expression_collector_visitor.ret
    for expr in expressions:
        # normalize: a bare variable and a compound expression both yield a
        # list of variables to check
        if isinstance(expr, ASTVariable):
            vars = [expr]
        else:
            vars = expr.get_variables()
        for var in vars:
            symbol = var.get_scope().resolve_to_symbol(
                var.get_complete_name(), SymbolKind.VARIABLE)
            # this part is required to check that we handle invariants differently
            expr_par = node.get_parent(expr)

            # test if the symbol has been defined at least
            if symbol is None:
                if after_ast_rewrite:
                    # after ODE-toolbox transformations, convolutions are replaced by state variables, so cannot perform this check properly
                    symbol2 = node.get_scope().resolve_to_symbol(
                        var.get_name(), SymbolKind.VARIABLE)
                    if symbol2 is not None:
                        # an inline expression defining this variable name (ignoring differential order) exists
                        if "__X__" in str(symbol2):
                            # if this variable was the result of a convolution...
                            continue
                else:
                    # for kernels, also allow derivatives of that kernel to appear
                    if node.get_equations_block() is not None:
                        inline_expr_names = [
                            inline_expr.variable_name
                            for inline_expr in node.get_equations_block().get_inline_expressions()]
                        if var.get_name() in inline_expr_names:
                            inline_expr_idx = inline_expr_names.index(var.get_name())
                            inline_expr = node.get_equations_block(
                            ).get_inline_expressions()[inline_expr_idx]
                            from pynestml.utils.ast_utils import ASTUtils
                            if ASTUtils.inline_aliases_convolution(inline_expr):
                                symbol2 = node.get_scope().resolve_to_symbol(
                                    var.get_name(), SymbolKind.VARIABLE)
                                if symbol2 is not None:
                                    # actually, no problem detected, skip error
                                    # XXX: TODO: check that differential order is less than or equal to that of the kernel
                                    continue

                # check if this symbol is actually a type, e.g. "mV" in the expression "(1 + 2) * mV"
                symbol2 = var.get_scope().resolve_to_symbol(
                    var.get_complete_name(), SymbolKind.TYPE)
                if symbol2 is not None:
                    continue  # symbol is a type symbol

                # the variable is genuinely undefined: report and stop checking
                code, message = Messages.get_variable_not_defined(
                    var.get_complete_name())
                Logger.log_message(code=code, message=message,
                                   error_position=node.get_source_position(),
                                   log_level=LoggingLevel.ERROR, node=node)
                return

            # check if it is part of an invariant
            # if it is the case, there is no "recursive" declaration
            # so check if the parent is a declaration and the expression the invariant
            if isinstance(expr_par, ASTDeclaration) and expr_par.get_invariant() == expr:
                # in this case its ok if it is recursive or defined later on
                continue

            # check if it has been defined before usage, except for predefined symbols, input ports and variables added by the AST transformation functions
            if (not symbol.is_predefined) \
                    and symbol.block_type != BlockType.INPUT \
                    and not symbol.get_referenced_object().get_source_position().is_added_source_position():
                # except for parameters, those can be defined after
                if ((not symbol.get_referenced_object().get_source_position().before(
                        var.get_source_position()))
                        and (not symbol.block_type in [BlockType.PARAMETERS,
                                                       BlockType.INTERNALS,
                                                       BlockType.STATE])):
                    code, message = Messages.get_variable_used_before_declaration(
                        var.get_name())
                    Logger.log_message(node=node, message=message,
                                       error_position=var.get_source_position(),
                                       code=code, log_level=LoggingLevel.ERROR)
                # now check that they are not defined recursively, e.g. V_m mV = V_m + 1
                # todo: we should not check this for invariants
                if (symbol.get_referenced_object().get_source_position().encloses(
                        var.get_source_position())
                        and not symbol.get_referenced_object().get_source_position().is_added_source_position()):
                    code, message = Messages.get_variable_defined_recursively(
                        var.get_name())
                    Logger.log_message(code=code, message=message,
                                       error_position=symbol.get_referenced_object().get_source_position(),
                                       log_level=LoggingLevel.ERROR, node=node)
def check_co_co(cls, node: ASTNeuron):
    """
    Checks the vector-variables coco for the handed over neuron.

    :param node: a single neuron instance.
    """
    node.accept(VectorVariablesVisitor())