def __check_scope(cls, neuron, scope):
     """
     Checks a single scope and proceeds recursively.
     :param neuron: a single neuron object, required for correct printing of messages.
     :type neuron: ast_neuron
     :param scope: a single scope to check.
     :type scope: Scope
     """
     checked = list()
     for sym1 in scope.get_symbols_in_this_scope():
         if sym1.get_symbol_kind() != SymbolKind.VARIABLE or sym1.is_predefined:
             continue
         for sym2 in scope.get_symbols_in_this_scope():
             if (sym1 is not sym2 and
                     sym1.get_symbol_name() == sym2.get_symbol_name() and
                     sym2 not in checked):
                 if sym2.get_symbol_kind() == SymbolKind.TYPE:
                     code, message = Messages.get_variable_with_same_name_as_type(sym1.get_symbol_name())
                     Logger.log_message(error_position=sym1.get_referenced_object().get_source_position(),
                                        neuron=neuron, log_level=LoggingLevel.WARNING, code=code, message=message)
                 elif sym1.get_symbol_kind() == sym2.get_symbol_kind():
                     if sym2.is_predefined:
                         code, message = Messages.get_variable_redeclared(sym1.get_symbol_name(), True)
                         Logger.log_message(error_position=sym1.get_referenced_object().get_source_position(),
                                            neuron=neuron, log_level=LoggingLevel.ERROR, code=code, message=message)
                     elif sym1.get_referenced_object().get_source_position().before(
                             sym2.get_referenced_object().get_source_position()):
                         code, message = Messages.get_variable_redeclared(sym1.get_symbol_name(), False)
                         Logger.log_message(error_position=sym2.get_referenced_object().get_source_position(),
                                            neuron=neuron, log_level=LoggingLevel.ERROR, code=code, message=message)
         checked.append(sym1)
     for scope in scope.get_scopes():
         cls.__check_scope(neuron, scope)
     return
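
# Illustrative sketch (hypothetical names, not part of PyNestML): the routine above
# walks one scope, compares every pair of symbols, and then recurses into the child
# scopes. The same traversal on a minimal stand-in Scope class looks like this.
class ToyScope:
    def __init__(self, symbols, children=()):
        self.symbols = list(symbols)      # list of (name, kind) tuples
        self.children = list(children)    # nested ToyScope instances

def find_duplicates(scope):
    """Return (name, kind) pairs that occur more than once within a single scope."""
    duplicates = []
    seen = set()
    for name, kind in scope.symbols:
        if (name, kind) in seen:
            duplicates.append((name, kind))
        seen.add((name, kind))
    for child in scope.children:
        duplicates.extend(find_duplicates(child))
    return duplicates

inner = ToyScope([("V_m", "VARIABLE")])
outer = ToyScope([("V_m", "VARIABLE"), ("V_m", "VARIABLE")], [inner])
print(find_duplicates(outer))   # [('V_m', 'VARIABLE')] -- only the outer scope has a clash
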
 def visit_variable(self, node):
     """
     Visits each shape and checks if it is used correctly.
     :param node: a single node.
      :type node: ASTVariable
     """
     for shapeName in self.__shapes:
         # in order to allow shadowing by local scopes, we first check if the element has been declared locally
         symbol = node.get_scope().resolve_to_symbol(shapeName, SymbolKind.VARIABLE)
         # if it is not a shape just continue
         if symbol is None:
             code, message = Messages.get_no_variable_found(shapeName)
             Logger.log_message(neuron=self.__neuron_node, code=code, message=message, log_level=LoggingLevel.ERROR)
             continue
         if not symbol.is_shape():
             continue
         if node.get_complete_name() == shapeName:
             parent = self.__neuron_node.get_parent(node)
             if parent is not None:
                 if isinstance(parent, ASTOdeShape):
                     continue
                 grandparent = self.__neuron_node.get_parent(parent)
                 if grandparent is not None and isinstance(grandparent, ASTFunctionCall):
                     grandparent_func_name = grandparent.get_name()
                     if grandparent_func_name == 'curr_sum' or grandparent_func_name == 'cond_sum' or \
                             grandparent_func_name == 'convolve':
                         continue
             code, message = Messages.get_shape_outside_convolve(shapeName)
             Logger.log_message(error_position=node.get_source_position(),
                                code=code, message=message,
                                log_level=LoggingLevel.ERROR)
     return
    def check_co_co(cls, node):
        """
        Checks whether this coco applies to the handed over neuron. Models that use undefined elements are not
        correct.
        :param node: a single neuron instance.
        :type node: ast_neuron
        """
        # for each variable in all expressions, check if the variable has been defined previously
        expression_collector_visitor = ASTExpressionCollectorVisitor()
        node.accept(expression_collector_visitor)
        expressions = expression_collector_visitor.ret
        for expr in expressions:
            for var in expr.get_variables():
                symbol = var.get_scope().resolve_to_symbol(var.get_complete_name(), SymbolKind.VARIABLE)
                # the parent expression is needed further below so that invariants can be handled differently
                expr_par = node.get_parent(expr)

                if symbol is None:
                    # check if this symbol is actually a type, e.g. "mV" in the expression "(1 + 2) * mV"
                    symbol = var.get_scope().resolve_to_symbol(var.get_complete_name(), SymbolKind.TYPE)
                    if symbol is None:
                        # symbol has not been defined; neither as a variable name nor as a type symbol
                        code, message = Messages.get_variable_not_defined(var.get_name())
                        Logger.log_message(neuron=node, code=code, message=message, log_level=LoggingLevel.ERROR,
                                           error_position=var.get_source_position())
                # first check if it is part of an invariant
                # if it is the case, there is no "recursive" declaration
                # so check if the parent is a declaration and the expression the invariant
                elif isinstance(expr_par, ASTDeclaration) and expr_par.get_invariant() == expr:
                    # in this case it's ok if it is recursive or defined later on
                    continue

                # now check if it has been defined before usage, except for buffers, those are special cases
                elif (not symbol.is_predefined and symbol.block_type != BlockType.INPUT_BUFFER_CURRENT and
                      symbol.block_type != BlockType.INPUT_BUFFER_SPIKE):
                    # except for parameters, those can be defined after
                    if (not symbol.get_referenced_object().get_source_position().before(var.get_source_position()) and
                            symbol.block_type != BlockType.PARAMETERS):
                        code, message = Messages.get_variable_used_before_declaration(var.get_name())
                        Logger.log_message(neuron=node, message=message, error_position=var.get_source_position(),
                                           code=code, log_level=LoggingLevel.ERROR)
                    # now check that it is not defined recursively, e.g. V_m mV = V_m + 1
                    # todo by KP: we should not check this for invariants
                    if (symbol.get_referenced_object().get_source_position().encloses(var.get_source_position()) and
                            not symbol.get_referenced_object().get_source_position().is_added_source_position()):
                        code, message = Messages.get_variable_defined_recursively(var.get_name())
                        Logger.log_message(code=code, message=message, error_position=symbol.get_referenced_object().
                                           get_source_position(), log_level=LoggingLevel.ERROR, neuron=node)

        # now check for each assignment whether the left hand side variable is defined
        vis = ASTAssignedVariableDefinedVisitor(node)
        node.accept(vis)
        return
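
# Side sketch (toy positions, not the real source-location API): the
# declared-before-used test above reduces to comparing source positions,
# modelled here as (line, column) tuples.
def before(pos_a, pos_b):
    """True if position pos_a comes strictly before position pos_b."""
    return pos_a < pos_b   # tuple comparison: first by line, then by column

declaration_pos = (12, 4)   # where the symbol is declared
usage_pos = (8, 10)         # where the variable is read

if not before(declaration_pos, usage_pos):
    print("variable used before declaration")   # mirrors the error reported above
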
    def visit_simple_expression(self, node):
        """
        Visits a single function call as stored in a simple expression and derives the correct type of all its
        parameters.
        :param node: a simple expression
        :type node: ASTSimpleExpression
        :rtype: void
        """
        assert isinstance(node, ASTSimpleExpression), \
            '(PyNestML.Visitor.FunctionCallVisitor) No or wrong type of simple expression provided (%s)!' % type(node)
        assert (node.get_scope() is not None), \
            "(PyNestML.Visitor.FunctionCallVisitor) No scope found, run symboltable creator!"
        scope = node.get_scope()
        function_name = node.get_function_call().get_name()
        method_symbol = scope.resolve_to_symbol(function_name, SymbolKind.FUNCTION)
        # check if this function exists
        if method_symbol is None:
            code, message = Messages.get_could_not_resolve(function_name)
            Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
            return
        return_type = method_symbol.get_return_type()
        return_type.referenced_object = node

        # the convolve function does not have a return type set;
        # it returns whatever type its second parameter has.
        if function_name == PredefinedFunctions.CONVOLVE:
            # Deviations from the assumptions made here are handled in the convolveCoco
            buffer_parameter = node.get_function_call().get_args()[1]

            if buffer_parameter.get_variable() is not None:
                buffer_name = buffer_parameter.get_variable().get_name()
                buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE)
                if buffer_symbol_resolve is not None:
                    node.type = buffer_symbol_resolve.get_type_symbol()
                    return

            # getting here means there is an error with the parameters to convolve
            code, message = Messages.get_convolve_needs_buffer_parameter()
            Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
            return

        if isinstance(method_symbol.get_return_type(), VoidTypeSymbol):
            # todo by KP: the error message is not used here, @ptraeder fix this
            # error_msg = ErrorStrings.message_void_function_on_rhs(self, function_name, node.get_source_position())
            node.type = ErrorTypeSymbol()
            return

        # if nothing special is handled, just get the expression type from the return type of the function
        node.type = return_type
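
# Toy illustration (hypothetical names and types) of the typing rule above: a
# function call takes its declared return type, except convolve(), whose type
# is taken from its second (buffer) argument.
FUNCTION_RETURN_TYPES = {"exp": "real"}
BUFFER_TYPES = {"spikes_in": "pA"}

def derive_call_type(function_name, args):
    if function_name == "convolve":
        return BUFFER_TYPES.get(args[1], "error")
    return FUNCTION_RETURN_TYPES.get(function_name, "error")

print(derive_call_type("exp", ["x"]))                    # real
print(derive_call_type("convolve", ["G", "spikes_in"]))  # pA -- the buffer's type
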
Example #5
    def transform_shapes_and_odes(self, neuron, shape_to_buffers):
        # type: (ASTNeuron, map(str, str)) -> ASTNeuron
        """
        Solves all odes and equations in the handed over neuron.

        Precondition: it should be ensured that at most one equations block is present.

        :param neuron: a single neuron instance.
        :param shape_to_buffers: Map of shape names to buffers to which they were connected.
        :return: A transformed version of the neuron that can be passed to the GSL.
        """

        assert isinstance(neuron.get_equations_blocks(), ASTEquationsBlock), "Precondition violated: only one equation block should be present"

        equations_block = neuron.get_equations_block()

        if len(equations_block.get_ode_shapes()) == 0:
            code, message = Messages.get_neuron_solved_by_solver(neuron.get_name())
            Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
            return neuron
        elif len(equations_block.get_ode_shapes()) == 1 and \
                str(equations_block.get_ode_shapes()[0].get_expression()).strip().startswith(
                    "delta"):  # assume the model is well formed
            shape = equations_block.get_ode_shapes()[0]
            integrate_delta_solution(equations_block, neuron, shape, shape_to_buffers)
            return neuron
        elif len(equations_block.get_ode_equations()) == 1:
            code, message = Messages.get_neuron_analyzed(neuron.get_name())
            Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
            solver_result = self.solve_ode_with_shapes(equations_block)

            if solver_result["solver"] == "analytical":
                neuron = integrate_exact_solution(neuron, solver_result)
                neuron.remove_equations_block()
            elif (solver_result["solver"] == "numeric"
                  and self.is_functional_shape_present(equations_block.get_ode_shapes())):
                functional_shapes_to_odes(neuron, solver_result)

            return neuron
        else:
            code, message = Messages.get_neuron_solved_by_solver(neuron.get_name())
            Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)

            if self.is_functional_shape_present(equations_block.get_ode_shapes()):
                ode_shapes = self.solve_functional_shapes(equations_block)
                functional_shapes_to_odes(neuron, ode_shapes)

            return neuron
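
# General Python note, not specific to this code base: string identity checks
# with `is` only appear to work because of interning, so the solver name above
# is compared by value with `==` instead.
solver = "".join(["analy", "tical"])   # equal in value to "analytical", but a distinct object
print(solver == "analytical")          # True: value comparison is always reliable
print(solver is "analytical")          # typically False (and a SyntaxWarning on Python >= 3.8)
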
Example #6
    def generate_module_code(self, neurons):
        # type: (list(ASTNeuron)) -> None
        """
        Generates code that is necessary to integrate neuron models into the NEST infrastructure.
        :param neurons: a list of neurons
        :type neurons: list(ASTNeuron)
        """
        namespace = {'neurons': neurons,
                     'moduleName': FrontendConfiguration.get_module_name(),
                     'now': datetime.datetime.utcnow()}
        if not os.path.exists(FrontendConfiguration.get_target_path()):
            os.makedirs(FrontendConfiguration.get_target_path())

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   FrontendConfiguration.get_module_name())) + '.h', 'w+') as f:
            f.write(str(self._template_module_header.render(namespace)))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   FrontendConfiguration.get_module_name())) + '.cpp', 'w+') as f:
            f.write(str(self._template_module_class.render(namespace)))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   'CMakeLists')) + '.txt', 'w+') as f:
            f.write(str(self._template_cmakelists.render(namespace)))

        if not os.path.isdir(os.path.realpath(os.path.join(FrontendConfiguration.get_target_path(), 'sli'))):
            os.makedirs(os.path.realpath(os.path.join(FrontendConfiguration.get_target_path(), 'sli')))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(), 'sli',
                                   FrontendConfiguration.get_module_name() + "-init")) + '.sli', 'w+') as f:
            f.write(str(self._template_sli_init.render(namespace)))

        code, message = Messages.get_module_generated(FrontendConfiguration.get_target_path())
        Logger.log_message(None, code, message, None, LoggingLevel.INFO)
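
# Minimal sketch of the render-then-write pattern used above, with an in-memory
# jinja2 template instead of the real module templates; paths and names here are
# made up for illustration only.
import os
from jinja2 import Template

namespace = {"moduleName": "my_module", "neurons": ["iaf_psc_exp"]}
target_path = "./build_sketch"
os.makedirs(target_path, exist_ok=True)   # same effect as the exists()/makedirs() pair above

header_template = Template("// module {{ moduleName }} with {{ neurons | length }} neuron(s)\n")
with open(os.path.join(target_path, namespace["moduleName"] + ".h"), "w") as f:
    f.write(header_template.render(namespace))
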
Example #7
 def analyse_neuron(self, neuron):
     # type: (ASTNeuron) -> None
     """
     Analyse and transform a single neuron.
     :param neuron: a single neuron.
     """
     code, message = Messages.get_start_processing_neuron(neuron.get_name())
     Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
      # normalization steps:
      # apply spikes to buffers
      # get rid of convolve calls, store them, and apply them at the end
     equations_block = neuron.get_equations_block()
     shape_to_buffers = {}
     if neuron.get_equations_block() is not None:
         # extract function names and corresponding incoming buffers
         convolve_calls = OdeTransformer.get_sum_function_calls(equations_block)
         for convolve in convolve_calls:
             shape_to_buffers[str(convolve.get_args()[0])] = str(convolve.get_args()[1])
         OdeTransformer.refactor_convolve_call(neuron.get_equations_block())
         self.make_functions_self_contained(equations_block.get_ode_functions())
          self.replace_functions_through_defining_expressions(
              equations_block.get_ode_equations(), equations_block.get_ode_functions())
         # transform everything into gsl processable (e.g. no functional shapes) or exact form.
         self.transform_shapes_and_odes(neuron, shape_to_buffers)
         self.apply_spikes_from_buffers(neuron, shape_to_buffers)
         # update the symbol table
         symbol_table_visitor = ASTSymbolTableVisitor()
         symbol_table_visitor.after_ast_rewrite_ = True		# ODE block might have been removed entirely: suppress warnings
         neuron.accept(symbol_table_visitor)
 def check_co_co(cls, _neuron=None):
     """
     Checks the coco for the handed over neuron.
     :param _neuron: a single neuron instance.
     :type _neuron: ASTNeuron
     """
     assert (_neuron is not None and isinstance(_neuron, ASTNeuron)), \
         '(PyNestML.CoCo.FunctionCallsConsistent) No or wrong type of neuron provided (%s)!' % type(_neuron)
     cls.__neuronName = _neuron.get_name()
     for userDefinedFunction in _neuron.get_functions():
         cls.processed_function = userDefinedFunction
         symbol = userDefinedFunction.get_scope().resolve_to_symbol(userDefinedFunction.get_name(),
                                                                    SymbolKind.FUNCTION)
         # first ensure that the block contains at least one statement
         if symbol is not None and len(userDefinedFunction.get_block().get_stmts()) > 0:
             # now check that the last statement is a return
             cls.__check_return_recursively(symbol.get_return_type(),
                                            userDefinedFunction.get_block().get_stmts(), False)
         # now if it does not have a statement, but uses a return type, it is an error
         elif symbol is not None and userDefinedFunction.has_return_type() and \
                 not symbol.get_return_type().equals(PredefinedTypes.get_void_type()):
             code, message = Messages.get_no_return()
             Logger.log_message(neuron=_neuron, code=code, message=message,
                                error_position=userDefinedFunction.get_source_position(),
                                log_level=LoggingLevel.ERROR)
     return
 def visit_assignment(self, node):
     symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_complete_name(),
                                                 SymbolKind.VARIABLE)
     if symbol is None:
         code, message = Messages.get_variable_not_defined(node.get_variable().get_complete_name())
         Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR, neuron=self.neuron)
Example #10
 def visit_neuron(self, node):
     """
     Private method: Used to visit a single neuron and create the corresponding global as well as local scopes.
     :return: a single neuron.
     :rtype: ast_neuron
     """
     # set current processed neuron
     Logger.set_current_neuron(node)
     code, message = Messages.get_start_building_symbol_table()
     Logger.log_message(neuron=node, code=code, error_position=node.get_source_position(),
                        message=message, log_level=LoggingLevel.INFO)
     # before starting the work on the neuron, make everything which was implicit explicit
     # but if we have a model without an equations block, just skip this step
     if node.get_equations_blocks() is not None:
         make_implicit_odes_explicit(node.get_equations_blocks())
     scope = Scope(scope_type=ScopeType.GLOBAL, source_position=node.get_source_position())
     node.update_scope(scope)
     node.get_body().update_scope(scope)
     # now first, we add all predefined elements to the scope
     variables = PredefinedVariables.get_variables()
     functions = PredefinedFunctions.get_function_symbols()
     types = PredefinedTypes.get_types()
     for symbol in variables.keys():
         node.get_scope().add_symbol(variables[symbol])
     for symbol in functions.keys():
         node.get_scope().add_symbol(functions[symbol])
     for symbol in types.keys():
         node.get_scope().add_symbol(types[symbol])
 def visit_input_line(self, node):
     """
     Checks the coco on the current node.
     :param node: a single input line node.
     :type node: ast_input_line
     """
     if node.is_spike() and not node.has_datatype():
         code, message = Messages.get_data_type_not_specified(node.get_name())
         Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR,
                            code=code, message=message)
     if node.is_current() and node.has_datatype():
         code, message = Messages.get_not_type_allowed(node.get_name())
          Logger.log_message(error_position=node.get_source_position(),
                            code=code, message=message,
                            log_level=LoggingLevel.ERROR)
     return
Example #12
    def visit_expression(self, node):
        """
        Visits an expression which uses a binary logic operator and updates the type.
        :param node: a single expression.
        :type node: ast_expression
        """
        lhs_type = node.get_lhs().type
        rhs_type = node.get_rhs().type

        lhs_type.referenced_object = node.get_lhs()
        rhs_type.referenced_object = node.get_rhs()

        if isinstance(lhs_type, BooleanTypeSymbol) and isinstance(rhs_type, BooleanTypeSymbol):
            node.type = PredefinedTypes.get_boolean_type()
        else:
            if isinstance(lhs_type, BooleanTypeSymbol):
                offending_type = lhs_type
            else:
                offending_type = rhs_type
            code, message = Messages.get_type_different_from_expected(BooleanTypeSymbol(), offending_type)
            Logger.log_message(code=code, message=message,
                               error_position=lhs_type.referenced_object.get_source_position(),
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
        return
Example #13
 def binary_operation_not_defined_error(self, _operator, _other):
     from pynestml.symbols.error_type_symbol import ErrorTypeSymbol
     result = ErrorTypeSymbol()
     code, message = Messages.get_binary_operation_not_defined(lhs=self, operator=_operator, rhs=_other)
     Logger.log_message(code=code, message=message, error_position=self.referenced_object.get_source_position(),
                        log_level=LoggingLevel.ERROR)
     return result
 def visit_function_call(self, node):
     func_name = node.get_name()
     if func_name == 'convolve' or func_name == 'cond_sum' or func_name == 'curr_sum':
         symbol_var = node.get_scope().resolve_to_symbol(str(node.get_args()[0]),
                                                         SymbolKind.VARIABLE)
         symbol_buffer = node.get_scope().resolve_to_symbol(str(node.get_args()[1]),
                                                            SymbolKind.VARIABLE)
         if symbol_var is not None and not symbol_var.is_shape() and not symbol_var.is_init_values():
             code, message = Messages.get_first_arg_not_shape_or_equation(func_name)
             Logger.log_message(code=code, message=message,
                                error_position=node.get_source_position(), log_level=LoggingLevel.ERROR)
         if symbol_buffer is not None and not symbol_buffer.is_input_buffer_spike():
             code, message = Messages.get_second_arg_not_a_buffer(func_name)
             Logger.log_message(error_position=node.get_source_position(),
                                code=code, message=message,
                                log_level=LoggingLevel.ERROR)
         return
 def visit_assignment(self, node):
     symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_name(), SymbolKind.VARIABLE)
     if symbol is not None and (symbol.block_type == BlockType.INPUT_BUFFER_SPIKE or
                                symbol.block_type == BlockType.INPUT_BUFFER_CURRENT):
         code, message = Messages.get_value_assigned_to_buffer(node.get_variable().get_complete_name())
         Logger.log_message(code=code, message=message,
                            error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
     return
Example #16
    def handle_target(cls, target):
        if target is None or target.upper() == "NONE":
            target = ""     # make sure `target` is always a string

        if target not in CodeGenerator.get_known_targets():
            code, message = Messages.get_unknown_target(target)
            Logger.log_message(None, code, message, None, LoggingLevel.ERROR)
            raise InvalidTargetException()

        cls.target = target
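
# Stand-alone usage sketch (hypothetical whitelist and exception, mirroring the
# validation above): unknown targets raise, "no target" is normalised to "".
KNOWN_TARGETS = ["NEST", "autodoc", ""]

class InvalidTargetException(Exception):
    pass

def handle_target(target):
    if target is None or target.upper() == "NONE":
        target = ""                       # make sure target is always a string
    if target not in KNOWN_TARGETS:
        raise InvalidTargetException("unknown target: %r" % target)
    return target

print(repr(handle_target(None)))    # '' -- accepted as the dummy/null target
print(repr(handle_target("NEST")))  # 'NEST'
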
 def visit_unit_type(self, node):
     """
     Check if the coco applies,
     :param node: a single unit type object.
     :type node: ast_unit_type
     """
     if node.is_div and isinstance(node.lhs, int) and node.lhs != 1:
         code, message = Messages.get_wrong_numerator(str(node))
         Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
 def visit_declaration(self, node):
     """
     Checks the coco for a declaration.
     :param node: a single declaration.
     :type node: ASTDeclaration
     """
     assert isinstance(node, ASTDeclaration)
     if node.has_invariant():
         invariant_type = node.get_invariant().type
         if invariant_type is None or isinstance(invariant_type, ErrorTypeSymbol):
             code, message = Messages.get_type_could_not_be_derived(str(node.get_invariant()))
             Logger.log_message(error_position=node.get_invariant().get_source_position(), code=code,
                                message=message, log_level=LoggingLevel.ERROR)
         elif not invariant_type.equals(PredefinedTypes.get_boolean_type()):
             code, message = Messages.get_type_different_from_expected(PredefinedTypes.get_boolean_type(),
                                                                       invariant_type)
             Logger.log_message(error_position=node.get_invariant().get_source_position(), code=code,
                                message=message, log_level=LoggingLevel.ERROR)
     return
 def visit_ode_equation(self, node):
     """
     Checks the coco.
     :param node: A single ode equation.
     :type node: ast_ode_equation
     """
     if node.get_lhs().get_differential_order() == 0:
         code, message = Messages.get_order_not_declared(node.get_lhs().get_name())
         Logger.log_message(error_position=node.get_source_position(), code=code,
                            message=message, log_level=LoggingLevel.ERROR)
Example #20
 def register_type(cls, symbol):
     """
     Registers a new type into the system.
      :param symbol: a single type symbol.
      :type symbol: UnitTypeSymbol
     """
     if not symbol.is_primitive() and symbol.unit.get_name() not in cls.name2type.keys():
         cls.name2type[symbol.unit.get_name()] = symbol
         code, message = Messages.get_new_type_registered(symbol.unit.get_name())
         Logger.log_message(code=code, message=message, log_level=LoggingLevel.INFO)
     return
Example #21
 def visit_declaration(self, node):
     """
     Checks if the coco applies.
     :param node: a single declaration.
     :type node: ASTDeclaration.
     """
     if node.is_function and not node.has_expression():
         code, message = Messages.get_no_rhs(node.get_variables()[0].get_name())
         Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR,
                            code=code, message=message)
     return
 def visit_declaration(self, node):
     """
     Checks the coco.
     :param node: a single declaration.
     :type node: ast_declaration
     """
     if node.is_function and len(node.get_variables()) > 1:
         code, message = Messages.get_several_lhs(list((var.get_name() for var in node.get_variables())))
         Logger.log_message(error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR,
                            code=code, message=message)
     return
Example #23
 def analyse_transform_neurons(self, neurons):
     # type: (list(ASTNeuron)) -> None
     """
     Analyse and transform a list of neurons.
     :param neurons: a list of neurons.
     """
     for neuron in neurons:
         code, message = Messages.get_analysing_transforming_neuron(neuron.get_name())
         Logger.log_message(None, code, message, None, LoggingLevel.INFO)
         self.analyse_neuron(neuron)
         # now store the transformed model
         self.store_transformed_model(neuron)
 def check_co_co(cls, node):
     """
     Ensures the coco for the handed over neuron.
     :param node: a single neuron instance.
     :type node: ast_neuron
     """
     for func in node.get_functions():
         if func.get_name() in cls.nest_name_space:
             code, message = Messages.get_nest_collision(func.get_name())
             Logger.log_message(error_position=func.get_source_position(),
                                code=code, message=message,
                                log_level=LoggingLevel.ERROR)
     return
 def visit_ode_equation(self, node):
     """
     Ensures the coco.
     :param node: a single equation object.
     :type node: ast_ode_equation
     """
     symbol = node.get_scope().resolve_to_symbol(node.get_lhs().get_name_of_lhs(), SymbolKind.VARIABLE)
     if symbol is not None and not symbol.is_init_values():
         code, message = Messages.get_equation_var_not_in_init_values_block(node.get_lhs().get_name_of_lhs())
         Logger.log_message(code=code, message=message,
                            error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
         return
Example #26
 def visit_input_line(self, node):
     """
     Private method: Used to visit a single input line, create the corresponding symbol and update the scope.
     :param node: a single input line.
     :type node: ast_input_line
     """
     if node.is_spike() and node.has_datatype():
         node.get_datatype().update_scope(node.get_scope())
     elif node.is_spike():
         code, message = Messages.get_buffer_type_not_defined(node.get_name())
         Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                            log_level=LoggingLevel.WARNING)
     for inputType in node.get_input_types():
         inputType.update_scope(node.get_scope())
Example #27
def add_ode_to_variable(ode_equation):
    """
    Resolves the corresponding symbol and updates its ode-declaration. In the case that no symbol can be resolved, an error is reported.
    :param ode_equation: a single ode-equation
    :type ode_equation: ast_ode_equation
    """
    # a differential equation is stated by giving the derivative, so derive the actual order from the lhs
    diff_order = ode_equation.get_lhs().get_differential_order() - 1
    # we check if the corresponding symbol already exists, e.g. V_m' has already been declared
    existing_symbol = (ode_equation.get_scope().resolve_to_symbol(ode_equation.get_lhs().get_name() + '\'' * diff_order,
                                                                  SymbolKind.VARIABLE))
    if existing_symbol is not None:
        existing_symbol.set_ode_definition(ode_equation.get_rhs())
        # todo added on merge
        ode_equation.get_scope().update_variable_symbol(existing_symbol)
        code, message = Messages.get_ode_updated(ode_equation.get_lhs().get_name_of_lhs())
        Logger.log_message(error_position=existing_symbol.get_referenced_object().get_source_position(),
                           code=code, message=message, log_level=LoggingLevel.INFO)
    else:
        code, message = Messages.get_no_variable_found(ode_equation.get_lhs().get_name_of_lhs())
        Logger.log_message(code=code, message=message, error_position=ode_equation.get_source_position(),
                           log_level=LoggingLevel.ERROR)
    return
Example #28
 def get_unit(cls, name):
     """
     Returns a single UnitType if the corresponding unit has been predefined.
     :param name: the name of a unit
     :type name: str
     :return: a single UnitType object, or None
     :rtype: UnitType
     """
     if name in cls.name2unit.keys():
         return cls.name2unit[name]
     else:
         code, message = Messages.get_unit_does_not_exist(name)
         Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR)
         return None
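
# Equivalent lookup sketch: a plain dict.get() expresses the same "return the
# unit if registered, otherwise report and return None" logic. The registry
# contents here are placeholders.
name2unit = {"mV": "UnitType(mV)"}   # stand-in; the real values are UnitType objects

def get_unit(name):
    unit = name2unit.get(name)
    if unit is None:
        print("unit does not exist:", name)   # stands in for the Messages/Logger call
    return unit

print(get_unit("mV"))   # UnitType(mV)
print(get_unit("foo"))  # logs the error, then returns None
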
Example #29
    def do_magnitude_conversion_rhs_to_lhs(_rhs_type_symbol, _lhs_type_symbol, _containing_expression):
        """
        determine the conversion factor from rhs to lhs, register it with the relevant expression and log a warning
        """
        _containing_expression.set_implicit_conversion_factor(
            UnitTypeSymbol.get_conversion_factor(_lhs_type_symbol.astropy_unit,
                                                 _rhs_type_symbol.astropy_unit))
        _containing_expression.type = _lhs_type_symbol

        code, message = Messages.get_implicit_magnitude_conversion(_lhs_type_symbol, _rhs_type_symbol,
                                                                   _containing_expression.get_implicit_conversion_factor())
        Logger.log_message(code=code, message=message,
                           error_position=_containing_expression.get_source_position(),
                           log_level=LoggingLevel.WARNING)
 def visit_assignment(self, node):
     """
     Checks the coco on the current node.
     :param node: a single node.
     :type node: ast_assignment
     """
     symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_name(), SymbolKind.VARIABLE)
     if (symbol is not None and symbol.block_type == BlockType.PARAMETERS and
             node.get_scope().get_scope_type() != ScopeType.GLOBAL):
         code, message = Messages.get_assignment_not_allowed(node.get_variable().get_complete_name())
         Logger.log_message(error_position=node.get_source_position(),
                            code=code, message=message,
                            log_level=LoggingLevel.ERROR)
     return
Example #31
 def visit_variable(self, node: ASTNode):
     """
     Visits each kernel and checks if it is used correctly.
     :param node: a single node.
     """
     for kernelName in self.__kernels:
         # in order to allow shadowing by local scopes, we first check if the element has been declared locally
         symbol = node.get_scope().resolve_to_symbol(
             kernelName, SymbolKind.VARIABLE)
         # if it is not a kernel just continue
         if symbol is None:
             if not isinstance(node, ASTExternalVariable):
                 code, message = Messages.get_no_variable_found(kernelName)
                 Logger.log_message(node=self.__neuron_node,
                                    code=code,
                                    message=message,
                                    log_level=LoggingLevel.ERROR)
             continue
         if not symbol.is_kernel():
             continue
         if node.get_complete_name() == kernelName:
             parent = self.__neuron_node.get_parent(node)
             if parent is not None:
                 if isinstance(parent, ASTKernel):
                     continue
                 grandparent = self.__neuron_node.get_parent(parent)
                 if grandparent is not None and isinstance(
                         grandparent, ASTFunctionCall):
                     grandparent_func_name = grandparent.get_name()
                     if grandparent_func_name == 'convolve':
                         continue
             code, message = Messages.get_kernel_outside_convolve(
                 kernelName)
             Logger.log_message(code=code,
                                message=message,
                                log_level=LoggingLevel.ERROR,
                                error_position=node.get_source_position())
Example #32
    def convert_name_reference(self, variable):
        """
        Converts a single variable to nest processable format.
        :param variable: a single variable.
        :type variable: ASTVariable
        :return: a nest processable format.
        :rtype: str
        """
        from pynestml.codegeneration.nest_printer import NestPrinter
        assert (variable is not None and isinstance(variable, ASTVariable)), \
            '(PyNestML.CodeGeneration.NestReferenceConverter) No or wrong type of variable provided (%s)!' % type(variable)
        variable_name = NestNamesConverter.convert_to_cpp_name(variable.get_complete_name())

        if PredefinedUnits.is_unit(variable.get_complete_name()):
            return str(
                UnitConverter.get_factor(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit()))
        if variable_name == PredefinedVariables.E_CONSTANT:
            return 'numerics::e'
        else:
            symbol = variable.get_scope().resolve_to_symbol(variable_name, SymbolKind.VARIABLE)
            if symbol is None:
                # this should actually not happen, but an error message is better than an exception
                code, message = Messages.get_could_not_resolve(variable_name)
                Logger.log_message(log_level=LoggingLevel.ERROR, code=code, message=message,
                                   error_position=variable.get_source_position())
                return ''
            else:
                if symbol.is_local():
                    return variable_name + ('[i]' if symbol.has_vector_parameter() else '')
                elif symbol.is_buffer():
                    return NestPrinter.print_origin(symbol) + NestNamesConverter.buffer_value(symbol) \
                           + ('[i]' if symbol.has_vector_parameter() else '')
                else:
                    if symbol.is_function:
                        return 'get_' + variable_name + '()' + ('[i]' if symbol.has_vector_parameter() else '')
                    else:
                        if symbol.is_init_values():
                            temp = NestPrinter.print_origin(symbol)
                            if self.uses_gsl:
                                temp += GSLNamesConverter.name(symbol)
                            else:
                                temp += NestNamesConverter.name(symbol)
                            temp += ('[i]' if symbol.has_vector_parameter() else '')
                            return temp
                        else:
                            return NestPrinter.print_origin(symbol) + \
                                   NestNamesConverter.name(symbol) + \
                                   ('[i]' if symbol.has_vector_parameter() else '')
Example #33
def add_ode_shape_to_variable(ode_shape):
    """
    Adds the shape as the defining equation.
    :param ode_shape: a single shape object.
    :type ode_shape: ast_ode_shape
    """
    if ode_shape.get_variable().get_differential_order() == 0:
        # we only update those which define an ode
        return
    # we check if the corresponding symbol already exists, e.g. V_m' has already been declared
    existing_symbol = ode_shape.get_scope().resolve_to_symbol(ode_shape.get_variable().get_name_of_lhs(),
                                                              SymbolKind.VARIABLE)
    if existing_symbol is not None:
        existing_symbol.set_ode_definition(ode_shape.get_expression())
        existing_symbol.set_variable_type(VariableType.SHAPE)
        ode_shape.get_scope().update_variable_symbol(existing_symbol)
        code, message = Messages.get_ode_updated(ode_shape.get_variable().get_name_of_lhs())
        Logger.log_message(error_position=existing_symbol.get_referenced_object().get_source_position(),
                           code=code, message=message, log_level=LoggingLevel.INFO)
    else:
        code, message = Messages.get_no_variable_found(ode_shape.get_variable().get_name_of_lhs())
        Logger.log_message(code=code, message=message, error_position=ode_shape.get_source_position(),
                           log_level=LoggingLevel.ERROR)
    return
Example #34
 def attempt_magnitude_cast(self, other):
     if self.differs_only_in_magnitude(other):
         factor = UnitTypeSymbol.get_conversion_factor(
             self.astropy_unit, other.astropy_unit)
         other.referenced_object.set_implicit_conversion_factor(factor)
         code, message = Messages.get_implicit_magnitude_conversion(
             self, other, factor)
         Logger.log_message(
             code=code,
             message=message,
             error_position=self.referenced_object.get_source_position(),
             log_level=LoggingLevel.WARNING)
         return self
     else:
         return self.binary_operation_not_defined_error('+/-', other)
 def visit_ode_equation(self, node):
     """
     Checks the coco.
     :param node: A single ode equation.
     :type node: ast_ode_equation
     """
     variable_name = node.get_lhs().get_name()
     variable_symbol = node.get_lhs().get_scope().resolve_to_symbol(
         variable_name, SymbolKind.VARIABLE)
     if variable_symbol is None:
         code, message = Messages.get_variable_not_defined(variable_name)
         Logger.log_message(code=code,
                            message=message,
                            log_level=LoggingLevel.ERROR,
                            error_position=node.get_source_position())
         return
     variable_type = variable_symbol.type_symbol
     from pynestml.utils.unit_type import UnitType
     from pynestml.symbols.unit_type_symbol import UnitTypeSymbol
      unit_type_name = ("inv_diff_order_unit_type_" + variable_name
                        + "'" * node.get_lhs().get_differential_order())
     inv_diff_order_unit_type = UnitType(
         name=unit_type_name,
         unit=1 / units.s**node.get_lhs().get_differential_order())
     inv_diff_order_unit_type_symbol = UnitTypeSymbol(
         inv_diff_order_unit_type)
     lhs_type = variable_type * inv_diff_order_unit_type_symbol
     rhs_type = node.get_rhs().type
     if not rhs_type.is_castable_to(lhs_type):
         code, message = Messages.get_ode_needs_consistent_units(
             variable_name,
             node.get_lhs().get_differential_order(), lhs_type, rhs_type)
         Logger.log_message(error_position=node.get_source_position(),
                            code=code,
                            message=message,
                            log_level=LoggingLevel.ERROR)
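
# Sketch of the unit bookkeeping above using plain astropy units (the real code
# wraps them in UnitType/UnitTypeSymbol): for a second-order variable such as
# V_m'' the right-hand side must carry the variable's unit times 1/s**2.
from astropy import units

variable_unit = units.mV          # unit of the state variable, e.g. V_m
differential_order = 2            # V_m'' in the model source
lhs_unit = variable_unit * (1 / units.s ** differential_order)
print(lhs_unit)                   # prints a unit equivalent to mV / s**2
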
 def visit_input_port(self, node):
     """
     Checks the coco on the current node.
     :param node: a single input port.
     :type node: ASTInputPort
     """
     if node.is_spike():
         if node.has_input_qualifiers() and len(
                 node.get_input_qualifiers()) > 1:
             code, message = Messages.get_multiple_keywords(", ".join(
                 [str(q) for q in node.get_input_qualifiers()]))
             Logger.log_message(error_position=node.get_source_position(),
                                code=code,
                                message=message,
                                log_level=LoggingLevel.ERROR)
Example #37
 def register_type(cls, symbol):
     """
     Registers a new type into the system.
      :param symbol: a single type symbol.
      :type symbol: UnitTypeSymbol
     """
      if not symbol.is_primitive() and symbol.unit.get_name() not in cls.name2type.keys():
         cls.name2type[symbol.unit.get_name()] = symbol
         code, message = Messages.get_new_type_registered(
             symbol.unit.get_name())
         Logger.log_message(code=code,
                            message=message,
                            log_level=LoggingLevel.INFO)
     return
    def visit_input_port(self, node):
        """
        Private method: Used to visit a single input port, create the corresponding symbol and update the scope.
        :param node: a single input port.
        :type node: ASTInputPort
        """
        if not node.has_datatype():
            code, message = Messages.get_input_port_type_not_defined(node.get_name())
            Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
        else:
            node.get_datatype().update_scope(node.get_scope())

        for qual in node.get_input_qualifiers():
            qual.update_scope(node.get_scope())
 def visit_assignment(self, node: ASTAssignment) -> None:
     """
     Checks the coco on the current node.
     :param node: a single node.
     """
     symbol = node.get_scope().resolve_to_symbol(
         node.get_variable().get_name(), SymbolKind.VARIABLE)
     if (symbol is not None and symbol.block_type
             in [BlockType.PARAMETERS, BlockType.COMMON_PARAMETERS]
             and node.get_scope().get_scope_type() != ScopeType.GLOBAL):
         code, message = Messages.get_assignment_not_allowed(
             node.get_variable().get_complete_name())
         Logger.log_message(error_position=node.get_source_position(),
                            code=code,
                            message=message,
                            log_level=LoggingLevel.ERROR)
Example #40
 def visit_ode_equation(self, node):
     """
     Ensures the coco.
     :param node: a single equation object.
     :type node: ast_ode_equation
     """
     symbol = node.get_scope().resolve_to_symbol(
         node.get_lhs().get_name_of_lhs(), SymbolKind.VARIABLE)
     if symbol is not None and not symbol.is_state():
         code, message = Messages.get_equation_var_not_in_state_block(
             node.get_lhs().get_name_of_lhs())
         Logger.log_message(code=code,
                            message=message,
                            error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
         return
Example #41
    def visit_declaration(self, node: ASTDeclaration):

        variables = node.get_variables()
        for var in variables:
            vector_parameter = var.get_vector_parameter()
            if vector_parameter is not None:
                vector_parameter_var = ASTVariable(vector_parameter, scope=node.get_scope())
                symbol = vector_parameter_var.get_scope().resolve_to_symbol(vector_parameter_var.get_complete_name(),
                                                                            SymbolKind.VARIABLE)
                # vector parameter is a variable
                if symbol is not None:
                    if not symbol.block_type == BlockType.PARAMETERS and not symbol.block_type == BlockType.INTERNALS:
                        code, message = Messages.get_vector_parameter_wrong_block(vector_parameter_var.get_complete_name(),
                                                                                  str(symbol.block_type))
                        Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR,
                                           code=code, message=message)
Example #42
 def get_unit(cls, name):
     """
      Returns a single UnitType if the corresponding unit has been predefined.
      :param name: the name of a unit
      :type name: str
      :return: a single UnitType object, or None
     :rtype: UnitType
     """
     if name in cls.name2unit.keys():
         return cls.name2unit[name]
     else:
         code, message = Messages.get_unit_does_not_exist(name)
         Logger.log_message(code=code,
                            message=message,
                            log_level=LoggingLevel.ERROR)
         return None
Example #43
    def generate_neurons(self, neurons):
        # type: (list(ASTNeuron)) -> None
        """
        Analyse a list of neurons, solve them and generate the corresponding code.
        :param neurons: a list of neurons.
        """
        from pynestml.frontend.frontend_configuration import FrontendConfiguration

        for neuron in neurons:
            self.generate_neuron_code(neuron)
            if not Logger.has_errors(neuron):
                code, message = Messages.get_code_generated(
                    neuron.get_name(), FrontendConfiguration.get_target_path())
                Logger.log_message(neuron, code, message,
                                   neuron.get_source_position(),
                                   LoggingLevel.INFO)
Example #44
    def generate_neurons(self, neurons: Sequence[ASTNeuron]):
        """
        Generate code for the given neurons.

        :param neurons: a list of neurons.
        """
        from pynestml.frontend.frontend_configuration import FrontendConfiguration

        for neuron in neurons:
            self.generate_neuron_code(neuron)
            if not Logger.has_errors(neuron):
                code, message = Messages.get_code_generated(
                    neuron.get_name(), FrontendConfiguration.get_target_path())
                Logger.log_message(neuron, code, message,
                                   neuron.get_source_position(),
                                   LoggingLevel.INFO)
Example #45
    def generate_synapses(self, synapses: Sequence[ASTSynapse]) -> None:
        """
        Generates code for a list of synapses.
        :param synapses: a list of synapses.
        """
        from pynestml.frontend.frontend_configuration import FrontendConfiguration

        for synapse in synapses:
            if Logger.logging_level == LoggingLevel.INFO:
                print("Generating code for the synapse {}.".format(
                    synapse.get_name()))
            self.generate_synapse_code(synapse)
            code, message = Messages.get_code_generated(
                synapse.get_name(), FrontendConfiguration.get_target_path())
            Logger.log_message(synapse, code, message,
                               synapse.get_source_position(),
                               LoggingLevel.INFO)
 def visit_function_call(self, node):
     """
     Checks the coco on the current function call.
     :param node: a single function call.
     :type node: ASTFunctionCall
     """
     f_name = node.get_name()
     if f_name == PredefinedFunctions.CONVOLVE:
         for arg in node.get_args():
             if not isinstance(
                     arg, ASTSimpleExpression) or not arg.is_variable():
                 code, message = Messages.get_not_a_variable(str(arg))
                 Logger.log_message(
                     code=code,
                     message=message,
                     error_position=arg.get_source_position(),
                     log_level=LoggingLevel.ERROR)
Example #47
 def visit_input_line(self, node):
     """
     Private method: Used to visit a single input line, create the corresponding symbol and update the scope.
     :param node: a single input line.
     :type node: ast_input_line
     """
     if node.is_spike() and node.has_datatype():
         node.get_datatype().update_scope(node.get_scope())
     elif node.is_spike():
         code, message = Messages.get_buffer_type_not_defined(
             node.get_name())
         Logger.log_message(code=code,
                            message=message,
                            error_position=node.get_source_position(),
                            log_level=LoggingLevel.WARNING)
     for inputType in node.get_input_types():
         inputType.update_scope(node.get_scope())
Example #48
    def visit_unit_type(self, node):
        """
        Visits a single unit type element, checks for correct usage of units and builds the corresponding combined
        unit.
        :param node: a single unit type meta_model.
        :type node: ASTUnitType
        :return: a new type symbol representing this unit type.
        :rtype: type_symbol
        """
        if node.is_simple_unit():
            type_s = PredefinedTypes.get_type(node.unit)
            if type_s is None:
                code, message = Messages.unknown_type(str(node.unit))
                Logger.log_message(None, code, message, node.get_source_position(), LoggingLevel.ERROR)
                return

            node.set_type_symbol(type_s)
            self.symbol = type_s
Example #49
    def do_magnitude_conversion_rhs_to_lhs(_rhs_type_symbol, _lhs_type_symbol,
                                           _containing_expression):
        """
        determine the conversion factor from rhs to lhs, register it with the relevant expression and log a warning
        """
        _containing_expression.set_implicit_conversion_factor(
            UnitTypeSymbol.get_conversion_factor(
                _lhs_type_symbol.astropy_unit, _rhs_type_symbol.astropy_unit))
        _containing_expression.type = _lhs_type_symbol

        code, message = Messages.get_implicit_magnitude_conversion(
            _lhs_type_symbol, _rhs_type_symbol,
            _containing_expression.get_implicit_conversion_factor())
        Logger.log_message(
            code=code,
            message=message,
            error_position=_containing_expression.get_source_position(),
            log_level=LoggingLevel.WARNING)
Example #50
 def get_multiple_receptors(self) -> List[VariableSymbol]:
     """
     Returns a list of all spike input ports which are defined as both inhibitory *and* excitatory at the same time.
     :return: a list of spike input port variable symbols
     """
     ret = list()
     for port in self.get_spike_input_ports():
         if port.is_excitatory() and port.is_inhibitory():
             if port is not None:
                 ret.append(port)
             else:
                 code, message = Messages.get_could_not_resolve(port.get_symbol_name())
                 Logger.log_message(
                     message=message,
                     code=code,
                     error_position=port.get_source_position(),
                     log_level=LoggingLevel.ERROR)
     return ret
Example #51
 def get_multiple_receptors(self):
     """
     Returns a list of all spike buffers which are defined as inhibitory and excitatory.
     :return: a list of spike buffers variable symbols
     :rtype: list(VariableSymbol)
     """
     ret = list()
     for iBuffer in self.get_spike_buffers():
         if iBuffer.is_excitatory() and iBuffer.is_inhibitory():
             if iBuffer is not None:
                 ret.append(iBuffer)
             else:
                 code, message = Messages.get_could_not_resolve(iBuffer.get_symbol_name())
                 Logger.log_message(
                     message=message,
                     code=code,
                     error_position=iBuffer.get_source_position(),
                     log_level=LoggingLevel.ERROR)
     return ret
Example #52
    def handle_compound_assignment(self, node):
        rhs_expr = node.get_expression()
        lhs_variable_symbol = node.get_variable().resolve_in_own_scope()
        rhs_type_symbol = rhs_expr.type

        if lhs_variable_symbol is None:
            code, message = Messages.get_equation_var_not_in_state_block(
                node.get_variable().get_complete_name())
            Logger.log_message(code=code,
                               message=message,
                               error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
            return

        if isinstance(rhs_type_symbol, ErrorTypeSymbol):
            LoggingHelper.drop_missing_type_error(node)
            return

        lhs_type_symbol = lhs_variable_symbol.get_type_symbol()

        if node.is_compound_product:
            if self.__types_do_not_match(lhs_type_symbol,
                                         lhs_type_symbol * rhs_type_symbol):
                TypeCaster.try_to_recover_or_error(
                    lhs_type_symbol, lhs_type_symbol * rhs_type_symbol,
                    node.get_expression())
                return
            return

        if node.is_compound_quotient:
            if self.__types_do_not_match(lhs_type_symbol,
                                         lhs_type_symbol / rhs_type_symbol):
                TypeCaster.try_to_recover_or_error(
                    lhs_type_symbol, lhs_type_symbol / rhs_type_symbol,
                    node.get_expression())
                return
            return

        assert node.is_compound_sum or node.is_compound_minus
        if self.__types_do_not_match(lhs_type_symbol, rhs_type_symbol):
            TypeCaster.try_to_recover_or_error(lhs_type_symbol,
                                               rhs_type_symbol,
                                               node.get_expression())
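The type rule this handler applies to compound operators can be illustrated with plain physical units. The sketch below uses astropy.units only as a stand-in for pynestml's type symbols; it is an illustration, not part of the visitor above.

# Illustration only: astropy units stand in for pynestml type symbols.
import astropy.units as u

lhs_type = u.mV                      # declared type of the left-hand side
rhs_type = u.ms                      # type of the right-hand side expression
product_type = lhs_type * rhs_type   # the type that "lhs *= rhs" would produce
# a compound product is accepted only if the resulting type still matches the declared lhs type
print(product_type == lhs_type)      # False -> the visitor would try to recover or report an error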
Example #53
 def check_co_co(cls, node: ASTNeuron):
     """
     Ensures the coco for the handed over neuron.
     :param node: a single neuron instance.
     """
     if isinstance(node, ASTSynapse):
         return  # XXX: TODO: check that there are no equations other than the ones moved to the neuron (if any)
     equations_defined_visitor = EquationsDefinedVisitor()
     node.accept(equations_defined_visitor)
     integrate_odes_called_visitor = IntegrateOdesCalledVisitor()
     node.accept(integrate_odes_called_visitor)
      if (equations_defined_visitor.equations_defined()
              and not integrate_odes_called_visitor.integrate_odes_called()):
          code, message = Messages.get_equations_defined_but_integrate_odes_not_called()
         Logger.log_message(code=code,
                            message=message,
                            error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
Example #54
 def from_target_name(
         target_name: str,
         options: Optional[Mapping[str, Any]] = None) -> CodeGenerator:
     """Static factory method that returns a new instance of a child class of CodeGenerator"""
      assert target_name.upper() in CodeGenerator.get_known_targets(), \
          "Unknown target platform requested: \"" + str(target_name) + "\""
     if target_name.upper() == "NEST":
         from pynestml.codegeneration.nest_codegenerator import NESTCodeGenerator
         return NESTCodeGenerator(options)
     elif target_name.upper() == "AUTODOC":
         from pynestml.codegeneration.autodoc_codegenerator import AutoDocCodeGenerator
         assert options is None or options == {}, "\"autodoc\" code generator does not support options"
         return AutoDocCodeGenerator()
     elif target_name == "":
         # dummy/null target: user requested to not generate any code
         code, message = Messages.get_no_code_generated()
         Logger.log_message(None, code, message, None, LoggingLevel.INFO)
         return CodeGenerator("", options)
     assert False  # cannot reach here due to earlier assert -- silence static checker warnings
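A usage sketch for the factory above. Only the package pynestml.codegeneration is confirmed by the imports in the snippet; the exact module that holds CodeGenerator is an assumption.

# Sketch only: the module name "codegenerator" is an assumption.
from pynestml.codegeneration.codegenerator import CodeGenerator

nest_gen = CodeGenerator.from_target_name("NEST")     # target matching is case-insensitive
doc_gen = CodeGenerator.from_target_name("autodoc")   # must be called without options
null_gen = CodeGenerator.from_target_name("")         # logs that no code will be generated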
Example #55
    def calcExpectedFunctionNamesForChannels(cls, cm_info):
        variables_procesed = defaultdict()
        
        for ion_channel_name, channel_info in cm_info.items():
            cm_expression = channel_info["ASTInlineExpression"]
            variables = channel_info["inner_variables"]
            variable_names_seen = set()
            
            variables_info = defaultdict()

            for variable_used in variables:
                variable_name = variable_used.name.strip(cls.padding_character)
                if not variable_name.endswith(ion_channel_name):
                    variables_info[variable_name]=defaultdict()
                    variables_info[variable_name]["ASTVariable"] = variable_used
                    variables_info[variable_name]["is_valid"] = False
                    continue
                
                # enforce unique variable names per channel, e.g. n and m, not n and n
                if variable_name in variable_names_seen:
                    code, message = Messages.get_cm_inline_expression_variable_used_mulitple_times(cm_expression, variable_name, ion_channel_name)
                    Logger.log_message(code=code, message=message, error_position=variable_used.get_source_position(), log_level=LoggingLevel.ERROR, node=variable_used)
                    continue
                else:
                    variable_names_seen.add(variable_name)
                
                pure_variable_name = cls.extract_pure_variable_name(variable_name, ion_channel_name)
                expected_inf_function_name = cls.getExpectedInfFunctionName(ion_channel_name, pure_variable_name)
                expected_tau_function_name = cls.getExpectedTauFunctionName(ion_channel_name, pure_variable_name)
                
                variables_info[pure_variable_name]=defaultdict(lambda: defaultdict())
                variables_info[pure_variable_name]["expected_functions"][cls.inf_string] = expected_inf_function_name
                variables_info[pure_variable_name]["expected_functions"][cls.tau_sring] = expected_tau_function_name
                variables_info[pure_variable_name]["ASTVariable"] = variable_used
                variables_info[pure_variable_name]["is_valid"] = True
                
            variables_procesed[ion_channel_name] = copy.copy(variables_info)
            
        for ion_channel_name, variables_info in variables_procesed.items():
            cm_info[ion_channel_name]["inner_variables"] = variables_info
        
        return cm_info
Example #56
 def check_co_co(cls, compilation_unit):
     """
     Checks the coco for the handed over compilation unit.
     :param compilation_unit: a single compilation unit.
     :type compilation_unit: ASTCompilationUnit
     """
     checked = list()  # a list of already checked elements
     for neuronA in compilation_unit.get_neuron_list():
         for neuronB in compilation_unit.get_neuron_list():
              if (neuronA is not neuronB and neuronA.get_name() == neuronB.get_name()
                      and neuronB not in checked):
                  code, message = Messages.get_model_redeclared(neuronB.get_name())
                 Logger.log_message(
                     error_position=neuronB.get_source_position(),
                     code=code,
                     message=message,
                     log_level=LoggingLevel.ERROR)
         checked.append(neuronA)
     return
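The checked list above exists so that each colliding pair is reported only once. The toy sketch below reproduces the same pairwise scan on plain named tuples instead of neuron nodes.

# Toy illustration of the de-duplication scheme; independent of pynestml.
from collections import namedtuple

Model = namedtuple("Model", ["name", "artifact"])
models = [Model("iaf_psc_exp", "a.nestml"),
          Model("iaf_psc_alpha", "b.nestml"),
          Model("iaf_psc_exp", "c.nestml")]
checked = []
for a in models:
    for b in models:
        if a is not b and a.name == b.name and b not in checked:
            print("model redeclared:", b.name, "in", b.artifact)  # printed exactly once per pair
    checked.append(a)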
Example #57
def process():
    # init log dir
    create_report_dir()
    # The handed over parameters seem to be correct, proceed with the main routine
    init_predefined()
    # now proceed to parse all models
    compilation_units = list()
    nestml_files = FrontendConfiguration.get_files()
    if not isinstance(nestml_files, list):
        nestml_files = [nestml_files]
    for nestml_file in nestml_files:
        parsed_unit = ModelParser.parse_model(nestml_file)
        if parsed_unit is not None:
            compilation_units.append(parsed_unit)
    if len(compilation_units) > 0:
        # generate a list of all neurons
        neurons = list()
        for compilationUnit in compilation_units:
            neurons.extend(compilationUnit.get_neuron_list())
        # check if across two files two neurons with same name have been defined
        CoCosManager.check_not_two_neurons_across_units(compilation_units)
        # now exclude those which are broken, i.e. have errors.
        if not FrontendConfiguration.is_dev():
            for neuron in list(neurons):  # iterate over a copy so removing elements below does not skip any
                if Logger.has_errors(neuron):
                    code, message = Messages.get_neuron_contains_errors(
                        neuron.get_name())
                    Logger.log_message(
                        neuron=neuron,
                        code=code,
                        message=message,
                        error_position=neuron.get_source_position(),
                        log_level=LoggingLevel.INFO)
                    neurons.remove(neuron)
        # perform code generation
        _codeGenerator = CodeGenerator(
            target=FrontendConfiguration.get_target())
        _codeGenerator.generate_code(neurons)
    if FrontendConfiguration.store_log:
        store_log_to_file()
    return
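The loop that drops broken neurons iterates over a copy of the list. The short sketch below shows, in plain Python, why removing from the list being iterated would otherwise skip elements.

# Plain-Python illustration of the remove-while-iterating pitfall.
items = ["a", "b", "c"]
for item in items:          # iterating the very list that is mutated
    items.remove(item)      # shifts the remaining elements to the left
print(items)                # ['b'] -- "b" was skipped, not removed

items = ["a", "b", "c"]
for item in list(items):    # iterate over a snapshot instead
    items.remove(item)
print(items)                # []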
Example #58
    def generate_code(self, neurons):
        """
        Generate code for the given neurons and (depending on the target) generate an index page, module entrypoint or
        similar that incorporates an enumeration of all neurons.

        :param neurons: a list of neurons.
        :type neurons: List[ASTNode]
        """
        if self._target.upper() == "NEST":
            from pynestml.codegeneration.nest_codegenerator import NESTCodeGenerator
            _codeGenerator = NESTCodeGenerator()
            _codeGenerator.generate_code(neurons)
        elif self._target.upper() == "AUTODOC":
            from pynestml.codegeneration.autodoc_codegenerator import AutoDocCodeGenerator
            _codeGenerator = AutoDocCodeGenerator()
            _codeGenerator.generate_code(neurons)
        else:
            # dummy/null target: user requested to not generate any code
            assert self._target == ""
            code, message = Messages.get_no_code_generated()
            Logger.log_message(None, code, message, None, LoggingLevel.INFO)
Example #59
 def check_co_co(cls, list_of_compilation_units):
     """
     Checks the coco.
     :param list_of_compilation_units: a list of compilation units.
     :type list_of_compilation_units: list(ASTNestMLCompilationUnit)
     """
     # list_of_nodes = ASTUtils.get_all_nodes(list_of_compilation_units)
     conflicting_nodes = list()
     checked = list()
     for nodeA in list_of_compilation_units:
         for nodeB in list_of_compilation_units:
              if (nodeA is not nodeB and nodeA.get_name() == nodeB.get_name()
                      and nodeB not in checked):
                  code, message = Messages.get_compilation_unit_name_collision(
                      nodeA.get_name(), nodeA.get_artifact_name(),
                      nodeB.get_artifact_name())
                  Logger.log_message(code=code,
                                     message=message,
                                     log_level=LoggingLevel.ERROR)
                  # collect only the units that actually collide
                  conflicting_nodes.append(nodeB)
         checked.append(nodeA)
     return conflicting_nodes
Example #60
    def is_conductance_based(self) -> bool:
        """
        Indicates whether this element is conductance based, based on the physical units of the spike input port. If the unit can be cast to Siemens, the function returns True, otherwise it returns False.

        :return: True if conductance based, otherwise False.
        """
        is_cond_based = self.type_symbol.is_castable_to(
            UnitTypeSymbol(unit=PredefinedUnits.get_unit("S")))
        is_curr_based = self.type_symbol.is_castable_to(
            UnitTypeSymbol(unit=PredefinedUnits.get_unit("A")))
        if is_cond_based == is_curr_based:
            code, message = Messages.get_could_not_determine_cond_based(
                type_str=self.type_symbol.print_nestml_type(), name=self.name)
            Logger.log_message(
                node=None,
                code=code,
                message=message,
                log_level=LoggingLevel.WARNING,
                error_position=ASTSourceLocation.get_added_source_position())
            return False

        return is_cond_based
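The Siemens/Ampere distinction drawn above can be illustrated with plain astropy units standing in for pynestml's UnitTypeSymbol; this is an illustration only, not the method's actual implementation.

# Illustration only: astropy units stand in for pynestml's unit type symbols.
import astropy.units as u

port_unit = u.nS                                      # a conductance-based spike port
print(port_unit.physical_type == u.S.physical_type)   # True  -> conductance based
print(port_unit.physical_type == u.A.physical_type)   # False -> not current based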