Ejemplo n.º 1
0
    def setup_generation_helpers(self, neuron):
        """
        Build the standard Jinja2 template namespace for a single neuron.

        :param neuron: a single neuron instance
        :type neuron: ASTNeuron
        :return: a map from name to functionality.
        :rtype: dict
        """
        # printers: a GSL-flavoured one and the legacy NEST pretty-printer
        gsl_printer = LegacyExpressionPrinter(GSLReferenceConverter())
        nest_printer = NestPrinter(
            LegacyExpressionPrinter(NESTReferenceConverter(False)))

        body = neuron.get_body()
        namespace = {
            'neuronName': neuron.get_name(),
            'neuron': neuron,
            'moduleName': FrontendConfiguration.get_module_name(),
            'printer': nest_printer,
            'assignments': NestAssignmentsHelper(),
            'names': NestNamesConverter(),
            'declarations': NestDeclarationsHelper(),
            'utils': ASTUtils(),
            'idemPrinter': LegacyExpressionPrinter(),
            'outputEvent': nest_printer.print_output_event(body),
            'is_spike_input': ASTUtils.is_spike_input(body),
            'is_current_input': ASTUtils.is_current_input(body),
            'odeTransformer': OdeTransformer(),
            'printerGSL': gsl_printer,
            'now': datetime.datetime.utcnow(),
        }

        self.define_solver_type(neuron, namespace)
        return namespace
Ejemplo n.º 2
0
def setup_generation_helpers(neuron):
    """
    Build the standard Jinja2 template namespace for a single neuron.

    :param neuron: a single neuron instance
    :type neuron: ASTNeuron
    :return: a map from name to functionality.
    :rtype: dict
    """
    # printers: a GSL-flavoured one and the legacy NEST pretty-printer
    gsl_printer = LegacyExpressionPrinter(GSLReferenceConverter())
    nest_printer = NestPrinter(
        LegacyExpressionPrinter(NESTReferenceConverter(False)))

    body = neuron.get_body()
    namespace = {
        'neuronName': neuron.get_name(),
        'neuron': neuron,
        'moduleName': FrontendConfiguration.get_module_name(),
        'printer': nest_printer,
        'assignments': NestAssignmentsHelper(),
        'names': NestNamesConverter(),
        'declarations': NestDeclarationsHelper(),
        'utils': ASTUtils(),
        'idemPrinter': LegacyExpressionPrinter(),
        'outputEvent': nest_printer.print_output_event(body),
        'is_spike_input': ASTUtils.is_spike_input(body),
        'is_current_input': ASTUtils.is_current_input(body),
        'odeTransformer': OdeTransformer(),
        'printerGSL': gsl_printer,
        'now': datetime.datetime.utcnow(),
    }

    define_solver_type(neuron, namespace)
    return namespace
Ejemplo n.º 3
0
 def add_to_initial_values_block(self, declaration):
     # todo by KP: factor me out to utils
     """
     Append the given declaration to the initial values block, creating the
     block first when it does not exist yet.
     :param declaration: a single declaration.
     :type declaration: ast_declaration
     """
     if self.get_initial_blocks() is None:
         ASTUtils.create_initial_values_block(self)
     initial_block = self.get_initial_blocks()
     initial_block.get_declarations().append(declaration)
Ejemplo n.º 4
0
    def endvisit_neuron(self, node):
        """
        Run the post-symbol-table CoCo checks on the neuron, attach each ODE
        to its variable, and clear the logger's current-node marker.
        """
        # several simple properties must be ensured before the checks below run
        CoCosManager.post_symbol_table_builder_checks(
            node, after_ast_rewrite=self.after_ast_rewrite_)

        # update the equations
        equations_block = node.get_equations_blocks()
        if equations_block is not None and equations_block.get_declarations():
            ASTUtils.assign_ode_to_variables(equations_block)

        Logger.set_current_node(None)
Ejemplo n.º 5
0
def add_declaration_to_state_block(neuron: ASTNeuron, variable: str,
                                   initial_value: str) -> ASTNeuron:
    """
    Adds a single declaration to the state block of the neuron. The declared variable is of type real.
    :param neuron: a neuron
    :param variable: state variable to add
    :param initial_value: corresponding initial value
    :return: a modified neuron
    """
    expr_ast = ModelParser.parse_expression(initial_value)
    vec_var = ASTUtils.get_vectorized_variable(expr_ast, neuron.get_scope())

    # build e.g. "x real[n] = <initial_value>" when the variable is vectorized
    size_suffix = ''
    if vec_var is not None and vec_var.has_vector_parameter():
        size_suffix = '[' + vec_var.get_vector_parameter() + ']'
    declaration = ModelParser.parse_declaration(
        variable + ' real' + size_suffix + ' = ' + initial_value)
    if vec_var is not None:
        declaration.set_size_parameter(vec_var.get_vector_parameter())

    neuron.add_to_state_block(declaration)
    declaration.update_scope(neuron.get_state_blocks().get_scope())

    # register the new symbol in the symbol table under the STATE block type
    visitor = ASTSymbolTableVisitor()
    visitor.block_type_stack.push(BlockType.STATE)
    declaration.accept(visitor)
    visitor.block_type_stack.pop()

    return neuron
Ejemplo n.º 6
0
    def setup_model_generation_helpers(self, neuron: ASTNeuron):
        """
        Returns a namespace for the Jinja2 neuron model documentation template.

        :param neuron: a single neuron instance
        :type neuron: ASTNeuron
        :return: a map from name to functionality.
        :rtype: dict
        """
        latex_printer = LatexExpressionPrinter(LatexReferenceConverter())

        namespace = {
            'now': datetime.datetime.utcnow(),
            'neuron': neuron,
            'neuronName': str(neuron.get_name()),
            'printer': NestPrinter(latex_printer),
            'assignments': NestAssignmentsHelper(),
            'names': NestNamesConverter(),
            'declarations': NestDeclarationsHelper(),
            'utils': ASTUtils(),
            'odeTransformer': OdeTransformer(),
        }

        # temporarily strip the pre-comments so they do not appear in the
        # rendered source listing, then restore them afterwards
        import textwrap
        saved_comments = neuron.pre_comments
        neuron.pre_comments = []
        namespace['neuron_source_code'] = textwrap.indent(str(neuron), "   ")
        neuron.pre_comments = saved_comments

        return namespace
Ejemplo n.º 7
0
def add_declaration_to_internals(neuron: ASTNeuron, variable_name: str,
                                 init_expression: str) -> ASTNeuron:
    """
    Adds the variable as stored in the declaration tuple to the neuron. The declared variable is of type real.
    :param neuron: a single neuron instance
    :param variable_name: the name of the variable to add
    :param init_expression: initialization expression
    :return: the neuron extended by the variable
    """
    expr_ast = ModelParser.parse_expression(init_expression)
    vec_var = ASTUtils.get_vectorized_variable(expr_ast, neuron.get_scope())

    # build e.g. "x real[n] = <init_expression>" when the variable is vectorized
    size_suffix = ''
    if vec_var is not None and vec_var.has_vector_parameter():
        size_suffix = '[' + vec_var.get_vector_parameter() + ']'
    declaration = ModelParser.parse_declaration(
        variable_name + ' real' + size_suffix + ' = ' + init_expression)
    if vec_var is not None:
        declaration.set_size_parameter(vec_var.get_vector_parameter())

    neuron.add_to_internal_block(declaration)
    declaration.update_scope(neuron.get_internals_blocks().get_scope())

    # register the new symbol in the symbol table under the INTERNALS block type
    visitor = ASTSymbolTableVisitor()
    visitor.block_type_stack.push(BlockType.INTERNALS)
    declaration.accept(visitor)
    visitor.block_type_stack.pop()
    return neuron
Ejemplo n.º 8
0
def replace_integrate_call(neuron, update_instructions):
    # type: (...) -> ASTNeuron
    """
    Replaces all integrate calls to the corresponding references to propagation.
    :param neuron: a single neuron instance
    :return: The neuron without an integrate calls. The function calls are replaced through an
             incremental exact solution,
    """
    integrate_call = ASTUtils.get_function_call(
        neuron.get_update_blocks(), PredefinedFunctions.INTEGRATE_ODES)
    # by construction of a valid neuron, only a single integrate call should be there
    if isinstance(integrate_call, list):
        integrate_call = integrate_call[0]
    if integrate_call is None:
        return neuron

    small_stmt = neuron.get_parent(integrate_call)
    assert small_stmt is not None and isinstance(small_stmt, ASTSmallStmt)

    enclosing_block = neuron.get_parent(neuron.get_parent(small_stmt))
    assert enclosing_block is not None and isinstance(enclosing_block, ASTBlock)

    stmts = enclosing_block.get_stmts()
    target = neuron.get_parent(small_stmt)
    for idx, stmt in enumerate(stmts):
        if stmt.equals(target):
            # swap the integrate_odes() statement for the propagation steps
            stmts[idx:idx + 1] = [ModelParser.parse_stmt(prop)
                                  for prop in update_instructions]
            break
    return neuron
Ejemplo n.º 9
0
def add_declaration_to_internals(neuron, variable_name, init_expression):
    # type: (ASTNeuron,  str, str) -> ASTNeuron
    """
    Adds the variable as stored in the declaration tuple to the neuron. The
    declared variable is of type real.
    :param neuron: a single neuron instance
    :param variable_name: the name of the variable to add
    :param init_expression: initialization expression
    :return: the neuron extended by the variable
    :raises RuntimeError: if parsing or adding the declaration fails
    """
    try:
        tmp = ModelParser.parse_expression(init_expression)
        vector_variable = ASTUtils.get_vectorized_variable(
            tmp, neuron.get_scope())

        declaration_string = variable_name + ' real' + (
            '[' + vector_variable.get_vector_parameter() +
            ']' if vector_variable is not None
            and vector_variable.has_vector_parameter() else
            '') + ' = ' + init_expression
        ast_declaration = ModelParser.parse_declaration(declaration_string)
        if vector_variable is not None:
            ast_declaration.set_size_parameter(
                vector_variable.get_vector_parameter())
        neuron.add_to_internal_block(ast_declaration)
        return neuron
    except Exception as err:
        # a bare ``except:`` would also swallow KeyboardInterrupt/SystemExit
        # and hide the original cause; narrow the clause and chain the error
        raise RuntimeError('Must not fail by construction.') from err
 def print_function_call(self, function_call):
     # type: (ASTFunctionCall) -> str
     """Render a function call, filling in the argument list when required."""
     call_format = self.reference_converter.convert_function_call(function_call)
     if not ASTUtils.needs_arguments(function_call):
         return call_format
     return call_format % self.print_function_call_argument_list(function_call)
Ejemplo n.º 11
0
    def print_function_call(self, function_call, prefix=''):
        """Print a function call, including the bracketed argument list.

        Parameters
        ----------
        node : ASTFunctionCall
            The function call node to print.
        prefix : str
            Optional string prefixed to the function call, e.g. "node." or
            "node->" to refer to a call on the object "node".

            Predefined functions will not be prefixed.

        Returns
        -------
        s : str
            The function call string.
        """
        function_name = self.reference_converter.convert_function_call(
            function_call, prefix=prefix)
        if not ASTUtils.needs_arguments(function_call):
            return function_name

        # print()/println() take a single converted statement instead of a
        # plain argument list
        if function_call.get_name() in (PredefinedFunctions.PRINT,
                                        PredefinedFunctions.PRINTLN):
            return function_name.format(
                self.reference_converter.convert_print_statement(function_call))

        arguments = self.print_function_call_argument_list(function_call,
                                                           prefix=prefix)
        return function_name.format(*arguments)
Ejemplo n.º 12
0
def add_declaration_to_initial_values(neuron, variable, initial_value):
    # type: (ASTNeuron, str, str) -> ASTNeuron
    """
    Adds a single declaration to the initial values block of the neuron.
    :param neuron: a neuron
    :param variable: state variable to add
    :param initial_value: corresponding initial value
    :return: a modified neuron
    :raises RuntimeError: if parsing or adding the declaration fails
    """
    try:
        tmp = ModelParser.parse_expression(initial_value)
        vector_variable = ASTUtils.get_vectorized_variable(
            tmp, neuron.get_scope())
        declaration_string = variable + ' real' + (
            '[' + vector_variable.get_vector_parameter() +
            ']' if vector_variable is not None
            and vector_variable.has_vector_parameter() else
            '') + ' = ' + initial_value
        ast_declaration = ModelParser.parse_declaration(declaration_string)
        if vector_variable is not None:
            ast_declaration.set_size_parameter(
                vector_variable.get_vector_parameter())
        neuron.add_to_initial_values_block(ast_declaration)
        return neuron
    except Exception as err:
        # a bare ``except:`` would also swallow KeyboardInterrupt/SystemExit
        # and hide the original cause; narrow the clause and chain the error
        raise RuntimeError('Must not fail by construction.') from err
Ejemplo n.º 13
0
 def print_function_call(self, function_call):
     # type: (ASTFunctionCall) -> str
     """Render a function call, filling in the argument list when required."""
     call_format = self.reference_converter.convert_function_call(function_call)
     if not ASTUtils.needs_arguments(function_call):
         return call_format
     return call_format % self.print_function_call_arguments(function_call)
Ejemplo n.º 14
0
    def setup_index_generation_helpers(self, neurons: List[ASTNeuron]):
        """
        Returns a namespace for the Jinja2 neuron model index page template.

        :param neurons: a list of neuron instances
        :type neurons: List[ASTNeuron]
        :return: a map from name to functionality.
        :rtype: dict
        """
        latex_printer = LatexExpressionPrinter(LatexReferenceConverter())

        namespace = {
            'now': datetime.datetime.utcnow(),
            'neurons': neurons,
            'neuronNames': [str(n.get_name()) for n in neurons],
            'printer': NestPrinter(latex_printer),
            'assignments': NestAssignmentsHelper(),
            'names': NestNamesConverter(),
            'declarations': NestDeclarationsHelper(),
            'utils': ASTUtils(),
            'odeTransformer': OdeTransformer(),
        }

        return namespace
Ejemplo n.º 15
0
def replace_integrate_call(neuron, update_instructions):
    # type: (...) -> ASTNeuron
    """
    Replaces all integrate calls to the corresponding references to propagation.
    :param neuron: a single neuron instance
    :return: The neuron without an integrate calls. The function calls are replaced through an
             incremental exact solution,
    """
    integrate_call = ASTUtils.get_function_call(
        neuron.get_update_blocks(), PredefinedFunctions.INTEGRATE_ODES)
    # by construction of a valid neuron, only a single integrate call should be there
    if isinstance(integrate_call, list):
        integrate_call = integrate_call[0]
    if integrate_call is None:
        return neuron

    small_stmt = neuron.get_parent(integrate_call)
    assert small_stmt is not None and isinstance(small_stmt, ASTSmallStmt)

    enclosing_block = neuron.get_parent(neuron.get_parent(small_stmt))
    assert enclosing_block is not None and isinstance(enclosing_block, ASTBlock)

    statements = enclosing_block.get_stmts()
    target = neuron.get_parent(small_stmt)
    for position, statement in enumerate(statements):
        if statement.equals(target):
            # swap the integrate_odes() statement for the propagation steps
            statements[position:position + 1] = [
                ModelParser.parse_stmt(prop) for prop in update_instructions]
            break
    return neuron
Ejemplo n.º 16
0
    def visit_kernel(self, node):
        """
        Checks the coco on the current node.
        :param node: AST kernel object
        :type node: ASTKernel
        """
        for var, expr in zip(node.variables, node.expressions):
            # check kernel type
            # a kernel of differential order 0 must be plain numeric
            # (integer/real); one of order n must be castable to ms**-n
            if (var.get_differential_order() == 0
                and not type(expr.type) in [IntegerTypeSymbol, RealTypeSymbol]) \
                or (var.get_differential_order() > 0
                    and not expr.type.is_castable_to(PredefinedTypes.get_type("ms")**-var.get_differential_order())):
                actual_type_str = str(expr.type)
                # prefer the physical unit's name in the error message when one
                # is attached to the expression type
                if 'unit' in dir(expr.type) \
                        and expr.type.unit is not None \
                        and expr.type.unit.unit is not None:
                    actual_type_str = str(expr.type.unit.unit)
                code, message = Messages.get_kernel_wrong_type(
                    var.get_name(), var.get_differential_order(),
                    actual_type_str)
                Logger.log_message(error_position=node.get_source_position(),
                                   log_level=LoggingLevel.ERROR,
                                   code=code,
                                   message=message)

            # check types of the state variables
            for order in range(var.get_differential_order()):
                # name of the initial-value variable for this order, e.g. g, g'
                iv_name = var.get_name() + order * "'"
                decl = ASTUtils.get_declaration_by_name(
                    self._neuron.get_state_blocks(), iv_name)
                if decl is None:
                    # missing initial value: report and continue with next order
                    code, message = Messages.get_variable_not_defined(iv_name)
                    Logger.log_message(
                        node=self._neuron,
                        code=code,
                        message=message,
                        log_level=LoggingLevel.ERROR,
                        error_position=node.get_source_position())
                    continue
                assert len(
                    self._neuron.get_state_blocks().get_declarations()
                    [0].get_variables()
                ) == 1, "Only single variables are supported as targets of an assignment."
                iv = decl.get_variables()[0]
                # each initial value of order n must be castable to ms**-n
                if not iv.get_type_symbol().get_value().is_castable_to(
                        PredefinedTypes.get_type("ms")**-order):
                    actual_type_str = DebugTypeConverter.convert(
                        iv.get_type_symbol())
                    expected_type_str = "s^-" + str(order)
                    code, message = Messages.get_kernel_iv_wrong_type(
                        iv_name, actual_type_str, expected_type_str)
                    Logger.log_message(
                        error_position=node.get_source_position(),
                        log_level=LoggingLevel.ERROR,
                        code=code,
                        message=message)
Ejemplo n.º 17
0
    def setup_generation_helpers(self, neuron):
        """
        Build the standard Jinja2 template namespace for a single neuron.

        :param neuron: a single neuron instance
        :type neuron: ASTNeuron
        :return: a map from name to functionality.
        :rtype: dict
        """
        # printers: a GSL-flavoured one and the legacy NEST pretty-printer
        gsl_printer = LegacyExpressionPrinter(GSLReferenceConverter())
        nest_printer = NestPrinter(
            LegacyExpressionPrinter(NESTReferenceConverter(False)))

        body = neuron.get_body()
        namespace = {
            'neuronName': neuron.get_name(),
            'neuron': neuron,
            'moduleName': FrontendConfiguration.get_module_name(),
            'printer': nest_printer,
            'assignments': NestAssignmentsHelper(),
            'names': NestNamesConverter(),
            'declarations': NestDeclarationsHelper(),
            'utils': ASTUtils(),
            'idemPrinter': LegacyExpressionPrinter(),
            'outputEvent': nest_printer.print_output_event(body),
            'is_spike_input': ASTUtils.is_spike_input(body),
            'is_current_input': ASTUtils.is_current_input(body),
            'odeTransformer': OdeTransformer(),
            'printerGSL': gsl_printer,
            'now': datetime.datetime.utcnow(),
            'tracing': FrontendConfiguration.is_dev,
            'PredefinedUnits': pynestml.symbols.predefined_units.PredefinedUnits,
            'UnitTypeSymbol': pynestml.symbols.unit_type_symbol.UnitTypeSymbol,
        }

        # detect whether a normally-distributed RNG is used anywhere in the model
        rng_visitor = ASTRandomNumberGeneratorVisitor()
        neuron.accept(rng_visitor)
        namespace['norm_rng'] = rng_visitor._norm_rng_is_used

        self.define_solver_type(neuron, namespace)
        return namespace
Ejemplo n.º 18
0
 def add_to_internal_block(self, declaration, index=-1):
     """
     Adds the handed over declaration the internal block
     :param declaration: a single declaration
     :type declaration: ast_declaration
     :param index: insertion position within the block's declaration list;
         the default -1 appends at the end
     :type index: int
     """
     # create the internals block on demand
     if self.get_internals_blocks() is None:
         ASTUtils.create_internal_block(self)
     n_declarations = len(self.get_internals_blocks().get_declarations())
     if n_declarations == 0:
         index = 0
     else:
         # NOTE(review): maps -1 to end-of-list (1 + (n-1) == n), but index 0
         # yields insertion position 1, i.e. never before the first existing
         # declaration -- confirm this off-by-one is intentional
         index = 1 + (index % len(self.get_internals_blocks().get_declarations()))
     self.get_internals_blocks().get_declarations().insert(index, declaration)
     declaration.update_scope(self.get_internals_blocks().get_scope())
     # register the new declaration's symbols under the INTERNALS block type
     from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor
     symtable_vistor = ASTSymbolTableVisitor()
     symtable_vistor.block_type_stack.push(BlockType.INTERNALS)
     declaration.accept(symtable_vistor)
     symtable_vistor.block_type_stack.pop()
Ejemplo n.º 19
0
    def add_to_state_block(self, declaration):
        """
        Append the handed over declaration to the state block, creating the
        block beforehand if necessary, and register it in the symbol table.
        :param declaration: a single declaration.
        :type declaration: ast_declaration
        """
        from pynestml.utils.ast_utils import ASTUtils
        if self.get_state_blocks() is None:
            ASTUtils.create_state_block(self)
        state_block = self.get_state_blocks()
        state_block.get_declarations().append(declaration)
        declaration.update_scope(state_block.get_scope())

        # register the new declaration's symbols under the STATE block type
        from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor
        visitor = ASTSymbolTableVisitor()
        visitor.block_type_stack.push(BlockType.STATE)
        declaration.accept(visitor)
        visitor.block_type_stack.pop()

        # the freshly added symbol must now resolve from both scopes
        from pynestml.symbols.symbol import SymbolKind
        first_var = declaration.get_variables()[0]
        assert first_var.get_scope().resolve_to_symbol(
            first_var.get_name(), SymbolKind.VARIABLE) is not None
        assert declaration.get_scope().resolve_to_symbol(
            first_var.get_name(), SymbolKind.VARIABLE) is not None
Ejemplo n.º 20
0
 def convert_function_call(self, function_call):
     """
     Returns the call name as a format string: ``name(%s)`` when the call
     takes arguments, ``name()`` otherwise.
     :param function_call: a function call
     :type function_call: ASTFunctionCall
     :return: the same sting back
     :rtype: str
     """
     suffix = '(%s)' if ASTUtils.needs_arguments(function_call) else '()'
     return function_call.get_name() + suffix
Ejemplo n.º 21
0
 def convert_function_call(self, function_call):
     """
     Returns the call name as a format string with one ``%s`` placeholder per
     argument, e.g. ``name(%s, %s)``; ``name()`` for argument-less calls.
     :param function_call: a function call
     :type function_call: ASTFunctionCall
     :return: the same sting back
     :rtype: str
     """
     name = function_call.get_name()
     if not ASTUtils.needs_arguments(function_call):
         return name + '()'
     placeholders = ['%s'] * len(function_call.get_args())
     return name + '(' + ', '.join(placeholders) + ')'
 def convert_function_call(self, function_call):
     """
     Returns the call name as a format string with one ``%s`` placeholder per
     argument, e.g. ``name(%s, %s)``; ``name()`` for argument-less calls.
     :param function_call: a function call
     :type function_call: ASTFunctionCall
     :return: the same sting back
     :rtype: str
     """
     if not ASTUtils.needs_arguments(function_call):
         return function_call.get_name() + '()'
     arg_count = len(function_call.get_args())
     return '%s(%s)' % (function_call.get_name(),
                        ', '.join(['%s'] * arg_count))
 def check_co_co(cls, list_of_compilation_units):
     """
     Checks the coco: neuron names must be unique across all compilation units.
     :param list_of_compilation_units: a list of compilation units.
     :type list_of_compilation_units: list(ASTNestMLCompilationUnit)
     :return: the neurons that participate in a name collision
     """
     list_of_neurons = ASTUtils.get_all_neurons(list_of_compilation_units)
     conflicting_neurons = list()
     for neuronA in list_of_neurons:
         for neuronB in list_of_neurons:
             if neuronA is not neuronB and neuronA.get_name() == neuronB.get_name():
                 code, message = Messages.get_compilation_unit_name_collision(neuronA.get_name(),
                                                                              neuronA.get_artifact_name(),
                                                                              neuronB.get_artifact_name())
                 Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR)
                 # bug fix: only record neurons that actually collide; the
                 # append was previously outside the ``if`` and therefore
                 # added every neuron on every inner iteration (the dead
                 # ``checked`` list has been removed as well)
                 conflicting_neurons.append(neuronB)
     return conflicting_neurons
Ejemplo n.º 24
0
def add_declaration_to_initial_values(neuron, variable, initial_value):
    # type: (ASTNeuron, str, str) -> ASTNeuron
    """
    Adds a single declaration to the initial values block of the neuron.
    :param neuron: a neuron
    :param variable: state variable to add
    :param initial_value: corresponding initial value
    :return: a modified neuron
    """
    expr_ast = ModelParser.parse_expression(initial_value)
    vec_var = ASTUtils.get_vectorized_variable(expr_ast, neuron.get_scope())

    # build e.g. "x real[n] = <initial_value>" when the variable is vectorized
    size_suffix = ''
    if vec_var is not None and vec_var.has_vector_parameter():
        size_suffix = '[' + vec_var.get_vector_parameter() + ']'
    declaration = ModelParser.parse_declaration(
        variable + ' real' + size_suffix + ' = ' + initial_value)
    if vec_var is not None:
        declaration.set_size_parameter(vec_var.get_vector_parameter())
    neuron.add_to_initial_values_block(declaration)
    return neuron
Ejemplo n.º 25
0
def add_declaration_to_internals(neuron, variable_name, init_expression):
    # type: (ASTNeuron,  str, str) -> ASTNeuron
    """
    Adds the variable as stored in the declaration tuple to the neuron.
    :param neuron: a single neuron instance
    :param variable_name: the name of the variable to add
    :param init_expression: initialization expression
    :return: the neuron extended by the variable
    """
    parsed_rhs = ModelParser.parse_expression(init_expression)
    vec_var = ASTUtils.get_vectorized_variable(parsed_rhs, neuron.get_scope())

    # append a size specifier ("[n]") only when the expression refers to a vector variable
    size_spec = ''
    if vec_var is not None and vec_var.has_vector_parameter():
        size_spec = '[' + vec_var.get_vector_parameter() + ']'
    declaration = ModelParser.parse_declaration(
        variable_name + ' real' + size_spec + ' = ' + init_expression)
    if vec_var is not None:
        declaration.set_size_parameter(vec_var.get_vector_parameter())
    neuron.add_to_internal_block(declaration)
    return neuron
 def check_co_co(cls, list_of_compilation_units):
     """
     Checks the coco: no two neurons across the given compilation units may
     share the same name.
     :param list_of_compilation_units: a list of compilation units.
     :type list_of_compilation_units: list(ASTNestMLCompilationUnit)
     :return: the list of neurons whose name collides with another neuron's name.
     """
     list_of_neurons = ASTUtils.get_all_neurons(list_of_compilation_units)
     conflicting_neurons = list()
     for neuronA in list_of_neurons:
         for neuronB in list_of_neurons:
             if neuronA is not neuronB and neuronA.get_name() == neuronB.get_name():
                 code, message = Messages.get_compilation_unit_name_collision(
                     neuronA.get_name(), neuronA.get_artifact_name(),
                     neuronB.get_artifact_name())
                 Logger.log_message(code=code,
                                    message=message,
                                    log_level=LoggingLevel.ERROR)
                 # bug fix: only record neurons that actually collide;
                 # previously every neuronB was appended unconditionally,
                 # so the result contained all neurons regardless of conflicts
                 conflicting_neurons.append(neuronB)
     return conflicting_neurons
Ejemplo n.º 27
0
    def convert_function_call(self, function_call, prefix=''):
        """Return the function call in NESTML syntax.

        Parameters
        ----------
        function_call : ASTFunctionCall
            The function call node to convert.
        prefix : str
            The prefix argument is not relevant for rendering NESTML syntax and will be ignored.

        Returns
        -------
        s : str
            The function call string in NESTML syntax.
        """
        name = function_call.get_name()
        # emit one "{!s}" placeholder per argument; argument-less calls get "()"
        if not ASTUtils.needs_arguments(function_call):
            return name + '()'
        placeholders = ['{!s}'] * len(function_call.get_args())
        return name + '(' + ', '.join(placeholders) + ')'
Ejemplo n.º 28
0
 def convert_function_call(cls, function_call):
     """
     Converts a single handed over function call to nest processable format.
     :param function_call: a single function call
     :type function_call:  ASTFunctionCall
     :return: a string representation
     :rtype: str
     """
     name = function_call.get_name()
     # functions with a fixed C++ rendering; values are printf-style templates
     fixed_forms = {
         'and': '&&',
         'or': '||',
         'resolution': 'nest::Time::get_resolution().get_ms()',
         'steps': 'nest::Time(nest::Time::ms((double) %s)).get_steps()',
         'expm1': 'numerics::expm1(%s)',
         PredefinedFunctions.POW: 'std::pow(%s, %s)',
         PredefinedFunctions.MAX: 'std::max(%s, %s)',
         PredefinedFunctions.BOUNDED_MAX: 'std::max(%s, %s)',
         PredefinedFunctions.MIN: 'std::min(%s, %s)',
         PredefinedFunctions.BOUNDED_MIN: 'std::min(%s, %s)',
         PredefinedFunctions.EXP: 'std::exp(%s)',
         PredefinedFunctions.LOG: 'std::log(%s)',
         PredefinedFunctions.EMIT_SPIKE:
             'set_spiketime(nest::Time::step(origin.get_steps()+lag+1));\n'
             'nest::SpikeEvent se;\n'
             'nest::kernel().event_delivery_manager.send(*this, se, lag)',
     }
     if name in fixed_forms:
         return fixed_forms[name]
     # generic user-defined function: one "%s" placeholder per argument
     if ASTUtils.needs_arguments(function_call):
         placeholders = ['%s'] * len(function_call.get_args())
         return name + '(' + ', '.join(placeholders) + ')'
     return name + '()'
Ejemplo n.º 29
0
    def visit_assignment(self, node):
        """
        Check that the variable on the left-hand side of an assignment resolves
        to a known symbol; log an error if it does not.

        :param node: an assignment node providing ``get_variable()`` and scope access
        """
        symbol = node.get_scope().resolve_to_symbol(
            node.get_variable().get_complete_name(), SymbolKind.VARIABLE)
        if symbol is None:
            if self.after_ast_rewrite:  # after ODE-toolbox transformations, convolutions are replaced by state variables, so cannot perform this check properly
                # retry by plain name (complete name includes differential order)
                symbol = node.get_scope().resolve_to_symbol(
                    node.get_variable().get_name(), SymbolKind.VARIABLE)
                if symbol is not None:
                    # an inline expression defining this variable name (ignoring differential order) exists
                    if "__X__" in str(
                            symbol
                    ):  # if this variable was the result of a convolution...
                        return
            else:
                # for kernels, also allow derivatives of that kernel to appear
                if self.neuron.get_equations_block() is not None:
                    for inline_expr in self.neuron.get_equations_block(
                    ).get_inline_expressions():
                        if node.get_variable().get_name(
                        ) == inline_expr.variable_name:
                            # local import, presumably to avoid a circular module dependency -- TODO confirm
                            from pynestml.utils.ast_utils import ASTUtils
                            if ASTUtils.inline_aliases_convolution(
                                    inline_expr):
                                symbol = node.get_scope().resolve_to_symbol(
                                    node.get_variable().get_name(),
                                    SymbolKind.VARIABLE)
                                if symbol is not None:
                                    # actually, no problem detected, skip error
                                    # XXX: TODO: check that differential order is less than or equal to that of the kernel
                                    return

            # no symbol (and no acceptable fallback) found: report the undefined variable
            code, message = Messages.get_variable_not_defined(
                node.get_variable().get_complete_name())
            Logger.log_message(code=code,
                               message=message,
                               error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR,
                               node=self.neuron)
Ejemplo n.º 30
0
    def convert_function_call(self, function_call):
        """
        Convert function call.

        :param function_call: a function call
        :type function_call: ASTFunctionCall
        :return: pretty-printed format string
        :rtype: str
        """
        name = function_call.get_name()

        # replace known function names with their LaTeX macro; the lookarounds
        # restrict the substitution to "whole word" matches
        latex_macros = {"convolve": r"\\text{convolve}"}
        for needle, macro in latex_macros.items():
            name = re.sub(r"(?<![a-zA-Z])(" + needle + ")(?![a-zA-Z])",
                          macro, name)

        # one "%s" placeholder per argument; argument-less calls get "()"
        if not ASTUtils.needs_arguments(function_call):
            return name + '()'
        args = ', '.join(['%s'] * len(function_call.get_args()))
        return name + '(' + args + ')'
    def convert_function_call(cls, function_call, prefix=''):
        """
        Converts a single handed over function call to C++ NEST API syntax.

        Parameters
        ----------
        function_call : ASTFunctionCall
            The function call node to convert.
        prefix : str
            Optional string that will be prefixed to the function call. For example, to refer to a function call in the class "node", use a prefix equal to "node." or "node->".

            Predefined functions will not be prefixed.

        Returns
        -------
        s : str
            The function call string in C++ syntax.
        """
        function_name = function_call.get_name()

        # functions with a fixed C++ rendering; values are printf-style templates
        special_forms = {
            'and': '&&',
            'or': '||',
            PredefinedFunctions.TIME_RESOLUTION: 'nest::Time::get_resolution().get_ms()',
            PredefinedFunctions.TIME_STEPS: 'nest::Time(nest::Time::ms((double) %s)).get_steps()',
            PredefinedFunctions.POW: 'std::pow(%s, %s)',
            PredefinedFunctions.MAX: 'std::max(%s, %s)',
            PredefinedFunctions.BOUNDED_MAX: 'std::max(%s, %s)',
            PredefinedFunctions.MIN: 'std::min(%s, %s)',
            PredefinedFunctions.BOUNDED_MIN: 'std::min(%s, %s)',
            PredefinedFunctions.EXP: 'std::exp(%s)',
            PredefinedFunctions.LOG: 'std::log(%s)',
            PredefinedFunctions.EXPM1: 'numerics::expm1(%s)',
            PredefinedFunctions.EMIT_SPIKE:
                'set_spiketime(nest::Time::step(origin.get_steps()+lag+1));\n'
                'nest::SpikeEvent se;\n'
                'nest::kernel().event_delivery_manager.send(*this, se, lag)',
        }
        if function_name in special_forms:
            return special_forms[function_name]

        # suppress prefix for misc. predefined functions; identified purely
        # based on the name, as we don't have access to the function symbol here
        if PredefinedFunctions.get_function(function_name):
            prefix = ''

        # generic function call: one "%s" placeholder per argument
        if not ASTUtils.needs_arguments(function_call):
            return prefix + function_name + '()'
        placeholders = ['%s'] * len(function_call.get_args())
        return prefix + function_name + '(' + ', '.join(placeholders) + ')'
Ejemplo n.º 32
0
    def convert_function_call(cls, function_call, prefix=''):
        """
        Converts a single handed over function call to C++ NEST API syntax.

        Parameters
        ----------
        function_call : ASTFunctionCall
            The function call node to convert.
        prefix : str
            Optional string that will be prefixed to the function call. For example, to refer to a function call in the class "node", use a prefix equal to "node." or "node->".

            Predefined functions will not be prefixed.

        Returns
        -------
        s : str
            The function call string in C++ syntax.
        """
        function_name = function_call.get_name()

        # functions with a fixed C++ rendering; values are str.format templates.
        # Note that the arguments of clip must be swapped and are therefore
        # [v_max, v_min, v], hence the template's index structure.
        special_forms = {
            'and': '&&',
            'or': '||',
            PredefinedFunctions.TIME_RESOLUTION: 'nest::Time::get_resolution().get_ms()',
            PredefinedFunctions.TIME_STEPS: 'nest::Time(nest::Time::ms((double) ({!s}))).get_steps()',
            PredefinedFunctions.CLIP: 'std::min({2!s}, std::max({1!s}, {0!s}))',
            PredefinedFunctions.MAX: 'std::max({!s}, {!s})',
            PredefinedFunctions.MIN: 'std::min({!s}, {!s})',
            PredefinedFunctions.EXP: 'std::exp({!s})',
            PredefinedFunctions.LN: 'std::log({!s})',
            PredefinedFunctions.LOG10: 'std::log10({!s})',
            PredefinedFunctions.COSH: 'std::cosh({!s})',
            PredefinedFunctions.SINH: 'std::sinh({!s})',
            PredefinedFunctions.TANH: 'std::tanh({!s})',
            PredefinedFunctions.EXPM1: 'numerics::expm1({!s})',
            PredefinedFunctions.RANDOM_NORMAL:
                '(({!s}) + ({!s}) * ' + prefix + 'normal_dev_( nest::kernel().rng_manager.get_rng( ' + prefix + 'get_thread() ) ))',
            PredefinedFunctions.RANDOM_UNIFORM:
                '(({!s}) + ({!s}) * nest::kernel().rng_manager.get_rng( ' + prefix + 'get_thread() )->drand())',
            PredefinedFunctions.EMIT_SPIKE:
                'set_spiketime(nest::Time::step(origin.get_steps()+lag+1));\n'
                'nest::SpikeEvent se;\n'
                'nest::kernel().event_delivery_manager.send(*this, se, lag)',
        }
        if function_name in special_forms:
            return special_forms[function_name]

        # suppress prefix for misc. predefined functions; identified purely
        # based on the name, as we don't have access to the function symbol here
        if PredefinedFunctions.get_function(function_name):
            prefix = ''

        # generic function call: one "{!s}" placeholder per argument
        if not ASTUtils.needs_arguments(function_call):
            return prefix + function_name + '()'
        placeholders = ['{!s}'] * len(function_call.get_args())
        return prefix + function_name + '(' + ', '.join(placeholders) + ')'
Ejemplo n.º 33
0
    def check_co_co(cls, node: ASTNeuron, after_ast_rewrite: bool = False):
        """
        Checks if this coco applies for the handed over neuron. Models which contain undefined variables are not correct.
        :param node: a single neuron instance.
        :param after_ast_rewrite: indicates whether this coco is checked after the code generator has done rewriting of the abstract syntax tree. If True, checks are not as rigorous. Use False where possible.
        """
        # for each variable in all expressions, check if the variable has been defined previously
        expression_collector_visitor = ASTExpressionCollectorVisitor()
        node.accept(expression_collector_visitor)
        expressions = expression_collector_visitor.ret
        for expr in expressions:
            # a bare ASTVariable carries exactly one variable; any other
            # expression node may reference several
            if isinstance(expr, ASTVariable):
                vars = [expr]
            else:
                vars = expr.get_variables()

            for var in vars:
                symbol = var.get_scope().resolve_to_symbol(
                    var.get_complete_name(), SymbolKind.VARIABLE)
                # this part is required to check that we handle invariants differently
                expr_par = node.get_parent(expr)

                # test if the symbol has been defined at least
                if symbol is None:
                    if after_ast_rewrite:  # after ODE-toolbox transformations, convolutions are replaced by state variables, so cannot perform this check properly
                        # retry by plain name (complete name includes differential order)
                        symbol2 = node.get_scope().resolve_to_symbol(
                            var.get_name(), SymbolKind.VARIABLE)
                        if symbol2 is not None:
                            # an inline expression defining this variable name (ignoring differential order) exists
                            if "__X__" in str(
                                    symbol2
                            ):  # if this variable was the result of a convolution...
                                continue
                    else:
                        # for kernels, also allow derivatives of that kernel to appear
                        if node.get_equations_block() is not None:
                            inline_expr_names = [
                                inline_expr.variable_name
                                for inline_expr in node.get_equations_block().
                                get_inline_expressions()
                            ]
                            if var.get_name() in inline_expr_names:
                                inline_expr_idx = inline_expr_names.index(
                                    var.get_name())
                                inline_expr = node.get_equations_block(
                                ).get_inline_expressions()[inline_expr_idx]
                                # local import, presumably to avoid a circular module dependency -- TODO confirm
                                from pynestml.utils.ast_utils import ASTUtils
                                if ASTUtils.inline_aliases_convolution(
                                        inline_expr):
                                    symbol2 = node.get_scope(
                                    ).resolve_to_symbol(
                                        var.get_name(), SymbolKind.VARIABLE)
                                    if symbol2 is not None:
                                        # actually, no problem detected, skip error
                                        # XXX: TODO: check that differential order is less than or equal to that of the kernel
                                        continue

                    # check if this symbol is actually a type, e.g. "mV" in the expression "(1 + 2) * mV"
                    symbol2 = var.get_scope().resolve_to_symbol(
                        var.get_complete_name(), SymbolKind.TYPE)
                    if symbol2 is not None:
                        continue  # symbol is a type symbol

                    # undefined variable: report and abort the whole check
                    code, message = Messages.get_variable_not_defined(
                        var.get_complete_name())
                    Logger.log_message(
                        code=code,
                        message=message,
                        error_position=node.get_source_position(),
                        log_level=LoggingLevel.ERROR,
                        node=node)
                    return

                # check if it is part of an invariant
                # if it is the case, there is no "recursive" declaration
                # so check if the parent is a declaration and the expression the invariant
                if isinstance(
                        expr_par,
                        ASTDeclaration) and expr_par.get_invariant() == expr:
                    # in this case its ok if it is recursive or defined later on
                    continue

                # check if it has been defined before usage, except for predefined symbols, input ports and variables added by the AST transformation functions
                if (not symbol.is_predefined) \
                        and symbol.block_type != BlockType.INPUT \
                        and not symbol.get_referenced_object().get_source_position().is_added_source_position():
                    # except for parameters, those can be defined after
                    if ((not symbol.get_referenced_object(
                    ).get_source_position().before(var.get_source_position()))
                            and (not symbol.block_type in [
                                BlockType.PARAMETERS, BlockType.INTERNALS,
                                BlockType.STATE
                            ])):
                        code, message = Messages.get_variable_used_before_declaration(
                            var.get_name())
                        Logger.log_message(
                            node=node,
                            message=message,
                            error_position=var.get_source_position(),
                            code=code,
                            log_level=LoggingLevel.ERROR)
                    # now check that they are not defined recursively, e.g. V_m mV = V_m + 1
                    # todo: we should not check this for invariants
                    if (symbol.get_referenced_object().get_source_position(
                    ).encloses(var.get_source_position())
                            and not symbol.get_referenced_object().
                            get_source_position().is_added_source_position()):
                        code, message = Messages.get_variable_defined_recursively(
                            var.get_name())
                        Logger.log_message(
                            code=code,
                            message=message,
                            error_position=symbol.get_referenced_object(
                            ).get_source_position(),
                            log_level=LoggingLevel.ERROR,
                            node=node)
Ejemplo n.º 34
0
    def setup_generation_helpers(self, neuron: ASTNeuron) -> Dict:
        """
        Returns a standard namespace with often required functionality.
        :param neuron: a single neuron instance
        :type neuron: ASTNeuron
        :return: a map from name to functionality.
        :rtype: dict
        """
        gsl_converter = GSLReferenceConverter()
        gsl_printer = UnitlessExpressionPrinter(gsl_converter)
        # helper classes and objects
        converter = NESTReferenceConverter(False)
        unitless_pretty_printer = UnitlessExpressionPrinter(converter)

        namespace = dict()

        namespace['neuronName'] = neuron.get_name()
        namespace['neuron'] = neuron
        namespace['moduleName'] = FrontendConfiguration.get_module_name()
        namespace['printer'] = NestPrinter(unitless_pretty_printer)
        namespace['assignments'] = NestAssignmentsHelper()
        namespace['names'] = NestNamesConverter()
        namespace['declarations'] = NestDeclarationsHelper()
        namespace['utils'] = ASTUtils()
        namespace['idemPrinter'] = UnitlessExpressionPrinter()
        namespace['outputEvent'] = namespace['printer'].print_output_event(
            neuron.get_body())
        namespace['is_spike_input'] = ASTUtils.is_spike_input(
            neuron.get_body())
        namespace['is_current_input'] = ASTUtils.is_current_input(
            neuron.get_body())
        namespace['odeTransformer'] = OdeTransformer()
        namespace['printerGSL'] = gsl_printer
        namespace['now'] = datetime.datetime.utcnow()
        namespace['tracing'] = FrontendConfiguration.is_dev

        namespace[
            'PredefinedUnits'] = pynestml.symbols.predefined_units.PredefinedUnits
        namespace[
            'UnitTypeSymbol'] = pynestml.symbols.unit_type_symbol.UnitTypeSymbol

        namespace['initial_values'] = {}
        namespace['uses_analytic_solver'] = neuron.get_name() in self.analytic_solver.keys() \
            and self.analytic_solver[neuron.get_name()] is not None
        if namespace['uses_analytic_solver']:
            namespace['analytic_state_variables'] = self.analytic_solver[
                neuron.get_name()]["state_variables"]
            namespace['analytic_variable_symbols'] = {
                sym:
                neuron.get_equations_block().get_scope().resolve_to_symbol(
                    sym, SymbolKind.VARIABLE)
                for sym in namespace['analytic_state_variables']
            }
            namespace['update_expressions'] = {}
            for sym, expr in self.analytic_solver[
                    neuron.get_name()]["initial_values"].items():
                namespace['initial_values'][sym] = expr
            for sym in namespace['analytic_state_variables']:
                expr_str = self.analytic_solver[
                    neuron.get_name()]["update_expressions"][sym]
                expr_ast = ModelParser.parse_expression(expr_str)
                # pretend that update expressions are in "equations" block, which should always be present, as differential equations must have been defined to get here
                expr_ast.update_scope(
                    neuron.get_equations_blocks().get_scope())
                expr_ast.accept(ASTSymbolTableVisitor())
                namespace['update_expressions'][sym] = expr_ast

            namespace['propagators'] = self.analytic_solver[
                neuron.get_name()]["propagators"]

        # bug fix: membership must be tested against ``numeric_solver`` (it was
        # ``analytic_solver``), otherwise indexing ``self.numeric_solver`` below
        # could raise KeyError or the numeric branch could be mis-detected
        namespace['uses_numeric_solver'] = neuron.get_name() in self.numeric_solver.keys() \
            and self.numeric_solver[neuron.get_name()] is not None
        if namespace['uses_numeric_solver']:
            namespace['numeric_state_variables'] = self.numeric_solver[
                neuron.get_name()]["state_variables"]
            namespace['numeric_variable_symbols'] = {
                sym:
                neuron.get_equations_block().get_scope().resolve_to_symbol(
                    sym, SymbolKind.VARIABLE)
                for sym in namespace['numeric_state_variables']
            }
            assert not any([
                sym is None
                for sym in namespace['numeric_variable_symbols'].values()
            ])
            namespace['numeric_update_expressions'] = {}
            for sym, expr in self.numeric_solver[
                    neuron.get_name()]["initial_values"].items():
                namespace['initial_values'][sym] = expr
            for sym in namespace['numeric_state_variables']:
                expr_str = self.numeric_solver[
                    neuron.get_name()]["update_expressions"][sym]
                expr_ast = ModelParser.parse_expression(expr_str)
                # pretend that update expressions are in "equations" block, which should always be present, as differential equations must have been defined to get here
                expr_ast.update_scope(
                    neuron.get_equations_blocks().get_scope())
                expr_ast.accept(ASTSymbolTableVisitor())
                namespace['numeric_update_expressions'][sym] = expr_ast

            # the numeric path uses GSL-specific printers and name converters
            namespace['useGSL'] = namespace['uses_numeric_solver']
            namespace['names'] = GSLNamesConverter()
            converter = NESTReferenceConverter(True)
            unitless_pretty_printer = UnitlessExpressionPrinter(converter)
            namespace['printer'] = NestPrinter(unitless_pretty_printer)

        namespace["spike_updates"] = neuron.spike_updates

        # detect whether the model uses the normal random number distribution
        rng_visitor = ASTRandomNumberGeneratorVisitor()
        neuron.accept(rng_visitor)
        namespace['norm_rng'] = rng_visitor._norm_rng_is_used

        return namespace
Ejemplo n.º 35
0
    def parse_model(cls, file_path=None):
        """
        Parses a handed over model and returns the meta_model representation of it.
        :param file_path: the path to the file which shall be parsed.
        :type file_path: str
        :return: a new ASTNESTMLCompilationUnit object, or None if a lexer/parser/IO error occurred.
        :rtype: ASTNestMLCompilationUnit
        """
        try:
            input_file = FileStream(file_path)
        except IOError:
            code, message = Messages.get_input_path_not_found(path=file_path)
            Logger.log_message(neuron=None, code=None, message=message, error_position=None, log_level=LoggingLevel.ERROR)
            return
        code, message = Messages.get_start_processing_file(file_path)
        Logger.log_message(neuron=None, code=code, message=message, error_position=None, log_level=LoggingLevel.INFO)

        # create a lexer and hand over the input
        lexer = PyNestMLLexer()
        lexer.removeErrorListeners()
        lexer.addErrorListener(ConsoleErrorListener())
        lexerErrorListener = NestMLErrorListener()
        lexer.addErrorListener(lexerErrorListener)
        # lexer._errHandler = BailErrorStrategy()  # N.B. uncomment this line and the next to halt immediately on lexer errors
        # lexer._errHandler.reset(lexer)
        lexer.inputStream = input_file
        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()
        if lexerErrorListener._error_occurred:
            code, message = Messages.get_lexer_error()
            Logger.log_message(neuron=None, code=None, message=message, error_position=None, log_level=LoggingLevel.ERROR)
            return
        # parse the file
        parser = PyNestMLParser(None)
        parser.removeErrorListeners()
        parser.addErrorListener(ConsoleErrorListener())
        parserErrorListener = NestMLErrorListener()
        parser.addErrorListener(parserErrorListener)
        # parser._errHandler = BailErrorStrategy()  # N.B. uncomment this line and the next to halt immediately on parse errors
        # parser._errHandler.reset(parser)
        parser.setTokenStream(stream)
        compilation_unit = parser.nestMLCompilationUnit()
        if parserErrorListener._error_occurred:
            code, message = Messages.get_parser_error()
            Logger.log_message(neuron=None, code=None, message=message, error_position=None, log_level=LoggingLevel.ERROR)
            return

        # create a new visitor and return the new AST
        ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
        ast = ast_builder_visitor.visit(compilation_unit)

        # create and update the corresponding symbol tables
        SymbolTable.initialize_symbol_table(ast.get_source_position())
        # snapshot the log so messages produced during the temporary renaming below can be discarded
        log_to_restore = copy.deepcopy(Logger.get_log())
        counter = Logger.curr_message

        # replace all derived variables with computer-processable names: e.g. g_in''' -> g_in__ddd
        restore_differential_order = []
        for ode in ASTUtils.get_all(ast, ASTOdeEquation):
            lhs_variable = ode.get_lhs()
            if lhs_variable.get_differential_order() > 0:
                # drop one order on the LHS; it is restored after renaming (see below)
                lhs_variable.differential_order = lhs_variable.get_differential_order() - 1
                restore_differential_order.append(lhs_variable)

        for shape in ASTUtils.get_all(ast, ASTOdeShape):
            lhs_variable = shape.get_variable()
            if lhs_variable.get_differential_order() > 0:
                lhs_variable.differential_order = lhs_variable.get_differential_order() - 1
                restore_differential_order.append(lhs_variable)

        # then replace remaining variables
        for variable in ASTUtils.get_all(ast, ASTVariable):
            if variable.get_differential_order() > 0:
                variable.set_name(variable.get_name() + "__" + "d" * variable.get_differential_order())
                variable.differential_order = 0

        # now also equations have no ' at lhs. replace every occurrence of last d to ' to compensate
        for ode_variable in restore_differential_order:
            ode_variable.differential_order = 1
        # discard log messages accumulated during the renaming pass
        Logger.set_log(log_to_restore, counter)
        for neuron in ast.get_neuron_list():
            neuron.accept(ASTSymbolTableVisitor())
            SymbolTable.add_neuron_scope(neuron.get_name(), neuron.get_scope())
        return ast
Ejemplo n.º 36
0
    def analyse_neuron(self, neuron: ASTNeuron) -> List[ASTAssignment]:
        """
        Analyse and transform a single neuron.
        :param neuron: a single neuron.
        :return: spike_updates: list of spike updates, see documentation for get_spike_update_expressions() for more information.
        """
        code, message = Messages.get_start_processing_neuron(neuron.get_name())
        Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)

        equations_block = neuron.get_equations_block()

        if equations_block is None:
            # without an equations block, every declared state/initial-value
            # variable counts as "non-equations" state
            non_eq_vars = []
            non_eq_vars.extend(ASTUtils.all_variables_defined_in_block(neuron.get_initial_values_blocks()))
            non_eq_vars.extend(ASTUtils.all_variables_defined_in_block(neuron.get_state_blocks()))
            self.non_equations_state_variables[neuron.get_name()] = non_eq_vars
            return []

        delta_factors = self.get_delta_factors_(neuron, equations_block)
        kernel_buffers = self.generate_kernel_buffers_(neuron, equations_block)
        self.replace_convolve_calls_with_buffers_(neuron, equations_block, kernel_buffers)
        self.make_inline_expressions_self_contained(equations_block.get_inline_expressions())
        self.replace_inline_expressions_through_defining_expressions(
            equations_block.get_ode_equations(), equations_block.get_inline_expressions())

        analytic, numeric = self.ode_toolbox_analysis(neuron, kernel_buffers)
        self.analytic_solver[neuron.get_name()] = analytic
        self.numeric_solver[neuron.get_name()] = numeric

        # collect every initial-value variable that appears neither on an ODE
        # left-hand side nor inside a kernel definition
        remaining_state_vars = []
        eq_blocks = neuron.get_equations_blocks()
        for declaration in neuron.get_initial_values_blocks().get_declarations():
            for var in declaration.get_variables():
                if not eq_blocks:
                    remaining_state_vars.append(var)
                    continue

                in_ode = any(ode_eq.get_lhs().get_name() == var.get_name()
                             for ode_eq in eq_blocks.get_ode_equations())
                in_kernel = any(kern_var.get_name() == var.get_name()
                                for kern in eq_blocks.get_kernels()
                                for kern_var in kern.get_variables())

                if not (in_ode or in_kernel):
                    remaining_state_vars.append(var)
        self.non_equations_state_variables[neuron.get_name()] = remaining_state_vars

        solvers = [analytic, numeric]
        self.remove_initial_values_for_kernels(neuron)
        kernels = self.remove_kernel_definitions_from_equations_block(neuron)
        self.update_initial_values_for_odes(neuron, solvers, kernels)
        self.remove_ode_definitions_from_equations_block(neuron)
        self.create_initial_values_for_kernels(neuron, solvers, kernels)
        self.replace_variable_names_in_expressions(neuron, solvers)
        self.add_timestep_symbol(neuron)

        if self.analytic_solver[neuron.get_name()] is not None:
            # propagators from the analytic solution become internal declarations
            neuron = add_declarations_to_internals(neuron, self.analytic_solver[neuron.get_name()]["propagators"])

        self.update_symbol_table(neuron, kernel_buffers)
        spike_updates = self.get_spike_update_expressions(
            neuron, kernel_buffers, solvers, delta_factors)

        return spike_updates
Ejemplo n.º 37
0
    def convert_function_call(self, function_call, prefix=''):
        """Convert a single function call to C++ GSL API syntax.

        Parameters
        ----------
        function_call : ASTFunctionCall
            The function call node to convert.
        prefix : str
            Optional string that will be prefixed to the function call. For example, to refer to a function call in the class "node", use a prefix equal to "node." or "node->".

            Predefined functions will not be prefixed.

        Returns
        -------
        s : str
            The function call string in C++ syntax.
        """
        function_name = function_call.get_name()

        if function_name == PredefinedFunctions.TIME_RESOLUTION:
            return 'nest::Time::get_resolution().get_ms()'

        if function_name == PredefinedFunctions.TIME_STEPS:
            return 'nest::Time(nest::Time::ms((double) {!s})).get_steps()'

        if function_name == PredefinedFunctions.MAX:
            return 'std::max({!s}, {!s})'

        if function_name == PredefinedFunctions.MIN:
            return 'std::min({!s}, {!s})'

        if function_name == PredefinedFunctions.CLIP:
            # warning: the arguments of this function have been swapped and
            # are therefore [v_max, v_min, v], hence its structure
            return 'std::min({2!s}, std::max({1!s}, {0!s}))'

        if function_name == PredefinedFunctions.EXP:
            if self.is_upper_bound:
                return 'std::exp(std::min({!s},' + str(
                    self.maximal_exponent) + '))'
            else:
                return 'std::exp({!s})'

        if function_name == PredefinedFunctions.COSH:
            if self.is_upper_bound:
                return 'std::cosh(std::min(std::abs({!s}),' + str(
                    self.maximal_exponent) + '))'
            else:
                return 'std::cosh({!s})'

        if function_name == PredefinedFunctions.SINH:
            if self.is_upper_bound:
                return 'std::sinh(({!s} > 0 ? 1 : -1)*std::min(std::abs({!s}),' + str(
                    self.maximal_exponent) + '))'
            else:
                return 'std::sinh({!s})'

        if function_name == PredefinedFunctions.TANH:
            return 'std::tanh({!s})'

        if function_name == PredefinedFunctions.LN:
            return 'std::log({!s})'

        if function_name == PredefinedFunctions.LOG10:
            return 'std::log10({!s})'

        if function_name == PredefinedFunctions.EXPM1:
            return 'numerics::expm1({!s})'

        if function_name == PredefinedFunctions.RANDOM_NORMAL:
            return '(({!s}) + ({!s}) * ' + prefix + 'normal_dev_( nest::get_vp_specific_rng( ' + prefix + 'get_thread() ) ))'

        if function_name == PredefinedFunctions.RANDOM_UNIFORM:
            return '(({!s}) + ({!s}) * nest::get_vp_specific_rng( ' + prefix + 'get_thread() )->drand())'

        if function_name == PredefinedFunctions.EMIT_SPIKE:
            return 'set_spiketime(nest::Time::step(origin.get_steps()+lag+1));\n' \
                   'nest::SpikeEvent se;\n' \
                   'nest::kernel().event_delivery_manager.send(*this, se, lag)'

        if function_name == PredefinedFunctions.DELIVER_SPIKE:
            return '''
        set_delay( {1!s} );
        const long __delay_steps = nest::Time::delay_ms_to_steps( get_delay() );
        set_delay_steps(__delay_steps);
        e.set_receiver( *__target );
  e.set_weight( {0!s} );
  // use accessor functions (inherited from Connection< >) to obtain delay in steps and rport
  e.set_delay_steps( get_delay_steps() );
  e.set_rport( get_rport() );
e();
'''

        # suppress prefix for misc. predefined functions
        # check if function is "predefined" purely based on the name, as we don't have access to the function symbol here
        function_is_predefined = PredefinedFunctions.get_function(
            function_name)
        if function_is_predefined:
            prefix = ''

        if ASTUtils.needs_arguments(function_call):
            n_args = len(function_call.get_args())
            return prefix + function_name + '(' + ', '.join(
                ['{!s}' for _ in range(n_args)]) + ')'

        return prefix + function_name + '()'