Example #1
def apply_incoming_spikes(neuron):
    """
    Adds the spike-buffer update assignments derived from the convolve calls to the update block of the handed over neuron.
    :param neuron: a single neuron instance
    :type neuron: ASTNeuron
    :return: the modified neuron
    :rtype: ASTNeuron
    """
    assert (neuron is not None and isinstance(neuron, ASTNeuron)), \
        '(PyNestML.Solver.BaseTransformer) No or wrong type of neuron provided (%s)!' % type(neuron)
    conv_calls = OdeTransformer.get_sum_function_calls(neuron)
    printer = ExpressionsPrettyPrinter()
    spikes_updates = list()
    for conv_call in conv_calls:
        shape = conv_call.get_args()[0].get_variable().get_complete_name()
        buffer = conv_call.get_args()[1].get_variable().get_complete_name()
        initial_values = (neuron.get_initial_values_blocks().get_declarations()
                          if neuron.get_initial_values_blocks() is not None else list())
        for ast_declaration in initial_values:
            for variable in ast_declaration.get_variables():
                if re.match(shape + r"[']*", variable.get_complete_name()) \
                        or re.match(shape + r'__[\d]+$', variable.get_complete_name()):
                    spikes_updates.append(ModelParser.parse_assignment(
                        variable.get_complete_name() + " += " + buffer + " * "
                        + printer.print_expression(ast_declaration.get_expression())))
    for update in spikes_updates:
        add_assignment_to_update_block(update, neuron)
    return neuron
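
A hedged usage sketch for the transformer above: it assumes `neuron` is an ASTNeuron that has already been parsed and equipped with a symbol table (the parsing entry point itself is not shown on this page).

# `neuron` is assumed to be a fully parsed ASTNeuron (see lead-in above).
neuron = apply_incoming_spikes(neuron)  # adds the spike-update assignments and returns the same instance
for stmt in neuron.get_update_blocks().get_block().get_stmts():
    print(stmt)  # the newly appended spike-update assignments appear here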
Example #2
    def __init__(self):
        self.analytic_solver = {}
        self.numeric_solver = {}

        # setup the template environment

        def raise_helper(msg):
            raise TemplateRuntimeError(msg)

        env = Environment(loader=FileSystemLoader(
            os.path.join(os.path.dirname(__file__), 'resources_nest')))
        env.globals['raise'] = raise_helper
        env.globals["is_delta_kernel"] = is_delta_kernel
        setup_env = Environment(loader=FileSystemLoader(
            os.path.join(os.path.dirname(__file__), 'resources_nest',
                         'setup')))
        setup_env.globals['raise'] = raise_helper
        # setup the cmake template
        self._template_cmakelists = setup_env.get_template('CMakeLists.jinja2')
        # setup the module class template
        self._template_module_class = env.get_template('ModuleClass.jinja2')
        # setup the NEST module template
        self._template_module_header = env.get_template('ModuleHeader.jinja2')
        # setup the SLI_Init file
        self._template_sli_init = setup_env.get_template('SLI_Init.jinja2')
        # setup the neuron header template
        self._template_neuron_h_file = env.get_template('NeuronHeader.jinja2')
        # setup the neuron implementation template
        self._template_neuron_cpp_file = env.get_template('NeuronClass.jinja2')
        self._printer = ExpressionsPrettyPrinter()
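
The templates loaded here are later rendered with a plain namespace dictionary and written to disk (see generate_module_code in the NESTCodeGenerator example at the end of this page). A minimal sketch of that pattern; `gen` (an instance of the class above), the module name and the output path are placeholder assumptions:

import datetime

# Hedged sketch of the render-and-write pattern used by generate_module_code().
namespace = {'neurons': [], 'moduleName': 'nestmlmodule', 'now': datetime.datetime.utcnow()}
header_code = gen._template_module_header.render(namespace)  # `gen`: an instance of the class above (assumption)
with open('nestmlmodule.h', 'w+') as f:
    f.write(str(header_code))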
Example #3
    def __init__(self, expression_pretty_printer, reference_convert=None):
        """
        The standard constructor.
        :param expression_pretty_printer: a pretty printer for expressions; if None, a default ExpressionsPrettyPrinter is created
        :type expression_pretty_printer: ExpressionsPrettyPrinter
        :param reference_convert: a single reference converter
        :type reference_convert: IReferenceConverter
        """
        if expression_pretty_printer is not None:
            self.expression_pretty_printer = expression_pretty_printer
        else:
            self.expression_pretty_printer = ExpressionsPrettyPrinter(reference_convert)
        return
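
Both construction paths of this constructor appear elsewhere on this page; a short sketch (the imports of the printer and converter classes are assumed):

# Explicit pretty printer plus a reference converter, as in the test setup example below.
printer = NestPrinter(ExpressionsPrettyPrinter(), NESTReferenceConverter())
# Passing None for the printer falls back to an ExpressionsPrettyPrinter built around the converter.
printer = NestPrinter(None, NESTReferenceConverter())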
Example #4
    def __init__(self):
        # setup the template environment
        def raise_helper(msg):
            raise TemplateRuntimeError(msg)
        env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'resources_nest')))
        env.globals['raise'] = raise_helper
        setup_env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'resources_nest', 'setup')))
        setup_env.globals['raise'] = raise_helper
        # setup the cmake template
        self._template_cmakelists = setup_env.get_template('CMakeLists.jinja2')
        # setup the module class template
        self._template_module_class = env.get_template('ModuleClass.jinja2')
        # setup the NEST module template
        self._template_module_header = env.get_template('ModuleHeader.jinja2')
        # setup the SLI_Init file
        self._template_sli_init = setup_env.get_template('SLI_Init.jinja2')
        # setup the neuron header template
        self._template_neuron_h_file = env.get_template('NeuronHeader.jinja2')
        # setup the neuron implementation template
        self._template_neuron_cpp_file = env.get_template('NeuronClass.jinja2')

        self._printer = ExpressionsPrettyPrinter()
Example #5
class NestPrinter:
    """
    This class contains the methods required to print NESTML AST constructs in a NEST-processable (C++) format.
    """
    def __init__(self, expression_pretty_printer, reference_convert=None):
        """
        The standard constructor.
        :param expression_pretty_printer: a pretty printer for expressions; if None, a default ExpressionsPrettyPrinter is created
        :type expression_pretty_printer: ExpressionsPrettyPrinter
        :param reference_convert: a single reference converter
        :type reference_convert: IReferenceConverter
        """
        if expression_pretty_printer is not None:
            self.expression_pretty_printer = expression_pretty_printer
        else:
            self.expression_pretty_printer = ExpressionsPrettyPrinter(
                reference_convert)
        return

    def print_node(self, node):
        ret = ''
        if isinstance(node, ASTArithmeticOperator):
            ret = self.print_arithmetic_operator(node)
        if isinstance(node, ASTAssignment):
            ret = self.print_assignment(node)
        if isinstance(node, ASTBitOperator):
            ret = self.print_bit_operator(node)
        if isinstance(node, ASTBlock):
            ret = self.print_block(node)
        if isinstance(node, ASTBlockWithVariables):
            ret = self.print_block_with_variables(node)
        if isinstance(node, ASTBody):
            ret = self.print_body(node)
        if isinstance(node, ASTComparisonOperator):
            ret = self.print_comparison_operator(node)
        if isinstance(node, ASTCompoundStmt):
            ret = self.print_compound_stmt(node)
        if isinstance(node, ASTDataType):
            ret = self.print_data_type(node)
        if isinstance(node, ASTDeclaration):
            ret = self.print_declaration(node)
        if isinstance(node, ASTElifClause):
            ret = self.print_elif_clause(node)
        if isinstance(node, ASTElseClause):
            ret = self.print_else_clause(node)
        if isinstance(node, ASTEquationsBlock):
            ret = self.print_equations_block(node)
        if isinstance(node, ASTExpression):
            ret = self.print_expression(node)
        if isinstance(node, ASTForStmt):
            ret = self.print_for_stmt(node)
        if isinstance(node, ASTFunction):
            ret = self.print_function(node)
        if isinstance(node, ASTFunctionCall):
            ret = self.print_function_call(node)
        if isinstance(node, ASTIfClause):
            ret = self.print_if_clause(node)
        if isinstance(node, ASTIfStmt):
            ret = self.print_if_stmt(node)
        if isinstance(node, ASTInputBlock):
            ret = self.print_input_block(node)
        if isinstance(node, ASTInputPort):
            ret = self.print_input_port(node)
        if isinstance(node, ASTInputQualifier):
            ret = self.print_input_qualifier(node)
        if isinstance(node, ASTLogicalOperator):
            ret = self.print_logical_operator(node)
        if isinstance(node, ASTNestMLCompilationUnit):
            ret = self.print_compilation_unit(node)
        if isinstance(node, ASTNeuron):
            ret = self.print_neuron(node)
        if isinstance(node, ASTOdeEquation):
            ret = self.print_ode_equation(node)
        if isinstance(node, ASTInlineExpression):
            ret = self.print_inline_expression(node)
        if isinstance(node, ASTKernel):
            ret = self.print_kernel(node)
        if isinstance(node, ASTOutputBlock):
            ret = self.print_output_block(node)
        if isinstance(node, ASTParameter):
            ret = self.print_parameter(node)
        if isinstance(node, ASTReturnStmt):
            ret = self.print_return_stmt(node)
        if isinstance(node, ASTSimpleExpression):
            ret = self.print_simple_expression(node)
        if isinstance(node, ASTSmallStmt):
            ret = self.print_small_stmt(node)
        if isinstance(node, ASTUnaryOperator):
            ret = self.print_unary_operator(node)
        if isinstance(node, ASTUnitType):
            ret = self.print_unit_type(node)
        if isinstance(node, ASTUpdateBlock):
            ret = self.print_update_block(node)
        if isinstance(node, ASTVariable):
            ret = self.print_variable(node)
        if isinstance(node, ASTWhileStmt):
            ret = self.print_while_stmt(node)
        if isinstance(node, ASTStmt):
            ret = self.print_stmt(node)
        return ret

    def print_assignment(self, node: ASTAssignment, prefix: str = "") -> str:
        ret = self.print_node(node.lhs) + ' '
        if node.is_compound_quotient:
            ret += '/='
        elif node.is_compound_product:
            ret += '*='
        elif node.is_compound_minus:
            ret += '-='
        elif node.is_compound_sum:
            ret += '+='
        else:
            ret += '='
        ret += ' ' + self.print_node(node.rhs)
        return ret

    def print_variable(self, node: ASTVariable) -> str:
        ret = node.name
        for i in range(1, node.differential_order + 1):
            ret += "__d"
        return ret

    def print_expression(self,
                         node: ASTExpressionNode,
                         prefix: str = "",
                         with_origins=True) -> str:
        """
        Pretty Prints the handed over rhs to a nest readable format.
        :param node: a single meta_model node.
        :type node: ASTExpressionNode
        :return: the corresponding string representation
        :rtype: str
        """
        return self.expression_pretty_printer.print_expression(
            node, prefix=prefix, with_origins=with_origins)

    def print_method_call(self, node: ASTFunctionCall) -> str:
        """
        Prints a single handed over function call.
        :param node: a single function call.
        :type node: ASTFunctionCall
        :return: the corresponding string representation.
        :rtype: str
        """
        return self.expression_pretty_printer.print_function_call(node)

    @classmethod
    def print_comparison_operator(cls, for_stmt):
        """
        Prints a single handed over comparison operator for a for stmt to a Nest processable format.
        :param for_stmt: a single for stmt
        :type for_stmt: ASTForStmt
        :return: a string representation
        :rtype: str
        """
        step = for_stmt.get_step()
        if step < 0:
            return '>'
        elif step > 0:
            return '<'
        else:
            return '!='

    @classmethod
    def print_step(cls, for_stmt):
        """
        Prints the step length to a nest processable format.
        :param for_stmt: a single for stmt
        :type for_stmt: ASTForStmt
        :return: a string representation
        :rtype: str
        """
        assert isinstance(for_stmt, ASTForStmt), \
            '(PyNestML.CodeGenerator.Printer) No or wrong type of for-stmt provided (%s)!' % type(for_stmt)
        return for_stmt.get_step()

    @classmethod
    def print_origin(cls, variable_symbol, prefix=''):
        """
        Returns a prefix corresponding to the origin of the variable symbol.
        :param variable_symbol: a single variable symbol.
        :type variable_symbol: VariableSymbol
        :return: the corresponding prefix
        :rtype: str
        """
        assert isinstance(variable_symbol, VariableSymbol), \
            '(PyNestML.CodeGenerator.Printer) No or wrong type of variable symbol provided (%s)!' % type(
                variable_symbol)

        if variable_symbol.block_type == BlockType.STATE:
            return prefix + 'S_.'

        if variable_symbol.block_type == BlockType.EQUATION:
            return prefix + 'S_.'

        if variable_symbol.block_type == BlockType.PARAMETERS:
            return prefix + 'P_.'

        if variable_symbol.block_type == BlockType.INTERNALS:
            return prefix + 'V_.'

        if variable_symbol.block_type == BlockType.INPUT_BUFFER_CURRENT:
            return prefix + 'B_.'

        if variable_symbol.block_type == BlockType.INPUT_BUFFER_SPIKE:
            return prefix + 'B_.'

        return ''

    @classmethod
    def print_output_event(cls, ast_body):
        """
        For the handed over neuron body, this operation determines which type of output event shall be emitted.
        :param ast_body: a single neuron body
        :type ast_body: ASTBody
        :return: the corresponding representation of the event
        :rtype: str
        """
        assert (ast_body is not None and isinstance(ast_body, ASTBody)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of body provided (%s)!' % type(ast_body)
        outputs = ast_body.get_output_blocks()
        if len(outputs) > 0:
            output = outputs[0]
            if output.is_spike():
                return 'nest::SpikeEvent'
            elif output.is_current():
                return 'nest::CurrentEvent'
            else:
                raise RuntimeError(
                    'Unexpected output type. Must be current or spike, is %s.'
                    % str(output))
        else:
            # no output port defined in the model: pretend dummy spike output port to obtain usable model
            return 'nest::SpikeEvent'

    @classmethod
    def print_buffer_initialization(cls, variable_symbol):
        """
        Prints the buffer initialization.
        :param variable_symbol: a single variable symbol.
        :type variable_symbol: VariableSymbol
        :return: a buffer initialization
        :rtype: str
        """
        return 'get_' + variable_symbol.get_symbol_name(
        ) + '().clear(); //includes resize'

    @classmethod
    def print_function_declaration(cls, ast_function):
        """
        Returns a nest processable function declaration head, i.e. the part which appears in the .h file.
        :param ast_function: a single function.
        :type ast_function: ASTFunction
        :return: the corresponding string representation.
        :rtype: str
        """
        from pynestml.meta_model.ast_function import ASTFunction
        from pynestml.symbols.symbol import SymbolKind
        assert (ast_function is not None and isinstance(ast_function, ASTFunction)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_function provided (%s)!' % type(ast_function)
        function_symbol = ast_function.get_scope().resolve_to_symbol(
            ast_function.get_name(), SymbolKind.FUNCTION)
        if function_symbol is None:
            raise RuntimeError('Cannot resolve the method ' +
                               ast_function.get_name())
        declaration = ast_function.print_comment('//') + '\n'
        declaration += PyNestml2NestTypeConverter.convert(
            function_symbol.get_return_type()).replace('.', '::')
        declaration += ' '
        declaration += ast_function.get_name() + '('
        for typeSym in function_symbol.get_parameter_types():
            declaration += PyNestml2NestTypeConverter.convert(typeSym)
            if function_symbol.get_parameter_types().index(typeSym) < len(
                    function_symbol.get_parameter_types()) - 1:
                declaration += ', '
        declaration += ') const\n'
        return declaration

    @classmethod
    def print_function_definition(cls, ast_function, namespace):
        """
        Returns a nest processable function definition, i.e. the part which appears in the .cpp file.
        :param ast_function: a single function.
        :type ast_function: ASTFunction
        :param namespace: the namespace in which this function is defined in
        :type namespace: str
        :return: the corresponding string representation.
        :rtype: str
        """
        assert isinstance(ast_function, ASTFunction), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_function provided (%s)!' % type(ast_function)
        assert isinstance(namespace, str), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of namespace provided (%s)!' % type(namespace)
        function_symbol = ast_function.get_scope().resolve_to_symbol(
            ast_function.get_name(), SymbolKind.FUNCTION)
        if function_symbol is None:
            raise RuntimeError('Cannot resolve the method ' +
                               ast_function.get_name())
        # first collect all parameters
        params = list()
        for param in ast_function.get_parameters():
            params.append(param.get_name())
        declaration = ast_function.print_comment('//') + '\n'
        declaration += PyNestml2NestTypeConverter.convert(
            function_symbol.get_return_type()).replace('.', '::')
        declaration += ' '
        if namespace is not None:
            declaration += namespace + '::'
        declaration += ast_function.get_name() + '('
        for typeSym in function_symbol.get_parameter_types():
            # create the type name combination, e.g. double Tau
            declaration += PyNestml2NestTypeConverter.convert(typeSym) + ' ' + \
                params[function_symbol.get_parameter_types().index(typeSym)]
            # if not the last component, separate by ','
            if function_symbol.get_parameter_types().index(typeSym) < \
                    len(function_symbol.get_parameter_types()) - 1:
                declaration += ', '
        declaration += ') const\n'
        return declaration

    def print_buffer_array_getter(self, ast_buffer):
        """
        Returns a string containing the nest declaration for a multi-receptor spike buffer.
        :param ast_buffer: a single buffer Variable Symbol
        :type ast_buffer: VariableSymbol
        :return: a string representation of the getter
        :rtype: str
        """
        assert (ast_buffer is not None and isinstance(ast_buffer, VariableSymbol)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        if ast_buffer.is_spike_buffer() and ast_buffer.is_inhibitory(
        ) and ast_buffer.is_excitatory():
            return 'inline ' + PyNestml2NestTypeConverter.convert(ast_buffer.get_type_symbol()) + '&' + ' get_' \
                   + ast_buffer.get_symbol_name() + '() {' + \
                   '  return spike_inputs_[' + ast_buffer.get_symbol_name().upper() + ' - 1]; }'
        else:
            return self.print_buffer_getter(ast_buffer, True)

    @classmethod
    def print_buffer_getter(cls, ast_buffer, is_in_struct=False):
        """
        Returns a string representation declaring a buffer getter as required in nest.
        :param ast_buffer: a single variable symbol representing a buffer.
        :type ast_buffer: VariableSymbol
        :param is_in_struct: indicates whether this getter is used in a struct or not
        :type is_in_struct: bool
        :return: a string representation of the getter.
        :rtype: str
        """
        assert (ast_buffer is not None and isinstance(ast_buffer, VariableSymbol)), \
            '(PyNestMl.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        assert (is_in_struct is not None and isinstance(is_in_struct, bool)), \
            '(PyNestMl.CodeGeneration.Printer) No or wrong type of is-in-struct provided (%s)!' % type(is_in_struct)
        declaration = 'inline '
        if ast_buffer.has_vector_parameter():
            declaration += 'std::vector<'
            declaration += PyNestml2NestTypeConverter.convert(
                ast_buffer.get_type_symbol())
            declaration += '> &'
        else:
            declaration += PyNestml2NestTypeConverter.convert(
                ast_buffer.get_type_symbol()) + '&'
        declaration += ' get_' + ast_buffer.get_symbol_name() + '() {'
        if is_in_struct:
            declaration += 'return ' + ast_buffer.get_symbol_name() + ';'
        else:
            declaration += 'return B_.get_' + ast_buffer.get_symbol_name(
            ) + '();'
        declaration += '}'
        return declaration

    @classmethod
    def print_buffer_declaration_value(cls, ast_buffer):
        """
        Returns a string representation for the declaration of a buffer's value.
        :param ast_buffer: a single buffer variable symbol
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        if ast_buffer.has_vector_parameter():
            return 'std::vector<double> ' + NestNamesConverter.buffer_value(
                ast_buffer)
        else:
            return 'double ' + NestNamesConverter.buffer_value(ast_buffer)

    @classmethod
    def print_buffer_declaration(cls, ast_buffer):
        """
        Returns a string representation for the declaration of a buffer.
        :param ast_buffer: a single buffer variable symbol
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        if ast_buffer.has_vector_parameter():
            buffer_type = 'std::vector< ' + PyNestml2NestTypeConverter.convert(
                ast_buffer.get_type_symbol()) + ' >'
        else:
            buffer_type = PyNestml2NestTypeConverter.convert(
                ast_buffer.get_type_symbol())
        buffer_type = buffer_type.replace(".", "::")
        return buffer_type + " " + ast_buffer.get_symbol_name()

    @classmethod
    def print_buffer_declaration_header(cls, ast_buffer):
        """
        Prints the comment as stated over the buffer declaration.
        :param ast_buffer: a single buffer variable symbol.
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        return '//!< Buffer for input (type: ' + ast_buffer.get_type_symbol(
        ).get_symbol_name() + ')'
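
A minimal, self-contained sketch exercising one of the classmethods above. `FakeForStmt` is a stand-in invented purely for illustration; the real argument is an ASTForStmt, of which only get_step() is used here.

class FakeForStmt:
    """Stand-in for an ASTForStmt; only get_step() is needed by the printer."""
    def get_step(self):
        return -1.0

print(NestPrinter.print_comparison_operator(FakeForStmt()))  # prints '>' because a negative step means the loop counts down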
Example #6
from pynestml.symbol_table.symbol_table import SymbolTable
from pynestml.symbols.predefined_functions import PredefinedFunctions
from pynestml.symbols.predefined_types import PredefinedTypes
from pynestml.symbols.predefined_units import PredefinedUnits
from pynestml.symbols.predefined_variables import PredefinedVariables
from pynestml.utils.logger import Logger, LoggingLevel
from pynestml.utils.model_parser import ModelParser

SymbolTable.initialize_symbol_table(
    ASTSourceLocation(start_line=0, start_column=0, end_line=0, end_column=0))
PredefinedUnits.register_units()
PredefinedTypes.register_types()
PredefinedVariables.register_variables()
PredefinedFunctions.register_functions()
Logger.init_logger(LoggingLevel.INFO)
printer = NestPrinter(ExpressionsPrettyPrinter(), NESTReferenceConverter())


def get_first_statement_in_update_block(model):
    if model.get_neuron_list()[0].get_update_blocks():
        return model.get_neuron_list()[0].get_update_blocks().get_block().get_stmts()[0]
    return None


def get_first_declaration_in_state_block(model):
    return model.get_neuron_list()[0].get_state_blocks().get_declarations()[0]


def get_first_declared_function(model):
    return model.get_neuron_list()[0].get_functions()[0]
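
A hedged sketch of how these helpers might be exercised. The `parse_model` entry point and the model file name are assumptions that do not appear in the snippet above; print_node dispatches to printer methods defined in the full NestPrinter class.

model = ModelParser.parse_model("models/iaf_psc_exp.nestml")  # assumed parser entry point
decl = get_first_declaration_in_state_block(model)
print(printer.print_node(decl))  # NEST-style rendering of the first state declaration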
Example #7
class NESTCodeGenerator(CodeGenerator):
    """
    Code generator for a C++ NEST extension module.
    """

    _variable_matching_template = r'(\b)({})(\b)'

    def __init__(self):
        self.analytic_solver = {}
        self.numeric_solver = {}

        # setup the template environment

        def raise_helper(msg):
            raise TemplateRuntimeError(msg)

        env = Environment(loader=FileSystemLoader(
            os.path.join(os.path.dirname(__file__), 'resources_nest')))
        env.globals['raise'] = raise_helper
        env.globals["is_delta_kernel"] = is_delta_kernel
        setup_env = Environment(loader=FileSystemLoader(
            os.path.join(os.path.dirname(__file__), 'resources_nest',
                         'setup')))
        setup_env.globals['raise'] = raise_helper
        # setup the cmake template
        self._template_cmakelists = setup_env.get_template('CMakeLists.jinja2')
        # setup the module class template
        self._template_module_class = env.get_template('ModuleClass.jinja2')
        # setup the NEST module template
        self._template_module_header = env.get_template('ModuleHeader.jinja2')
        # setup the SLI_Init file
        self._template_sli_init = setup_env.get_template('SLI_Init.jinja2')
        # setup the neuron header template
        self._template_neuron_h_file = env.get_template('NeuronHeader.jinja2')
        # setup the neuron implementation template
        self._template_neuron_cpp_file = env.get_template('NeuronClass.jinja2')
        self._printer = ExpressionsPrettyPrinter()

    def generate_code(self, neurons):
        self.analyse_transform_neurons(neurons)
        self.generate_neurons(neurons)
        self.generate_module_code(neurons)

    def generate_module_code(self, neurons: List[ASTNeuron]) -> None:
        """
        Generates code that is necessary to integrate neuron models into the NEST infrastructure.
        :param neurons: a list of neurons
        :type neurons: list(ASTNeuron)
        """
        namespace = {
            'neurons': neurons,
            'moduleName': FrontendConfiguration.get_module_name(),
            'now': datetime.datetime.utcnow()
        }
        if not os.path.exists(FrontendConfiguration.get_target_path()):
            os.makedirs(FrontendConfiguration.get_target_path())

        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 FrontendConfiguration.get_module_name())) +
                '.h', 'w+') as f:
            f.write(str(self._template_module_header.render(namespace)))

        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 FrontendConfiguration.get_module_name())) +
                '.cpp', 'w+') as f:
            f.write(str(self._template_module_class.render(namespace)))

        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 'CMakeLists')) + '.txt', 'w+') as f:
            f.write(str(self._template_cmakelists.render(namespace)))

        if not os.path.isdir(
                os.path.realpath(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 'sli'))):
            os.makedirs(
                os.path.realpath(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 'sli')))

        with open(
                str(
                    os.path.join(
                        FrontendConfiguration.get_target_path(), 'sli',
                        FrontendConfiguration.get_module_name() + "-init")) +
                '.sli', 'w+') as f:
            f.write(str(self._template_sli_init.render(namespace)))

        code, message = Messages.get_module_generated(
            FrontendConfiguration.get_target_path())
        Logger.log_message(None, code, message, None, LoggingLevel.INFO)

    def analyse_transform_neurons(self, neurons: List[ASTNeuron]) -> None:
        """
        Analyse and transform a list of neurons.
        :param neurons: a list of neurons.
        """
        for neuron in neurons:
            code, message = Messages.get_analysing_transforming_neuron(
                neuron.get_name())
            Logger.log_message(None, code, message, None, LoggingLevel.INFO)
            spike_updates = self.analyse_neuron(neuron)
            neuron.spike_updates = spike_updates
            # now store the transformed model
            self.store_transformed_model(neuron)

    def get_delta_factors_(self, neuron, equations_block):
        r"""
        For every occurrence of a convolution of the form `x^(n) = a * convolve(kernel, inport) + ...` where `kernel` is a delta function, add the element `(x^(n), inport) --> a` to the set.
        """
        delta_factors = {}
        for ode_eq in equations_block.get_ode_equations():
            var = ode_eq.get_lhs()
            expr = ode_eq.get_rhs()
            conv_calls = OdeTransformer.get_convolve_function_calls(expr)
            for conv_call in conv_calls:
                assert len(
                    conv_call.args
                ) == 2, "convolve() function call should have precisely two arguments: kernel and spike buffer"
                kernel = conv_call.args[0]
                if is_delta_kernel(
                        neuron.get_kernel_by_name(
                            kernel.get_variable().get_name())):
                    inport = conv_call.args[1].get_variable()
                    expr_str = str(expr)
                    sympy_expr = sympy.parsing.sympy_parser.parse_expr(
                        expr_str)
                    sympy_expr = sympy.expand(sympy_expr)
                    sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(
                        str(conv_call))
                    factor_str = []
                    for term in sympy.Add.make_args(sympy_expr):
                        if term.find(sympy_conv_expr):
                            factor_str.append(
                                str(term.replace(sympy_conv_expr, 1)))
                    factor_str = " + ".join(factor_str)
                    delta_factors[(var, inport)] = factor_str

        return delta_factors

    def generate_kernel_buffers_(self, neuron, equations_block):
        """
        For every occurrence of a convolution of the form `convolve(var, spike_buf)`: add the element `(kernel, spike_buf)` to the set, with `kernel` being the kernel that contains variable `var`.
        """

        kernel_buffers = set()
        convolve_calls = OdeTransformer.get_convolve_function_calls(
            equations_block)
        for convolve in convolve_calls:
            el = (convolve.get_args()[0], convolve.get_args()[1])
            sym = convolve.get_args()[0].get_scope().resolve_to_symbol(
                convolve.get_args()[0].get_variable().name,
                SymbolKind.VARIABLE)
            if sym is None:
                raise Exception(
                    "No initial value(s) defined for kernel with variable \"" +
                    convolve.get_args()[0].get_variable().get_complete_name() +
                    "\"")
            if sym.block_type == BlockType.INPUT_BUFFER_SPIKE:
                el = (el[1], el[0])

            # find the corresponding kernel object
            var = el[0].get_variable()
            assert var is not None
            kernel = neuron.get_kernel_by_name(var.get_name())
            assert kernel is not None, "In convolution \"convolve(" + str(
                var.name) + ", " + str(
                    el[1]) + ")\": no kernel by name \"" + var.get_name(
                    ) + "\" found in neuron."

            el = (kernel, el[1])
            kernel_buffers.add(el)

        return kernel_buffers

    def replace_variable_names_in_expressions(self, neuron, solver_dicts):
        """
        Replace all occurrences of variable names in NESTML format (e.g. `g_ex$''`) with the ode-toolbox formatted
        variable name (e.g. `g_ex__DOLLAR__d__d`).
        """
        def replace_var(_expr=None):
            if isinstance(_expr, ASTSimpleExpression) and _expr.is_variable():
                var = _expr.get_variable()
                if variable_in_solver(
                        to_ode_toolbox_processed_name(var.get_complete_name()),
                        solver_dicts):
                    ast_variable = ASTVariable(to_ode_toolbox_processed_name(
                        var.get_complete_name()),
                                               differential_order=0)
                    ast_variable.set_source_position(var.get_source_position())
                    _expr.set_variable(ast_variable)

            elif isinstance(_expr, ASTVariable):
                var = _expr
                if variable_in_solver(
                        to_ode_toolbox_processed_name(var.get_complete_name()),
                        solver_dicts):
                    var.set_name(
                        to_ode_toolbox_processed_name(var.get_complete_name()))
                    var.set_differential_order(0)

        def func(x):
            return replace_var(x)

        neuron.accept(ASTHigherOrderVisitor(func))

    def replace_convolve_calls_with_buffers_(self, neuron, equations_block,
                                             kernel_buffers):
        r"""
        Replace all occurrences of `convolve(kernel[']^n, spike_input_port)` with the corresponding buffer variable, e.g. `g_E__X__spikes_exc[__d]^n` for a kernel named `g_E` and a spike input port named `spikes_exc`.
        """
        def replace_function_call_through_var(_expr=None):
            if _expr.is_function_call() and _expr.get_function_call().get_name(
            ) == "convolve":
                convolve = _expr.get_function_call()
                el = (convolve.get_args()[0], convolve.get_args()[1])
                sym = convolve.get_args()[0].get_scope().resolve_to_symbol(
                    convolve.get_args()[0].get_variable().name,
                    SymbolKind.VARIABLE)
                if sym.block_type == BlockType.INPUT_BUFFER_SPIKE:
                    el = (el[1], el[0])
                var = el[0].get_variable()
                spike_input_port = el[1].get_variable()
                kernel = neuron.get_kernel_by_name(var.get_name())

                _expr.set_function_call(None)
                buffer_var = construct_kernel_X_spike_buf_name(
                    var.get_name(), spike_input_port,
                    var.get_differential_order() - 1)
                if is_delta_kernel(kernel):
                    # delta kernels are treated separately, and should be kept out of the dynamics (computing derivatives etc.) --> set to zero
                    _expr.set_variable(None)
                    _expr.set_numeric_literal(0)
                else:
                    ast_variable = ASTVariable(buffer_var)
                    ast_variable.set_source_position(
                        _expr.get_source_position())
                    _expr.set_variable(ast_variable)

        def func(x):
            return replace_function_call_through_var(x) if isinstance(
                x, ASTSimpleExpression) else True

        equations_block.accept(ASTHigherOrderVisitor(func))

    def add_timestep_symbol(self, neuron):
        assert neuron.get_initial_value(
            "__h"
        ) is None, "\"__h\" is a reserved name, please do not use variables by this name in your NESTML file"
        assert not "__h" in [
            sym.name for sym in neuron.get_internal_symbols()
        ], "\"__h\" is a reserved name, please do not use variables by this name in your NESTML file"
        neuron.add_to_internal_block(
            ModelParser.parse_declaration('__h ms = resolution()'), index=0)

    def analyse_neuron(self, neuron: ASTNeuron) -> List[ASTAssignment]:
        """
        Analyse and transform a single neuron.
        :param neuron: a single neuron.
        :return: spike_updates: list of spike updates, see documentation for get_spike_update_expressions() for more information.
        """
        code, message = Messages.get_start_processing_neuron(neuron.get_name())
        Logger.log_message(neuron, code, message, neuron.get_source_position(),
                           LoggingLevel.INFO)

        equations_block = neuron.get_equations_block()

        if equations_block is None:
            return []

        delta_factors = self.get_delta_factors_(neuron, equations_block)
        kernel_buffers = self.generate_kernel_buffers_(neuron, equations_block)
        self.replace_convolve_calls_with_buffers_(neuron, equations_block,
                                                  kernel_buffers)
        self.make_inline_expressions_self_contained(
            equations_block.get_inline_expressions())
        self.replace_inline_expressions_through_defining_expressions(
            equations_block.get_ode_equations(),
            equations_block.get_inline_expressions())

        analytic_solver, numeric_solver = self.ode_toolbox_analysis(
            neuron, kernel_buffers)
        self.analytic_solver[neuron.get_name()] = analytic_solver
        self.numeric_solver[neuron.get_name()] = numeric_solver
        self.remove_initial_values_for_kernels(neuron)
        kernels = self.remove_kernel_definitions_from_equations_block(neuron)
        self.update_initial_values_for_odes(neuron,
                                            [analytic_solver, numeric_solver],
                                            kernels)
        self.remove_ode_definitions_from_equations_block(neuron)
        self.create_initial_values_for_kernels(
            neuron, [analytic_solver, numeric_solver], kernels)
        self.replace_variable_names_in_expressions(
            neuron, [analytic_solver, numeric_solver])
        self.add_timestep_symbol(neuron)

        if self.analytic_solver[neuron.get_name()] is not None:
            neuron = add_declarations_to_internals(
                neuron, self.analytic_solver[neuron.get_name()]["propagators"])

        self.update_symbol_table(neuron, kernel_buffers)
        spike_updates = self.get_spike_update_expressions(
            neuron, kernel_buffers, [analytic_solver, numeric_solver],
            delta_factors)

        return spike_updates

    def generate_neuron_code(self, neuron: ASTNeuron) -> None:
        """
        For a handed over neuron, this method generates the corresponding header and implementation file.
        :param neuron: a single neuron object.
        """
        if not os.path.isdir(FrontendConfiguration.get_target_path()):
            os.makedirs(FrontendConfiguration.get_target_path())
        self.generate_model_h_file(neuron)
        self.generate_neuron_cpp_file(neuron)

    def generate_model_h_file(self, neuron: ASTNeuron) -> None:
        """
        For a handed over neuron, this method generates the corresponding header file.
        :param neuron: a single neuron object.
        """
        neuron_h_file = self._template_neuron_h_file.render(
            self.setup_generation_helpers(neuron))
        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 neuron.get_name())) + '.h', 'w+') as f:
            f.write(str(neuron_h_file))

    def generate_neuron_cpp_file(self, neuron: ASTNeuron) -> None:
        """
        For a handed over neuron, this method generates the corresponding implementation file.
        :param neuron: a single neuron object.
        """
        neuron_cpp_file = self._template_neuron_cpp_file.render(
            self.setup_generation_helpers(neuron))
        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 neuron.get_name())) + '.cpp', 'w+') as f:
            f.write(str(neuron_cpp_file))

    def setup_generation_helpers(self, neuron: ASTNeuron) -> Dict:
        """
        Returns a standard namespace with often required functionality.
        :param neuron: a single neuron instance
        :type neuron: ASTNeuron
        :return: a map from name to functionality.
        :rtype: dict
        """
        gsl_converter = GSLReferenceConverter()
        gsl_printer = UnitlessExpressionPrinter(gsl_converter)
        # helper classes and objects
        converter = NESTReferenceConverter(False)
        unitless_pretty_printer = UnitlessExpressionPrinter(converter)

        namespace = dict()

        namespace['neuronName'] = neuron.get_name()
        namespace['neuron'] = neuron
        namespace['moduleName'] = FrontendConfiguration.get_module_name()
        namespace['printer'] = NestPrinter(unitless_pretty_printer)
        namespace['assignments'] = NestAssignmentsHelper()
        namespace['names'] = NestNamesConverter()
        namespace['declarations'] = NestDeclarationsHelper()
        namespace['utils'] = ASTUtils()
        namespace['idemPrinter'] = UnitlessExpressionPrinter()
        namespace['outputEvent'] = namespace['printer'].print_output_event(
            neuron.get_body())
        namespace['is_spike_input'] = ASTUtils.is_spike_input(
            neuron.get_body())
        namespace['is_current_input'] = ASTUtils.is_current_input(
            neuron.get_body())
        namespace['odeTransformer'] = OdeTransformer()
        namespace['printerGSL'] = gsl_printer
        namespace['now'] = datetime.datetime.utcnow()
        namespace['tracing'] = FrontendConfiguration.is_dev

        namespace[
            'PredefinedUnits'] = pynestml.symbols.predefined_units.PredefinedUnits
        namespace[
            'UnitTypeSymbol'] = pynestml.symbols.unit_type_symbol.UnitTypeSymbol

        namespace['initial_values'] = {}
        namespace['uses_analytic_solver'] = neuron.get_name() in self.analytic_solver.keys() \
            and self.analytic_solver[neuron.get_name()] is not None
        if namespace['uses_analytic_solver']:
            namespace['analytic_state_variables'] = self.analytic_solver[
                neuron.get_name()]["state_variables"]
            namespace['analytic_variable_symbols'] = {
                sym:
                neuron.get_equations_block().get_scope().resolve_to_symbol(
                    sym, SymbolKind.VARIABLE)
                for sym in namespace['analytic_state_variables']
            }
            namespace['update_expressions'] = {}
            for sym, expr in self.analytic_solver[
                    neuron.get_name()]["initial_values"].items():
                namespace['initial_values'][sym] = expr
            for sym in namespace['analytic_state_variables']:
                expr_str = self.analytic_solver[
                    neuron.get_name()]["update_expressions"][sym]
                expr_ast = ModelParser.parse_expression(expr_str)
                # pretend that update expressions are in "equations" block, which should always be present, as differential equations must have been defined to get here
                expr_ast.update_scope(
                    neuron.get_equations_blocks().get_scope())
                expr_ast.accept(ASTSymbolTableVisitor())
                namespace['update_expressions'][sym] = expr_ast

            namespace['propagators'] = self.analytic_solver[
                neuron.get_name()]["propagators"]

        namespace['uses_numeric_solver'] = neuron.get_name() in self.numeric_solver.keys() \
            and self.numeric_solver[neuron.get_name()] is not None
        if namespace['uses_numeric_solver']:
            namespace['numeric_state_variables'] = self.numeric_solver[
                neuron.get_name()]["state_variables"]
            namespace['numeric_variable_symbols'] = {
                sym:
                neuron.get_equations_block().get_scope().resolve_to_symbol(
                    sym, SymbolKind.VARIABLE)
                for sym in namespace['numeric_state_variables']
            }
            assert not any([
                sym is None
                for sym in namespace['numeric_variable_symbols'].values()
            ])
            namespace['numeric_update_expressions'] = {}
            for sym, expr in self.numeric_solver[
                    neuron.get_name()]["initial_values"].items():
                namespace['initial_values'][sym] = expr
            for sym in namespace['numeric_state_variables']:
                expr_str = self.numeric_solver[
                    neuron.get_name()]["update_expressions"][sym]
                expr_ast = ModelParser.parse_expression(expr_str)
                # pretend that update expressions are in "equations" block, which should always be present, as differential equations must have been defined to get here
                expr_ast.update_scope(
                    neuron.get_equations_blocks().get_scope())
                expr_ast.accept(ASTSymbolTableVisitor())
                namespace['numeric_update_expressions'][sym] = expr_ast

            namespace['useGSL'] = namespace['uses_numeric_solver']
            namespace['names'] = GSLNamesConverter()
            converter = NESTReferenceConverter(True)
            unitless_pretty_printer = UnitlessExpressionPrinter(converter)
            namespace['printer'] = NestPrinter(unitless_pretty_printer)

        namespace["spike_updates"] = neuron.spike_updates

        rng_visitor = ASTRandomNumberGeneratorVisitor()
        neuron.accept(rng_visitor)
        namespace['norm_rng'] = rng_visitor._norm_rng_is_used

        return namespace

    def ode_toolbox_analysis(self, neuron: ASTNeuron,
                             kernel_buffers: Mapping[ASTKernel, ASTInputPort]):
        """
        Prepare data for ODE-toolbox input format, invoke ODE-toolbox analysis via its API, and return the output.
        """
        assert isinstance(
            neuron.get_equations_blocks(),
            ASTEquationsBlock), "only one equation block should be present"

        equations_block = neuron.get_equations_block()

        if len(equations_block.get_kernels()) == 0 and len(
                equations_block.get_ode_equations()) == 0:
            # no equations defined -> no changes to the neuron
            return None, None

        code, message = Messages.get_neuron_analyzed(neuron.get_name())
        Logger.log_message(neuron, code, message, neuron.get_source_position(),
                           LoggingLevel.INFO)

        parameters_block = neuron.get_parameter_blocks()
        odetoolbox_indict = self.transform_ode_and_kernels_to_json(
            neuron, parameters_block, kernel_buffers)
        odetoolbox_indict["options"] = {}
        odetoolbox_indict["options"]["output_timestep_symbol"] = "__h"
        solver_result = analysis(
            odetoolbox_indict,
            disable_stiffness_check=True,
            debug=FrontendConfiguration.logging_level == "DEBUG")
        analytic_solver = None
        analytic_solvers = [
            x for x in solver_result if x["solver"] == "analytical"
        ]
        assert len(
            analytic_solvers
        ) <= 1, "More than one analytic solver not presently supported"
        if len(analytic_solvers) > 0:
            analytic_solver = analytic_solvers[0]

        # if numeric solver is required, generate a stepping function that includes each state variable
        numeric_solver = None
        numeric_solvers = [
            x for x in solver_result if x["solver"].startswith("numeric")
        ]
        if numeric_solvers:
            solver_result = analysis(
                odetoolbox_indict,
                disable_stiffness_check=True,
                disable_analytic_solver=True,
                debug=FrontendConfiguration.logging_level == "DEBUG")
            numeric_solvers = [
                x for x in solver_result if x["solver"].startswith("numeric")
            ]
            assert len(
                numeric_solvers
            ) <= 1, "More than one numeric solver not presently supported"
            if len(numeric_solvers) > 0:
                numeric_solver = numeric_solvers[0]

        return analytic_solver, numeric_solver

    def update_symbol_table(self, neuron, kernel_buffers):
        """
        Update symbol table and scope.
        """
        SymbolTable.delete_neuron_scope(neuron.get_name())
        symbol_table_visitor = ASTSymbolTableVisitor()
        symbol_table_visitor.after_ast_rewrite_ = True
        neuron.accept(symbol_table_visitor)
        SymbolTable.add_neuron_scope(neuron.get_name(), neuron.get_scope())

    def remove_initial_values_for_kernels(self, neuron):
        """
        Remove initial values for original declarations (e.g. g_in, g_in', V_m); these might conflict with the initial value expressions returned from ODE-toolbox.
        """
        assert isinstance(
            neuron.get_equations_blocks(),
            ASTEquationsBlock), "only one equation block should be present"

        equations_block = neuron.get_equations_block()
        symbols_to_remove = set()
        for kernel in equations_block.get_kernels():
            for kernel_var in kernel.get_variables():
                kernel_var_order = kernel_var.get_differential_order()
                for order in range(kernel_var_order):
                    symbol_name = kernel_var.get_name() + "'" * order
                    symbol = equations_block.get_scope().resolve_to_symbol(
                        symbol_name, SymbolKind.VARIABLE)
                    symbols_to_remove.add(symbol_name)

        decl_to_remove = set()
        for symbol_name in symbols_to_remove:
            for decl in neuron.get_initial_blocks().get_declarations():
                if len(decl.get_variables()) == 1:
                    if decl.get_variables()[0].get_name() == symbol_name:
                        decl_to_remove.add(decl)
                else:
                    for var in decl.get_variables():
                        if var.get_name() == symbol_name:
                            decl.variables.remove(var)

        for decl in decl_to_remove:
            neuron.get_initial_blocks().get_declarations().remove(decl)

    def update_initial_values_for_odes(self, neuron, solver_dicts, kernels):
        """
        Update initial values for original ODE declarations (e.g. g_in, V_m', g_ahp'') that are present in the model
        before ODE-toolbox processing, with the formatted variable names and initial values returned by ODE-toolbox.
        """
        assert isinstance(
            neuron.get_equations_blocks(),
            ASTEquationsBlock), "only one equation block should be present"
        equations_block = neuron.get_equations_block()

        for iv_decl in neuron.get_initial_blocks().get_declarations():
            for var in iv_decl.get_variables():
                var_name = var.get_complete_name()
                if is_ode_variable(var.get_name(), neuron):
                    assert variable_in_solver(
                        to_ode_toolbox_processed_name(var_name), solver_dicts)

                    # replace the left-hand side variable name by the ode-toolbox format
                    var.set_name(
                        to_ode_toolbox_processed_name(var.get_complete_name()))
                    var.set_differential_order(0)

                    # replace the defining expression by the ode-toolbox result
                    iv_expr = get_initial_value_from_ode_toolbox_result(
                        to_ode_toolbox_processed_name(var_name), solver_dicts)
                    assert iv_expr is not None
                    iv_expr = ModelParser.parse_expression(iv_expr)
                    iv_expr.update_scope(
                        neuron.get_initial_blocks().get_scope())
                    iv_decl.set_expression(iv_expr)

    def _get_ast_variable(self, neuron, var_name) -> Optional[ASTVariable]:
        """
        Grab the ASTVariable corresponding to the initial value by this name
        """
        for decl in neuron.get_initial_values_blocks().get_declarations():
            for var in decl.variables:
                if var.get_name() == var_name:
                    return var
        return None

    def create_initial_values_for_kernels(self, neuron, solver_dicts, kernels):
        """
        Add the variables used in kernels from the ode-toolbox result dictionary as initial value declarations in the NESTML AST.
        """
        for solver_dict in solver_dicts:
            if solver_dict is None:
                continue
            for var_name in solver_dict["initial_values"].keys():
                if variable_in_kernels(var_name, kernels):
                    # original initial value expressions should have been removed to make place for ode-toolbox results
                    assert not declaration_in_initial_values(neuron, var_name)

        for solver_dict in solver_dicts:
            if solver_dict is None:
                continue

            for var_name, expr in solver_dict["initial_values"].items():
                # here, overwrite is allowed because initial values might be repeated between numeric and analytic solver
                if variable_in_kernels(var_name, kernels):
                    expr = "0"  # for kernels, "initial value" returned by ode-toolbox is actually the increment value; the actual initial value is assumed to be 0
                    if not declaration_in_initial_values(neuron, var_name):
                        add_declaration_to_initial_values(
                            neuron, var_name, expr)

    def create_initial_values_for_ode_toolbox_odes(self, neuron, solver_dicts,
                                                   kernel_buffers, kernels):
        """
        Add the initial values from the ode-toolbox result dictionary as declarations in the NESTML AST's initial values block.
        """
        for solver_dict in solver_dicts:
            if solver_dict is None:
                continue
            for var_name in solver_dict["initial_values"].keys():
                # original initial value expressions should have been removed to make place for ode-toolbox results
                assert not declaration_in_initial_values(neuron, var_name)

        for solver_dict in solver_dicts:
            if solver_dict is None:
                continue

            for var_name, expr in solver_dict["initial_values"].items():
                # here, overwrite is allowed because initial values might be repeated between numeric and analytic solver

                if variable_in_kernels(var_name, kernels):
                    expr = "0"  # for kernels, "initial value" returned by ode-toolbox is actually the increment value; the actual initial value is assumed to be 0

                if not declaration_in_initial_values(neuron, var_name):
                    add_declaration_to_initial_values(neuron, var_name, expr)

    def get_spike_update_expressions(self, neuron: ASTNeuron, kernel_buffers,
                                     solver_dicts,
                                     delta_factors) -> List[ASTAssignment]:
        """
        Generate the equations that update the dynamical variables when incoming spikes arrive. To be invoked after ode-toolbox.

        For example, a resulting `assignment_str` could be "I_kernel_in += (in_spikes/nS) * 1". The values are taken from the initial values for each corresponding dynamical variable, either from ode-toolbox or directly from user specification in the model.

        Note that for kernels, `initial_values` actually contains the increment upon spike arrival, rather than the initial value of the corresponding ODE dimension.
        """
        spike_updates = []
        initial_values = neuron.get_initial_values_blocks()

        for kernel, spike_input_port in kernel_buffers:
            if neuron.get_scope().resolve_to_symbol(
                    str(spike_input_port), SymbolKind.VARIABLE) is None:
                continue

            buffer_type = neuron.get_scope().resolve_to_symbol(
                str(spike_input_port), SymbolKind.VARIABLE).get_type_symbol()

            if is_delta_kernel(kernel):
                continue

            for kernel_var in kernel.get_variables():
                for var_order in range(
                        get_kernel_var_order_from_ode_toolbox_result(
                            kernel_var.get_name(), solver_dicts)):
                    kernel_spike_buf_name = construct_kernel_X_spike_buf_name(
                        kernel_var.get_name(), spike_input_port, var_order)
                    expr = get_initial_value_from_ode_toolbox_result(
                        kernel_spike_buf_name, solver_dicts)
                    assert expr is not None, "Initial value not found for kernel " + kernel_var
                    expr = str(expr)
                    if expr in ["0", "0.", "0.0"]:
                        continue  # skip adding the statement if we're only adding zero

                    assignment_str = kernel_spike_buf_name + " += "
                    assignment_str += "(" + str(spike_input_port) + ")"
                    if expr not in ["1.", "1.0", "1"]:
                        assignment_str += " * (" + \
                            self._printer.print_expression(ModelParser.parse_expression(expr)) + ")"

                    if buffer_type.print_nestml_type() not in ["1.", "1.0", "1"]:
                        assignment_str += " / (" + buffer_type.print_nestml_type() + ")"

                    ast_assignment = ModelParser.parse_assignment(
                        assignment_str)
                    ast_assignment.update_scope(neuron.get_scope())
                    ast_assignment.accept(ASTSymbolTableVisitor())

                    spike_updates.append(ast_assignment)

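        # Delta-kernel convolutions were skipped above; they are collected in
        # delta_factors, and for each of them the spike buffer value is added
        # directly to the target state variable, scaled by the convolution factor.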
        for k, factor in delta_factors.items():
            var = k[0]
            inport = k[1]
            assignment_str = var.get_name() + "'" * (
                var.get_differential_order() - 1) + " += "
            if factor not in ["1.", "1.0", "1"]:
                assignment_str += "(" + self._printer.print_expression(
                    ModelParser.parse_expression(factor)) + ") * "
            assignment_str += str(inport)
            ast_assignment = ModelParser.parse_assignment(assignment_str)
            ast_assignment.update_scope(neuron.get_scope())
            ast_assignment.accept(ASTSymbolTableVisitor())

            spike_updates.append(ast_assignment)

        return spike_updates

    def remove_kernel_definitions_from_equations_block(self, neuron):
        """
        Removes all kernels in this block.
        """
        equations_block = neuron.get_equations_block()

        decl_to_remove = set()
        for decl in equations_block.get_declarations():
            if type(decl) is ASTKernel:
                decl_to_remove.add(decl)

        for decl in decl_to_remove:
            equations_block.get_declarations().remove(decl)

        return decl_to_remove

    def remove_ode_definitions_from_equations_block(self, neuron):
        """
        Removes all ODEs in this block.
        """
        equations_block = neuron.get_equations_block()

        decl_to_remove = set()
        for decl in equations_block.get_ode_equations():
            decl_to_remove.add(decl)

        for decl in decl_to_remove:
            equations_block.get_declarations().remove(decl)

    def transform_ode_and_kernels_to_json(self, neuron: ASTNeuron,
                                          parameters_block, kernel_buffers):
        """
        Converts AST node to a JSON representation suitable for passing to ode-toolbox.

        Each kernel has to be generated for each spike buffer convolve in which it occurs, e.g. if the NESTML model code contains the statements

            convolve(G, ex_spikes)
            convolve(G, in_spikes)

        then `kernel_buffers` will contain the pairs `(G, ex_spikes)` and `(G, in_spikes)`, from which two ODEs will be generated, with dynamical state (variable) names `G__X__ex_spikes` and `G__X__in_spikes`.

        :param equations_block: ASTEquationsBlock
        :return: Dict
        """
        odetoolbox_indict = {}

        gsl_converter = ODEToolboxReferenceConverter()
        gsl_printer = UnitlessExpressionPrinter(gsl_converter)

        odetoolbox_indict["dynamics"] = []
        equations_block = neuron.get_equations_block()
        for equation in equations_block.get_ode_equations():
            # n.b. includes single quotation marks to indicate differential order
            lhs = to_ode_toolbox_name(equation.get_lhs().get_complete_name())
            rhs = gsl_printer.print_expression(equation.get_rhs())
            entry = {"expression": lhs + " = " + rhs}
            symbol_name = equation.get_lhs().get_name()
            symbol = equations_block.get_scope().resolve_to_symbol(
                symbol_name, SymbolKind.VARIABLE)

            entry["initial_values"] = {}
            symbol_order = equation.get_lhs().get_differential_order()
            for order in range(symbol_order):
                iv_symbol_name = symbol_name + "'" * order
                initial_value_expr = neuron.get_initial_value(iv_symbol_name)
                if initial_value_expr:
                    expr = gsl_printer.print_expression(initial_value_expr)
                    entry["initial_values"][to_ode_toolbox_name(
                        iv_symbol_name)] = expr
            odetoolbox_indict["dynamics"].append(entry)

        # write a copy for each (kernel, spike buffer) combination
        for kernel, spike_input_port in kernel_buffers:

            if is_delta_kernel(kernel):
                # delta function -- skip passing this to ode-toolbox
                continue

            for kernel_var in kernel.get_variables():
                expr = get_expr_from_kernel_var(kernel,
                                                kernel_var.get_complete_name())
                kernel_order = kernel_var.get_differential_order()
                kernel_X_spike_buf_name_ticks = construct_kernel_X_spike_buf_name(
                    kernel_var.get_name(),
                    spike_input_port,
                    kernel_order,
                    diff_order_symbol="'")

                replace_rhs_variables(expr, kernel_buffers)

                entry = {}
                entry[
                    "expression"] = kernel_X_spike_buf_name_ticks + " = " + str(
                        expr)

                # initial values need to be declared for order 1 up to kernel order (e.g. none for kernel function f(t) = ...; 1 for kernel ODE f'(t) = ...; 2 for f''(t) = ... and so on)
                entry["initial_values"] = {}
                for order in range(kernel_order):
                    iv_sym_name_ode_toolbox = construct_kernel_X_spike_buf_name(
                        kernel_var.get_name(),
                        spike_input_port,
                        order,
                        diff_order_symbol="'")
                    symbol_name_ = kernel_var.get_name() + "'" * order
                    symbol = equations_block.get_scope().resolve_to_symbol(
                        symbol_name_, SymbolKind.VARIABLE)
                    assert symbol is not None, "Could not find initial value for variable " + symbol_name_
                    initial_value_expr = symbol.get_declaring_expression()
                    assert initial_value_expr is not None, "No initial value found for variable name " + symbol_name_
                    entry["initial_values"][
                        iv_sym_name_ode_toolbox] = gsl_printer.print_expression(
                            initial_value_expr)

                odetoolbox_indict["dynamics"].append(entry)

        odetoolbox_indict["parameters"] = {}
        if parameters_block is not None:
            for decl in parameters_block.get_declarations():
                for var in decl.variables:
                    odetoolbox_indict["parameters"][var.get_complete_name(
                    )] = gsl_printer.print_expression(decl.get_expression())

        return odetoolbox_indict

    def make_inline_expressions_self_contained(
        self, inline_expressions: List[ASTInlineExpression]
    ) -> List[ASTInlineExpression]:
        """
        Make inline_expressions self contained, i.e. without any references to other inline_expressions.

        TODO: it should be a method inside of the ASTInlineExpression
        TODO: this should be done by means of a visitor

        :param inline_expressions: A sorted list with entries ASTInlineExpression.
        :return: A list with ASTInlineExpressions. Defining expressions don't depend on each other.
        """
        for source in inline_expressions:
            source_position = source.get_source_position()
            for target in inline_expressions:
                matcher = re.compile(
                    self._variable_matching_template.format(
                        source.get_variable_name()))
                target_definition = str(target.get_expression())
                target_definition = re.sub(
                    matcher, "(" + str(source.get_expression()) + ")",
                    target_definition)
                target.expression = ModelParser.parse_expression(
                    target_definition)
                target.expression.update_scope(source.get_scope())
                target.expression.accept(ASTSymbolTableVisitor())

                def log_set_source_position(node):
                    if node.get_source_position().is_added_source_position():
                        node.set_source_position(source_position)

                target.expression.accept(
                    ASTHigherOrderVisitor(visit_funcs=log_set_source_position))

        return inline_expressions

    def replace_inline_expressions_through_defining_expressions(
            self, definitions, inline_expressions):
        # type: (list(ASTOdeEquation), list(ASTInlineExpression)) -> list(ASTInlineExpression)
        """
        Replaces symbols from `inline_expressions` in `definitions` with corresponding defining expressions from `inline_expressions`.

        :param definitions: A sorted list with entries {"symbol": "name", "definition": "expression"} that should be made free of references to `inline_expressions`.
        :param inline_expressions: A sorted list with entries {"symbol": "name", "definition": "expression"} with inline_expressions which must be replaced in `definitions`.
        :return: A list with definitions. Expressions in `definitions` don't depend on inline_expressions from `inline_expressions`.
        """
        for m in inline_expressions:
            source_position = m.get_source_position()
            for target in definitions:
                matcher = re.compile(
                    self._variable_matching_template.format(
                        m.get_variable_name()))
                target_definition = str(target.get_rhs())
                target_definition = re.sub(matcher,
                                           "(" + str(m.get_expression()) + ")",
                                           target_definition)
                target.rhs = ModelParser.parse_expression(target_definition)
                target.update_scope(m.get_scope())
                target.accept(ASTSymbolTableVisitor())

                def log_set_source_position(node):
                    if node.get_source_position().is_added_source_position():
                        node.set_source_position(source_position)

                target.accept(
                    ASTHigherOrderVisitor(visit_funcs=log_set_source_position))

        return definitions

    def store_transformed_model(self, ast):
        if FrontendConfiguration.store_log:
            with open(
                    str(
                        os.path.join(FrontendConfiguration.get_target_path(),
                                     '..', 'report', ast.get_name())) + '.txt',
                    'w+') as f:
                f.write(str(ast))
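
# Illustrative sketch (not part of the original source): the rough shape of the
# dictionary that transform_ode_and_kernels_to_json() above would hand to
# ode-toolbox for a model containing convolve(G, ex_spikes) and
# convolve(G, in_spikes). All variable names, expressions and parameter values
# below are assumptions chosen purely for illustration.
import json

odetoolbox_indict_sketch = {
    "dynamics": [
        # one entry per ODE in the equations block
        {"expression": "V_m' = -V_m / tau_m + I_syn / C_m",
         "initial_values": {"V_m": "E_L"}},
        # one copy of the kernel ODE per (kernel, spike buffer) pair
        {"expression": "G__X__ex_spikes' = -G__X__ex_spikes / tau_syn",
         "initial_values": {"G__X__ex_spikes": "1 / tau_syn"}},
        {"expression": "G__X__in_spikes' = -G__X__in_spikes / tau_syn",
         "initial_values": {"G__X__in_spikes": "1 / tau_syn"}},
    ],
    "parameters": {"tau_m": "10", "tau_syn": "2", "C_m": "250", "E_L": "-70"},
}
print(json.dumps(odetoolbox_indict_sketch, indent=2))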
Example #10
0
class NestPrinter(object):
    """
    This class contains all methods required to transform expressions, declarations and buffers into a NEST-processable format.
    """
    
    def __init__(self, expression_pretty_printer, reference_convert=None):
        """
        The standard constructor.
        :param reference_convert: a single reference converter
        :type reference_convert: IReferenceConverter
        """
        if expression_pretty_printer is not None:
            self.expression_pretty_printer = expression_pretty_printer
        else:
            self.expression_pretty_printer = ExpressionsPrettyPrinter(reference_convert)
        return

    def print_expression(self, node):
        # type: (ASTExpressionNode) -> str
        """
        Pretty Prints the handed over rhs to a nest readable format.
        :param node: a single meta_model node.
        :type node: ASTExpressionNode
        :return: the corresponding string representation
        :rtype: str
        """
        return self.expression_pretty_printer.print_expression(node)

    def print_method_call(self, node):
        # type: (ASTFunctionCall) -> str
        """
        Prints a single handed over function call.
        :param node: a single function call.
        :type node: ASTFunctionCall
        :return: the corresponding string representation.
        :rtype: str
        """
        return self.expression_pretty_printer.print_function_call(node)

    @classmethod
    def print_comparison_operator(cls, for_stmt):
        """
        Prints a single handed over comparison operator for a for stmt to a Nest processable format.
        :param for_stmt: a single for stmt
        :type for_stmt: ASTForStmt
        :return: a string representation
        :rtype: str
        """
        step = for_stmt.get_step()
        if step < 0:
            return '>'
        elif step > 0:
            return '<'
        else:
            return '!='

    @classmethod
    def print_step(cls, for_stmt):
        """
        Prints the step length to a nest processable format.
        :param for_stmt: a single for stmt
        :type for_stmt: ASTForStmt
        :return: a string representation
        :rtype: str
        """
        assert isinstance(for_stmt, ASTForStmt), \
            '(PyNestML.CodeGenerator.Printer) No or wrong type of for-stmt provided (%s)!' % type(for_stmt)
        return for_stmt.get_step()

    @classmethod
    def print_origin(cls, variable_symbol):
        """
        Returns a prefix corresponding to the origin of the variable symbol.
        :param variable_symbol: a single variable symbol.
        :type variable_symbol: VariableSymbol
        :return: the corresponding prefix
        :rtype: str
        """
        assert isinstance(variable_symbol, VariableSymbol), \
            '(PyNestML.CodeGenerator.Printer) No or wrong type of variable symbol provided (%s)!' % type(
                variable_symbol)
        if variable_symbol.block_type == BlockType.STATE:
            return 'S_.'
        elif variable_symbol.block_type == BlockType.INITIAL_VALUES:
            return 'S_.'
        elif variable_symbol.block_type == BlockType.EQUATION:
            return 'S_.'
        elif variable_symbol.block_type == BlockType.PARAMETERS:
            return 'P_.'
        elif variable_symbol.block_type == BlockType.INTERNALS:
            return 'V_.'
        elif variable_symbol.block_type == BlockType.INPUT_BUFFER_CURRENT:
            return 'B_.'
        elif variable_symbol.block_type == BlockType.INPUT_BUFFER_SPIKE:
            return 'B_.'
        else:
            return ''

    @classmethod
    def print_output_event(cls, ast_body):
        """
        For the handed over neuron body, this operation checks which type of output event shall be performed.
        :param ast_body: a single neuron body
        :type ast_body: ASTBody
        :return: the corresponding representation of the event
        :rtype: str
        """
        assert (ast_body is not None and isinstance(ast_body, ASTBody)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of body provided (%s)!' % type(ast_body)
        outputs = ast_body.get_output_blocks()
        if len(outputs) > 0:
            output = outputs[0]
            if output.is_spike():
                return 'nest::SpikeEvent'
            elif output.is_current():
                return 'nest::CurrentEvent'
            else:
                raise RuntimeError('Unexpected output type. Must be current or spike, is %s.' % str(output))
        else:
            return 'none'

    @classmethod
    def print_buffer_initialization(cls, variable_symbol):
        """
        Prints the buffer initialization.
        :param variable_symbol: a single variable symbol.
        :type variable_symbol: VariableSymbol
        :return: a buffer initialization
        :rtype: str
        """
        return 'get_' + variable_symbol.get_symbol_name() + '().clear(); //includes resize'

    @classmethod
    def print_function_declaration(cls, ast_function):
        """
        Returns a nest processable function declaration head, i.e. the part which appears in the .h file.
        :param ast_function: a single function.
        :type ast_function: ASTFunction
        :return: the corresponding string representation.
        :rtype: str
        """
        from pynestml.meta_model.ast_function import ASTFunction
        from pynestml.symbols.symbol import SymbolKind
        assert (ast_function is not None and isinstance(ast_function, ASTFunction)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_function provided (%s)!' % type(ast_function)
        function_symbol = ast_function.get_scope().resolve_to_symbol(ast_function.get_name(), SymbolKind.FUNCTION)
        if function_symbol is not None:
            declaration = ast_function.print_comment('//') + '\n'
            declaration += PyNestml2NestTypeConverter.convert(function_symbol.get_return_type()).replace('.', '::')
            declaration += ' '
            declaration += ast_function.get_name() + '('
            for typeSym in function_symbol.get_parameter_types():
                declaration += PyNestml2NestTypeConverter.convert(typeSym)
                if function_symbol.get_parameter_types().index(typeSym) < len(
                        function_symbol.get_parameter_types()) - 1:
                    declaration += ', '
            declaration += ')\n'
            return declaration
        else:
            raise RuntimeError('Cannot resolve the method ' + ast_function.get_name())

    @classmethod
    def print_function_definition(cls, ast_function, namespace):
        """
        Returns a nest processable function definition, i.e. the part which appears in the .c file.
        :param ast_function: a single function.
        :type ast_function: ASTFunction
        :param namespace: the namespace in which this function is defined in
        :type namespace: str
        :return: the corresponding string representation.
        :rtype: str
        """
        assert isinstance(ast_function, ASTFunction), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_function provided (%s)!' % type(ast_function)
        assert isinstance(namespace, str), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of namespace provided (%s)!' % type(namespace)
        function_symbol = ast_function.get_scope().resolve_to_symbol(ast_function.get_name(), SymbolKind.FUNCTION)
        if function_symbol is not None:
            # first collect all parameters
            params = list()
            for param in ast_function.get_parameters():
                params.append(param.get_name())
            declaration = ast_function.print_comment('//') + '\n'
            declaration += PyNestml2NestTypeConverter.convert(function_symbol.get_return_type()).replace('.', '::')
            declaration += ' '
            if namespace is not None:
                declaration += namespace + '::'
            declaration += ast_function.get_name() + '('
            for typeSym in function_symbol.get_parameter_types():
                # create the type name combination, e.g. double Tau
                declaration += PyNestml2NestTypeConverter.convert(typeSym) + ' ' + \
                               params[function_symbol.get_parameter_types().index(typeSym)]
                # if not the last component, separate by ','
                if function_symbol.get_parameter_types().index(typeSym) < \
                        len(function_symbol.get_parameter_types()) - 1:
                    declaration += ', '
            declaration += ')\n'
            return declaration
        else:
            raise RuntimeError('Cannot resolve the method ' + ast_function.get_name())

    def print_buffer_array_getter(self, ast_buffer):
        """
        Returns a string containing the nest declaration for a multi-receptor spike buffer.
        :param ast_buffer: a single buffer Variable Symbol
        :type ast_buffer: VariableSymbol
        :return: a string representation of the getter
        :rtype: str
        """
        assert (ast_buffer is not None and isinstance(ast_buffer, VariableSymbol)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        if ast_buffer.is_spike_buffer() and ast_buffer.is_inhibitory() and ast_buffer.is_excitatory():
            return 'inline ' + PyNestml2NestTypeConverter.convert(ast_buffer.get_type_symbol()) + '&' + ' get_' \
                   + ast_buffer.get_symbol_name() + '() {' + \
                   '  return spike_inputs_[' + ast_buffer.get_symbol_name().upper() + ' - 1]; }'
        else:
            return self.print_buffer_getter(ast_buffer, True)

    @classmethod
    def print_buffer_getter(cls, ast_buffer, is_in_struct=False):
        """
        Returns a string representation declaring a buffer getter as required in nest.
        :param ast_buffer: a single variable symbol representing a buffer.
        :type ast_buffer: VariableSymbol
        :param is_in_struct: indicates whether this getter is used in a struct or not
        :type is_in_struct: bool
        :return: a string representation of the getter.
        :rtype: str
        """
        assert (ast_buffer is not None and isinstance(ast_buffer, VariableSymbol)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        assert (is_in_struct is not None and isinstance(is_in_struct, bool)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of is-in-struct provided (%s)!' % type(is_in_struct)
        declaration = 'inline '
        if ast_buffer.has_vector_parameter():
            declaration += 'std::vector<'
            declaration += PyNestml2NestTypeConverter.convert(ast_buffer.get_type_symbol())
            declaration += '> &'
        else:
            declaration += PyNestml2NestTypeConverter.convert(ast_buffer.get_type_symbol()) + '&'
        declaration += ' get_' + ast_buffer.get_symbol_name() + '() {'
        if is_in_struct:
            declaration += 'return ' + ast_buffer.get_symbol_name() + ';'
        else:
            declaration += 'return B_.get_' + ast_buffer.get_symbol_name() + '();'
        declaration += '}'
        return declaration

    @classmethod
    def print_buffer_declaration_value(cls, ast_buffer):
        """
        Returns a string representation for the declaration of a buffer's value.
        :param ast_buffer: a single buffer variable symbol
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        if ast_buffer.has_vector_parameter():
            return 'std::vector<double> ' + NestNamesConverter.buffer_value(ast_buffer)
        else:
            return 'double ' + NestNamesConverter.buffer_value(ast_buffer)

    @classmethod
    def print_buffer_declaration(cls, ast_buffer):
        """
        Returns a string representation for the declaration of a buffer.
        :param ast_buffer: a single buffer variable symbol
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        if ast_buffer.has_vector_parameter():
            buffer_type = 'std::vector< ' + PyNestml2NestTypeConverter.convert(ast_buffer.get_type_symbol()) + ' >'
        else:
            buffer_type = PyNestml2NestTypeConverter.convert(ast_buffer.get_type_symbol())
        buffer_type = buffer_type.replace(".", "::")
        return buffer_type + " " + ast_buffer.get_symbol_name()

    @classmethod
    def print_buffer_declaration_header(cls, ast_buffer):
        """
        Prints the comment as stated over the buffer declaration.
        :param ast_buffer: a single buffer variable symbol.
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        return '//!< Buffer incoming ' + ast_buffer.get_type_symbol().get_symbol_name() + 's through delay, as sum'
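
# Illustrative sketch (not part of the original source): the block-type-to-prefix
# mapping implemented by NestPrinter.print_origin() above, restated as a plain
# dictionary so the generated C++ member accesses can be seen at a glance. The
# string keys are informal stand-ins for the BlockType enum members.
ORIGIN_PREFIX_SKETCH = {
    "STATE": "S_.",
    "INITIAL_VALUES": "S_.",
    "EQUATION": "S_.",
    "PARAMETERS": "P_.",
    "INTERNALS": "V_.",
    "INPUT_BUFFER_CURRENT": "B_.",
    "INPUT_BUFFER_SPIKE": "B_.",
}

# e.g. a state variable V_m would be emitted as "S_.V_m" in the generated C++ code,
# and a parameter C_m as "P_.C_m".
print(ORIGIN_PREFIX_SKETCH["STATE"] + "V_m")   # -> S_.V_m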
Example #11
0
class NESTCodeGenerator(CodeGenerator):

    _variable_matching_template = r'(\b)({})(\b)'

    def __init__(self):
        # setup the template environment
        def raise_helper(msg):
            raise TemplateRuntimeError(msg)

        env = Environment(loader=FileSystemLoader(
            os.path.join(os.path.dirname(__file__), 'resources_nest')))
        env.globals['raise'] = raise_helper
        setup_env = Environment(loader=FileSystemLoader(
            os.path.join(os.path.dirname(__file__), 'resources_nest',
                         'setup')))
        setup_env.globals['raise'] = raise_helper
        # setup the cmake template
        self._template_cmakelists = setup_env.get_template('CMakeLists.jinja2')
        # setup the module class template
        self._template_module_class = env.get_template('ModuleClass.jinja2')
        # setup the NEST module template
        self._template_module_header = env.get_template('ModuleHeader.jinja2')
        # setup the SLI_Init file
        self._template_sli_init = setup_env.get_template('SLI_Init.jinja2')
        # setup the neuron header template
        self._template_neuron_h_file = env.get_template('NeuronHeader.jinja2')
        # setup the neuron implementation template
        self._template_neuron_cpp_file = env.get_template('NeuronClass.jinja2')

        self._printer = ExpressionsPrettyPrinter()

    def generate_code(self, neurons):
        self.analyse_transform_neurons(neurons)
        self.generate_neurons(neurons)
        self.generate_module_code(neurons)

    def generate_module_code(self, neurons):
        # type: (list(ASTNeuron)) -> None
        """
        Generates code that is necessary to integrate neuron models into the NEST infrastructure.
        :param neurons: a list of neurons
        :type neurons: list(ASTNeuron)
        """
        namespace = {
            'neurons': neurons,
            'moduleName': FrontendConfiguration.get_module_name(),
            'now': datetime.datetime.utcnow()
        }
        if not os.path.exists(FrontendConfiguration.get_target_path()):
            os.makedirs(FrontendConfiguration.get_target_path())

        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 FrontendConfiguration.get_module_name())) +
                '.h', 'w+') as f:
            f.write(str(self._template_module_header.render(namespace)))

        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 FrontendConfiguration.get_module_name())) +
                '.cpp', 'w+') as f:
            f.write(str(self._template_module_class.render(namespace)))

        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 'CMakeLists')) + '.txt', 'w+') as f:
            f.write(str(self._template_cmakelists.render(namespace)))

        if not os.path.isdir(
                os.path.realpath(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 'sli'))):
            os.makedirs(
                os.path.realpath(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 'sli')))

        with open(
                str(
                    os.path.join(
                        FrontendConfiguration.get_target_path(), 'sli',
                        FrontendConfiguration.get_module_name() + "-init")) +
                '.sli', 'w+') as f:
            f.write(str(self._template_sli_init.render(namespace)))

        code, message = Messages.get_module_generated(
            FrontendConfiguration.get_target_path())
        Logger.log_message(None, code, message, None, LoggingLevel.INFO)

    def analyse_transform_neurons(self, neurons):
        # type: (list(ASTNeuron)) -> None
        """
        Analyse and transform a list of neurons.
        :param neurons: a list of neurons.
        """
        for neuron in neurons:
            code, message = Messages.get_analysing_transforming_neuron(
                neuron.get_name())
            Logger.log_message(None, code, message, None, LoggingLevel.INFO)
            self.analyse_neuron(neuron)
            # now store the transformed model
            self.store_transformed_model(neuron)

    def analyse_neuron(self, neuron):
        # type: (ASTNeuron) -> None
        """
        Analyse and transform a single neuron.
        :param neuron: a single neuron.
        """
        code, message = Messages.get_start_processing_neuron(neuron.get_name())
        Logger.log_message(neuron, code, message, neuron.get_source_position(),
                           LoggingLevel.INFO)
        # make normalization
        # apply spikes to buffers
        # get rid of convolve calls, store them and apply them at the end
        equations_block = neuron.get_equations_block()
        shape_to_buffers = {}
        if neuron.get_equations_block() is not None:
            # extract function names and corresponding incoming buffers
            convolve_calls = OdeTransformer.get_sum_function_calls(
                equations_block)
            for convolve in convolve_calls:
                shape_to_buffers[str(convolve.get_args()[0])] = str(
                    convolve.get_args()[1])
            OdeTransformer.refactor_convolve_call(neuron.get_equations_block())
            self.make_functions_self_contained(
                equations_block.get_ode_functions())
            self.replace_functions_through_defining_expressions(
                equations_block.get_ode_equations(),
                equations_block.get_ode_functions())
            # transform everything into gsl processable (e.g. no functional shapes) or exact form.
            self.transform_shapes_and_odes(neuron, shape_to_buffers)
            self.apply_spikes_from_buffers(neuron, shape_to_buffers)
            # update the symbol table
            symbol_table_visitor = ASTSymbolTableVisitor()
            symbol_table_visitor.after_ast_rewrite_ = True  # ODE block might have been removed entirely: suppress warnings
            neuron.accept(symbol_table_visitor)

    def generate_neuron_code(self, neuron):
        # type: (ASTNeuron) -> None
        """
        For a handed over neuron, this method generates the corresponding header and implementation file.
        :param neuron: a single neuron object.
        """
        if not os.path.isdir(FrontendConfiguration.get_target_path()):
            os.makedirs(FrontendConfiguration.get_target_path())
        self.generate_model_h_file(neuron)
        self.generate_neuron_cpp_file(neuron)

    def generate_model_h_file(self, neuron):
        # type: (ASTNeuron) -> None
        """
        For a handed over neuron, this method generates the corresponding header file.
        :param neuron: a single neuron object.
        """
        # print("!!!", neuron)
        neuron_h_file = self._template_neuron_h_file.render(
            self.setup_generation_helpers(neuron))
        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 neuron.get_name())) + '.h', 'w+') as f:
            f.write(str(neuron_h_file))

    def generate_neuron_cpp_file(self, neuron):
        # type: (ASTNeuron) -> None
        """
        For a handed over neuron, this method generates the corresponding implementation file.
        :param neuron: a single neuron object.
        """
        neuron_cpp_file = self._template_neuron_cpp_file.render(
            self.setup_generation_helpers(neuron))
        with open(
                str(
                    os.path.join(FrontendConfiguration.get_target_path(),
                                 neuron.get_name())) + '.cpp', 'w+') as f:
            f.write(str(neuron_cpp_file))

    def setup_generation_helpers(self, neuron):
        """
        Returns a standard namespace with often required functionality.
        :param neuron: a single neuron instance
        :type neuron: ASTNeuron
        :return: a map from name to functionality.
        :rtype: dict
        """
        gsl_converter = GSLReferenceConverter()
        gsl_printer = LegacyExpressionPrinter(gsl_converter)
        # helper classes and objects
        converter = NESTReferenceConverter(False)
        legacy_pretty_printer = LegacyExpressionPrinter(converter)

        namespace = dict()

        namespace['neuronName'] = neuron.get_name()
        namespace['neuron'] = neuron
        namespace['moduleName'] = FrontendConfiguration.get_module_name()
        namespace['printer'] = NestPrinter(legacy_pretty_printer)
        namespace['assignments'] = NestAssignmentsHelper()
        namespace['names'] = NestNamesConverter()
        namespace['declarations'] = NestDeclarationsHelper()
        namespace['utils'] = ASTUtils()
        namespace['idemPrinter'] = LegacyExpressionPrinter()
        namespace['outputEvent'] = namespace['printer'].print_output_event(
            neuron.get_body())
        namespace['is_spike_input'] = ASTUtils.is_spike_input(
            neuron.get_body())
        namespace['is_current_input'] = ASTUtils.is_current_input(
            neuron.get_body())
        namespace['odeTransformer'] = OdeTransformer()
        namespace['printerGSL'] = gsl_printer
        namespace['now'] = datetime.datetime.utcnow()

        self.define_solver_type(neuron, namespace)
        return namespace

    def define_solver_type(self, neuron, namespace):
        # type: (ASTNeuron, dict) -> None
        """
        For a handed over neuron this method enriches the namespace with methods
        which are used to solve ODEs.
        :param namespace: a single namespace dict.
        :param neuron: a single neuron
        """
        namespace['useGSL'] = False
        if neuron.get_equations_block() is not None and len(
                neuron.get_equations_block().get_declarations()) > 0:
            if (not self.is_functional_shape_present(neuron.get_equations_block().get_ode_shapes())) or \
                    len(neuron.get_equations_block().get_ode_equations()) > 1:
                namespace['names'] = GSLNamesConverter()
                namespace['useGSL'] = True
                converter = NESTReferenceConverter(True)
                legacy_pretty_printer = LegacyExpressionPrinter(converter)
                namespace['printer'] = NestPrinter(legacy_pretty_printer)
        return

    def is_functional_shape_present(self, shapes):
        # type: (list(ASTOdeShape)) -> bool
        """
        For a handed over list of shapes this method checks if a shape exists with differential order of 0.
        :param shapes: a list of shapes
        :type shapes: list(ASTOdeShape)
        :return: True if at least one shape with differential order of 0 exists, otherwise False.
        :rtype: bool
        """
        for shape in shapes:
            if shape.get_variable().get_differential_order() == 0:
                return True
        return False

    def transform_shapes_and_odes(self, neuron, shape_to_buffers):
        # type: (ASTNeuron, map(str, str)) -> ASTNeuron
        """
        Solves all odes and equations in the handed over neuron.

        Precondition: it should be ensured that at most one equations block is present.

        :param neuron: a single neuron instance.
        :param shape_to_buffers: Map of shape names to buffers to which they were connected.
        :return: A transformed version of the neuron that can be passed to the GSL.
        """

        assert isinstance(
            neuron.get_equations_blocks(), ASTEquationsBlock
        ), "Precondition violated: only one equation block should be present"

        equations_block = neuron.get_equations_block()

        if len(equations_block.get_ode_shapes()) == 0:
            code, message = Messages.get_neuron_solved_by_solver(
                neuron.get_name())
            Logger.log_message(neuron, code, message,
                               neuron.get_source_position(), LoggingLevel.INFO)
            return neuron
        elif len(equations_block.get_ode_shapes()) == 1 and \
                str(equations_block.get_ode_shapes()[0].get_expression()).strip().startswith(
                    "delta"):  # assume the model is well formed
            shape = equations_block.get_ode_shapes()[0]
            integrate_delta_solution(equations_block, neuron, shape,
                                     shape_to_buffers)
            return neuron
        elif len(equations_block.get_ode_equations()) == 1:
            code, message = Messages.get_neuron_analyzed(neuron.get_name())
            Logger.log_message(neuron, code, message,
                               neuron.get_source_position(), LoggingLevel.INFO)
            solver_result = self.solve_ode_with_shapes(equations_block)

            if solver_result["solver"] is "analytical":
                neuron = integrate_exact_solution(neuron, solver_result)
                neuron.remove_equations_block()
            elif (solver_result["solver"] is "numeric"
                  and self.is_functional_shape_present(
                      equations_block.get_ode_shapes())):
                functional_shapes_to_odes(neuron, solver_result)

            return neuron
        else:
            code, message = Messages.get_neuron_solved_by_solver(
                neuron.get_name())
            Logger.log_message(neuron, code, message,
                               neuron.get_source_position(), LoggingLevel.INFO)

            if self.is_functional_shape_present(
                    equations_block.get_ode_shapes()):
                ode_shapes = self.solve_functional_shapes(equations_block)
                functional_shapes_to_odes(neuron, ode_shapes)

            return neuron

    def apply_spikes_from_buffers(self, neuron, shape_to_buffers):
        """generate the equations that update the dynamical variables when incoming spikes arrive.

        For example, a resulting `assignment_string` could be "I_shape_in += (in_spikes/nS) * 1".

        The definition of the spike kernel shape is then set to 0.
        """
        spike_updates = []
        initial_values = neuron.get_initial_values_blocks()
        for declaration in initial_values.get_declarations():
            variable = declaration.get_variables()[0]
            for shape in shape_to_buffers:
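                # matches the shape variable itself as well as the auxiliary
                # higher-order variables generated for it (e.g. shape__1, shape__2)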
                matcher_computed_shape_odes = re.compile(shape + r"(__\d+)?")
                if re.match(matcher_computed_shape_odes, str(variable)):
                    buffer_type = neuron.get_scope(). \
                        resolve_to_symbol(shape_to_buffers[shape], SymbolKind.VARIABLE).get_type_symbol()
                    assignment_string = variable.get_complete_name() + " += (" + shape_to_buffers[
                        shape] + '/' + buffer_type.print_nestml_type() + ") * " + \
                                        self._printer.print_expression(declaration.get_expression())
                    spike_updates.append(
                        ModelParser.parse_assignment(assignment_string))
                    # the IV is applied. can be reset
                    declaration.set_expression(
                        ModelParser.parse_expression("0"))
        for assignment in spike_updates:
            add_assignment_to_update_block(assignment, neuron)

    def solve_ode_with_shapes(self, equations_block):
        # type: (ASTEquationsBlock) -> dict[str, list]
        odes_shapes_json = self.transform_ode_and_shapes_to_json(
            equations_block)

        return analysis(odes_shapes_json, enable_stiffness_check=False)

    def transform_ode_and_shapes_to_json(self, equations_block):
        # type: (ASTEquationsBlock) -> dict[str, list]
        """
        Converts AST node to a JSON representation
        :param equations_block:equations_block
        :return: json mapping: {odes: [...], shape: [...]}
        """
        result = {"odes": [], "shapes": []}

        for equation in equations_block.get_ode_equations():
            result["odes"].append({
                "symbol":
                equation.get_lhs().get_name(),
                "definition":
                self._printer.print_expression(equation.get_rhs())
            })

        ode_shape_names = set()
        for shape in equations_block.get_ode_shapes():
            if shape.get_variable().get_differential_order() == 0:
                result["shapes"].append({
                    "type":
                    "function",
                    "symbol":
                    shape.get_variable().get_complete_name(),
                    "definition":
                    self._printer.print_expression(shape.get_expression())
                })

            else:
                extracted_shape_name = shape.get_variable().get_name()
                if '__' in shape.get_variable().get_name():
                    extracted_shape_name = shape.get_variable().get_name(
                    )[0:shape.get_variable().get_name().find("__")]
                if extracted_shape_name not in ode_shape_names:  # add shape name only once
                    ode_shape_names.add(extracted_shape_name)

        # try to resolve all available initial values
        shape_name_to_initial_values = {}
        shape_name_to_shape_definition = {}

        for shape_name in ode_shape_names:
            shape_name_symbol = equations_block.get_scope().resolve_to_symbol(
                shape_name, SymbolKind.VARIABLE)
            shape_name_to_initial_values[shape_name] = [
                self._printer.print_expression(
                    shape_name_symbol.get_declaring_expression())
            ]
            shape_name_to_shape_definition[
                shape_name] = self._printer.print_expression(
                    shape_name_symbol.get_ode_definition())
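            # Higher derivatives of a shape are stored under names with a "__d"
            # suffix per order (shape__d, shape__dd, ...); walk up the orders and
            # collect their initial values and ODE definitions until no further
            # symbol can be resolved.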
            order = 1
            while True:
                shape_name_symbol = equations_block.get_scope(
                ).resolve_to_symbol(shape_name + "__" + 'd' * order,
                                    SymbolKind.VARIABLE)
                if shape_name_symbol is not None:
                    shape_name_to_initial_values[shape_name].append(
                        self._printer.print_expression(
                            shape_name_symbol.get_declaring_expression()))
                    shape_name_to_shape_definition[
                        shape_name] = self._printer.print_expression(
                            shape_name_symbol.get_ode_definition())
                else:
                    break
                order = order + 1

        for shape_name in ode_shape_names:
            result["shapes"].append({
                "type":
                "ode",
                "symbol":
                shape_name,
                "definition":
                shape_name_to_shape_definition[shape_name],
                "initial_values":
                shape_name_to_initial_values[shape_name]
            })

        result["parameters"] = {}  # ode-framework requires this.
        return result

    def solve_functional_shapes(self, equations_block):
        # type: (ASTEquationsBlock) -> dict[str, list]
        shapes_json = self.transform_functional_shapes_to_json(equations_block)

        return analysis(shapes_json, enable_stiffness_check=False)

    def transform_functional_shapes_to_json(self, equations_block):
        # type: (ASTEquationsBlock) -> dict[str, list]
        """
        Converts AST node to a JSON representation
        :param equations_block:equations_block
        :return: json mapping: {odes: [...], shape: [...]}
        """
        result = {"odes": [], "shapes": []}

        for shape in equations_block.get_ode_shapes():
            if shape.get_variable().get_differential_order() == 0:
                result["shapes"].append({
                    "type":
                    "function",
                    "symbol":
                    shape.get_variable().get_complete_name(),
                    "definition":
                    self._printer.print_expression(shape.get_expression())
                })

        result["parameters"] = {}  # ode-framework requires this.
        return result

    def make_functions_self_contained(self, functions):
        # type: (list(ASTOdeFunction)) -> list(ASTOdeFunction)
        """
        TODO: it should be a method inside of the ASTOdeFunction
        TODO by KP: this should be done by means of a visitor
        Make function definitions self-contained, i.e. without any references to other functions from `functions`.
        :param functions: A sorted list with entries ASTOdeFunction.
        :return: A list with ASTOdeFunctions. Defining expressions don't depend on each other.
        """
        for source in functions:
            for target in functions:
                matcher = re.compile(
                    self._variable_matching_template.format(
                        source.get_variable_name()))
                target_definition = str(target.get_expression())
                target_definition = re.sub(
                    matcher, "(" + str(source.get_expression()) + ")",
                    target_definition)
                target.expression = ModelParser.parse_expression(
                    target_definition)
        return functions

    def replace_functions_through_defining_expressions(self, definitions,
                                                       functions):
        # type: (list(ASTOdeEquation), list(ASTOdeFunction)) -> list(ASTOdeFunction)
        """
        Replaces symbols from `functions` in `definitions` with corresponding defining expressions from `functions`.
        :param definitions: A sorted list with entries {"symbol": "name", "definition": "expression"} that should be made
        free of references to `functions`.
        :param functions: A sorted list with entries {"symbol": "name", "definition": "expression"} with functions which
        must be replaced in `definitions`.
        :return: A list with definitions. Expressions in `definitions` don't depend on functions from `functions`.
        """
        for fun in functions:
            for target in definitions:
                matcher = re.compile(
                    self._variable_matching_template.format(
                        fun.get_variable_name()))
                target_definition = str(target.get_rhs())
                target_definition = re.sub(
                    matcher, "(" + str(fun.get_expression()) + ")",
                    target_definition)
                target.rhs = ModelParser.parse_expression(target_definition)
        return definitions

    def transform_functions_json(self, equations_block):
        # type: (ASTEquationsBlock) -> list[dict[str, str]]
        """
        Converts AST node to a JSON representation
        :param equations_block:equations_block
        :return: json mapping: {odes: [...], shape: [...]}
        """
        equations_block = OdeTransformer.refactor_convolve_call(
            equations_block)
        result = []

        for fun in equations_block.get_functions():
            result.append({
                "symbol":
                fun.get_variable_name(),
                "definition":
                self._printer.print_expression(fun.get_expression())
            })

        return result

    def store_transformed_model(self, ast):
        if FrontendConfiguration.store_log:
            with open(
                    str(
                        os.path.join(FrontendConfiguration.get_target_path(),
                                     '..', 'report', ast.get_name())) + '.txt',
                    'w+') as f:
                f.write(str(ast))
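
# Illustrative sketch (not part of the original source): the whole-word regex
# substitution that make_functions_self_contained() and
# replace_functions_through_defining_expressions() above rely on, demonstrated on
# plain strings. The variable and expression names are assumptions for illustration.
import re

_variable_matching_template = r'(\b)({})(\b)'

source_name = "I_syn"                       # symbol being inlined
source_expr = "g_ex * (V_m - E_ex)"         # its defining expression
target_expr = "(-V_m + I_syn) / tau_m"      # expression that still references I_syn

matcher = re.compile(_variable_matching_template.format(source_name))
result = re.sub(matcher, "(" + source_expr + ")", target_expr)
print(result)   # -> (-V_m + (g_ex * (V_m - E_ex))) / tau_m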
Example #12
0
env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'resources_NEST')))
setup_env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'resources_NEST', 'setup')))
# setup the cmake template
template_cmakelists = setup_env.get_template('CMakeLists.jinja2')
# setup the module class template
template_module_class = env.get_template('ModuleClass.jinja2')
# setup the NEST module template
template_module_header = env.get_template('ModuleHeader.jinja2')
# setup the SLI_Init file
template_sli_init = setup_env.get_template('SLI_Init.jinja2')
# setup the neuron header template
template_neuron_h_file = env.get_template('NeuronHeader.jinja2')
# setup the neuron implementation template
template_neuron_cpp_file = env.get_template('NeuronClass.jinja2')

_printer = ExpressionsPrettyPrinter()


def generate_nest_module_code(neurons):
    # type: (list(ASTNeuron)) -> None
    """
    Generates code that is necessary to integrate neuron models into the NEST infrastructure.
    :param neurons: a list of neurons
    :type neurons: list(ASTNeuron)
    """
    namespace = {'neurons': neurons, 'moduleName': FrontendConfiguration.get_module_name(),
                 'now': datetime.datetime.utcnow()}
    if not os.path.exists(FrontendConfiguration.get_target_path()):
        os.makedirs(FrontendConfiguration.get_target_path())

    with open(str(os.path.join(FrontendConfiguration.get_target_path(),
Example #13
0
class NestPrinter(object):
    """
    This class contains all methods required to transform expressions, declarations and buffers into a NEST-processable format.
    """
    def __init__(self, expression_pretty_printer, reference_convert=None):
        """
        The standard constructor.
        :param reference_convert: a single reference converter
        :type reference_convert: IReferenceConverter
        """
        if expression_pretty_printer is not None:
            self.expression_pretty_printer = expression_pretty_printer
        else:
            self.expression_pretty_printer = ExpressionsPrettyPrinter(
                reference_convert)
        return

    def print_expression(self, node):
        # type: (ASTExpressionNode) -> str
        """
        Pretty Prints the handed over rhs to a nest readable format.
        :param node: a single meta_model node.
        :type node: ASTExpressionNode
        :return: the corresponding string representation
        :rtype: str
        """
        return self.expression_pretty_printer.print_expression(node)

    def print_method_call(self, node):
        # type: (ASTFunctionCall) -> str
        """
        Prints a single handed over function call.
        :param node: a single function call.
        :type node: ASTFunctionCall
        :return: the corresponding string representation.
        :rtype: str
        """
        return self.expression_pretty_printer.print_function_call(node)

    @classmethod
    def print_comparison_operator(cls, for_stmt):
        """
        Prints a single handed over comparison operator for a for stmt to a Nest processable format.
        :param for_stmt: a single for stmt
        :type for_stmt: ASTForStmt
        :return: a string representation
        :rtype: str
        """
        step = for_stmt.get_step()
        if step < 0:
            return '>'
        elif step > 0:
            return '<'
        else:
            return '!='

    @classmethod
    def print_step(cls, for_stmt):
        """
        Prints the step length to a nest processable format.
        :param for_stmt: a single for stmt
        :type for_stmt: ASTForStmt
        :return: a string representation
        :rtype: str
        """
        assert isinstance(for_stmt, ASTForStmt), \
            '(PyNestML.CodeGenerator.Printer) No or wrong type of for-stmt provided (%s)!' % type(for_stmt)
        return for_stmt.get_step()

    @classmethod
    def print_origin(cls, variable_symbol):
        """
        Returns a prefix corresponding to the origin of the variable symbol.
        :param variable_symbol: a single variable symbol.
        :type variable_symbol: VariableSymbol
        :return: the corresponding prefix
        :rtype: str
        """
        assert isinstance(variable_symbol, VariableSymbol), \
            '(PyNestML.CodeGenerator.Printer) No or wrong type of variable symbol provided (%s)!' % type(
                variable_symbol)
        if variable_symbol.block_type == BlockType.STATE:
            return 'S_.'
        elif variable_symbol.block_type == BlockType.INITIAL_VALUES:
            return 'S_.'
        elif variable_symbol.block_type == BlockType.EQUATION:
            return 'S_.'
        elif variable_symbol.block_type == BlockType.PARAMETERS:
            return 'P_.'
        elif variable_symbol.block_type == BlockType.INTERNALS:
            return 'V_.'
        elif variable_symbol.block_type == BlockType.INPUT_BUFFER_CURRENT:
            return 'B_.'
        elif variable_symbol.block_type == BlockType.INPUT_BUFFER_SPIKE:
            return 'B_.'
        else:
            return ''

    @classmethod
    def print_output_event(cls, ast_body):
        """
        For the handed over neuron body, this operation checks which type of output event shall be performed.
        :param ast_body: a single neuron body
        :type ast_body: ASTBody
        :return: the corresponding representation of the event
        :rtype: str
        """
        assert (ast_body is not None and isinstance(ast_body, ASTBody)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of body provided (%s)!' % type(ast_body)
        outputs = ast_body.get_output_blocks()
        if len(outputs) > 0:
            output = outputs[0]
            if output.is_spike():
                return 'nest::SpikeEvent'
            elif output.is_current():
                return 'nest::CurrentEvent'
            else:
                raise RuntimeError(
                    'Unexpected output type. Must be current or spike, is %s.'
                    % str(output))
        else:
            return 'none'

    @classmethod
    def print_buffer_initialization(cls, variable_symbol):
        """
        Prints the buffer initialization.
        :param variable_symbol: a single variable symbol.
        :type variable_symbol: VariableSymbol
        :return: a buffer initialization
        :rtype: str
        """
        return 'get_' + variable_symbol.get_symbol_name(
        ) + '().clear(); //includes resize'

    @classmethod
    def print_function_declaration(cls, ast_function):
        """
        Returns a NEST-processable function declaration head, i.e. the part which appears in the .h file.
        :param ast_function: a single function.
        :type ast_function: ASTFunction
        :return: the corresponding string representation.
        :rtype: str
        """
        from pynestml.meta_model.ast_function import ASTFunction
        from pynestml.symbols.symbol import SymbolKind
        assert (ast_function is not None and isinstance(ast_function, ASTFunction)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_function provided (%s)!' % type(ast_function)
        function_symbol = ast_function.get_scope().resolve_to_symbol(
            ast_function.get_name(), SymbolKind.FUNCTION)
        if function_symbol is None:
            raise RuntimeError('Cannot resolve the method ' +
                               ast_function.get_name())
        declaration = ast_function.print_comment('//') + '\n'
        declaration += PyNestml2NestTypeConverter.convert(
            function_symbol.get_return_type()).replace('.', '::')
        declaration += ' '
        declaration += ast_function.get_name() + '('
        for typeSym in function_symbol.get_parameter_types():
            declaration += PyNestml2NestTypeConverter.convert(typeSym)
            if function_symbol.get_parameter_types().index(typeSym) < len(
                    function_symbol.get_parameter_types()) - 1:
                declaration += ', '
        declaration += ') const\n'
        return declaration

    @classmethod
    def print_function_definition(cls, ast_function, namespace):
        """
        Returns a NEST-processable function definition, i.e. the part which appears in the .cpp file.
        :param ast_function: a single function.
        :type ast_function: ASTFunction
        :param namespace: the namespace in which this function is defined in
        :type namespace: str
        :return: the corresponding string representation.
        :rtype: str
        """
        assert isinstance(ast_function, ASTFunction), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_function provided (%s)!' % type(ast_function)
        assert isinstance(namespace, str), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of namespace provided (%s)!' % type(namespace)
        function_symbol = ast_function.get_scope().resolve_to_symbol(
            ast_function.get_name(), SymbolKind.FUNCTION)
        if function_symbol is None:
            raise RuntimeError('Cannot resolve the method ' +
                               ast_function.get_name())
        # first collect all parameters
        params = list()
        for param in ast_function.get_parameters():
            params.append(param.get_name())
        declaration = ast_function.print_comment('//') + '\n'
        declaration += PyNestml2NestTypeConverter.convert(
            function_symbol.get_return_type()).replace('.', '::')
        declaration += ' '
        if namespace is not None:
            declaration += namespace + '::'
        declaration += ast_function.get_name() + '('
        for typeSym in function_symbol.get_parameter_types():
            # create the type name combination, e.g. double Tau
            declaration += PyNestml2NestTypeConverter.convert(typeSym) + ' ' + \
                            params[function_symbol.get_parameter_types().index(typeSym)]
            # if not the last component, separate by ','
            if function_symbol.get_parameter_types().index(typeSym) < \
                    len(function_symbol.get_parameter_types()) - 1:
                declaration += ', '
        declaration += ') const\n'
        return declaration

    def print_buffer_array_getter(self, ast_buffer):
        """
        Returns a string containing the nest declaration for a multi-receptor spike buffer.
        :param ast_buffer: a single buffer Variable Symbol
        :type ast_buffer: VariableSymbol
        :return: a string representation of the getter
        :rtype: str
        """
        assert (ast_buffer is not None and isinstance(ast_buffer, VariableSymbol)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
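        # a spike buffer that is both inhibitory and excitatory belongs to the
        # multi-receptor spike_inputs_ array and therefore needs an index-based getter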
        if ast_buffer.is_spike_buffer() and ast_buffer.is_inhibitory(
        ) and ast_buffer.is_excitatory():
            return 'inline ' + PyNestml2NestTypeConverter.convert(ast_buffer.get_type_symbol()) + '&' + ' get_' \
                   + ast_buffer.get_symbol_name() + '() {' + \
                   '  return spike_inputs_[' + ast_buffer.get_symbol_name().upper() + ' - 1]; }'
        else:
            return self.print_buffer_getter(ast_buffer, True)

    @classmethod
    def print_buffer_getter(cls, ast_buffer, is_in_struct=False):
        """
        Returns a string representation declaring a buffer getter as required by NEST.
        :param ast_buffer: a single variable symbol representing a buffer.
        :type ast_buffer: VariableSymbol
        :param is_in_struct: indicates whether this getter is used in a struct or not
        :type is_in_struct: bool
        :return: a string representation of the getter.
        :rtype: str
        """
        assert (ast_buffer is not None and isinstance(ast_buffer, VariableSymbol)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        assert (is_in_struct is not None and isinstance(is_in_struct, bool)), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of is-in-struct provided (%s)!' % type(is_in_struct)
        declaration = 'inline '
        if ast_buffer.has_vector_parameter():
            declaration += 'std::vector<'
            declaration += PyNestml2NestTypeConverter.convert(
                ast_buffer.get_type_symbol())
            declaration += '> &'
        else:
            declaration += PyNestml2NestTypeConverter.convert(
                ast_buffer.get_type_symbol()) + '&'
        declaration += ' get_' + ast_buffer.get_symbol_name() + '() {'
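        # inside the buffers struct the member is returned directly; from the neuron
        # class the call is forwarded to the B_ instance instead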
        if is_in_struct:
            declaration += 'return ' + ast_buffer.get_symbol_name() + ';'
        else:
            declaration += 'return B_.get_' + ast_buffer.get_symbol_name(
            ) + '();'
        declaration += '}'
        return declaration

    @classmethod
    def print_buffer_declaration_value(cls, ast_buffer):
        """
        Returns a string representation for the declaration of a buffer's value.
        :param ast_buffer: a single buffer variable symbol
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        if ast_buffer.has_vector_parameter():
            return 'std::vector<double> ' + NestNamesConverter.buffer_value(
                ast_buffer)
        else:
            return 'double ' + NestNamesConverter.buffer_value(ast_buffer)

    @classmethod
    def print_buffer_declaration(cls, ast_buffer):
        """
        Returns a string representation for the declaration of a buffer.
        :param ast_buffer: a single buffer variable symbol
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        if ast_buffer.has_vector_parameter():
            buffer_type = 'std::vector< ' + PyNestml2NestTypeConverter.convert(
                ast_buffer.get_type_symbol()) + ' >'
        else:
            buffer_type = PyNestml2NestTypeConverter.convert(
                ast_buffer.get_type_symbol())
        buffer_type = buffer_type.replace(".", "::")  # str.replace returns a new string; assign it back
        return buffer_type + " " + ast_buffer.get_symbol_name()

    @classmethod
    def print_buffer_declaration_header(cls, ast_buffer):
        """
        Prints the comment that is placed above the buffer declaration.
        :param ast_buffer: a single buffer variable symbol.
        :type ast_buffer: VariableSymbol
        :return: the corresponding string representation
        :rtype: str
        """
        assert isinstance(ast_buffer, VariableSymbol), \
            '(PyNestML.CodeGeneration.Printer) No or wrong type of ast_buffer symbol provided (%s)!' % type(ast_buffer)
        return '//!< Buffer incoming ' + ast_buffer.get_type_symbol(
        ).get_symbol_name() + 's through delay, as sum'
Example #14
0
class NESTCodeGenerator(CodeGenerator):

    _variable_matching_template = r'(\b)({})(\b)'
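    # _variable_matching_template matches a variable name as a whole word only; the
    # \b anchors keep substitutions from hitting substrings of longer identifiers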

    def __init__(self):
        # setup the template environment
        def raise_helper(msg):
            raise TemplateRuntimeError(msg)
        env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'resources_nest')))
        env.globals['raise'] = raise_helper
        setup_env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'resources_nest', 'setup')))
        setup_env.globals['raise'] = raise_helper
        # setup the cmake template
        self._template_cmakelists = setup_env.get_template('CMakeLists.jinja2')
        # setup the module class template
        self._template_module_class = env.get_template('ModuleClass.jinja2')
        # setup the NEST module template
        self._template_module_header = env.get_template('ModuleHeader.jinja2')
        # setup the SLI_Init file
        self._template_sli_init = setup_env.get_template('SLI_Init.jinja2')
        # setup the neuron header template
        self._template_neuron_h_file = env.get_template('NeuronHeader.jinja2')
        # setup the neuron implementation template
        self._template_neuron_cpp_file = env.get_template('NeuronClass.jinja2')

        self._printer = ExpressionsPrettyPrinter()

    def generate_code(self, neurons):
        self.analyse_transform_neurons(neurons)
        self.generate_neurons(neurons)
        self.generate_module_code(neurons)
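
    # Illustrative usage sketch (hypothetical driver code; assumes `parsed_neurons`
    # already holds ASTNeuron instances produced by the NESTML frontend):
    #
    #     generator = NESTCodeGenerator()
    #     generator.generate_code(parsed_neurons)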

    def generate_module_code(self, neurons):
        # type: (list(ASTNeuron)) -> None
        """
        Generates code that is necessary to integrate neuron models into the NEST infrastructure.
        :param neurons: a list of neurons
        :type neurons: list(ASTNeuron)
        """
        namespace = {'neurons': neurons,
                     'moduleName': FrontendConfiguration.get_module_name(),
                     'now': datetime.datetime.utcnow()}
        if not os.path.exists(FrontendConfiguration.get_target_path()):
            os.makedirs(FrontendConfiguration.get_target_path())

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   FrontendConfiguration.get_module_name())) + '.h', 'w+') as f:
            f.write(str(self._template_module_header.render(namespace)))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   FrontendConfiguration.get_module_name())) + '.cpp', 'w+') as f:
            f.write(str(self._template_module_class.render(namespace)))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   'CMakeLists')) + '.txt', 'w+') as f:
            f.write(str(self._template_cmakelists.render(namespace)))

        if not os.path.isdir(os.path.realpath(os.path.join(FrontendConfiguration.get_target_path(), 'sli'))):
            os.makedirs(os.path.realpath(os.path.join(FrontendConfiguration.get_target_path(), 'sli')))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(), 'sli',
                                   FrontendConfiguration.get_module_name() + "-init")) + '.sli', 'w+') as f:
            f.write(str(self._template_sli_init.render(namespace)))

        code, message = Messages.get_module_generated(FrontendConfiguration.get_target_path())
        Logger.log_message(None, code, message, None, LoggingLevel.INFO)


    def analyse_transform_neurons(self, neurons):
        # type: (list(ASTNeuron)) -> None
        """
        Analyse and transform a list of neurons.
        :param neurons: a list of neurons.
        """
        for neuron in neurons:
            code, message = Messages.get_analysing_transforming_neuron(neuron.get_name())
            Logger.log_message(None, code, message, None, LoggingLevel.INFO)
            self.analyse_neuron(neuron)
            # now store the transformed model
            self.store_transformed_model(neuron)


    def analyse_neuron(self, neuron):
        # type: (ASTNeuron) -> None
        """
        Analyse and transform a single neuron.
        :param neuron: a single neuron.
        """
        code, message = Messages.get_start_processing_neuron(neuron.get_name())
        Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
        # make normalization
        # apply spikes to buffers
        # get rid of convolve calls, store them and apply them at the end
        equations_block = neuron.get_equations_block()
        shape_to_buffers = {}
        if neuron.get_equations_block() is not None:
            # extract function names and corresponding incoming buffers
            convolve_calls = OdeTransformer.get_sum_function_calls(equations_block)
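            # convolve(shape, buffer): the first argument is the kernel/shape, the
            # second the incoming spike buffer it is convolved with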
            for convolve in convolve_calls:
                shape_to_buffers[str(convolve.get_args()[0])] = str(convolve.get_args()[1])
            OdeTransformer.refactor_convolve_call(neuron.get_equations_block())
            self.make_functions_self_contained(equations_block.get_ode_functions())
            self.replace_functions_through_defining_expressions(equations_block.get_ode_equations(),
                                                                equations_block.get_ode_functions())
            # transform everything into a GSL-processable (i.e. without functional shapes) or exact form
            self.transform_shapes_and_odes(neuron, shape_to_buffers)
            self.apply_spikes_from_buffers(neuron, shape_to_buffers)
            # update the symbol table
            symbol_table_visitor = ASTSymbolTableVisitor()
            symbol_table_visitor.after_ast_rewrite_ = True  # ODE block might have been removed entirely: suppress warnings
            neuron.accept(symbol_table_visitor)


    def generate_neuron_code(self, neuron):
        # type: (ASTNeuron) -> None
        """
        For a handed over neuron, this method generates the corresponding header and implementation file.
        :param neuron: a single neuron object.
        """
        if not os.path.isdir(FrontendConfiguration.get_target_path()):
            os.makedirs(FrontendConfiguration.get_target_path())
        self.generate_model_h_file(neuron)
        self.generate_neuron_cpp_file(neuron)


    def generate_model_h_file(self, neuron):
        # type: (ASTNeuron) -> None
        """
        For a handed over neuron, this method generates the corresponding header file.
        :param neuron: a single neuron object.
        """
        # print("!!!", neuron)
        neuron_h_file = self._template_neuron_h_file.render(self.setup_generation_helpers(neuron))
        with open(str(os.path.join(FrontendConfiguration.get_target_path(), neuron.get_name())) + '.h', 'w+') as f:
            f.write(str(neuron_h_file))


    def generate_neuron_cpp_file(self, neuron):
        # type: (ASTNeuron) -> None
        """
        For a handed over neuron, this method generates the corresponding implementation file.
        :param neuron: a single neuron object.
        """
        neuron_cpp_file = self._template_neuron_cpp_file.render(self.setup_generation_helpers(neuron))
        with open(str(os.path.join(FrontendConfiguration.get_target_path(), neuron.get_name())) + '.cpp', 'w+') as f:
            f.write(str(neuron_cpp_file))


    def setup_generation_helpers(self, neuron):
        """
        Returns a standard namespace with often required functionality.
        :param neuron: a single neuron instance
        :type neuron: ASTNeuron
        :return: a map from name to functionality.
        :rtype: dict
        """
        gsl_converter = GSLReferenceConverter()
        gsl_printer = LegacyExpressionPrinter(gsl_converter)
        # helper classes and objects
        converter = NESTReferenceConverter(False)
        legacy_pretty_printer = LegacyExpressionPrinter(converter)

        namespace = dict()

        namespace['neuronName'] = neuron.get_name()
        namespace['neuron'] = neuron
        namespace['moduleName'] = FrontendConfiguration.get_module_name()
        namespace['printer'] = NestPrinter(legacy_pretty_printer)
        namespace['assignments'] = NestAssignmentsHelper()
        namespace['names'] = NestNamesConverter()
        namespace['declarations'] = NestDeclarationsHelper()
        namespace['utils'] = ASTUtils()
        namespace['idemPrinter'] = LegacyExpressionPrinter()
        namespace['outputEvent'] = namespace['printer'].print_output_event(neuron.get_body())
        namespace['is_spike_input'] = ASTUtils.is_spike_input(neuron.get_body())
        namespace['is_current_input'] = ASTUtils.is_current_input(neuron.get_body())
        namespace['odeTransformer'] = OdeTransformer()
        namespace['printerGSL'] = gsl_printer
        namespace['now'] = datetime.datetime.utcnow()

        self.define_solver_type(neuron, namespace)
        return namespace


    def define_solver_type(self, neuron, namespace):
        # type: (ASTNeuron, dict) -> None
        """
        For a handed over neuron, this method enriches the namespace with entries that are used to solve
        ODEs.
        :param namespace: a single namespace dict.
        :param neuron: a single neuron
        """
        namespace['useGSL'] = False
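        # switch to the GSL solver whenever no functional shape is present or more
        # than one ODE equation is defined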
        if neuron.get_equations_block() is not None and len(neuron.get_equations_block().get_declarations()) > 0:
            if (not self.is_functional_shape_present(neuron.get_equations_block().get_ode_shapes())) or \
                    len(neuron.get_equations_block().get_ode_equations()) > 1:
                namespace['names'] = GSLNamesConverter()
                namespace['useGSL'] = True
                converter = NESTReferenceConverter(True)
                legacy_pretty_printer = LegacyExpressionPrinter(converter)
                namespace['printer'] = NestPrinter(legacy_pretty_printer)
        return


    def is_functional_shape_present(self, shapes):
        # type: (list(ASTOdeShape)) -> bool
        """
        For a handed over list of shapes, this method checks whether a shape with differential order 0 exists.
        :param shapes: a list of shapes
        :type shapes: list(ASTOdeShape)
        :return: True if at least one shape with differential order 0 exists, otherwise False.
        :rtype: bool
        """
        for shape in shapes:
            if shape.get_variable().get_differential_order() == 0:
                return True
        return False


    def transform_shapes_and_odes(self, neuron, shape_to_buffers):
        # type: (ASTNeuron, map(str, str)) -> ASTNeuron
        """
        Solves all odes and equations in the handed over neuron.

        Precondition: it should be ensured that at most one equations block is present.

        :param neuron: a single neuron instance.
        :param shape_to_buffers: Map of shape names to buffers to which they were connected.
        :return: A transformed version of the neuron that can be passed to the GSL.
        """

        assert isinstance(neuron.get_equations_blocks(), ASTEquationsBlock), "Precondition violated: only one equation block should be present"

        equations_block = neuron.get_equations_block()
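        # dispatch on the structure of the equations block:
        #   - no shapes at all: leave the neuron unchanged
        #   - a single delta-shaped kernel: integrate the delta solution directly
        #   - exactly one ODE equation: run the analysis; integrate the exact solution
        #     if it is analytical, otherwise convert functional shapes to ODEs for GSL
        #   - anything else: convert functional shapes to ODEs and let GSL handle them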

        if len(equations_block.get_ode_shapes()) == 0:
            code, message = Messages.get_neuron_solved_by_solver(neuron.get_name())
            Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
            return neuron
        elif len(equations_block.get_ode_shapes()) == 1 and \
                str(equations_block.get_ode_shapes()[0].get_expression()).strip().startswith(
                    "delta"):  # assume the model is well formed
            shape = equations_block.get_ode_shapes()[0]
            integrate_delta_solution(equations_block, neuron, shape, shape_to_buffers)
            return neuron
        elif len(equations_block.get_ode_equations()) == 1:
            code, message = Messages.get_neuron_analyzed(neuron.get_name())
            Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
            solver_result = self.solve_ode_with_shapes(equations_block)

            if solver_result["solver"] is "analytical":
                neuron = integrate_exact_solution(neuron, solver_result)
                neuron.remove_equations_block()
            elif (solver_result["solver"] is "numeric"
                  and self.is_functional_shape_present(equations_block.get_ode_shapes())):
                functional_shapes_to_odes(neuron, solver_result)

            return neuron
        else:
            code, message = Messages.get_neuron_solved_by_solver(neuron.get_name())
            Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)

            if self.is_functional_shape_present(equations_block.get_ode_shapes()):
                ode_shapes = self.solve_functional_shapes(equations_block)
                functional_shapes_to_odes(neuron, ode_shapes)

            return neuron


    def apply_spikes_from_buffers(self, neuron, shape_to_buffers):
        """generate the equations that update the dynamical variables when incoming spikes arrive.

        For example, a resulting `assignment_string` could be "I_shape_in += (in_spikes/nS) * 1".

        The definition of the spike kernel shape is then set to 0.
        """
        spike_updates = []
        initial_values = neuron.get_initial_values_blocks()
        for declaration in initial_values.get_declarations():
            variable = declaration.get_variables()[0]
            for shape in shape_to_buffers:
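                # the ODE analysis may introduce numbered auxiliary state variables for
                # a shape (suffix "__<n>"); match the shape name with or without it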
                matcher_computed_shape_odes = re.compile(shape + r"(__\d+)?")
                if re.match(matcher_computed_shape_odes, str(variable)):
                    buffer_type = neuron.get_scope(). \
                        resolve_to_symbol(shape_to_buffers[shape], SymbolKind.VARIABLE).get_type_symbol()
                    assignment_string = variable.get_complete_name() + " += (" + shape_to_buffers[
                        shape] + '/' + buffer_type.print_nestml_type() + ") * " + \
                                        self._printer.print_expression(declaration.get_expression())
                    spike_updates.append(ModelParser.parse_assignment(assignment_string))
                    # the initial value has been applied and can be reset
                    declaration.set_expression(ModelParser.parse_expression("0"))
        for assignment in spike_updates:
            add_assignment_to_update_block(assignment, neuron)


    def solve_ode_with_shapes(self, equations_block):
        # type: (ASTEquationsBlock) -> dict[str, list]
        odes_shapes_json = self.transform_ode_and_shapes_to_json(equations_block)

        return analysis(odes_shapes_json, enable_stiffness_check=False)


    def transform_ode_and_shapes_to_json(self, equations_block):
        # type: (ASTEquationsBlock) -> dict[str, list]
        """
        Converts AST node to a JSON representation
        :param equations_block: a single equations block
        :return: json mapping: {odes: [...], shapes: [...]}
        """
        result = {"odes": [], "shapes": []}

        for equation in equations_block.get_ode_equations():
            result["odes"].append({"symbol": equation.get_lhs().get_name(),
                                   "definition": self._printer.print_expression(equation.get_rhs())})

        ode_shape_names = set()
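        # functional shapes (differential order 0) are emitted directly as "function"
        # entries; ODE-defined shapes are only collected by base name here and expanded
        # together with their initial values further below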
        for shape in equations_block.get_ode_shapes():
            if shape.get_variable().get_differential_order() == 0:
                result["shapes"].append({"type": "function",
                                         "symbol": shape.get_variable().get_complete_name(),
                                         "definition": self._printer.print_expression(shape.get_expression())})

            else:
                extracted_shape_name = shape.get_variable().get_name()
                if '__' in shape.get_variable().get_name():
                    extracted_shape_name = shape.get_variable().get_name()[0:shape.get_variable().get_name().find("__")]
                if extracted_shape_name not in ode_shape_names:  # add shape name only once
                    ode_shape_names.add(extracted_shape_name)

        # try to resolve all available initial values
        shape_name_to_initial_values = {}
        shape_name_to_shape_definition = {}

        for shape_name in ode_shape_names:
            shape_name_symbol = equations_block.get_scope().resolve_to_symbol(shape_name, SymbolKind.VARIABLE)
            shape_name_to_initial_values[shape_name] = [
                self._printer.print_expression(shape_name_symbol.get_declaring_expression())]
            shape_name_to_shape_definition[shape_name] = self._printer.print_expression(shape_name_symbol.get_ode_definition())
            order = 1
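            # higher derivatives of a shape are registered under the names
            # <shape>__d, <shape>__dd, ...; collect their initial values as well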
            while True:
                shape_name_symbol = equations_block.get_scope().resolve_to_symbol(shape_name + "__" + 'd' * order,
                                                                                  SymbolKind.VARIABLE)
                if shape_name_symbol is not None:
                    shape_name_to_initial_values[shape_name].append(
                        self._printer.print_expression(shape_name_symbol.get_declaring_expression()))
                    shape_name_to_shape_definition[shape_name] = self._printer.print_expression(
                        shape_name_symbol.get_ode_definition())
                else:
                    break
                order = order + 1

        for shape_name in ode_shape_names:
            result["shapes"].append({"type": "ode",
                                     "symbol": shape_name,
                                     "definition": shape_name_to_shape_definition[shape_name],
                                     "initial_values": shape_name_to_initial_values[shape_name]})

        result["parameters"] = {}  # ode-framework requires this.
        return result


    def solve_functional_shapes(self, equations_block):
        # type: (ASTEquationsBlock) -> dict[str, list]
        shapes_json = self.transform_functional_shapes_to_json(equations_block)

        return analysis(shapes_json, enable_stiffness_check=False)


    def transform_functional_shapes_to_json(self, equations_block):
        # type: (ASTEquationsBlock) -> dict[str, list]
        """
        Converts AST node to a JSON representation
        :param equations_block: a single equations block
        :return: json mapping: {odes: [...], shapes: [...]}
        """
        result = {"odes": [], "shapes": []}

        for shape in equations_block.get_ode_shapes():
            if shape.get_variable().get_differential_order() == 0:
                result["shapes"].append({"type": "function",
                                         "symbol": shape.get_variable().get_complete_name(),
                                         "definition": self._printer.print_expression(shape.get_expression())})

        result["parameters"] = {}  # ode-framework requires this.
        return result

    def make_functions_self_contained(self, functions):
        # type: (list(ASTOdeFunction)) -> list(ASTOdeFunction)
        """
        TODO: it should be a method inside of the ASTOdeFunction
        TODO by KP: this should be done by means of a visitor
        Make each function definition self-contained, i.e. without any references to other functions from `functions`.
        :param functions: A sorted list with entries ASTOdeFunction.
        :return: A list with ASTOdeFunctions. Defining expressions don't depend on each other.
        """
        for source in functions:
            for target in functions:
                matcher = re.compile(self._variable_matching_template.format(source.get_variable_name()))
                target_definition = str(target.get_expression())
                target_definition = re.sub(matcher, "(" + str(source.get_expression()) + ")", target_definition)
                target.expression = ModelParser.parse_expression(target_definition)
        return functions


    def replace_functions_through_defining_expressions(self, definitions, functions):
        # type: (list(ASTOdeEquation), list(ASTOdeFunction)) -> list(ASTOdeFunction)
        """
        Refactors symbols from `functions` in `definitions`, replacing them with the corresponding defining expressions from `functions`.
        :param definitions: A sorted list with entries {"symbol": "name", "definition": "expression"} that should be
        freed of references to `functions`.
        :param functions: A sorted list with entries {"symbol": "name", "definition": "expression"} with functions which
        must be replaced in `definitions`.
        :return: A list with definitions. Expressions in `definitions` don't depend on functions from `functions`.
        """
        for fun in functions:
            for target in definitions:
                matcher = re.compile(self._variable_matching_template.format(fun.get_variable_name()))
                target_definition = str(target.get_rhs())
                target_definition = re.sub(matcher, "(" + str(fun.get_expression()) + ")", target_definition)
                target.rhs = ModelParser.parse_expression(target_definition)
        return definitions


    def transform_functions_json(self, equations_block):
        # type: (ASTEquationsBlock) -> list[dict[str, str]]
        """
        Converts AST node to a JSON representation
        :param equations_block: a single equations block
        :return: a list of {"symbol": ..., "definition": ...} entries, one per ODE function
        """
        equations_block = OdeTransformer.refactor_convolve_call(equations_block)
        result = []

        for fun in equations_block.get_functions():
            result.append({"symbol": fun.get_variable_name(),
                           "definition": self._printer.print_expression(fun.get_expression())})

        return result


    def store_transformed_model(self, ast):
        if FrontendConfiguration.store_log:
            with open(str(os.path.join(FrontendConfiguration.get_target_path(), '..', 'report',
                                       ast.get_name())) + '.txt', 'w+') as f:
                f.write(str(ast))