Example No. 1
    def replace_variable_names_in_expressions(
            cls, neuron: ASTNeuron, solver_dicts: List[dict]) -> None:
        """
        Replace all occurrences of variable names in NESTML format (e.g. `g_ex$''`) with the ode-toolbox formatted
        variable name (e.g. `g_ex__DOLLAR__d__d`).

        Variables aliasing convolutions should already have been covered by replace_convolution_aliasing_inlines().
        """
        def replace_var(_expr=None):
            if isinstance(_expr, ASTSimpleExpression) and _expr.is_variable():
                var = _expr.get_variable()
                if cls.variable_in_solver(
                        cls.to_ode_toolbox_processed_name(
                            var.get_complete_name()), solver_dicts):
                    ast_variable = ASTVariable(
                        cls.to_ode_toolbox_processed_name(
                            var.get_complete_name()),
                        differential_order=0)
                    ast_variable.set_source_position(var.get_source_position())
                    _expr.set_variable(ast_variable)

            elif isinstance(_expr, ASTVariable):
                var = _expr
                if cls.variable_in_solver(
                        cls.to_ode_toolbox_processed_name(
                            var.get_complete_name()), solver_dicts):
                    var.set_name(
                        cls.to_ode_toolbox_processed_name(
                            var.get_complete_name()))
                    var.set_differential_order(0)

        def func(x):
            return replace_var(x)

        neuron.accept(ASTHigherOrderVisitor(func))
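The name conversion itself is not shown in this snippet; based on the example in the docstring (`g_ex$''` becomes `g_ex__DOLLAR__d__d`), a minimal stand-alone sketch of such a conversion could look as follows. The helper name is made up for illustration and is not the PyNestML implementation.

# Hypothetical sketch of the NESTML -> ODE-toolbox name conversion implied by the
# docstring above: "$" becomes "__DOLLAR" and each prime (') becomes "__d".
def to_ode_toolbox_processed_name_sketch(nestml_name: str) -> str:
    base = nestml_name.rstrip("'")
    differential_order = len(nestml_name) - len(base)
    return base.replace("$", "__DOLLAR") + "__d" * differential_order

assert to_ode_toolbox_processed_name_sketch("g_ex$''") == "g_ex__DOLLAR__d__d"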
Example No. 2
    def update_initial_values_for_odes(cls, neuron: ASTNeuron,
                                       solver_dicts: List[dict]) -> None:
        """
        Update initial values for original ODE declarations (e.g. V_m', g_ahp'') that are present in the model
        before ODE-toolbox processing, with the formatted variable names and initial values returned by ODE-toolbox.
        """
        assert isinstance(
            neuron.get_equations_blocks(),
            ASTEquationsBlock), "only one equation block should be present"

        if neuron.get_state_blocks() is None:
            return

        for iv_decl in neuron.get_state_blocks().get_declarations():
            for var in iv_decl.get_variables():
                var_name = var.get_complete_name()
                if cls.is_ode_variable(var.get_name(), neuron):
                    assert cls.variable_in_solver(
                        cls.to_ode_toolbox_processed_name(var_name),
                        solver_dicts)

                    # replace the left-hand side variable name by the ode-toolbox format
                    var.set_name(
                        cls.to_ode_toolbox_processed_name(
                            var.get_complete_name()))
                    var.set_differential_order(0)

                    # replace the defining expression by the ode-toolbox result
                    iv_expr = cls.get_initial_value_from_ode_toolbox_result(
                        cls.to_ode_toolbox_processed_name(var_name),
                        solver_dicts)
                    assert iv_expr is not None
                    iv_expr = ModelParser.parse_expression(iv_expr)
                    iv_expr.update_scope(neuron.get_state_blocks().get_scope())
                    iv_decl.set_expression(iv_expr)
Example No. 3
def apply_incoming_spikes(neuron: ASTNeuron):
    """
    Adds a set of update instructions to the handed over neuron.
    :param neuron: a single neuron instance
    :type neuron: ASTNeuron
    :return: the modified neuron
    :rtype: ASTNeuron
    """
    assert (neuron is not None and isinstance(neuron, ASTNeuron)), \
        '(PyNestML.Solver.BaseTransformer) No or wrong type of neuron provided (%s)!' % type(neuron)
    conv_calls = OdeTransformer.get_sum_function_calls(neuron)
    printer = ExpressionsPrettyPrinter()
    spikes_updates = list()
    for convCall in conv_calls:
        kernel = convCall.get_args()[0].get_variable().get_complete_name()
        buffer = convCall.get_args()[1].get_variable().get_complete_name()
        initial_values = (
            neuron.get_initial_values_blocks().get_declarations()
            if neuron.get_initial_values_blocks() is not None else list())
        for astDeclaration in initial_values:
            for variable in astDeclaration.get_variables():
                if re.match(kernel + "[\']*",
                            variable.get_complete_name()) or re.match(
                                kernel + '__[\\d]+$',
                                variable.get_complete_name()):
                    spikes_updates.append(
                        ModelParser.parse_assignment(
                            variable.get_complete_name() + " += " + buffer +
                            " * " + printer.print_expression(
                                astDeclaration.get_expression())))
    for update in spikes_updates:
        add_assignment_to_update_block(update, neuron)
    return neuron
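As a small stand-alone illustration of the two regular expressions used above, the following shows, for a hypothetical kernel named `g_in`, which candidate variable names the combined check accepts (the variable names are made up for this example):

import re

kernel = "g_in"
for name in ["g_in", "g_in'", "g_in''", "g_in__2", "V_m"]:
    matched = bool(re.match(kernel + "[\']*", name)
                   or re.match(kernel + '__[\\d]+$', name))
    print(name, "->", matched)   # V_m -> False, all others -> True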
Example No. 4
    def replace_convolution_aliasing_inlines(cls, neuron: ASTNeuron) -> None:
        """
        Replace all occurrences of kernel names (e.g. ``I_dend`` and ``I_dend'`` for a definition involving a second-order kernel ``inline kernel I_dend = convolve(kern_name, spike_buf)``) with the ODE-toolbox generated variable ``kern_name__X__spike_buf``.
        """
        def replace_var(_expr, replace_var_name: str,
                        replace_with_var_name: str):
            if isinstance(_expr, ASTSimpleExpression) and _expr.is_variable():
                var = _expr.get_variable()
                if var.get_name() == replace_var_name:
                    ast_variable = ASTVariable(
                        replace_with_var_name +
                        '__d' * var.get_differential_order(),
                        differential_order=0)
                    ast_variable.set_source_position(var.get_source_position())
                    _expr.set_variable(ast_variable)

            elif isinstance(_expr, ASTVariable):
                var = _expr
                if var.get_name() == replace_var_name:
                    var.set_name(replace_with_var_name +
                                 '__d' * var.get_differential_order())
                    var.set_differential_order(0)

        for decl in neuron.get_equations_block().get_declarations():
            from pynestml.utils.ast_utils import ASTUtils
            if isinstance(decl, ASTInlineExpression) \
               and isinstance(decl.get_expression(), ASTSimpleExpression) \
               and '__X__' in str(decl.get_expression()):
                replace_with_var_name = decl.get_expression().get_variable(
                ).get_name()
                neuron.accept(
                    ASTHigherOrderVisitor(lambda x: replace_var(
                        x, decl.get_variable_name(), replace_with_var_name)))
Example No. 5
 def get_cm_info(cls, neuron: ASTNeuron):
     """
     Checks whether the compartmental-model conditions apply to the handed over neuron.
     If yes, it checks for the presence of the expected functions and declarations.
     In addition, it organizes and builds a dictionary (cm_info)
     which describes all the relevant data that was found.
     :param neuron: a single neuron instance.
     :type neuron: ASTNeuron
     """
             
     cm_info = cls.detectCMInlineExpressions(neuron)
     
     # further computation not necessary if there were no cm neurons
     if not cm_info: cm_info = dict()
     
     cm_info = cls.calcExpectedFunctionNamesForChannels(cm_info)
     cm_info = cls.checkAndFindFunctions(neuron, cm_info)
     cm_info = cls.addChannelVariablesSectionAndEnforceProperVariableNames(neuron, cm_info)
     
     # now check for existence of expected state variables 
     # and add their ASTVariable objects to cm_info
     missing_states_visitor = StateMissingVisitor(cm_info)
     neuron.accept(missing_states_visitor)
     
     return missing_states_visitor.cm_info
Example No. 6
def add_declaration_to_state_block(neuron: ASTNeuron, variable: str,
                                   initial_value: str) -> ASTNeuron:
    """
    Adds a single declaration to the state block of the neuron. The declared variable is of type real.
    :param neuron: a neuron
    :param variable: state variable to add
    :param initial_value: corresponding initial value
    :return: a modified neuron
    """
    tmp = ModelParser.parse_expression(initial_value)
    vector_variable = ASTUtils.get_vectorized_variable(tmp, neuron.get_scope())
    declaration_string = variable + ' real' + (
        '[' + vector_variable.get_vector_parameter() + ']' if
        vector_variable is not None and vector_variable.has_vector_parameter()
        else '') + ' = ' + initial_value
    ast_declaration = ModelParser.parse_declaration(declaration_string)
    if vector_variable is not None:
        ast_declaration.set_size_parameter(
            vector_variable.get_vector_parameter())
    neuron.add_to_state_block(ast_declaration)
    ast_declaration.update_scope(neuron.get_state_blocks().get_scope())

    symtable_visitor = ASTSymbolTableVisitor()
    symtable_visitor.block_type_stack.push(BlockType.STATE)
    ast_declaration.accept(symtable_visitor)
    symtable_visitor.block_type_stack.pop()

    return neuron
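The declaration string handed to `ModelParser.parse_declaration` above can be previewed in isolation; the variable and parameter names in this sketch are made up, and the second print mirrors the vector branch of the conditional:

variable, initial_value = "V_abs", "0.0"        # hypothetical state variable
print(variable + ' real' + ' = ' + initial_value)
# -> "V_abs real = 0.0"

vector_parameter = "n_receptors"                # hypothetical vector size parameter
print(variable + ' real' + '[' + vector_parameter + ']' + ' = ' + initial_value)
# -> "V_abs real[n_receptors] = 0.0"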
Example No. 7
def add_declaration_to_internals(neuron: ASTNeuron, variable_name: str,
                                 init_expression: str) -> ASTNeuron:
    """
    Adds the variable as stored in the declaration tuple to the neuron. The declared variable is of type real.
    :param neuron: a single neuron instance
    :param variable_name: the name of the variable to add
    :param init_expression: initialization expression
    :return: the neuron extended by the variable
    """
    tmp = ModelParser.parse_expression(init_expression)
    vector_variable = ASTUtils.get_vectorized_variable(tmp, neuron.get_scope())

    declaration_string = variable_name + ' real' + (
        '[' + vector_variable.get_vector_parameter() + ']' if
        vector_variable is not None and vector_variable.has_vector_parameter()
        else '') + ' = ' + init_expression
    ast_declaration = ModelParser.parse_declaration(declaration_string)
    if vector_variable is not None:
        ast_declaration.set_size_parameter(
            vector_variable.get_vector_parameter())
    neuron.add_to_internal_block(ast_declaration)
    ast_declaration.update_scope(neuron.get_internals_blocks().get_scope())
    symtable_visitor = ASTSymbolTableVisitor()
    symtable_visitor.block_type_stack.push(BlockType.INTERNALS)
    ast_declaration.accept(symtable_visitor)
    symtable_visitor.block_type_stack.pop()
    return neuron
Example No. 8
    def ode_toolbox_analysis(self, neuron: ASTNeuron,
                             kernel_buffers: Mapping[ASTKernel, ASTInputPort]):
        """
        Prepare data for ODE-toolbox input format, invoke ODE-toolbox analysis via its API, and return the output.
        """
        assert isinstance(
            neuron.get_equations_blocks(),
            ASTEquationsBlock), "only one equation block should be present"

        equations_block = neuron.get_equations_block()

        if len(equations_block.get_kernels()) == 0 and len(
                equations_block.get_ode_equations()) == 0:
            # no equations defined -> no changes to the neuron
            return None, None

        code, message = Messages.get_neuron_analyzed(neuron.get_name())
        Logger.log_message(neuron, code, message, neuron.get_source_position(),
                           LoggingLevel.INFO)

        parameters_block = neuron.get_parameter_blocks()
        odetoolbox_indict = self.transform_ode_and_kernels_to_json(
            neuron, parameters_block, kernel_buffers)
        odetoolbox_indict["options"] = {}
        odetoolbox_indict["options"]["output_timestep_symbol"] = "__h"
        solver_result = analysis(
            odetoolbox_indict,
            disable_stiffness_check=True,
            debug=FrontendConfiguration.logging_level == "DEBUG")
        analytic_solver = None
        analytic_solvers = [
            x for x in solver_result if x["solver"] == "analytical"
        ]
        assert len(
            analytic_solvers
        ) <= 1, "More than one analytic solver not presently supported"
        if len(analytic_solvers) > 0:
            analytic_solver = analytic_solvers[0]

        # if numeric solver is required, generate a stepping function that includes each state variable
        numeric_solver = None
        numeric_solvers = [
            x for x in solver_result if x["solver"].startswith("numeric")
        ]
        if numeric_solvers:
            solver_result = analysis(
                odetoolbox_indict,
                disable_stiffness_check=True,
                disable_analytic_solver=True,
                debug=FrontendConfiguration.logging_level == "DEBUG")
            numeric_solvers = [
                x for x in solver_result if x["solver"].startswith("numeric")
            ]
            assert len(
                numeric_solvers
            ) <= 1, "More than one numeric solver not presently supported"
            if len(numeric_solvers) > 0:
                numeric_solver = numeric_solvers[0]

        return analytic_solver, numeric_solver
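Outside of the code generator, the same ODE-toolbox entry point can be exercised directly. In the sketch below only the `analysis()` call, the `disable_stiffness_check` flag and the `"options"`/`"solver"` keys are taken from the snippet above; the layout of the input dictionary (a "dynamics" list with an expression and initial value per variable) is an assumption for illustration and may differ from what `transform_ode_and_kernels_to_json()` emits.

from odetoolbox import analysis

# Hand-written input dictionary (layout assumed for illustration only).
odetoolbox_indict = {
    "dynamics": [
        {"expression": "V_m' = -V_m / tau_m", "initial_value": "0"}
    ],
    "parameters": {"tau_m": "10"},
    "options": {"output_timestep_symbol": "__h"},
}

solver_result = analysis(odetoolbox_indict, disable_stiffness_check=True)
analytic_solvers = [x for x in solver_result if x["solver"] == "analytical"]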
Example No. 9
    def remove_initial_values_for_kernels(cls, neuron: ASTNeuron) -> None:
        """
        Remove initial values for original declarations (e.g. g_in, g_in', V_m); these might conflict with the initial value expressions returned from ODE-toolbox.
        """
        assert isinstance(
            neuron.get_equations_blocks(),
            ASTEquationsBlock), "only one equation block should be present"

        equations_block = neuron.get_equations_block()
        symbols_to_remove = set()
        for kernel in equations_block.get_kernels():
            for kernel_var in kernel.get_variables():
                kernel_var_order = kernel_var.get_differential_order()
                for order in range(kernel_var_order):
                    symbol_name = kernel_var.get_name() + "'" * order
                    symbols_to_remove.add(symbol_name)

        decl_to_remove = set()
        for symbol_name in symbols_to_remove:
            for decl in neuron.get_state_blocks().get_declarations():
                if len(decl.get_variables()) == 1:
                    if decl.get_variables()[0].get_name() == symbol_name:
                        decl_to_remove.add(decl)
                else:
                    for var in decl.get_variables():
                        if var.get_name() == symbol_name:
                            decl.variables.remove(var)

        for decl in decl_to_remove:
            neuron.get_state_blocks().get_declarations().remove(decl)
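The set of symbol names collected in the first loop can be previewed on its own: for a kernel variable of differential order n, the entries are the base name followed by its primed forms up to order n - 1 (the kernel name here is made up):

kernel_var_name, kernel_var_order = "g_in", 2   # hypothetical kernel variable g_in''
symbols_to_remove = {kernel_var_name + "'" * order for order in range(kernel_var_order)}
print(symbols_to_remove)   # -> {"g_in", "g_in'"}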
Example No. 10
    def setup_model_generation_helpers(self, neuron: ASTNeuron):
        """
        Returns a namespace for the Jinja2 neuron model documentation template.

        :param neuron: a single neuron instance
        :type neuron: ASTNeuron
        :return: a map from name to functionality.
        :rtype: dict
        """
        converter = LatexReferenceConverter()
        latex_expression_printer = LatexExpressionPrinter(converter)

        namespace = dict()

        namespace['now'] = datetime.datetime.utcnow()
        namespace['neuron'] = neuron
        namespace['neuronName'] = str(neuron.get_name())
        namespace['printer'] = NestPrinter(latex_expression_printer)
        namespace['assignments'] = NestAssignmentsHelper()
        namespace['names'] = NestNamesConverter()
        namespace['declarations'] = NestDeclarationsHelper()
        namespace['utils'] = ASTUtils()
        namespace['odeTransformer'] = OdeTransformer()

        import textwrap
        pre_comments_bak = neuron.pre_comments
        neuron.pre_comments = []
        namespace['neuron_source_code'] = textwrap.indent(
            neuron.__str__(), "   ")
        neuron.pre_comments = pre_comments_bak

        return namespace
Example No. 11
    def get_spike_update_expressions(self, neuron: ASTNeuron, kernel_buffers, solver_dicts, delta_factors) -> List[ASTAssignment]:
        """
        Generate the equations that update the dynamical variables when incoming spikes arrive. To be invoked after ode-toolbox.

        For example, a resulting `assignment_str` could be "I_kernel_in += (in_spikes/nS) * 1". The values are taken from the initial values for each corresponding dynamical variable, either from ode-toolbox or directly from user specification in the model.

        Note that for kernels, `initial_values` actually contains the increment upon spike arrival, rather than the initial value of the corresponding ODE dimension.
        """
        spike_updates = []
        initial_values = neuron.get_initial_values_blocks()

        for kernel, spike_input_port in kernel_buffers:
            if neuron.get_scope().resolve_to_symbol(str(spike_input_port), SymbolKind.VARIABLE) is None:
                continue

            buffer_type = neuron.get_scope().resolve_to_symbol(str(spike_input_port), SymbolKind.VARIABLE).get_type_symbol()

            if is_delta_kernel(kernel):
                continue

            for kernel_var in kernel.get_variables():
                for var_order in range(get_kernel_var_order_from_ode_toolbox_result(kernel_var.get_name(), solver_dicts)):
                    kernel_spike_buf_name = construct_kernel_X_spike_buf_name(
                        kernel_var.get_name(), spike_input_port, var_order)
                    expr = get_initial_value_from_ode_toolbox_result(kernel_spike_buf_name, solver_dicts)
                    assert expr is not None, "Initial value not found for kernel " + str(kernel_var)
                    expr = str(expr)
                    if expr in ["0", "0.", "0.0"]:
                        continue    # skip adding the statement if we're only adding zero

                    assignment_str = kernel_spike_buf_name + " += "
                    assignment_str += "(" + str(spike_input_port) + ")"
                    if expr not in ["1.", "1.0", "1"]:
                        assignment_str += " * (" + \
                            self._printer.print_expression(ModelParser.parse_expression(expr)) + ")"

                    if buffer_type.print_nestml_type() not in ["1.", "1.0", "1"]:
                        assignment_str += " / (" + buffer_type.print_nestml_type() + ")"

                    ast_assignment = ModelParser.parse_assignment(assignment_str)
                    ast_assignment.update_scope(neuron.get_scope())
                    ast_assignment.accept(ASTSymbolTableVisitor())

                    spike_updates.append(ast_assignment)

        for k, factor in delta_factors.items():
            var = k[0]
            inport = k[1]
            assignment_str = var.get_name() + "'" * (var.get_differential_order() - 1) + " += "
            if factor not in ["1.", "1.0", "1"]:
                assignment_str += "(" + self._printer.print_expression(ModelParser.parse_expression(factor)) + ") * "
            assignment_str += str(inport)
            ast_assignment = ModelParser.parse_assignment(assignment_str)
            ast_assignment.update_scope(neuron.get_scope())
            ast_assignment.accept(ASTSymbolTableVisitor())

            spike_updates.append(ast_assignment)

        return spike_updates
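The assignment string assembled in the kernel loop above can be illustrated with concrete values; the variable, port and unit names below are made up:

kernel_spike_buf_name = "g_in__X__spikes_in"   # hypothetical kernel-buffer variable
spike_input_port = "spikes_in"                 # hypothetical spike input port
expr = "2.5"                                   # hypothetical initial value from ODE-toolbox
buffer_type_str = "nS"                         # hypothetical buffer unit

assignment_str = kernel_spike_buf_name + " += "
assignment_str += "(" + spike_input_port + ")"
if expr not in ["1.", "1.0", "1"]:
    assignment_str += " * (" + expr + ")"
if buffer_type_str not in ["1.", "1.0", "1"]:
    assignment_str += " / (" + buffer_type_str + ")"
print(assignment_str)   # -> "g_in__X__spikes_in += (spikes_in) * (2.5) / (nS)"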
Example No. 12
 def check_co_co(cls, neuron: ASTNeuron):
     """
     Checks the coco for the handed over neuron.
     :param neuron: a single neuron instance.
     """
     visitor = OutputPortDefinedIfEmitCalledVisitor()
     visitor.neuron = neuron
     neuron.accept(visitor)
Example No. 13
 def check_co_co(cls, node: ASTNeuron):
     """
     Ensures the coco for the handed over neuron.
     :param node: a single neuron instance.
     :type node: ASTNeuron
     """
     kernel_type_visitor = KernelTypeVisitor()
     kernel_type_visitor._neuron = node
     node.accept(kernel_type_visitor)
Example No. 14
 def add_timestep_symbol(cls, neuron: ASTNeuron) -> None:
     """
     Add the timestep variable to the internals block.
     """
     assert neuron.get_initial_value(
         "__h"
     ) is None, "\"__h\" is a reserved name, please do not use variables by this name in your NESTML file"
     assert not "__h" in [
         sym.name for sym in neuron.get_internal_symbols()
     ], "\"__h\" is a reserved name, please do not use variables by this name in your NESTML file"
     neuron.add_to_internal_block(
         ModelParser.parse_declaration('__h ms = resolution()'), index=0)
Example No. 15
 def check_co_co(cls, node: ASTNeuron):
     """
     Ensures the coco for the handed over neuron.
     :param node: a single neuron instance.
     """
     equations_defined_visitor = EquationsDefinedVisitor()
     node.accept(equations_defined_visitor)
     integrate_odes_called_visitor = IntegrateOdesCalledVisitor()
     node.accept(integrate_odes_called_visitor)
     if equations_defined_visitor.equations_defined() and not integrate_odes_called_visitor.integrate_odes_called():
         code, message = Messages.get_equations_defined_but_integrate_odes_not_called()
         Logger.log_message(code=code, message=message,
                            error_position=node.get_source_position(), log_level=LoggingLevel.ERROR)
Example No. 16
 def check_co_co(cls, node: ASTNeuron):
     """
     Checks if the coco applies for the node. All the variables declared in the state block
     must be initialized with a value.
     :param node:
     """
     for variable in node.get_state_symbols():
         if not variable.has_declaring_expression():
             code, message = Messages.get_state_variables_not_initialized(
                 var_name=variable.get_symbol_name())
             Logger.log_message(error_position=node.get_source_position(),
                                code=code,
                                message=message,
                                log_level=LoggingLevel.ERROR)
Example No. 17
    def get_delta_factors_(cls, neuron: ASTNeuron,
                           equations_block: ASTEquationsBlock) -> dict:
        r"""
        For every occurrence of a convolution of the form `x^(n) = a * convolve(kernel, inport) + ...` where `kernel` is a delta function, add the mapping `(x^(n), inport) --> a` to the returned dictionary.
        """
        delta_factors = {}
        for ode_eq in equations_block.get_ode_equations():
            var = ode_eq.get_lhs()
            expr = ode_eq.get_rhs()
            conv_calls = OdeTransformer.get_convolve_function_calls(expr)
            for conv_call in conv_calls:
                assert len(
                    conv_call.args
                ) == 2, "convolve() function call should have precisely two arguments: kernel and spike input port"
                kernel = conv_call.args[0]
                if cls.is_delta_kernel(
                        neuron.get_kernel_by_name(
                            kernel.get_variable().get_name())):
                    inport = conv_call.args[1].get_variable()
                    expr_str = str(expr)
                    sympy_expr = sympy.parsing.sympy_parser.parse_expr(
                        expr_str)
                    sympy_expr = sympy.expand(sympy_expr)
                    sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(
                        str(conv_call))
                    factor_str = []
                    for term in sympy.Add.make_args(sympy_expr):
                        if term.find(sympy_conv_expr):
                            factor_str.append(
                                str(term.replace(sympy_conv_expr, 1)))
                    factor_str = " + ".join(factor_str)
                    delta_factors[(var, inport)] = factor_str

        return delta_factors
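The SymPy part of the factor extraction can be run on its own. The right-hand side below is made up, with the symbol `conv` standing in for the `convolve()` call, but the `expand` / `Add.make_args` / `replace(..., 1)` steps are the same as above:

import sympy
import sympy.parsing.sympy_parser

sympy_expr = sympy.expand(
    sympy.parsing.sympy_parser.parse_expr("a * conv + b * V_m"))
sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr("conv")

factor_str = []
for term in sympy.Add.make_args(sympy_expr):
    if term.find(sympy_conv_expr):      # keep only terms containing the convolution
        factor_str.append(str(term.replace(sympy_conv_expr, 1)))
print(" + ".join(factor_str))           # -> "a"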
Example No. 18
def variable_in_neuron_initial_values(name: str, neuron: ASTNeuron):
    for decl in neuron.get_initial_blocks().get_declarations():
        assert len(
            decl.get_variables()
        ) == 1, "Multiple declarations in the same statement not yet supported"
        if decl.get_variables()[0].get_complete_name() == name:
            return True
    return False
Example No. 19
 def generate_neuron_cpp_file(self, neuron: ASTNeuron) -> None:
     """
     For a handed over neuron, this method generates the corresponding implementation file.
     :param neuron: a single neuron object.
     """
     neuron_cpp_file = self._template_neuron_cpp_file.render(self.setup_generation_helpers(neuron))
     with open(str(os.path.join(FrontendConfiguration.get_target_path(), neuron.get_name())) + '.cpp', 'w+') as f:
         f.write(str(neuron_cpp_file))
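The render-and-write pattern used here can be reproduced outside of PyNestML with a plain Jinja2 template. This is only a generic sketch; the template text, variable name and output path are made up and do not correspond to the real C++ template:

import os
from jinja2 import Template

template = Template("// Implementation for {{ neuron_name }}\n")
rendered = template.render(neuron_name="iaf_psc_exp")
with open(os.path.join(".", "iaf_psc_exp.cpp"), "w+") as f:
    f.write(rendered)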
Example No. 20
def declaration_in_state_block(neuron: ASTNeuron, variable_name: str) -> bool:
    """
    Checks if the variable is declared in the state block
    :param neuron:
    :param variable_name:
    :return:
    """
    assert type(variable_name) is str

    if neuron.get_state_blocks() is None:
        return False

    for decl in neuron.get_state_blocks().get_declarations():
        for var in decl.get_variables():
            if var.get_complete_name() == variable_name:
                return True

    return False
Example No. 21
 def generate_neuron_code(self, neuron: ASTNeuron):
     """
     Generate model documentation for neuron model.
     :param neuron: a single neuron object.
     """
     nestml_model_doc = self._template_neuron_nestml_model.render(self.setup_neuron_model_generation_helpers(neuron))
     with open(str(os.path.join(FrontendConfiguration.get_target_path(), neuron.get_name())) + '.rst',
               'w+') as f:
         f.write(str(nestml_model_doc))
Example No. 22
 def check_co_co(cls, node: ASTNeuron):
     """
     Ensures the coco for the handed over neuron.
     :param node: a single neuron instance.
     """
     if isinstance(node, ASTSynapse):
         return  # XXX: TODO: check that there are no equations other than the ones moved to the neuron (if any)
     equations_defined_visitor = EquationsDefinedVisitor()
     node.accept(equations_defined_visitor)
     integrate_odes_called_visitor = IntegrateOdesCalledVisitor()
     node.accept(integrate_odes_called_visitor)
     if equations_defined_visitor.equations_defined(
     ) and not integrate_odes_called_visitor.integrate_odes_called():
         code, message = Messages.get_equations_defined_but_integrate_odes_not_called(
         )
         Logger.log_message(code=code,
                            message=message,
                            error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
Example No. 23
def declaration_in_initial_values(neuron: ASTNeuron,
                                  variable_name: str) -> bool:
    assert type(variable_name) is str

    for decl in neuron.get_initial_values_blocks().get_declarations():
        for var in decl.get_variables():
            if var.get_complete_name() == variable_name:
                return True

    return False
Example No. 24
 def is_ode_variable(cls, var_base_name: str, neuron: ASTNeuron) -> bool:
     """
     Checks if the variable is present in an ODE
     """
     equations_block = neuron.get_equations_blocks()
     for ode_eq in equations_block.get_ode_equations():
         var = ode_eq.get_lhs()
         if var.get_name() == var_base_name:
             return True
     return False
Example No. 25
    def remove_ode_definitions_from_equations_block(cls,
                                                    neuron: ASTNeuron) -> None:
        """
        Removes all ODEs in this block.
        """
        equations_block = neuron.get_equations_block()

        decl_to_remove = set()
        for decl in equations_block.get_ode_equations():
            decl_to_remove.add(decl)

        for decl in decl_to_remove:
            equations_block.get_declarations().remove(decl)
Example No. 26
 def check_co_co(cls, neuron: ASTNeuron):
     """
     Checks whether the compartmental-model conditions apply to the handed over neuron.
     Models which do not have a state variable named as specified
     in the value of cm_trigger_variable_name are not relevant.
     :param neuron: a single neuron instance.
     :type neuron: ASTNeuron
     """
     
     cm_info = cls.detectCMInlineExpressions(neuron)
     
     # further computation not necessary if there were no cm neurons
     if not cm_info: return True   
          
     cm_info = cls.calcExpectedFunctionNamesForChannels(cm_info)
     cm_info = cls.checkAndFindFunctions(neuron, cm_info)
     cm_info = cls.addChannelVariablesSectionAndEnforceProperVariableNames(neuron, cm_info)
     
     # now check for existence of expected state variables 
     # and add their ASTVariable objects to cm_info
     missing_states_visitor = StateMissingVisitor(cm_info)
     neuron.accept(missing_states_visitor)
Example No. 27
    def analyse_neuron(self, neuron: ASTNeuron) -> List[ASTAssignment]:
        """
        Analyse and transform a single neuron.
        :param neuron: a single neuron.
        :return: spike_updates: list of spike updates, see documentation for get_spike_update_expressions() for more information.
        """
        code, message = Messages.get_start_processing_neuron(neuron.get_name())
        Logger.log_message(neuron, code, message, neuron.get_source_position(),
                           LoggingLevel.INFO)

        equations_block = neuron.get_equations_block()

        if equations_block is None:
            return []

        delta_factors = self.get_delta_factors_(neuron, equations_block)
        kernel_buffers = self.generate_kernel_buffers_(neuron, equations_block)
        self.replace_convolve_calls_with_buffers_(neuron, equations_block,
                                                  kernel_buffers)
        self.make_inline_expressions_self_contained(
            equations_block.get_inline_expressions())
        self.replace_inline_expressions_through_defining_expressions(
            equations_block.get_ode_equations(),
            equations_block.get_inline_expressions())

        analytic_solver, numeric_solver = self.ode_toolbox_analysis(
            neuron, kernel_buffers)
        self.analytic_solver[neuron.get_name()] = analytic_solver
        self.numeric_solver[neuron.get_name()] = numeric_solver
        self.remove_initial_values_for_kernels(neuron)
        kernels = self.remove_kernel_definitions_from_equations_block(neuron)
        self.update_initial_values_for_odes(neuron,
                                            [analytic_solver, numeric_solver],
                                            kernels)
        self.remove_ode_definitions_from_equations_block(neuron)
        self.create_initial_values_for_kernels(
            neuron, [analytic_solver, numeric_solver], kernels)
        self.replace_variable_names_in_expressions(
            neuron, [analytic_solver, numeric_solver])
        self.add_timestep_symbol(neuron)

        if self.analytic_solver[neuron.get_name()] is not None:
            neuron = add_declarations_to_internals(
                neuron, self.analytic_solver[neuron.get_name()]["propagators"])

        self.update_symbol_table(neuron, kernel_buffers)
        spike_updates = self.get_spike_update_expressions(
            neuron, kernel_buffers, [analytic_solver, numeric_solver],
            delta_factors)

        return spike_updates
Example No. 28
 def is_compartmental_model(cls, neuron: ASTNeuron):
     state_blocks = neuron.get_state_blocks()
     if state_blocks is None: return False
     if isinstance(state_blocks, ASTBlockWithVariables):
         state_blocks = [state_blocks]
     
     for state_block in state_blocks:
         declarations = state_block.get_declarations()
         for declaration in declarations:
             variables = declaration.get_variables()
             for variable in variables:
                 variable_name = variable.get_name().lower().strip()
                 if variable_name == cls.cm_trigger_variable_name:
                     return True
     return False
Example No. 29
    def remove_kernel_definitions_from_equations_block(
            cls, neuron: ASTNeuron) -> Set[ASTKernel]:
        """
        Removes all kernel definitions from the equations block and returns them as a set.
        """
        equations_block = neuron.get_equations_block()

        decl_to_remove = set()
        for decl in equations_block.get_declarations():
            if type(decl) is ASTKernel:
                decl_to_remove.add(decl)

        for decl in decl_to_remove:
            equations_block.get_declarations().remove(decl)

        return decl_to_remove
Example No. 30
def add_declaration_to_update_block(declaration: ASTDeclaration, neuron: ASTNeuron) -> ASTNeuron:
    """
    Adds a single declaration to the end of the update block of the handed over neuron.
    :param declaration: ASTDeclaration node to add
    :param neuron: a single neuron instance
    :return: a modified neuron
    """
    small_stmt = ASTNodeFactory.create_ast_small_stmt(declaration=declaration,
                                                      source_position=ASTSourceLocation.get_added_source_position())
    stmt = ASTNodeFactory.create_ast_stmt(small_stmt=small_stmt,
                                          source_position=ASTSourceLocation.get_added_source_position())
    if not neuron.get_update_blocks():
        neuron.create_empty_update_block()
    neuron.get_update_blocks().get_block().get_stmts().append(stmt)
    small_stmt.update_scope(neuron.get_update_blocks().get_block().get_scope())
    stmt.update_scope(neuron.get_update_blocks().get_block().get_scope())
    return neuron
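A possible usage sketch, reusing `ModelParser.parse_declaration` as in the other snippets and assuming `neuron` is an already-parsed ASTNeuron instance; the declaration text is made up and whether it parses depends on the NESTML grammar:

# Hypothetical usage; assumes `neuron` is an ASTNeuron obtained from ModelParser.
decl = ModelParser.parse_declaration("tmp_counter integer = 0")
neuron = add_declaration_to_update_block(decl, neuron)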