Example #1
 def is_vectorized_assignment(cls, assignment):
     """
     Indicates whether the handed over assignment is vectorized, i.e., an assignment of vectors.
     :param assignment: a single assignment.
     :type assignment: ASTAssignment
     :return: True if vectorized, otherwise False.
     :rtype: bool
     """
     from pynestml.symbols.symbol import SymbolKind
     assert isinstance(assignment, ASTAssignment), \
         '(PyNestML.CodeGeneration.Assignments) No or wrong type of assignment provided (%s)!' % type(assignment)
     symbol = assignment.get_scope().resolve_to_symbol(assignment.get_variable().get_complete_name(),
                                                       SymbolKind.VARIABLE)
     if symbol is not None:
         if symbol.has_vector_parameter():
             return True
         else:
             # otherwise we have to check if one of the variables used in the rhs is a vector
             for var in assignment.get_expression().get_variables():
                 symbol = var.get_scope().resolve_to_symbol(var.get_complete_name(), SymbolKind.VARIABLE)
                 if symbol is not None and symbol.has_vector_parameter():
                     return True
             return False
     else:
         Logger.log_message(message='No symbol could be resolved!', log_level=LoggingLevel.ERROR)
         return False
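
Nearly every example on this page follows the same resolve-then-log pattern: look a name up in the scope and emit a Logger error if nothing resolves. A minimal, self-contained sketch of that pattern (stub classes only; none of this is PyNestML API):

class Symbol:
    def __init__(self, name, is_vector=False):
        self.name = name
        self.is_vector = is_vector

    def has_vector_parameter(self):
        return self.is_vector


class Scope:
    def __init__(self, symbols):
        self._symbols = {s.name: s for s in symbols}

    def resolve_to_symbol(self, name):
        # returns None for unknown names, mirroring resolve_to_symbol() above
        return self._symbols.get(name)


def is_vector_variable(scope, name):
    symbol = scope.resolve_to_symbol(name)
    if symbol is None:
        print("ERROR: No symbol could be resolved!")  # stands in for Logger.log_message(...)
        return False
    return symbol.has_vector_parameter()


demo_scope = Scope([Symbol("V_m"), Symbol("g_ex", is_vector=True)])
assert is_vector_variable(demo_scope, "g_ex") is True
assert is_vector_variable(demo_scope, "V_m") is False
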
Example #2
 def is_vectorized_assignment(cls, assignment):
     """
     Indicates whether the handed over assignment is vectorized, i.e., an assignment of vectors.
     :param assignment: a single assignment.
     :type assignment: ASTAssignment
     :return: True if vectorized, otherwise False.
     :rtype: bool
     """
     from pynestml.symbols.symbol import SymbolKind
     assert isinstance(assignment, ASTAssignment), \
         '(PyNestML.CodeGeneration.Assignments) No or wrong type of assignment provided (%s)!' % type(assignment)
     symbol = assignment.get_scope().resolve_to_symbol(
         assignment.get_variable().get_complete_name(), SymbolKind.VARIABLE)
     if symbol is not None:
         if symbol.has_vector_parameter():
             return True
         else:
             # otherwise we have to check if one of the variables used in the rhs is a vector
             for var in assignment.get_expression().get_variables():
                 symbol = var.get_scope().resolve_to_symbol(
                     var.get_complete_name(), SymbolKind.VARIABLE)
                 if symbol is not None and symbol.has_vector_parameter():
                     return True
             return False
     else:
         Logger.log_message(message='No symbol could be resolved!',
                            log_level=LoggingLevel.ERROR)
         return False
Example #3
    def visit_simple_expression(self, node):
        """
        Visits a single variable as contained in a simple expression and derives its type.
        :param node: a single simple expression
        :type node: ASTSimpleExpression
        """
        assert isinstance(node, ASTSimpleExpression), \
            '(PyNestML.Visitor.VariableVisitor) No or wrong type of simple expression provided (%s)!' % type(node)
        assert (node.get_scope() is not None), \
            '(PyNestML.Visitor.VariableVisitor) No scope found, run symboltable creator!'

        scope = node.get_scope()
        var_name = node.get_variable().get_name()
        var_resolve = scope.resolve_to_symbol(var_name, SymbolKind.VARIABLE)

        # update the type of the variable according to its symbol type.
        if var_resolve is not None:
            node.type = var_resolve.get_type_symbol()
            node.type.referenced_object = node
        else:
            # check if var_name is actually a type literal (e.g. "mV")
            var_resolve = scope.resolve_to_symbol(var_name, SymbolKind.TYPE)
            if var_resolve is not None:
                node.type = var_resolve
                node.type.referenced_object = node
            else:
                message = 'Variable ' + str(node) + ' could not be resolved!'
                Logger.log_message(code=MessageCode.SYMBOL_NOT_RESOLVED,
                                   error_position=node.get_source_position(),
                                   message=message, log_level=LoggingLevel.ERROR)
                node.type = ErrorTypeSymbol()
        return
    def visit_expression(self, node):
        """
        Visits an expression which uses a binary logic operator and updates the type.
        :param node: a single expression.
        :type node: ast_expression
        """
        lhs_type = node.get_lhs().type
        rhs_type = node.get_rhs().type

        lhs_type.referenced_object = node.get_lhs()
        rhs_type.referenced_object = node.get_rhs()

        if isinstance(lhs_type, BooleanTypeSymbol) and isinstance(rhs_type, BooleanTypeSymbol):
            node.type = PredefinedTypes.get_boolean_type()
        else:
            if isinstance(lhs_type, BooleanTypeSymbol):
                offending_type = lhs_type
            else:
                offending_type = rhs_type
            code, message = Messages.get_type_different_from_expected(BooleanTypeSymbol(), offending_type)
            Logger.log_message(code=code, message=message,
                               error_position=lhs_type.referenced_object.get_source_position(),
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
        return
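
The visit_* methods above come from expression-typing visitors: each AST node accepts the visitor, which annotates node.type in place. A compact sketch of that double dispatch with hypothetical stub classes (not the real ASTVisitor hierarchy):

class SimpleExpression:
    def __init__(self, value):
        self.value = value
        self.type = None

    def accept(self, visitor):
        visitor.visit_simple_expression(self)


class AndExpression:
    def __init__(self, lhs, rhs):
        self.lhs, self.rhs = lhs, rhs
        self.type = None

    def accept(self, visitor):
        self.lhs.accept(visitor)
        self.rhs.accept(visitor)
        visitor.visit_expression(self)


class TypeVisitor:
    def visit_simple_expression(self, node):
        node.type = "boolean" if isinstance(node.value, bool) else "error"

    def visit_expression(self, node):
        ok = node.lhs.type == "boolean" and node.rhs.type == "boolean"
        node.type = "boolean" if ok else "error"  # the real code logs an error and sets ErrorTypeSymbol


expr = AndExpression(SimpleExpression(True), SimpleExpression(False))
expr.accept(TypeVisitor())
assert expr.type == "boolean"
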
Example #5
    def generate_module_code(self, neurons):
        # type: (list(ASTNeuron)) -> None
        """
        Generates code that is necessary to integrate neuron models into the NEST infrastructure.
        :param neurons: a list of neurons
        :type neurons: list(ASTNeuron)
        """
        namespace = {'neurons': neurons,
                     'moduleName': FrontendConfiguration.get_module_name(),
                     'now': datetime.datetime.utcnow()}
        if not os.path.exists(FrontendConfiguration.get_target_path()):
            os.makedirs(FrontendConfiguration.get_target_path())

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   FrontendConfiguration.get_module_name())) + '.h', 'w+') as f:
            f.write(str(self._template_module_header.render(namespace)))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   FrontendConfiguration.get_module_name())) + '.cpp', 'w+') as f:
            f.write(str(self._template_module_class.render(namespace)))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                                   'CMakeLists')) + '.txt', 'w+') as f:
            f.write(str(self._template_cmakelists.render(namespace)))

        if not os.path.isdir(os.path.realpath(os.path.join(FrontendConfiguration.get_target_path(), 'sli'))):
            os.makedirs(os.path.realpath(os.path.join(FrontendConfiguration.get_target_path(), 'sli')))

        with open(str(os.path.join(FrontendConfiguration.get_target_path(), 'sli',
                                   FrontendConfiguration.get_module_name() + "-init")) + '.sli', 'w+') as f:
            f.write(str(self._template_sli_init.render(namespace)))

        code, message = Messages.get_module_generated(FrontendConfiguration.get_target_path())
        Logger.log_message(None, code, message, None, LoggingLevel.INFO)
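
The method renders several Jinja2-style templates with the same namespace and writes each file next to the module sources. A hypothetical helper that factors out the repeated open/render/write block (the template objects and FrontendConfiguration calls come from the example above; the helper itself is not part of PyNestML):

import os


def render_to_file(template, namespace, target_dir, file_name):
    """Render a Jinja2-style template and write the result below target_dir."""
    os.makedirs(target_dir, exist_ok=True)
    path = os.path.join(target_dir, file_name)
    with open(path, "w") as f:
        f.write(str(template.render(namespace)))
    return path


# e.g., inside generate_module_code:
# render_to_file(self._template_module_header, namespace,
#                FrontendConfiguration.get_target_path(),
#                FrontendConfiguration.get_module_name() + '.h')
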
Example #6
        def check_simple_delta(_expr=None):
            if _expr.is_function_call() and _expr.get_function_call().get_name() == "delta":
                deltafunc = _expr.get_function_call()
                parent = neuron.get_parent(_expr)

                # check the argument
                if not (len(deltafunc.get_args()) == 1 and
                        type(deltafunc.get_args()[0]) is ASTSimpleExpression
                        and deltafunc.get_args()[0].get_variable() is not None
                        and deltafunc.get_args()[0].get_variable().name
                        == "t"):
                    code, message = Messages.delta_function_one_arg(deltafunc)
                    Logger.log_message(
                        code=code,
                        message=message,
                        error_position=_expr.get_source_position(),
                        log_level=LoggingLevel.ERROR)

                if type(parent) is not ASTKernel:
                    code, message = Messages.delta_function_cannot_be_mixed()
                    Logger.log_message(
                        code=code,
                        message=message,
                        error_position=_expr.get_source_position(),
                        log_level=LoggingLevel.ERROR)
Example #7
    def ode_toolbox_analysis(self, neuron: ASTNeuron,
                             kernel_buffers: Mapping[ASTKernel, ASTInputPort]):
        """
        Prepare data for ODE-toolbox input format, invoke ODE-toolbox analysis via its API, and return the output.
        """
        assert isinstance(
            neuron.get_equations_blocks(),
            ASTEquationsBlock), "only one equation block should be present"

        equations_block = neuron.get_equations_block()

        if len(equations_block.get_kernels()) == 0 and len(
                equations_block.get_ode_equations()) == 0:
            # no equations defined -> no changes to the neuron
            return None, None

        code, message = Messages.get_neuron_analyzed(neuron.get_name())
        Logger.log_message(neuron, code, message, neuron.get_source_position(),
                           LoggingLevel.INFO)

        parameters_block = neuron.get_parameter_blocks()
        odetoolbox_indict = self.transform_ode_and_kernels_to_json(
            neuron, parameters_block, kernel_buffers)
        odetoolbox_indict["options"] = {}
        odetoolbox_indict["options"]["output_timestep_symbol"] = "__h"
        solver_result = analysis(
            odetoolbox_indict,
            disable_stiffness_check=True,
            debug=FrontendConfiguration.logging_level == "DEBUG")
        analytic_solver = None
        analytic_solvers = [
            x for x in solver_result if x["solver"] == "analytical"
        ]
        assert len(analytic_solvers) <= 1, "More than one analytic solver not presently supported"
        if len(analytic_solvers) > 0:
            analytic_solver = analytic_solvers[0]

        # if numeric solver is required, generate a stepping function that includes each state variable
        numeric_solver = None
        numeric_solvers = [
            x for x in solver_result if x["solver"].startswith("numeric")
        ]
        if numeric_solvers:
            solver_result = analysis(
                odetoolbox_indict,
                disable_stiffness_check=True,
                disable_analytic_solver=True,
                debug=FrontendConfiguration.logging_level == "DEBUG")
            numeric_solvers = [
                x for x in solver_result if x["solver"].startswith("numeric")
            ]
            assert len(numeric_solvers) <= 1, "More than one numeric solver not presently supported"
            if len(numeric_solvers) > 0:
                numeric_solver = numeric_solvers[0]

        return analytic_solver, numeric_solver
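
# A hypothetical caller of ode_toolbox_analysis() (the names below are made up): the
# returned pair determines whether propagator-based or numeric stepping code is generated.
def choose_update_strategy(analytic_solver, numeric_solver):
    if analytic_solver is None and numeric_solver is None:
        return "no-equations"          # nothing to integrate
    if numeric_solver is not None:
        return "numeric-stepping"      # e.g. a GSL-based stepping function
    return "analytic-propagators"      # propagators from the analytic solver dict


assert choose_update_strategy(None, None) == "no-equations"
assert choose_update_strategy({"solver": "analytical"}, None) == "analytic-propagators"
assert choose_update_strategy(None, {"solver": "numeric-rk45"}) == "numeric-stepping"
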
 def visit_assignment(self, node):
     symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_complete_name(),
                                                 SymbolKind.VARIABLE)
     if symbol is None:
         code, message = Messages.get_variable_not_defined(node.get_variable().get_complete_name())
         Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR, neuron=self.neuron)
Example #9
def add_ode_to_variable(ode_equation):
    """
    Resolves to the corresponding symbol and updates the corresponding ode-declaration.
    :param ode_equation: a single ode-equation
    :type ode_equation: ast_ode_equation
    """
    # a differential equation is defined by stating its derivative, so derive the actual order from the lhs
    diff_order = ode_equation.get_lhs().get_differential_order() - 1
    # we check if the corresponding symbol already exists, e.g. V_m' has already been declared
    existing_symbol = (ode_equation.get_scope().resolve_to_symbol(
        ode_equation.get_lhs().get_name() + '\'' * diff_order,
        SymbolKind.VARIABLE))
    if existing_symbol is not None:
        existing_symbol.set_ode_definition(ode_equation.get_rhs())
        # todo added on merge
        ode_equation.get_scope().update_variable_symbol(existing_symbol)
        code, message = Messages.get_ode_updated(
            ode_equation.get_lhs().get_name_of_lhs())
        Logger.log_message(error_position=existing_symbol.
                           get_referenced_object().get_source_position(),
                           code=code,
                           message=message,
                           log_level=LoggingLevel.INFO)
    else:
        code, message = Messages.get_no_variable_found(
            ode_equation.get_lhs().get_name_of_lhs())
        Logger.log_message(code=code,
                           message=message,
                           error_position=ode_equation.get_source_position(),
                           log_level=LoggingLevel.ERROR)
    return
Example #10
    def visit_simple_expression(self, node):
        """
        Visits a single variable as contained in a simple expression and derives its type.
        :param node: a single simple expression
        :type node: ASTSimpleExpression
        """
        assert isinstance(node, ASTSimpleExpression), \
            '(PyNestML.Visitor.VariableVisitor) No or wrong type of simple expression provided (%s)!' % type(node)
        assert (node.get_scope() is not None), \
            '(PyNestML.Visitor.VariableVisitor) No scope found, run symboltable creator!'

        scope = node.get_scope()
        var_name = node.get_variable().get_name()
        var_resolve = scope.resolve_to_symbol(var_name, SymbolKind.VARIABLE)

        # update the type of the variable according to its symbol type.
        if var_resolve is not None:
            node.type = var_resolve.get_type_symbol()
            node.type.referenced_object = node
        else:
            message = 'Variable ' + str(node) + ' could not be resolved!'
            Logger.log_message(code=MessageCode.SYMBOL_NOT_RESOLVED,
                               error_position=node.get_source_position(),
                               message=message,
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
        return
Example #11
    def handle_input_path(cls, path):
        if path is None or path == '':
            # the mandatory path arg has not been handed over; just terminate
            raise InvalidPathException('No input path specified.')

        cls.paths_to_compilation_units = list()
        if os.path.isabs(path):
            cls.provided_path = path
        else:
            # a relative path, reconstruct it. get the parent dir where models, pynestml etc. is located
            pynestml_dir = os.getcwd()
            cls.provided_path = os.path.join(pynestml_dir, path)

        if os.path.isfile(cls.provided_path):
            cls.paths_to_compilation_units.append(cls.provided_path)
        elif os.path.isdir(cls.provided_path):
            for filename in os.listdir(cls.provided_path):
                if filename.endswith('.nestml'):
                    cls.paths_to_compilation_units.append(
                        os.path.join(cls.provided_path, filename))
        else:
            # input_path should be either a file or a directory
            code, message = Messages.get_input_path_not_found(
                path=cls.provided_path)
            Logger.log_message(code=code,
                               message=message,
                               log_level=LoggingLevel.ERROR)
            raise Exception(message)
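
A stand-alone sketch of the same collection step using only the standard library (glob is an assumption here; the method above uses os.listdir):

import glob
import os


def collect_nestml_files(path):
    """Return the .nestml files referenced by path, which may be a single file or a directory."""
    path = os.path.abspath(path)
    if os.path.isfile(path):
        return [path]
    if os.path.isdir(path):
        return sorted(glob.glob(os.path.join(path, "*.nestml")))
    raise FileNotFoundError("input path is neither a file nor a directory: %s" % path)
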
Example #12
def add_ode_to_variable(ode_equation):
    """
    Resolves to the corresponding symbol and updates the corresponding ode-declaration.
    :param ode_equation: a single ode-equation
    :type ode_equation: ast_ode_equation
    """
    for diff_order in range(ode_equation.get_lhs().get_differential_order()):
        var_name = ode_equation.get_lhs().get_name() + "'" * diff_order
        existing_symbol = ode_equation.get_scope().resolve_to_symbol(
            var_name, SymbolKind.VARIABLE)

        if existing_symbol is None:
            code, message = Messages.get_no_variable_found(
                ode_equation.get_lhs().get_name_of_lhs())
            Logger.log_message(
                code=code,
                message=message,
                error_position=ode_equation.get_source_position(),
                log_level=LoggingLevel.ERROR)
            return

        existing_symbol.set_ode_or_kernel(ode_equation)

        ode_equation.get_scope().update_variable_symbol(existing_symbol)
        code, message = Messages.get_ode_updated(
            ode_equation.get_lhs().get_name_of_lhs())
        Logger.log_message(error_position=existing_symbol.
                           get_referenced_object().get_source_position(),
                           code=code,
                           message=message,
                           log_level=LoggingLevel.INFO)
Example #13
 def analyse_neuron(self, neuron):
     # type: (ASTNeuron) -> None
     """
     Analyse and transform a single neuron.
     :param neuron: a single neuron.
     """
     code, message = Messages.get_start_processing_neuron(neuron.get_name())
     Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
     # make normalization
     # apply spikes to buffers
     # get rid of convolve calls, store them and apply them at the end
     equations_block = neuron.get_equations_block()
     shape_to_buffers = {}
     if neuron.get_equations_block() is not None:
         # extract function names and corresponding incoming buffers
         convolve_calls = OdeTransformer.get_sum_function_calls(equations_block)
         for convolve in convolve_calls:
             shape_to_buffers[str(convolve.get_args()[0])] = str(convolve.get_args()[1])
         OdeTransformer.refactor_convolve_call(neuron.get_equations_block())
         self.make_functions_self_contained(equations_block.get_ode_functions())
         self.replace_functions_through_defining_expressions(equations_block.get_ode_equations(),
                                                        equations_block.get_ode_functions())
         # transform everything into gsl processable (e.g. no functional shapes) or exact form.
         self.transform_shapes_and_odes(neuron, shape_to_buffers)
         self.apply_spikes_from_buffers(neuron, shape_to_buffers)
         # update the symbol table
         symbol_table_visitor = ASTSymbolTableVisitor()
         symbol_table_visitor.after_ast_rewrite_ = True		# ODE block might have been removed entirely: suppress warnings
         neuron.accept(symbol_table_visitor)
Example #14
    def convert_name_reference(self,
                               ast_variable: ASTVariable,
                               prefix: str = ''):
        """
        Converts a single name reference to a gsl processable format.
        :param ast_variable: a single variable
        :type ast_variable: ASTVariable
        :return: a gsl processable format of the variable
        :rtype: str
        """
        variable_name = NestNamesConverter.convert_to_cpp_name(
            ast_variable.get_name())

        if variable_name == PredefinedVariables.E_CONSTANT:
            return 'numerics::e'

        symbol = ast_variable.get_scope().resolve_to_symbol(
            ast_variable.get_complete_name(), SymbolKind.VARIABLE)
        if symbol is None:
            # test if variable name can be resolved to a type
            if PredefinedUnits.is_unit(ast_variable.get_complete_name()):
                return str(
                    UnitConverter.get_factor(
                        PredefinedUnits.get_unit(
                            ast_variable.get_complete_name()).get_unit()))

            code, message = Messages.get_could_not_resolve(variable_name)
            Logger.log_message(
                log_level=LoggingLevel.ERROR,
                code=code,
                message=message,
                error_position=ast_variable.get_source_position())
            return ''

        if symbol.is_init_values():
            return GSLNamesConverter.name(symbol)

        if symbol.is_buffer():
            if isinstance(symbol.get_type_symbol(), UnitTypeSymbol):
                units_conversion_factor = UnitConverter.get_factor(
                    symbol.get_type_symbol().unit.unit)
            else:
                units_conversion_factor = 1
            s = ""
            if not units_conversion_factor == 1:
                s += "(" + str(units_conversion_factor) + " * "
            s += prefix + 'B_.' + NestNamesConverter.buffer_value(symbol)
            if symbol.has_vector_parameter():
                s += '[i]'
            if not units_conversion_factor == 1:
                s += ")"
            return s

        if symbol.is_local() or symbol.is_function:
            return variable_name

        if symbol.has_vector_parameter():
            return prefix + 'get_' + variable_name + '()[i]'

        return prefix + 'get_' + variable_name + '()'
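
# Stand-alone mirror (hypothetical names) of the buffer branch above, showing the C++
# access string it produces, e.g. "(0.001 * B_.I_stim[i])" when a unit conversion applies.
def buffer_access_string(buffer_name, conversion_factor=1, is_vector=False, prefix=""):
    s = prefix + "B_." + buffer_name
    if is_vector:
        s += "[i]"
    if conversion_factor != 1:
        s = "(" + str(conversion_factor) + " * " + s + ")"
    return s


assert buffer_access_string("spikes", is_vector=True) == "B_.spikes[i]"
assert buffer_access_string("I_stim", conversion_factor=0.001) == "(0.001 * B_.I_stim)"
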
    def endvisit_assignment(self, node):
        scope = node.get_scope()
        var_name = node.get_variable().get_name()

        _expr = node.get_expression()

        var_symbol = scope.resolve_to_symbol(var_name, SymbolKind.VARIABLE)

        _equals = var_symbol.get_type_symbol().equals(_expr.type) \
         or var_symbol.get_type_symbol().differs_only_in_magnitude(_expr.type)

        message = 'line ' + str(_expr.get_source_position()) + ' : LHS = ' + \
                  var_symbol.get_type_symbol().get_symbol_name() + \
                  ' RHS = ' + _expr.type.get_symbol_name() + \
                  ' Equal ? ' + str(_equals)

        if isinstance(_expr.type, UnitTypeSymbol):
            message += " Neuroscience Factor: " + \
                       str(UnitConverter().get_factor(_expr.type.astropy_unit))

        Logger.log_message(error_position=node.get_source_position(), code=MessageCode.TYPE_MISMATCH,
                           message=message, log_level=LoggingLevel.INFO)

        if _equals is False:
            Logger.log_message(message="Type mismatch in test!",
                               code=MessageCode.TYPE_MISMATCH,
                               error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
        return
Example #16
    def visit_expression(self, node):
        """
        Visits a single expression containing a plus or minus operator and updates its type.
        :param node: a single expression
        :type node: ast_expression
        """
        lhs_type = node.get_lhs().type
        rhs_type = node.get_rhs().type

        arith_op = node.get_binary_operator()

        lhs_type.referenced_object = node.get_lhs()
        rhs_type.referenced_object = node.get_rhs()

        node.type = ErrorTypeSymbol()
        if arith_op.is_plus_op:
            node.type = lhs_type + rhs_type
        elif arith_op.is_minus_op:
            node.type = lhs_type - rhs_type

        if isinstance(node.type, ErrorTypeSymbol):
            code, message = Messages.get_binary_operation_type_could_not_be_derived(
                lhs=str(node.get_lhs()),
                operator=str(arith_op),
                rhs=str(node.get_rhs()),
                lhs_type=str(lhs_type.print_nestml_type()),
                rhs_type=str(rhs_type.print_nestml_type()))
            Logger.log_message(code=code,
                               message=message,
                               error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
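
The `lhs_type + rhs_type` arithmetic works because the type symbols overload the operators and return an ErrorTypeSymbol for incompatible operands, which the code then detects. A stub-class sketch of that design (not the real TypeSymbol hierarchy):

class ErrorType:
    pass


class RealType:
    def __add__(self, other):
        return RealType() if isinstance(other, RealType) else ErrorType()

    __sub__ = __add__


class StringType:
    def __add__(self, other):
        return ErrorType()

    __sub__ = __add__


assert isinstance(RealType() + RealType(), RealType)
assert isinstance(RealType() - StringType(), ErrorType)   # this is the case the error branch above reports
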
Example #17
    def move_decls(cls, var_name, from_block, to_block, var_name_suffix, block_type: BlockType, mode="move", scope=None) -> List[ASTDeclaration]:
        from pynestml.codegeneration.ast_transformers import ASTTransformers
        from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor
        assert mode in ["move", "copy"]

        if not from_block \
           or not to_block:
            return []

        decls = ASTTransformers.get_declarations_from_block(var_name, from_block)
        if var_name.endswith(var_name_suffix):
            decls.extend(ASTTransformers.get_declarations_from_block(var_name.removesuffix(var_name_suffix), from_block))

        if decls:
            Logger.log_message(None, -1, "Moving definition of " + var_name + " from synapse to neuron",
                               None, LoggingLevel.INFO)
            for decl in decls:
                if mode == "move":
                    from_block.declarations.remove(decl)
                if mode == "copy":
                    decl = decl.clone()
                assert len(decl.get_variables()) <= 1
                if not decl.get_variables()[0].name.endswith(var_name_suffix):
                    ASTUtils.add_suffix_to_decl_lhs(decl, suffix=var_name_suffix)
                to_block.get_declarations().append(decl)
                decl.update_scope(to_block.get_scope())

                ast_symbol_table_visitor = ASTSymbolTableVisitor()
                ast_symbol_table_visitor.block_type_stack.push(block_type)
                decl.accept(ast_symbol_table_visitor)
                ast_symbol_table_visitor.block_type_stack.pop()

        return decls
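
Note that `str.removesuffix`, used above, requires Python 3.9 or newer; on older interpreters an equivalent helper could look like this (a sketch, not part of PyNestML):

def removesuffix(text, suffix):
    """Fallback for str.removesuffix (available as a str method from Python 3.9 on)."""
    if suffix and text.endswith(suffix):
        return text[:-len(suffix)]
    return text


assert removesuffix("V_m_post", "_post") == "V_m"
assert removesuffix("V_m", "_post") == "V_m"
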
Example #18
def analyse_and_generate_neuron(neuron):
    # type: (ASTNeuron) -> None
    """
    Analyses a single neuron, solves it and generates the corresponding code.
    :param neuron: a single neuron.
    """
    code, message = Messages.get_start_processing_neuron(neuron.get_name())
    Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
    # make normalization
    # apply spikes to buffers
    # get rid of convolve calls, store them and apply them at the end
    equations_block = neuron.get_equations_block()
    shape_to_buffers = {}
    if neuron.get_equations_block() is not None:
        # extract function names and corresponding incoming buffers
        convolve_calls = OdeTransformer.get_sum_function_calls(equations_block)
        for convolve in convolve_calls:
            shape_to_buffers[str(convolve.get_args()[0])] = str(convolve.get_args()[1])
        OdeTransformer.refactor_convolve_call(neuron.get_equations_block())
        make_functions_self_contained(equations_block.get_ode_functions())
        replace_functions_through_defining_expressions(equations_block.get_ode_equations(),
                                                       equations_block.get_ode_functions())
        # transform everything into gsl processable (e.g. no functional shapes) or exact form.
        transform_shapes_and_odes(neuron, shape_to_buffers)
        # update the symbol table
        neuron.accept(ASTSymbolTableVisitor())
    generate_nest_code(neuron)
    # now store the transformed model
    store_transformed_model(neuron)
    # at that point all shapes are transformed into the ODE form and spikes can be applied
    code, message = Messages.get_code_generated(neuron.get_name(), FrontendConfiguration.get_target_path())
    Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO)
 def visit_variable(self, node):
     """
     Visits each shape and checks if it is used correctly.
     :param node: a single node.
     :type node: AST_
     """
     for shapeName in self.__shapes:
         # in order to allow shadowing by local scopes, we first check if the element has been declared locally
         symbol = node.get_scope().resolve_to_symbol(shapeName, SymbolKind.VARIABLE)
         # if it is not a shape just continue
         if symbol is None:
             code, message = Messages.get_no_variable_found(shapeName)
             Logger.log_message(neuron=self.__neuron_node, code=code, message=message, log_level=LoggingLevel.ERROR)
             continue
         if not symbol.is_shape():
             continue
         if node.get_complete_name() == shapeName:
             parent = self.__neuron_node.get_parent(node)
             if parent is not None:
                 if isinstance(parent, ASTOdeShape):
                     continue
                 grandparent = self.__neuron_node.get_parent(parent)
                 if grandparent is not None and isinstance(grandparent, ASTFunctionCall):
                     grandparent_func_name = grandparent.get_name()
                     if grandparent_func_name == 'curr_sum' or grandparent_func_name == 'cond_sum' or \
                             grandparent_func_name == 'convolve':
                         continue
             code, message = Messages.get_shape_outside_convolve(shapeName)
             Logger.log_message(error_position=node.get_source_position(),
                                code=code, message=message,
                                log_level=LoggingLevel.ERROR)
     return
Example #20
 def check_co_co(cls, _neuron=None):
     """
     Checks the coco for the handed over neuron.
     :param _neuron: a single neuron instance.
     :type _neuron: ASTNeuron
     """
     assert (_neuron is not None and isinstance(_neuron, ASTNeuron)), \
         '(PyNestML.CoCo.FunctionCallsConsistent) No or wrong type of neuron provided (%s)!' % type(_neuron)
     cls.__neuronName = _neuron.get_name()
     for userDefinedFunction in _neuron.get_functions():
         cls.processed_function = userDefinedFunction
         symbol = userDefinedFunction.get_scope().resolve_to_symbol(userDefinedFunction.get_name(),
                                                                    SymbolKind.FUNCTION)
         # first ensure that the block contains at least one statement
         if symbol is not None and len(userDefinedFunction.get_block().get_stmts()) > 0:
             # now check that the last statement is a return
             cls.__check_return_recursively(symbol.get_return_type(),
                                            userDefinedFunction.get_block().get_stmts(), False)
         # now if it does not have a statement, but uses a return type, it is an error
         elif symbol is not None and userDefinedFunction.has_return_type() and \
                 not symbol.get_return_type().equals(PredefinedTypes.get_void_type()):
             code, message = Messages.get_no_return()
             Logger.log_message(node=_neuron, code=code, message=message,
                                error_position=userDefinedFunction.get_source_position(),
                                log_level=LoggingLevel.ERROR)
     return
Example #21
 def binary_operation_not_defined_error(self, _operator, _other):
     from pynestml.symbols.error_type_symbol import ErrorTypeSymbol
     result = ErrorTypeSymbol()
     code, message = Messages.get_binary_operation_not_defined(lhs=self, operator=_operator, rhs=_other)
     Logger.log_message(code=code, message=message, error_position=self.referenced_object.get_source_position(),
                        log_level=LoggingLevel.ERROR)
     return result
 def visit_variable(self, node: ASTNode):
     """
     Visits each kernel and checks if it is used correctly.
     :param node: a single node.
     """
     for kernelName in self.__kernels:
         # in order to allow shadowing by local scopes, we first check if the element has been declared locally
         symbol = node.get_scope().resolve_to_symbol(
             kernelName, SymbolKind.VARIABLE)
         # if it is not a kernel just continue
         if symbol is None:
             continue
         if not symbol.is_kernel():
             continue
         if node.get_complete_name() == kernelName:
             parent = self.__neuron_node.get_parent(node)
             if parent is not None:
                 if isinstance(parent, ASTKernel):
                     continue
                 grandparent = self.__neuron_node.get_parent(parent)
                 if grandparent is not None and isinstance(
                         grandparent, ASTFunctionCall):
                     grandparent_func_name = grandparent.get_name()
                     if grandparent_func_name == 'convolve':
                         continue
             code, message = Messages.get_kernel_outside_convolve(
                 kernelName)
             Logger.log_message(code=code,
                                message=message,
                                log_level=LoggingLevel.ERROR,
                                error_position=node.get_source_position())
Example #23
def add_ode_shape_to_variable(ode_shape):
    """
    Adds the shape as the defining equation.
    :param ode_shape: a single shape object.
    :type ode_shape: ast_ode_shape
    """
    if ode_shape.get_variable().get_differential_order() == 0:
        # we only update those which define an ode
        return
    # we check if the corresponding symbol already exists, e.g. V_m' has already been declared
    existing_symbol = ode_shape.get_scope().resolve_to_symbol(
        ode_shape.get_variable().get_name_of_lhs(), SymbolKind.VARIABLE)
    if existing_symbol is not None:
        existing_symbol.set_ode_definition(ode_shape.get_expression())
        existing_symbol.set_variable_type(VariableType.SHAPE)
        ode_shape.get_scope().update_variable_symbol(existing_symbol)
        code, message = Messages.get_ode_updated(
            ode_shape.get_variable().get_name_of_lhs())
        Logger.log_message(error_position=existing_symbol.
                           get_referenced_object().get_source_position(),
                           code=code,
                           message=message,
                           log_level=LoggingLevel.INFO)
    else:
        code, message = Messages.get_no_variable_found(
            ode_shape.get_variable().get_name_of_lhs())
        Logger.log_message(code=code,
                           message=message,
                           error_position=ode_shape.get_source_position(),
                           log_level=LoggingLevel.ERROR)
    return
Example #24
 def visit_while_stmt(self, node):
     """
     Visits a single while stmt and checks that its condition is of boolean type.
     :param node: a single while stmt
     :type node: ASTWhileStmt
     """
     if node.get_source_position().equals(
             ASTSourceLocation.get_added_source_position()):
         # no type checks are executed for added nodes, since we assume correctness
         return
     cond_type = node.get_condition().type
     if isinstance(cond_type, ErrorTypeSymbol):
         code, message = Messages.get_type_could_not_be_derived(
             node.get_condition())
         Logger.log_message(
             code=code,
             message=message,
             error_position=node.get_condition().get_source_position(),
             log_level=LoggingLevel.ERROR)
     elif not cond_type.equals(PredefinedTypes.get_boolean_type()):
         code, message = Messages.get_type_different_from_expected(
             PredefinedTypes.get_boolean_type(), cond_type)
         Logger.log_message(
             code=code,
             message=message,
             error_position=node.get_condition().get_source_position(),
             log_level=LoggingLevel.ERROR)
     return
Example #25
def generate_nest_module_code(neurons):
    # type: (list(ASTNeuron)) -> None
    """
    Generates code that is necessary to integrate neuron models into the NEST infrastructure.
    :param neurons: a list of neurons
    :type neurons: list(ASTNeuron)
    """
    namespace = {'neurons': neurons, 'moduleName': FrontendConfiguration.get_module_name(),
                 'now': datetime.datetime.utcnow()}
    if not os.path.exists(FrontendConfiguration.get_target_path()):
        os.makedirs(FrontendConfiguration.get_target_path())

    with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                               FrontendConfiguration.get_module_name())) + '.h', 'w+') as f:
        f.write(str(template_module_header.render(namespace)))

    with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                               FrontendConfiguration.get_module_name())) + '.cpp', 'w+') as f:
        f.write(str(template_module_class.render(namespace)))

    with open(str(os.path.join(FrontendConfiguration.get_target_path(),
                               'CMakeLists')) + '.txt', 'w+') as f:
        f.write(str(template_cmakelists.render(namespace)))

    if not os.path.isdir(os.path.realpath(os.path.join(FrontendConfiguration.get_target_path(), 'sli'))):
        os.makedirs(os.path.realpath(os.path.join(FrontendConfiguration.get_target_path(), 'sli')))

    with open(str(os.path.join(FrontendConfiguration.get_target_path(), 'sli',
                               FrontendConfiguration.get_module_name() + "-init")) + '.sli', 'w+') as f:
        f.write(str(template_sli_init.render(namespace)))

    code, message = Messages.get_module_generated(FrontendConfiguration.get_target_path())
    Logger.log_message(None, code, message, None, LoggingLevel.INFO)
    def endvisit_assignment(self, node):
        scope = node.get_scope()
        var_name = node.get_variable().get_name()

        _expr = node.get_expression()

        var_symbol = scope.resolve_to_symbol(var_name, SymbolKind.VARIABLE)

        _equals = var_symbol.get_type_symbol().equals(_expr.type)

        message = 'line ' + str(_expr.get_source_position()) + ' : LHS = ' + \
                  var_symbol.get_type_symbol().get_symbol_name() + \
                  ' RHS = ' + _expr.type.get_symbol_name() + \
                  ' Equal ? ' + str(_equals)

        if isinstance(_expr.type, UnitTypeSymbol):
            message += " Neuroscience Factor: " + \
                       str(UnitConverter().get_factor(_expr.type.astropy_unit))

        Logger.log_message(error_position=node.get_source_position(), code=MessageCode.TYPE_MISMATCH,
                           message=message, log_level=LoggingLevel.INFO)

        if _equals is False:
            Logger.log_message(message="Type mismatch in test!",
                               code=MessageCode.TYPE_MISMATCH,
                               error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
        return
Example #27
 def visit_declaration(self, node):
     """
     Checks the coco for a declaration.
     :param node: a single declaration.
     :type node: ASTDeclaration
     """
     assert isinstance(node, ASTDeclaration)
     if node.has_invariant():
         invariant_type = node.get_invariant().type
         if invariant_type is None or isinstance(invariant_type,
                                                 ErrorTypeSymbol):
             code, message = Messages.get_type_could_not_be_derived(
                 str(node.get_invariant()))
             Logger.log_message(
                 error_position=node.get_invariant().get_source_position(),
                 code=code,
                 message=message,
                 log_level=LoggingLevel.ERROR)
         elif not invariant_type.equals(PredefinedTypes.get_boolean_type()):
             code, message = Messages.get_type_different_from_expected(
                 PredefinedTypes.get_boolean_type(), invariant_type)
             Logger.log_message(
                 error_position=node.get_invariant().get_source_position(),
                 code=code,
                 message=message,
                 log_level=LoggingLevel.ERROR)
     return
Example #28
    def visit_declaration(self, node):
        """
        Checks the coco.
        :param node: a single declaration.
        :type node: ast_declaration
        """
        if node.has_expression():
            variables = node.get_expression().get_variables()
            for variable in variables:
                if variable is not None:
                    symbol = node.get_scope().resolve_to_symbol(
                        variable.get_complete_name(), SymbolKind.VARIABLE)
                    if symbol is not None and symbol.has_vector_parameter() \
                            and not node.has_size_parameter():
                        code, message = Messages.get_vector_in_non_vector(
                            vector=symbol.get_symbol_name(),
                            non_vector=list(var.get_complete_name()
                                            for var in node.get_variables()))

                        Logger.log_message(
                            error_position=node.get_source_position(),
                            code=code,
                            message=message,
                            log_level=LoggingLevel.ERROR)
        return
Example #29
    def visit_simple_expression(self, node):
        """
        Visits a single function call as stored in a simple expression and checks to see whether any calls are made to generate a random number. If so, set a flag so that the necessary initialisers can be called at the right time in the generated code.
        """
        assert isinstance(node, ASTSimpleExpression), \
            '(PyNestML.Visitor.FunctionCallVisitor) No or wrong type of simple expression provided (%s)!' % type(node)
        assert (node.get_scope() is not None), \
            "(PyNestML.Visitor.FunctionCallVisitor) No scope found, run symboltable creator!"
        scope = node.get_scope()
        if node.get_function_call() is None:
            return
        function_name = node.get_function_call().get_name()
        method_symbol = scope.resolve_to_symbol(function_name, SymbolKind.FUNCTION)

        # check if this function exists
        if method_symbol is None:
            code, message = Messages.get_could_not_resolve(function_name)
            Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
            return

        if function_name == PredefinedFunctions.RANDOM_NORMAL:
            self._norm_rng_is_used = True
            return
Example #30
 def try_to_recover_or_error(_lhs_type_symbol, _rhs_type_symbol,
                             _containing_expression):
     if _rhs_type_symbol.is_castable_to(_lhs_type_symbol):
         if isinstance(_lhs_type_symbol, UnitTypeSymbol) \
          and isinstance(_rhs_type_symbol, UnitTypeSymbol):
             conversion_factor = UnitTypeSymbol.get_conversion_factor(
                 _lhs_type_symbol.astropy_unit,
                 _rhs_type_symbol.astropy_unit)
             if not conversion_factor == 1.:
                 # the units are mutually convertible, but require a factor unequal to 1 (e.g. mV and A*Ohm)
                 TypeCaster.do_magnitude_conversion_rhs_to_lhs(
                     _rhs_type_symbol, _lhs_type_symbol,
                     _containing_expression)
         # the units are mutually convertible (e.g. V and A*Ohm)
         code, message = Messages.get_implicit_cast_rhs_to_lhs(
             _rhs_type_symbol.print_symbol(),
             _lhs_type_symbol.print_symbol())
         Logger.log_message(
             error_position=_containing_expression.get_source_position(),
             code=code,
             message=message,
             log_level=LoggingLevel.INFO)
     else:
         code, message = Messages.get_type_different_from_expected(
             _lhs_type_symbol, _rhs_type_symbol)
         Logger.log_message(
             error_position=_containing_expression.get_source_position(),
             code=code,
             message=message,
             log_level=LoggingLevel.ERROR)
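
The "factor unequal to 1" case can be reproduced directly with astropy.units, which is what the astropy_unit attributes above refer to (assuming astropy is installed; this snippet is illustrative only):

import astropy.units as u

# mV and A*Ohm are mutually convertible, but only with a magnitude factor of 1000
factor = (1 * u.A * u.ohm).to(u.mV).value
assert abs(factor - 1000.0) < 1e-9
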
Example #31
 def visit_assignment(self, node):
     symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_complete_name(),
                                                 SymbolKind.VARIABLE)
     if symbol is None:
         code, message = Messages.get_variable_not_defined(node.get_variable().get_complete_name())
         Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR, node=self.neuron)
    def convert_name_reference(self, variable):
        """
        Converts a single variable to nest processable format.
        :param variable: a single variable.
        :type variable: ASTVariable
        :return: a nest processable format.
        :rtype: str
        """
        from pynestml.codegeneration.nest_printer import NestPrinter
        assert (variable is not None and isinstance(variable, ASTVariable)), \
            '(PyNestML.CodeGeneration.NestReferenceConverter) No or wrong type of uses-gsl provided (%s)!' % type(
                variable)
        variable_name = NestNamesConverter.convert_to_cpp_name(
            variable.get_complete_name())

        if variable_name == PredefinedVariables.E_CONSTANT:
            return 'numerics::e'
        else:
            symbol = variable.get_scope().resolve_to_symbol(
                variable_name, SymbolKind.VARIABLE)
            if symbol is None:
                # test if variable name can be resolved to a type
                if PredefinedUnits.is_unit(variable.get_complete_name()):
                    return str(
                        UnitConverter.get_factor(
                            PredefinedUnits.get_unit(
                                variable.get_complete_name()).get_unit()))

                code, message = Messages.get_could_not_resolve(variable_name)
                Logger.log_message(
                    log_level=LoggingLevel.ERROR,
                    code=code,
                    message=message,
                    error_position=variable.get_source_position())
                return ''
            else:
                if symbol.is_local():
                    return variable_name + (
                        '[i]' if symbol.has_vector_parameter() else '')
                elif symbol.is_buffer():
                    return NestPrinter.print_origin(symbol) + NestNamesConverter.buffer_value(symbol) \
                           + ('[i]' if symbol.has_vector_parameter() else '')
                else:
                    if symbol.is_function:
                        return 'get_' + variable_name + '()' + (
                            '[i]' if symbol.has_vector_parameter() else '')
                    else:
                        if symbol.is_init_values():
                            temp = NestPrinter.print_origin(symbol)
                            if self.uses_gsl:
                                temp += GSLNamesConverter.name(symbol)
                            else:
                                temp += NestNamesConverter.name(symbol)
                            temp += ('[i]'
                                     if symbol.has_vector_parameter() else '')
                            return temp
                        else:
                            return NestPrinter.print_origin(symbol) + \
                                   NestNamesConverter.name(symbol) + \
                                   ('[i]' if symbol.has_vector_parameter() else '')
Example #33
 def visit_neuron(self, node):
     """
     Private method: Used to visit a single neuron and create the corresponding global as well as local scopes.
     :return: a single neuron.
     :rtype: ast_neuron
     """
     # set current processed neuron
     Logger.set_current_neuron(node)
     code, message = Messages.get_start_building_symbol_table()
     Logger.log_message(neuron=node, code=code, error_position=node.get_source_position(),
                        message=message, log_level=LoggingLevel.INFO)
     # before starting the work on the neuron, make everything which was implicit explicit
     # but if we have a model without an equations block, just skip this step
     if node.get_equations_blocks() is not None:
         make_implicit_odes_explicit(node.get_equations_blocks())
     scope = Scope(scope_type=ScopeType.GLOBAL, source_position=node.get_source_position())
     node.update_scope(scope)
     node.get_body().update_scope(scope)
     # now first, we add all predefined elements to the scope
     variables = PredefinedVariables.get_variables()
     functions = PredefinedFunctions.get_function_symbols()
     types = PredefinedTypes.get_types()
     for symbol in variables.keys():
         node.get_scope().add_symbol(variables[symbol])
     for symbol in functions.keys():
         node.get_scope().add_symbol(functions[symbol])
     for symbol in types.keys():
         node.get_scope().add_symbol(types[symbol])
 def check_co_co(cls, _neuron=None):
     """
     Checks the coco for the handed over neuron.
     :param _neuron: a single neuron instance.
     :type _neuron: ASTNeuron
     """
     assert (_neuron is not None and isinstance(_neuron, ASTNeuron)), \
         '(PyNestML.CoCo.FunctionCallsConsistent) No or wrong type of neuron provided (%s)!' % type(_neuron)
     cls.__neuronName = _neuron.get_name()
     for userDefinedFunction in _neuron.get_functions():
         cls.processed_function = userDefinedFunction
         symbol = userDefinedFunction.get_scope().resolve_to_symbol(userDefinedFunction.get_name(),
                                                                    SymbolKind.FUNCTION)
         # first ensure that the block contains at least one statement
         if symbol is not None and len(userDefinedFunction.get_block().get_stmts()) > 0:
             # now check that the last statement is a return
             cls.__check_return_recursively(symbol.get_return_type(),
                                            userDefinedFunction.get_block().get_stmts(), False)
         # now if it does not have a statement, but uses a return type, it is an error
         elif symbol is not None and userDefinedFunction.has_return_type() and \
                 not symbol.get_return_type().equals(PredefinedTypes.get_void_type()):
             code, message = Messages.get_no_return()
             Logger.log_message(neuron=_neuron, code=code, message=message,
                                error_position=userDefinedFunction.get_source_position(),
                                log_level=LoggingLevel.ERROR)
     return
 def visit_input_line(self, node):
     """
     Checks the coco on the current node.
     :param node: a single input line.
     :type node: ast_input_line
     """
     if node.is_spike():
         if node.has_input_types() and len(node.get_input_types()) > 1:
             inh = 0
             ext = 0
             for typ in node.get_input_types():
                 if typ.is_excitatory:
                     ext += 1
                 if typ.is_inhibitory:
                     inh += 1
             if inh > 1:
                 code, message = Messages.get_multiple_keywords(
                     'inhibitory')
                 Logger.log_message(
                     error_position=node.get_source_position(),
                     code=code,
                     message=message,
                     log_level=LoggingLevel.ERROR)
             if ext > 1:
                 code, message = Messages.get_multiple_keywords(
                     'excitatory')
                 Logger.log_message(
                     error_position=node.get_source_position(),
                     code=code,
                     message=message,
                     log_level=LoggingLevel.ERROR)
     return
Example #36
 def visit_neuron(self, node):
     """
     Private method: Used to visit a single neuron and create the corresponding global as well as local scopes.
     :return: a single neuron.
     :rtype: ast_neuron
     """
     # set current processed neuron
     Logger.set_current_node(node)
     code, message = Messages.get_start_building_symbol_table()
     Logger.log_message(node=node,
                        code=code,
                        error_position=node.get_source_position(),
                        message=message,
                        log_level=LoggingLevel.INFO)
     scope = Scope(scope_type=ScopeType.GLOBAL,
                   source_position=node.get_source_position())
     node.update_scope(scope)
     node.get_body().update_scope(scope)
     # now first, we add all predefined elements to the scope
     variables = PredefinedVariables.get_variables()
     functions = PredefinedFunctions.get_function_symbols()
     types = PredefinedTypes.get_types()
     for symbol in variables.keys():
         node.get_scope().add_symbol(variables[symbol])
     for symbol in functions.keys():
         node.get_scope().add_symbol(functions[symbol])
     for symbol in types.keys():
         node.get_scope().add_symbol(types[symbol])
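
The three registration loops iterate over keys and then index back into the dictionaries; iterating over `.values()` is the more idiomatic form. A stand-alone sketch with plain dicts (not the real symbol tables):

class DemoScope:
    def __init__(self):
        self.symbols = []

    def add_symbol(self, symbol):
        self.symbols.append(symbol)


variables = {"e": "sym_e", "t": "sym_t"}
functions = {"exp": "sym_exp"}
types = {"mV": "sym_mV"}

scope = DemoScope()
for group in (variables, functions, types):
    for symbol in group.values():
        scope.add_symbol(symbol)

assert len(scope.symbols) == 4
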
Example #37
    def visit_expression(self, node):
        """
        Visits an expression which uses a binary logic operator and updates the type.
        :param node: a single expression.
        :type node: ast_expression
        """
        lhs_type = node.get_lhs().type
        rhs_type = node.get_rhs().type

        lhs_type.referenced_object = node.get_lhs()
        rhs_type.referenced_object = node.get_rhs()

        if isinstance(lhs_type, BooleanTypeSymbol) and isinstance(
                rhs_type, BooleanTypeSymbol):
            node.type = PredefinedTypes.get_boolean_type()
        else:
            if isinstance(lhs_type, BooleanTypeSymbol):
                offending_type = lhs_type
            else:
                offending_type = rhs_type
            code, message = Messages.get_type_different_from_expected(
                BooleanTypeSymbol(), offending_type)
            Logger.log_message(code=code,
                               message=message,
                               error_position=lhs_type.referenced_object.
                               get_source_position(),
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
        return
Example #38
def add_kernel_to_variable(kernel):
    """
    Adds the kernel as the defining equation.

    If the definition of the kernel is e.g. `g'' = ...` then variable symbols `g` and `g'` will have their kernel definition and variable type set.

    :param kernel: a single kernel object.
    :type kernel: ASTKernel
    """
    if len(kernel.get_variables()) == 1 \
            and kernel.get_variables()[0].get_differential_order() == 0:
        # we only update those which define an ODE; skip "direct function of time" specifications
        return

    for var, expr in zip(kernel.get_variables(), kernel.get_expressions()):
        for diff_order in range(var.get_differential_order()):
            var_name = var.get_name() + "'" * diff_order
            existing_symbol = kernel.get_scope().resolve_to_symbol(
                var_name, SymbolKind.VARIABLE)

            if existing_symbol is None:
                code, message = Messages.get_no_variable_found(
                    var.get_name_of_lhs())
                Logger.log_message(code=code,
                                   message=message,
                                   error_position=kernel.get_source_position(),
                                   log_level=LoggingLevel.ERROR)
                return

            existing_symbol.set_ode_or_kernel(expr)
            existing_symbol.set_variable_type(VariableType.KERNEL)
            kernel.get_scope().update_variable_symbol(existing_symbol)
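
The inner loop builds the derivative names by appending apostrophes to the base name; a tiny stand-alone illustration matching the `g'' = ...` example from the docstring:

def derivative_names(base_name, differential_order):
    return [base_name + "'" * order for order in range(differential_order)]


assert derivative_names("g", 2) == ["g", "g'"]   # the symbols updated for a kernel defined via g''
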
Example #39
 def visit_neuron(self, node):
     """
     Private method: Used to visit a single neuron and create the corresponding global as well as local scopes.
     :return: a single neuron.
     :rtype: ast_neuron
     """
     # set current processed neuron
     Logger.set_current_neuron(node)
     code, message = Messages.get_start_building_symbol_table()
     Logger.log_message(neuron=node,
                        code=code,
                        error_position=node.get_source_position(),
                        message=message,
                        log_level=LoggingLevel.INFO)
     # before starting the work on the neuron, make everything which was implicit explicit
     # but if we have a model without an equations block, just skip this step
     if node.get_equations_blocks() is not None:
         make_implicit_odes_explicit(node.get_equations_blocks())
     scope = Scope(scope_type=ScopeType.GLOBAL,
                   source_position=node.get_source_position())
     node.update_scope(scope)
     node.get_body().update_scope(scope)
     # now first, we add all predefined elements to the scope
     variables = PredefinedVariables.get_variables()
     functions = PredefinedFunctions.get_function_symbols()
     types = PredefinedTypes.get_types()
     for symbol in variables.values():
         node.get_scope().add_symbol(symbol)
     for symbol in functions.values():
         node.get_scope().add_symbol(symbol)
     for symbol in types.values():
         node.get_scope().add_symbol(symbol)
Example #40
0
    def visit_simple_expression(self, node):
        """
        Visits a single function call

        :param node: a simple expression
        """
        assert isinstance(node, ASTSimpleExpression), \
            '(PyNestML.Visitor.FunctionCallVisitor) No or wrong type of simple expression provided (%s)!' % type(node)
        assert (node.get_scope() is not None), \
            "(PyNestML.Visitor.FunctionCallVisitor) No scope found, run symboltable creator!"
        if node.get_function_call() is None:
            return
        function_name = node.get_function_call().get_name()
        if function_name == PredefinedFunctions.TIME_RESOLUTION:
            _node = node
            while _node:
                _node = self.neuron.get_parent(_node)

                if isinstance(_node, ASTEquationsBlock) \
                        or isinstance(_node, ASTFunction):
                    code, message = Messages.get_could_not_resolve(
                        function_name)
                    Logger.log_message(
                        code=code,
                        message=message,
                        error_position=node.get_source_position(),
                        log_level=LoggingLevel.ERROR)
Example #41
0
 def analyse_neuron(self, neuron):
     # type: (ASTNeuron) -> None
     """
     Analyse and transform a single neuron.
     :param neuron: a single neuron.
     """
     code, message = Messages.get_start_processing_neuron(neuron.get_name())
     Logger.log_message(neuron, code, message, neuron.get_source_position(),
                        LoggingLevel.INFO)
     # make normalization
     # apply spikes to buffers
     # get rid of convolve, store them and apply then at the end
     equations_block = neuron.get_equations_block()
     shape_to_buffers = {}
     if neuron.get_equations_block() is not None:
         # extract function names and corresponding incoming buffers
         convolve_calls = OdeTransformer.get_sum_function_calls(
             equations_block)
         for convolve in convolve_calls:
             shape_to_buffers[str(convolve.get_args()[0])] = str(
                 convolve.get_args()[1])
         OdeTransformer.refactor_convolve_call(neuron.get_equations_block())
         self.make_functions_self_contained(
             equations_block.get_ode_functions())
         self.replace_functions_through_defining_expressions(
             equations_block.get_ode_equations(),
             equations_block.get_ode_functions())
         # transform everything into a GSL-processable (i.e. no functional shapes) or exact form.
         self.transform_shapes_and_odes(neuron, shape_to_buffers)
         self.apply_spikes_from_buffers(neuron, shape_to_buffers)
         # update the symbol table
         symbol_table_visitor = ASTSymbolTableVisitor()
         symbol_table_visitor.after_ast_rewrite_ = True  # ODE block might have been removed entirely: suppress warnings
         neuron.accept(symbol_table_visitor)
Example #42
0
 def visit_assignment(self, node):
     symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_name(), SymbolKind.VARIABLE)
     if symbol is not None and (symbol.block_type == BlockType.INPUT_BUFFER_SPIKE or
                                symbol.block_type == BlockType.INPUT_BUFFER_CURRENT):
         code, message = Messages.get_value_assigned_to_buffer(node.get_variable().get_complete_name())
         Logger.log_message(code=code, message=message,
                            error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
     return
Example #43
0
 def visit_unit_type(self, node):
     """
     Checks if the coco applies.
     :param node: a single unit type object.
     :type node: ast_unit_type
     """
     if node.is_div and isinstance(node.lhs, int) and node.lhs != 1:
         code, message = Messages.get_wrong_numerator(str(node))
         Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
Example #44
0
 def visit_ode_equation(self, node):
     """
     Checks the coco.
     :param node: A single ode equation.
     :type node: ast_ode_equation
     """
     if node.get_lhs().get_differential_order() == 0:
         code, message = Messages.get_order_not_declared(node.get_lhs().get_name())
         Logger.log_message(error_position=node.get_source_position(), code=code,
                            message=message, log_level=LoggingLevel.ERROR)
Example #45
0
    def handle_target(cls, target):
        if target is None or target.upper() == "NONE":
            target = ""     # make sure `target` is always a string

        if target not in CodeGenerator.get_known_targets():
            code, message = Messages.get_unknown_target(target)
            Logger.log_message(None, code, message, None, LoggingLevel.ERROR)
            raise InvalidTargetException()

        cls.target = target
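
The normalization above can be exercised in isolation. This sketch mirrors the logic with a hard-coded target set; the set contents and the standalone function name are assumptions for illustration, only the `handle_target` method itself comes from the source:

class InvalidTargetException(Exception):
    pass

KNOWN_TARGETS = {"", "NEST", "autodoc"}  # illustrative; the real list comes from CodeGenerator.get_known_targets()

def normalize_target(target):
    if target is None or target.upper() == "NONE":
        target = ""  # make sure `target` is always a string
    if target not in KNOWN_TARGETS:
        raise InvalidTargetException("unknown target: %r" % target)
    return target

assert normalize_target(None) == ""
assert normalize_target("none") == ""
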
Example #46
0
 def register_type(cls, symbol):
     """
     Registers a new type into the system.
     :param symbol: a single type symbol.
     :type symbol: UnitTypeSymbol
     """
     if not symbol.is_primitive() and symbol.unit.get_name() not in cls.name2type.keys():
         cls.name2type[symbol.unit.get_name()] = symbol
         code, message = Messages.get_new_type_registered(symbol.unit.get_name())
         Logger.log_message(code=code, message=message, log_level=LoggingLevel.INFO)
     return
Example #47
0
 def visit_declaration(self, node):
     """
     Checks if the coco applies.
     :param node: a single declaration.
     :type node: ASTDeclaration.
     """
     if node.is_function and not node.has_expression():
         code, message = Messages.get_no_rhs(node.get_variables()[0].get_name())
         Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR,
                            code=code, message=message)
     return
Example #48
0
 def visit_declaration(self, node):
     """
     Checks the coco.
     :param node: a single declaration.
     :type node: ast_declaration
     """
     if node.is_function and len(node.get_variables()) > 1:
         code, message = Messages.get_several_lhs([var.get_name() for var in node.get_variables()])
         Logger.log_message(error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR,
                            code=code, message=message)
     return
Example #49
0
 def analyse_transform_neurons(self, neurons):
     # type: (list(ASTNeuron)) -> None
     """
     Analyse and transform a list of neurons.
     :param neurons: a list of neurons.
     """
     for neuron in neurons:
         code, message = Messages.get_analysing_transforming_neuron(neuron.get_name())
         Logger.log_message(None, code, message, None, LoggingLevel.INFO)
         self.analyse_neuron(neuron)
         # now store the transformed model
         self.store_transformed_model(neuron)
Example #50
0
 def visit_ode_equation(self, node):
     """
     Ensures the coco.
     :param node: a single equation object.
     :type node: ast_ode_equation
     """
     symbol = node.get_scope().resolve_to_symbol(node.get_lhs().get_name_of_lhs(), SymbolKind.VARIABLE)
     if symbol is not None and not symbol.is_init_values():
         code, message = Messages.get_equation_var_not_in_init_values_block(node.get_lhs().get_name_of_lhs())
         Logger.log_message(code=code, message=message,
                            error_position=node.get_source_position(),
                            log_level=LoggingLevel.ERROR)
         return
Example #51
0
 def check_co_co(cls, node):
     """
     Ensures the coco for the handed over neuron.
     :param node: a single neuron instance.
     :type node: ast_neuron
     """
     for func in node.get_functions():
         if func.get_name() in cls.nest_name_space:
             code, message = Messages.get_nest_collision(func.get_name())
             Logger.log_message(error_position=func.get_source_position(),
                                code=code, message=message,
                                log_level=LoggingLevel.ERROR)
     return
Example #52
0
 def visit_assignment(self, node):
     """
     Checks the coco on the current node.
     :param node: a single node.
     :type node: ast_assignment
     """
     symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_name(), SymbolKind.VARIABLE)
     if (symbol is not None and symbol.block_type == BlockType.PARAMETERS and
             node.get_scope().get_scope_type() != ScopeType.GLOBAL):
         code, message = Messages.get_assignment_not_allowed(node.get_variable().get_complete_name())
         Logger.log_message(error_position=node.get_source_position(),
                            code=code, message=message,
                            log_level=LoggingLevel.ERROR)
     return
Example #53
0
 def get_unit(cls, name):
     """
     Returns a single UnitType if the corresponding unit has been predefined.
     :param name: the name of a unit
     :type name: str
     :return: a single UnitType object, or None
     :rtype: UnitType
     """
     if name in cls.name2unit:
         return cls.name2unit[name]
     else:
         code, message = Messages.get_unit_does_not_exist(name)
         Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR)
         return None
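
The lookup-or-log pattern used here (return the registered object, or log an error and return None) can be reproduced with a plain dict standing in for `name2unit`; the registry contents and the use of the standard logging module are made up for this sketch:

import logging

logger = logging.getLogger(__name__)
_name2unit = {"mV": "UnitType(mV)", "ms": "UnitType(ms)"}  # illustrative registry contents

def get_unit(name):
    """Return the registered unit for `name`, or log an error and return None."""
    unit = _name2unit.get(name)
    if unit is None:
        logger.error("Unit '%s' does not exist.", name)
    return unit

assert get_unit("mV") == "UnitType(mV)"
assert get_unit("furlong") is None
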
Example #54
0
 def visit_input_line(self, node):
     """
     Private method: Used to visit a single input line, create the corresponding symbol and update the scope.
     :param node: a single input line.
     :type node: ast_input_line
     """
     if node.is_spike() and node.has_datatype():
         node.get_datatype().update_scope(node.get_scope())
     elif node.is_spike():
         code, message = Messages.get_buffer_type_not_defined(node.get_name())
         Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                            log_level=LoggingLevel.WARNING)
     for inputType in node.get_input_types():
         inputType.update_scope(node.get_scope())
Example #55
0
    def visit_simple_expression(self, node):
        """
        Visits a single function call as stored in a simple expression and derives the correct type of all its
        parameters. :param node: a simple expression :type node: ASTSimpleExpression :rtype void
        """
        assert isinstance(node, ASTSimpleExpression), \
            '(PyNestML.Visitor.FunctionCallVisitor) No or wrong type of simple expression provided (%s)!' % type(node)
        assert (node.get_scope() is not None), \
            "(PyNestML.Visitor.FunctionCallVisitor) No scope found, run symboltable creator!"
        scope = node.get_scope()
        function_name = node.get_function_call().get_name()
        method_symbol = scope.resolve_to_symbol(function_name, SymbolKind.FUNCTION)
        # check if this function exists
        if method_symbol is None:
            code, message = Messages.get_could_not_resolve(function_name)
            Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
            return
        return_type = method_symbol.get_return_type()
        return_type.referenced_object = node

        # convolve symbol does not have a return type set.
        # returns whatever type the second parameter is.
        if function_name == PredefinedFunctions.CONVOLVE:
            # Deviations from the assumptions made here are handled in the convolveCoco
            buffer_parameter = node.get_function_call().get_args()[1]

            if buffer_parameter.get_variable() is not None:
                buffer_name = buffer_parameter.get_variable().get_name()
                buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE)
                if buffer_symbol_resolve is not None:
                    node.type = buffer_symbol_resolve.get_type_symbol()
                    return

            # getting here means there is an error with the parameters to convolve
            code, message = Messages.get_convolve_needs_buffer_parameter()
            Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
                               log_level=LoggingLevel.ERROR)
            node.type = ErrorTypeSymbol()
            return

        if isinstance(method_symbol.get_return_type(), VoidTypeSymbol):
            # todo by KP: the error message is not used here, @ptraeder fix this
            # error_msg = ErrorStrings.message_void_function_on_rhs(self, function_name, node.get_source_position())
            node.type = ErrorTypeSymbol()
            return

        # if nothing special is handled, just get the expression type from the return type of the function
        node.type = return_type
Example #56
0
    def do_magnitude_conversion_rhs_to_lhs(_rhs_type_symbol, _lhs_type_symbol, _containing_expression):
        """
        Determine the conversion factor from rhs to lhs, register it with the relevant expression, and emit a warning.
        """
        _containing_expression.set_implicit_conversion_factor(
            UnitTypeSymbol.get_conversion_factor(_lhs_type_symbol.astropy_unit,
                                                 _rhs_type_symbol.astropy_unit))
        _containing_expression.type = _lhs_type_symbol

        code, message = Messages.get_implicit_magnitude_conversion(_lhs_type_symbol, _rhs_type_symbol,
                                                                   _containing_expression.get_implicit_conversion_factor())
        Logger.log_message(code=code, message=message,
                           error_position=_containing_expression.get_source_position(),
                           log_level=LoggingLevel.WARNING)
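
The factor registered here is the scale that turns a right-hand-side magnitude into the left-hand-side unit. Assuming `UnitTypeSymbol.get_conversion_factor` boils down to an astropy unit conversion (an assumption about its implementation; the snippet only shows that both type symbols expose an `astropy_unit`), the computation looks like:

from astropy import units as u

def conversion_factor(lhs_unit, rhs_unit):
    # factor f such that <magnitude in rhs_unit> * f == <magnitude in lhs_unit>
    return (1 * rhs_unit).to(lhs_unit).value

# assigning a value given in volts to a millivolt-typed lhs scales it by 1000
assert conversion_factor(u.mV, u.V) == 1000.0
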
Example #57
0
    def convert_name_reference(self, variable):
        """
        Converts a single variable to nest processable format.
        :param variable: a single variable.
        :type variable: ASTVariable
        :return: a nest processable format.
        :rtype: str
        """
        from pynestml.codegeneration.nest_printer import NestPrinter
        assert (variable is not None and isinstance(variable, ASTVariable)), \
            '(PyNestML.CodeGeneration.NestReferenceConverter) No or wrong type of uses-gsl provided (%s)!' % type(
                variable)
        variable_name = NestNamesConverter.convert_to_cpp_name(variable.get_complete_name())

        if PredefinedUnits.is_unit(variable.get_complete_name()):
            return str(
                UnitConverter.get_factor(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit()))
        if variable_name == PredefinedVariables.E_CONSTANT:
            return 'numerics::e'

        symbol = variable.get_scope().resolve_to_symbol(variable_name, SymbolKind.VARIABLE)
        if symbol is None:
            # this should actually not happen, but an error message is better than an exception
            code, message = Messages.get_could_not_resolve(variable_name)
            Logger.log_message(log_level=LoggingLevel.ERROR, code=code, message=message,
                               error_position=variable.get_source_position())
            return ''

        vector_suffix = '[i]' if symbol.has_vector_parameter() else ''
        if symbol.is_local():
            return variable_name + vector_suffix
        if symbol.is_buffer():
            return NestPrinter.print_origin(symbol) + NestNamesConverter.buffer_value(symbol) + vector_suffix
        if symbol.is_function:
            return 'get_' + variable_name + '()' + vector_suffix
        if symbol.is_init_values():
            names_converter = GSLNamesConverter if self.uses_gsl else NestNamesConverter
            return NestPrinter.print_origin(symbol) + names_converter.name(symbol) + vector_suffix
        return NestPrinter.print_origin(symbol) + NestNamesConverter.name(symbol) + vector_suffix
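
The branching above reduces to a small decision table: pick an access expression based on the symbol's kind, then append `[i]` whenever the symbol is a vector. A toy version in the same spirit; the `B_.`/`S_.`/`P_.` prefixes are placeholders for whatever `NestPrinter.print_origin` produces, not verified output:

def access_expression(name, kind, is_vector):
    """Toy mapping from a symbol kind to a C++-style access expression."""
    base = {
        "local": name,
        "buffer": "B_." + name,             # placeholder for print_origin + buffer_value
        "function": "get_" + name + "()",
        "state": "S_." + name,              # placeholder for print_origin + name (init values)
        "parameter": "P_." + name,
    }[kind]
    return base + ("[i]" if is_vector else "")

assert access_expression("V_m", "state", False) == "S_.V_m"
assert access_expression("g_ex", "function", True) == "get_g_ex()[i]"
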
Example #58
0
 def visit_expression(self, node):
     """
     Visits a single rhs but does not execute any steps besides printing a message. This
     visitor indicates that no functionality has been implemented for this type of node.
     :param node: a single rhs
     :type node: ast_expression or ast_simple_expression
     """
     error_msg = ErrorStrings.message_no_semantics(self, str(node), node.get_source_position())
     node.type = ErrorTypeSymbol()
     # just warn though
     Logger.log_message(message=error_msg,
                        code=MessageCode.NO_SEMANTICS,
                        error_position=node.get_source_position(),
                        log_level=LoggingLevel.WARNING)
     return
Example #59
0
 def visit_function_call(self, node):
     """
     Checks the coco on the current function call.
     :param node: a single function call.
     :type node: ast_function_call
     """
     f_name = node.get_name()
     if f_name == PredefinedFunctions.CURR_SUM or \
             f_name == PredefinedFunctions.COND_SUM or f_name == PredefinedFunctions.CONVOLVE:
         for arg in node.get_args():
             if not isinstance(arg, ASTSimpleExpression) or not arg.is_variable():
                 code, message = Messages.get_not_a_variable(str(arg))
                 Logger.log_message(code=code, message=message,
                                    error_position=arg.get_source_position(), log_level=LoggingLevel.ERROR)
     return
Example #60
0
 def visit_input_line(self, node):
     """
     Checks the coco on the current node.
     :param node: a single input line node.
     :type node: ast_input_line
     """
     if node.is_spike() and not node.has_datatype():
         code, message = Messages.get_data_type_not_specified(node.get_name())
         Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR,
                            code=code, message=message)
     if node.is_current() and node.has_datatype():
         code, message = Messages.get_not_type_allowed(node.get_name())
         Logger.log_message(error_position=node.get_source_position(),
                            code=code, message=message,
                            log_level=LoggingLevel.ERROR)
     return
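
Taken together, the two checks encode a simple rule: spike buffers must declare a datatype, current buffers must not. A stand-alone sketch of that rule (assuming an input line is either spike or current, as in NESTML):

def input_line_problems(is_spike, has_datatype):
    """Return the rule violations for a single input line."""
    problems = []
    if is_spike and not has_datatype:
        problems.append("spike buffer: data type not specified")
    if not is_spike and has_datatype:
        problems.append("current buffer: no data type allowed")
    return problems

assert input_line_problems(is_spike=True, has_datatype=False) == ["spike buffer: data type not specified"]
assert input_line_problems(is_spike=False, has_datatype=False) == []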