Example #1
0
    def _replace_mem_call(self, match):
        func, label_name, flag = match.groups()
        size = '0'

        if func not in self.mem_function_map:
            raise NotImplementedError(
                "Model of {} is not supported".format(func))
        elif not self.mem_function_map[func]:
            raise NotImplementedError(
                "Set implementation for the function {}".format(func))

        if isinstance(self.signature, Pointer):
            if func == 'ALLOC' and self.ualloc_flag:
                # Do not allocate memory for unknown resources to avoid incomplete type errors
                func = 'UALLOC'
            if get_conf_property(self._conf,
                                 'disable ualloc') and func == 'UALLOC':
                func = 'ALLOC'
            if func != 'UALLOC' and get_conf_property(self._conf,
                                                      'allocate with sizeof'):
                size = 'sizeof({})'.format(
                    self.signature.points.to_string(
                        '', typedef='complex_and_params'))

            return "{}({})".format(self.mem_function_map[func], size)
        else:
            raise ValueError('This is not a pointer')
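The method above is a callback for a regex substitution over generated code. Below is a minimal standalone sketch of the same idea; the placeholder pattern, the two-group match and the function map are hypothetical and simpler than the project's real ones.

import re

# Hypothetical mapping from model macros to C allocation functions.
MEM_FUNCTION_MAP = {'ALLOC': 'ldv_xmalloc', 'UALLOC': 'ldv_xmalloc_unknown_size'}

def replace_mem_call(match):
    func, label = match.groups()
    if func not in MEM_FUNCTION_MAP:
        raise NotImplementedError("Model of {} is not supported".format(func))
    # Size '0' stands for an unknown allocation size.
    return "{}(0)".format(MEM_FUNCTION_MAP[func])

# Assumed placeholder syntax '$ALLOC(label)'; the real pattern differs.
print(re.sub(r'\$(ALLOC|UALLOC)\((\w+)\)', replace_mem_call, 'ptr = $ALLOC(resource);'))
# -> ptr = ldv_xmalloc(0);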
Example #2
0
def generate_processes(emg, source, processes, conf, specifications):
    """
    This generator generates processes for verifying Linux kernel modules. It generates the main process which calls
    module and kernel initialization functions and then module exit functions.

    :param emg: EMG Plugin object.
    :param source: Source collection object.
    :param processes: ProcessCollection object.
    :param conf: Configuration dictionary of this generator.
    :param specifications: Dictionary with required specifications of the required kinds.
    :return: None
    """
    functions_collection = dict()

    # Import Specifications
    emg.logger.info(
        "Generate an entry process on the basis of the given functions list")
    if processes.entry:
        raise ValueError(
            'Do not expect any main process to be already attached to the model; reorder EMG generators in the '
            'configuration')

    # Read configuration in abstract task
    emg.logger.info("Determine functions to call in the environment model")

    expressions = [
        re.compile(e) for e in get_conf_property(conf, "functions to call")
    ]
    strict = get_conf_property(conf, "prefer not called")
    for func in source.source_functions:
        obj = source.get_source_function(func)
        if not obj.static and (not strict or strict and len(obj.called_at) == 0) and \
                (not expressions or any(e.fullmatch(func) for e in expressions)):
            emg.logger.info(
                "Add function {!r} to call in the environment model".format(
                    func))
            functions_collection[func] = obj
        else:
            continue

    if len(functions_collection) == 0:
        raise ValueError(
            "There are no suitable functions to call in the environment model")

    # Read configuration in private configuration about headers
    # todo: in current implementation it is useless but may help in future
    # headers_map = get_conf_property(conf, "additional headers")
    # if headers_map:
    #     for func in (f for f in set(headers_map.keys).intersection(set(functions_list))):
    #         functions_collection[func].headers.extend(headers_map[func])

    # Generate scenario
    emg.logger.info('Generate main scenario')
    new = __generate_calls(emg.logger, emg, conf, functions_collection)
    processes.entry = new
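The selection loop above can be exercised in isolation. A small standalone sketch of the same filter with toy function records instead of a Source collection; the names are made up, and strict corresponds to the "prefer not called" option.

import re
from collections import namedtuple

Func = namedtuple('Func', 'static called_at')

source_functions = {
    'usb_register_driver': Func(static=False, called_at=[]),
    'helper': Func(static=True, called_at=[]),
    'already_called': Func(static=False, called_at=['init.c']),
}
expressions = [re.compile(e) for e in ['usb_.*']]  # "functions to call"
strict = True  # "prefer not called"

selected = {
    name: obj for name, obj in source_functions.items()
    if not obj.static and (not strict or not obj.called_at) and
    (not expressions or any(e.fullmatch(name) for e in expressions))
}
print(sorted(selected))  # -> ['usb_register_driver']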
Example #3
0
    def _compose_control_function(self, automaton):
        self._logger.info(
            'Generate label-based control function for automaton {} based on process {} of category {}'
            .format(automaton.identifier, automaton.process.name,
                    automaton.process.category))

        # Get function prototype
        cf = self._control_function(automaton)
        cf.definition_file = self._cmodel.entry_file

        # Do process initialization
        model_flag = True
        if automaton not in self._model_fsa:
            model_flag = False
            if not get_conf_property(self._conf,
                                     'direct control functions calls'
                                     ) and automaton is not self._entry_fsa:
                if automaton.self_parallelism and \
                        get_necessary_conf_property(self._conf, "self parallel processes") and \
                        get_conf_property(self._conf, 'pure pthread interface'):
                    for var in self.__thread_variable(automaton, 'pair'):
                        self._cmodel.add_global_variable(
                            var, self._cmodel.entry_file, False)
                elif automaton.self_parallelism and get_necessary_conf_property(
                        self._conf, "self parallel processes"):
                    self._cmodel.add_global_variable(self.__thread_variable(
                        automaton, 'array'),
                                                     self._cmodel.entry_file,
                                                     extern=False)
                else:
                    self._cmodel.add_global_variable(self.__thread_variable(
                        automaton, 'single'),
                                                     self._cmodel.entry_file,
                                                     extern=False)

        # Generate function body
        label_based_function(self._conf, self._source, automaton, cf,
                             model_flag)

        # Add function to source code to print
        self._cmodel.add_function_definition(cf)
        self._cmodel.add_function_declaration(self._cmodel.entry_file,
                                              cf,
                                              extern=True)
        if model_flag:
            for file in self._source.get_source_function(
                    automaton.process.name).declaration_files:
                self._cmodel.add_function_declaration(file, cf, extern=True)
        return
Example #4
0
    def __init__(self, logger, conf, interfaces, processes):
        self.logger = logger
        self.conf = conf
        self.__abstr_model_processes = {
            p.name: p
            for p in processes.models.values()
        }
        self.__abstr_event_processes = {
            p.name: p
            for p in processes.environment.values()
        }

        self.model_processes = []
        self.event_processes = []

        # Generate intermediate model
        self.logger.info("Generate an intermediate model")
        self.__select_processes_and_models(interfaces)

        # Convert callback access according to container fields
        self.logger.info(
            "Determine particular interfaces and their implementations for each label or its field"
        )
        self.__resolve_accesses(interfaces)

        # Sanity check
        for process in self.model_processes + self.event_processes:
            if not process.category:
                raise ValueError("Found process without category {!r}".format(
                    process.name))

        # Refine processes
        if get_conf_property(conf, "delete unregistered processes"):
            self.__refine_processes()
Example #5
0
 def _call_cf_code(self, automaton, parameter='0'):
     if automaton.self_parallelism and get_necessary_conf_property(self._conf, 'self parallel processes') and \
             get_conf_property(self._conf, 'pure pthread interface'):
         for var in self.__thread_variable(automaton, 'pair'):
             self._cmodel.add_global_variable(var,
                                              self._cmodel.entry_file,
                                              extern=True)
         # Leave the first parameter to be filled in twice later
         return 'pthread_create({}, 0, {}, {});'.\
             format('{}', self._control_function(automaton).name, parameter)
     else:
         if automaton.self_parallelism and get_necessary_conf_property(
                 self._conf, 'self parallel processes'):
             sv = self.__thread_variable(automaton, 'array')
             self._cmodel.add_global_variable(sv,
                                              self._cmodel.entry_file,
                                              extern=True)
             return 'pthread_create_N({}, 0, {}, {});'.\
                 format(sv.name, self._control_function(automaton).name, parameter)
         else:
             sv = self.__thread_variable(automaton, 'single')
             self._cmodel.add_global_variable(sv,
                                              self._cmodel.entry_file,
                                              extern=True)
             return 'pthread_create({}, 0, {}, {});'.\
                 format('& ' + sv.name, self._control_function(automaton).name, parameter)
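The three branches above only differ in which thread variable and which pthread wrapper they emit. A standalone sketch of that decision using plain strings; ldv_thread and ldv_thread_set are placeholder variable names, not the names produced by __thread_variable.

def call_cf_code(cf_name, self_parallel, pure_pthread, parameter='0'):
    """Return the C statement used to start a control function thread."""
    if self_parallel and pure_pthread:
        # The first argument stays '{}' to be filled in later for each pair variable.
        return 'pthread_create({}, 0, {}, {});'.format('{}', cf_name, parameter)
    if self_parallel:
        return 'pthread_create_N({}, 0, {}, {});'.format('ldv_thread_set', cf_name, parameter)
    return 'pthread_create({}, 0, {}, {});'.format('& ldv_thread', cf_name, parameter)

print(call_cf_code('ldv_main_cf', self_parallel=False, pure_pthread=False))
# -> pthread_create(& ldv_thread, 0, ldv_main_cf, 0);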
Example #6
0
def label_based_function(conf, analysis, automaton, cf, model=True):
    v_code, f_code = list(), list()

    # Determine returning expression for reuse
    if not get_conf_property(conf, 'direct control functions calls') and not model:
        ret_expression = 'return 0;'
    else:
        ret_expression = 'return;'

    if model:
        kfunction_obj = analysis.get_source_function(automaton.process.name)
        if kfunction_obj.declaration.return_value and kfunction_obj.declaration.return_value.identifier != 'void':
            ret_expression = None

    # Initialize variables
    f_code.extend(initialize_automaton_variables(conf, automaton))
    for var in automaton.variables(only_used=True):
        v_code.append(var.declare_with_init() + ';')

    main_v_code, main_f_code = __label_sequence(automaton, list(automaton.fsa.initial_states)[0], ret_expression)
    v_code.extend(main_v_code)
    f_code.extend(main_f_code)
    f_code.append("/* End of the process */")
    if ret_expression:
        f_code.append(ret_expression)

    processed = []
    for subp in [s for s in sorted(automaton.fsa.states, key=lambda s: s.identifier)
                 if isinstance(s.action, Subprocess)]:
        if subp.action.name not in processed:
            sp_v_code, sp_f_code = __label_sequence(automaton, list(subp.successors)[0], ret_expression)

            v_code.extend(sp_v_code)
            f_code.extend([
                '',
                '/* Subprocess {} */'.format(subp.action.name),
                'ldv_{}_{}:'.format(subp.action.name, automaton.identifier)
            ])
            f_code.extend(sp_f_code)
            f_code.append("/* End of the subprocess '{}' */".format(subp.action.name))
            if ret_expression:
                f_code.append(ret_expression)
            processed.append(subp.action.name)

    v_code = [model_comment('CONTROL_FUNCTION_INIT_BEGIN', 'Declare auxiliary variables.')] + \
             v_code + \
             [model_comment('CONTROL_FUNCTION_INIT_END', 'Declare auxiliary variables.')]
    if model:
        name = automaton.process.name
        v_code.insert(0, control_function_comment_begin(cf.name, automaton.process.comment))
    else:
        name = '{}({})'.format(automaton.process.name, automaton.process.category)
        v_code.insert(0, control_function_comment_begin(cf.name, automaton.process.comment, automaton.identifier))
    f_code.append(control_function_comment_end(cf.name, name))
    cf.body.extend(v_code + f_code)

    return cf.name
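Each subprocess is emitted exactly once as a labelled block so that generated transitions can jump to it. A standalone sketch of that emission with a hypothetical subprocess name, statement and automaton identifier.

def emit_subprocess_blocks(subprocesses, automaton_id):
    """Emit each subprocess once as a labelled block of C statements."""
    lines, processed = [], []
    for name, statements in subprocesses:
        if name in processed:
            continue
        lines.append('/* Subprocess {} */'.format(name))
        lines.append('ldv_{}_{}:'.format(name, automaton_id))
        lines.extend(statements)
        lines.append("/* End of the subprocess '{}' */".format(name))
        processed.append(name)
    return lines

print('\n'.join(emit_subprocess_blocks([('scan', ['ldv_scan();'])], 7)))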
Example #7
0
    def _join_cf(self, automaton):
        """
        Generate statement to join control function thread if it is called in a separate thread.

        :param automaton: Automaton object.
        :return: String expression.
        """
        self._cmodel.add_function_declaration(self._cmodel.entry_file, self._control_function(automaton), extern=True)

        if get_conf_property(self._conf, 'direct control functions calls'):
            return '/* Skip thread join call */'
        else:
            return self._join_cf_code(automaton)
Example #8
0
    def _call_cf(self, automaton, parameter='0'):
        """
        Generate statement with control function call.

        :param automaton: Automaton object.
        :param parameter: String with argument of the control function.
        :return: String expression.
        """
        self._cmodel.add_function_declaration(self._cmodel.entry_file, self._control_function(automaton), extern=True)

        if get_conf_property(self._conf, 'direct control functions calls'):
            return '{}({});'.format(self._control_function(automaton).name, parameter)
        else:
            return self._call_cf_code(automaton, parameter)
Example #9
0
 def _entry_point(self):
     self._logger.info(
         "Finally generate an entry point function {!r}".format(
             self._cmodel.entry_name))
     if get_conf_property(self._conf, "self parallel model"):
         self._control_function(
             self._entry_fsa).declaration = import_declaration(
                 "void *(*start_routine)(void *)")
         name = self._control_function(self._entry_fsa).name
         body = [
             "pthread_t **thread;",
             "pthread_create_N(thread, 0, {}, 0);".format(name),
             "pthread_join_N(thread, {});".format(name)
         ]
     else:
         body = [
             '{}(0);'.format(self._control_function(self._entry_fsa).name)
         ]
     return self._cmodel.compose_entry_point(body)
Example #10
0
def yield_categories(collection):
    """
    Analyze all new types found by the SA component and yield the final set of interface categories built from manually
    prepared interface specifications and global variables. All new categories and interfaces are added directly to the
    InterfaceCategoriesSpecification object. Also, all type declarations are updated according to the newly imported C
    types. However, unused interfaces are still present in the collection after this function terminates.

    :param collection: InterfaceCategoriesSpecification object.
    :return: None
    """

    # Add resources
    if get_conf_property(collection.conf, "generate new resource interfaces"):
        __populate_resources(collection)

    # Complement interface references
    __complement_interfaces(collection)

    return
Example #11
0
def initialize_automaton_variables(conf, automaton):
    """
    Initialize automaton variables with either external allocated data function calls or some known explicit values and
    return the code of such initialization.

    :param conf: Translator configuration dictionary.
    :param automaton: Automaton object.
    :return: List of C variables initializations.
    """
    initializations = []
    for var in automaton.variables():
        if isinstance(var.declaration, Pointer) and get_conf_property(conf, 'allocate external'):
            var.use += 1
            initializations.append("{} = external_allocated_data();".format(var.name))
        elif isinstance(var.declaration, Primitive) and var.value:
            var.use += 1
            initializations.append('{} = {};'.format(var.name, var.value))

    if len(initializations) > 0:
        initializations.insert(0, '/* Initialize automaton variables */')
    return initializations
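A standalone usage sketch of the same initialization rule, with dummy variable objects and a plain flag in place of the Pointer/Primitive checks and the 'allocate external' option.

class Var:
    def __init__(self, name, is_pointer=False, value=None):
        self.name, self.is_pointer, self.value, self.use = name, is_pointer, value, 0

def initialize_variables(variables, allocate_external=True):
    initializations = []
    for var in variables:
        if var.is_pointer and allocate_external:
            var.use += 1
            initializations.append("{} = external_allocated_data();".format(var.name))
        elif not var.is_pointer and var.value:
            var.use += 1
            initializations.append('{} = {};'.format(var.name, var.value))
    if initializations:
        initializations.insert(0, '/* Initialize automaton variables */')
    return initializations

print(initialize_variables([Var('ldv_arg', is_pointer=True), Var('ldv_cnt', value='0')]))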
Example #12
0
 def _join_cf_code(self, automaton):
     if automaton.self_parallelism and get_necessary_conf_property(self._conf, 'self parallel processes') and \
             get_conf_property(self._conf, 'pure pthread interface'):
         for var in self.__thread_variable(automaton, 'pair'):
             self._cmodel.add_global_variable(var,
                                              self._cmodel.entry_file,
                                              extern=True)
         return 'pthread_join({}, 0);'
     else:
         if automaton.self_parallelism and get_necessary_conf_property(
                 self._conf, 'self parallel processes'):
             sv = self.__thread_variable(automaton, 'array')
             self._cmodel.add_global_variable(sv,
                                              self._cmodel.entry_file,
                                              extern=True)
             return 'pthread_join_N({}, 0);'.format(sv.name)
         else:
             sv = self.__thread_variable(automaton, 'single')
             self._cmodel.add_global_variable(sv,
                                              self._cmodel.entry_file,
                                              extern=True)
             return 'pthread_join({}, 0);'.format(sv.name)
Example #13
0
def generate_processes(emg, source, processes, conf, specifications):
    """
    This generator reads a manually prepared environment model description. Some of its processes are simply added to
    the already generated model, while generated processes with the same names are replaced by the new manually
    prepared ones. Thus, a user can obtain an automatically generated model by setting a translator option, modify it,
    and rerun EMG so that it generates the model with the desired properties without modifying any specifications.

    :param emg: EMG Plugin object.
    :param source: Source collection object.
    :param processes: ProcessCollection object.
    :param conf: Configuration dictionary of this generator.
    :param specifications: Dictionary with required specifications of the required kinds.
    :return: None.
    """
    # Import Specifications
    or_models = list(processes.models.values())
    or_processes = list(processes.environment.values())
    or_entry = processes.entry

    all_instance_maps = specifications["manual event models"].get(
        "specification")
    fragment_name = emg.abstract_task_desc['fragment']
    descriptions = None
    for imap in all_instance_maps.get("manual event models", []):
        if fragment_name in imap.get('fragments', []):
            descriptions = imap.get("model", None)

    # Import manual process
    if descriptions and ("functions models" in descriptions
                         or "environment processes" in descriptions):
        manual_processes = ProcessCollection(emg.logger, emg.conf)
        manual_processes.parse_event_specification(descriptions)

        # Decide on process replacements
        if manual_processes.entry:
            if (get_conf_property(conf, "enforce replacement")
                    and or_entry) or not or_entry:
                or_entry = manual_processes.entry

        # Replace rest processes
        for collection, manual in ((or_models,
                                    manual_processes.models.values()),
                                   (or_processes,
                                    manual_processes.environment.values())):
            for process in manual:
                if process.pretty_id in {p.pretty_id for p in collection} and \
                        get_conf_property(conf, "enforce replacement"):
                    collection[[p.pretty_id for p in collection
                                ].index(process.pretty_id)] = process
                elif process.pretty_id not in {
                        p.pretty_id
                        for p in collection
                }:
                    collection.insert(0, process)
    else:
        emg.logger.info(
            "There is no specification for {!r} or it has invalid format".
            format(fragment_name))

    processes.entry = or_entry
    processes.models = {p.pretty_id: p for p in or_models}
    processes.environment = {p.pretty_id: p for p in or_processes}
    processes.establish_peers(strict=True)
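The replacement rule above can be shown with plain dictionaries keyed by pretty_id; 'enforce replacement' corresponds to the configuration property checked in the code, and the process data is made up.

def merge_processes(generated, manual, enforce_replacement):
    """Replace or prepend manually prepared processes identified by pretty_id."""
    for process in manual:
        ids = [p['pretty_id'] for p in generated]
        if process['pretty_id'] in ids and enforce_replacement:
            generated[ids.index(process['pretty_id'])] = process
        elif process['pretty_id'] not in ids:
            generated.insert(0, process)
    return generated

generated = [{'pretty_id': 'usb/probe', 'origin': 'generated'}]
manual = [{'pretty_id': 'usb/probe', 'origin': 'manual'}]
print(merge_processes(generated, manual, enforce_replacement=True))
# -> [{'pretty_id': 'usb/probe', 'origin': 'manual'}]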
Example #14
0
    def _import_code_analysis(self, cfiles, dependencies):
        """
        Read global variables, functions and macros to fill up the collection.

        :param cfiles: Collection of C files of interest.
        :param dependencies: Dictionary that maps dependency files to sets of files that use them.
        :return: None.
        """
        # Import typedefs if they are provided
        self.logger.info("Extract complete types definitions")
        typedef = self._clade.get_typedefs(
            set(dependencies.keys()).union(cfiles))
        if typedef:
            import_typedefs(typedef, dependencies)

        variables = self._clade.get_variables(cfiles)
        if variables:
            self.logger.info("Import global variables initializations")
            for path, vals in variables.items():
                for variable in vals:
                    variable_name = extract_name(variable['declaration'])
                    if not variable_name:
                        raise ValueError('Global variable without a name')
                    var = Variable(variable_name, variable['declaration'])

                    # Here we know that if we meet a variable in another file then it is a different variable, because
                    # a program should contain a single initialization for each global variable
                    self.set_source_variable(var, path)
                    var.declaration_files.add(path)
                    var.initialization_file = path
                    var.static = is_static(variable['declaration'])

                    if 'value' in variable:
                        var.value = variable['value']

        # Functions which are used in variable initializations
        self.logger.info("Import source functions")
        vfunctions = self._clade.get_used_in_vars_functions()

        # Get functions that are defined in dependencies and in the main files and that have calls
        cg = self._clade.get_callgraph(set(dependencies.keys()))

        # Function scope definitions
        # todo: maybe this should be fixed in Clade
        # As we will not get definitions for library functions if they are in compiled parts, we should add all scopes
        # that are given for all functions called from outside of the code we analyze
        for scope in (s for s in cfiles if s in cg):
            for func in (f for f in cg[scope] if cg[scope][f].get('calls')):
                for dep in cg[scope][func].get('calls'):
                    dependencies.setdefault(dep, set())
                    dependencies[dep].add(scope)
        fs = self._clade.get_functions_by_file(
            set(dependencies.keys()).union(cfiles))

        # Add called functions
        for scope in cg:
            for func in cg[scope]:
                desc = cg[scope][func]
                if scope in cfiles:
                    # Definition of the function is in the code of interest
                    self._add_function(func, scope, fs, dependencies, cfiles)
                    # Add called functions
                    for def_scope, cf_desc in desc.get('calls',
                                                       dict()).items():
                        if def_scope not in cfiles:
                            for called_func in (
                                    f for f in cf_desc
                                    if def_scope in fs and f in fs[def_scope]):
                                self._add_function(called_func, def_scope, fs,
                                                   dependencies, cfiles)

                elif ('called_in' in desc
                      and set(desc['called_in'].keys()).intersection(cfiles)
                      ) or func in vfunctions:
                    if scope in fs and func in fs[scope]:
                        # Function is called in the target code but defined in dependencies
                        self._add_function(func, scope, fs, dependencies,
                                           cfiles)
                    elif scope != 'unknown':
                        self.logger.warning(
                            "There is no information on declarations of function {!r} from {!r} scope"
                            .format(func, scope))
        # Add functions missed in the call graph
        for scope in (s for s in fs if s in cfiles):
            for func in fs[scope]:
                func_intf = self.get_source_function(func, scope)
                if not func_intf:
                    self._add_function(func, scope, fs, dependencies, cfiles)

        for func in self.source_functions:
            for obj in self.get_source_functions(func):
                scopes = set(obj.declaration_files).union(set(
                    obj.header_files))
                if not obj.definition_file:
                    # It is likely to be this way
                    scopes.add('unknown')
                for scope in (s for s in scopes
                              if cg.get(s, dict()).get(func)):
                    for cscope, desc in ((s, d)
                                         for s, d in cg[scope][func].get(
                                             'called_in', {}).items()
                                         if s in cfiles):
                        for caller in desc:
                            for line in desc[caller]:
                                params = desc[caller][line].get('args')
                                caller_intf = self.get_source_function(
                                    caller, cscope)
                                obj.add_call(caller, cscope)

                                if params:
                                    # There can be functions here which are not defined or visible
                                    for _, passed_func in list(params):
                                        passed_obj = self.get_source_function(
                                            passed_func, cscope)
                                        if not passed_obj:
                                            passed_scope = self._search_function(
                                                passed_func, cscope, fs)
                                            if passed_scope:
                                                self._add_function(
                                                    passed_func, passed_scope,
                                                    fs, dependencies, cfiles)
                                            else:
                                                self.logger.warning(
                                                    "Cannot find function {!r} from scope {!r}"
                                                    .format(
                                                        passed_func, cscope))
                                                # Ignore this call since model will not be correct without signature
                                                params = None
                                                break
                                    caller_intf.call_in_function(obj, params)

        macros_file = get_conf_property(self._conf['source analysis'],
                                        'macros white list')
        if macros_file:
            macros_file = find_file_or_dir(
                self.logger, self._conf['main working directory'], macros_file)
            with open(macros_file, 'r', encoding='utf8') as fp:
                white_list = ujson.load(fp)
            if white_list:
                macros = self._clade.get_macros_expansions(cfiles, white_list)
                for path, macros in macros.items():
                    for macro, desc in macros.items():
                        obj = self.get_macro(macro)
                        if not obj:
                            obj = Macro(macro)
                        for call in desc.get('args', []):
                            obj.add_parameters(path, call)
                        self.set_macro(obj)
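The scope-propagation step in the middle of the method can be illustrated with a toy call graph of the same shape (file -> function -> 'calls' -> definition file); the file and function names are made up.

# Toy call graph: file -> function -> {'calls': {definition file: {called function: ...}}}
callgraph = {
    'driver.c': {'probe': {'calls': {'lib.c': {'helper': {}}}}},
}
cfiles = {'driver.c'}
dependencies = {'lib.c': set()}

for scope in (s for s in cfiles if s in callgraph):
    for func in (f for f in callgraph[scope] if callgraph[scope][f].get('calls')):
        for dep in callgraph[scope][func]['calls']:
            dependencies.setdefault(dep, set())
            dependencies[dep].add(scope)

print(dependencies)  # -> {'lib.c': {'driver.c'}}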
Example #15
0
def __generate_call(emg, conf, ep, func, obj):
    # Add declaration of caller
    caller_func = Function("ldv_emg_{}_caller".format(func), "void a(void)")
    ep.add_declaration("environment model", caller_func.name,
                       caller_func.declare(True)[0])
    expression = ""
    body = []
    initializations = []

    # Check retval and cast to void call
    if obj.declaration.return_value and obj.declaration.return_value.identifier != 'void':
        expression += "(void) "

    # Get arguments and allocate memory for them
    args = []
    free_args = []
    for index, arg in enumerate(obj.declaration.parameters):
        if not isinstance(arg, str):
            argvar = Variable("ldv_arg_{}".format(index), arg)
            body.append(argvar.declare() + ";")
            args.append(argvar.name)
            if isinstance(arg, Pointer):
                elements = get_conf_property(
                    conf, "initialize strings as null terminated")
                if elements and arg.identifier == 'char **':
                    if isinstance(elements, int) or elements.isnumeric():
                        elements = int(elements)
                    else:
                        elements = 'ldv_undef_int()'
                    argvar_len = Variable(argvar.name + '_len', 'int')
                    # Define the number of elements explicitly, since an undefined value is sometimes too difficult
                    initializations.append("int {} = {};".format(
                        argvar_len.name, elements))
                    initializations.append(
                        "{} = (char **) ldv_xmalloc({} * sizeof(char *));".
                        format(argvar.name, argvar_len.name))
                    # Initialize all elements but the last one
                    initializations.append(
                        "for (int i = 0; i < {} - 1; i++)".format(
                            argvar_len.name))
                    # Some undefined data
                    initializations.append(
                        "\t{}[i] = (char *) external_allocated_data();".format(
                            argvar.name))
                    # The last element is a null pointer terminating the array
                    initializations.append("{}[{} - 1] = (char *) 0;".format(
                        argvar.name, argvar_len.name))
                    free_args.append(argvar.name)
                elif get_necessary_conf_property(
                        emg.conf["translation options"], "allocate external"):
                    value = "external_allocated_data();"
                    initializations.append("{} = {}".format(
                        argvar.name, value))
                else:
                    if get_necessary_conf_property(
                            emg.conf["translation options"],
                            "allocate with sizeof"):
                        apt = arg.points.to_string(
                            '', typedef='complex_and_params')
                        value = "ldv_xmalloc(sizeof({}));".\
                            format(apt if apt != 'void' else apt + '*')
                    else:
                        value = "ldv_xmalloc_unknown_size(0);"
                    free_args.append(argvar.name)
                    initializations.append("{} = {}".format(
                        argvar.name, value))

    # Generate call
    expression += "{}({});".format(func, ", ".join(args))

    # Generate function body
    body += initializations + [expression]

    # Free memory
    for arg in free_args:
        body.append("ldv_free({});".format(arg))

    caller_func.body = body

    # Add definition of caller
    ep.add_definition(obj.definition_file, caller_func.name,
                      caller_func.define() + ["\n"])

    # Return call expression
    return "{}();".format(caller_func.name)
Example #16
0
    def _dispatch_blocks(self, state, automaton, function_parameters,
                         automata_peers, replicative):
        pre = []
        post = []
        blocks = []

        for name in (n for n in automata_peers
                     if len(automata_peers[n]['states']) > 0):
            decl = self._get_cf_struct(automaton, function_parameters)
            cf_param = 'cf_arg_{}'.format(
                automata_peers[name]['automaton'].identifier)
            vf_param_var = Variable(cf_param, decl.take_pointer)
            pre.append(vf_param_var.declare() + ';')

            if replicative:
                for r_state in automata_peers[name]['states']:
                    block = list()
                    block.append('{} = {}(sizeof({}));'.format(
                        vf_param_var.name,
                        self._cmodel.mem_function_map["ALLOC"],
                        decl.identifier))
                    for index in range(len(function_parameters)):
                        block.append('{}->arg{} = arg{};'.format(
                            vf_param_var.name, index, index))
                    if r_state.action.replicative:
                        call = self._call_cf(automata_peers[name]['automaton'],
                                             cf_param)
                        if get_conf_property(self._conf,
                                             'direct control functions calls'):
                            block.append(call)
                        else:
                            if automata_peers[name]['automaton'].self_parallelism and \
                                    get_necessary_conf_property(self._conf, "self parallel processes") and \
                                    get_conf_property(self._conf, 'pure pthread interface'):
                                thread_vars = self.__thread_variable(
                                    automata_peers[name]['automaton'],
                                    var_type='pair')
                                for v in thread_vars:
                                    # Expect that for this particular case the first argument is unset
                                    block.extend([
                                        'ret = {}'.format(
                                            call.format("& " + v.name)),
                                        'ldv_assume(ret == 0);'
                                    ])
                            else:
                                block.extend([
                                    'ret = {}'.format(call),
                                    'ldv_assume(ret == 0);'
                                ])
                        blocks.append(block)
                        break
                    else:
                        self._logger.warning(
                            'Cannot generate dispatch based on labels for receive {} in process {} with category {}'
                            .format(
                                r_state.action.name,
                                automata_peers[name]['automaton'].process.name,
                                automata_peers[name]
                                ['automaton'].process.category))
            # todo: Pretty ugly, but works
            elif state.action.name.find('dereg') != -1:
                block = list()
                call = self._join_cf(automata_peers[name]['automaton'])
                if not get_conf_property(self._conf,
                                         'direct control functions calls'):
                    if automata_peers[name]['automaton'].self_parallelism and \
                            get_necessary_conf_property(self._conf, "self parallel processes") and \
                            get_conf_property(self._conf, 'pure pthread interface'):
                        thread_vars = self.__thread_variable(
                            automata_peers[name]['automaton'], var_type='pair')
                        for v in thread_vars:
                            # Expect that for this particular case the first argument is unset
                            block.extend([
                                'ret = {}'.format(call.format(v.name)),
                                'ldv_assume(ret == 0);'
                            ])
                    else:
                        block.extend(
                            ['ret = {}'.format(call), 'ldv_assume(ret == 0);'])
                    blocks.append(block)

        return pre, blocks, post
Example #17
0
    def compose_entry_point(self, given_body):
        """
        Generate an entry point function for the environment model.

        :param given_body: Body of the main function provided by a translator.
        :return: List of C statements of the generated function body.
        """
        ep = Function(self.entry_name, "int {}(void)".format(self.entry_name))
        ep.definition_file = self.entry_file
        body = [
            '/* LDV {' +
            '"thread": 1, "type": "CONTROL_FUNCTION_BEGIN", "comment": "Entry point \'{0}\'", '
            '"function": "{0}"'.format(self.entry_name) + '} */'
        ]

        # Init external allocated pointers
        cnt = 0
        functions = []
        if len(self.__external_allocated.keys()) > 0:
            for file in sorted([
                    f for f in self.__external_allocated.keys()
                    if len(self.__external_allocated[f]) > 0
            ]):
                func = Function(
                    'ldv_allocate_external_{}'.format(cnt),
                    "void ldv_allocate_external_{}(void)".format(cnt))
                func.declaration_files.add(file)
                func.definition_file = file

                init = [
                    "{} = {}();".format(var.name, 'external_allocated_data')
                    for var in self.__external_allocated[file]
                ]
                func.body = init

                self.add_function_definition(func)
                self.add_function_declaration(self.entry_file,
                                              func,
                                              extern=True)
                functions.append(func)
                cnt += 1

            gl_init = Function('ldv_initialize_external_data',
                               'void ldv_initialize_external_data(void)')
            gl_init.declaration_files.add(self.entry_file)
            gl_init.definition_file = self.entry_file
            init_body = ['{}();'.format(func.name) for func in functions]
            gl_init.body = init_body
            self.add_function_definition(gl_init)
            body.extend([
                '/* Initialize external data */',
                'ldv_initialize_external_data();'
            ])

        if get_conf_property(self._conf, "initialize requirements"):
            body += [
                '/* LDV {"action": "INIT", "type": "CALL_BEGIN", "callback": true, '
                '"comment": "Initialize requirement models."} */',
                'ldv_initialize();',
                '/* LDV {"action": "INIT", "type": "CALL_END"} */'
            ]

        body += ['/* LDV {"action": "SCENARIOS", "type": "CONDITION_BEGIN", '
                 '"comment": "Begin Environment model scenarios."} */'] + given_body + \
                ['/* LDV {"action": "SCENARIOS", "type": "CONDITION_END"} */']

        if get_conf_property(self._conf, "check final state"):
            body += [
                '/* LDV {"action": "FINAL", "callback": true, "type": "CALL_BEGIN", '
                '"comment": "Check requirement model final state at the exit if required."} */',
                'ldv_check_final_state();',
                '/* LDV {"action": "FINAL", "type": "CALL_END"} */'
            ]

        body.append('return 0;')
        body.append(
            '/* LDV {' +
            '"comment": "Exit entry point \'{0}\'", "type": "CONTROL_FUNCTION_END",'
            ' "function": "{0}"'.format(self.entry_name) + '} */')

        ep.body = body
        self.add_function_definition(ep)

        return body
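The generated entry point body always follows the same skeleton: optional external data initialization, optional requirement initialization, the scenarios block, an optional final state check and 'return 0;'. A sketch that assembles only that skeleton, with the LDV model comments abbreviated to plain C comments and a hypothetical control function call as the given body.

def entry_point_body(given_body, init_external_data=False,
                     init_requirements=True, check_final_state=True):
    body = []
    if init_external_data:
        body += ['/* Initialize external data */', 'ldv_initialize_external_data();']
    if init_requirements:
        body += ['/* Initialize requirement models */', 'ldv_initialize();']
    body += ['/* Begin Environment model scenarios */'] + given_body + \
            ['/* End Environment model scenarios */']
    if check_final_state:
        body += ['/* Check requirement model final state */', 'ldv_check_final_state();']
    body.append('return 0;')
    return body

print('\n'.join(entry_point_body(['ldv_main_cf(0);'])))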
Example #18
0
    def print_source_code(self, additional_lines):
        """
        Generate an environment model as C code. The code is distributed across aspect additions for original
        source files and the main environment model C code.

        :param additional_lines: Dictionary with the user-defined C code:
                                 {'file name': {'definitions': [...], 'declarations': []}}
        :return: Dictionary {'file': path to the generated file with the code}.
        """
        aspect_dir = "aspects"
        self._logger.info(
            "Create directory for aspect files {}".format(aspect_dir))
        os.makedirs(aspect_dir.encode('utf8'), exist_ok=True)

        if get_conf_property(self._conf["translation options"],
                             "propogate headers to instrumented files"):
            for file in (f for f in self.files if f in additional_lines):
                self.add_headers(
                    file,
                    get_necessary_conf_property(
                        self._conf["translation options"],
                        "additional headers"))

        addictions = dict()
        # Write aspects
        for file in self.files:
            lines = list()

            # Check headers
            if file == self.entry_file:
                if self.entry_file in self._headers:
                    lines.extend([
                        '#include <{}>\n'.format(h)
                        for h in self._collapse_headers_sets(self._headers[
                            self.entry_file])
                    ])
                lines.append("\n")

                for tp in self.types:
                    lines.append(tp.to_string('') + " {\n")
                    for field in list(tp.fields.keys()):
                        lines.append("\t{};\n".format(
                            tp.fields[field].to_string(
                                field, typedef='complex_and_params'),
                            scope={self.entry_file}))
                    lines.append("};\n")
                    lines.append("\n")
            else:
                # Generate function declarations
                self._logger.info('Add aspects to a file {!r}'.format(file))

                # Add headers
                if file in self._headers and self._headers[file]:
                    lines.append('before: file ("$this")\n')
                    lines.append('{\n')
                    lines.extend([
                        '#include <{}>\n'.format(h) for h in
                        self._collapse_headers_sets(self._headers[file])
                    ])
                    lines.append("\n")
                    lines.append("}\n\n")

                # Add model itself
                lines.append('after: file ("$this")\n')
                lines.append('{\n')

            if file in additional_lines and 'declarations' in additional_lines[file] and \
                    len(additional_lines[file]['declarations']) > 0:
                lines.append("\n")
                lines.append("/* EMG aliases */\n")
                lines.extend(additional_lines[file]['declarations'])

            if file in self._function_declarations:
                lines.append("\n")
                lines.append("/* EMG Function declarations */\n")
                for func in self._function_declarations[file].keys():
                    lines.extend(self._function_declarations[file][func])

            if file in self._variables_declarations:
                lines.append("\n")
                lines.append("/* EMG variable declarations */\n")
                for variable in self._variables_declarations[file].keys():
                    lines.extend(self._variables_declarations[file][variable])

            if file in self._variables_initializations and len(
                    self._variables_initializations[file]) > 0:
                lines.append("\n")
                lines.append("/* EMG variable initialization */\n")
                for variable in self._variables_initializations[file].keys():
                    lines.extend(
                        self._variables_initializations[file][variable])

            if file in additional_lines and 'definitions' in additional_lines[file] and \
                    len(additional_lines[file]['definitions']) > 0:
                lines.append("\n")
                lines.append("/* EMG aliases for functions */\n")
                lines.extend(additional_lines[file]['definitions'])

            if file in self._function_definitions and len(
                    self._function_definitions[file]) > 0:
                lines.append("\n")
                lines.append("/* EMG function definitions */\n")
                for func in self._function_definitions[file].keys():
                    lines.extend(self._function_definitions[file][func])
                    lines.append("\n")

            if file != self.entry_file:
                lines.append("}\n\n")

            if file in self._call_aspects and len(
                    self._call_aspects[file]) > 0:
                lines.append("/* EMG kernel function models */\n")
                for aspect in self._call_aspects[file]:
                    lines.extend(aspect.define())
                    lines.append("\n")

            if file != self.entry_file:
                name = "{}.aspect".format(
                    unique_file_name(
                        "aspects/ldv_" +
                        os.path.splitext(os.path.basename(file))[0],
                        '.aspect'))
                path = os.path.relpath(name, self._workdir)
                self._logger.info("Add aspect file {!r}".format(path))
                addictions[file] = path
            else:
                name = self.entry_file
            with open(name, "w", encoding="utf8") as fh:
                fh.writelines(lines)

        return addictions
Example #19
0
    def _dispatch(self, state, automaton):
        """
        Generate a code block for a dispatch action of the process for which the automaton is generated. A dispatch code
        block is always generated in a fixed form: as a call of an auxiliary function. Such a function contains a switch
        or if operator to choose one of the available optional receivers to send the signal to. The implementation of a
        particular dispatch to a particular receiver is configurable and can be implemented differently in various
        translators.

        :param state: State object.
        :param automaton: Automaton object which contains the dispatch.
        :return: [list of strings with lines of C code statements of the code block],
                 [list of strings with new local variable declarations required for the block],
                 [list of strings with boolean conditional expressions which guard code block entering],
                 [list of strings with model comments which embrace the code block]
        """
        code, v_code, conditions, comments = list(), list(), list(), list()

        # Determine peers to receive the signal
        automata_peers = dict()
        if len(state.action.peers) > 0:
            # Do call only if model which can be called will not hang
            extract_relevant_automata(self._event_fsa + self._model_fsa + [self._entry_fsa],
                                      automata_peers, state.action.peers, Receive)
        else:
            # Generate comment
            code.append("/* Dispatch {!r} is not expected by any process, skipping the action */".
                        format(state.action.name))

        # Make comments
        if len(automata_peers) > 0:
            category = list(automata_peers.values())[0]['automaton'].process.category.upper()
            comment = state.action.comment.format(category)
        else:
            comment = 'Skip the action, since no callbacks have been found.'
        comments.append(action_model_comment(state.action, comment, begin=True))
        comments.append(action_model_comment(state.action, None, begin=False))

        # Add given conditions from a spec
        conditions = []
        if state.action.condition and len(state.action.condition) > 0:
            for statement in state.action.condition:
                cn = self._cmodel.text_processor(automaton, statement)
                conditions.extend(cn)

        if len(automata_peers) > 0:
            # Add conditions on base of dispatches
            checks = self._relevant_checks(automata_peers)
            if len(checks) > 0:
                if automaton in self._model_fsa:
                    conditions.append("({})".format(' || '.join(checks)))
                else:
                    # Convert conditions into an assume, because according to the signal semantics a process cannot
                    # proceed until it sends a signal, and the condition describes a precondition that prevents sending
                    # the signal to a wrong process.
                    code.append('ldv_assume({});'.format(' || '.join(checks)))

            # Generate artificial function
            body = []

            if not get_conf_property(self._conf, 'direct control functions calls'):
                body = ['int ret;']

            # Check dispatch type
            replicative = False
            for name in automata_peers:
                for st in automata_peers[name]['states']:
                    if st.action.replicative:
                        replicative = True
                        break

            # Determine parameters
            df_parameters = []
            function_parameters = []

            # Add parameters
            for index in range(len(state.action.parameters)):
                # Determine dispatcher parameter
                # We expect strictly one
                dispatcher_access = automaton.process.resolve_access(state.action.parameters[index])[0]
                variable = automaton.determine_variable(dispatcher_access.label)
                function_parameters.append(variable.declaration)
                df_parameters.append(variable.name)

            # Generate blocks on each receive to another process
            # You can implement your own modelTranslator with different implementations of the function
            pre, blocks, post = self._dispatch_blocks(state, automaton, function_parameters, automata_peers,
                                                      replicative)
            if len(blocks) > 0:
                body.extend(pre)

                # Print body of a dispatching function
                if state.action.broadcast:
                    for block in blocks:
                        body.extend(block)
                else:
                    body.append('switch (ldv_undef_int()) {')
                    for index in range(len(blocks)):
                        body.append('\tcase {}: '.format(index) + '{')
                        body.extend(['\t\t' + stm for stm in blocks[index]])
                        body.append('\t\tbreak;')
                        body.append('\t};')
                    if get_conf_property(self._conf, 'do not skip signals'):
                        body.append('\tdefault: ldv_assume(0);')
                    body.append('};')

                if len(function_parameters) > 0:
                    df = Function(
                        "ldv_dispatch_{}_{}_{}".format(state.action.name, automaton.identifier, state.identifier),
                        "void f({})".format(', '.
                                            join([function_parameters[index].to_string('arg{}'.format(index),
                                                                                       typedef='complex_and_params')
                                                  for index in range(len(function_parameters))])))
                else:
                    df = Function(
                        "ldv_dispatch_{}_{}_{}".format(state.action.name, automaton.identifier, state.identifier),
                        "void f(void)")
                df.definition_file = self._cmodel.entry_file
                body.extend(post)
                body.append('return;')
                df.body.extend(body)

                # Add function definition
                self._cmodel.add_function_definition(df)

                code.extend([
                    '{}({});'.format(df.name, ', '.join(df_parameters))
                ])
            else:
                # This is because translators can have specific restrictions
                code.append('/* Skip the dispatch because there is no process to receive the signal */')
        else:
            code.append('/* Skip the dispatch because there is no process to receive the signal */')

        return code, v_code, conditions, comments
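For a non-broadcast dispatch the generated blocks are wrapped into a nondeterministic switch. A standalone sketch of just that wrapping; the dispatched statement is hypothetical.

def wrap_in_switch(blocks, do_not_skip_signals=False):
    """Wrap alternative dispatch blocks into a switch over ldv_undef_int()."""
    body = ['switch (ldv_undef_int()) {']
    for index, block in enumerate(blocks):
        body.append('\tcase {}: '.format(index) + '{')
        body.extend(['\t\t' + stm for stm in block])
        body.append('\t\tbreak;')
        body.append('\t};')
    if do_not_skip_signals:
        body.append('\tdefault: ldv_assume(0);')
    body.append('};')
    return body

print('\n'.join(wrap_in_switch([['ldv_dispatch_register_1_2(arg0);']])))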
Example #20
0
    def __init__(self, logger, conf, source, cmodel, entry_fsa, model_fsa, event_fsa):
        """
        Initialize a new FSA modelTranslator object. During the initialization an environment model in the form of
        finite state machines with process-like actions is translated to C code. Translation includes the following
        steps: each label-interface pair is translated into a separate variable, each action is translated into code
        blocks (auxiliary functions can be additionally generated), a control function is generated for each automaton,
        control functions for event modeling are called in a specific entry point function, and control functions for
        function modeling are called instead of the modelled functions. This class has abstract methods to provide the
        ability to implement different translators.

        :param logger: Logger object.
        :param conf: Configuration properties dictionary.
        :param source: Source collection object.
        :param cmodel: CModel object.
        :param entry_fsa: An entry point Automaton object.
        :param model_fsa: List with Automaton objects which correspond to function models.
        :param event_fsa:  List with Automaton objects for event modeling.
        """
        self._cmodel = cmodel
        self._entry_fsa = entry_fsa
        self._model_fsa = model_fsa
        self._event_fsa = event_fsa
        self._conf = conf
        self._source = source
        self._logger = logger
        self._structures = dict()
        self._control_functions = dict()
        self._logger.info("Include extra header files if necessary")
        check_or_set_conf_property(conf, 'do not skip signals', default_value=False, expected_type=None)

        # Get from unused interfaces
        header_sets = []
        for process in (a.process for a in self._model_fsa + self._event_fsa if len(a.process.headers) > 0):
            header_sets.append(process.headers)
        header_sets = sorted(header_sets, key=len)
        for hset in header_sets:
            self._cmodel.add_headers(self._cmodel.entry_file, hset)

        # Generate base code blocks
        self._logger.info("Start the preparation of actions code")
        for automaton in self._event_fsa + self._model_fsa + [self._entry_fsa]:
            self._logger.debug("Generate code for instance {} of process '{}' of category '{}'".
                               format(automaton.identifier, automaton.process.name, automaton.process.category))
            for state in sorted(automaton.fsa.states, key=attrgetter('identifier')):
                self._compose_action(state, automaton)

        # Make graph postprocessing
        for automaton in self._event_fsa + [self._entry_fsa]:
            self._normalize_event_fsa(automaton)
        for automaton in self._model_fsa:
            self._normalize_model_fsa(automaton)

        # Dump graphs
        if get_conf_property(self._conf, "debug output"):
            self._save_digraphs()

        # Start generation of control functions
        for automaton in self._event_fsa + self._model_fsa + [self._entry_fsa]:
            self._compose_control_function(automaton)

        # Generate aspects with kernel models
        for automaton in self._model_fsa:
            aspect_code = [
                model_comment('KERNEL_MODEL', 'Perform the model code of the function {!r}'.
                              format(automaton.process.name))
            ]
            function_obj = self._source.get_source_function(automaton.process.name)
            params = []
            for position, param in enumerate(function_obj.declaration.parameters):
                if isinstance(param, str):
                    params.append(param)
                else:
                    params.append('$arg{}'.format(str(position + 1)))

            if len(params) == 0 and function_obj.declaration.return_value.identifier == 'void':
                arguments = []
                ret_expression = ''
            elif len(params) == 0:
                arguments = []
                ret_expression = 'return '
            elif function_obj.declaration.return_value.identifier == 'void':
                arguments = params
                ret_expression = ''
            else:
                ret_expression = 'return '
                arguments = params

            if len(arguments) > 0 and '...' == arguments[-1]:
                arguments = arguments[:-1]

            invoke = '{}{}({});'.format(ret_expression, self._control_function(automaton).name, ', '.join(arguments))
            aspect_code.append(invoke)

            self._cmodel.add_function_model(function_obj, aspect_code)
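            # Illustrative example (hedged, not taken from the original sources): for a
            # modelled function 'int usb_register_driver(struct usb_driver *, struct module *,
            # const char *)' the aspect body built above would look roughly like
            #     return emg_control_function_1($arg1, $arg2, $arg3);
            # where the actual control function name is whatever _control_function() returns.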

        # Generate entry point function
        self._entry_point()

        # Add types
        self._cmodel.types = sorted(set(self._structures.values()), key=lambda t: t.identifier)

        return
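
# A minimal sketch, not taken from the original sources: a concrete translator that stubs
# every hook invoked by the constructor above. The base class name FSATranslator is an
# assumption, since the class statement itself is not visible in this snippet, and the
# hook bodies are illustrative placeholders only.
class NoopTranslator(FSATranslator):

    def _compose_action(self, state, automaton):
        # A real translator would attach a code block describing the action here.
        pass

    def _normalize_event_fsa(self, automaton):
        pass

    def _normalize_model_fsa(self, automaton):
        pass

    def _save_digraphs(self):
        pass

    def _compose_control_function(self, automaton):
        pass

    def _control_function(self, automaton):
        # The constructor expects an object with a 'name' attribute for aspect generation.
        return type('ControlFunction', (), {'name': 'emg_cf_{}'.format(automaton.identifier)})()

    def _entry_point(self):
        pass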
Example #21
0
def __import_inits_exits(logger, conf, avt, source):
    _inits = collections.OrderedDict()
    _exits = collections.OrderedDict()
    deps = {}
    for module, dep in avt['deps'].items():
        deps[module] = list(dep)
    order = calculate_load_order(logger, deps)
    order_c_files = []
    for module in order:
        for module2 in avt['grps']:
            if module2['id'] != module:
                continue
            order_c_files.extend(
                [file['in file'] for file in module2['Extra CCs']])

    init = source.get_macro(get_necessary_conf_property(conf, 'init'))
    if init:
        parameters = dict()
        for path in init.parameters:
            if len(init.parameters[path]) > 1:
                raise ValueError(
                    "Cannot set two initialization functions for a file {!r}".
                    format(path))
            elif len(init.parameters[path]) == 1:
                parameters[path] = init.parameters[path][0][0]

        for module in (m for m in order_c_files if m in parameters):
            _inits[module] = parameters[module]
    elif not get_conf_property(conf, 'kernel'):
        raise ValueError('There is no module initialization function provided')

    exitt = source.get_macro(get_necessary_conf_property(conf, 'exit'))
    if exitt:
        parameters = dict()
        for path in exitt.parameters:
            if len(exitt.parameters[path]) > 1:
                raise KeyError(
                    "Cannot set two exit functions for a file {!r}".format(
                        path))
            elif len(exitt.parameters[path]) == 1:
                parameters[path] = exitt.parameters[path][0][0]

        for module in (m for m in reversed(order_c_files) if m in parameters):
            _exits[module] = parameters[module]
    if not exitt and not get_conf_property(conf, 'kernel'):
        logger.warning('There is no module exit function provided')

    kernel_initializations = []
    if get_conf_property(conf, 'kernel'):
        if get_necessary_conf_property(conf,
                                       "add functions as initialization"):
            extra = get_necessary_conf_property(
                conf, "add functions as initialization")
        else:
            extra = dict()

        for name in get_necessary_conf_property(conf, 'kernel_initialization'):
            mc = source.get_macro(name)

            same_list = []
            if mc:
                for module in (m for m in order_c_files if m in mc.parameters):
                    for call in mc.parameters[module]:
                        same_list.append((module, call[0]))
            if name in extra:
                for func in (source.get_source_function(f) for f in extra[name]
                             if source.get_source_function(f)):
                    if func.definition_file:
                        file = func.definition_file
                    elif len(func.declaration_files) > 0:
                        file = list(func.declaration_files)[0]
                    else:
                        file = None

                    if file:
                        same_list.append((file, func.name))
                    else:
                        logger.warning(
                            "Cannot find file to place alias for {!r}".format(
                                func.name))
            if len(same_list) > 0:
                kernel_initializations.append((name, same_list))

    inits = [(module, _inits[module]) for module in _inits]
    exits = [(module, _exits[module]) for module in _exits]
    return inits, exits, kernel_initializations
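
# A hedged illustration (not from the original sources) of the inputs this function
# consumes: 'deps' determines the module load order while 'grps'/'Extra CCs' supply
# the C files that are scanned for init/exit macro usages. All values are made up.
example_avt = {
    'deps': {'ext-modules/a.ko': [], 'ext-modules/b.ko': ['ext-modules/a.ko']},
    'grps': [
        {'id': 'ext-modules/a.ko', 'Extra CCs': [{'in file': 'ext-modules/a.c'}]},
        {'id': 'ext-modules/b.ko', 'Extra CCs': [{'in file': 'ext-modules/b.c'}]}
    ]
}
# Assuming the configured 'init' and 'exit' macros are found in both files, the function
# returns initialization calls in load order and exit calls in the reverse order, e.g.:
#   inits == [('ext-modules/a.c', 'a_init'), ('ext-modules/b.c', 'b_init')]
#   exits == [('ext-modules/b.c', 'b_exit'), ('ext-modules/a.c', 'a_exit')]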
Example #22
0
def translate_intermediate_model(logger, conf, avt, source, processes):
    """
    This is the main translator function. It first generates automata for all given processes of the environment
    model and then passes them to the particular translator chosen via the user-defined configuration. At the end it
    triggers code printing and adds the necessary information to the (abstract) verification task description.

    :param logger: Logger object.
    :param conf: Configuration dictionary for the whole EMG.
    :param avt: Verification task dictionary.
    :param source: Source object.
    :param processes: ProcessCollection object.
    :return: None.
    """
    if not processes.entry:
        raise RuntimeError(
            "It is impossible to generate an environment model without main process"
        )

    # Prepare main configuration properties
    logger.info("Check necessary configuration properties to be set")
    check_or_set_conf_property(conf['translation options'],
                               'entry point',
                               default_value='main',
                               expected_type=str)
    check_or_set_conf_property(conf['translation options'],
                               'environment model file',
                               default_value='environment_model.c',
                               expected_type=str)
    check_or_set_conf_property(conf['translation options'],
                               "nested automata",
                               default_value=True,
                               expected_type=bool)
    check_or_set_conf_property(conf['translation options'],
                               "direct control functions calls",
                               default_value=True,
                               expected_type=bool)
    check_or_set_conf_property(conf['translation options'],
                               "code additional aspects",
                               default_value=list(),
                               expected_type=list)
    check_or_set_conf_property(conf['translation options'],
                               "additional headers",
                               default_value=list(),
                               expected_type=list)
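    # For reference, a hedged sketch of the 'translation options' block after the defaults
    # above have been applied (values are illustrative only):
    #     {
    #         "entry point": "main",
    #         "environment model file": "environment_model.c",
    #         "nested automata": True,
    #         "direct control functions calls": True,
    #         "code additional aspects": [],
    #         "additional headers": []
    #     }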

    if get_conf_property(conf['translation options'], "debug output"):
        processes.save_collection('environment processes.json')

    # Collect files
    files = set()
    for grp in avt['grps']:
        files.update(
            [f['in file'] for f in grp['Extra CCs'] if 'in file' in f])
    files = sorted(files)
    logger.info("Files found: {}".format(len(files)))

    # Determine entry point file and function
    logger.info("Determine entry point file and function name")
    entry_file = get_necessary_conf_property(conf['translation options'],
                                             "environment model file")
    entry_point_name = get_necessary_conf_property(conf['translation options'],
                                                   'entry point')
    if entry_file not in files:
        files.append(entry_file)
        try:
            entry_file_realpath = find_file_or_dir(
                logger, conf['main working directory'], entry_file)
        except FileNotFoundError:
            entry_file_realpath = os.path.relpath(
                entry_file, conf['main working directory'])

        # Register the environment model file in the task description
        avt['environment model'] = entry_file_realpath

    # First just merge all as is
    additional_code = dict()
    for process in list(processes.models.values()) + list(
            processes.environment.values()) + [processes.entry]:
        for file in process.declarations:
            if file not in additional_code:
                additional_code[file] = {
                    'declarations': process.declarations[file],
                    'definitions': dict()
                }
            else:
                additional_code[file]['declarations'].update(
                    process.declarations[file])
        for file in process.definitions:
            if file not in additional_code:
                additional_code[file] = {
                    'definitions': process.definitions[file],
                    'declarations': dict()
                }
            else:
                additional_code[file]['definitions'].update(
                    process.definitions[file])

    # Then convert into proper format
    for file in additional_code:
        additional_code[file]['declarations'] = list(
            additional_code[file]['declarations'].values())

        defin = additional_code[file]['definitions']
        additional_code[file]['definitions'] = list()
        for block in defin.values():
            additional_code[file]['definitions'].extend(block)

    # Rename main file
    if 'environment model' in additional_code:
        additional_code[entry_file] = additional_code['environment model']
        del additional_code['environment model']

    # Initialize the code representation
    cmodel = CModel(logger, conf, conf['main working directory'], files,
                    entry_point_name, entry_file)

    # Add common headers provided by a user
    cmodel.add_headers(
        entry_file,
        get_necessary_conf_property(conf['translation options'],
                                    "additional headers"))

    logger.info("Generate finite state machine on each process")
    entry_fsa = Automaton(processes.entry, 1)
    identifier_cnt = 2
    model_fsa = []
    main_fsa = []
    for process in processes.models.values():
        model_fsa.append(Automaton(process, identifier_cnt))
        identifier_cnt += 1
    for process in processes.environment.values():
        main_fsa.append(Automaton(process, identifier_cnt))
        identifier_cnt += 1

    # Set self parallel flag
    sp_ids = get_conf_property(conf["translation options"],
                               "not self parallel processes")
    if sp_ids and isinstance(sp_ids, list):
        for automaton in (a for a in model_fsa + main_fsa + [entry_fsa]
                          if a.process.pretty_id in sp_ids):
            automaton.self_parallelism = False

    sp_categories = get_conf_property(
        conf["translation options"],
        "not self parallel processes from categories")
    sp_scenarios = get_conf_property(
        conf["translation options"],
        "not self parallel processes from scenarios")
    if sp_categories and isinstance(sp_categories, list):
        for automaton in (a for a in model_fsa + main_fsa + [entry_fsa]
                          if a.process.category in sp_categories):
            automaton.self_parallelism = False
    if sp_scenarios and isinstance(sp_scenarios, list):
        for automaton in (a for a in model_fsa + main_fsa + [entry_fsa]
                          if a.process.name in sp_scenarios):
            automaton.self_parallelism = False

    # Prepare code on each automaton
    logger.info("Translate finite state machines into C code")
    if get_necessary_conf_property(conf['translation options'],
                                   "nested automata"):
        LabelTranslator(logger, conf['translation options'], source, cmodel,
                        entry_fsa, model_fsa, main_fsa)
    else:
        StateTranslator(logger, conf['translation options'], source, cmodel,
                        entry_fsa, model_fsa, main_fsa)

    logger.info("Print generated source code")
    addictions = cmodel.print_source_code(additional_code)

    # Set entry point function in abstract task
    logger.info(
        "Add an entry point function name to the abstract verification task")
    avt["entry points"] = [cmodel.entry_name]
    if get_conf_property(conf['translation options'],
                         "code additional aspects"):
        additional_aspects = [
            os.path.abspath(
                find_file_or_dir(logger, conf["main working directory"], f))
            for f in get_conf_property(conf['translation options'],
                                       "code additional aspects")
        ]
    else:
        additional_aspects = []
    for grp in avt['grps']:
        logger.info('Add aspects to C files of group {!r}'.format(grp['id']))
        for cc_extra_full_desc_file in [
                f for f in grp['Extra CCs'] if 'in file' in f
        ]:
            if cc_extra_full_desc_file["in file"] in addictions:
                if 'plugin aspects' not in cc_extra_full_desc_file:
                    cc_extra_full_desc_file['plugin aspects'] = []
                cc_extra_full_desc_file['plugin aspects'].append({
                    "plugin":
                    "EMG",
                    "aspects":
                    [addictions[cc_extra_full_desc_file["in file"]]] +
                    additional_aspects
                })
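                # For illustration (hedged, all values made up), the resulting description
                # entry would look roughly like:
                #     {'in file': 'drivers/usb/core/usb.c',
                #      'plugin aspects': [{'plugin': 'EMG',
                #                          'aspects': ['usb.c.aspect', '/abs/path/extra.aspect']}]}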