Example #1
    def _call_cf_code(self, automaton, parameter='0'):
        if automaton.self_parallelism and get_necessary_conf_property(self._conf, 'self parallel processes') and \
                get_conf_property(self._conf, 'pure pthread interface'):
            for var in self.__thread_variable(automaton, 'pair'):
                self._cmodel.add_global_variable(var,
                                                 self._cmodel.entry_file,
                                                 extern=True)
            # Leave the first parameter as '{}' to be filled in later (the string is formatted twice)
            return 'pthread_create({}, 0, {}, {});'.\
                format('{}', self._control_function(automaton).name, parameter)
        else:
            if automaton.self_parallelism and get_necessary_conf_property(
                    self._conf, 'self parallel processes'):
                sv = self.__thread_variable(automaton, 'array')
                self._cmodel.add_global_variable(sv,
                                                 self._cmodel.entry_file,
                                                 extern=True)
                return 'pthread_create_N({}, 0, {}, {});'.\
                    format(sv.name, self._control_function(automaton).name, parameter)
            else:
                sv = self.__thread_variable(automaton, 'single')
                self._cmodel.add_global_variable(sv,
                                                 self._cmodel.entry_file,
                                                 extern=True)
                return 'pthread_create({}, 0, {}, {});'.\
                    format('& ' + sv.name, self._control_function(automaton).name, parameter)
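Note: the '{}' left in the first argument position is a deliberate placeholder, so the returned string is formatted a second time once the thread variable is known. A minimal sketch of this two-stage formatting, with hypothetical control function and thread variable names:

# Sketch only: 'ldv_main_cf' and 'ldv_thread_1' are hypothetical names.
template = 'pthread_create({}, 0, {}, {});'.format('{}', 'ldv_main_cf', '0')
# -> "pthread_create({}, 0, ldv_main_cf, 0);"
call = template.format('& ldv_thread_1')
# -> "pthread_create(& ldv_thread_1, 0, ldv_main_cf, 0);"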
Example #2
def generate_processes(emg, source):
    """
    This is the main function for generating processes of the environment model in the intermediate representation.
    From the configuration, the function reads the list of generator names and runs them one by one to obtain a final
    set of processes before translating them into C code.

    :param emg: EMG plugin object.
    :param source: Source collection object.
    :return: ProcessCollection object.
    """
    # Start process generators in a specific order
    generator_names = ('.vtg.emg.processGenerator.{}'.format(e) for e in
                       [list(e.keys())[0] for e in get_necessary_conf_property(emg.conf, "intermediate model options")])
    configurations = [list(e.values())[0] for e in get_necessary_conf_property(emg.conf, "intermediate model options")]
    generators = [importlib.import_module(name, 'core') for name in generator_names]

    processes = ProcessCollection(emg.logger, emg.conf)

    # First, get the kinds of specifications
    specifications = {}
    kinds = dict()
    for generator in generators:
        kinds[generator.__name__] = generator.get_specification_kinds(specifications)

    # Get specifications for each kind
    # Import Specifications
    emg.logger.info("Search for interface and event specifications")
    get_specs(emg.logger, emg.conf, os.path.dirname(emg.conf['requirements DB']), specifications)

    for index, generator in enumerate(generators):
        generator.generate_processes(emg, source, processes, configurations[index],
                                     {kind: specifications[kind] for kind in kinds[generator.__name__]})
    return processes
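The comprehensions above assume that the "intermediate model options" property is a list of single-entry dictionaries, each mapping a generator name to that generator's configuration. A minimal sketch of such a value (the generator names and options are illustrative, not a real configuration):

# Hypothetical configuration fragment illustrating the assumed layout.
intermediate_model_options = [
    {"linuxModule": {"convert statics to globals": True}},
    {"genericManual": {"enforce replacement": False}},
]
generator_names = ['.vtg.emg.processGenerator.{}'.format(list(e.keys())[0])
                   for e in intermediate_model_options]
configurations = [list(e.values())[0] for e in intermediate_model_options]
# generator_names == ['.vtg.emg.processGenerator.linuxModule',
#                     '.vtg.emg.processGenerator.genericManual']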
Example #3
def __get_path(logger, conf, prop):
    if prop in conf:
        spec_dir = core.vtg.utils.find_file_or_dir(logger,
                                                   get_necessary_conf_property(conf, "main working directory"),
                                                   get_necessary_conf_property(conf, prop))
        return spec_dir
    else:
        return None
Example #4
    def _compose_control_function(self, automaton):
        self._logger.info(
            'Generate label-based control function for automaton {} based on process {} of category {}'
            .format(automaton.identifier, automaton.process.name,
                    automaton.process.category))

        # Get function prototype
        cf = self._control_function(automaton)
        cf.definition_file = self._cmodel.entry_file

        # Do process initialization
        model_flag = True
        if automaton not in self._model_fsa:
            model_flag = False
            if not get_conf_property(self._conf,
                                     'direct control functions calls'
                                     ) and automaton is not self._entry_fsa:
                if automaton.self_parallelism and \
                        get_necessary_conf_property(self._conf, "self parallel processes") and \
                        get_conf_property(self._conf, 'pure pthread interface'):
                    for var in self.__thread_variable(automaton, 'pair'):
                        self._cmodel.add_global_variable(
                            var, self._cmodel.entry_file, False)
                elif automaton.self_parallelism and get_necessary_conf_property(
                        self._conf, "self parallel processes"):
                    self._cmodel.add_global_variable(self.__thread_variable(
                        automaton, 'array'),
                                                     self._cmodel.entry_file,
                                                     extern=False)
                else:
                    self._cmodel.add_global_variable(self.__thread_variable(
                        automaton, 'single'),
                                                     self._cmodel.entry_file,
                                                     extern=False)

        # Generate function body
        label_based_function(self._conf, self._source, automaton, cf,
                             model_flag)

        # Add function to source code to print
        self._cmodel.add_function_definition(cf)
        self._cmodel.add_function_declaration(self._cmodel.entry_file,
                                              cf,
                                              extern=True)
        if model_flag:
            for file in self._source.get_source_function(
                    automaton.process.name).declaration_files:
                self._cmodel.add_function_declaration(file, cf, extern=True)
        return
Example #5
def __get_specs(logger, conf, directory):
    logger.info('Search for event and interface categories specifications in {}'.format(directory))
    interface_specifications = list()
    event_specifications = list()

    # Find all json files
    file_candidates = set()
    for root, dirs, files in os.walk(directory):
        # Check only full paths to files
        json_files = glob.glob('{}/*.json'.format(root))
        file_candidates.update(json_files)

    # Filter specifications
    for file in file_candidates:
        with open(file, encoding="utf8") as fh:
            try:
                content = ujson.loads(fh.read())
            except ValueError:
                raise ValueError("Cannot parse EMG specification file {!r}".format(os.path.abspath(file)))

        if isinstance(content, dict):
            for tag in (t for t in content if isinstance(content[t], dict)):
                if "categories" in content[tag]:
                    logger.debug("Specification file {} is treated as interface categories specification".format(file))
                    interface_specifications.append(content)
                elif "environment processes" in content[tag]:
                    logger.debug("Specification file {} is treated as event categories specification".format(file))
                    event_specifications.append(content)
                else:
                    logger.debug("File '{}' is not recognized as a EMG specification".format(file))
                break

    # Check presence of specifications
    if len(interface_specifications) == 0:
        raise FileNotFoundError("Environment model generator missed an interface categories specification")
    elif len(event_specifications) == 0:
        raise FileNotFoundError("Environment model generator missed an event categories specification")

    # Merge specifications
    interface_spec = __merge_spec_versions(interface_specifications,
                                           get_necessary_conf_property(conf, 'specifications set'))
    __save_collection(logger, interface_spec, 'intf_spec.json')
    event_categories_spec = __merge_spec_versions(event_specifications,
                                                  get_necessary_conf_property(conf, 'specifications set'))
    __save_collection(logger, event_categories_spec, 'event_spec.json')

    return interface_spec, event_categories_spec
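The filter above only inspects the dictionaries nested under top-level version tags: a file is treated as an interface categories specification when such a dictionary has a "categories" key and as an event categories specification when it has an "environment processes" key. A minimal sketch of both shapes (the version tag and contents are placeholders):

# Placeholders only: real specifications carry full descriptions here.
interface_spec_example = {
    "3.14": {"categories": {}}
}
event_spec_example = {
    "3.14": {"environment processes": {}}
}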
Example #6
def __generate_calls(logger, emg, conf, functions_collection):
    def indented_line(t, s):
        return (t * "\t") + s

    loop = get_necessary_conf_property(conf, "infinite call")

    # Generate process
    ep = Process("main")
    ep.category = 'generic'
    ep.comment = "Call exported functions."
    ep.pretty_id = 'generic'
    ep.process = ''

    # Generate actions for the whole sequence
    expressions = []
    for func in functions_collection:
        logger.info("Call function {!r}".format(func))
        expr = __generate_call(emg, conf, ep, func, functions_collection[func])
        expressions.append(expr)

    # Generate process description
    code = []
    tab = 0
    if loop:
        code.append(indented_line(tab, "while (1) {"))
        tab += 1

    code.append(indented_line(tab, "switch (ldv_undef_int()) {"))
    tab += 1
    cnt = 0
    for expr in expressions:
        # Add a break after a function call
        code.append(indented_line(tab, "case {}: ".format(cnt) + '{'))
        code.append(indented_line(tab + 1, "{}".format(expr)))
        code.append(indented_line(tab + 1, "break;"))
        code.append(indented_line(tab, "}"))
        cnt += 1
    if loop:
        code.append(indented_line(tab, "default: break;"))
    else:
        code.append(indented_line(tab, "default: ldv_assume(0);"))

    tab -= 1
    code.append(indented_line(tab, "}"))
    if loop:
        code.append("}")
        tab -= 1

    ep.add_condition('function_calls', [], code,
                     'Call all functions independently.')
    ep.process = "<function_calls>"

    return ep
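For reference, a self-contained sketch of the dispatch block this loop assembles, reproduced for two hypothetical caller expressions and with the "infinite call" option disabled:

# Standalone sketch of the generated switch block; the caller names are hypothetical.
def indented_line(t, s):
    return (t * "\t") + s

expressions = ["ldv_emg_f1_caller();", "ldv_emg_f2_caller();"]
code, tab = [], 0
code.append(indented_line(tab, "switch (ldv_undef_int()) {"))
tab += 1
for cnt, expr in enumerate(expressions):
    code.append(indented_line(tab, "case {}: ".format(cnt) + "{"))
    code.append(indented_line(tab + 1, expr))
    code.append(indented_line(tab + 1, "break;"))
    code.append(indented_line(tab, "}"))
code.append(indented_line(tab, "default: ldv_assume(0);"))
tab -= 1
code.append(indented_line(tab, "}"))
print("\n".join(code))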
Example #7
    def _join_cf_code(self, automaton):
        if automaton.self_parallelism and get_necessary_conf_property(self._conf, 'self parallel processes') and \
                get_conf_property(self._conf, 'pure pthread interface'):
            for var in self.__thread_variable(automaton, 'pair'):
                self._cmodel.add_global_variable(var,
                                                 self._cmodel.entry_file,
                                                 extern=True)
            return 'pthread_join({}, 0);'
        else:
            if automaton.self_parallelism and get_necessary_conf_property(
                    self._conf, 'self parallel processes'):
                sv = self.__thread_variable(automaton, 'array')
                self._cmodel.add_global_variable(sv,
                                                 self._cmodel.entry_file,
                                                 extern=True)
                return 'pthread_join_N({}, 0);'.format(sv.name)
            else:
                sv = self.__thread_variable(automaton, 'single')
                self._cmodel.add_global_variable(sv,
                                                 self._cmodel.entry_file,
                                                 extern=True)
                return 'pthread_join({}, 0);'.format(sv.name)
Example #8
    def _control_function(self, automaton):
        """
        Generate control function. This function generates a FunctionDefinition object without a body. It is required
        to call control function within code blocks until all code blocks are translated and control function body
        can be generated.

        :param automaton: Automaton object.
        :return: FunctionDefinition object.
        """
        if automaton.identifier not in self._control_functions:
            # Check whether this is an aspect function or not
            if automaton in self._model_fsa:
                name = 'ldv_emg_{}'.format(automaton.process.name)
                function_objs = self._source.get_source_functions(automaton.process.name)
                if len(function_objs) == 0:
                    raise ValueError("Unfortunately there is no function {!r} found by the source analysis".
                                     format(automaton.process.name))
                else:
                    # We ignore here the fact that functions can have different scopes
                    function_obj = function_objs[0]
                params = []
                for position, param in enumerate(function_obj.declaration.parameters):
                    if isinstance(param, str):
                        params.append(param)
                    else:
                        params.append(param.to_string('arg{}'.format(str(position)), typedef='complex_and_params'))

                if len(params) == 0:
                    param_types = ['void']
                else:
                    param_types = params

                declaration = '{0} f({1})'.format(
                    function_obj.declaration.return_value.to_string('', typedef='complex_and_params'),
                    ', '.join(param_types))
                cf = Function(name, declaration)
            else:
                name = 'ldv_{}_{}'.format(automaton.process.name, automaton.identifier)
                if not get_necessary_conf_property(self._conf, "direct control functions calls"):
                    declaration = 'void *f(void *data)'
                else:
                    declaration = 'void f(void *data)'
                cf = Function(name, declaration)
            cf.definition_file = self._cmodel.entry_file

            self._control_functions[automaton.identifier] = cf

        return self._control_functions[automaton.identifier]
Example #9
    def __init__(self, logger, conf, source, cmodel, entry_fsa, model_fsa,
                 event_fsa):
        self.__state_variables = dict()
        self.__state_chains_memoization = dict()
        self.__switchers_cache = dict()

        check_or_set_conf_property(conf,
                                   'actions composition',
                                   default_value=[],
                                   expected_type=list)
        self.__jump_types = set([
            t for t in [Dispatch, Receive, Condition, Subprocess]
            if t.__name__ not in get_necessary_conf_property(
                conf, 'actions composition')
        ])
        super(StateTranslator, self).__init__(logger, conf, source, cmodel,
                                              entry_fsa, model_fsa, event_fsa)
Example #10
def get_specs(logger, conf, directory, specification_kinds):
    """
    Get specification kinds descriptions and parse all JSON files, separating them on the basis of markers in
    specification kinds.

    :param logger: Logger object.
    :param conf: Configuration dictionary.
    :param directory: Directory to search for specification files.
    :param specification_kinds: Dictionary with specification kinds descriptions.
    :return: The updated specification_kinds dictionary.
    """
    logger.info(
        'Search for various EMG generators specifications in {}'.format(
            directory))
    # Find all json files
    file_candidates = set()
    for root, dirs, files in os.walk(directory):
        # Check only full paths to files
        json_files = glob.glob('{}/*.json'.format(root))
        file_candidates.update(json_files)

    # Filter specifications
    for file in file_candidates:
        with open(file, encoding="utf8") as fh:
            try:
                content = ujson.loads(fh.read())
            except ValueError:
                raise ValueError(
                    "Cannot parse EMG specification file {!r}".format(
                        os.path.abspath(file)))

        if isinstance(content, dict):
            __check_file(logger, file, content, specification_kinds)

    # Merge specifications
    for kind in specification_kinds:
        spec = __merge_spec_versions(
            specification_kinds[kind]['specification'],
            get_necessary_conf_property(conf, 'specifications set'))
        specification_kinds[kind]['specification'] = spec
        __save_collection(logger, spec, '{} spec.json'.format(kind))
    return specification_kinds
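The function assumes that specification_kinds maps each kind name to a dictionary whose 'specification' entry accumulates matching file contents (filled by __check_file) and is replaced by the merged specification at the end. A sketch of that structure, with kind names taken from the other examples here; additional per-kind fields may exist:

# Assumed shape of specification_kinds before and after get_specs().
specification_kinds = {
    "interface specification": {"specification": []},  # parsed file contents collected here
    "event specification": {"specification": []},
}
# After get_specs() each 'specification' value is the single merged specification
# chosen according to the 'specifications set' property.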
Example #11
    def _import_action(self, name, process_strings, dic):
        act = super(AbstractProcessImporter,
                    self)._import_action(name, process_strings, dic)

        # Add comment if it is provided
        if 'comment' in dic:
            act.comment = dic['comment']
        elif not isinstance(act, Call):
            comments_by_type = get_necessary_conf_property(
                self.conf, 'action comments')
            tag = type(act).__name__.lower()
            if tag not in comments_by_type or \
                    not (isinstance(comments_by_type[tag], str) or
                         (isinstance(comments_by_type[tag], dict) and name in comments_by_type[tag])):
                raise KeyError(
                    "Cannot find comment for action {!r} of type {!r} at process {!r} description. You shoud either "
                    "specify in the corresponding environment model specification the comment text manually or set "
                    "the default comment text for all actions of the type {!r} at EMG plugin configuration properties "
                    "within 'action comments' attribute.".format(
                        name, tag, name, tag))
        return act
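The check above admits two layouts for the 'action comments' property: a single comment string for a whole action type, or a dictionary of per-action comments keyed by action name. A hedged sketch of both forms (the texts and the action name are illustrative):

# Illustrative 'action comments' value accepted by the check above.
action_comments = {
    "dispatch": "Send a signal.",                      # one text for every action of the type
    "receive": {"register": "Begin the scenario."},    # per-action texts selected by name
}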
Example #12
def translate_intermediate_model(logger, conf, avt, source, processes):
    """
    This is the main translator function. It generates automata first for all given processes of the environment model
    and then gives them to a particular translator chosen by the user-defined configuration. At the end it triggers
    code printing and adds necessary information to the (abstract) verification task description.

    :param logger: Logger object.
    :param conf: Configuration dictionary for the whole EMG.
    :param avt: Verification task dictionary.
    :param source: Source object.
    :param processes: ProcessCollection object.
    :return: None.
    """
    if not processes.entry:
        raise RuntimeError(
            "It is impossible to generate an environment model without main process"
        )

    # Prepare main configuration properties
    logger.info("Check necessary configuration properties to be set")
    check_or_set_conf_property(conf['translation options'],
                               'entry point',
                               default_value='main',
                               expected_type=str)
    check_or_set_conf_property(conf['translation options'],
                               'environment model file',
                               default_value='environment_model.c',
                               expected_type=str)
    check_or_set_conf_property(conf['translation options'],
                               "nested automata",
                               default_value=True,
                               expected_type=bool)
    check_or_set_conf_property(conf['translation options'],
                               "direct control functions calls",
                               default_value=True,
                               expected_type=bool)
    check_or_set_conf_property(conf['translation options'],
                               "code additional aspects",
                               default_value=list(),
                               expected_type=list)
    check_or_set_conf_property(conf['translation options'],
                               "additional headers",
                               default_value=list(),
                               expected_type=list)

    if get_conf_property(conf['translation options'], "debug output"):
        processes.save_collection('environment processes.json')

    # Collect files
    files = set()
    for grp in avt['grps']:
        files.update(
            [f['in file'] for f in grp['Extra CCs'] if 'in file' in f])
    files = sorted(files)
    logger.info("Files found: {}".format(len(files)))

    # Determine entry point file and function
    logger.info("Determine entry point file and function name")
    entry_file = get_necessary_conf_property(conf['translation options'],
                                             "environment model file")
    entry_point_name = get_necessary_conf_property(conf['translation options'],
                                                   'entry point')
    if entry_file not in files:
        files.append(entry_file)
        try:
            entry_file_realpath = find_file_or_dir(
                logger, conf['main working directory'], entry_file)
        except FileNotFoundError:
            entry_file_realpath = os.path.relpath(
                entry_file, conf['main working directory'])

        # Generate new group
        avt['environment model'] = entry_file_realpath

    # First just merge all as is
    additional_code = dict()
    for process in list(processes.models.values()) + list(
            processes.environment.values()) + [processes.entry]:
        for file in process.declarations:
            if file not in additional_code:
                additional_code[file] = {
                    'declarations': process.declarations[file],
                    'definitions': dict()
                }
            else:
                additional_code[file]['declarations'].update(
                    process.declarations[file])
        for file in process.definitions:
            if file not in additional_code:
                additional_code[file] = {
                    'definitions': process.definitions[file],
                    'declarations': dict()
                }
            else:
                additional_code[file]['definitions'].update(
                    process.definitions[file])

    # Then convert into proper format
    for file in additional_code:
        additional_code[file]['declarations'] = list(
            additional_code[file]['declarations'].values())

        defin = additional_code[file]['definitions']
        additional_code[file]['definitions'] = list()
        for block in defin.values():
            additional_code[file]['definitions'].extend(block)

    # Rename main file
    if 'environment model' in additional_code:
        additional_code[entry_file] = additional_code['environment model']
        del additional_code['environment model']

    # Initialize code representation
    cmodel = CModel(logger, conf, conf['main working directory'], files,
                    entry_point_name, entry_file)

    # Add common headers provided by a user
    cmodel.add_headers(
        entry_file,
        get_necessary_conf_property(conf['translation options'],
                                    "additional headers"))

    logger.info("Generate finite state machine on each process")
    entry_fsa = Automaton(processes.entry, 1)
    identifier_cnt = 2
    model_fsa = []
    main_fsa = []
    for process in processes.models.values():
        model_fsa.append(Automaton(process, identifier_cnt))
        identifier_cnt += 1
    for process in processes.environment.values():
        main_fsa.append(Automaton(process, identifier_cnt))
        identifier_cnt += 1

    # Set self parallel flag
    sp_ids = get_conf_property(conf["translation options"],
                               "not self parallel processes")
    if sp_ids and isinstance(sp_ids, list):
        for automaton in (a for a in model_fsa + main_fsa + [entry_fsa]
                          if a.process.pretty_id in sp_ids):
            automaton.self_parallelism = False

    sp_categories = get_conf_property(
        conf["translation options"],
        "not self parallel processes from categories")
    sp_scenarios = get_conf_property(
        conf["translation options"],
        "not self parallel processes from scenarios")
    if sp_categories and isinstance(sp_categories, list):
        for automaton in (a for a in model_fsa + main_fsa + [entry_fsa]
                          if a.process.category in sp_categories):
            automaton.self_parallelism = False
    if sp_scenarios and isinstance(sp_scenarios, list):
        for automaton in (a for a in model_fsa + main_fsa + [entry_fsa]
                          if a.process.name in sp_scenarios):
            automaton.self_parallelism = False

    # Prepare code on each automaton
    logger.info("Translate finite state machines into C code")
    if get_necessary_conf_property(conf['translation options'],
                                   "nested automata"):
        LabelTranslator(logger, conf['translation options'], source, cmodel,
                        entry_fsa, model_fsa, main_fsa)
    else:
        StateTranslator(logger, conf['translation options'], source, cmodel,
                        entry_fsa, model_fsa, main_fsa)

    logger.info("Print generated source code")
    addictions = cmodel.print_source_code(additional_code)

    # Set entry point function in abstract task
    logger.info(
        "Add an entry point function name to the abstract verification task")
    avt["entry points"] = [cmodel.entry_name]
    if get_conf_property(conf['translation options'],
                         "code additional aspects"):
        additional_aspects = [
            os.path.abspath(
                find_file_or_dir(logger, conf["main working directory"], f))
            for f in get_conf_property(conf['translation options'],
                                       "code additional aspects")
        ]
    else:
        additional_aspects = []
    for grp in avt['grps']:
        logger.info('Add aspects to C files of group {!r}'.format(grp['id']))
        for cc_extra_full_desc_file in [
                f for f in grp['Extra CCs'] if 'in file' in f
        ]:
            if cc_extra_full_desc_file["in file"] in addictions:
                if 'plugin aspects' not in cc_extra_full_desc_file:
                    cc_extra_full_desc_file['plugin aspects'] = []
                cc_extra_full_desc_file['plugin aspects'].append({
                    "plugin":
                    "EMG",
                    "aspects":
                    [addictions[cc_extra_full_desc_file["in file"]]] +
                    additional_aspects
                })
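For convenience, the defaults installed into 'translation options' by the check_or_set_conf_property calls above amount to the following dictionary (a sketch; explicitly configured values always win):

# Defaults applied to conf['translation options'] when the properties are absent.
translation_options_defaults = {
    "entry point": "main",
    "environment model file": "environment_model.c",
    "nested automata": True,
    "direct control functions calls": True,
    "code additional aspects": [],
    "additional headers": [],
}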
Example #13
def __import_inits_exits(logger, conf, avt, source):
    _inits = collections.OrderedDict()
    _exits = collections.OrderedDict()
    deps = {}
    for module, dep in avt['deps'].items():
        deps[module] = list(dep)
    order = calculate_load_order(logger, deps)
    order_c_files = []
    for module in order:
        for module2 in avt['grps']:
            if module2['id'] != module:
                continue
            order_c_files.extend(
                [file['in file'] for file in module2['Extra CCs']])

    init = source.get_macro(get_necessary_conf_property(conf, 'init'))
    if init:
        parameters = dict()
        for path in init.parameters:
            if len(init.parameters[path]) > 1:
                raise ValueError(
                    "Cannot set two initialization functions for a file {!r}".
                    format(path))
            elif len(init.parameters[path]) == 1:
                parameters[path] = init.parameters[path][0][0]

        for module in (m for m in order_c_files if m in parameters):
            _inits[module] = parameters[module]
    elif not get_conf_property(conf, 'kernel'):
        raise ValueError('There is no module initialization function provided')

    exitt = source.get_macro(get_necessary_conf_property(conf, 'exit'))
    if exitt:
        parameters = dict()
        for path in exitt.parameters:
            if len(exitt.parameters[path]) > 1:
                raise KeyError(
                    "Cannot set two exit functions for a file {!r}".format(
                        path))
            elif len(exitt.parameters[path]) == 1:
                parameters[path] = exitt.parameters[path][0][0]

        for module in (m for m in reversed(order_c_files) if m in parameters):
            _exits[module] = parameters[module]
    if not exitt and not get_conf_property(conf, 'kernel'):
        logger.warning('There is no module exit function provided')

    kernel_initializations = []
    if get_conf_property(conf, 'kernel'):
        if get_necessary_conf_property(conf,
                                       "add functions as initialization"):
            extra = get_necessary_conf_property(
                conf, "add functions as initialization")
        else:
            extra = dict()

        for name in get_necessary_conf_property(conf, 'kernel_initialization'):
            mc = source.get_macro(name)

            same_list = []
            if mc:
                for module in (m for m in order_c_files if m in mc.parameters):
                    for call in mc.parameters[module]:
                        same_list.append((module, call[0]))
            if name in extra:
                for func in (source.get_source_function(f) for f in extra[name]
                             if source.get_source_function(f)):
                    if func.definition_file:
                        file = func.definition_file
                    elif len(func.declaration_files) > 0:
                        file = list(func.declaration_files)[0]
                    else:
                        file = None

                    if file:
                        same_list.append((file, func.name))
                    else:
                        logger.warning(
                            "Cannot find file to place alias for {!r}".format(
                                func.name))
            if len(same_list) > 0:
                kernel_initializations.append((name, same_list))

    inits = [(module, _inits[module]) for module in _inits]
    exits = [(module, _exits[module]) for module in _exits]
    return inits, exits, kernel_initializations
Example #14
def generate_processes(emg, source, processes, conf, specifications):
    """
    This generator generates processes for verifying Linux kernel modules and some parts of the Linux kernel itself.
     For instance, it adds function models for kernel functions and calls callbacks in the environment model.
     It uses interface categories specifications and event categories specifications to generate the model.

    :param emg: EMG Plugin object.
    :param source: Source collection object.
    :param processes: ProcessCollection object.
    :param conf: Configuration dictionary of this generator.
    :param specifications: Dictionary with the required specifications of the required kinds.
    :return: None.
    """
    # Get instance maps if possible
    all_instance_maps = specifications["instance maps"].get("specification")
    task_name = emg.abstract_task_desc['fragment']
    instance_maps = dict()
    for imap in all_instance_maps.get('instance maps', []):
        if task_name in imap.get('fragments', []):
            instance_maps = imap.get('instance map', dict())

    emg.logger.info("Import interface categories specification")
    interfaces = InterfaceCollection(emg.logger, conf)
    interfaces.fill_up_collection(source, specifications["interface specification"]["specification"])

    emg.logger.info("Import event categories specification")
    abstract_processes = AbstractProcessImporter(emg.logger, conf)
    abstract_processes.parse_event_specification(specifications["event specification"]["specification"])

    # Now check that we have all necessary interface specifications
    unspecified_functions = [func for func in abstract_processes.models
                             if func in source.source_functions and
                             func not in [i.short_identifier for i in interfaces.function_interfaces]]
    if len(unspecified_functions) > 0:
        raise RuntimeError("You need to specify interface specifications for the following function models: {}"
                           .format(', '.join(unspecified_functions)))
    process_model = ProcessModel(emg.logger, conf, interfaces, abstract_processes)
    abstract_processes.environment = {p.identifier: p for p in process_model.event_processes}
    abstract_processes.models = {p.identifier: p for p in process_model.model_processes}

    emg.logger.info("Generate processes from abstract ones")
    instance_maps, data = generate_instances(emg.logger, conf, source, interfaces, abstract_processes, instance_maps)

    # Send data to the server
    emg.logger.info("Send data about generated instances to the server")
    core.utils.report(emg.logger,
                      'data',
                      {
                          'id': emg.id,
                          'data': instance_maps
                      },
                      emg.mqs['report files'],
                      emg.vals['report id'],
                      get_necessary_conf_property(emg.conf, "main working directory"))
    emg.logger.info("An intermediate environment model has been prepared")

    # Dump the instance map to disk
    instance_map_file = 'instance map.json'
    emg.logger.info("Dump information on chosen instances to file '{}'".format(instance_map_file))
    with open(instance_map_file, "w", encoding="utf8") as fd:
        fd.writelines(ujson.dumps(instance_maps, ensure_ascii=False, sort_keys=True, indent=4,
                                  escape_forward_slashes=False))

    processes.parse_event_specification(data)
    processes.establish_peers()
Example #15
    def _dispatch_blocks(self, state, automaton, function_parameters,
                         automata_peers, replicative):
        pre = []
        post = []
        blocks = []

        for name in (n for n in automata_peers
                     if len(automata_peers[n]['states']) > 0):
            decl = self._get_cf_struct(automaton, function_parameters)
            cf_param = 'cf_arg_{}'.format(
                automata_peers[name]['automaton'].identifier)
            vf_param_var = Variable(cf_param, decl.take_pointer)
            pre.append(vf_param_var.declare() + ';')

            if replicative:
                for r_state in automata_peers[name]['states']:
                    block = list()
                    block.append('{} = {}(sizeof({}));'.format(
                        vf_param_var.name,
                        self._cmodel.mem_function_map["ALLOC"],
                        decl.identifier))
                    for index in range(len(function_parameters)):
                        block.append('{}->arg{} = arg{};'.format(
                            vf_param_var.name, index, index))
                    if r_state.action.replicative:
                        call = self._call_cf(automata_peers[name]['automaton'],
                                             cf_param)
                        if get_conf_property(self._conf,
                                             'direct control functions calls'):
                            block.append(call)
                        else:
                            if automata_peers[name]['automaton'].self_parallelism and \
                                    get_necessary_conf_property(self._conf, "self parallel processes") and \
                                    get_conf_property(self._conf, 'pure pthread interface'):
                                thread_vars = self.__thread_variable(
                                    automata_peers[name]['automaton'],
                                    var_type='pair')
                                for v in thread_vars:
                                    # Expect that for this particular case the first argument is unset
                                    block.extend([
                                        'ret = {}'.format(
                                            call.format("& " + v.name)),
                                        'ldv_assume(ret == 0);'
                                    ])
                            else:
                                block.extend([
                                    'ret = {}'.format(call),
                                    'ldv_assume(ret == 0);'
                                ])
                        blocks.append(block)
                        break
                    else:
                        self._logger.warning(
                            'Cannot generate dispatch based on labels for receive {} in process {} with category {}'
                            .format(
                                r_state.action.name,
                                automata_peers[name]['automaton'].process.name,
                                automata_peers[name]
                                ['automaton'].process.category))
            # todo: Pretty ugly, but works
            elif state.action.name.find('dereg') != -1:
                block = list()
                call = self._join_cf(automata_peers[name]['automaton'])
                if not get_conf_property(self._conf,
                                         'direct control functions calls'):
                    if automata_peers[name]['automaton'].self_parallelism and \
                            get_necessary_conf_property(self._conf, "self parallel processes") and \
                            get_conf_property(self._conf, 'pure pthread interface'):
                        thread_vars = self.__thread_variable(
                            automata_peers[name]['automaton'], var_type='pair')
                        for v in thread_vars:
                            # Expect that for this particular case the first argument is unset
                            block.extend([
                                'ret = {}'.format(call.format(v.name)),
                                'ldv_assume(ret == 0);'
                            ])
                    else:
                        block.extend(
                            ['ret = {}'.format(call), 'ldv_assume(ret == 0);'])
                    blocks.append(block)

        return pre, blocks, post
Example #16
    def __add_process(self,
                      interfaces,
                      process,
                      category=None,
                      model=False,
                      label_map=None,
                      peer=None):
        self.logger.info("Add process {!r} to the model".format(process.name))
        self.logger.debug(
            "Make copy of process {!r} before adding it to the model".format(
                process.name))
        new = copy.deepcopy(process)
        if not category:
            new.category = 'functions models'
            if not new.comment:
                raise KeyError(
                    "You must specify manually 'comment' attribute within the description of the following "
                    "function model process description: {!r}.".format(
                        new.name))
        else:
            new.category = category
            if not new.comment:
                new.comment = get_necessary_conf_property(
                    self.conf, 'process comment')

        # Add comments
        comments_by_type = get_necessary_conf_property(self.conf,
                                                       'action comments')
        for action in new.actions.values():
            # Add comment if it is provided
            if not action.comment:
                tag = type(action).__name__.lower()
                if tag in comments_by_type and isinstance(
                        comments_by_type[tag], str):
                    action.comment = comments_by_type[tag]
                elif tag in comments_by_type and isinstance(comments_by_type[tag], dict) and \
                        action.name in comments_by_type[tag]:
                    action.comment = comments_by_type[tag][action.name]
                elif not isinstance(action, Call):
                    raise KeyError(
                        "Cannot find a comment for action {0!r} of type {2!r} at new {1!r} description. You "
                        "shoud either specify in the corresponding environment model specification the comment "
                        "text manually or set the default comment text for all actions of the type {2!r} at EMG "
                        "plugin configuration properties within 'action comments' attribute."
                        .format(action.name, new.name, tag))

            # Add callback comment
            if isinstance(action, Call):
                callback_comment = get_necessary_conf_property(
                    self.conf, 'callback comment').capitalize()
                if action.comment:
                    action.comment += ' ' + callback_comment
                else:
                    action.comment = callback_comment

        # todo: Assign category for each new process, not only for those which have callbacks (issue #6564)
        new.identifier = len(self.model_processes) + len(
            self.event_processes) + 1
        self.logger.info("Finally add process {} to the model".format(
            process.name))

        self.logger.debug("Set interfaces for given labels")
        if label_map:
            for label in label_map["matched labels"].keys():
                for interface in [
                        interfaces.get_or_restore_intf(name)
                        for name in label_map["matched labels"][label]
                ]:
                    self.__assign_label_interface(new.labels[label], interface)
        else:
            for label in new.labels.values():
                for interface in label.interfaces:
                    if not label.get_declaration(interface):
                        try:
                            self.__assign_label_interface(
                                label,
                                interfaces.get_or_restore_intf(interface))
                        except KeyError:
                            self.logger.warning(
                                "Process '{}' for category '{}' cannot be added, since it contains"
                                "unknown interfaces for this program fragment".
                                format(new.name, new.category))
                            return None

        if model and not category:
            self.model_processes.append(new)
        elif not model and category:
            self.event_processes.append(new)
        else:
            raise ValueError(
                'Provide either model or category arguments but not simultaneously'
            )

        if peer:
            self.logger.debug(
                "Match signals with signals of process {} with identifier {}".
                format(peer.name, peer.identifier))
            new.establish_peers(peer)

        self.logger.info(
            "Check is there exist any dispatches or receives after process addiction to tie"
            .format(process.name))
        self.__normalize_model(interfaces)
        return new
Example #17
def __generate_call(emg, conf, ep, func, obj):
    # Add declaration of caller
    caller_func = Function("ldv_emg_{}_caller".format(func), "void a(void)")
    ep.add_declaration("environment model", caller_func.name,
                       caller_func.declare(True)[0])
    expression = ""
    body = []
    initializations = []

    # If the function returns a value, cast the call to void
    if obj.declaration.return_value and obj.declaration.return_value.identifier != 'void':
        expression += "(void) "

    # Get arguments and allocate memory for them
    args = []
    free_args = []
    for index, arg in enumerate(obj.declaration.parameters):
        if not isinstance(arg, str):
            argvar = Variable("ldv_arg_{}".format(index), arg)
            body.append(argvar.declare() + ";")
            args.append(argvar.name)
            if isinstance(arg, Pointer):
                elements = get_conf_property(
                    conf, "initialize strings as null terminated")
                if elements and arg.identifier == 'char **':
                    if isinstance(elements, int) or elements.isnumeric():
                        elements = int(elements)
                    else:
                        elements = 'ldv_undef_int()'
                    argvar_len = Variable(argvar.name + '_len', 'int')
                    # Define the number of elements explicitly, since an undefined value is sometimes too difficult
                    initializations.append("int {} = {};".format(
                        argvar_len.name, elements))
                    initializations.append(
                        "{} = (char **) ldv_xmalloc({} * sizeof(char *));".
                        format(argvar.name, argvar_len.name))
                    # Initialize all elements but the last one
                    initializations.append(
                        "for (int i = 0; i < {} - 1; i++)".format(
                            argvar_len.name))
                    # Some undefined data
                    initializations.append(
                        "\t{}[i] = (char *) external_allocated_data();".format(
                            argvar.name))
                    # The last element is a null terminator
                    initializations.append("{}[{} - 1] = (char *) 0;".format(
                        argvar.name, argvar_len.name))
                    free_args.append(argvar.name)
                elif get_necessary_conf_property(
                        emg.conf["translation options"], "allocate external"):
                    value = "external_allocated_data();"
                    initializations.append("{} = {}".format(
                        argvar.name, value))
                else:
                    if get_necessary_conf_property(
                            emg.conf["translation options"],
                            "allocate with sizeof"):
                        apt = arg.points.to_string(
                            '', typedef='complex_and_params')
                        value = "ldv_xmalloc(sizeof({}));".\
                            format(apt if apt != 'void' else apt + '*')
                    else:
                        value = "ldv_xmalloc_unknown_size(0);"
                    free_args.append(argvar.name)
                    initializations.append("{} = {}".format(
                        argvar.name, value))

    # Generate call
    expression += "{}({});".format(func, ", ".join(args))

    # Generate function body
    body += initializations + [expression]

    # Free memory
    for arg in free_args:
        body.append("ldv_free({});".format(arg))

    caller_func.body = body

    # Add definition of caller
    ep.add_definition(obj.definition_file, caller_func.name,
                      caller_func.define() + ["\n"])

    # Return call expression
    return "{}();".format(caller_func.name)
Example #18
    def print_source_code(self, additional_lines):
        """
        Generate an environment model as C code. The code is distributed across aspect additions for original
        source files and the main environment model C code.

        :param additional_lines: Dictionary with the user-defined C code:
                                 {'file name': {'definitions': [...], 'declarations': []}}
        :return: Dictionary {'file': Path to generated file with the Code}
        """
        aspect_dir = "aspects"
        self._logger.info(
            "Create directory for aspect files {}".format("aspects"))
        os.makedirs(aspect_dir.encode('utf8'), exist_ok=True)

        if get_conf_property(self._conf["translation options"],
                             "propogate headers to instrumented files"):
            for file in (f for f in self.files if f in additional_lines):
                self.add_headers(
                    file,
                    get_necessary_conf_property(
                        self._conf["translation options"],
                        "additional headers"))

        addictions = dict()
        # Write aspects
        for file in self.files:
            lines = list()

            # Check headers
            if file == self.entry_file:
                if self.entry_file in self._headers:
                    lines.extend([
                        '#include <{}>\n'.format(h)
                        for h in self._collapse_headers_sets(self._headers[
                            self.entry_file])
                    ])
                lines.append("\n")

                for tp in self.types:
                    lines.append(tp.to_string('') + " {\n")
                    for field in list(tp.fields.keys()):
                        lines.append("\t{};\n".format(
                            tp.fields[field].to_string(
                                field, typedef='complex_and_params'),
                            scope={self.entry_file}))
                    lines.append("};\n")
                    lines.append("\n")
            else:
                # Generate function declarations
                self._logger.info('Add aspects to a file {!r}'.format(file))

                # Add headers
                if file in self._headers and self._headers[file]:
                    lines.append('before: file ("$this")\n')
                    lines.append('{\n')
                    lines.extend([
                        '#include <{}>\n'.format(h) for h in
                        self._collapse_headers_sets(self._headers[file])
                    ])
                    lines.append("\n")
                    lines.append("}\n\n")

                # Add model itself
                lines.append('after: file ("$this")\n')
                lines.append('{\n')

            if file in additional_lines and 'declarations' in additional_lines[file] and \
                    len(additional_lines[file]['declarations']) > 0:
                lines.append("\n")
                lines.append("/* EMG aliases */\n")
                lines.extend(additional_lines[file]['declarations'])

            if file in self._function_declarations:
                lines.append("\n")
                lines.append("/* EMG Function declarations */\n")
                for func in self._function_declarations[file].keys():
                    lines.extend(self._function_declarations[file][func])

            if file in self._variables_declarations:
                lines.append("\n")
                lines.append("/* EMG variable declarations */\n")
                for variable in self._variables_declarations[file].keys():
                    lines.extend(self._variables_declarations[file][variable])

            if file in self._variables_initializations and len(
                    self._variables_initializations[file]) > 0:
                lines.append("\n")
                lines.append("/* EMG variable initialization */\n")
                for variable in self._variables_initializations[file].keys():
                    lines.extend(
                        self._variables_initializations[file][variable])

            if file in additional_lines and 'definitions' in additional_lines[file] and \
                    len(additional_lines[file]['definitions']) > 0:
                lines.append("\n")
                lines.append("/* EMG aliases for functions */\n")
                lines.extend(additional_lines[file]['definitions'])

            if file in self._function_definitions and len(
                    self._function_definitions[file]) > 0:
                lines.append("\n")
                lines.append("/* EMG function definitions */\n")
                for func in self._function_definitions[file].keys():
                    lines.extend(self._function_definitions[file][func])
                    lines.append("\n")

            if file != self.entry_file:
                lines.append("}\n\n")

            if file in self._call_aspects and len(
                    self._call_aspects[file]) > 0:
                lines.append("/* EMG kernel function models */\n")
                for aspect in self._call_aspects[file]:
                    lines.extend(aspect.define())
                    lines.append("\n")

            if file != self.entry_file:
                name = "{}.aspect".format(
                    unique_file_name(
                        "aspects/ldv_" +
                        os.path.splitext(os.path.basename(file))[0],
                        '.aspect'))
                path = os.path.relpath(name, self._workdir)
                self._logger.info("Add aspect file {!r}".format(path))
                addictions[file] = path
            else:
                name = self.entry_file
            with open(name, "w", encoding="utf8") as fh:
                fh.writelines(lines)

        return addictions