Example #1
    def parse_event_specification(self, source, raw):
        """
        Parse process descriptions and create corresponding objects to populate the collection.

        :param source: Source code collection.
        :param raw: Dictionary with content of JSON file.
        :return: ProcessCollection
        """
        collection = ProcessCollection()

        self.logger.info(
            "Import processes from provided event categories specification")
        raise_exc = []
        if "functions models" in raw:
            self.logger.info("Import processes from 'kernel model'")
            for name_list, process_desc in raw["functions models"].items():
                names = name_list.split(", ")
                for name in names:
                    self.logger.debug(
                        "Import process which models {!r}".format(name))

                    # Set some default values
                    category = "functions models"
                    try:
                        process = self._import_process(source, name, category,
                                                       process_desc)
                        collection.models[str(process)] = process
                    except Exception as err:
                        self.logger.warning("Cannot parse {!r}: {}".format(
                            name, str(err)))
                        raise_exc.append(name)
        if "environment processes" in raw:
            self.logger.info("Import processes from 'environment processes'")
            for name, process_desc in raw["environment processes"].items():
                self.logger.debug(
                    "Import environment process {!r}".format(name))

                # This simplifies parsing of event specifications for Linux, but it could be
                # avoided by adding categories to the corresponding specifications.
                if '/' in name:
                    category, name = name.split('/')
                else:
                    category = None

                try:
                    process = self._import_process(source, name, category,
                                                   process_desc)
                    # Processes are stored by their string representation, so check that key
                    if str(process) in collection.environment:
                        raise ValueError(
                            "Process {!r} is already imported into the intermediate environment model"
                            .format(str(process)))
                    collection.environment[str(process)] = process
                except Exception as err:
                    self.logger.warning("Cannot parse {!r}: {}".format(
                        name, str(err)))
                    raise_exc.append(name)

        if "main process" in raw and isinstance(raw["main process"], dict):
            self.logger.info("Import main process")
            try:
                entry_process = self._import_process(source, "entry",
                                                     "entry process",
                                                     raw["main process"])
                collection.entry = entry_process
            except Exception as err:
                self.logger.warning("Cannot main process: {}".format(str(err)))
                raise_exc.append('entry')
        else:
            collection.entry = None

        if raise_exc:
            raise RuntimeError(
                "Some specifications cannot be parsed, inspect log to find problems with: {}"
                .format(', '.join(raise_exc)))

        return collection
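
The method above follows a collect-then-raise pattern: every entry is parsed in its own try/except, failures are logged and remembered, and a single RuntimeError listing all failed names is raised at the end. Below is a minimal self-contained sketch of that pattern; parse_one, parse_all and the sample entries are simplified stand-ins for illustration and are not part of the original API.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("events")


def parse_one(name, desc):
    # Stand-in for self._import_process: reject descriptions without actions
    if "actions" not in desc:
        raise ValueError("missing 'actions'")
    return {"name": name, **desc}


def parse_all(raw):
    parsed, failed = {}, []
    for name, desc in raw.items():
        try:
            parsed[name] = parse_one(name, desc)
        except Exception as err:
            # Keep going: remember the broken entry and report all failures at once
            logger.warning("Cannot parse %r: %s", name, err)
            failed.append(name)
    if failed:
        raise RuntimeError(
            "Some specifications cannot be parsed: {}".format(", ".join(failed)))
    return parsed


try:
    parse_all({"usb/probe": {"actions": {}}, "broken": {}})
except RuntimeError as err:
    logger.error(err)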
Example #2
    def _factory_iterator(self, processes_to_scenarios: dict,
                          model: ProcessCollection):
        selector = self.strategy(self.logger, self.conf,
                                 processes_to_scenarios, model)
        for batch, related_process in selector():
            new = ProcessCollection(batch.name)
            new.attributes = copy.deepcopy(batch.attributes)
            original_name = batch.attributed_name

            # Sanity check: a model may contain at most one scenario with a savepoint
            sp_scenarios = {
                s
                for s in batch.non_models
                if isinstance(s, Scenario) and s.savepoint
            }
            assert len(sp_scenarios) < 2

            # Set entry process
            if related_process and related_process in batch.environment and batch.environment[related_process] and\
                    batch.environment[related_process].savepoint:
                # There is an environment process with a savepoint
                new.entry = self._process_from_scenario(
                    batch.environment[related_process],
                    model.environment[related_process])
                del batch.environment[related_process]
                new.rename_notion(related_process, str(new.entry))

                # Move declarations and definitions
                if model.entry:
                    new.extend_model_name(str(model.entry), 'Removed')
                    new.copy_declarations_to_init(model.entry)
            elif batch.entry:
                # The entry process has a scenario
                new.entry = self._process_from_scenario(
                    batch.entry, model.entry)
            elif model.entry:
                # Keep as is
                new.entry = self._process_copy(model.entry)
            else:
                new.entry = None

            # Add placeholders for function models without scenarios so that the originals are copied below
            for function_model in model.models:
                if not batch.models.get(function_model):
                    batch.models[function_model] = None

            for attr in ('models', 'environment'):
                batch_collection = getattr(batch, attr)
                collection = getattr(new, attr)
                for key in getattr(model, attr):
                    if key in batch_collection:
                        if batch_collection[key]:
                            collection[key] = self._process_from_scenario(
                                batch_collection[key],
                                getattr(model, attr)[key])
                        else:
                            collection[key] = self._process_copy(
                                getattr(model, attr)[key])
                    else:
                        self.logger.debug(
                            f"Skip process '{key}' in '{new.attributed_name}'")
                        new.copy_declarations_to_init(
                            getattr(model, attr)[key])

            new.establish_peers()
            self._remove_unused_processes(new)

            if new.consistent:
                if new.attributed_name != original_name:
                    self.logger.info("Reduced batch {!r} to {!r}".format(
                        original_name, new.attributed_name))

                # Add missing attributes to the model
                added_attributes = []
                for process_name in model.non_models:
                    if process_name not in new.attributes:
                        added_attributes.append(process_name)
                        new.extend_model_name(process_name, 'base')
                if added_attributes:
                    self.logger.debug(
                        f"Add to model '{new.attributed_name}' the following "
                        f"attributes: '{', '.join(added_attributes)}'")

                yield new
            else:
                self.logger.debug(
                    f"Obtained model '{new.attributed_name}' is inconsistent")