Example #1
    def _handle_exception_datatrace(self, exception: CompilerException) -> None:
        if not compiler_config.datatrace_enable.get():
            raise exception

        def add_trace(exception: CompilerException) -> bool:
            """
            Add the trace to the deepest possible causes.
            """
            handled: bool = False
            if isinstance(exception, MultiException):
                unset_attrs: Dict[dataflow.AttributeNode, UnsetException] = {
                    cause.instance.instance_node.node().register_attribute(cause.attribute.name): cause
                    for cause in exception.get_causes()
                    if isinstance(cause, UnsetException)
                    if cause.instance is not None
                    if cause.instance.instance_node is not None
                    if cause.attribute is not None
                }
                root_causes: Set[dataflow.AttributeNode] = UnsetRootCauseAnalyzer(unset_attrs.keys()).root_causes()
                for attr, e in unset_attrs.items():
                    if attr not in root_causes:
                        exception.others.remove(e)
                handled = True
            causes: List[CompilerException] = exception.get_causes()
            for cause in causes:
                if add_trace(cause):
                    handled = True
            if not handled:
                trace: Optional[str] = None
                if isinstance(exception, UnsetException):
                    if (
                        exception.instance is not None
                        and exception.instance.instance_node is not None
                        and exception.attribute is not None
                    ):
                        attribute: dataflow.AttributeNode = exception.instance.instance_node.node().register_attribute(
                            exception.attribute.name
                        )
                        if len(list(attribute.assignments())) > 0:
                            trace = DataTraceRenderer.render(
                                dataflow.InstanceAttributeNodeReference(attribute.instance, attribute.name)
                            )
                if isinstance(exception, DoubleSetException):
                    variable: ResultVariable = exception.variable
                    trace = DataTraceRenderer.render(variable.get_dataflow_node())
                elif isinstance(exception, AttributeException):
                    node_ref: Optional[dataflow.InstanceNodeReference] = exception.instance.instance_node
                    assert node_ref is not None
                    trace = DataTraceRenderer.render(
                        dataflow.InstanceAttributeNodeReference(node_ref.top_node(), exception.attribute)
                    )
                if trace is not None:
                    exception.msg += "\ndata trace:\n%s" % trace
                    handled = True
            return handled

        add_trace(exception)
        exception.attach_compile_info(self)
        raise exception
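
This helper walks the exception's cause tree and attaches the data trace only to the deepest causes: a node is annotated only when none of its own causes could be. A minimal, self-contained sketch of that recursion, using a hypothetical TracedError class with a get_causes()/msg interface rather than Inmanta's CompilerException hierarchy:

# Minimal sketch of the "annotate the deepest causes" recursion; TracedError is
# a hypothetical stand-in, not Inmanta's CompilerException hierarchy.
from typing import List, Sequence


class TracedError(Exception):
    def __init__(self, msg: str, causes: Sequence["TracedError"] = ()) -> None:
        super().__init__(msg)
        self.msg = msg
        self.causes = list(causes)

    def get_causes(self) -> List["TracedError"]:
        return self.causes


def add_trace(exc: TracedError, trace: str) -> bool:
    """Attach the trace to the deepest causes only; return True if this subtree was handled."""
    handled = False
    for cause in exc.get_causes():
        if add_trace(cause, trace):
            handled = True
    if not handled:
        exc.msg += "\ndata trace:\n%s" % trace
        handled = True
    return handled
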
Example #2
 def __init__(self, location: Range, value, msg=None):
     if msg is None:
         msg = "Syntax error at token %s" % value
     else:
         msg = "Syntax error: %s" % msg
     CompilerException.__init__(self, msg)
     self.set_location(location)
     self.value = value
Example #3
 def __init__(self,
              location: Range,
              value: object,
              msg: Optional[str] = None) -> None:
     if msg is None:
         msg = "Syntax error at token %s" % value
     else:
         msg = "Syntax error: %s" % msg
     CompilerException.__init__(self, msg)
     self.set_location(location)
     self.value = value
Example #4
    def to_type(self, arg_type, resolver):
        """
        Convert a string representation of a type to a type
        """
        if arg_type is None:
            return None

        if not isinstance(arg_type, str):
            raise CompilerException(
                "bad annotation in plugin %s::%s, expected str but got %s (%s)"
                % (self.ns, self.__class__.__function_name__, type(arg_type),
                   arg_type))

        if arg_type == "any":
            return None

        if arg_type == "list":
            return list

        if arg_type == "expression":
            return None

        if arg_type.endswith("[]"):
            basetypename = arg_type[0:-2]
            basetype = resolver.get_type(basetypename)
            return TypedList(basetype)

        return resolver.get_type(arg_type)
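
The arg_type strings handled here come from plugin signatures, where parameters and return values are annotated with type names written as strings ("any", "list", "expression", a type name, or a type name followed by "[]"). A hedged illustration of such a signature; the plugin body and names are made up for this example and not taken from the Inmanta documentation:

# Illustrative plugin using string type annotations such as "number" and
# "string[]"; the function itself is a made-up example.
from inmanta.plugins import plugin


@plugin
def hostnames(count: "number") -> "string[]":
    """Generate a list of host names."""
    return ["host-%d" % i for i in range(int(count))]
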
Example #5
 def __init__(self, name: LocatableString, toname: LocatableString) -> None:
     DefinitionStatement.__init__(self)
     self.name = str(name)
     if "-" in self.name:
         raise CompilerException(
             "%s is not a valid module name: hyphens are not allowed, please use underscores instead." % (self.name)
         )
     self.toname = str(toname)
     if "-" in self.toname:
         inmanta_warnings.warn(HyphenDeprecationWarning(toname))
Example #6
    def to_type(self, arg_type: Optional[object],
                resolver: Namespace) -> Optional[inmanta_type.Type]:
        """
        Convert a string representation of a type to a type
        """
        if arg_type is None:
            return None

        if not isinstance(arg_type, str):
            raise CompilerException(
                "bad annotation in plugin %s::%s, expected str but got %s (%s)"
                % (self.ns, self.__class__.__function_name__, type(arg_type),
                   arg_type))

        if arg_type == "any":
            return None

        if arg_type == "expression":
            return None

        # quickfix issue #1774
        allowed_element_type: inmanta_type.Type = inmanta_type.Type()
        if arg_type == "list":
            return inmanta_type.TypedList(allowed_element_type)
        if arg_type == "dict":
            return inmanta_type.TypedDict(allowed_element_type)

        plugin_line: Range = Range(self.location.file, self.location.lnr, 1,
                                   self.location.lnr + 1, 1)
        locatable_type: LocatableString = LocatableString(
            arg_type, plugin_line, 0, None)

        # stack of transformations to be applied to the base inmanta_type.Type
        # transformations will be applied right to left
        transformation_stack: List[Callable[[inmanta_type.Type],
                                            inmanta_type.Type]] = []

        if locatable_type.value.endswith("?"):
            locatable_type.value = locatable_type.value[0:-1]
            transformation_stack.append(inmanta_type.NullableType)

        if locatable_type.value.endswith("[]"):
            locatable_type.value = locatable_type.value[0:-2]
            transformation_stack.append(inmanta_type.TypedList)

        return reduce(lambda acc, transform: transform(acc),
                      reversed(transformation_stack),
                      resolver.get_type(locatable_type))
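
The tail of this method parses the trailing "?" and "[]" suffixes into a stack of wrapper constructors and folds them over the resolved base type with reduce, applying them right to left. A self-contained sketch of that fold, with stand-in wrapper classes instead of the inmanta_type classes:

# Self-contained sketch of the suffix-driven transformation stack; Nullable and
# TypedList here are stand-ins, not the inmanta_type classes used above.
from functools import reduce
from typing import Callable, List


class Nullable:
    def __init__(self, element_type: object) -> None:
        self.element_type = element_type

    def __repr__(self) -> str:
        return "Nullable(%r)" % (self.element_type,)


class TypedList:
    def __init__(self, element_type: object) -> None:
        self.element_type = element_type

    def __repr__(self) -> str:
        return "TypedList(%r)" % (self.element_type,)


def parse(type_string: str, resolve: Callable[[str], object]) -> object:
    # collect wrappers outside-in; they are applied right to left below
    stack: List[Callable[[object], object]] = []
    if type_string.endswith("?"):
        type_string = type_string[:-1]
        stack.append(Nullable)
    if type_string.endswith("[]"):
        type_string = type_string[:-2]
        stack.append(TypedList)
    return reduce(lambda acc, transform: transform(acc), reversed(stack), resolve(type_string))


print(parse("string[]?", lambda name: name))  # Nullable(TypedList('string'))
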
Example #7
    def add_resource(self, resource: Resource) -> None:
        """
        Add a new resource to the list of exported resources. When
        commit_resources is called, the entire list of resources is sent
        to the server.

        A resource is a map of attributes. This method validates the id
        of the resource and assigns the current export version to it.
        """
        if resource.version > 0:
            raise Exception(
                "Versions should not be added to resources during model compilation."
            )

        resource.set_version(self._version)

        if resource.id in self._resources:
            raise CompilerException(
                "Resource %s exists more than once in the configuration model"
                % resource.id)

        is_undefined = False
        for unknown in resource.unknowns:
            is_undefined = True
            value = getattr(resource, unknown)
            if value.source is not None and hasattr(value.source, "_type"):
                resource_id = to_id(value.source)
                if resource_id:
                    self._unknown_objects.add(resource_id)

        if is_undefined:
            self._resource_state[
                resource.id.resource_str()] = const.ResourceState.undefined
        else:
            self._resource_state[
                resource.id.resource_str()] = const.ResourceState.available

        self._resources[resource.id] = resource
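
Besides rejecting duplicate ids, the method classifies a resource as undefined when any of its attributes is still an unknown, and as available otherwise. A reduced sketch of that bookkeeping, with plain dictionaries and strings standing in for the exporter's state and const.ResourceState:

# Reduced sketch of the duplicate check and undefined/available classification;
# plain dicts and strings stand in for the exporter's state and const.ResourceState.
from typing import Dict, List


def add_resource(resources: Dict[str, dict], states: Dict[str, str],
                 resource_id: str, unknowns: List[str]) -> None:
    if resource_id in resources:
        raise Exception("Resource %s exists more than once in the configuration model" % resource_id)
    states[resource_id] = "undefined" if unknowns else "available"
    resources[resource_id] = {"id": resource_id, "unknowns": unknowns}
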
Example #8
    def run(self, compiler: "Compiler", statements: Sequence["Statement"], blocks: Sequence["BasicBlock"]) -> bool:
        """
        Evaluate the current graph
        """
        prev = time.time()
        start = prev

        # first evaluate all definitions, this should be done in one iteration
        self.define_types(compiler, statements, blocks)
        attributes_with_precedence_rule: List[RelationAttribute] = self._set_precedence_rules_on_relationship_attributes()

        # give all loose blocks an empty ExecutionContext (XC)
        # register the XCs as scopes
        # all named scopes are now present

        for block in blocks:
            res = Resolver(block.namespace, self.track_dataflow)
            xc = ExecutionContext(block, res)
            block.context = xc
            block.namespace.scope = xc
            block.warn_shadowed_variables()

        # setup queues
        # queue for runnable items
        basequeue: Deque[Waiter] = deque()
        # queue for RV's that are delayed
        waitqueue = PrioritisedDelayedResultVariableQueue(attributes_with_precedence_rule)
        # queue for RV's that are delayed and had no effective waiters when they were first in the waitqueue
        zerowaiters: Deque[DelayedResultVariable[Any]] = deque()
        # queue containing everything, to find hanging statements
        all_statements: Set[Waiter] = set()

        # Wrap in object to pass around
        queue = QueueScheduler(compiler, basequeue, waitqueue, self.types, all_statements)

        # emit all top level statements
        for block in blocks:
            block.context.emit(queue.for_tracker(ModuleTracker(block)))

        # start an evaluation loop
        i = 0
        count = 0
        max_iterations = int(os.getenv("INMANTA_MAX_ITERATIONS", MAX_ITERATIONS))
        while i < max_iterations:
            now = time.time()

            # check if we can stop the execution
            if len(basequeue) == 0 and len(waitqueue) == 0 and len(zerowaiters) == 0:
                break
            else:
                i += 1

            LOGGER.debug(
                "Iteration %d (e: %d, w: %d, p: %d, done: %d, time: %f)",
                i,
                len(basequeue),
                len(waitqueue),
                len(zerowaiters),
                count,
                now - prev,
            )
            prev = now

            # evaluate all that is ready
            while len(basequeue) > 0:
                next = basequeue.popleft()
                try:
                    next.execute()
                    all_statements.discard(next)
                    count = count + 1
                except UnsetException as e:
                    # some statements don't know all their dependencies up front,...
                    next.requeue_with_additional_requires(object(), e.get_result_variable())

            # all safe stmts are done
            progress = False
            assert not basequeue

            # find a RV that has waiters, so freezing creates progress
            while len(waitqueue) > 0 and not progress:
                next_rv = waitqueue.popleft()
                if next_rv.hasValue:
                    # already froze itself
                    continue
                if next_rv.get_progress_potential() <= 0:
                    zerowaiters.append(next_rv)
                elif next_rv.get_waiting_providers() > 0:
                    # definitely not done
                    # drop from queue
                    # will requeue when value is added
                    next_rv.unqueue()
                else:
                    # freeze it and go to next iteration, new statements will be on the basequeue
                    LOGGER.log(LOG_LEVEL_TRACE, "Freezing %s", next_rv)
                    next_rv.freeze()
                    progress = True

            # no waiters in waitqueue,...
            # see if any zerowaiters have gained waiters
            if not progress:
                zerowaiters_tmp = [w for w in zerowaiters if not w.hasValue]
                waitqueue.replace(w for w in zerowaiters_tmp if w.get_progress_potential() > 0)
                zerowaiters = deque(w for w in zerowaiters_tmp if w.get_progress_potential() <= 0)
                while len(waitqueue) > 0 and not progress:
                    LOGGER.debug("Moved zerowaiters to waiters")
                    next_rv = waitqueue.popleft()
                    if next_rv.get_waiting_providers() > 0:
                        next_rv.unqueue()
                    else:
                        LOGGER.log(LOG_LEVEL_TRACE, "Freezing %s", next_rv)
                        next_rv.freeze()
                        progress = True

            if not progress:
                # nothing works anymore, attempt to unfreeze wait cycle
                progress = self.find_wait_cycle(attributes_with_precedence_rule, queue.allwaiters)

            if not progress:
                # no one waiting anymore, all done, freeze and finish
                LOGGER.debug("Finishing statements with no waiters")

                while len(zerowaiters) > 0:
                    next_rv = zerowaiters.pop()
                    next_rv.freeze()

        now = time.time()
        LOGGER.info(
            "Iteration %d (e: %d, w: %d, p: %d, done: %d, time: %f)",
            i,
            len(basequeue),
            len(waitqueue),
            len(zerowaiters),
            count,
            now - prev,
        )

        if i == max_iterations:
            raise CompilerException(f"Could not complete model, max_iterations {max_iterations} reached.")

        excns: List[CompilerException] = []
        self.freeze_all(excns)

        now = time.time()
        LOGGER.info(
            "Total compilation time %f",
            now - start,
        )

        if len(excns) == 0:
            pass
        elif len(excns) == 1:
            raise excns[0]
        else:
            raise MultiException(excns)

        if all_statements:
            stmt = None
            for st in all_statements:
                if isinstance(st, ExecutionUnit):
                    stmt = st
                    break

            assert stmt is not None

            raise RuntimeException(stmt.expression, "not all statements executed %s" % all_statements)

        return True
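
Structurally, the loop drains a queue of runnable work and, when nothing is runnable, freezes one delayed result variable that still has waiters so that new work becomes available, repeating until neither step makes progress or the iteration limit is hit. A stripped-down sketch of that two-queue fixpoint, using generic callables rather than the Inmanta scheduler classes:

# Stripped-down sketch of the two-queue fixpoint loop: drain the runnables,
# then "freeze" one delayed item to unblock more work. Generic stand-ins only.
from collections import deque
from typing import Callable, Deque, List


def evaluate(runnable: Deque[Callable[[], None]],
             delayed: List[Callable[[], List[Callable[[], None]]]],
             max_iterations: int = 10000) -> None:
    iteration = 0
    while runnable or delayed:
        iteration += 1
        if iteration > max_iterations:
            raise RuntimeError("Could not complete model, max_iterations %d reached." % max_iterations)
        # evaluate everything that is ready
        while runnable:
            runnable.popleft()()
        # nothing runnable any more: freeze one delayed item; freezing may emit new work
        if delayed:
            runnable.extend(delayed.pop(0)())


done: List[str] = []
work: Deque[Callable[[], None]] = deque([lambda: done.append("a")])
evaluate(work, [lambda: [lambda: done.append("b")]])
assert done == ["a", "b"]
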
Example #9
    def update(self,
               module: Optional[str] = None,
               project: Optional[Project] = None) -> None:
        """
        Update all modules to the latest version compatible with the given module version constraints.
        """

        if project is None:
            # rename var to make mypy happy
            my_project = self.get_project(load=False)
        else:
            my_project = project

        def do_update(specs: "Dict[str, List[InmantaModuleRequirement]]",
                      modules: List[str]) -> None:
            v2_modules = {
                module
                for module in modules
                if my_project.module_source.path_for(module) is not None
            }

            v2_python_specs: List[Requirement] = [
                ModuleV2Source.get_python_package_requirement(module_spec)
                for module, module_specs in specs.items()
                for module_spec in module_specs if module in v2_modules
            ]
            if v2_python_specs:
                env.process_env.install_from_index(
                    v2_python_specs,
                    my_project.module_source.urls,
                    upgrade=True,
                    allow_pre_releases=my_project.install_mode !=
                    InstallMode.release,
                )

            for v1_module in set(modules).difference(v2_modules):
                spec = specs.get(v1_module, [])
                try:
                    ModuleV1.update(my_project,
                                    v1_module,
                                    spec,
                                    install_mode=my_project.install_mode)
                except Exception:
                    LOGGER.exception("Failed to update module %s", v1_module)

            # Load the newly installed modules into the modules cache
            my_project.install_modules(bypass_module_cache=True,
                                       update_dependencies=True)

        attempt = 0
        done = False
        last_failure = None

        while not done and attempt < MAX_UPDATE_ATTEMPT:
            LOGGER.info("Performing update attempt %d of %d", attempt + 1,
                        MAX_UPDATE_ATTEMPT)
            try:
                loaded_mods_pre_update = {
                    module_name: mod.version
                    for module_name, mod in my_project.modules.items()
                }

                # get AST
                my_project.load_module_recursive(install=True)
                # get current full set of requirements
                specs: Dict[str, List[
                    InmantaModuleRequirement]] = my_project.collect_imported_requirements(
                    )
                if module is None:
                    modules = list(specs.keys())
                else:
                    modules = [module]
                do_update(specs, modules)

                loaded_mods_post_update = {
                    module_name: mod.version
                    for module_name, mod in my_project.modules.items()
                }
                if loaded_mods_pre_update == loaded_mods_post_update:
                    # No changes => state has converged
                    done = True
                else:
                    # New modules were downloaded or existing modules were updated to a new version. Perform another pass to
                    # make sure that all dependencies, defined in these new modules, are taken into account.
                    last_failure = CompilerException(
                        "Module update did not converge")
            except CompilerException as e:
                last_failure = e
                # model is corrupt
                LOGGER.info(
                    "The model is not currently in an executable state, performing intermediate updates",
                    stack_info=True)
                # get all specs from all already loaded modules
                specs = my_project.collect_requirements()

                if module is None:
                    # get all loaded/partly loaded modules
                    modules = list(my_project.modules.keys())
                else:
                    modules = [module]
                do_update(specs, modules)
            attempt += 1

        if last_failure is not None and not done:
            raise last_failure
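
The outer loop retries the update until the set of loaded module versions stops changing between passes or MAX_UPDATE_ATTEMPT is reached, remembering the last failure so it can be re-raised if convergence never happens. A generic sketch of that converge-or-give-up pattern, with snapshot() and do_update() as hypothetical stand-ins for the version bookkeeping above:

# Generic converge-or-give-up retry loop; snapshot() and do_update() are
# hypothetical callables standing in for the module/version bookkeeping above.
from typing import Callable, Dict, Optional

MAX_UPDATE_ATTEMPT = 5


def update_until_converged(snapshot: Callable[[], Dict[str, str]],
                           do_update: Callable[[], None]) -> None:
    attempt = 0
    done = False
    last_failure: Optional[Exception] = None
    while not done and attempt < MAX_UPDATE_ATTEMPT:
        before = snapshot()
        try:
            do_update()
            if snapshot() == before:
                done = True  # no changes: the module set has converged
            else:
                last_failure = Exception("Module update did not converge")
        except Exception as e:
            last_failure = e
        attempt += 1
    if last_failure is not None and not done:
        raise last_failure
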