Example no. 1
    def _graph_fn_reset(self):
        """
        Resets the EnvStepper and stores:
        - current state, current return, current terminal, current internal state (RNN), global time_step
        This is only necessary at the very beginning as the step method itself will take care of resetting the Env
        in between or during stepping runs (depending on terminal signals from the Env).

        Returns:
            SingleDataOp: The assign op that stores the state after the Env reset in `last_state` variable.
        """
        if get_backend() == "tf":
            state_after_reset = self.environment_server.reset_for_env_stepper()
            # Reset current state (support ContainerSpaces as well) via our variable(s)' initializer.
            assigns = [
                self.assign_variable(var, s)
                for var, s in zip(self.current_state.values(),
                                  force_tuple(state_after_reset))
            ]
            # Reset internal-states, current return and whether current state is terminal.
            # Also maybe action and reward.
            assigns.append(
                tf.variables_initializer(
                    var_list=[self.current_terminal] +
                    ([self.current_action] if self.add_action else []) +
                    ([self.current_reward] if self.add_reward else [])))
            if self.has_rnn:
                assigns.append(
                    tf.variables_initializer(
                        var_list=list(self.current_internal_states.values())))

            # Note: self.time_step never gets reset.

            with tf.control_dependencies(assigns):
                return tf.no_op()
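The pattern above (collecting assign ops and returning a `tf.no_op()` under `tf.control_dependencies`) is what lets a single fetched op trigger all the resets. Below is a minimal, self-contained TensorFlow 1.x sketch of that grouping idea; the variables `counter` and `flag` are illustrative and not part of RLgraph.

import tensorflow as tf

counter = tf.Variable(3, dtype=tf.int32)
flag = tf.Variable(True, dtype=tf.bool)

# Group several reset ops behind a single no-op via control dependencies.
assigns = [tf.assign(counter, 0), tf.variables_initializer(var_list=[flag])]
with tf.control_dependencies(assigns):
    reset_op = tf.no_op()

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(reset_op)        # running the no-op also runs all the assigns
    print(sess.run(counter))  # -> 0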
Example no. 2
        def method(self_, *inputs, **kwargs):
            args_ = inputs
            kwargs_ = kwargs
            for i, sub_component in enumerate(self_.sub_components.values()):  # type: Component
                # TODO: python-Components: For now, we call each preprocessor's graph_fn
                #  directly (assuming that inputs are not ContainerSpaces).
                if self_.backend == "python" or get_backend() == "python":
                    graph_fn = getattr(sub_component, "_graph_fn_" + component_api_method_name)
                    # if sub_component.api_methods[component_api_method_name].add_auto_key_as_first_param:
                    #    results = graph_fn("", *args_)  # TODO: kwargs??
                    # else:
                    results = graph_fn(*args_)
                elif get_backend() == "pytorch":
                    # Do NOT convert to tuple; it has to be unpacked again immediately.
                    results = getattr(sub_component, component_api_method_name)(*force_list(args_))
                else:  # if get_backend() == "tf":
                    results = getattr(sub_component, component_api_method_name)(*args_, **kwargs_)

                # Recycle args_, kwargs_ for reuse in next sub-Component's API-method call.
                if isinstance(results, dict):
                    args_ = ()
                    kwargs_ = results
                else:
                    args_ = force_tuple(results)
                    kwargs_ = {}

            if args_ == ():
                return kwargs_
            elif len(args_) == 1:
                return args_[0]
            else:
                return args_
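Stripped of backends and Component bookkeeping, the loop above is a simple pipeline: each stage's return value becomes the next stage's input, with dict returns treated as kwargs and everything else as positional args. A minimal sketch of that recycling pattern (the helper and the toy stages are illustrative only):

def force_tuple(x):
    return x if isinstance(x, tuple) else (x,)

def chain(stages, *inputs, **kwargs):
    args_, kwargs_ = inputs, kwargs
    for stage in stages:
        results = stage(*args_, **kwargs_)
        if isinstance(results, dict):
            args_, kwargs_ = (), results      # dict return -> becomes kwargs
        else:
            args_, kwargs_ = force_tuple(results), {}
    if args_ == ():
        return kwargs_
    return args_[0] if len(args_) == 1 else args_

# Two toy stages: add one, then wrap the doubled result in a dict.
print(chain([lambda x: x + 1, lambda x: {"out": x * 2}], 3))  # -> {'out': 8}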
Example no. 3
        def api_method_wrapper(self, *args, **kwargs):
            api_fn_name = name or re.sub(r'^_graph_fn_', "",
                                         wrapped_func.__name__)
            # Direct evaluation of function.
            if self.execution_mode == "define_by_run":
                type(self).call_count += 1

                start = time.perf_counter()
                # Check with owner if extra args needed.
                if api_fn_name in self.api_methods and self.api_methods[
                        api_fn_name].add_auto_key_as_first_param:
                    output = wrapped_func(self, "", *args, **kwargs)
                else:
                    output = wrapped_func(self, *args, **kwargs)

                # Store runtime for this method.
                type(self).call_times.append(  # Component.call_times
                    (self.name, wrapped_func.__name__,
                     time.perf_counter() - start))
                return output

            api_method_rec = self.api_methods[api_fn_name]

            # Sanity check input args for accidental dict-return values being passed into the next API call as
            # a supposed DataOpRecord.
            dict_args = [
                next(iter(a.values())) for a in args if isinstance(a, dict)
            ]
            if len(dict_args) > 0 and isinstance(dict_args[0], DataOpRecord):
                raise RLGraphError(
                    "One of your input args to API-method '{}.{}()' is a dict of DataOpRecords! This is probably "
                    "coming from a previous call to an API-method (returning a dict) and the DataOpRecord should be "
                    "extracted by string-key and passed into '{}' "
                    "directly.".format(api_method_rec.component.global_scope,
                                       api_fn_name, api_fn_name))
            # Create op-record column to call API method with. Ignore None input params. These should not be sent
            # to the API-method.
            in_op_column = DataOpRecordColumnIntoAPIMethod(
                component=self,
                api_method_rec=api_method_rec,
                args=args,
                kwargs=kwargs)
            # Add the column to the API-method record.
            api_method_rec.in_op_columns.append(in_op_column)

            # Check minimum number of passed args.
            minimum_num_call_params = len(in_op_column.api_method_rec.non_args_kwargs) - \
                len(in_op_column.api_method_rec.default_args)
            if len(in_op_column.op_records) < minimum_num_call_params:
                raise RLGraphAPICallParamError(
                    "Number of call params ({}) for call to API-method '{}' is too low. Needs to be at least {} "
                    "params!".format(len(in_op_column.op_records),
                                     api_method_rec.name,
                                     minimum_num_call_params))

            # Link from incoming op_recs into the new column or populate new column with ops/Spaces (this happens
            # if this call was made from within a graph_fn such that ops and Spaces are already known).
            all_args = [(i, a) for i, a in enumerate(args) if a is not None] + \
                       [(k, v) for k, v in sorted(kwargs.items()) if v is not None]
            flex = None
            build_when_done = False
            for i, (key, value) in enumerate(all_args):
                # Named arg/kwarg -> get input_name from that and peel op_rec.
                if isinstance(key, str):
                    param_name = key
                # Positional arg -> get input_name from input_names list.
                else:
                    slot = key if flex is None else flex
                    if slot >= len(api_method_rec.input_names):
                        raise RLGraphAPICallParamError(
                            "Too many input args given in call to API-method '{}'!"
                            .format(api_method_rec.name))
                    param_name = api_method_rec.input_names[slot]

                # Var-positional arg, attach the actual position to input_name string.
                if self.api_method_inputs.get(param_name, "") == "*flex":
                    if flex is None:
                        flex = i
                    param_name += "[{}]".format(i - flex)
                # Actual kwarg (not in list of api_method_inputs).
                elif api_method_rec.kwargs_name is not None and param_name not in self.api_method_inputs:
                    param_name = api_method_rec.kwargs_name + "[{}]".format(
                        param_name)

                # We are already in building phase (params may be coming from inside graph_fn).
                if self.graph_builder is not None and self.graph_builder.phase == "building":
                    # If Space not stored yet, determine it from op.
                    assert in_op_column.op_records[i].op is not None
                    if in_op_column.op_records[i].space is None:
                        in_op_column.op_records[i].space = get_space_from_op(
                            in_op_column.op_records[i].op)
                    self.api_method_inputs[
                        param_name] = in_op_column.op_records[i].space
                    # Check input-completeness of Component (but not strict as we are only calling API, not a graph_fn).
                    if self.input_complete is False:
                        # Build right after this loop in case more Space information comes in through next args/kwargs.
                        build_when_done = True

                # A DataOpRecord from the meta-graph.
                elif isinstance(value, DataOpRecord):
                    # Create entry with unknown Space if it doesn't exist yet.
                    if param_name not in self.api_method_inputs:
                        self.api_method_inputs[param_name] = None

                # Fixed value (instead of op-record): Store the fixed value directly in the op.
                else:
                    if self.api_method_inputs.get(param_name) is None:
                        self.api_method_inputs[
                            param_name] = in_op_column.op_records[i].space

            if build_when_done:
                # Check Spaces and create variables.
                self.graph_builder.build_component_when_input_complete(self)

            # Regular API-method: Call it here.
            api_fn_args, api_fn_kwargs = in_op_column.get_args_and_kwargs()

            if api_method_rec.is_graph_fn_wrapper is False:
                return_values = wrapped_func(self, *api_fn_args,
                                             **api_fn_kwargs)
            # Wrapped graph_fn: Call it through yet another wrapper.
            else:
                return_values = graph_fn_wrapper(
                    self, wrapped_func, returns,
                    dict(
                        flatten_ops=flatten_ops,
                        split_ops=split_ops,
                        add_auto_key_as_first_param=add_auto_key_as_first_param,
                        requires_variable_completeness=
                        requires_variable_completeness), *api_fn_args,
                    **api_fn_kwargs)

            # Process the results (push into a column).
            out_op_column = DataOpRecordColumnFromAPIMethod(
                component=self,
                api_method_name=api_fn_name,
                args=util.force_tuple(return_values)
                if type(return_values) != dict else None,
                kwargs=return_values if type(return_values) == dict else None)

            # If we already have actual op(s) and Space(s), push them already into the
            # DataOpRecordColumnFromAPIMethod's records.
            if self.graph_builder is not None and self.graph_builder.phase == "building":
                # Link the returned ops to that new out-column.
                for i, rec in enumerate(out_op_column.op_records):
                    out_op_column.op_records[i].op = rec.op
                    out_op_column.op_records[i].space = rec.space
            # And append the new out-column to the api-method-rec.
            api_method_rec.out_op_columns.append(out_op_column)

            # Do we need to return the raw ops or the op-recs?
            # Only need to check if False, otherwise, we return ops directly anyway.
            return_ops = False
            stack = inspect.stack()
            f_locals = stack[1][0].f_locals
            # We may be in a list comprehension, try next frame.
            if f_locals.get(".0"):
                f_locals = stack[2][0].f_locals
            # Check whether the caller component is a parent of this one.
            caller_component = f_locals.get(
                "root", f_locals.get("self_", f_locals.get("self")))

            # Potential call from a lambda.
            if caller_component is None and "fn" in stack[2][0].f_locals:
                # This is the component.
                prev_caller_component = TraceContext.PREV_CALLER
                lambda_obj = stack[2][0].f_locals["fn"]
                if "lambda" in inspect.getsource(lambda_obj):
                    # Try to reconstruct caller by using parent of prior caller.
                    caller_component = prev_caller_component.parent_component

            if caller_component is None:
                raise RLGraphError(
                    "API-method '{}' must have as 1st parameter (the component) either `root` or `self`. Other names "
                    "are not allowed!".format(api_method_rec.name))
            # Not directly called by this method itself (auto-helper-component-API-call).
            # AND call is coming from some caller Component, but that component is not this component
            # OR a parent -> Error.
            elif caller_component is not None and \
                    type(caller_component).__name__ != "MetaGraphBuilder" and \
                    caller_component not in [self] + self.get_parents():
                if not (stack[1][3] == "__init__"
                        and re.search(r'op_records\.py$', stack[1][1])):
                    raise RLGraphError(
                        "The component '{}' is not a child (or grand-child) of the caller ({})! Maybe you forgot to "
                        "add it as a sub-component via `add_components()`.".
                        format(self.global_scope,
                               caller_component.global_scope))

            # Update trace context.
            TraceContext.PREV_CALLER = caller_component

            for stack_item in stack[1:]:  # skip current frame
                # If we hit an API-method call -> return op-recs.
                if stack_item[3] == "api_method_wrapper" and re.search(
                        r'decorators\.py$', stack_item[1]):
                    break
                # If we hit a graph_fn call -> return ops.
                elif stack_item[3] == "run_through_graph_fn" and re.search(
                        r'graph_builder\.py$', stack_item[1]):
                    return_ops = True
                    break

            if return_ops is True:
                if type(return_values) == dict:
                    return {
                        key: value.op
                        for key, value in out_op_column.get_args_and_kwargs()
                        [1].items()
                    }
                else:
                    tuple_returns = tuple(
                        map(lambda x: x.op,
                            out_op_column.get_args_and_kwargs()[0]))
                    return tuple_returns[0] if len(
                        tuple_returns) == 1 else tuple_returns
            # Parent caller is non-graph_fn: Return op-recs.
            else:
                if type(return_values) == dict:
                    return return_values
                else:
                    tuple_returns = out_op_column.get_args_and_kwargs()[0]
                    return tuple_returns[0] if len(
                        tuple_returns) == 1 else tuple_returns
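The `inspect.stack()` logic at the end decides what to hand back: raw ops when the nearest relevant caller is a graph_fn, op-records when it is another API method. A minimal sketch of that stack-walking idea, with illustrative frame names rather than RLgraph's real ones:

import inspect

def called_from(function_names):
    """Return the name of the first enclosing frame that matches, else None."""
    for frame_info in inspect.stack()[1:]:  # skip this frame
        if frame_info.function in function_names:
            return frame_info.function
    return None

def api_like_wrapper():
    caller = called_from({"run_through_graph_fn", "api_method_wrapper"})
    return "raw ops" if caller == "run_through_graph_fn" else "op records"

def run_through_graph_fn():
    return api_like_wrapper()

print(api_like_wrapper())      # -> op records
print(run_through_graph_fn())  # -> raw ops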
Example no. 4
        def method(self_, *inputs, **kwargs):
            # Fold time rank? For now only support 1st arg folding/unfolding.
            original_input = inputs[0]
            if fold_time_rank is True:
                args_ = tuple([self.folder.apply(original_input)] +
                              list(inputs[1:]))
            else:
                # TODO: If only unfolding: Assume for now that 2nd input is the original one (so we can infer
                # TODO: batch/time dims).
                if unfold_time_rank is True:
                    assert len(inputs) >= 2, \
                        "ERROR: In Stack: If unfolding w/o folding, second arg must be the original input!"
                    original_input = inputs[1]
                    args_ = tuple([inputs[0]] + list(inputs[2:]))
                else:
                    args_ = inputs
            kwargs_ = kwargs

            for i, sub_component in enumerate(
                    self_.sub_components.values()):  # type: Component
                if sub_component.scope in [
                        "time-rank-folder_", "time-rank-unfolder_"
                ]:
                    continue
                # TODO: python-Components: For now, we call each preprocessor's graph_fn
                #  directly (assuming that inputs are not ContainerSpaces).
                if self_.backend == "python" or get_backend() == "python":
                    graph_fn = getattr(
                        sub_component,
                        "_graph_fn_" + sub_components_api_method_name)
                    # if sub_component.api_methods[sub_components_api_method_name].add_auto_key_as_first_param:
                    #    results = graph_fn("", *args_)  # TODO: kwargs??
                    # else:
                    results = graph_fn(*args_)
                elif get_backend() == "pytorch":
                    # Do NOT convert to tuple; it has to be unpacked again immediately.
                    results = getattr(
                        sub_component,
                        sub_components_api_method_name)(*force_list(args_))
                else:  # if get_backend() == "tf":
                    results = getattr(sub_component,
                                      sub_components_api_method_name)(
                                          *args_, **kwargs_)

                # Recycle args_, kwargs_ for reuse in next sub-Component's API-method call.
                if isinstance(results, dict):
                    args_ = ()
                    kwargs_ = results
                else:
                    args_ = force_tuple(results)
                    kwargs_ = {}

            if args_ == ():
                # Unfold time rank? For now only support 1st arg folding/unfolding.
                if unfold_time_rank is True:
                    assert len(kwargs_) == 1,\
                        "ERROR: time-rank-unfolding not supported for more than one NN-return value!"
                    key = next(iter(kwargs_))
                    kwargs_ = {
                        key: self.unfolder.apply(kwargs_[key], original_input)
                    }
                return kwargs_
            else:
                # Unfold time rank? For now only support 1st arg folding/unfolding.
                if unfold_time_rank is True:
                    assert len(args_) == 1,\
                        "ERROR: time-rank-unfolding not supported for more than one NN-return value!"
                    args_ = tuple(
                        [self.unfolder.apply(args_[0], original_input)] +
                        list(args_[1 if fold_time_rank is True else 2:]))
                if len(args_) == 1:
                    return args_[0]
                else:
                    return args_
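The fold/unfold handling above boils down to merging the batch and time dimensions before the time-agnostic sub-components run, then restoring them afterwards from the original input's shape. A minimal NumPy sketch of that reshape (shapes are illustrative):

import numpy as np

def fold_time_rank(x):
    # [B, T, ...] -> [B * T, ...]
    return x.reshape((-1,) + x.shape[2:])

def unfold_time_rank(x, original_input):
    # [B * T, ...] -> [B, T, ...], inferring B and T from the original input.
    batch, time = original_input.shape[0], original_input.shape[1]
    return x.reshape((batch, time) + x.shape[1:])

original = np.zeros((4, 10, 8))        # B=4, T=10, feature dim 8
folded = fold_time_rank(original)      # shape (40, 8)
processed = folded * 2.0               # stand-in for the NN stack's work
restored = unfold_time_rank(processed, original)
print(folded.shape, restored.shape)    # (40, 8) (4, 10, 8)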
Example no. 5
def sanity_check_space(space,
                       allowed_types=None,
                       allowed_sub_types=None,
                       non_allowed_types=None,
                       non_allowed_sub_types=None,
                       must_have_batch_rank=None,
                       must_have_time_rank=None,
                       must_have_batch_or_time_rank=False,
                       must_have_categories=None,
                       num_categories=None,
                       must_have_lower_limit=None,
                       must_have_upper_limit=None,
                       rank=None,
                       shape=None):
    """
    Sanity checks a given Space for certain criteria and raises exceptions if they are not met.

    Args:
        space (Space): The Space object to check.
        allowed_types (Optional[List[type]]): A list of types that this Space must be an instance of.

        allowed_sub_types (Optional[List[type]]): For container spaces, a list of sub-types that all
            flattened sub-Spaces must be an instance of.

        non_allowed_types (Optional[List[type]]): A list of types that this Space must not be an instance of.

        non_allowed_sub_types (Optional[List[type]]): For container spaces, a list of sub-types that all
            flattened sub-Spaces must not be an instance of.

        must_have_batch_rank (Optional[bool]): Whether the Space must (True) or must not (False) have the
            `has_batch_rank` property set to True. None, if it doesn't matter.

        must_have_time_rank (Optional[bool]): Whether the Space must (True) or must not (False) have the
            `has_time_rank` property set to True. None, if it doesn't matter.

        must_have_batch_or_time_rank (Optional[bool]): Whether the Space must (True) or must not (False) have either
            the `has_batch_rank` or the `has_time_rank` property set to True.

        must_have_categories (Optional[bool]): For IntBoxes, whether the Space must (True) or must not (False) have
            global bounds with `num_categories` > 0. None, if it doesn't matter.

        num_categories (Optional[int,tuple]): An int or a tuple (min,max) range within which the Space's
            `num_categories` must lie. Only valid for IntBoxes.
            None if it doesn't matter.

        must_have_lower_limit (Optional[bool]): If not None, whether this Space must have a lower limit.
        must_have_upper_limit (Optional[bool]): If not None, whether this Space must have an upper limit.

        rank (Optional[int,tuple]): An int or a tuple (min,max) range within which the Space's rank must lie.
            None if it doesn't matter.

        shape (Optional[tuple[int]]): A tuple of ints specifying the required shape. None if it doesn't matter.

    Raises:
        RLGraphError: Various RLGraphErrors, if any of the conditions is not met.
    """
    flattened_space = space.flatten()

    # Check the types.
    if allowed_types is not None:
        if not isinstance(space, force_tuple(allowed_types)):
            raise RLGraphError(
                "ERROR: Space ({}) is not an instance of {}!".format(
                    space, allowed_types))

    if allowed_sub_types is not None:
        for flat_key, sub_space in flattened_space.items():
            if not isinstance(sub_space, force_tuple(allowed_sub_types)):
                raise RLGraphError(
                    "ERROR: sub-Space '{}' ({}) is not an instance of "
                    "{}!".format(flat_key, sub_space, allowed_sub_types))

    if non_allowed_types is not None:
        if isinstance(space, force_tuple(non_allowed_types)):
            raise RLGraphError(
                "ERROR: Space ({}) must not be an instance of {}!".format(
                    space, non_allowed_types))

    if non_allowed_sub_types is not None:
        for flat_key, sub_space in flattened_space.items():
            if isinstance(sub_space, force_tuple(non_allowed_sub_types)):
                raise RLGraphError(
                    "ERROR: sub-Space '{}' ({}) must not be an instance of "
                    "{}!".format(flat_key, sub_space, non_allowed_sub_types))

    if must_have_batch_or_time_rank is True:
        if space.has_batch_rank is False and space.has_time_rank is False:
            raise RLGraphError(
                "ERROR: Space ({}) does not have a batch- or a time-rank, but must have either one of "
                "these!".format(space))

    if must_have_batch_rank is not None:
        if (space.has_batch_rank is False and must_have_batch_rank is True) or \
                (space.has_batch_rank is not False and must_have_batch_rank is False):
            # Last chance: Check for rank >= 2, that would be ok as well.
            if must_have_batch_rank is True and len(
                    space.get_shape(with_batch_rank=True)) >= 2:
                pass
            # Something is wrong.
            elif space.has_batch_rank is not False:
                raise RLGraphError(
                    "ERROR: Space ({}) has a batch rank, but is not allowed to!"
                    .format(space))
            else:
                raise RLGraphError(
                    "ERROR: Space ({}) does not have a batch rank, but must have one!"
                    .format(space))

    if must_have_time_rank is not None:
        if (space.has_time_rank is False and must_have_time_rank is True) or \
                (space.has_time_rank is not False and must_have_time_rank is False):
            # Last chance: Check for rank >= 3, that would be ok as well.
            if must_have_time_rank is True and len(
                    space.get_shape(with_batch_rank=True,
                                    with_time_rank=True)) >= 2:
                pass
            # Something is wrong.
            elif space.has_time_rank is not False:
                raise RLGraphError(
                    "ERROR: Space ({}) has a time rank, but is not allowed to!"
                    .format(space))
            else:
                raise RLGraphError(
                    "ERROR: Space ({}) does not have a time rank, but must have one!"
                    .format(space))

    if must_have_categories is not None:
        for flat_key, sub_space in flattened_space.items():
            if not isinstance(sub_space, IntBox):
                raise RLGraphError(
                    "ERROR: Space {}({}) is not an IntBox. Only IntBox Spaces can have categories!"
                    .format("" if flat_key == "" else "'{}' ".format(flat_key),
                            space))
            elif sub_space.global_bounds is False:
                raise RLGraphError(
                    "ERROR: Space {}({}) must have categories (globally valid value bounds)!"
                    .format("" if flat_key == "" else "'{}' ".format(flat_key),
                            space))

    if must_have_lower_limit is not None:
        for flat_key, sub_space in flattened_space.items():
            low = sub_space.low
            if must_have_lower_limit is True and (low == -LARGE_INTEGER
                                                  or low == float("-inf")):
                raise RLGraphError(
                    "ERROR: Space {}({}) must have a lower limit, but has none!"
                    .format("" if flat_key == "" else "'{}' ".format(flat_key),
                            space))
            elif must_have_lower_limit is False and (low != -LARGE_INTEGER
                                                     and low != float("-inf")):
                raise RLGraphError(
                    "ERROR: Space {}({}) must not have a lower limit, but has one ({})!"
                    .format("" if flat_key == "" else "'{}' ".format(flat_key),
                            space, low))

    if must_have_upper_limit is not None:
        for flat_key, sub_space in flattened_space.items():
            high = sub_space.high
            if must_have_upper_limit is True and (high != LARGE_INTEGER
                                                  and high != float("inf")):
                raise RLGraphError(
                    "ERROR: Space {}({}) must have an upper limit, but has none!"
                    .format("" if flat_key == "" else "'{}' ".format(flat_key),
                            space))
            elif must_have_upper_limit is False and (high == LARGE_INTEGER
                                                     or high == float("inf")):
                raise RLGraphError(
                    "ERROR: Space {}({}) must not have a upper limit, but has one ({})!"
                    .format("" if flat_key == "" else "'{}' ".format(flat_key),
                            space, high))

    if rank is not None:
        if isinstance(rank, int):
            for flat_key, sub_space in flattened_space.items():
                if sub_space.rank != rank:
                    raise RLGraphError(
                        "ERROR: A Space (flat-key={}) of '{}' has rank {}, but must have rank "
                        "{}!".format(flat_key, space, sub_space.rank, rank))
        else:
            for flat_key, sub_space in flattened_space.items():
                if not ((rank[0] or 0) <= sub_space.rank <=
                        (rank[1] or float("inf"))):
                    raise RLGraphError(
                        "ERROR: A Space (flat-key={}) of '{}' has rank {}, but its rank must be between {} and "
                        "{}!".format(flat_key, space, sub_space.rank, rank[0],
                                     rank[1]))

    if shape is not None:
        for flat_key, sub_space in flattened_space.items():
            if sub_space.shape != shape:
                raise RLGraphError(
                    "ERROR: A Space (flat-key={}) of '{}' has shape {}, but its shape must be "
                    "{}!".format(flat_key, space, sub_space.get_shape(),
                                 shape))

    if num_categories is not None:
        for flat_key, sub_space in flattened_space.items():
            if not isinstance(sub_space, IntBox):
                raise RLGraphError(
                    "ERROR: A Space (flat-key={}) of '{}' is not an IntBox. Only IntBox Spaces can have "
                    "categories!".format(flat_key, space))
            elif isinstance(num_categories, int):
                if sub_space.num_categories != num_categories:
                    raise RLGraphError(
                        "ERROR: A Space (flat-key={}) of '{}' has `num_categories` {}, but must have {}!"
                        .format(flat_key, space, sub_space.num_categories,
                                num_categories))
            elif not ((num_categories[0] or 0) <= sub_space.num_categories <=
                      (num_categories[1] or float("inf"))):
                raise RLGraphError(
                    "ERROR: A Space (flat-key={}) of '{}' has `num_categories` {}, but this value must be between "
                    "{} and {}!".format(flat_key, space,
                                        sub_space.num_categories,
                                        num_categories[0], num_categories[1]))
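Both `rank` and `num_categories` accept either an exact int or a `(min, max)` tuple, where `None` on either side means unbounded on that side (via the `or 0` / `or float("inf")` fallbacks above). A tiny self-contained sketch of that range convention:

def in_range(value, bounds):
    low, high = bounds
    return (low or 0) <= value <= (high or float("inf"))

print(in_range(3, (2, 5)))      # True
print(in_range(3, (None, 2)))   # False: above the upper bound
print(in_range(3, (4, None)))   # False: below the lower bound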
Example no. 6
        def api_method_wrapper(self, *args, **kwargs):
            name_ = name or re.sub(r'^_graph_fn_', "", wrapped_func.__name__)

            return_ops = kwargs.pop("return_ops", False)

            # Direct evaluation of function.
            if self.execution_mode == "define_by_run":
                type(self).call_count += 1

                start = time.perf_counter()
                # Check with owner if extra args needed.
                if name_ in self.api_methods and self.api_methods[name_].add_auto_key_as_first_param:
                    output = wrapped_func(self, "", *args, **kwargs)
                else:
                    output = wrapped_func(self, *args, **kwargs)

                # Store runtime for this method.
                type(self).call_times.append(  # Component.call_times
                    (self.name, wrapped_func.__name__, time.perf_counter() - start)
                )
                return output

            api_method_rec = self.api_methods[name_]

            # Create op-record column to call API method with. Ignore None input params. These should not be sent
            # to the API-method.
            in_op_column = DataOpRecordColumnIntoAPIMethod(
                component=self, api_method_rec=api_method_rec, args=args, kwargs=kwargs
            )
            # Add the column to the API-method record.
            api_method_rec.in_op_columns.append(in_op_column)

            # Check minimum number of passed args.
            minimum_num_call_params = len(in_op_column.api_method_rec.non_args_kwargs) - \
                len(in_op_column.api_method_rec.default_args)
            if len(in_op_column.op_records) < minimum_num_call_params:
                raise RLGraphAPICallParamError(
                    "Number of call params ({}) for call to API-method '{}' is too low. Needs to be at least {} "
                    "params!".format(len(in_op_column.op_records), api_method_rec.name, minimum_num_call_params)
                )

            # Link from incoming op_recs into the new column or populate new column with ops/Spaces (this happens
            # if this call was made from within a graph_fn such that ops and Spaces are already known).
            all_args = [(i, a) for i, a in enumerate(args) if a is not None] + \
                       [(k, v) for k, v in sorted(kwargs.items()) if v is not None]
            flex = None
            for i, (key, value) in enumerate(all_args):
                # Named arg/kwarg -> get input_name from that and peel op_rec.
                if isinstance(key, str):
                    param_name = key
                # Positional arg -> get input_name from input_names list.
                else:
                    slot = key if flex is None else flex
                    if slot >= len(api_method_rec.input_names):
                        raise RLGraphAPICallParamError(
                            "Too many input args given in call to API-method '{}'!".format(api_method_rec.name)
                        )
                    param_name = api_method_rec.input_names[slot]

                # Var-positional arg, attach the actual position to input_name string.
                if self.api_method_inputs[param_name] == "*flex":
                    if flex is None:
                        flex = i
                    param_name += "[{}]".format(i - flex)

                # We are already in building phase (params may be coming from inside graph_fn).
                if self.graph_builder is not None and self.graph_builder.phase == "building":
                    self.api_method_inputs[param_name] = in_op_column.op_records[i].space
                    # Check input-completeness of Component (but not strict as we are only calling API, not a graph_fn).
                    if self.input_complete is False:
                        # Check Spaces and create variables.
                        self.graph_builder.build_component_when_input_complete(self)

                # A DataOpRecord from the meta-graph.
                elif isinstance(value, DataOpRecord):
                    if param_name not in self.api_method_inputs:
                        self.api_method_inputs[param_name] = None

                # Fixed value (instead of op-record): Store the fixed value directly in the op.
                else:
                    #in_op_column.op_records[i].space = get_space_from_op(value)
                    if param_name not in self.api_method_inputs or self.api_method_inputs[param_name] is None:
                        self.api_method_inputs[param_name] = in_op_column.op_records[i].space

            # Regular API-method: Call it here.
            args_, kwargs_ = in_op_column.get_args_and_kwargs()

            if api_method_rec.is_graph_fn_wrapper is False:
                return_values = wrapped_func(self, *args_, **kwargs_)
            # Wrapped graph_fn: Call it through yet another wrapper.
            else:
                return_values = graph_fn_wrapper(
                    self, wrapped_func, returns, dict(
                        flatten_ops=flatten_ops, split_ops=split_ops,
                        add_auto_key_as_first_param=add_auto_key_as_first_param
                    ), *args_, **kwargs_
                )

            # Process the results (push into a column).
            out_op_column = DataOpRecordColumnFromAPIMethod(
                component=self,
                api_method_name=name_,
                args=util.force_tuple(return_values) if type(return_values) != dict else None,
                kwargs=return_values if type(return_values) == dict else None
            )

            # If we already have actual op(s) and Space(s), push them already into the
            # DataOpRecordColumnFromAPIMethod's records.
            if self.graph_builder is not None and self.graph_builder.phase == "building":
                # Link the returned ops to that new out-column.
                for i, rec in enumerate(out_op_column.op_records):
                    out_op_column.op_records[i].op = rec.op
                    out_op_column.op_records[i].space = rec.space
            # And append the new out-column to the api-method-rec.
            api_method_rec.out_op_columns.append(out_op_column)

            # Do we need to return the raw ops or the op-recs?
            # Direct parent caller is a `_graph_fn_...`: Return raw ops.
            stack = inspect.stack()
            if return_ops is True or re.match(r'^_graph_fn_.+$', stack[1][3]):
                if type(return_values) == dict:
                    return {key: value.op for key, value in out_op_column.get_args_and_kwargs()[1].items()}
                else:
                    tuple_ = tuple(map(lambda x: x.op, out_op_column.get_args_and_kwargs()[0]))
                    return tuple_[0] if len(tuple_) == 1 else tuple_
            # Parent caller is non-graph_fn: Return op-recs.
            else:
                if type(return_values) == dict:
                    return return_values
                else:
                    tuple_ = out_op_column.get_args_and_kwargs()[0]
                    return tuple_[0] if len(tuple_) == 1 else tuple_
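The `minimum_num_call_params` check in both wrapper variants is essentially "required params = declared params minus those with defaults". A small sketch of the same idea expressed with `inspect.signature` instead of RLgraph's `api_method_rec` bookkeeping (the example API method is hypothetical):

import inspect

def minimum_num_call_params(func):
    required = 0
    for p in inspect.signature(func).parameters.values():
        # Count only regular/keyword-only params that carry no default.
        if p.kind in (p.POSITIONAL_OR_KEYWORD, p.KEYWORD_ONLY) and p.default is p.empty:
            required += 1
    return required

def api_method(self, states, actions, time_percentage=None, *extra):
    pass

# `self`, `states` and `actions` have no default -> 3 required params.
print(minimum_num_call_params(api_method))  # -> 3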