def evaluate_parameters_for_scenario(base_params: List[Parameter],
                                     scenario_params: Dict[str, str]):
    """
    Obtain a dictionary (parameter -> value), where parameter is a string and value is a literal: number, boolean,
    category or string.

    Start from the base parameters then overwrite with the values in the current scenario.

    Parameters may depend on other parameters, so this has to be considered before evaluation.
    No cycles are allowed in the dependencies, i.e., if P2 depends on P1, P1 cannot depend on P2.
    To analyze this, expressions are first parsed, extracting which parameters appear in each of them. Then a
    dependency graph is built from this information. Finally, a cycle-detection algorithm is run.

    :param base_params: List of Parameter objects, possibly carrying a default value (an expression or constant)
    :param scenario_params: Dictionary of parameter name -> expression (or constant) overriding the defaults
    :return: Dictionary of parameter name -> evaluated value
    """
    # Create dictionary without evaluation
    result_params = create_dictionary()
    result_params.update(
        {p.name: p.default_value
         for p in base_params if p.default_value})

    # Overwrite with scenario expressions or constants
    result_params.update(scenario_params)

    state = State()
    known_params = create_dictionary()
    unknown_params = create_dictionary()

    # Now, evaluate ALL expressions
    for param, expression in result_params.items():
        value, ast, params, issues = evaluate_numeric_expression_with_parameters(
            expression, state)
        if value is None:  # It is not a constant; store the parameters on which it depends
            if case_sensitive:
                unknown_params[param] = (ast, set(params))
            else:
                unknown_params[param] = (ast, set([p.lower() for p in params]))
        else:  # It is a constant, store it
            result_params[param] = value  # Overwrite
            known_params[param] = value

    cycles = get_circular_dependencies(unknown_params)
    if len(cycles) > 0:
        raise Exception(
            f"Parameters cannot have circular dependencies. {len(cycles)} cycles were detected: "
            f"{':: '.join(cycles)}")

    # Initialize state with known parameters
    state.update(known_params)

    # Loop until no new parameters can be evaluated
    previous_len_unknown_params = len(unknown_params) + 1
    while len(unknown_params) < previous_len_unknown_params:
        previous_len_unknown_params = len(unknown_params)

        for param in list(
                unknown_params
        ):  # A list(...) is used because the dictionary can be modified inside
            ast, params = unknown_params[param]
            if params.issubset(known_params):
                value, _, _, issues = evaluate_numeric_expression_with_parameters(
                    ast, state)
                if value is None:
                    raise Exception(
                        f"It should be possible to evaluate the parameter '{param}'. "
                        f"Issues: {', '.join(issues)}")
                else:
                    del unknown_params[param]
                    result_params[param] = value
                    state.set(param, value)

    if len(unknown_params) > 0:
        raise Exception(
            f"Could not evaluate the following parameters: {', '.join(unknown_params)}"
        )

    return result_params
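

# Illustrative sketch only (hypothetical helper, NOT the project's get_circular_dependencies).
# The docstring above describes extracting, for each parameter, the set of parameters its
# expression references, building a dependency graph, and then searching for cycles. Assuming
# "unknown_params" maps parameter name -> (ast, set of referenced parameter names), as built
# above, and reusing the module's "networkx as nx" import, that idea could look like this:
def _circular_dependencies_sketch(unknown_params: Dict[str, Tuple[Any, Set[str]]]) -> List[List[str]]:
    g = nx.DiGraph()
    for param, (_, dependencies) in unknown_params.items():
        for dependency in dependencies:
            # Edge direction: "param" can only be evaluated after "dependency"
            g.add_edge(dependency, param)
    # nx.simple_cycles yields each cycle as a list of the parameter names forming the loop
    return list(nx.simple_cycles(g))

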
def set_update_scales_graph(
        graph: nx.DiGraph, params: Dict[str, Any],
        beginning_values: Dict[Factor, Tuple[Any,
                                             FactorQuantitativeObservation]]):
    """
    For a scaling graph:
     - set both the parameters and the values of Interfaces beginning scale-chains
     - update the scale chains accordingly

    :param graph: Graph with all scale chains
    :param params: Parameters to apply to values in edges and nodes of the graph
    :param beginning_values: Expressions (plus "unit") for beginning nodes of the graph
    :return: Nothing (the graph is updated in-place)
    """

    # Set of all nodes.
    all_interfaces = set(graph.nodes)
    # Set of "scale beginning" nodes.
    beginning_interfaces = get_scale_beginning_interfaces(graph)
    # Set of "scale following" nodes
    following_interfaces = all_interfaces.difference(beginning_interfaces)

    # Set of nodes with value
    interfaces_with_value = set(beginning_values.keys())

    # Check that all beginning interfaces have been defined (have a value)
    mandatory_to_define_all_beginning_interfaces = False

    if mandatory_to_define_all_beginning_interfaces:
        interfaces_which_should_have_a_value = beginning_interfaces.difference(
            interfaces_with_value)
        if interfaces_which_should_have_a_value:
            s = ", ".join([
                i.processor.name + ":" + i.name
                for i in interfaces_which_should_have_a_value
            ])
            raise Exception(
                "Not all scale beginning Interfaces have been assigned a value: "
                + s)

    # "following" interfaces in scale-chains should not have a value
    interfaces_which_should_not_have_a_value = following_interfaces.intersection(
        interfaces_with_value)

    if interfaces_which_should_not_have_a_value:
        s = ", ".join([
            i.processor.name + ":" + i.name
            for i in interfaces_which_should_not_have_a_value
        ])
        raise Exception(
            "Interfaces in scale chains cannot have assigned values: " + s)

    # Now expressions. First, prepare "state"
    state = State()
    state.update(params)

    # Evaluate (AST) all expressions from the INTERSECTION
    defined_beginning_interfaces = beginning_interfaces.intersection(
        interfaces_with_value)
    for i in defined_beginning_interfaces:
        expression = beginning_values[i][0]
        unit = ureg(beginning_values[i][1].attributes["unit"])
        v, _, _, issues = evaluate_numeric_expression_with_parameters(
            expression, state)
        if v is None:
            raise Exception(
                f"Could not evaluate expression '{expression}': {', '.join(issues)}"
            )
        else:
            graph.nodes[i]["value"] = v * unit

    # Evaluate all edges (the result variable must not shadow the edge's destination node "v")
    for u, v, data in graph.edges(data=True):
        ast = data["ast"]
        if ast:
            value, _, _, issues = evaluate_numeric_expression_with_parameters(
                ast, state)
            if value is None:
                raise Exception(
                    f"Could not evaluate edge scale expression '{ast}' for edge ({u.name}->{v.name}): {', '.join(issues)}"
                )
            else:
                graph.edges[u, v]["value"] = value

    # Now, compute values in nodes, propagating from each node to its successors along the scale chains
    def compute_scaled_nodes(nodes):
        for i in nodes:
            tmp = []
            for suc in graph.successors(i):
                # TODO Consider unit conversions, or is the unit of the predecessor inherited?
                graph.nodes[suc]["value"] = graph.nodes[i][
                    "value"] * graph.edges[i, suc]["value"]
                tmp.append(suc)
            compute_scaled_nodes(tmp)

    compute_scaled_nodes(defined_beginning_interfaces)
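

# Illustrative sketch only: the propagation performed by compute_scaled_nodes above, shown on a
# plain networkx DiGraph with literal numbers instead of Interface objects and pint quantities.
# Node names and scale factors here are made up for the example.
def _scale_chain_propagation_sketch():
    g = nx.DiGraph()
    g.add_edge("crop", "seeds", value=0.1)     # seeds   = 0.1 * crop
    g.add_edge("crop", "harvest", value=0.9)   # harvest = 0.9 * crop
    g.add_edge("harvest", "straw", value=0.4)  # straw   = 0.4 * harvest
    g.nodes["crop"]["value"] = 1000            # "crop" is the scale-beginning node

    pending = ["crop"]
    while pending:  # Iterative propagation along the scale chains (equivalent to the recursion above)
        node = pending.pop()
        for successor in g.successors(node):
            g.nodes[successor]["value"] = g.nodes[node]["value"] * g.edges[node, successor]["value"]
            pending.append(successor)
    # -> {'crop': 1000, 'seeds': 100.0, 'harvest': 900.0, 'straw': 360.0}
    return {n: g.nodes[n]["value"] for n in g.nodes}

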
def flow_graph_solver(global_parameters: List[Parameter],
                      problem_statement: ProblemStatement,
                      input_systems: Dict[str, Set[Processor]], state: State):
    """
    * First scales have to be solved
    * Second direct flows
    * Third conversions of flows

    Once flows have been found, Indicators have to be gathered.

    :param global_parameters: Parameters including the default value (if defined)
    :param problem_statement: ProblemStatement object, with scenarios (parameters changing the default)
                              and parameters for the solver
    :param state: State with everything
    :param input_systems: A dictionary of the different systems to be solved
    :return: Issue[]
    """
    class Edge(NamedTuple):
        src: Factor
        dst: Factor
        weight: Optional[str]

    def add_edges(edges: List[Edge]):
        for src, dst, weight in edges:
            src_name = get_interface_name(src, glb_idx)
            dst_name = get_interface_name(dst, glb_idx)
            if "Archetype" in [
                    src.processor.instance_or_archetype,
                    dst.processor.instance_or_archetype
            ]:
                print(
                    f"WARNING: excluding relation from '{src_name}' to '{dst_name}' because of Archetype processor"
                )
            else:
                relations.add_edge(src_name, dst_name, weight=weight)

    glb_idx, _, _, _, _ = get_case_study_registry_objects(state)

    # Get all interface observations. Also resolve expressions without parameters. Cannot resolve expressions
    # depending only on global parameters because some of them can be overridden by scenario parameters.
    time_observations_absolute, time_observations_relative = get_observations_by_time(
        glb_idx)

    if len(time_observations_absolute) == 0:
        raise Exception(
            "No absolute observations have been found. The solver has nothing to solve."
        )

    relations = nx.DiGraph()

    # Add Interfaces -Flow- relations (time independent)
    add_edges([
        Edge(r.source_factor, r.target_factor, r.weight) for r in glb_idx.get(
            FactorsRelationDirectedFlowObservation.partial_key())
    ])

    # Add Processors -Scale- relations (time independent)
    add_edges([
        Edge(r.origin, r.destination, r.quantity)
        for r in glb_idx.get(FactorsRelationScaleObservation.partial_key())
    ])

    # TODO Expand flow graph with it2it transforms
    # relations_scale_it2it = glb_idx.get(FactorTypesRelationUnidirectionalLinearTransformObservation.partial_key())

    # First pass to resolve weight expressions: only expressions without parameters can be solved
    for _, _, data in relations.edges(data=True):
        expression = data["weight"]
        if expression:
            value, ast, _, _ = evaluate_numeric_expression_with_parameters(
                expression, state)
            data["weight"] = ifnull(value, ast)

    for scenario_idx, (scenario_name, scenario_params) in enumerate(
            problem_statement.scenarios.items()):

        print(f"********************* SCENARIO: {scenario_name}")

        scenario_state = State()
        scenario_combined_params = evaluate_parameters_for_scenario(
            global_parameters, scenario_params)
        scenario_state.update(scenario_combined_params)

        for time_period, observations in time_observations_absolute.items():

            print(f"********************* TIME PERIOD: {time_period}")

            # Final values are taken from "observations" that need to be computed
            graph_params = {}

            # Second and last pass to resolve observation expressions with parameters
            for expression, obs in observations:
                interface_name = get_interface_name(obs.factor, glb_idx)
                if interface_name not in relations.nodes:
                    print(
                        f"WARNING: observation at interface '{interface_name}' is not taken into account."
                    )
                else:
                    value, ast, _, issues = evaluate_numeric_expression_with_parameters(
                        expression, scenario_state)
                    if value is None:
                        raise Exception(
                            f"Cannot evaluate expression '{expression}' for observation at "
                            f"interface '{interface_name}'. Issues: {', '.join(issues)}"
                        )
                    graph_params[interface_name] = value

            assert (graph_params is not None)

            # Add Processors internal -RelativeTo- relations (time dependent)
            # Transform relative observations into graph edges
            for expression, obs in time_observations_relative[time_period]:
                relations.add_edge(get_interface_name(obs.relative_factor,
                                                      glb_idx),
                                   get_interface_name(obs.factor, glb_idx),
                                   weight=expression)

            # Second and last pass to resolve weight expressions: expressions with parameters can be solved
            for u, v, data in relations.edges(data=True):
                expression = data["weight"]
                if expression:
                    value, ast, _, issues = evaluate_numeric_expression_with_parameters(
                        expression, scenario_state)
                    if value is None:
                        raise Exception(
                            f"Cannot evaluate expression '{expression}' for weight "
                            f"from interface '{u}' to interface '{v}'. Issues: {', '.join(issues)}"
                        )
                    data["weight"] = value

            # ----------------------------------------------------

            if time_period == '2008':
                for component in nx.weakly_connected_components(relations):
                    nx.draw_kamada_kawai(relations.subgraph(component),
                                         with_labels=True)
                    plt.show()

            flow_graph = FlowGraph(relations)
            comp_graph, issues = flow_graph.get_computation_graph()

            for issue in issues:
                print(issue)

            print(f"****** NODES: {comp_graph.nodes}")

            # ----------------------------------------------------

            # Obtain nodes without a value
            compute_nodes = [
                n for n in comp_graph.nodes if not graph_params.get(n)
            ]

            # Compute the missing information with the computation graph
            if len(compute_nodes) == 0:
                print("All nodes have a value. Nothing to solve.")
                return []

            print(f"****** UNKNOWN NODES: {compute_nodes}")
            print(f"****** PARAMS: {graph_params}")

            conflicts = comp_graph.compute_param_conflicts(
                set(graph_params.keys()))

            for s, (param, values) in enumerate(conflicts.items()):
                print(f"Conflict {s + 1}: {param} -> {values}")

            combinations = ComputationGraph.compute_param_combinations(
                conflicts)

            for s, combination in enumerate(combinations):
                print(f"Combination {s}: {combination}")

                filtered_params = {
                    k: v
                    for k, v in graph_params.items() if k in combination
                }
                results, _ = comp_graph.compute_values(compute_nodes,
                                                       filtered_params)

                results_with_values = {k: v for k, v in results.items() if v}
                print(f'  results_with_values={results_with_values}')

                # TODO: work with "part_of_graph"
                #  - Params: graph_params + results
                #  - Compute conflicts, combinations
                #  - For each combination "compute_values"

        # TODO INDICATORS

    # ----------------------------------------------------
    # ACCOUNTING PER SYSTEM

    for system in input_systems:

        # Handle Processors -PartOf- relations
        proc_hierarchy = nx.DiGraph()
        for relation in glb_idx.get(
                ProcessorsRelationPartOfObservation.partial_key(
                )):  # type: ProcessorsRelationPartOfObservation
            if relation.parent_processor.instance_or_archetype == "Instance":
                proc_hierarchy.add_edge(
                    get_processor_name(relation.child_processor, glb_idx),
                    get_processor_name(relation.parent_processor, glb_idx))

        part_of_graph = ComputationGraph()

        # for relation in system_flows[system]:  # type: FactorsRelationDirectedFlowObservation
        #
        #     # We create another graph only with interfaces in processors with parents
        #     for interface in [relation.source_factor, relation.target_factor]:
        #
        #         processor_name = get_processor_name(interface.processor, glb_idx)
        #         interface_full_name = processor_name+":"+interface.name
        #
        #         # If "processor" is in the "PartOf" hierarchy AND the "processor:interface" is not being handled yet
        #         if processor_name in proc_hierarchy and interface_full_name not in part_of_graph.nodes:
        #             # Insert into the Computation Graph a copy of the "PartOf" hierarchy of processors
        #             # for the specific interface
        #             new_edges = [(u+":"+interface.name, v+":"+interface.name)
        #                          for u, v in weakly_connected_subgraph(proc_hierarchy, processor_name).edges]
        #             part_of_graph.add_edges(new_edges, 1.0, None)

        # for component in nx.weakly_connected_components(part_of_graph.graph):
        #     nx.draw_kamada_kawai(part_of_graph.graph.subgraph(component), with_labels=True)
        #     plt.show()

    return []
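

# Illustrative sketch only: the two-pass weight resolution used above, i.e. a first pass that
# resolves parameter-free expressions once, and a per-scenario pass that resolves the remaining
# expressions with the scenario parameters. A toy evaluator stands in for
# evaluate_numeric_expression_with_parameters; all names below are assumptions made for the example.
def _two_pass_weight_resolution_sketch():
    edges = {("a", "b"): "2 * 3", ("b", "c"): "p1 + 1"}  # weight expressions on two edges

    def try_eval(expression, params):
        try:
            # Toy evaluator for the example only; the solver uses its own expression parser
            return eval(expression, {"__builtins__": {}}, dict(params))
        except NameError:
            return None  # The expression still depends on an unknown parameter

    # First pass: no parameters available, only constant expressions resolve
    weights = {edge: try_eval(expr, {}) for edge, expr in edges.items()}
    # Second pass (per scenario): the remaining expressions resolve with the scenario parameters
    scenario_params = {"p1": 4}
    for edge, expr in edges.items():
        if weights[edge] is None:
            weights[edge] = try_eval(expr, scenario_params)
    return weights  # {('a', 'b'): 6, ('b', 'c'): 5}

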
def flow_graph_solver(global_parameters: List[Parameter],
                      problem_statement: ProblemStatement,
                      input_systems: Dict[str, Set[Processor]], state: State):
    """
    * First scales have to be solved
    * Second direct flows
    * Third conversions of flows

    Once flows have been found, Indicators have to be gathered.

    :param global_parameters: Parameters including the default value (if defined)
    :param problem_statement: ProblemStatement object, with scenarios (parameters changing the default)
                              and parameters for the solver
    :param state: State with everything
    :param input_systems: A dictionary of the different systems to be solved
    :return: Issue[]
    """

    glb_idx, _, _, _, _ = get_case_study_registry_objects(state)

    # Initialize dictionaries
    system_flows: Dict[str,
                       Set[FactorsRelationDirectedFlowObservation]] = dict()
    system_scales: Dict[str, Set[FactorsRelationScaleObservation]] = dict()
    system_processor_hierarchies: Dict[str, nx.DiGraph] = dict()
    for s in input_systems:
        system_flows[s] = set()
        system_scales[s] = set()
        system_processor_hierarchies[s] = nx.DiGraph()

    # Handle Interface Types -Scale- relations
    relations_scale_it2it = glb_idx.get(
        FactorTypesRelationUnidirectionalLinearTransformObservation.
        partial_key())

    # Handle Interfaces -Flow- relations
    relations_flow = glb_idx.get(
        FactorsRelationDirectedFlowObservation.partial_key())

    for relation in relations_flow:  # type: FactorsRelationDirectedFlowObservation
        system_flows[relation.source_factor.processor.processor_system].add(
            relation)
        system_flows[relation.target_factor.processor.processor_system].add(
            relation)

    relations_scale = glb_idx.get(
        FactorsRelationScaleObservation.partial_key())

    for relation in relations_scale:  # type: FactorsRelationScaleObservation
        system_scales[relation.origin.processor.processor_system].add(relation)
        system_scales[relation.destination.processor.processor_system].add(
            relation)

    # Handle Processors -PartOf- relations
    relations_part_of = glb_idx.get(
        ProcessorsRelationPartOfObservation.partial_key())

    for relation in relations_part_of:  # type: ProcessorsRelationPartOfObservation
        if relation.parent_processor.instance_or_archetype.lower(
        ) == "instance":
            graph = system_processor_hierarchies[
                relation.parent_processor.processor_system]

            if not graph:
                graph = nx.DiGraph()
                system_processor_hierarchies[
                    relation.parent_processor.processor_system] = graph

            graph.add_edge(
                get_processor_name(relation.child_processor, glb_idx),
                get_processor_name(relation.parent_processor, glb_idx))

    # Get all interface observations. Also resolve expressions without parameters. Cannot resolve expressions
    # depending only on global parameters because some of them can be overridden by scenario parameters.
    observations_by_time = get_observations_by_time(glb_idx)

    if len(observations_by_time) == 0:
        raise Exception(
            "No observations have been found. The solver has nothing to solve."
        )

    # Split observations into relative and not relative
    observations_by_time_norelative, observations_by_time_relative = \
        split_observations_by_relativeness(observations_by_time)

    # Combine scenario parameters with the global parameters
    scenario_parameters: Dict[str, Dict[str, str]] = \
        {scenario_name: evaluate_parameters_for_scenario(global_parameters, scenario_params)
         for scenario_name, scenario_params in problem_statement.scenarios.items()}

    # SCALES --------------------------

    # Obtain the scale VALUES
    # scales_prd = get_scaled(scenarios=problem_statement.scenarios,
    #                         scenario_params=scenario_parameters,
    #                         relations_scale=glb_idx.get(FactorsRelationScaleObservation.partial_key()),
    #                         observations_by_time=observations_by_time_norelative)

    # FLOWS --------------------------
    for system in input_systems:
        # From Factors IN the context (LOCAL, ENVIRONMENT or OUTSIDE)
        # obtain a basic graph. Signal each Factor as LOCAL or EXTERNAL, and SOCIETY or ENVIRONMENT
        # basic_graph = prepare_interfaces_graph(systems[s][Factor])

        print(f"********************* SYSTEM: {system}")

        # Obtain a flow graph
        flow_graph = FlowGraph()
        part_of_graph = ComputationGraph()

        for relation in system_flows[
                system]:  # type: FactorsRelationDirectedFlowObservation
            flow_graph.add_edge(get_interface_name(relation.source_factor,
                                                   glb_idx),
                                get_interface_name(relation.target_factor,
                                                   glb_idx),
                                weight=relation.weight,
                                reverse_weight=None)

            assert (relation.source_factor.name == relation.target_factor.name)

            # We create another graph only with interfaces in processors with parents
            proc_hierarchy = system_processor_hierarchies[system]

            for interface in [relation.source_factor, relation.target_factor]:

                processor_name = get_processor_name(interface.processor,
                                                    glb_idx)
                interface_full_name = processor_name + ":" + interface.name

                # If "processor" is in the "PartOf" hierarchy AND the "processor:interface" is not being handled yet
                if processor_name in proc_hierarchy and interface_full_name not in part_of_graph.nodes:
                    # Insert into the Computation Graph a copy of the "PartOf" hierarchy of processors
                    # for the specific interface
                    new_edges = [(u + ":" + interface.name,
                                  v + ":" + interface.name)
                                 for u, v in weakly_connected_subgraph(
                                     proc_hierarchy, processor_name).edges]
                    part_of_graph.add_edges(new_edges, 1.0, None)

        comp_graph, issues = flow_graph.get_computation_graph()

        for relation in system_scales[
                system]:  # type: FactorsRelationScaleObservation
            comp_graph.add_edge(get_interface_name(relation.origin, glb_idx),
                                get_interface_name(relation.destination,
                                                   glb_idx),
                                weight=relation.quantity,
                                reverse_weight=None)

        for issue in issues:
            print(issue)

        print(f"****** NODES: {comp_graph.nodes}")

        # for component in nx.weakly_connected_components(part_of_graph.graph):
        #     nx.draw_kamada_kawai(part_of_graph.graph.subgraph(component), with_labels=True)
        #     plt.show()

        # TODO Expand flow graph with it2it transforms

        # Split flow graphs
        for scenario_idx, (scenario_name, scenario) in enumerate(
                problem_statement.scenarios.items()):

            print(f"********************* SCENARIO: {scenario_name}")

            scenario_state = State()
            scenario_state.update(scenario_parameters[scenario_name])

            for time_period, observations in observations_by_time_norelative.items(
            ):

                print(f"********************* TIME PERIOD: {time_period}")

                scales = {
                }  # {fact: val for fact, val in scales_prd.get(dict(__t=time_period, __s=scenario_idx))}

                # Final values are taken from "scales" or from "observations" that need to be computed
                graph_params = {}
                for expression, obs in observations:
                    interface_name = get_interface_name(obs.factor, glb_idx)
                    if interface_name not in comp_graph.nodes:
                        print(
                            f"WARNING: observation at interface '{interface_name}' is not taken into account."
                        )
                    else:
                        if scales.get(obs.factor):
                            graph_params[interface_name] = scales[obs.factor]
                        else:
                            value, ast, _, issues = evaluate_numeric_expression_with_parameters(
                                expression, scenario_state)
                            if value is None:
                                raise Exception(
                                    f"Cannot evaluate expression '{expression}' for observation at "
                                    f"interface '{interface_name}'. Issues: {', '.join(issues)}"
                                )

                            graph_params[interface_name] = value

                # ----------------------------------------------------

                compute_nodes = [
                    n for n in comp_graph.nodes if not graph_params.get(n)
                ]

                # Compute the missing information with the computation graph
                if len(compute_nodes) > 0:

                    print(f"****** UNKNOWN NODES: {compute_nodes}")
                    print(f"****** PARAMS: {graph_params}")

                    conflicts = comp_graph.compute_param_conflicts(
                        set(graph_params.keys()))

                    for s, (param, values) in enumerate(conflicts.items()):
                        print(f"Conflict {s + 1}: {param} -> {values}")

                    combinations = ComputationGraph.compute_param_combinations(
                        conflicts)

                    for s, combination in enumerate(combinations):
                        print(f"Combination {s}: {combination}")

                        filtered_params = {
                            k: v
                            for k, v in graph_params.items()
                            if k in combination
                        }
                        results, _ = comp_graph.compute_values(
                            compute_nodes, filtered_params)

                        results_with_values = {
                            k: v
                            for k, v in results.items() if v
                        }
                        print(f'  results_with_values={results_with_values}')

                        # TODO: work with "part_of_graph"
                        #  - Params: graph_params + results
                        #  - Compute conflicts, combinations
                        #  - For each combination "compute_values"
                else:
                    print(
                        "There are no nodes with unknown values. Nothing to solve."
                    )

                # TODO Overwrite "obs" with "scales" results
                # TODO Put observations into the flow-graph

                # TODO Put processors into scale (intensive to extensive conversion)
                # scale_unit_processors(flow_graph, params, relative_observations_prd)

                # for sub_fg in nx.weakly_connected_component_subgraphs(flow_graph):
                # TODO Elaborate information flow graph
                #      Cycles allowed?
                # ifg = get_information_flow_graph(sub_fg)
                # TODO Solve information flow graph. From all possible combinations:
                #  bottom-up if top-down USE
                #  bottom-up if top-down DO NOT USE
                #  top-down  if bottom-up USE
                #  top-down  if bottom-up DO NOT USE
                # solve_flow_graph(sub_fg, ifg)  # Each value: Interface, Scenario, Time, Given/Computed -> VALUE (or UNDEFINED)
                # TODO Put results back

        # TODO INDICATORS --- (INSIDE FLOWS)

    return []
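

# Illustrative sketch only: how the "PartOf" processor hierarchy is replicated per interface when
# building the part-of computation graph above. A plain networkx DiGraph stands in for
# ComputationGraph; processor and interface names are made up for the example.
def _part_of_hierarchy_per_interface_sketch():
    proc_hierarchy = nx.DiGraph()
    proc_hierarchy.add_edge("Farm.Field1", "Farm")  # child -> parent ("PartOf")
    proc_hierarchy.add_edge("Farm.Field2", "Farm")

    interface_name = "Water"
    part_of = nx.DiGraph()
    # One copy of the hierarchy for the given interface, using "processor:interface" node names
    part_of.add_edges_from((u + ":" + interface_name, v + ":" + interface_name)
                           for u, v in proc_hierarchy.edges)
    # -> [('Farm.Field1:Water', 'Farm:Water'), ('Farm.Field2:Water', 'Farm:Water')]
    return list(part_of.edges)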