Example #1
def combine_optimizer_plan(stream_plan, functions):
    if not stream_plan:
        return stream_plan
    optimizer = get_optimizer(stream_plan[-1])
    if optimizer is None:
        return stream_plan
    function_plan = list(filter(
        lambda r: get_prefix(r.instance.external.head) in optimizer.objectives,
        functions))
    external_plan = stream_plan + function_plan
    if CLUSTER:
        partial_orders = get_partial_orders(external_plan)
        cluster_plans = get_connected_components(external_plan, partial_orders)
    else:
        cluster_plans = [external_plan]
    optimizer_plan = []
    for cluster_plan in cluster_plans:
        if all(isinstance(r, FunctionResult) for r in cluster_plan):
            continue
        #if len(cluster_plan) == 1:
        #    optimizer_plan.append(cluster_plan[0])
        #    continue
        stream = OptimizerStream(optimizer, cluster_plan)
        instance = stream.get_instance(stream.input_objects,
                                       fluent_facts=stream.fluent_facts)
        result = instance.get_result(stream.output_objects)
        optimizer_plan.append(result)
    return optimizer_plan
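Note: `combine_optimizer_plan` leans on `get_connected_components` to split the combined plan into independently solvable clusters. A rough, self-contained sketch of that clustering idea (illustrative code, not pddlstream's actual implementation), treating the partial-order pairs as undirected edges:

from collections import defaultdict

def connected_components(nodes, edges):
    # Treat the (u, v) order pairs as undirected edges.
    adjacent = defaultdict(set)
    for u, v in edges:
        adjacent[u].add(v)
        adjacent[v].add(u)
    components, seen = [], set()
    for node in nodes:
        if node in seen:
            continue
        # Stack-based search collects everything reachable from node.
        component, stack = [], [node]
        seen.add(node)
        while stack:
            current = stack.pop()
            component.append(current)
            for neighbor in adjacent[current]:
                if neighbor not in seen:
                    seen.add(neighbor)
                    stack.append(neighbor)
        components.append(component)
    return components

print(connected_components(['a', 'b', 'c', 'd'], [('a', 'b'), ('c', 'd')]))
# [['a', 'b'], ['c', 'd']]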
Example #2
def combine_optimizers_greedy(evaluations, external_plan):
    if not is_plan(external_plan):
        return external_plan
    # The key idea is that a variable must be grounded before it can be used in a non-stream context
    # TODO: construct variables in order
    # TODO: graph cut algorithm to minimize the number of constraints that are excluded
    # TODO: reorder to ensure that constraints are done first since they are likely to fail as tests
    incoming_edges, outgoing_edges = neighbors_from_orders(
        get_partial_orders(external_plan))
    queue = []
    functions = []
    for v in external_plan:
        if not incoming_edges[v]:
            (functions if isinstance(v, FunctionResult) else queue).append(v)
    current = []
    ordering = []
    while queue:
        optimizer = get_optimizer(current[-1]) if current else None
        for v in queue:
            if optimizer == get_optimizer(v):
                current.append(v)
                break
        else:
            ordering.extend(combine_optimizer_plan(current, functions))
            current = [queue[0]]
        v1 = current[-1]
        queue.remove(v1)
        for v2 in outgoing_edges[v1]:
            incoming_edges[v2].remove(v1)
            if not incoming_edges[v2]:
                (functions
                 if isinstance(v2, FunctionResult) else queue).append(v2)
    ordering.extend(combine_optimizer_plan(current, functions))
    return ordering + functions
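The loop above is Kahn's topological sort with a twist: among the ready nodes it greedily prefers one whose optimizer matches the current run, and flushes the run through `combine_optimizer_plan` when no match exists. A stripped-down sketch of that grouping pattern, with a hypothetical `group_of` standing in for `get_optimizer`:

from collections import defaultdict

def greedy_grouped_order(nodes, orders, group_of):
    # Kahn-style topological sort that keeps runs of same-group nodes together.
    incoming, outgoing = defaultdict(set), defaultdict(set)
    for u, v in orders:
        incoming[v].add(u)
        outgoing[u].add(v)
    queue = [v for v in nodes if not incoming[v]]
    current, groups = [], []
    while queue:
        group = group_of(current[-1]) if current else None
        for v in queue:
            if group_of(v) == group:
                current.append(v)
                break
        else:
            # No ready node matches the current group: flush and restart it.
            if current:
                groups.append(current)
            current = [queue[0]]
        v1 = current[-1]
        queue.remove(v1)
        for v2 in outgoing[v1]:
            incoming[v2].discard(v1)
            if not incoming[v2]:
                queue.append(v2)
    if current:
        groups.append(current)
    return groups

print(greedy_grouped_order(['a1', 'a2', 'b1'], [('a1', 'b1')], lambda v: v[0]))
# [['a1', 'a2'], ['b1']]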
Example #3
def extract_disabled_clusters(queue):
    clusters = set()
    for skeleton in queue.skeletons:
        # TODO: include costs within clustering?
        # What if the goal is to be below a cost threshold?
        # In satisfaction problems, there is no need because costs are fixed
        # Make stream_facts for externals to prevent use of the same ones
        # This ordering is why it's better to put likely-to-fail streams first
        # Branch on the different possible binding outcomes
        # TODO: consider a nonlinear version of this that evaluates out of order
        # Need extra sampling effort to identify infeasible subsets
        # Treat unevaluated streams optimistically, i.e. as always satisfiable
        # Need to keep streams with outputs to connect if downstream is infeasible
        # TODO: prune streams that always have at least one success
        # TODO: CSP identification of irreducible unsatisfiable subsets
        # TODO: take into consideration if a stream is enumerated to mark as a hard failure
        # Decompose the optimizers

        #cluster_plans = [skeleton.stream_plan]
        partial_orders = get_partial_orders(skeleton.stream_plan)
        cluster_plans = get_connected_components(skeleton.stream_plan,
                                                 partial_orders)
        binding = skeleton.best_binding
        if not binding.is_bound():
            # TODO: block if cost sensitive to possibly get cheaper solutions
            #cluster_plans = current_failed_cluster(binding)
            cluster_plans = current_failure_contributors(binding)
        for cluster_plan in cluster_plans:
            clusters.add(frozenset(cluster_plan))
    return clusters
Example #4
def convert_fluent_streams(stream_plan, real_states, action_plan,
                           step_from_fact, node_from_atom):
    #return stream_plan
    import pddl
    assert len(real_states) == len(action_plan) + 1
    steps_from_stream = get_steps_from_stream(stream_plan, step_from_fact,
                                              node_from_atom)

    # TODO: ensure that derived facts aren't in fluents?
    # TODO: handle case where costs depend on the outputs
    _, outgoing_edges = neighbors_from_orders(
        get_partial_orders(stream_plan,
                           init_facts=map(
                               fact_from_fd,
                               filter(lambda f: isinstance(f, pddl.Atom),
                                      real_states[0]))))
    static_plan = []
    fluent_plan = []
    for result in stream_plan:
        external = result.external
        if isinstance(result, FunctionResult) or (result.opt_index != 0) or (
                not external.is_fluent):
            static_plan.append(result)
            continue
        if outgoing_edges[result]:
            # No way of taking into account the binding of fluent inputs when preventing cycles
            raise NotImplementedError(
                'Fluent stream is required for another stream: {}'.format(
                    result))
        #if (len(steps_from_stream[result]) != 1) and result.output_objects:
        #    raise NotImplementedError('Fluent stream required in multiple states: {}'.format(result))
        for state_index in steps_from_stream[result]:
            new_output_objects = [
                #OptimisticObject.from_opt(out.value, object())
                OptimisticObject.from_opt(
                    out.value, UniqueOptValue(result.instance, object(), name))
                for name, out in safe_zip(result.external.outputs,
                                          result.output_objects)
            ]
            if new_output_objects and (state_index <= len(action_plan) - 1):
                # TODO: check that the objects aren't used in any effects
                instance = copy.copy(action_plan[state_index])
                action_plan[state_index] = instance
                output_mapping = get_mapping(
                    list(map(pddl_from_object, result.output_objects)),
                    list(map(pddl_from_object, new_output_objects)))
                instance.var_mapping = {
                    p: output_mapping.get(v, v)
                    for p, v in instance.var_mapping.items()
                }
            new_instance = get_fluent_instance(external,
                                               result.instance.input_objects,
                                               real_states[state_index])
            # TODO: handle optimistic here
            new_result = new_instance.get_result(new_output_objects,
                                                 opt_index=result.opt_index)
            fluent_plan.append(new_result)
    return static_plan + fluent_plan
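One detail worth isolating from the example above: when fresh output objects are minted per state, the copied action instance's `var_mapping` is recomposed so its parameters point at the new objects. The rebinding itself is just dictionary composition; a tiny self-contained sketch with illustrative names:

def remap_var_mapping(var_mapping, old_objects, new_objects):
    # Compose the parameter binding with an old-object -> new-object renaming.
    renaming = dict(zip(old_objects, new_objects))
    return {param: renaming.get(value, value)
            for param, value in var_mapping.items()}

print(remap_var_mapping({'?q': 'q0', '?t': 't5'}, ['t5'], ['t5-copy']))
# {'?q': 'q0', '?t': 't5-copy'}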
Example #5
def current_failed_cluster(binding):
    assert 1 <= binding.visits
    failed_result = binding.skeleton.stream_plan[binding.index]
    successful_results = [
        result for i, result in enumerate(binding.skeleton.stream_plan)
        if i not in binding.stream_indices
    ]
    stream_plan = successful_results + [failed_result]
    partial_orders = get_partial_orders(stream_plan)
    # All connected components
    #return get_connected_components(stream_plan, partial_orders)
    # Only the failed connected component
    return [
        grow_component([failed_result], adjacent_from_edges(partial_orders))
    ]
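`grow_component` does the real work here: starting from the failed result, it expands through the adjacency structure derived from the partial orders. A minimal sketch of that expansion (pddlstream's actual helper may differ in traversal order and return type):

def grow_component_sketch(seeds, adjacent):
    # Expand the seed set along an adjacency map of node -> neighbors.
    component = set(seeds)
    frontier = list(seeds)
    while frontier:
        node = frontier.pop()
        for neighbor in adjacent.get(node, ()):
            if neighbor not in component:
                component.add(neighbor)
                frontier.append(neighbor)
    return component

print(grow_component_sketch(['fail'], {'fail': ['dep1'], 'dep1': ['dep2']}))
# {'fail', 'dep1', 'dep2'} (set order may vary)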
Example #6
def get_synthetic_stream_plan(stream_plan, synthesizers):
    if (stream_plan is None) or (not synthesizers):
        return stream_plan
    orders = get_partial_orders(stream_plan)
    for order in list(orders):
        orders.add(order[::-1])
    neighbors, _ = neighbors_from_orders(orders)
    # TODO: what if many possibilities?
    # TODO: cluster first and then plan using the macro and regular streams

    processed = set()
    new_stream_plan = []
    for v in stream_plan:  # Processing in order is important
        if v in processed:
            continue
        processed.add(v)
        # TODO: assert that it has at least one thing in it

        for synthesizer in synthesizers:
            # TODO: something could be an input and output of a cut...
            if v.instance.external.name not in synthesizer.streams:
                continue
            # TODO: need to ensure all are covered I think?
            # TODO: don't do if no streams within

            cluster = {v}
            queue = deque([v])
            while queue:
                v1 = queue.popleft()
                for v2 in neighbors[v1]:
                    if (v2 not in processed) and (v2.instance.external.name
                                                  in synthesizer.streams):
                        cluster.add(v2)
                        queue.append(v2)
                        processed.add(v2)
            counts = Counter(r.instance.external.name for r in cluster)
            if not all(n <= counts[name]
                       for name, n in synthesizer.streams.items()):
                continue
            ordered_cluster = [r for r in stream_plan if r in cluster]
            new_stream_plan.append(
                synthesizer.get_synth_stream(ordered_cluster))
            new_stream_plan += filter(lambda s: isinstance(s, FunctionResult),
                                      ordered_cluster)
            break
        else:
            new_stream_plan.append(v)
    return new_stream_plan
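The `Counter` check above is a multiset-containment test: the cluster qualifies only if it contains at least as many results of each stream as the synthesizer requires. In isolation (the stream names here are made up for illustration):

from collections import Counter

required = {'sample-pose': 1, 'inverse-kinematics': 2}
counts = Counter(['sample-pose', 'inverse-kinematics',
                  'inverse-kinematics', 'test-cfree'])
print(all(n <= counts[name] for name, n in required.items()))  # True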
Example #7
def visualize_stream_plan(stream_plan, filename='stream_plan.pdf'):
    from pygraphviz import AGraph
    graph = AGraph(strict=True, directed=True)
    graph.node_attr['style'] = 'filled'
    graph.node_attr['shape'] = 'box'
    graph.node_attr['color'] = STREAM_COLOR

    for stream in stream_plan:
        graph.add_node(str(stream))
    for stream1, stream2 in get_partial_orders(stream_plan):
        graph.add_edge(str(stream1), str(stream2))
    # TODO: could also print the raw values (or a lookup table)
    # https://stackoverflow.com/questions/3499056/making-a-legend-key-in-graphviz

    graph.draw(filename, prog='dot')
    return graph
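The pygraphviz calls above can be exercised standalone; the following writes a two-node DAG the same way (assumes Graphviz and pygraphviz are installed; the stream names are placeholders):

from pygraphviz import AGraph

graph = AGraph(strict=True, directed=True)
graph.node_attr['style'] = 'filled'
graph.node_attr['shape'] = 'box'
graph.add_node('sample-pose')
graph.add_node('inverse-kinematics')
graph.add_edge('sample-pose', 'inverse-kinematics')
graph.draw('example.pdf', prog='dot')  # 'dot' gives a top-down DAG layout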
Example #8
def current_failure_contributors(binding):
    # Alternatively, find unsuccessful streams in cluster and add ancestors
    assert (1 <= binding.visits) or binding.is_dominated()
    failed_result = binding.skeleton.stream_plan[binding.index]
    failed_indices = compute_failed_indices(
        binding.skeleton)  # Use last index?
    partial_orders = get_partial_orders(binding.skeleton.stream_plan)
    incoming = incoming_from_edges(partial_orders)
    failed_ancestors = grow_component([failed_result], incoming)
    for index in reversed(failed_indices):
        if index == binding.index:
            continue
        result = binding.skeleton.stream_plan[index]
        ancestors = grow_component([result], incoming)
        if ancestors & failed_ancestors:
            failed_ancestors.update(ancestors)
    return [failed_ancestors]
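The merging rule above only unions another failure's ancestors into the blamed set when the two ancestor sets already intersect, so unrelated failures stay out. The rule in isolation (note that, as in the original, the result is order-sensitive):

def merge_overlapping(base, candidates):
    # Absorb any candidate set that shares a member with the growing base.
    merged = set(base)
    for candidate in candidates:
        if merged & candidate:
            merged |= candidate
    return merged

print(merge_overlapping({1, 2}, [{2, 3}, {7, 8}, {3, 4}]))
# {1, 2, 3, 4}; {7, 8} is disjoint and excluded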
Example #9
def get_synthetic_stream_plan(stream_plan, synthesizers):
    # TODO: fix this implementation to be as follows:
    # 1) Prune the parts of the graph that are unrelated
    # 2) Cluster
    # 3) Try combinations of replacements on the stream plan
    if not is_plan(stream_plan) or (not synthesizers):
        return stream_plan
    orders = get_partial_orders(stream_plan)
    for order in list(orders):
        orders.add(order[::-1])
    neighbors, _ = neighbors_from_orders(orders)
    # TODO: what if many possibilities?
    # TODO: cluster first and then plan using the macro and regular streams

    processed = set()
    new_stream_plan = []
    for result in stream_plan:  # Processing in order is important
        if result in processed:
            continue
        processed.add(result)
        # TODO: assert that it has at least one thing in it
        for synthesizer in synthesizers:
            # TODO: something could be an input and output of a cut...
            if result.instance.external.name not in synthesizer.streams:
                continue
            # TODO: need to ensure all are covered I think?
            # TODO: don't do if no streams within
            cluster = expand_cluster(synthesizer, result, neighbors, processed)
            counts = Counter(r.instance.external.name for r in cluster)
            if not all(n <= counts[name]
                       for name, n in synthesizer.streams.items()):
                continue
            ordered_cluster = [r for r in stream_plan if r in cluster]
            synthesizer_result = synthesizer.get_synth_stream(ordered_cluster)
            if synthesizer_result is None:
                continue
            new_stream_plan.append(synthesizer_result)
            new_stream_plan.extend(
                filter(lambda s: isinstance(s, FunctionResult),
                       ordered_cluster))
            break
        else:
            new_stream_plan.append(result)
    return new_stream_plan
Example #10
    def __init__(self, queue, stream_plan, action_plan, cost):
        # TODO: estimate statistics per stream_instance online and use to reorder the skeleton
        self.queue = queue
        self.index = len(self.queue.skeletons)
        self.stream_plan = stream_plan
        self.action_plan = action_plan
        self.cost = cost
        self.best_binding = None
        self.improved = False
        self.root = Binding(self, self.cost, history=[], mapping={}, index=0, parent=None, parent_result=None)
        self.affected_indices = [compute_affected_downstream(self.stream_plan, index)
                                 for index in range(len(self.stream_plan))]

        stream_orders = get_partial_orders(self.stream_plan) # init_facts=self.queue.evaluations)
        index_from_result = get_mapping(stream_plan, range(len(stream_plan)))
        index_orders = {(index_from_result[r1], index_from_result[r2]) for r1, r2 in stream_orders}

        preimage = stream_plan_preimage(stream_plan)
        self.preimage_complexities = [[queue.evaluations[evaluation_from_fact(fact)].complexity
                                       for fact in stream.get_domain() if fact in preimage] for stream in stream_plan]
        self.incoming_indices = incoming_from_edges(index_orders)
        self.outgoing_indices = outgoing_from_edges(index_orders)
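The constructor converts result-level partial orders into index-level orders so that later bookkeeping (`incoming_indices`, `outgoing_indices`) can work with plain integers rather than result objects. That conversion, sketched with stand-in helpers rather than the pddlstream utilities:

from collections import defaultdict

def index_orders_from(stream_plan, stream_orders):
    # Replace each result in an order pair with its position in the plan.
    index_from_result = {result: i for i, result in enumerate(stream_plan)}
    index_orders = {(index_from_result[r1], index_from_result[r2])
                    for r1, r2 in stream_orders}
    incoming, outgoing = defaultdict(set), defaultdict(set)
    for i, j in index_orders:
        incoming[j].add(i)
        outgoing[i].add(j)
    return incoming, outgoing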
Example #11
def visualize_stream_plan(stream_plan, filename='stream_plan.pdf'):
    from pygraphviz import AGraph
    graph = AGraph(strict=True, directed=True)
    graph.node_attr['style'] = 'filled'
    graph.node_attr['shape'] = 'box'
    graph.node_attr['color'] = STREAM_COLOR
    graph.node_attr['fontcolor'] = 'black'
    #graph.node_attr['fontsize'] = 12
    graph.node_attr['width'] = 0
    graph.node_attr['height'] = 0.02  # Minimum height is 0.02
    graph.node_attr['margin'] = 0
    graph.graph_attr['outputMode'] = 'nodesfirst'
    graph.graph_attr['dpi'] = 300

    for stream in stream_plan:
        graph.add_node(str(stream))
    for stream1, stream2 in get_partial_orders(stream_plan):
        graph.add_edge(str(stream1), str(stream2))
    # TODO: could also print the raw values (or a lookup table)
    # https://stackoverflow.com/questions/3499056/making-a-legend-key-in-graphviz

    graph.draw(filename, prog='dot')
    return graph
Example #12
def visualize_stream_plan(stream_plan,
                          filename='stream_plan' + DEFAULT_EXTENSION):
    return visualize_stream_orders(get_partial_orders(stream_plan),
                                   streams=stream_plan,
                                   filename=filename)
Example #13
def convert_fluent_streams(stream_plan, real_states, action_plan,
                           step_from_fact, node_from_atom):
    import pddl
    assert len(real_states) == len(action_plan) + 1
    steps_from_stream = {}
    for result in reversed(stream_plan):
        steps_from_stream[result] = set()
        for fact in result.get_certified():
            if (fact in step_from_fact) and (node_from_atom[fact].result
                                             == result):
                steps_from_stream[result].update(step_from_fact[fact])
        for fact in result.instance.get_domain():
            step_from_fact[fact] = step_from_fact.get(
                fact, set()) | steps_from_stream[result]
            # TODO: apply this recursively

    # TODO: ensure that derived facts aren't in fluents?
    # TODO: handle case where costs depend on the outputs
    _, outgoing_edges = neighbors_from_orders(
        get_partial_orders(stream_plan,
                           init_facts=map(
                               fact_from_fd,
                               filter(lambda f: isinstance(f, pddl.Atom),
                                      real_states[0]))))
    static_plan = []
    fluent_plan = []
    for result in stream_plan:
        external = result.external
        if (result.opt_index != 0) or (not external.is_fluent()):
            static_plan.append(result)
            continue
        if outgoing_edges[result]:
            # No way of taking into account the binding of fluent inputs when preventing cycles
            raise NotImplementedError(
                'Fluent stream is required for another stream: {}'.format(
                    result))
        #if (len(steps_from_stream[result]) != 1) and result.output_objects:
        #    raise NotImplementedError('Fluent stream required in multiple states: {}'.format(result))
        for state_index in steps_from_stream[result]:
            new_output_objects = [  # OptimisticObject.from_opt(out.value, object())
                OptimisticObject.from_opt(
                    out.value, UniqueOptValue(result.instance, object(), i))
                for i, out in enumerate(result.output_objects)
            ]
            if new_output_objects and (state_index < len(action_plan)):
                # TODO: check that the objects aren't used in any effects
                instance = copy.copy(action_plan[state_index])
                action_plan[state_index] = instance
                output_mapping = get_mapping(
                    map(pddl_from_object, result.output_objects),
                    map(pddl_from_object, new_output_objects))
                instance.var_mapping = {
                    p: output_mapping.get(v, v)
                    for p, v in instance.var_mapping.items()
                }
            fluent_facts = list(
                map(
                    fact_from_fd,
                    filter(
                        lambda f: isinstance(f, pddl.Atom) and
                        (f.predicate in external.fluents),
                        real_states[state_index])))
            new_instance = external.get_instance(result.instance.input_objects,
                                                 fluent_facts=fluent_facts)
            new_result = new_instance.get_result(new_output_objects,
                                                 opt_index=result.opt_index)
            fluent_plan.append(new_result)
    return static_plan + fluent_plan
Example #14
    def get_cluster_plans(self):
        # TODO: split the optimizer into clusters when provably independent
        external_plan = self.stream_plan + self.function_plan
        partial_orders = get_partial_orders(external_plan)
        return get_connected_components(external_plan, partial_orders)