def extract_disabled_clusters(queue, full_cluster=False):
    """Gather, in order, the stream-plan clusters to disable for each skeleton.

    For every skeleton in ``queue.skeletons``: when its best binding is fully
    bound, the skeleton contributes the components of its stream plan; when it
    is not, the clusters are instead derived from the failing partial binding —
    the whole failed cluster if ``full_cluster`` is True, otherwise only the
    results that contributed to the failure.

    Returns a list of cluster plans in skeleton order.
    """
    # TODO: include costs within clustering? only relevant when targeting a
    # cost threshold; in pure satisfaction costs are fixed.
    # TODO: consider a nonlinear version that evaluates out of order.
    # TODO: prune streams that always have at least one success.
    # TODO: CSP identification of irreducible unsatisfiable subsets.
    # TODO: mark a hard failure when a stream is fully enumerated.
    collected = []
    for skeleton in queue.skeletons:
        # TODO: consider all bindings up to the most progress.
        plans = get_stream_plan_components(skeleton.stream_plan)
        binding = skeleton.best_binding
        if not binding.is_fully_bound:
            # Unbound: replace with clusters derived from the failed binding.
            # TODO: block if cost sensitive to possibly get cheaper solutions.
            deriver = current_failed_cluster if full_cluster else current_failure_contributors
            plans = deriver(binding)
        collected.extend(plans)
    # TODO: could instead prune at this stage.
    return collected
def combine_optimizer_plan(stream_plan, functions):
    """Fold a stream plan and its optimizer objectives into optimizer results.

    Returns ``stream_plan`` unchanged when it is empty or its final result does
    not belong to an optimizer. Otherwise, appends the function results whose
    head matches one of the optimizer's objectives, splits the combined plan
    into clusters (a single cluster when ``CLUSTER`` is falsy), and emits one
    ``OptimizerStream`` result per cluster that contains at least one
    non-function result.
    """
    if not stream_plan:
        return stream_plan
    optimizer = get_optimizer(stream_plan[-1])
    if optimizer is None:
        return stream_plan
    # Keep only the function results that are objectives of this optimizer.
    objective_results = [r for r in functions
                         if get_prefix(r.instance.external.head) in optimizer.objectives]
    external_plan = stream_plan + objective_results
    clusters = get_stream_plan_components(external_plan) if CLUSTER else [external_plan]
    combined = []
    for cluster in clusters:
        # A cluster made purely of function results needs no optimizer call.
        if all(isinstance(r, FunctionResult) for r in cluster):
            continue
        stream = OptimizerStream(optimizer, cluster)
        instance = stream.get_instance(stream.input_objects,
                                       fluent_facts=stream.fluent_facts)
        combined.append(instance.get_result(stream.output_objects))
    return combined
def get_cluster_plans(self):
    """Split the combined stream and function plan into its component clusters."""
    # TODO: split the optimizer into clusters when provably independent.
    combined_plan = self.stream_plan + self.function_plan
    return get_stream_plan_components(combined_plan)