def add_atom(self, atom):
    if not is_atom(atom):
        return False
    head = atom.head
    if head in self.atoms:
        return False
    self.atoms.add(head)
    # TODO: doing this in a way that will eventually allow constants
    for i, stream in enumerate(self.streams):
        for j, domain_atom in enumerate(stream.domain):
            if get_prefix(head) != get_prefix(domain_atom):
                continue
            if len(head.args) != len(get_args(domain_atom)):
                raise ValueError(head, domain_atom)
            if any(isinstance(b, Object) and (a != b)
                   for (a, b) in zip(head.args, get_args(domain_atom))):
                continue
            self.atoms_from_domain[(i, j)].append(head)
            values = [self.atoms_from_domain[(i, k)] if j != k else [head]
                      for k in range(len(stream.domain))]
            domain = list(map(head_from_fact, stream.domain))
            #domain = stream.domain
            for combo in product(*values):
                mapping = get_mapping(domain, combo)
                if mapping is None:
                    continue
                input_objects = tuple(mapping[p] for p in stream.inputs)
                self._add_instance(stream, input_objects)
    return True
def retrace_instantiation(fact, streams, evaluations, free_parameters, visited_facts, planned_results):
    # Makes two assumptions:
    # 1) Each stream achieves a "primary" fact that uses all of its inputs + outputs
    # 2) Outputs are only free parameters (no constants)
    if (evaluation_from_fact(fact) in evaluations) or (fact in visited_facts):
        return
    visited_facts.add(fact)
    for stream in streams:
        for cert in stream.certified:
            if get_prefix(fact) == get_prefix(cert):
                mapping = get_mapping(get_args(cert), get_args(fact))  # Should be same anyways
                if not all(p in mapping for p in (stream.inputs + stream.outputs)):
                    # TODO: assumes another effect is sufficient for binding
                    # Create arbitrary objects for inputs/outputs that aren't mentioned
                    # Can lead to incorrect ordering
                    continue
                input_objects = tuple(mapping[p] for p in stream.inputs)
                output_objects = tuple(mapping[p] for p in stream.outputs)
                if not all(out in free_parameters for out in output_objects):
                    # Can only bind if free
                    continue
                instance = stream.get_instance(input_objects)
                for new_fact in instance.get_domain():
                    retrace_instantiation(new_fact, streams, evaluations, free_parameters,
                                          visited_facts, planned_results)
                planned_results.append(instance.get_result(output_objects))
def compile_to_exogenous_actions(evaluations, domain, streams):
    # TODO: version of this that operates on fluents of length one?
    # TODO: better instantiation when have full parameters
    fluent_predicates = get_fluents(domain)
    certified_predicates = {get_prefix(a) for s in streams for a in s.certified}
    future_map = {p: 'f-{}'.format(p) for p in certified_predicates}
    augment_evaluations(evaluations, future_map)
    future_fn = lambda a: rename_atom(a, future_map)
    new_streams = []
    for stream in list(streams):
        if not isinstance(stream, Stream):
            raise NotImplementedError(stream)
        # TODO: could also just have conditions asserting that one of the fluent conditions fails
        new_streams.append(create_static_stream(stream, evaluations, fluent_predicates, future_fn))
        stream_atom = new_streams[-1].certified[0]
        add_predicate(domain, make_predicate(get_prefix(stream_atom), get_args(stream_atom)))
        preconditions = [stream_atom] + list(stream.domain)
        effort = 1  # TODO: use stream info
        #effort = 1 if unit_cost else result.instance.get_effort()
        #if effort == INF:
        #    continue
        domain.actions.append(make_action(
            name='call-{}'.format(stream.name),
            parameters=get_args(stream_atom),
            preconditions=preconditions,
            effects=stream.certified,
            cost=effort))
        stream.certified = tuple(set(stream.certified) | set(map(future_fn, stream.certified)))
    if REPLACE_STREAM:
        streams.extend(new_streams)
    else:
        streams[:] = new_streams
def retrace_instantiation(fact, streams, evaluations, visited_facts, planned_results):
    if (evaluation_from_fact(fact) in evaluations) or (fact in visited_facts):
        return
    visited_facts.add(fact)
    for stream in streams:
        for cert in stream.certified:
            if get_prefix(fact) == get_prefix(cert):
                mapping = get_mapping(get_args(cert), get_args(fact))  # Should be same anyways
                if not all(p in mapping for p in (stream.inputs + stream.outputs)):
                    # TODO: assumes another effect is sufficient for binding
                    # Create arbitrary objects for inputs/outputs that aren't mentioned
                    # Can lead to incorrect ordering
                    continue
                input_objects = tuple(mapping[p] for p in stream.inputs)
                output_objects = tuple(mapping[p] for p in stream.outputs)
                if not all(isinstance(out, OptimisticObject) for out in output_objects):
                    # Can only bind if free
                    continue
                instance = stream.get_instance(input_objects)
                for new_fact in instance.get_domain():
                    retrace_instantiation(new_fact, streams, evaluations,
                                          visited_facts, planned_results)
                result = instance.get_result(output_objects)
                planned_results.append(result)
def __init__(self, name, info, inputs, domain):
    super(External, self).__init__(name, info)
    self.inputs = tuple(inputs)
    self.domain = tuple(domain)
    for p, c in Counter(self.inputs).items():
        if not is_parameter(p):
            # AssertionError: Expected item to be a variable: q2 in (?q1 q2)
            raise ValueError('Input [{}] for stream [{}] is not a parameter'.format(p, name))
        if c != 1:
            raise ValueError('Input [{}] for stream [{}] is not unique'.format(p, name))
    parameters = {a for i in self.domain for a in get_args(i) if is_parameter(a)}
    for p in (parameters - set(self.inputs)):
        raise ValueError('Parameter [{}] for stream [{}] is not included within inputs'.format(p, name))
    for p in (set(self.inputs) - parameters):
        print('Warning! Input [{}] for stream [{}] is not covered by a domain condition'.format(p, name))
    self.constants = {a for i in self.domain for a in get_args(i) if not is_parameter(a)}
    self.instances = {}
def compile_to_exogenous_axioms(evaluations, domain, streams):
    # TODO: no attribute certified
    # TODO: recover the streams that are required
    import pddl
    fluent_predicates = get_fluents(domain)
    certified_predicates = {get_prefix(a) for s in streams for a in s.certified}
    future_map = {p: 'f-{}'.format(p) for p in certified_predicates}
    augment_evaluations(evaluations, future_map)
    future_fn = lambda a: rename_atom(a, future_map)
    derived_map = {p: 'd-{}'.format(p) for p in certified_predicates}
    derived_fn = lambda a: rename_atom(a, derived_map)
    # TODO: could prune streams that don't need this treatment
    for action in domain.actions:
        action.precondition = replace_predicates(derived_map, action.precondition)
        for effect in action.effects:
            assert isinstance(effect, pddl.Effect)
            effect.condition = replace_predicates(derived_map, effect.condition)
    for axiom in domain.axioms:
        axiom.condition = replace_predicates(derived_map, axiom.condition)
    #fluent_predicates.update(certified_predicates)
    new_streams = []
    for stream in list(streams):
        if not isinstance(stream, Stream):
            raise NotImplementedError(stream)
        new_streams.append(create_static_stream(stream, evaluations, fluent_predicates, future_fn))
        stream_atom = new_streams[-1].certified[0]
        add_predicate(domain, make_predicate(get_prefix(stream_atom), get_args(stream_atom)))
        preconditions = [stream_atom] + list(map(derived_fn, stream.domain))
        for certified_fact in stream.certified:
            derived_fact = derived_fn(certified_fact)
            external_params = get_args(derived_fact)
            internal_params = tuple(p for p in (stream.inputs + stream.outputs)
                                    if p not in get_args(derived_fact))
            domain.axioms.extend([
                make_axiom(parameters=external_params,
                           preconditions=[certified_fact],
                           derived=derived_fact),
                make_axiom(parameters=external_params + internal_params,
                           preconditions=preconditions,
                           derived=derived_fact),
            ])
        stream.certified = tuple(set(stream.certified) | set(map(future_fn, stream.certified)))
    if REPLACE_STREAM:
        streams.extend(new_streams)
    else:
        streams[:] = new_streams
def visualize_constraints(constraints, filename='constraint_network.pdf', use_functions=True):
    from pygraphviz import AGraph
    graph = AGraph(strict=True, directed=False)
    graph.node_attr['style'] = 'filled'
    #graph.node_attr['fontcolor'] = 'black'
    #graph.node_attr['fontsize'] = 12
    graph.node_attr['colorscheme'] = 'SVG'
    graph.edge_attr['colorscheme'] = 'SVG'
    #graph.graph_attr['rotate'] = 90
    #graph.node_attr['fixedsize'] = True
    graph.node_attr['width'] = 0
    graph.node_attr['height'] = 0.02  # Minimum height is 0.02
    graph.node_attr['margin'] = 0
    graph.graph_attr['rankdir'] = 'RL'
    graph.graph_attr['nodesep'] = 0.05
    graph.graph_attr['ranksep'] = 0.25
    #graph.graph_attr['pad'] = 0
    # splines="false";
    graph.graph_attr['outputMode'] = 'nodesfirst'
    graph.graph_attr['dpi'] = 300

    functions = set()
    negated = set()
    heads = set()
    for fact in constraints:
        prefix = get_prefix(fact)
        if prefix in (EQ, MINIMIZE):
            functions.add(fact[1])
        elif prefix == NOT:
            negated.add(fact[1])
        else:
            heads.add(fact)
    heads.update(functions)
    heads.update(negated)

    objects = {a for head in heads for a in get_args(head)}
    # Materialize the filter so membership checks below still work after the first iteration
    optimistic_objects = list(filter(lambda o: isinstance(o, OptimisticObject), objects))
    for opt_obj in optimistic_objects:
        graph.add_node(str(opt_obj), shape='circle', color=PARAMETER_COLOR)
    for head in heads:
        if not use_functions and (head in functions):
            continue
        # TODO: prune values w/o free parameters?
        name = str_from_fact(head)
        if head in functions:
            color = COST_COLOR
        elif head in negated:
            color = NEGATED_COLOR
        else:
            color = CONSTRAINT_COLOR
        graph.add_node(name, shape='box', color=color)
        for arg in get_args(head):
            if arg in optimistic_objects:
                graph.add_edge(name, str(arg))
    graph.draw(filename, prog='dot')  # neato | dot | twopi | circo | fdp | nop
    return graph
def fd_from_fact(fact):
    # TODO: convert to evaluation?
    prefix = get_prefix(fact)
    if prefix == NOT:
        return fd_from_fact(fact[1]).negate()
    if prefix == EQ:
        _, head, value = fact
        predicate = get_prefix(head)
        args = list(map(pddl_from_object, get_args(head)))
        fluent = pddl.f_expression.PrimitiveNumericExpression(symbol=predicate, args=args)
        expression = pddl.f_expression.NumericConstant(value)
        return pddl.f_expression.Assign(fluent, expression)
    args = list(map(pddl_from_object, get_args(fact)))
    return pddl.Atom(prefix, args)
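# fd_from_fact above assumes the tuple-based fact convention used throughout this code:
# a fact is (predicate, arg1, ..., argN), a negated fact is (NOT, fact), and an
# equality/cost fact is (EQ, head, value). A minimal, self-contained sketch of the
# accessors it relies on, assuming get_prefix/get_args simply split that tuple; the
# real pddlstream helpers may handle additional cases, and the names with a
# _sketch suffix are illustrative only:
NOT_SKETCH = 'not'
EQ_SKETCH = '='

def get_prefix_sketch(expression):
    return expression[0]  # predicate or connective name

def get_args_sketch(fact):
    return fact[1:]  # remaining tuple entries are the arguments

# Hypothetical facts in this convention:
fact_sketch = ('atconf', 'r1', 'q0')
negated_sketch = (NOT_SKETCH, fact_sketch)
cost_sketch = (EQ_SKETCH, ('distance', 'q0', 'q1'), 2.5)
assert get_prefix_sketch(fact_sketch) == 'atconf'
assert get_args_sketch(fact_sketch) == ('r1', 'q0')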
def remap_inputs(self, bindings):
    input_objects = remap_objects(self.instance.input_objects, bindings)
    fluent_facts = [(get_prefix(f),) + remap_objects(get_args(f), bindings)
                    for f in self.instance.fluent_facts]
    new_instance = self.external.get_instance(input_objects, fluent_facts=fluent_facts)
    new_instance.opt_index = self.instance.opt_index
    return self.__class__(new_instance, self.output_objects, self.opt_index)
def reuse_facts(problem, certificate, skeleton):
    # TODO: repackage streams
    # TODO: recover the full axiom + action plan
    # TODO: recover the plan preimage annotated with use time
    # Some supporting args are quantified out and thus lack some facts
    new_facts = []
    if skeleton is None:
        return new_facts
    reuse_objs = set()
    for action, args in skeleton:
        for arg in args:
            if (arg != WILD) and not is_parameter(arg):
                reuse_objs.add(hash_or_id(arg))
    # The reuse relpose omission is due to the fact that the initial pose was selected
    # (which is populated in the initial state)
    order_predicate = ORDER_PREDICATE.format('')
    domain = parse_domain(problem.domain_pddl)
    fluents = get_fluents(domain)
    for fact in certificate.preimage_facts:
        predicate = get_prefix(fact)
        if (predicate in {order_predicate, EQ}) or (predicate in fluents):
            # Could technically evaluate functions as well
            continue
        if all(isinstance(arg, str) or (hash_or_id(arg) in reuse_objs)
               for arg in get_args(fact)):
            new_facts.append(fact)
    return new_facts
def check_problem(domain, streams, obj_from_constant):
    for action in domain.actions + domain.axioms:
        for p, c in Counter(action.parameters).items():
            if c != 1:
                raise ValueError('Parameter [{}] for action [{}] is not unique'.format(p.name, action.name))
        # TODO: check that no undeclared parameters & constants
        #action.dump()
    undeclared_predicates = set()
    for stream in streams:
        # TODO: domain.functions
        facts = list(stream.domain)
        if isinstance(stream, Stream):
            facts.extend(stream.certified)
        for fact in facts:
            name = get_prefix(fact)
            if name not in domain.predicate_dict:
                undeclared_predicates.add(name)
            elif len(get_args(fact)) != domain.predicate_dict[name].get_arity():
                # predicate used with wrong arity: {}
                print('Warning! predicate used with wrong arity in stream [{}]: {}'.format(stream.name, fact))
        for constant in stream.constants:
            if constant not in obj_from_constant:
                raise ValueError('Undefined constant in stream [{}]: {}'.format(stream.name, constant))
    if undeclared_predicates:
        print('Warning! Undeclared predicates: {}'.format(sorted(undeclared_predicates)))  # Undeclared predicate: {}
def fn(literal, action):
    if literal.predicate not in predicate_map:
        return literal
    # TODO: other checks on only inputs
    stream = predicate_map[literal.predicate]
    mapping = remap_certified(literal, stream)
    if mapping is None:
        # TODO: this excludes typing. This is not entirely safe
        return literal
    output_args = set(mapping[arg] for arg in stream.outputs)
    if isinstance(action, pddl.Action):  # TODO: unified Action/Axiom effects
        for effect in action.effects:
            if isinstance(effect, pddl.Effect) and (output_args & set(effect.literal.args)):
                raise RuntimeError('Fluent stream outputs cannot be in action effects: {}'.format(
                    effect.literal.predicate))
    elif not stream.is_negated:
        axiom = action
        raise RuntimeError('Fluent stream outputs cannot be in an axiom: {}'.format(axiom.name))
    blocked_args = safe_apply_mapping(stream.inputs, mapping)
    blocked_literal = literal.__class__(stream.blocked_predicate, blocked_args).negate()
    if stream.is_negated:
        conditions = [blocked_literal]
        conditions.extend(pddl.Atom(get_prefix(fact), safe_apply_mapping(get_args(fact), mapping))  # fd_from_fact
                          for fact in stream.domain)  # TODO: be careful when using imply
        return pddl.Conjunction(conditions)  # TODO: prune redundant conditions
    return pddl.Conjunction([literal, blocked_literal])
def process_conditional_effect(effect, negative_from_predicate):
    import pddl
    new_parts = []
    stream_facts = []
    for disjunctive in get_conjunctive_parts(effect.condition):
        for literal in get_disjunctive_parts(disjunctive):
            # TODO: assert only one disjunctive part
            if isinstance(literal, pddl.Literal) and (literal.predicate in negative_from_predicate):
                stream = negative_from_predicate[literal.predicate]
                if not isinstance(stream, ConstraintStream):
                    new_parts.append(literal)
                    continue
                certified = find_unique(lambda f: get_prefix(f) == literal.predicate, stream.certified)
                mapping = get_mapping(get_args(certified), literal.args)
                stream_facts.append(fd_from_fact(substitute_expression(stream.stream_fact, mapping)))
                # TODO: add the negated literal as precondition here?
            else:
                new_parts.append(literal)
    return new_parts, stream_facts
def compile_to_exogenous_axioms(evaluations, domain, streams):
    # TODO: no attribute certified
    import pddl
    fluent_predicates = get_fluents(domain)
    certified_predicates = {get_prefix(a) for s in streams for a in s.certified}
    future_map = {p: 'f-{}'.format(p) for p in certified_predicates}
    augment_evaluations(evaluations, future_map)
    rename_future = lambda a: rename_atom(a, future_map)
    derived_map = {p: 'd-{}'.format(p) for p in certified_predicates}
    rename_derived = lambda a: rename_atom(a, derived_map)
    for action in domain.actions:
        action.precondition = replace_predicates(derived_map, action.precondition)
        for effect in action.effects:
            assert isinstance(effect, pddl.Effect)
            effect.condition = replace_predicates(derived_map, effect.condition)
    for axiom in domain.axioms:
        axiom.condition = replace_predicates(derived_map, axiom.condition)
    #fluent_predicates.update(certified_predicates)
    for stream in list(streams):
        if not isinstance(stream, Stream):
            raise NotImplementedError(stream)
        streams.append(create_static_stream(stream, evaluations, fluent_predicates, rename_future))
        stream_atom = streams[-1].certified[0]
        domain.predicate_dict[get_prefix(stream_atom)] = pddl.Predicate(get_prefix(stream_atom),
                                                                        get_args(stream_atom))
        preconditions = [stream_atom] + list(map(rename_derived, stream.domain))
        for fact in stream.certified:
            derived_fact = fd_from_fact(rename_derived(fact))
            external_params = derived_fact.args
            internal_params = tuple(p for p in (stream.inputs + stream.outputs)
                                    if p not in derived_fact.args)
            parameters = tuple(pddl.TypedObject(p, OBJECT)
                               for p in (external_params + internal_params))
            #precondition = pddl.Conjunction(tuple(map(fd_from_fact, [stream_atom] +
            #                                          list(map(rename_derived, stream.domain)))))
            #precondition = pddl.Disjunction([fd_from_fact(fact), precondition])  # TODO: quantifier
            domain.axioms.extend([
                pddl.Axiom(name=derived_fact.predicate,
                           parameters=parameters,
                           num_external_parameters=len(external_params),
                           condition=make_preconditions(preconditions)),
                pddl.Axiom(name=derived_fact.predicate,
                           parameters=parameters[:len(external_params)],
                           num_external_parameters=len(external_params),
                           condition=fd_from_fact(fact)),
            ])
        stream.certified = tuple(set(stream.certified) | set(map(rename_future, stream.certified)))
def remap_certified(literal, stream):
    certified = find_unique(lambda f: get_prefix(f) == literal.predicate, stream.certified)
    mapping = get_mapping(get_args(certified), literal.args)
    if not all(arg in mapping for arg in stream.inputs):
        # Certified must contain all inputs
        return None
    return mapping
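# remap_certified and several functions above lean on get_mapping to pair a certified
# fact's parameters with a ground literal's arguments. A minimal sketch of that pairing,
# assuming it zips the two argument sequences into a dict and (as the "mapping is None"
# checks elsewhere suggest) rejects inconsistent bindings; the actual pddlstream helper
# may differ in details, and get_mapping_sketch is a hypothetical stand-in:
def get_mapping_sketch(params, values, default=None):
    if len(params) != len(values):
        return default
    mapping = {}
    for param, value in zip(params, values):
        if mapping.get(param, value) != value:
            return default  # the same parameter would need two different values
        mapping[param] = value
    return mapping

# Hypothetical usage mirroring remap_certified: certified args ('?x', '?y') vs. literal args ('a', 'b')
assert get_mapping_sketch(('?x', '?y'), ('a', 'b')) == {'?x': 'a', '?y': 'b'}
assert get_mapping_sketch(('?x', '?x'), ('a', 'b')) is None  # inconsistent binding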
def __init__(self, name, gen_fn, inputs, domain, outputs, certified, info, fluents=[], is_wild=False):
    super(Stream, self).__init__(name, info, inputs, domain)
    self.outputs = tuple(outputs)
    self.certified = tuple(certified)
    self.constants.update(a for i in certified for a in get_args(i) if not is_parameter(a))

    for p, c in Counter(self.outputs).items():
        if not is_parameter(p):
            raise ValueError('Output [{}] for stream [{}] is not a parameter'.format(p, name))
        if c != 1:
            raise ValueError('Output [{}] for stream [{}] is not unique'.format(p, name))
    for p in set(self.inputs) & set(self.outputs):
        raise ValueError('Parameter [{}] for stream [{}] is both an input and output'.format(p, name))
    certified_parameters = {a for i in certified for a in get_args(i) if is_parameter(a)}
    for p in (certified_parameters - set(self.inputs + self.outputs)):
        raise ValueError('Parameter [{}] for stream [{}] is not included within outputs'.format(p, name))
    for p in (set(self.outputs) - certified_parameters):
        print('Warning! Output [{}] for stream [{}] is not covered by a certified condition'.format(p, name))

    # TODO: automatically switch to unique if only used once
    self.gen_fn = get_debug_gen_fn(self) if gen_fn == DEBUG else gen_fn
    self.num_opt_fns = 1 if self.outputs else 0  # Always unique if no outputs
    if isinstance(self.info.opt_gen_fn, PartialInputs):
        if self.info.opt_gen_fn.unique:
            self.num_opt_fns = 0
        self.opt_gen_fn = self.info.opt_gen_fn.get_opt_gen_fn(self)
    else:
        self.opt_gen_fn = self.info.opt_gen_fn
    #self.bound_list_fn = None  # TODO: generalize to a hierarchical sequence
    #self.opt_fns = [get_unique_fn(self), get_shared_fn(self)]  # get_unique_fn | get_shared_fn

    self.fluents = [] if gen_fn == DEBUG else fluents
    if NEGATIVE_BLOCKED:
        self.blocked_predicate = '~{}-negative'.format(self.name)  # Args are self.inputs
    else:
        self.blocked_predicate = '~{}'.format(self.name)
    self.disabled_instances = []
    self.is_wild = is_wild

    if self.is_negated():
        if self.outputs:
            raise ValueError('Negated streams cannot have outputs: {}'.format(self.outputs))
        #assert len(self.certified) == 1  # TODO: is it okay to have more than one fact?
        for certified in self.certified:
            if not (set(self.inputs) <= set(get_args(certified))):
                raise ValueError('Negated streams must have certified facts including all input parameters')
def make_axiom(parameters, preconditions, derived):
    predicate = get_prefix(derived)
    external_parameters = list(get_args(derived))
    internal_parameters = [p for p in parameters if p not in external_parameters]
    parameters = external_parameters + internal_parameters
    return pddl.Axiom(name=predicate,
                      parameters=make_parameters(parameters),
                      num_external_parameters=len(external_parameters),
                      condition=make_preconditions(preconditions))
def make_cost(cost):
    if cost is None:
        return cost
    fluent = pddl.PrimitiveNumericExpression(symbol=TOTAL_COST, args=[])
    try:
        expression = pddl.NumericConstant(cost)
    except TypeError:
        expression = pddl.PrimitiveNumericExpression(symbol=get_prefix(cost),
                                                     args=list(map(pddl_from_object, get_args(cost))))
    return pddl.Increase(fluent=fluent, expression=expression)
def remap_inputs(self, bindings):
    # TODO: speed this procedure up
    #if not any(o in bindings for o in self.instance.get_objects()):
    #    return self
    input_objects = apply_mapping(self.instance.input_objects, bindings)
    fluent_facts = [Fact(get_prefix(f), apply_mapping(get_args(f), bindings))
                    for f in self.instance.fluent_facts]
    new_instance = self.external.get_instance(input_objects, fluent_facts=fluent_facts)
    new_instance.opt_index = self.instance.opt_index
    return self.__class__(new_instance, self.output_objects, self.opt_index,
                          self.call_index, self.list_index, self.optimistic)
def apply_rules_to_streams(rules, streams):
    # TODO: can actually do this with multiple conditions if the stream certified contains all
    # TODO: do also when no domain conditions
    processed_rules = deque(rules)
    while processed_rules:
        rule = processed_rules.popleft()
        if len(rule.domain) != 1:
            continue
        [rule_fact] = rule.domain
        rule.info.p_success = 0  # Need not be applied
        for stream in streams:
            if not isinstance(stream, Stream):
                continue
            for stream_fact in stream.certified:
                if get_prefix(rule_fact) == get_prefix(stream_fact):
                    mapping = get_mapping(get_args(rule_fact), get_args(stream_fact))
                    new_facts = set(substitute_expression(rule.certified, mapping)) - set(stream.certified)
                    stream.certified = stream.certified + tuple(new_facts)
                    if new_facts and (stream in rules):
                        processed_rules.append(stream)
def __init__(self, optimizer, constraint, domain, fluents):
    self.optimizer = optimizer
    self.constraint = constraint
    inputs = get_args(constraint)
    outputs = []
    certified = [constraint]
    name = '{}-{}'.format(optimizer.name, get_prefix(constraint))
    gen_fn = get_gen_fn(optimizer.procedure, inputs, outputs, certified)
    #gen_fn = empty_gen()
    info = StreamInfo(effort_fn=get_effort_fn(optimizer.name), simultaneous=DEFAULT_SIMULTANEOUS)
    self.stream_fact = Fact('_{}'.format(name), concatenate(inputs, outputs))
    super(ConstraintStream, self).__init__(name, gen_fn, inputs, domain, outputs,
                                           certified, info, fluents=fluents)
def __init__(self, head, fn, domain, info):
    if info is None:
        # TODO: move the defaults to FunctionInfo in the event that an optimistic fn is specified
        info = FunctionInfo(p_success=self._default_p_success, overhead=self._default_overhead)
    super(Function, self).__init__(get_prefix(head), info, get_args(head), domain)
    self.head = head
    opt_fn = lambda *args: self.codomain()
    self.fn = opt_fn if fn == DEBUG else fn
    #arg_spec = get_arg_spec(self.fn)
    #if len(self.inputs) != len(arg_spec.args):
    #    raise TypeError('Function [{}] expects inputs {} but its procedure has inputs {}'.format(
    #        self.name, list(self.inputs), arg_spec.args))
    self.opt_fn = opt_fn if (self.info.opt_fn is None) else self.info.opt_fn
def existential_quantification(goal_literals):
    # TODO: merge with pddlstream-experiments
    goal_formula = []
    for literal in goal_literals:
        parameters = [a for a in get_args(literal) if is_parameter(a)]
        if parameters:
            type_literals = [('Type', p, get_parameter_name(p)) for p in parameters]
            goal_formula.append(Exists(parameters, And(literal, *type_literals)))
        else:
            goal_formula.append(literal)
    return And(*goal_formula)
def add_optimizer_axioms(results, instantiated):
    # Ends up being a little slower than version in optimizer.py when not blocking shared
    # TODO: add this to simultaneous
    import pddl
    results_from_instance = defaultdict(list)
    for result in results:
        results_from_instance[result.instance].append(result)
    optimizer_results = list(filter(is_optimizer_result, results))
    optimizers = {result.external.optimizer for result in optimizer_results}
    for optimizer in optimizers:
        optimizer_facts = {substitute_expression(result.external.stream_fact, result.get_mapping())
                           for result in optimizer_results if result.external.optimizer is optimizer}
        facts_from_arg = defaultdict(list)
        for fact in optimizer_facts:
            for arg in get_args(fact):
                facts_from_arg[arg].append(fact)
        for stream in optimizer.streams:
            if not stream.instance.disabled:
                continue
            constraints = stream.instance.get_constraints()
            output_variables = []
            for out in stream.output_objects:
                assert isinstance(out.param, UniqueOptValue)
                output_variables.append([r.output_objects[out.param.output_index]
                                         for r in results_from_instance[out.param.instance]])
            for combo in product(*output_variables):
                mapping = get_mapping(stream.output_objects, combo)
                name = '({})'.format(UNSATISFIABLE)
                blocked = set(substitute_expression(constraints, mapping))
                additional = {fact for arg in combo for fact in facts_from_arg[arg]} - blocked
                # TODO: like a partial disable, if something has no outputs, then adding things isn't going to help
                if stream.instance.enumerated and not stream.instance.successes:
                    # Assumes the optimizer is submodular
                    condition = list(map(fd_from_fact, blocked))
                else:
                    condition = list(map(fd_from_fact, blocked | set(map(Not, additional))))
                effect = fd_from_fact((UNSATISFIABLE,))
                instantiated.axioms.append(pddl.PropositionalAxiom(name, condition, effect))
                instantiated.atoms.add(effect)
def optimizer_conditional_effects(domain, externals):
    import pddl
    #from pddlstream.algorithms.scheduling.negative import get_negative_predicates
    # TODO: extend this to predicates
    if UNIVERSAL_TO_CONDITIONAL:
        negative_streams = list(filter(lambda e: e.is_negated(), externals))
    else:
        negative_streams = list(filter(lambda e: isinstance(e, ConstraintStream) and e.is_negated(),
                                       externals))
    negative_from_predicate = get_predicate_map(negative_streams)
    if not negative_from_predicate:
        return
    for action in domain.actions:
        universal_to_conditional(action)
        new_effects = []
        for effect in action.effects:
            if effect.literal.predicate != UNSATISFIABLE:
                new_effects.append(effect)
                continue
            new_parts = []
            stream_facts = []
            for disjunctive in get_conjunctive_parts(effect.condition):
                for literal in get_disjunctive_parts(disjunctive):
                    # TODO: assert only one disjunctive part
                    if isinstance(literal, pddl.Literal) and (literal.predicate in negative_from_predicate):
                        stream = negative_from_predicate[literal.predicate]
                        if not isinstance(stream, ConstraintStream):
                            new_parts.append(literal)
                            continue
                        certified = find_unique(lambda f: get_prefix(f) == literal.predicate,
                                                stream.certified)
                        mapping = get_mapping(get_args(certified), literal.args)
                        stream_facts.append(fd_from_fact(substitute_expression(stream.stream_fact, mapping)))
                        # TODO: add the negated literal as precondition here?
                    else:
                        new_parts.append(literal)
            if not stream_facts:
                new_effects.append(effect)
            for stream_fact in stream_facts:
                new_effects.append(pddl.Effect(effect.parameters, pddl.Conjunction(new_parts), stream_fact))
        action.effects = new_effects
def compile_to_exogenous_actions(evaluations, domain, streams):
    # TODO: automatically derive fluents
    # TODO: version of this that operates on fluents of length one?
    # TODO: better instantiation when have full parameters
    # TODO: conversion from stream cost to real cost units?
    # TODO: any predicates derived would need to be replaced as well
    fluent_predicates = get_fluents(domain)
    certified_predicates = {get_prefix(a) for s in streams for a in s.certified}
    future_map = {p: 'f-{}'.format(p) for p in certified_predicates}
    augment_evaluations(evaluations, future_map)
    rename_future = lambda a: rename_atom(a, future_map)
    for stream in list(streams):
        if not isinstance(stream, Stream):
            raise NotImplementedError(stream)
        # TODO: could also just have conditions asserting that one of the fluent conditions fails
        streams.append(create_static_stream(stream, evaluations, fluent_predicates, rename_future))
        stream_atom = streams[-1].certified[0]
        add_predicate(domain, make_predicate(get_prefix(stream_atom), get_args(stream_atom)))
        preconditions = [stream_atom] + list(stream.domain)
        effort = 1  # TODO: use stream info
        #effort = 1 if unit_cost else result.instance.get_effort()
        #if effort == INF:
        #    continue
        domain.actions.append(make_action(
            name='call-{}'.format(stream.name),
            parameters=get_args(stream_atom),
            preconditions=preconditions,
            effects=stream.certified,
            cost=effort))
        stream.certified = tuple(set(stream.certified) | set(map(rename_future, stream.certified)))
def __init__(self, head, fn, domain, info):
    # TODO: function values that act as preconditions (cost must be below threshold)
    if info is None:
        # TODO: move the defaults to FunctionInfo in the event that an optimistic fn is specified
        info = FunctionInfo()  #p_success=self._default_p_success)
    super(Function, self).__init__(get_prefix(head), info, get_args(head), domain)
    self.head = head
    opt_fn = lambda *args: self.codomain()
    self.fn = opt_fn if (fn in DEBUG_MODES) else fn
    #arg_spec = get_arg_spec(self.fn)
    #if len(self.inputs) != len(arg_spec.args):
    #    raise TypeError('Function [{}] expects inputs {} but its procedure has inputs {}'.format(
    #        self.name, list(self.inputs), arg_spec.args))
    self.opt_fn = opt_fn if (self.info.opt_fn is None) else self.info.opt_fn
def __init__(self, optimizer, constraint, domain, infos):
    # TODO: could support fluents and compile them into conditional effects
    inputs = get_args(constraint)
    outputs = []
    certified = [constraint]
    name = '{}-{}'.format(optimizer.name, get_prefix(constraint))
    gen_fn = get_list_gen_fn(optimizer.procedure, inputs, outputs, certified)
    #gen_fn = empty_gen()
    info = infos.get(name, None)
    if info is None:
        info = StreamInfo(effort=get_effort_fn(optimizer.name), simultaneous=DEFAULT_SIMULTANEOUS)
    super(ConstraintStream, self).__init__(optimizer, name, gen_fn, inputs, domain,
                                           outputs, certified, info)
def fn(literal):
    if literal.predicate not in predicate_map:
        return literal
    # TODO: other checks on only inputs
    stream = predicate_map[literal.predicate]
    certified = find_unique(lambda f: get_prefix(f) == literal.predicate, stream.certified)
    mapping = get_mapping(get_args(certified), literal.args)
    #assert all(arg in mapping for arg in stream.inputs)  # Certified must contain all inputs
    if not all(arg in mapping for arg in stream.inputs):
        # TODO: this excludes typing. This is not entirely safe
        return literal
    blocked_args = tuple(mapping[arg] for arg in stream.inputs)
    blocked_literal = literal.__class__(stream.blocked_predicate, blocked_args).negate()
    if stream.is_negated():
        # TODO: add stream conditions here
        return blocked_literal
    return pddl.Conjunction([literal, blocked_literal])
def visualize_constraints(constraints, filename='constraint_network' + DEFAULT_EXTENSION, use_functions=True):
    from pygraphviz import AGraph
    graph = AGraph(strict=True, directed=False)
    graph.node_attr['style'] = 'filled'
    #graph.node_attr['fontcolor'] = 'black'
    #graph.node_attr['fontsize'] = 12
    graph.node_attr['colorscheme'] = 'SVG'
    graph.edge_attr['colorscheme'] = 'SVG'
    #graph.graph_attr['rotate'] = 90
    #graph.node_attr['fixedsize'] = True
    graph.node_attr['width'] = 0
    graph.node_attr['height'] = 0.02  # Minimum height is 0.02
    graph.node_attr['margin'] = 0
    graph.graph_attr['rankdir'] = 'RL'
    graph.graph_attr['nodesep'] = 0.05
    graph.graph_attr['ranksep'] = 0.25
    #graph.graph_attr['pad'] = 0
    # splines="false";
    graph.graph_attr['outputMode'] = 'nodesfirst'
    graph.graph_attr['dpi'] = 300

    positive, negated, functions = partition_facts(constraints)
    for head in (positive + negated + functions):
        # TODO: prune values w/o free parameters?
        name = str_from_fact(head)
        if head in functions:
            if not use_functions:
                continue
            color = COST_COLOR
        elif head in negated:
            color = NEGATED_COLOR
        else:
            color = CONSTRAINT_COLOR
        graph.add_node(name, shape='box', color=color)
        for arg in get_args(head):
            if isinstance(arg, OptimisticObject) or is_parameter(arg):
                arg_name = str(arg)
                graph.add_node(arg_name, shape='circle', color=PARAMETER_COLOR)
                graph.add_edge(name, arg_name)
    graph.draw(filename, prog='dot')  # neato | dot | twopi | circo | fdp | nop
    print('Saved', filename)
    return graph