def add_effect(self, effect, c):
    """Helper function for visit_effect_stmt.

    Keyword arguments:
    effect -- instance of the effect data structure
    c -- the formula representing the effect that we want to add to the
         addlist or dellist
    """
    # Needed for instance check.
    from .parser import Variable
    nextPredicate = None
    isNegative = False
    if c.key == 'not':
        # This is a negative effect, only one child allowed.
        if len(c.children) != 1:
            raise SemanticError('Error not statement with multiple '
                                'children in effect of action')
        nextPredicate = c.children[0]
        isNegative = True
    else:
        nextPredicate = c
    # Check whether predicate was defined previously.
    if nextPredicate.key not in self._predicates:
        raise SemanticError('Error: unknown predicate %s used in effect '
                            'of action' % nextPredicate.key)
    if nextPredicate is None:
        raise SemanticError('Error: NoneType predicate used in effect of '
                            'action')
    predDef = self._predicates[nextPredicate.key]
    signature = list()
    count = 0
    # Check whether predicate is used with the correct signature.
    if len(nextPredicate.children) != len(predDef.signature):
        raise SemanticError('Error: wrong number of arguments for '
                            'predicate ' + nextPredicate.key +
                            ' in effect of action')
    # Apply to all parameters.
    for v in nextPredicate.children:
        if isinstance(v.key, Variable):
            signature.append((v.key.name, predDef.signature[count][1]))
        else:
            signature.append((v.key, predDef.signature[count][1]))
        count += 1
    # Add a new effect to the positive or negative effects respectively.
    if isNegative:
        effect.dellist.append(pddl.Predicate(nextPredicate.key, signature))
    else:
        effect.addlist.append(pddl.Predicate(nextPredicate.key, signature))
def replace(condition):
    if isinstance(condition, pddl.FunctionComparison):
        non_negated = condition
        if condition.negated:
            non_negated = condition.negate()
        # remove possible duplicates
        fluent_str = 'num_condition_satisfied{0}'.format(task.comparator_count)
        new_predicate_args = dict()
        functions = condition.get_functions()
        for func in functions:
            arg_types = extract_arg_types(func)
            for (key, value) in zip(func.args, arg_types):
                new_predicate_args[key] = pddl.TypedObject(key, value)
        args = []
        for key in sorted(new_predicate_args.keys()):
            args.append(new_predicate_args[key])
        predicate = pddl.Predicate(fluent_str, args)
        task.predicates.append(predicate)
        if condition.negated:
            new_atom = pddl.NegatedNumericWrapper(predicate.name,
                                                  [x.name for x in args],
                                                  non_negated)
        else:
            new_atom = pddl.NumericWrapper(predicate.name,
                                           [x.name for x in args],
                                           non_negated)
        task.comparator_count += 1
        return new_atom
    if condition.has_numeric_precondition():
        new_condition = condition.__class__([])
        new_condition.parts = [replace(x) for x in condition.parts]
        return new_condition
    return condition
def compile_into_unary_predicates(task):
    """Create one new unary predicate for each object type."""
    for t in task.types:
        pred_name = _get_type_predicate_name(str(t))
        task.predicates.append(pddl.Predicate(pred_name, ['?x']))
        name_to_type_pred[pred_name] = task.predicates[-1]
    return
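# Hedged illustration (not part of the code above): a self-contained sketch of
# the idea behind compile_into_unary_predicates, namely giving every object
# type a unary "type predicate" over a single variable '?x'.  The naming
# scheme and the dict-based stand-in for pddl.Predicate are assumptions made
# for this sketch only; the real code relies on _get_type_predicate_name and
# the planner's pddl module.
def _sketch_type_predicate_name(type_name):
    # Hypothetical naming scheme, used only in this illustration.
    return 'is-{}'.format(type_name)

def sketch_compile_unary_predicates(type_names):
    # Map each type name to a (predicate_name, argument_list) pair.
    return {t: (_sketch_type_predicate_name(t), ['?x']) for t in type_names}

# sketch_compile_unary_predicates(['block', 'truck'])
# -> {'block': ('is-block', ['?x']), 'truck': ('is-truck', ['?x'])}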
def parse_temporal_domain(domain_pddl):
    translate_path = os.path.join(get_tfd_path(), 'translate/')  # tfd & temporal-FD
    prefixes = ['pddl', 'normalize']
    deleted = delete_imports(prefixes)
    sys.path.insert(0, translate_path)
    import pddl
    import normalize
    temporal_domain = TemporalDomain(*pddl.tasks.parse_domain(
        pddl.parser.parse_nested_list(domain_pddl.splitlines())))
    name, requirements, constants, predicates, types, functions, actions, \
        durative_actions, axioms = temporal_domain
    fluents = normalize.get_fluent_predicates(temporal_domain)

    sys.path.remove(translate_path)
    delete_imports(prefixes)
    sys.modules.update(deleted)  # This is important, otherwise classes are messed up
    import pddl
    import pddl_parser

    assert not actions
    simple_from_durative = simple_from_durative_action(durative_actions, fluents)
    simple_actions = [action for triplet in simple_from_durative.values()
                      for action in triplet]

    requirements = pddl.Requirements([])
    types = [pddl.Type(ty.name, ty.basetype_name) for ty in types]
    pddl_parser.parsing_functions.set_supertypes(types)
    predicates = [pddl.Predicate(p.name, p.arguments) for p in predicates]
    constants = convert_parameters(constants)
    axioms = list(map(convert_axiom, axioms))

    return SimplifiedDomain(name, requirements, types,
                            {ty.name: ty for ty in types}, constants,
                            predicates, {p.name: p for p in predicates},
                            functions, simple_actions, axioms,
                            simple_from_durative, domain_pddl)
def add_precond(self, precond, c, isNegative):
    """Helper function for visit_precondition_stmt.

    Keyword arguments:
    precond -- a list of (predicate, positive) tuples, where positive is
               True if the precondition is not negated
    c -- the formula representing a precondition we want to add to the list
    isNegative -- True if the precondition is negated
    """
    from .parser import Variable
    predDef = self._predicates[c.key]
    signature = list()
    count = 0
    # Check for correct number of arguments.
    if len(c.children) != len(predDef.signature):
        raise SemanticError('Error: wrong number of arguments for '
                            'predicate ' + c.key + ' in precondition of '
                            'action')
    # Apply to all arguments.
    for v in c.children:
        if isinstance(v.key, Variable):
            signature.append((v.key.name, predDef.signature[count][1]))
        else:
            signature.append((v.key, predDef.signature[count][1]))
        count += 1
    # Add predicate to precondition list.
    precond.append((pddl.Predicate(c.key, signature), not isNegative))
def compile_to_exogenous_axioms(evaluations, domain, streams):
    # TODO: no attribute certified
    import pddl
    fluent_predicates = get_fluents(domain)
    certified_predicates = {get_prefix(a) for s in streams for a in s.certified}
    future_map = {p: 'f-{}'.format(p) for p in certified_predicates}
    augment_evaluations(evaluations, future_map)
    rename_future = lambda a: rename_atom(a, future_map)
    derived_map = {p: 'd-{}'.format(p) for p in certified_predicates}
    rename_derived = lambda a: rename_atom(a, derived_map)

    for action in domain.actions:
        action.precondition = replace_predicates(derived_map, action.precondition)
        for effect in action.effects:
            assert isinstance(effect, pddl.Effect)
            effect.condition = replace_predicates(derived_map, effect.condition)
    for axiom in domain.axioms:
        axiom.condition = replace_predicates(derived_map, axiom.condition)

    #fluent_predicates.update(certified_predicates)
    for stream in list(streams):
        if not isinstance(stream, Stream):
            raise NotImplementedError(stream)
        streams.append(create_static_stream(stream, evaluations,
                                            fluent_predicates, rename_future))
        stream_atom = streams[-1].certified[0]
        domain.predicate_dict[get_prefix(stream_atom)] = pddl.Predicate(
            get_prefix(stream_atom), get_args(stream_atom))
        preconditions = [stream_atom] + list(map(rename_derived, stream.domain))
        for fact in stream.certified:
            derived_fact = fd_from_fact(rename_derived(fact))
            external_params = derived_fact.args
            internal_params = tuple(p for p in (stream.inputs + stream.outputs)
                                    if p not in derived_fact.args)
            parameters = tuple(pddl.TypedObject(p, OBJECT)
                               for p in (external_params + internal_params))
            #precondition = pddl.Conjunction(tuple(map(fd_from_fact, [stream_atom] +
            #                                          list(map(rename_derived, stream.domain)))))
            #precondition = pddl.Disjunction([fd_from_fact(fact), precondition])  # TODO: quantifier
            domain.axioms.extend([
                pddl.Axiom(name=derived_fact.predicate,
                           parameters=parameters,
                           num_external_parameters=len(external_params),
                           condition=make_preconditions(preconditions)),
                pddl.Axiom(name=derived_fact.predicate,
                           parameters=parameters[:len(external_params)],
                           num_external_parameters=len(external_params),
                           condition=fd_from_fact(fact)),
            ])
        stream.certified = tuple(set(stream.certified) |
                                 set(map(rename_future, stream.certified)))
def visit_predicate(self, node):
    """Visits a PDDL predicate."""
    signature = list()
    # Visit all predicate parameters.
    for v in node.parameters:
        v.accept(self)
        signatureTuple = self.get_in(v)
        # Append each parameter to the predicate signature.
        signature.append(signatureTuple)
    # Create new PDDL predicate and store it in node.
    self.set_in(node, pddl.Predicate(node.name, signature))
def visit_predicate_instance(self, node):
    """Visits a PDDL-problem predicate instance."""
    signature = list()
    # Visit all parameters.
    for o in node.parameters:
        o_type = None
        # Check whether the object was introduced in the problem objects or
        # in the domain constants.
        if not (o in self._objects or o in self._domain.constants):
            raise SemanticError('Error: object ' + o + ' referenced in '
                                'problem definition - but not defined')
        elif o in self._objects:
            o_type = self._objects[o]
        elif o in self._domain.constants:
            o_type = self._domain.constants[o]
        signature.append((o, o_type.name))  # (o_type)
    self.set_in(node, pddl.Predicate(node.name, signature))
def compile_to_exogenous_actions(evaluations, domain, streams):
    import pddl
    # TODO: automatically derive fluents
    # TODO: version of this that operates on fluents of length one?
    # TODO: better instantiation when have full parameters
    # TODO: conversion from stream cost to real cost units?
    # TODO: any predicates derived would need to be replaced as well
    fluent_predicates = get_fluents(domain)
    certified_predicates = {get_prefix(a) for s in streams for a in s.certified}
    future_map = {p: 'f-{}'.format(p) for p in certified_predicates}
    augment_evaluations(evaluations, future_map)
    rename_future = lambda a: rename_atom(a, future_map)
    for stream in list(streams):
        if not isinstance(stream, Stream):
            raise NotImplementedError(stream)
        # TODO: could also just have conditions asserting that one of the fluent conditions fails
        streams.append(create_static_stream(stream, evaluations,
                                            fluent_predicates, rename_future))
        stream_atom = streams[-1].certified[0]
        parameters = [pddl.TypedObject(p, OBJECT) for p in get_args(stream_atom)]
        # TODO: add to predicates as well?
        domain.predicate_dict[get_prefix(stream_atom)] = pddl.Predicate(
            get_prefix(stream_atom), parameters)
        preconditions = [stream_atom] + list(stream.domain)
        effort = 1  # TODO: use stream info
        #effort = 1 if unit_cost else result.instance.get_effort()
        #if effort == INF:
        #    continue
        domain.actions.append(pddl.Action(name='call-{}'.format(stream.name),
                                          parameters=parameters,
                                          num_external_parameters=len(parameters),
                                          precondition=make_preconditions(preconditions),
                                          effects=make_effects(stream.certified),
                                          cost=make_cost(effort)))
        stream.certified = tuple(set(stream.certified) |
                                 set(map(rename_future, stream.certified)))
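# Hedged, self-contained sketch of the 'f-' future-predicate renaming used by
# compile_to_exogenous_actions above: every certified predicate p gets a
# future counterpart 'f-p', and stream facts are rewritten accordingly.  The
# tuple-based fact representation ('pred', arg1, arg2, ...) and the function
# name below are assumptions made for this sketch, not the library's
# rename_atom implementation.
def sketch_rename_atom(fact, name_map):
    # Rewrite the predicate symbol of a (name, *args) fact tuple.
    name, args = fact[0], fact[1:]
    return (name_map.get(name, name),) + args

# future_map = {'pose': 'f-pose'}
# sketch_rename_atom(('pose', '?o', '?p'), future_map)
# -> ('f-pose', '?o', '?p')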
def parse_domain_custom(checker):
    # parse domain file
    checker.parse_code(checker.files['domain'], checker.parse_token_domain)
    yield checker.names['domain']
    if checker.requirements:
        yield pddl.Requirements(checker.requirements)
    else:
        yield pddl.Requirements([":strips"])
    types = [pddl.Type("object")]
    set_supertypes(types)
    type_dict = dict((type_.name, type_) for type_ in types)
    yield types
    yield type_dict
    yield []
    predicates = [[pred] + checker.predicates[pred]['params']
                  for pred in checker.predicates]
    predicates = [parse_predicate(entry) for entry in predicates]
    predicates += [pddl.Predicate("=", [pddl.TypedObject("?x", "object"),
                                        pddl.TypedObject("?y", "object")])]
    predicate_dict = dict((pred.name, pred) for pred in predicates)
    yield predicates
    yield predicate_dict
    yield []
    actions = []
    entries = [[':action', action,
                ':parameters', checker.actions[action]['params'],
                ':precondition', checker.actions[action]['logic_precs'],
                ':effect', checker.actions[action]['logic_effs']]
               for action in checker.actions]
    for entry in entries:
        action = parse_action(entry, type_dict, predicate_dict)
        if action is not None:
            actions.append(action)
    yield actions
    yield []
def compile_to_exogenous_actions(evaluations, domain, streams):
    import pddl
    # TODO: automatically derive fluents
    # TODO: version of this that operates on fluents of length one?
    # TODO: better instantiation when have full parameters
    # TODO: conversion from stream cost to real cost units?
    # TODO: any predicates derived would need to be replaced as well
    fluent_predicates = get_fluents(domain)
    domain_predicates = {get_prefix(a) for s in streams for a in s.domain}
    if not (domain_predicates & fluent_predicates):
        return
    certified_predicates = {get_prefix(a) for s in streams for a in s.certified}
    future_map = {p: 'f-{}'.format(p) for p in certified_predicates}
    augment_evaluations(evaluations, future_map)
    rename_future = lambda a: rename_atom(a, future_map)
    for stream in list(streams):
        if not isinstance(stream, Stream):
            raise NotImplementedError(stream)
        # TODO: could also just have conditions asserting that one of the fluent conditions fails
        streams.append(create_static_stream(stream, evaluations,
                                            fluent_predicates, rename_future))
        stream_atom = streams[-1].certified[0]
        parameters = [pddl.TypedObject(p, 'object') for p in get_args(stream_atom)]
        # TODO: add to predicates as well?
        domain.predicate_dict[get_prefix(stream_atom)] = pddl.Predicate(
            get_prefix(stream_atom), parameters)
        precondition = pddl.Conjunction(tuple(map(
            fd_from_fact, (stream_atom,) + tuple(stream.domain))))
        effects = [pddl.Effect(parameters=[], condition=pddl.Truth(),
                               literal=fd_from_fact(fact))
                   for fact in stream.certified]
        effort = 1  # TODO: use stream info
        #effort = 1 if unit_cost else result.instance.get_effort()
        #if effort == INF:
        #    continue
        fluent = pddl.PrimitiveNumericExpression(symbol=TOTAL_COST, args=[])
        expression = pddl.NumericConstant(int_ceil(effort))  # Integer
        cost = pddl.Increase(fluent=fluent, expression=expression)  # Can also be None
        domain.actions.append(pddl.Action(name='call-{}'.format(stream.name),
                                          parameters=parameters,
                                          num_external_parameters=len(parameters),
                                          precondition=precondition,
                                          effects=effects,
                                          cost=cost))
        stream.certified = tuple(set(stream.certified) |
                                 set(map(rename_future, stream.certified)))
def add_goal(self, goal, c):
    """Helper function for visit_goal_stmt.

    Keyword arguments:
    goal -- a list of goals
    c -- a formula representing a goal we want to add to the goal list
    """
    # Check whether predicate was introduced in the domain file.
    if c.key not in self._domain.predicates:
        raise SemanticError('Error: unknown predicate ' + c.key +
                            ' in goal definition')
    # Get predicate from the domain data structure.
    predDef = self._domain.predicates[c.key]
    signature = list()
    count = 0
    # Check whether the predicate uses the correct signature.
    if len(c.children) != len(predDef.signature):
        raise SemanticError('Error: wrong number of arguments for '
                            'predicate ' + c.key + ' in goal')
    for v in c.children:
        signature.append((v.key, predDef.signature[count][1]))
        count += 1
    # Add the predicate to the goal.
    goal.append(pddl.Predicate(c.key, signature))
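# Hedged, self-contained sketch of the (argument, type) signature tuples that
# add_goal, add_precond and add_effect above construct before wrapping them in
# pddl.Predicate.  MiniPredicate is a hypothetical stand-in used only here; the
# 'at' predicate definition in the example is likewise assumed.
from collections import namedtuple

MiniPredicate = namedtuple('MiniPredicate', ['name', 'signature'])

def sketch_build_goal_predicate(name, args, pred_signature):
    # Pair each ground argument with the type recorded in the predicate
    # definition, mirroring the loop in add_goal.
    signature = [(arg, pred_signature[i][1]) for i, arg in enumerate(args)]
    return MiniPredicate(name, signature)

# sketch_build_goal_predicate('at', ['ball1', 'roomA'],
#                             [('?x', 'object'), ('?y', 'location')])
# -> MiniPredicate(name='at',
#                  signature=[('ball1', 'object'), ('roomA', 'location')])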
def parse_predicate(alist):
    name = alist[0]
    arguments = parse_typed_list(alist[1:], only_variables=True)
    return pddl.Predicate(name, arguments)
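# Hedged, self-contained sketch of the "typed list" convention that
# parse_predicate / parse_typed_list above consume: variables followed by '-'
# and a type name, e.g. ['at', '?obj', '-', 'object', '?loc', '-', 'location'].
# The splitter below is an illustration only, not the translator's
# parse_typed_list.
def sketch_split_typed_list(alist):
    # Return (name, [(variable, type), ...]) for a flat predicate s-expression.
    name, rest = alist[0], alist[1:]
    pairs, pending = [], []
    i = 0
    while i < len(rest):
        if rest[i] == '-':
            # A '-' type applies to all preceding untyped variables.
            type_name = rest[i + 1]
            pairs.extend((var, type_name) for var in pending)
            pending = []
            i += 2
        else:
            pending.append(rest[i])
            i += 1
    # Untyped trailing variables default to 'object', as in PDDL.
    pairs.extend((var, 'object') for var in pending)
    return name, pairs

# sketch_split_typed_list(['at', '?obj', '-', 'object', '?loc', '-', 'location'])
# -> ('at', [('?obj', 'object'), ('?loc', 'location')])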
def parse_domain_pddl(domain_pddl):
    iterator = iter(domain_pddl)

    define_tag = next(iterator)
    assert define_tag == "define"
    domain_line = next(iterator)
    assert domain_line[0] == "domain" and len(domain_line) == 2
    yield domain_line[1]

    ## We allow an arbitrary order of the requirement, types, constants,
    ## predicates and functions specification. The PDDL BNF is more strict on
    ## this, so we print a warning if it is violated.
    requirements = pddl.Requirements([":strips"])
    the_types = [pddl.Type("object")]
    constants, the_predicates, the_functions = [], [], []
    correct_order = [":requirements", ":types", ":constants", ":predicates",
                     ":functions"]
    seen_fields = []
    for opt in iterator:
        field = opt[0]
        if field not in correct_order:
            first_action = opt
            break
        if field in seen_fields:
            raise SystemExit("Error in domain specification\n" +
                             "Reason: two '%s' specifications." % field)
        if (seen_fields and
                correct_order.index(seen_fields[-1]) > correct_order.index(field)):
            msg = "\nWarning: %s specification not allowed here (cf. PDDL BNF)" % field
            print(msg, file=sys.stderr)
        seen_fields.append(field)
        if field == ":requirements":
            requirements = pddl.Requirements(opt[1:])
        elif field == ":types":
            the_types.extend(parse_typed_list(opt[1:], constructor=pddl.Type))
        elif field == ":constants":
            constants = parse_typed_list(opt[1:])
        elif field == ":predicates":
            the_predicates = [parse_predicate(entry) for entry in opt[1:]]
            the_predicates += [pddl.Predicate("=", [
                pddl.TypedObject("?x", "object"),
                pddl.TypedObject("?y", "object")])]
        elif field == ":functions":
            the_functions = parse_typed_list(opt[1:],
                                             constructor=parse_function,
                                             default_type="number")
    set_supertypes(the_types)
    yield requirements
    yield the_types
    type_dict = dict((type.name, type) for type in the_types)
    yield type_dict
    yield constants
    yield the_predicates
    predicate_dict = dict((pred.name, pred) for pred in the_predicates)
    yield predicate_dict
    yield the_functions

    entries = [first_action] + [entry for entry in iterator]
    the_axioms = []
    the_actions = []
    for entry in entries:
        if entry[0] == ":derived":
            axiom = parse_axiom(entry, type_dict, predicate_dict)
            the_axioms.append(axiom)
        else:
            action = parse_action(entry, type_dict, predicate_dict)
            if action is not None:
                the_actions.append(action)
    yield the_actions
    yield the_axioms
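# Hedged usage note: parse_domain_pddl is a generator that yields the domain
# components in a fixed order, so callers typically unpack it in one go,
# roughly as sketched below (the caller-side variable names are assumptions).
#
#   (domain_name, requirements, types, type_dict, constants, predicates,
#    predicate_dict, functions, actions, axioms) = parse_domain_pddl(alist)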
def make_predicate(name, parameters):
    return pddl.Predicate(name, make_parameters(parameters))
def parse_predicate(alist):
    name = alist[0]
    # if DEBUG: print("parsing predicate: ", name)
    arguments = parse_typed_list(alist[1:], only_variables=True)
    return pddl.Predicate(name, arguments)
def parse_domain_pddl(domain_pddl):
    def typesplit(alist):
        # Recurse nested lists and replace "-type" by "-", "type",
        # an error which occurs in some sloppily modeled domains.
        ix = 0
        while ix < len(alist):
            el = alist[ix]
            # print("checking element %s" % el)
            if isinstance(el, list):
                typesplit(alist[ix])
            elif len(el) > 1 and el[0] == "-":
                msg = ("\nWARNING: %s seems to be a 'type' definition missing a space.\n"
                       "Splitting Element into '-' and '%s'") % (el, el[1:])
                print(msg, file=sys.stderr)
                alist[ix:ix + 1] = el[0], el[1:]
            ix += 1

    iterator = iter(domain_pddl)

    define_tag = next(iterator)
    assert define_tag == "define"
    domain_line = next(iterator)
    assert domain_line[0] == "domain" and len(domain_line) == 2
    yield domain_line[1]

    ## We allow an arbitrary order of the requirement, types, constants,
    ## predicates and functions specification. The PDDL BNF is more strict on
    ## this, so we print a warning if it is violated.
    requirements = pddl.Requirements([":strips"])
    the_types = [pddl.Type("object")]
    constants, the_functions = [], []
    the_predicates = [pddl.Predicate("=", [pddl.TypedObject("?x", "object"),
                                           pddl.TypedObject("?y", "object")])]
    # the_free_functions = []  ## support for global constraints with free
    ##                            functions is not implemented yet
    correct_order = [":requirements", ":types", ":constants", ":predicates",
                     ":functions"]  # , ":free_functions"]
    seen_fields = []
    first_action = None
    for opt in iterator:
        # print("Options before: ", opt)
        typesplit(opt)  # fix for missing space between dash '-' and type identifier
        # print("Options after: ", opt)
        field = opt[0]
        if field not in correct_order:
            first_action = opt
            break
        if field in seen_fields:
            raise SystemExit("Error in domain specification\n" +
                             "Reason: two '%s' specifications." % field)
        if (seen_fields and
                correct_order.index(seen_fields[-1]) > correct_order.index(field)):
            msg = "\nWARNING: %s specification not allowed here (cf. PDDL BNF)" % field
            print(msg, file=sys.stderr)
        seen_fields.append(field)
        if field == ":requirements":
            requirements = pddl.Requirements(opt[1:])
        elif field == ":types":
            the_types.extend(parse_typed_list(opt[1:], constructor=pddl.Type))
        elif field == ":constants":
            constants = parse_typed_list(opt[1:])
        elif field == ":predicates":
            the_predicates += [parse_predicate(entry) for entry in opt[1:]]
        elif field == ":functions":
            the_functions = parse_typed_list(opt[1:],
                                             constructor=parse_function,
                                             default_type="number")
        # elif field == ":free_functions":
        #     the_free_functions = parse_typed_list(
        #         opt[1:],
        #         constructor=parse_function,
        #         default_type="number")
    set_supertypes(the_types)
    yield requirements
    yield the_types
    type_dict = dict((pddltype.name, pddltype) for pddltype in the_types)
    yield type_dict
    yield constants
    yield the_predicates
    predicate_dict = dict((pred.name, pred) for pred in the_predicates)
    yield predicate_dict
    total_cost_fluent = pddl.Function("total-cost", [], "number")
    the_functions.append(total_cost_fluent)
    # the_functions.append(the_free_functions)
    yield the_functions

    entries = []
    if first_action is not None:
        entries.append(first_action)
    entries.extend(iterator)

    the_axioms = []
    the_actions = []
    for entry in entries:
        # if DEBUG: print("Entries before: ", entry)
        typesplit(entry)  # fix for missing space between dash '-' and type identifier
        # if DEBUG: print("Entries after: ", entry)
        if entry[0] == ":derived":
            axiom = parse_axiom(entry, type_dict, predicate_dict)
            the_axioms.append(axiom)
        elif entry[0] == ":action":
            action = parse_action(entry, type_dict, predicate_dict)
            if action is not None:
                the_actions.append(action)
        elif entry[0] == ":constraint":
            ## support for global constraints is new in NFD
            global_constraint = parse_global_constraint(entry, type_dict,
                                                        predicate_dict)
            the_axioms.append(global_constraint)
        else:
            print("%s could not be parsed" % entry[0])
            if entry[0] != ":free_functions":
                assert False
    yield the_actions
    yield the_axioms