def get_task_actions(domain, problem, agent_list):
    # Ground the task and collect the names of actions whose add and delete
    # effects all mention at least one object from agent_list.
    IGNORE_ACTIONS = set()
    REPLACEABLE_ACTIONS = []
    import re
    action_args = re.compile(r'\((.*)\)')
    #private_pred_list = list(map(str.strip, private_preds))
    task = pddl_parser.open(task_filename=problem, domain_filename=domain)
    normalize.normalize(task)
    (relaxed_reachable, atoms, actions, axioms,
     reachable_action_params) = instantiate.explore(task)
    for _action in actions:
        IGNORE_FLAG = True
        REPLACEABLE_ACTIONS_FLAG = True
        for cond, eff_pred in _action.add_effects:
            # if eff_pred.literal.predicate not in private_pred_list:
            if len(set(eff_pred.args) & set(agent_list)) == 0:
                IGNORE_FLAG = False
        for cond, eff_pred in _action.del_effects:
            # if eff_pred.literal.predicate not in private_pred_list:
            if len(set(eff_pred.args) & set(agent_list)) == 0:
                IGNORE_FLAG = False
        if IGNORE_FLAG:
            # Grounded action names look like "(name arg1 arg2 ...)";
            # extract the schema name.
            act_name = action_args.findall(_action.name)[0].split()[0]
            IGNORE_ACTIONS.add(act_name)
    return task, actions, list(IGNORE_ACTIONS), relaxed_reachable, atoms, \
        axioms, reachable_action_params
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(
            domain_filename=options.domain, task_filename=options.task)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)

    global t1, t2
    t2 = time.time() - t2
    print('Time1:', t1)
    print('Time2:', t2)
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(task_filename=options.task,
                                domain_filename=options.domain)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    # Print PDDL if a transformation option is selected.
    if options.exp or options.evmdd:
        pddl_parser.print_pddl(options.domain, sas_task, task, [])
        print("done!")
        exit(0)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)

    # Alberto Pozanco
    """global actions_that_achieve
    if task.goal.negate:
        goal_predicate = task.goal.predicate
        goal_tuple = task.goal.key
        for action in task.actions:
            for effect in action.effects:
                if effect.literal.predicate == goal_predicate:
                    actions_that_achieve.append([action.name, goal_tuple])
                    break"""

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open(options.sas_file, "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)

    global t1, t2
    t2 = time.time() - t2
    print('Time1:', t1)
    print('Time2:', t2)
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(task_filename=options.task,
                                domain_filename=options.domain)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    compile(task)
    print("Done! %s" % timer)
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    # Alberto Pozanco
    # Save the initial state of the problem to a file.
    print("RUNNING ALBERTO POZANCO'S TRANSLATOR")
    #print(os.path.dirname(os.path.abspath(__file__)))
    #print(os.getcwd())
    #print(options.sas_file)
    outfile = open('current-state.txt', 'w+')
    for predicate in task.init:
        if type(predicate) is pddl.f_expression.Assign:
            # Numeric fluent: write "(= (fluent args) value)".
            value = predicate.expression.value
            fluent = str(predicate.fluent.symbol)
            args = list(predicate.fluent.args)
            args2 = [str(e) for e in args]
            data = '(= (' + fluent + ' ' + ' '.join(args2) + ') ' + str(value) + ')'
            outfile.write(data + '\n')
        elif type(predicate) is pddl.Atom and str(predicate.predicate) != '=':
            data = '(' + predicate.key[0]
            for x in predicate.key[1]:
                data += ' ' + str(x)
            data += ')'
            outfile.write(data + '\n')
        elif type(predicate) is pddl.NegatedAtom:
            print("TRANSLATOR ERROR: a NegatedAtom in the initial state "
                  "was not handled")
    outfile.close()

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)
    # print("translate.main(): task parsed\n Function symbols:")
    # for fs in task.FUNCTION_SYMBOLS:
    #     print(fs)
    # print("task is:")
    # task.dump()

    with timers.timing("Handling Global Constraints"):
        task.add_global_constraints()

    with timers.timing("Normalizing task"):
        normalize.normalize(task)
    # print("translate.main(): Function symbols after normalization:")
    # for fs in task.FUNCTION_SYMBOLS:
    #     print(fs)
    # print("task is:")
    # task.dump()
    # assert False

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    # for operator in sas_task.operators:
    #     print("Operatorname = %s" % operator.name)
    # print("Dumping axioms in task before writing output")
    # for axiom in sas_task.axioms:
    #     axiom.dump()
    #     avar, aval = axiom.effect
    #     print("Effect state value init: %s axiomresult: %s" % (avar, aval))
    #     print("Init = %s" % [sas_task.init.values[avar]])

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    instantiate_dump(task)
    print("Done! %s" % timer)
def main(task=None, sas_fname=None, max_num_actions=float("inf"),
         pg_generator=None):
    timer = timers.Timer()
    if task is None:
        import options
        domain_fname = options.domain
        task_fname = options.task
        sas_fname = options.sas_file
        with timers.timing("Parsing", True):
            task = pddl_parser.open(domain_filename=domain_fname,
                                    task_filename=task_fname)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    sas_task = pddl_to_sas(task, max_num_actions, pg_generator)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open(sas_fname, "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
    for pne in primitives:
        result.append(get_function_predicate(pne))
    return result


def get_function_predicate(pne):
    name = "defined!%s" % pne.symbol
    return pddl.Atom(name, pne.args)


def get_fluent_function_predicate(pne):
    return pddl.Atom(pne, pne.args)


def get_function_axiom_predicate(axiom):
    name = axiom
    args = axiom.parameters
    for part in axiom.parts:
        if isinstance(part, pddl.PrimitiveNumericExpression):
            args += part.args
        elif isinstance(part, pddl.NumericAxiom):
            args += part.parameters
    return pddl.Atom(name, args)


if __name__ == "__main__":
    import pddl_parser
    task = pddl_parser.open()
    normalize(task)
    task.dump()
def condition_to_rule_body(parameters, condition):
    result = []
    for par in parameters:
        result.append(par.get_atom())
    if not isinstance(condition, pddl.Truth):
        if isinstance(condition, pddl.ExistentialCondition):
            for par in condition.parameters:
                result.append(par.get_atom())
            condition = condition.parts[0]
        if isinstance(condition, pddl.Conjunction):
            parts = condition.parts
        else:
            parts = (condition,)
        for part in parts:
            if isinstance(part, pddl.Falsity):
                # Use an atom in the body that is always false because
                # it is not initially true and doesn't occur in the
                # head of any rule.
                return [pddl.Atom("@always-false", [])]
            assert isinstance(part, pddl.Literal), \
                "Condition not normalized: %r" % part
            if not part.negated:
                result.append(part)
    return result


if __name__ == "__main__":
    import pddl_parser
    task = pddl_parser.open()
    normalize(task)
    task.dump()
def atom_to_name(atom):
    return "(" + atom.predicate + "_" + "_".join(atom.args) + ")"


def transform_action(action):
    add_effects = set(map(lambda x: atom_to_name(x[1]), action.add_effects))
    del_effects = set(map(lambda x: atom_to_name(x[1]), action.del_effects))
    pre = set(map(atom_to_name, action.precondition))
    return Action(action.name.replace(" ", "_"), pre, add_effects,
                  del_effects, [])


if __name__ == '__main__':
    task = pddl_parser.open(sys.argv[1], sys.argv[2])
    actions = task.actions
    events = task.events

    # Ground the task twice: once with the original actions and once with
    # the events substituted in as the task's actions.
    inst = instantiate.explore(task)
    task.actions = events
    inst_ev = instantiate.explore(task)
    task.actions = actions

    inst_atoms = inst[1]
    inst_actions = inst[2]
    inst_events = inst_ev[2]

    ground_actions = list(map(transform_action, inst_actions))
    ground_events = list(map(transform_action, inst_events))
    ground_atoms = list(map(atom_to_name, inst_atoms))
def main():
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(
            domain_filename=options.domain, task_filename=options.task)

    print('Processing task', task.task_name)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.unit_cost:
        transform_into_unit_cost(task)

    perform_sanity_checks(task)

    if options.build_datalog_model:
        print("Building Datalog model...")
        prog = pddl_to_prolog.translate(task, options.keep_action_predicates,
                                        options.add_inequalities)
        prog.rename_free_variables()
        if not options.keep_duplicated_rules:
            prog.remove_duplicated_rules()
        with open(options.datalog_file, 'w') as f:
            #prog.dump(f)
            prog.dump(f)

    with timers.timing("Compiling types into unary predicates"):
        g = compile_types.compile_types(task)

    with timers.timing("Checking static predicates"):
        static_pred = static_predicates.check(task)

    assert isinstance(task.goal, pddl.Conjunction) or \
        isinstance(task.goal, pddl.Atom) or \
        isinstance(task.goal, pddl.NegatedAtom), \
        "Goal is not conjunctive."

    if options.ground_state_representation:
        with timers.timing("Generating complete initial state"):
            reachability.generate_overapproximated_reachable_atoms(task, g)
        get_initial_state_size(static_pred, task)
        if options.verbose_data:
            print("%s %s: initial state size %d : time %s" % (
                os.path.basename(os.path.dirname(options.domain)),
                os.path.basename(options.task), len(task.init), timer))
            test_if_experiment(options.test_experiment)

    # Preprocess a dict of supertypes for every type from the TypeGraph.
    types_dict = get_types_dict(g)

    # Set the output file from options.
    if os.path.isfile(options.output_file):
        print("WARNING: file %s already exists, it will be overwritten"
              % options.output_file)
    output = open(options.output_file, "w")
    sys.stdout = output

    remove_functions_from_initial_state(task)
    remove_predicates.remove_unused_predicate_symbols(task)

    if is_trivially_unsolvable(task, static_pred):
        output_trivially_unsolvable_task()
        sys.exit(0)

    remove_static_predicates_from_goal(task, static_pred)

    print_names_and_representation(task.domain_name, task.task_name)

    type_index = {}
    print_types(task, type_index)

    predicate_index = {}
    print_predicates(task, predicate_index, type_index)

    object_index = {}
    print_objects(task, object_index, type_index, types_dict)

    atom_index = {}
    print_initial_state(task, atom_index, object_index, predicate_index)

    print_goal(task, atom_index, object_index, predicate_index)

    print_action_schemas(task, object_index, predicate_index, type_index)

    test_if_experiment(options.test_experiment)
    return
        print_atom(atom, destination=destination, indent=2)
    print(" )", file=destination)

    print("(:actions", file=destination)
    for act in actions:
        print(" " + act.name, file=destination)
    print(" )", file=destination)

    print("(:mutex-groups", file=destination)
    for group in mutex_groups:
        if len(group) > 1:
            print_mutex_group(group, destination=destination, indent=2)
    print(" )", file=destination)


if __name__ == '__main__':
    if len(sys.argv) < 3:
        print(sys.argv[0] + " <domain> <problem>")
        sys.exit(0)

    task = pddl_parser.open(task_filename=sys.argv[2],
                            domain_filename=sys.argv[1])

    if len(sys.argv) > 3:
        dumpfile = open(sys.argv[3], "w")
        dump(task, destination=dumpfile)
        dumpfile.close()
    else:
        dump(task)