Beispiel #1
0
def main():
    """Translate a PDDL task to SAS and write it to args.sas_name.

    Parses the domain/problem named on the command line, normalizes the
    task, optionally relaxes it (drops delete effects), then emits SAS.
    """
    args = parse_args()

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl.open(task_filename=args.task, domain_filename=args.domain)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)
    
    # Cap the time spent in the invariant search — units presumably
    # seconds; TODO confirm against the invariant finder.
    task.INVARIANT_TIME_LIMIT = int(args.inv_limit)

    if args.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    ########### dump_statistics(sas_task).

    with timers.timing("Writing output"):
        with open(args.sas_name, "w") as output_file:
            sas_task.output(output_file)
def get_groups(task, safe=True, reachable_action_params=None):
    """Find the task's invariants and filter them into useful mutex groups."""
    with timers.timing("Finding invariants"):
        all_invariants = list(
            find_invariants(task, safe, reachable_action_params))
    all_invariants.sort()
    with timers.timing("Checking invariant weight"):
        chosen = list(useful_groups(all_invariants, task.init))
    return chosen
Beispiel #3
0
def main():
    """Translate the task to SAS, write output.sas, and report timings."""
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
    # NOTE(review): t1 and t2 are module-level globals set elsewhere;
    # t2 is presumably a start timestamp here turned into an elapsed
    # duration — confirm where they are initialized.
    global t1, t2
    t2 = time.time() - t2
    print('Time1:', t1)
    print('Time2:', t2)
Beispiel #4
0
def main():
    """FOND translator entry point.

    Parses, normalizes and translates the task, then writes the SAS file
    to a hard-coded LunaPlanner path. Fix: removed the stray C-style
    semicolons and trailing whitespace (non-idiomatic Python).
    """
    print("--------------FOND Translator------------")
    args = parse_args()

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl.open(task_filename=args.task, domain_filename=args.domain)

        # Echo the input file names (debug aid).
        print()
        print("Problem Filename = " + args.task)
        print("Domain Filename = " + args.domain)
        print()

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if args.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        # NOTE(review): hard-coded Windows-specific output path.
        with open("..\\webapps\\LunaPlanner\\translator_output\\output.sas", "w") as output_file:
            sas_task.output(output_file)

    print()
    print("SAS file saved at: " + output_file.name)

    print("Done! %s" % timer)
Beispiel #5
0
def main():
    """Translate the task to SAS and write it to options.sas_file."""
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)

    # Alberto Pozanco
    # NOTE(review): the triple-quoted block below is dead code preserved
    # as a bare string expression; it has no runtime effect.
    """global actions_that_achieve
    if task.goal.negate:
        goal_predicate = task.goal.predicate
        goal_tuple = task.goal.key
        for action in task.actions:
            for effect in action.effects:
                if effect.literal.predicate == goal_predicate:
                    actions_that_achieve.append([action.name,goal_tuple])
                    break"""

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open(options.sas_file, "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
Beispiel #6
0
def main():
    """Translator entry point: parse options, translate, write output.sas."""
    options, args = parse_options()

    # Bail out (or warn) when running under an unsupported interpreter.
    check_python_version(options.force_old_python)

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        # This pddl.open() variant takes no file names — presumably it
        # reads them from sys.argv itself; confirm in the pddl module.
        task = pddl.open()

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
Beispiel #7
0
def compute_model(prog):
    """Saturate the Datalog program *prog*; return the list of derived atoms."""
    with timers.timing("Preparing model"):
        rules = convert_rules(prog)
        unifier = Unifier(rules)
        # unifier.dump()
        seed_atoms = sorted(f.atom for f in prog.facts)
        queue = Queue(seed_atoms)

    print("Generated %d rules." % len(rules))
    with timers.timing("Computing model"):
        relevant_atoms = 0
        auxiliary_atoms = 0
        while queue:
            atom = queue.pop()
            predicate = atom.predicate
            # Auxiliary predicates introduced by rule splitting contain "$".
            if isinstance(predicate, str) and "$" in predicate:
                auxiliary_atoms += 1
            else:
                relevant_atoms += 1
            for rule, position in unifier.unify(atom):
                rule.update_index(atom, position)
                rule.fire(atom, position, queue.push)
    print("%d relevant atoms" % relevant_atoms)
    print("%d auxiliary atoms" % auxiliary_atoms)
    print("%d final queue length" % len(queue.queue))
    print("%d total queue pushes" % queue.num_pushes)
    return queue.queue
Beispiel #8
0
def compute_groups(task, atoms, reachable_action_params):
    """Compute fact groups, full mutex groups and the translation key.

    Returns a triple (groups, mutex_groups, translation_key).
    """
    groups = invariant_finder.get_groups(task, reachable_action_params)

    with timers.timing("Instantiating groups"):
        groups = instantiate_groups(groups, task, atoms)

    # Sort here already to get deterministic mutex groups.
    groups = sort_groups(groups)
    # TODO: I think that collect_all_mutex_groups should do the same thing
    #       as choose_groups with partial_encoding=False, so these two should
    #       be unified.
    with timers.timing("Collecting mutex groups"):
        mutex_groups = collect_all_mutex_groups(groups, atoms)
    with timers.timing("Choosing groups", block=True):
        groups = choose_groups(groups, atoms)
    groups = sort_groups(groups)
    with timers.timing("Building translation key"):
        translation_key = build_translation_key(groups)

    if DEBUG:
        # Dump every non-singleton group for inspection.
        for group in groups:
            if len(group) >= 2:
                print("{%s}" % ", ".join(map(str, group)))

    return groups, mutex_groups, translation_key
Beispiel #9
0
def main():
    """Multi-agent translator entry point: parse, normalize, emit SAS/proto.

    Fix: the original rebound the name ``output_file`` from the path
    string to the open file handle inside the ``with`` — a shadowing bug
    waiting to happen. The path now has its own name.
    """
    args = parse_args()

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl.open(task_filename=args.task,
                         domain_filename=args.domain,
                         addl_filename=args.addl)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if args.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    output_path = args.output_file
    use_proto = args.use_proto
    print('Use Proto:', use_proto)

    sas_task = pddl_to_sas(task, args.agent_id, args.agent_url)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open(output_path, "w") as output_file:
            if use_proto:
                sas_task.output_proto(output_file)
            else:
                sas_task.output(output_file)
    print("Done! %s" % timer)
Beispiel #10
0
def translate(task):
    """Build and normalize the Datalog exploration program for *task*.

    Note: The function requires that the task has been normalized.
    When REDUCE_CONDITIONS is set, each rule's conditions are greedily
    thinned so that every rule argument is still covered by some
    remaining condition.
    """
    from invariant_finder import get_fluents
    with timers.timing("Generating Datalog program"):
        prog = PrologProgram()
        translate_facts(prog, task)
        fluents = get_fluents(
            task)  # TODO: identify implied conditions and filter automatically
        for conditions, effect in normalize.build_exploration_rules(task):
            if REDUCE_CONDITIONS:
                # TODO: could possibly remove rules with effects that don't achieve conditions
                #conditions = [condition for condition in conditions if condition.predicate not in fluents]
                # Order conditions so the greedy pass below sees many-argument
                # and non-fluent conditions first.
                conditions = sorted(conditions,
                                    key=lambda c:
                                    (len(c.args), c.predicate not in fluents),
                                    reverse=True)
                covered_args = set()
                reduced_conditions = []
                for condition in conditions:
                    # isinstance(condition.predicate, pddl.Action) or isinstance(condition.predicate, pddl.Axiom)
                    # Keep a condition only if it mentions an argument not yet
                    # covered (the very first condition is always kept).
                    if not reduced_conditions or not (set(condition.args) <=
                                                      covered_args):
                        covered_args.update(condition.args)
                        reduced_conditions.append(condition)
                conditions = reduced_conditions
            prog.add_rule(Rule(conditions, effect))
    with timers.timing("Normalizing Datalog program", block=True):
        # Using block=True because normalization can output some messages
        # in rare cases.
        prog.normalize()
        prog.split_rules()
    return prog
def main():
    """Translate the task to SAS, write output.sas, and report timings."""
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(
            domain_filename=options.domain, task_filename=options.task)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
    # NOTE(review): t1 and t2 are module-level globals set elsewhere;
    # t2 is presumably a start timestamp here turned into an elapsed
    # duration — confirm where they are initialized.
    global t1, t2
    t2 = time.time() - t2
    print('Time1:', t1)
    print('Time2:', t2)
def get_groups(task, safe=True, reachable_action_params=None):
    """Return the useful invariant groups of *task*."""
    with timers.timing("Finding invariants"):
        found = [inv for inv in
                 find_invariants(task, safe, reachable_action_params)]
    found.sort()
    with timers.timing("Checking invariant weight"):
        useful = list(useful_groups(found, task.init))
    return useful
Beispiel #13
0
def main():
    """POND translator entry point.

    Fixes: ``not sas_task is None`` rewritten as the idiomatic
    ``sas_task is not None``; stray semicolons removed.
    """
    print("-------------POND Translator-----------")
    args = parse_args()

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl.open(task_filename=args.task, domain_filename=args.domain)

        # Echo the input file names (debug aid).
        print()
        print("Problem Filename = " + args.task)
        print("Domain Filename = " + args.domain)
        print()

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if args.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    # pddl_to_sas may return None; only write a file when it did not.
    if sas_task is not None:
        with timers.timing("Writing output"):
            # NOTE(review): hard-coded Windows-specific output path.
            with open("..\\webapps\\LunaPlanner\\translator_output\\output.sas", "w") as output_file:
                sas_task.output(output_file)

        print()
        print("SAS file saved at: " + output_file.name)

        print("Done! %s" % timer)
Beispiel #14
0
def translate(task):
    """Generate and normalize the Datalog exploration program.

    Precondition: *task* has already been normalized.
    """
    with timers.timing("Generating Datalog program"):
        program = PrologProgram()
        translate_facts(program, task)
        for conditions, effect in normalize.build_exploration_rules(task):
            new_rule = Rule(conditions, effect)
            program.add_rule(new_rule)

    with timers.timing("Normalizing Datalog program", block=True):
        # block=True: normalization can print diagnostics in rare cases.
        program.normalize()
        program.split_rules()
    return program
Beispiel #15
0
def compute_groups(task, atoms, reachable_action_params):
    """Compute SAS fact groups, full mutex groups and the translation key."""
    groups = invariant_finder.get_groups(task, reachable_action_params)

    with timers.timing("Instantiating groups"):
        groups = instantiate_groups(groups, task, atoms)

    # Sorting now makes the mutex groups deterministic.
    groups = sort_groups(groups)
    # TODO: collect_all_mutex_groups likely mirrors choose_groups with
    #       partial_encoding=False; the two should eventually be unified.
    with timers.timing("Collecting mutex groups"):
        mutex_groups = collect_all_mutex_groups(groups, atoms)
    with timers.timing("Choosing groups", block=True):
        groups = choose_groups(groups, atoms)
    groups = sort_groups(groups)
    with timers.timing("Building translation key"):
        translation_key = build_translation_key(groups)

    if DEBUG:
        # Show every non-singleton group.
        for group in (g for g in groups if len(g) >= 2):
            members = ", ".join(str(fact) for fact in group)
            print("{%s}" % members)

    return groups, mutex_groups, translation_key
Beispiel #16
0
def main():
    """Translate the given PDDL task and write output.sas when possible.

    Fix: ``not sas_task is None`` rewritten as the idiomatic
    ``sas_task is not None``.
    """
    args = parse_args()

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl.open(task_filename=args.task, domain_filename=args.domain)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if args.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    # pddl_to_sas may return None; skip writing in that case.
    if sas_task is not None:
        with timers.timing("Writing output"):
            with open("output.sas", "w") as output_file:
                sas_task.output(output_file)
        print("Done! %s" % timer)
Beispiel #17
0
def main():
    """Translate PDDL to SAS, enforcing unique operator names.

    Fix: the uniqueness check built ``set([list-comprehension])`` —
    replaced with a direct comprehension feeding ``set()`` (no throwaway
    list, ruff C403).
    """
    print('HOLAAAAAAAAAAAAAAAAAAAAAAA')  # NOTE(review): leftover debug print.
    args = parse_args()

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl.open(task_filename=args.task, domain_filename=args.domain)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    # Cap the time spent in the invariant search.
    task.INVARIANT_TIME_LIMIT = int(args.inv_limit)

    if args.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)

    # Duplicate operator names would make the SAS output ambiguous.
    operator_names = [op.name for op in sas_task.operators]
    assert len(operator_names) == len(set(operator_names)), \
           "Error: Operator names (with parameters) must be unique"

    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open(args.sas_name, "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
Beispiel #18
0
def main():
    """Multi-agent translator entry point: parse, normalize, emit SAS/proto.

    Fix: the original rebound the name ``output_file`` from the path
    string to the open file handle inside the ``with`` — a shadowing bug
    waiting to happen. The path now has its own name.
    """
    args = parse_args()

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl.open(task_filename=args.task,
                         domain_filename=args.domain,
                         addl_filename=args.addl)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if args.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    output_path = args.output_file
    use_proto = args.use_proto
    print('Use Proto:', use_proto)

    sas_task = pddl_to_sas(task, args.agent_id, args.agent_url)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open(output_path, "w") as output_file:
            if use_proto:
                sas_task.output_proto(output_file)
            else:
                sas_task.output(output_file)
    print("Done! %s" % timer)
Beispiel #19
0
def partial_grounding_compute_model(prog, action_prioritizer):
    """Lazily compute the model of *prog*, yielding atoms as they appear.

    Generator variant of compute_model for partial grounding: atoms are
    popped in the order imposed by *action_prioritizer* (via
    ActionPriorityQueue), and both freshly derived facts and popped
    action atoms are yielded to the caller.
    """
    with timers.timing("Preparing model"):
        rules = convert_rules(prog)
        unifier = Unifier(rules)
        # unifier.dump()
        fact_atoms = sorted(fact.atom for fact in prog.facts)
        queue = ActionPriorityQueue(fact_atoms, action_prioritizer)

    print("Generated %d rules." % len(rules))
    with timers.timing("Computing model"):
        num_pops = 0
        relevant_atoms = 0
        auxiliary_atoms = 0
        while queue:
            next_atom = queue.pop()
            num_pops += 1
            pred = next_atom.predicate
            # Auxiliary predicates (from rule splitting) contain "$".
            if isinstance(pred, str) and "$" in pred:
                auxiliary_atoms += 1
            else:
                relevant_atoms += 1
            matches = unifier.unify(next_atom)
            for rule, cond_index in matches:
                rule.update_index(next_atom, cond_index)
                rule.fire(next_atom, cond_index, queue.push)
            # Hand freshly derived facts to the caller immediately.
            while queue.new_facts:
                atom_to_yield = queue.new_facts.pop()
                yield atom_to_yield
            if isinstance(next_atom.predicate, pddl.Action):
                yield next_atom
    print("%d relevant atoms" % relevant_atoms)
    print("%d auxiliary atoms" % auxiliary_atoms)
    # NOTE(review): the label says "queue length" but num_pops counts pops.
    print("%d final queue length" % num_pops)
    print("%d total queue pushes" % queue.num_pushes)
    return
Beispiel #20
0
def compute_model(prog):
    """Saturate the Datalog program *prog* and return the derived atoms.

    Aborts with a dedicated exit code when the total number of queue
    pushes exceeds options.total_queue_pushes (if that limit is > 0).
    """
    with timers.timing("Preparing model"):
        rules = convert_rules(prog)
        unifier = Unifier(rules)
        # unifier.dump()
        fact_atoms = sorted(fact.atom for fact in prog.facts)
        queue = Queue(fact_atoms)

    print("Generated %d rules." % len(rules))
    with timers.timing("Computing model"):
        relevant_atoms = 0
        auxiliary_atoms = 0
        while queue:
            # Enforce the optional cap on total queue pushes.
            if options.total_queue_pushes > 0 and queue.num_pushes > options.total_queue_pushes:
                print("%d > %d total queue pushes raise" %
                      (queue.num_pushes, options.total_queue_pushes))
                ## For a full list of exit codes, please see driver/returncodes.py.
                OUT_OF_TOTAL_QUEUE_PUSHES = 150
                sys.exit(OUT_OF_TOTAL_QUEUE_PUSHES)
            next_atom = queue.pop()
            pred = next_atom.predicate
            # Auxiliary predicates (from rule splitting) contain "$".
            if isinstance(pred, str) and "$" in pred:
                auxiliary_atoms += 1
            else:
                relevant_atoms += 1
            matches = unifier.unify(next_atom)
            for rule, cond_index in matches:
                rule.update_index(next_atom, cond_index)
                rule.fire(next_atom, cond_index, queue.push)
    print("%d relevant atoms" % relevant_atoms)
    print("%d auxiliary atoms" % auxiliary_atoms)
    print("%d final queue length" % len(queue.queue))
    print("%d total queue pushes" % queue.num_pushes)
    return queue.queue
def main():
    """Translate to SAS; optionally print transformed PDDL and exit."""
    timer = timers.Timer()
    
    
    with timers.timing("Parsing", True):
        task = pddl_parser.open(task_filename=options.task, domain_filename=options.domain)
    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    # Print pddl if a transformation option is selected.
    if options.exp or options.evmdd:
        pddl_parser.print_pddl(options.domain, sas_task, task, [])
        print("done!")
        exit(0)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer) 
def compute_model(prog):
    """Exhaustively apply the rules of *prog*; return the closed atom list."""
    with timers.timing("Preparing model"):
        rules = convert_rules(prog)
        unifier = Unifier(rules)
        # unifier.dump()
        # Seed atoms in insertion order (deliberately unsorted here).
        seed_atoms = [f.atom for f in prog.facts]
        queue = Queue(seed_atoms)

    print("Generated %d rules." % len(rules))
    with timers.timing("Computing model"):
        relevant_atoms = 0
        auxiliary_atoms = 0
        while queue:
            atom = queue.pop()
            predicate = atom.predicate
            # "$"-predicates are auxiliaries introduced by rule splitting.
            if isinstance(predicate, str) and "$" in predicate:
                auxiliary_atoms += 1
            else:
                relevant_atoms += 1
            for rule, position in unifier.unify(atom):
                rule.update_index(atom, position)
                rule.fire(atom, position, queue.push)
    print("%d relevant atoms" % relevant_atoms)
    print("%d auxiliary atoms" % auxiliary_atoms)
    print("%d final queue length" % len(queue.queue))
    print("%d total queue pushes" % queue.num_pushes)
    return queue.queue
Beispiel #23
0
def main():
    """Translator entry point: parse options, translate, write output.sas."""
    options, args = parse_options()

    # Bail out (or warn) when running under an unsupported interpreter.
    check_python_version(options.force_old_python)

    timer = timers.Timer()
    with timers.timing("Parsing", True):
        # This pddl.open() variant takes no file names — presumably it
        # reads them from sys.argv itself; confirm in the pddl module.
        task = pddl.open()

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
Beispiel #24
0
def pddl_to_sas(task):
    """Translate an instantiated PDDL *task* into a SAS task.

    Returns a trivially unsolvable SAS task when the relaxed task has no
    solution or simplification proves the goal false.

    Fix: the three statistics lines used Python 2 ``print`` statements —
    a syntax error under Python 3, which the rest of this module targets.
    They are now ``print()`` calls.
    """
    with timers.timing("Instantiating", block=True):
        relaxed_reachable, atoms, actions, axioms = instantiate.explore(task)

    if not relaxed_reachable:
        return unsolvable_sas_task("No relaxed solution")

    # HACK! Goals should be treated differently.
    if isinstance(task.goal, pddl.Conjunction):
        goal_list = task.goal.parts
    else:
        goal_list = [task.goal]
    for item in goal_list:
        assert isinstance(item, pddl.Literal)

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            task, atoms, partial_encoding=USE_PARTIAL_ENCODING)

    with timers.timing("Building STRIPS to SAS dictionary"):
        ranges, strips_to_sas = strips_to_sas_dictionary(
            groups, assert_partial=USE_PARTIAL_ENCODING)

    with timers.timing("Building dictionary for full mutex groups"):
        mutex_ranges, mutex_dict = strips_to_sas_dictionary(
            mutex_groups, assert_partial=False)

    if ADD_IMPLIED_PRECONDITIONS:
        with timers.timing("Building implied facts dictionary..."):
            implied_facts = build_implied_facts(strips_to_sas, groups, mutex_groups)
    else:
        implied_facts = {}

    with timers.timing("Translating task", block=True):
        sas_task = translate_task(
            strips_to_sas, ranges, mutex_dict, mutex_ranges,
            task.init, goal_list, actions, axioms, task.use_min_cost_metric,
            implied_facts)

    print("%d implied effects removed" % removed_implied_effect_counter)
    print("%d effect conditions simplified" % simplified_effect_condition_counter)
    print("%d implied preconditions added" % added_implied_precondition_counter)

    with timers.timing("Building mutex information"):
        mutex_key = build_mutex_key(strips_to_sas, mutex_groups)

    if DETECT_UNREACHABLE:
        with timers.timing("Detecting unreachable propositions", block=True):
            try:
                simplify.filter_unreachable_propositions(
                    sas_task, mutex_key, translation_key)
            except simplify.Impossible:
                return unsolvable_sas_task("Simplified to trivially false goal")

    with timers.timing("Writing translation key"):
        write_translation_key(translation_key)
    with timers.timing("Writing mutex key"):
        write_mutex_key(mutex_key)
    return sas_task
def main():
    """Parse and normalize the task, then hand it to compile()."""
    timer = timers.Timer()
    
    with timers.timing("Parsing", True):
        task = pddl_parser.open(task_filename=options.task, domain_filename=options.domain)
    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    # `compile` here is a project-level function (it shadows the builtin);
    # what it emits is not visible from this file — TODO confirm.
    compile(task)
    print("Done! %s" % timer) 
def translate(task):
    """Create the normalized Datalog program for an (already normalized) task."""
    with timers.timing("Generating Datalog program"):
        prog = PrologProgram()
        translate_facts(prog, task)
        exploration_rules = normalize.build_exploration_rules(task)
        for conditions, effect in exploration_rules:
            prog.add_rule(Rule(conditions, effect))
    with timers.timing("Normalizing Datalog program", block=True):
        # block=True: normalization may print messages in rare cases.
        prog.normalize()
        prog.split_rules()
    return prog
Beispiel #27
0
def translate(task):
    """Generate and normalize the exploration Datalog program.

    Precondition: *task* has been normalized.
    """
    with timers.timing("Generating Datalog program"):
        program = PrologProgram()
        translate_facts(program, task)
        for conditions, effect in normalize.build_exploration_rules(task):
            rule = Rule(conditions, effect)
            program.add_rule(rule)
    with timers.timing("Normalizing Datalog program", block=True):
        # Normalization can emit diagnostics in rare cases, hence block=True.
        program.normalize()
        program.split_rules()
    return program
Beispiel #28
0
def main():
    """Translator entry point that also dumps the initial state to a file.

    Fixes: the state-dump file is now managed with a ``with`` block
    (the original relied on an explicit close that a raised exception
    would skip), and ``type(x) is T`` comparisons are replaced by the
    idiomatic ``isinstance``.
    """
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects.
        for action in task.actions:
            # Iterate in reverse so deleting by index stays valid.
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    # Alberto Pozanco: save the problem's initial state to a text file.
    print("ESTAMOS EJECUTANDO EL TRANSLATOR DE ALBERTO POZANCO")
    #print(os.path.dirname(os.path.abspath(__file__)))
    #print(os.getcwd())
    #print(options.sas_file)
    with open('current-state.txt', 'w+') as outfile:
        for predicate in task.init:
            if isinstance(predicate, pddl.f_expression.Assign):
                # Numeric fluent: write "(= (fluent args...) value)".
                value = predicate.expression.value
                fluent = str(predicate.fluent.symbol)
                args2 = [str(arg) for arg in predicate.fluent.args]
                data = '(= (' + fluent + ' ' + ' '.join(args2) + ') ' + str(
                    value) + ')'
                outfile.write(data + '\n')
            elif isinstance(predicate, pddl.NegatedAtom):
                # Checked before Atom in case NegatedAtom derives from it.
                print(
                    "TREMENDO FALLO DEL TRANSLATOR QUE NO HA TENIDO EN CUENTA QUE HABIA UN NEGATED ATOM"
                )
            elif isinstance(predicate, pddl.Atom) and str(predicate.predicate) != '=':
                # Ordinary fact: write "(predicate args...)".
                data = '(' + predicate.key[0]
                for x in predicate.key[1]:
                    data += ' ' + str(x)
                data += ')'
                outfile.write(data + '\n')

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
Beispiel #29
0
def pddl_to_sas(task):
    """Translate an instantiated PDDL *task* into a SAS task.

    Returns a trivially unsolvable SAS task when the relaxed task has no
    solution or simplification proves the goal false, and a trivially
    solvable one when the goal simplifies away entirely.
    """
    with timers.timing("Instantiating", block=True):
        (relaxed_reachable, atoms, actions, axioms,
         reachable_action_params) = instantiate.explore(task)

    if not relaxed_reachable:
        return unsolvable_sas_task("No relaxed solution")

    # HACK! Goals should be treated differently.
    if isinstance(task.goal, pddl.Conjunction):
        goal_list = task.goal.parts
    else:
        goal_list = [task.goal]
    for item in goal_list:
        assert isinstance(item, pddl.Literal)

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            task, atoms, reachable_action_params)

    with timers.timing("Building STRIPS to SAS dictionary"):
        ranges, strips_to_sas = strips_to_sas_dictionary(
            groups, assert_partial=options.use_partial_encoding)

    with timers.timing("Building dictionary for full mutex groups"):
        mutex_ranges, mutex_dict = strips_to_sas_dictionary(
            mutex_groups, assert_partial=False)

    if options.add_implied_preconditions:
        with timers.timing("Building implied facts dictionary..."):
            implied_facts = build_implied_facts(strips_to_sas, groups,
                                                mutex_groups)
    else:
        implied_facts = {}

    with timers.timing("Building mutex information", block=True):
        mutex_key = build_mutex_key(strips_to_sas, mutex_groups)

    with timers.timing("Translating task", block=True):
        sas_task = translate_task(
            strips_to_sas, ranges, translation_key,
            mutex_dict, mutex_ranges, mutex_key,
            task.init, goal_list, actions, axioms, task.use_min_cost_metric,
            implied_facts)

    # Translation statistics (counters are module-level globals).
    print("%d effect conditions simplified" %
          simplified_effect_condition_counter)
    print("%d implied preconditions added" %
          added_implied_precondition_counter)

    if options.filter_unreachable_facts:
        with timers.timing("Detecting unreachable propositions", block=True):
            try:
                simplify.filter_unreachable_propositions(sas_task)
            except simplify.Impossible:
                return unsolvable_sas_task("Simplified to trivially false goal")
            except simplify.TriviallySolvable:
                return solvable_sas_task("Simplified to empty goal")

    return sas_task
Beispiel #30
0
def pddl_to_sas(task):
    """Translate a normalized PDDL task into a finite-domain SAS+ task.

    Pipeline: ground the task, compute invariant-based fact groups, build
    the STRIPS-to-SAS variable encoding, translate init/goal/operators/
    axioms, and optionally prune unreachable propositions.  May return a
    trivially unsolvable or trivially solvable SAS task instead of the
    translated one when simplification decides the outcome early.
    """
    with timers.timing("Instantiating", block=True):
        (relaxed_reachable, atoms, actions, axioms,
         reachable_action_params) = instantiate.explore(task)

    # If even the delete relaxation is unsolvable, the real task is too.
    if not relaxed_reachable:
        return unsolvable_sas_task("No relaxed solution")

    # HACK! Goals should be treated differently.
    if isinstance(task.goal, pddl.Conjunction):
        goal_list = task.goal.parts
    else:
        goal_list = [task.goal]
    for item in goal_list:
        assert isinstance(item, pddl.Literal)

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            task, atoms, reachable_action_params)

    # Two encodings are built: one over the chosen groups (the actual SAS
    # variables) and one over the full mutex groups (for mutex reasoning).
    with timers.timing("Building STRIPS to SAS dictionary"):
        ranges, strips_to_sas = strips_to_sas_dictionary(
            groups, assert_partial=options.use_partial_encoding)

    with timers.timing("Building dictionary for full mutex groups"):
        mutex_ranges, mutex_dict = strips_to_sas_dictionary(
            mutex_groups, assert_partial=False)

    if options.add_implied_preconditions:
        with timers.timing("Building implied facts dictionary..."):
            implied_facts = build_implied_facts(strips_to_sas, groups,
                                                mutex_groups)
    else:
        implied_facts = {}

    with timers.timing("Building mutex information", block=True):
        mutex_key = build_mutex_key(strips_to_sas, mutex_groups)

    with timers.timing("Translating task", block=True):
        sas_task = translate_task(
            strips_to_sas, ranges, translation_key,
            mutex_dict, mutex_ranges, mutex_key,
            task.init, goal_list, actions, axioms, task.use_min_cost_metric,
            implied_facts)

    print("%d effect conditions simplified" %
          simplified_effect_condition_counter)
    print("%d implied preconditions added" %
          added_implied_precondition_counter)

    if options.filter_unreachable_facts:
        with timers.timing("Detecting unreachable propositions", block=True):
            try:
                simplify.filter_unreachable_propositions(sas_task)
            except simplify.Impossible:
                return unsolvable_sas_task("Simplified to trivially false goal")
            except simplify.TriviallySolvable:
                return solvable_sas_task("Simplified to empty goal")

    return sas_task
Beispiel #31
0
def compute_groups(task, atoms, partial_encoding=True):
    """Return (chosen groups, full mutex groups, translation key).

    Groups come from the invariant finder, are instantiated against the
    reachable atoms, and are then partitioned into the groups actually
    used for SAS variables plus the complete set of mutex groups.
    """
    candidate_groups = invariant_finder.get_groups(task)
    with timers.timing("Instantiating groups"):
        candidate_groups = instantiate_groups(candidate_groups, task, atoms)
    # TODO: collect_all_mutex_groups appears to do the same thing as
    #       choose_groups with partial_encoding=False; the two should
    #       probably be unified.
    with timers.timing("Collecting mutex groups"):
        full_mutex_groups = collect_all_mutex_groups(candidate_groups, atoms)
    with timers.timing("Choosing groups", block=True):
        chosen_groups = choose_groups(
            candidate_groups, atoms, partial_encoding=partial_encoding)
    with timers.timing("Building translation key"):
        key = build_translation_key(chosen_groups)
    return chosen_groups, full_mutex_groups, key
Beispiel #32
0
def sas_from_instantiated(instantiated_task):
    """Build a SAS+ task from a previously computed instantiation.

    `instantiated_task` is the tuple (task, atoms, actions, axioms,
    reachable_action_params, goal_list) or a falsy value when no relaxed
    solution exists.  Returns the translated (and optionally simplified
    and reordered) SAS task, or a trivially solvable/unsolvable stand-in.
    """
    import timers
    import fact_groups
    import options
    import simplify
    import variable_order
    # NOTE: import dump_statistics by name.  The original code called
    # `translate.dump_statistics(...)`, but `from translate import ...`
    # does not bind the name `translate`, so that call would raise
    # NameError unless `translate` was imported elsewhere.
    from translate import translate_task, unsolvable_sas_task, \
        strips_to_sas_dictionary, build_implied_facts, build_mutex_key, \
        solvable_sas_task, dump_statistics

    if not instantiated_task:
        return unsolvable_sas_task("No relaxed solution")
    task, atoms, actions, axioms, reachable_action_params, goal_list = \
        instantiated_task

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            task, atoms, reachable_action_params)

    with timers.timing("Building STRIPS to SAS dictionary"):
        ranges, strips_to_sas = strips_to_sas_dictionary(
            groups, assert_partial=options.use_partial_encoding)

    with timers.timing("Building dictionary for full mutex groups"):
        mutex_ranges, mutex_dict = strips_to_sas_dictionary(
            mutex_groups, assert_partial=False)

    if options.add_implied_preconditions:
        with timers.timing("Building implied facts dictionary..."):
            implied_facts = build_implied_facts(strips_to_sas, groups,
                                                mutex_groups)
    else:
        implied_facts = {}

    with timers.timing("Building mutex information", block=True):
        mutex_key = build_mutex_key(strips_to_sas, mutex_groups)

    with timers.timing("Translating task", block=True):
        sas_task = translate_task(strips_to_sas, ranges, translation_key,
                                  mutex_dict, mutex_ranges, mutex_key,
                                  task.init, goal_list, actions, axioms,
                                  task.use_min_cost_metric, implied_facts)

    if options.filter_unreachable_facts:
        with timers.timing("Detecting unreachable propositions", block=True):
            try:
                simplify.filter_unreachable_propositions(sas_task)
            except simplify.Impossible:
                return unsolvable_sas_task(
                    "Simplified to trivially false goal")
            except simplify.TriviallySolvable:
                return solvable_sas_task("Simplified to empty goal")

    if options.reorder_variables or options.filter_unimportant_vars:
        with timers.timing("Reordering and filtering variables", block=True):
            variable_order.find_and_apply_variable_order(
                sas_task, options.reorder_variables,
                options.filter_unimportant_vars)

    dump_statistics(sas_task)
    return sas_task
Beispiel #33
0
def compute_groups(task, atoms, reachable_action_params, partial_encoding=True):
    """Return (chosen groups, full mutex groups, translation key)."""
    invariant_groups = invariant_finder.get_groups(task, reachable_action_params)
    with timers.timing("Instantiating groups"):
        instantiated = instantiate_groups(invariant_groups, task, atoms)

    # TODO: collect_all_mutex_groups seems equivalent to choose_groups
    #       with partial_encoding=False; the two should be unified.
    with timers.timing("Collecting mutex groups"):
        all_mutex_groups = collect_all_mutex_groups(instantiated, atoms)
    with timers.timing("Choosing groups", block=True):
        selected = choose_groups(instantiated, atoms,
                                 partial_encoding=partial_encoding)
    with timers.timing("Building translation key"):
        key = build_translation_key(selected)
    return selected, all_mutex_groups, key
Beispiel #34
0
def compute_groups(task, atoms, actions,
                   reachable_action_params, partial_encoding=True,
                   comm = None):
    """Compute fact groups, with optional multi-agent splitting.

    When `comm` (an agent communication channel — presumably connecting
    the agents of a distributed translation; TODO confirm against caller)
    is given, groups are additionally split into private/public parts and
    synchronized across agents so no agent holds invalid mutexes.

    Returns (groups, mutex_groups, translation_key).
    """
    invariants, groups = invariant_finder.get_groups(task, reachable_action_params)

    with timers.timing("Instantiating groups"):
        groups = instantiate_groups(groups, task, atoms)

    if comm is not None:
        # Try to instantiate another mutex groups that are based on initial
        # states of other agents
        #groups = ma_instantiate_groups(comm, groups, invariants, task, atoms)

        # Separate private atoms to a separate groups
        groups = split_groups_by_private_atoms(groups)
        print('Groups split to private/public')
        sys.stdout.flush()

        # Split mutex groups so that all agents have the same mutex groups
        # -- this should ensure that no agent have invalid mutexes.
        groups = ma_split_groups(comm, groups, atoms, task.init, actions)
    else:
        print('NOT COMM!')
        sys.stdout.flush()
    print('Groups computed.')
    sys.stdout.flush()

    # Sort here already to get deterministic mutex groups.
    groups = sort_groups(groups)

    # TODO: I think that collect_all_mutex_groups should do the same thing
    #       as choose_groups with partial_encoding=False, so these two should
    #       be unified.
    with timers.timing("Collecting mutex groups"):
        mutex_groups = collect_all_mutex_groups(groups, atoms)
    with timers.timing("Choosing groups", block=True):
        groups = choose_groups(groups, atoms, partial_encoding=partial_encoding)
    # Re-sort after choosing so the final variable order is deterministic.
    groups = sort_groups(groups)

    with timers.timing("Building translation key"):
        translation_key = build_translation_key(groups)

    if DEBUG:
        for group in groups:
            if len(group) >= 2:
                print("{%s}" % ", ".join(map(str, group)))

    return groups, mutex_groups, translation_key
def translate_task(strips_to_sas, ranges, translation_key,
                   mutex_dict, mutex_ranges, mutex_key,
                   init, goals,
                   actions, axioms, metric, implied_facts):
    """Assemble the final SASTask from the STRIPS facts and the encoding.

    Processes axioms, converts the initial state under the closed-world
    assumption, translates goal/operators/axioms into SAS conditions,
    and builds the variable, mutex and axiom-layer structures.  Returns
    an unsolvable stand-in task if the goal violates a mutex.
    """
    with timers.timing("Processing axioms", block=True):
        axioms, axiom_init, axiom_layer_dict = axiom_rules.handle_axioms(
            actions, axioms, goals)
    # Derived facts that are initially true are treated as init facts.
    init = init + axiom_init
    #axioms.sort(key=lambda axiom: axiom.name)
    #for axiom in axioms:
    #  axiom.dump()

    if options.dump_task:
        # Remove init facts that don't occur in strips_to_sas: they're constant.
        nonconstant_init = filter(strips_to_sas.get, init)
        dump_task(nonconstant_init, goals, actions, axioms, axiom_layer_dict)

    init_values = [rang - 1 for rang in ranges]
    # Closed World Assumption: Initialize to "range - 1" == Nothing.
    for fact in init:
        pairs = strips_to_sas.get(fact, [])  # empty for static init facts
        for var, val in pairs:
            curr_val = init_values[var]
            # A variable may be set twice only to the same value.
            if curr_val != ranges[var] - 1 and curr_val != val:
                assert False, "Inconsistent init facts! [fact = %s]" % fact
            init_values[var] = val
    init = sas_tasks.SASInit(init_values)

    goal_dict_list = translate_strips_conditions(goals, strips_to_sas, ranges,
                                                 mutex_dict, mutex_ranges)
    if goal_dict_list is None:
        # "None" is a signal that the goal is unreachable because it
        # violates a mutex.
        return unsolvable_sas_task("Goal violates a mutex")

    assert len(goal_dict_list) == 1, "Negative goal not supported"
    ## we could substitute the negative goal literal in
    ## normalize.substitute_complicated_goal, using an axiom. We currently
    ## don't do this, because we don't run into this assertion, if the
    ## negative goal is part of finite domain variable with only two
    ## values, which is most of the time the case, and hence refrain from
    ## introducing axioms (that are not supported by all heuristics)
    goal_pairs = list(goal_dict_list[0].items())
    goal = sas_tasks.SASGoal(goal_pairs)

    operators = translate_strips_operators(actions, strips_to_sas, ranges,
                                           mutex_dict, mutex_ranges,
                                           implied_facts)
    axioms = translate_strips_axioms(axioms, strips_to_sas, ranges, mutex_dict,
                                     mutex_ranges)

    # Layer -1 marks non-derived variables; derived ones get their stratum.
    axiom_layers = [-1] * len(ranges)
    for atom, layer in axiom_layer_dict.items():
        assert layer >= 0
        [(var, val)] = strips_to_sas[atom]
        axiom_layers[var] = layer
    variables = sas_tasks.SASVariables(ranges, axiom_layers, translation_key)
    mutexes = [sas_tasks.SASMutexGroup(group) for group in mutex_key]
    return sas_tasks.SASTask(variables, mutexes, init, goal,
                             operators, axioms, metric)
Beispiel #36
0
def translate_task(strips_to_sas, ranges, mutex_dict, mutex_ranges, init, goals,
                   actions, axioms, metric, implied_facts):
    """Assemble a SASTask from STRIPS facts and the variable encoding.

    Fixes over the previous version: removed the dead, unused assignment
    `pair = strips_to_sas.get(fact)` inside the init loop, and replaced
    `dict.iteritems()` (Python 2 only, raises AttributeError on Python 3)
    with `dict.items()`, which works on both.
    """
    with timers.timing("Processing axioms", block=True):
        axioms, axiom_init, axiom_layer_dict = axiom_rules.handle_axioms(
            actions, axioms, goals)
    # Derived facts that are initially true are treated as init facts.
    init = init + axiom_init

    init_values = [rang - 1 for rang in ranges]
    # Closed World Assumption: Initialize to "range - 1" == Nothing.
    for fact in init:
        pairs = strips_to_sas.get(fact, [])  # empty for static init facts
        for var, val in pairs:
            assert init_values[var] == ranges[var] - 1, "Inconsistent init facts!"
            init_values[var] = val
    init = sas_tasks.SASInit(init_values)

    goal_pairs = translate_strips_conditions(goals, strips_to_sas, ranges, mutex_dict, mutex_ranges).items()
    goal = sas_tasks.SASGoal(goal_pairs)

    operators = translate_strips_operators(actions, strips_to_sas, ranges, mutex_dict, mutex_ranges, implied_facts)
    axioms = translate_strips_axioms(axioms, strips_to_sas, ranges, mutex_dict, mutex_ranges)

    # Layer -1 marks non-derived variables; derived ones get their stratum.
    axiom_layers = [-1] * len(ranges)
    for atom, layer in axiom_layer_dict.items():
        assert layer >= 0
        [(var, val)] = strips_to_sas[atom]
        axiom_layers[var] = layer
    variables = sas_tasks.SASVariables(ranges, axiom_layers)

    return sas_tasks.SASTask(variables, init, goal, operators, axioms, metric)
Beispiel #37
0
def translate_task(strips_to_sas, ranges, translation_key,
                   mutex_dict, mutex_ranges, mutex_key,
                   init, goals,
                   actions, axioms, metric, implied_facts):
    """Assemble the final SASTask from the STRIPS facts and the encoding.

    Variant that dumps the task when the module-level DUMP_TASK flag is
    set and (unlike sibling versions) does NOT assert a single positive
    goal dictionary — note the commented-out assertion below.
    """
    with timers.timing("Processing axioms", block=True):
        axioms, axiom_init, axiom_layer_dict = axiom_rules.handle_axioms(
            actions, axioms, goals)
    # Derived facts that are initially true are treated as init facts.
    init = init + axiom_init
    #axioms.sort(key=lambda axiom: axiom.name)
    #for axiom in axioms:
    #  axiom.dump()

    if DUMP_TASK:
        # Remove init facts that don't occur in strips_to_sas: they're constant.
        nonconstant_init = filter(strips_to_sas.get, init)
        dump_task(nonconstant_init, goals, actions, axioms, axiom_layer_dict)

    init_values = [rang - 1 for rang in ranges]
    # Closed World Assumption: Initialize to "range - 1" == Nothing.
    for fact in init:
        pairs = strips_to_sas.get(fact, [])  # empty for static init facts
        for var, val in pairs:
            curr_val = init_values[var]
            # A variable may be set twice only to the same value.
            if curr_val != ranges[var] - 1 and curr_val != val:
                assert False, "Inconsistent init facts! [fact = %s]" % fact
            init_values[var] = val
    init = sas_tasks.SASInit(init_values)

    goal_dict_list = translate_strips_conditions(goals, strips_to_sas, ranges,
                                                 mutex_dict, mutex_ranges)
    if goal_dict_list is None:
        # "None" is a signal that the goal is unreachable because it
        # violates a mutex.
        return unsolvable_sas_task("Goal violates a mutex")

    #assert len(goal_dict_list) == 1, "Negative goal not supported"
    ## we could substitute the negative goal literal in
    ## normalize.substitute_complicated_goal, using an axiom. We currently
    ## don't do this, because we don't run into this assertion, if the
    ## negative goal is part of finite domain variable with only two
    ## values, which is most of the time the case, and hence refrain from
    ## introducing axioms (that are not supported by all heuristics)
    goal_pairs = list(goal_dict_list[0].items())
    goal = sas_tasks.SASGoal(goal_pairs)

    operators = translate_strips_operators(actions, strips_to_sas, ranges,
                                           mutex_dict, mutex_ranges,
                                           implied_facts)
    axioms = translate_strips_axioms(axioms, strips_to_sas, ranges, mutex_dict,
                                     mutex_ranges)

    # Layer -1 marks non-derived variables; derived ones get their stratum.
    axiom_layers = [-1] * len(ranges)
    for atom, layer in axiom_layer_dict.items():
        assert layer >= 0
        [(var, val)] = strips_to_sas[atom]
        axiom_layers[var] = layer
    variables = sas_tasks.SASVariables(ranges, axiom_layers, translation_key)
    mutexes = [sas_tasks.SASMutexGroup(group) for group in mutex_key]
    return sas_tasks.SASTask(variables, mutexes, init, goal,
                             operators, axioms, metric)
Beispiel #38
0
def main():
    """Parse, constrain, normalize and translate a PDDL task to output.sas.

    Removed the large blocks of commented-out debug code (function-symbol
    dumps, axiom dumps) that cluttered the original; logic is unchanged.
    """
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(domain_filename=options.domain,
                                task_filename=options.task)

    with timers.timing("Handling Global Constraints"):
        task.add_global_constraints()

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Remove delete effects so the task becomes its delete relaxation.
        for action in task.actions:
            for index, effect in reversed(list(enumerate(action.effects))):
                if effect.literal.negated:
                    del action.effects[index]

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as output_file:
            sas_task.output(output_file)
    print("Done! %s" % timer)
Beispiel #39
0
def main():
    """Parse and normalize the PDDL task, then dump its instantiation."""
    timer = timers.Timer()
    with timers.timing("Parsing", True):
        task = pddl_parser.open(
            domain_filename=options.domain, task_filename=options.task)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    if options.generate_relaxed_task:
        # Drop delete effects to obtain the delete relaxation.
        for action in task.actions:
            kept_effects = [effect for effect in action.effects
                            if not effect.literal.negated]
            action.effects[:] = kept_effects

    instantiate_dump(task)
    print("Done! %s" % timer)
def compile(task):
    """Instantiate the task and compile its actions via EVMDDs to PDDL.

    Improvement: the `relaxed_reachable` bail-out is now checked
    immediately after instantiation, so cost transformation and atom
    value attachment are skipped for unsolvable tasks (previously they
    ran first, wasting work).
    """
    with timers.timing("Instantiating", block=True):
        (relaxed_reachable, atoms, actions, axioms,
         reachable_action_params) = instantiate.explore(task)

    # Early exit: nothing to compile if even the relaxation is unsolvable.
    if not relaxed_reachable:
        return unsolvable_sas_task("No relaxed solution")

    # Transform logical terms of all cost functions into arithmetic terms.
    for a in actions:
        if isinstance(a.cost, pddl_parser.CostNode):
            a.cost.transform_logic()

    # Attach predicate value tuples to the grounded atoms.
    predicate_dict = dict((p.name, p) for p in task.predicates)
    for a in atoms:
        p = predicate_dict.get(a.predicate)
        if p and len(p.value_mapping) > 0:
            a.value = p.value_mapping.get(a.args)

    # HACK! Goals should be treated differently.
    if isinstance(task.goal, pddl.Conjunction):
        goal_list = task.goal.parts
    else:
        goal_list = [task.goal]
    for item in goal_list:
        assert isinstance(item, pddl.Literal)

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key, atom_groups = fact_groups.compute_groups(
            task, atoms, reachable_action_params)

    with timers.timing("Building STRIPS to SAS dictionary"):
        ranges, strips_to_sas = strips_to_sas_dictionary(
            groups, assert_partial=options.use_partial_encoding)
    with timers.timing("Building dictionary for full mutex groups"):
        mutex_ranges, mutex_dict = strips_to_sas_dictionary(
            mutex_groups, assert_partial=False)
    with timers.timing("Building mutex information", block=True):
        mutex_key = build_mutex_key(strips_to_sas, mutex_groups)

    compiler = EVMDDActionCompiler()
    actions = compiler.evmdd_action_compilation(actions)
    pddl_writer = SdacPDDLWriter(compiler._fact_name_dict)
    pddl_writer.write_pddl_files(options.domain, options.task, actions)
    print("done!")
Beispiel #41
0
def compute_clusters(axioms, goals, operators):
    """Partition axioms into SCC-based clusters of their dependency graph.

    Builds the axiom dependency graph, prunes variables not needed for
    goals/operators, groups derived variables into strongly connected
    components, simplifies the axioms per cluster, and links clusters by
    positive/negative dependencies.  Raises ValueError when a negative
    dependency stays inside one cluster (axioms not stratifiable).
    """
    dependencies = AxiomDependencies(axioms)

    # Compute necessary literals and prune unnecessary vars from dependencies.
    necessary_literals = compute_necessary_literals(dependencies, goals,
                                                    operators)
    dependencies.remove_unnecessary_variables(necessary_literals)

    groups = get_strongly_connected_components(dependencies)
    clusters = [AxiomCluster(group) for group in groups]

    # Compute mapping from variables to their clusters and set needed_negatively.
    variable_to_cluster = {}
    for cluster in clusters:
        for variable in cluster.variables:
            variable_to_cluster[variable] = cluster
            if variable.negate() in necessary_literals:
                cluster.needed_negatively = True

    # Assign axioms to their clusters.
    for axiom in axioms:
        # axiom.effect is derived but might have been pruned
        if axiom.effect in dependencies.derived_variables:
            variable_to_cluster[axiom.effect].axioms[axiom.effect].append(
                axiom)

    removed = 0
    with timers.timing("Simplifying axioms"):
        for cluster in clusters:
            for variable in cluster.variables:
                old_size = len(cluster.axioms[variable])
                cluster.axioms[variable] = compute_simplified_axioms(
                    cluster.axioms[variable])
                removed += old_size - len(cluster.axioms[variable])
    print("Translator axioms removed by simplifying: %d" % removed)

    # Create links between clusters (positive dependencies).
    for from_variable, depends_on in dependencies.positive_dependencies.items(
    ):
        from_cluster = variable_to_cluster[from_variable]
        for to_variable in depends_on:
            to_cluster = variable_to_cluster[to_variable]
            if from_cluster is not to_cluster:
                from_cluster.positive_children.add(to_cluster)

    # Create links between clusters (negative dependencies).
    for from_variable, depends_on in dependencies.negative_dependencies.items(
    ):
        from_cluster = variable_to_cluster[from_variable]
        for to_variable in depends_on:
            to_cluster = variable_to_cluster[to_variable]
            # A negative edge within one SCC would make stratification
            # impossible, hence the hard error.
            if from_cluster is to_cluster:
                raise ValueError("axioms are not stratifiable")
            from_cluster.negative_children.add(to_cluster)

    return clusters
Beispiel #42
0
def main():
    """Entry point: parse the PDDL input, translate it, write output.sas."""
    options, args = parse_options()
    check_python_version(options.force_old_python)

    timer = timers.Timer()
    with timers.timing("Parsing"):
        task = pddl.open()

    # EXPERIMENTAL!
    # import psyco
    # psyco.full()

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as out_file:
            sas_task.output(out_file)
    print("Done! %s" % timer)
Beispiel #43
0
def main():
    """Translate the PDDL task given on the command line to output.sas."""
    options, args = parse_options()
    check_python_version(options.force_old_python)

    timer = timers.Timer()
    with timers.timing("Parsing"):
        task = pddl.open()

    # EXPERIMENTAL!
    # import psyco
    # psyco.full()

    sas_task = pddl_to_sas(task)
    dump_statistics(sas_task)

    with timers.timing("Writing output"):
        with open("output.sas", "w") as destination:
            sas_task.output(destination)
    print("Done! %s" % timer)
Beispiel #44
0
def explore(task):
    """Ground the task: PDDL -> logic program -> canonical model -> instances."""
    if DEBUG:
        print("DEBUG: Exploring Task Step [1]: create logic program 'prog'")
    logic_program = pddl_to_prolog.translate(task)
    if DEBUG:
        print("DEBUG: Exploring Task Step [2]: build model 'model'")
    canonical_model = build_model.compute_model(logic_program)
    if DEBUG:
        print("DEBUG: Exploring Task Step [3]: instantiate model")
    with timers.timing("Completing instantiation"):
        return instantiate(task, canonical_model)
def handle_axioms(operators, axioms, goals):
    """Simplify and negate axioms, then compute their stratification layers.

    Returns (axioms, axiom_init_list, axiom_layers).
    """
    by_atom = get_axioms_by_atom(axioms)

    needed_literals = compute_necessary_axiom_literals(
        by_atom, operators, goals)
    axiom_init = get_axiom_init(by_atom, needed_literals)
    with timers.timing("Simplifying axioms"):
        axioms = simplify_axioms(by_atom, needed_literals)
    axioms = compute_negative_axioms(by_atom, needed_literals)
    # NOTE: compute_negative_axioms more or less invalidates by_atom.
    #       Careful with that axe, Eugene!
    layers = compute_axiom_layers(axioms, axiom_init)
    return axioms, list(axiom_init), layers
Beispiel #46
0
def translate_task(strips_to_sas, ranges, mutex_dict, mutex_ranges, init, goals,
                   actions, axioms, metric, implied_facts):
    with timers.timing("Processing axioms", block=True):
        axioms, axiom_init, axiom_layer_dict = axiom_rules.handle_axioms(
            actions, axioms, goals)
    init = init + axiom_init
    #axioms.sort(key=lambda axiom: axiom.name)
    #for axiom in axioms:
    #  axiom.dump()

    init_values = [rang - 1 for rang in ranges]
    # Closed World Assumption: Initialize to "range - 1" == Nothing.
    for fact in init:
        pair = strips_to_sas.get(fact)
        pairs = strips_to_sas.get(fact, [])  # empty for static init facts
        for var, val in pairs:
            assert init_values[var] == ranges[var] - 1, "Inconsistent init facts!"
            init_values[var] = val
    init = sas_tasks.SASInit(init_values)

    goal_dict_list = translate_strips_goal(goals, strips_to_sas, ranges, mutex_dict, mutex_ranges)
    assert len(goal_dict_list) == 1, "Negative goal not supported"
    ## we could substitute the negative goal literal in
    ## normalize.substitute_complicated_goal, using an axiom. We currently
    ## don't do this, because we don't run into this assertion, if the
    ## negative goal is part of finite domain variable with only two
    ## values, which is most of the time the case, and hence refrain from
    ## introducing axioms (that are not supported by all heuristics)
    goal_pairs = goal_dict_list[0].items()
    goal = sas_tasks.SASGoal(goal_pairs)

    operators = translate_strips_operators(actions, strips_to_sas, ranges, mutex_dict, mutex_ranges, implied_facts)
    axioms = translate_strips_axioms(axioms, strips_to_sas, ranges, mutex_dict, mutex_ranges)

    axiom_layers = [-1] * len(ranges)
    for atom, layer in axiom_layer_dict.iteritems():
        assert layer >= 0
        [(var, val)] = strips_to_sas[atom]
        axiom_layers[var] = layer
    variables = sas_tasks.SASVariables(ranges, axiom_layers)

    return sas_tasks.SASTask(variables, init, goal, operators, axioms, metric)
Beispiel #47
0
def pddl_to_sas(task, agent_id, agent_url):
    comm = None
    if agent_id >= 0 and len(agent_url) > 1:
        comm = AgentComm(agent_id, agent_url)

    with timers.timing("Instantiating", block=True):
        (relaxed_reachable, atoms, actions, axioms,
         reachable_action_params) = instantiate.explore(task, comm)

    if not relaxed_reachable:
        return unsolvable_sas_task("No relaxed solution")

    # HACK! Goals should be treated differently.
    if isinstance(task.goal, pddl.Conjunction):
        goal_list = task.goal.parts
    else:
        goal_list = [task.goal]
    for item in goal_list:
        assert isinstance(item, pddl.Literal)

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            task, atoms, reachable_action_params,
            partial_encoding=USE_PARTIAL_ENCODING,
            comm = comm)

    with timers.timing("Building STRIPS to SAS dictionary"):
        ranges, strips_to_sas = strips_to_sas_dictionary(
            groups, assert_partial=USE_PARTIAL_ENCODING)

    if comm is not None:
        # Each group contains either all public or all private values
        private_vars = [x[0].is_private for x in groups]
    else:
        private_vars = [None for _ in groups]

    with timers.timing("Building dictionary for full mutex groups"):
        mutex_ranges, mutex_dict = strips_to_sas_dictionary(
            mutex_groups, assert_partial=False)

    if ADD_IMPLIED_PRECONDITIONS:
        with timers.timing("Building implied facts dictionary..."):
            implied_facts = build_implied_facts(strips_to_sas, groups,
                                                mutex_groups)
    else:
        implied_facts = {}

    with timers.timing("Building mutex information", block=True):
        mutex_key = build_mutex_key(strips_to_sas, mutex_groups)

    with timers.timing("Translating task", block=True):
        sas_task = translate_task(
            strips_to_sas, ranges, translation_key, private_vars,
            mutex_dict, mutex_ranges, mutex_key,
            task.init, goal_list, actions, axioms, task.use_min_cost_metric,
            implied_facts, task.agents, comm)

    print("%d effect conditions simplified" %
          simplified_effect_condition_counter)
    print("%d implied preconditions added" %
          added_implied_precondition_counter)

    if comm is not None:
        comm.close()

    if DETECT_UNREACHABLE and comm is None:
        with timers.timing("Detecting unreachable propositions", block=True):
            try:
                simplify.filter_unreachable_propositions(sas_task)
            except simplify.Impossible:
                return unsolvable_sas_task("Simplified to trivially false goal")

    return sas_task
Beispiel #48
0
            if not rest == "":
                #print "there are args" , rest
                args = rest.split(",")
            else:
                args = []
            print_line = "%d %d %s %d " % (var, val, predicate, len(args))
            for arg in args:
                print_line += str(arg).strip() + " "
            #print fact
            #print print_line
            print >> invariants_file, print_line
    print >> invariants_file, "end_groups"
    invariants_file.close()


if __name__ == "__main__":
    import pddl

    # Parse the PDDL input; pddl.open() takes its filenames from the
    # command line internally (no arguments passed here).
    timer = timers.Timer()
    with timers.timing("Parsing"):
        task = pddl.open()

    # EXPERIMENTAL!
    # import psyco
    # psyco.full()

    # Translate and write the result.  NOTE(review): this block uses
    # Python 2-only constructs (the `file` builtin and the `print`
    # statement) and will not run under Python 3.
    sas_task = pddl_to_sas(task)
    with timers.timing("Writing output"):
        sas_task.output(file("output.sas", "w"))
    print "Done! %s" % timer
Beispiel #49
0
def pddl_to_sas(task):
    """Translate a partially observable (POND) PDDL task into a SAS task.

    Grounds a copy of the task in which unknown init facts are assumed
    true (for reachability), computes fact groups and mutex dictionaries,
    and hands the ORIGINAL task's init/goal data to translate_task.

    Returns the resulting SAS task, an unsolvable SAS task when
    simplification proves the goal trivially false, or None when the goal
    is not even relaxed-reachable (callers must handle None).
    """
    # for partial observability assume that unknown facts
    # are true initially (to use it in the reachability analysis)
    mod_task = deepcopy(task)
    mod_task.init = mod_task.init + mod_task.init_unknown

    with timers.timing("Instantiating", block=True):
        (relaxed_reachable, atoms, actions, observation_actions, axioms,
         reachable_action_params) = instantiate.explore(mod_task)

    if not relaxed_reachable:
        # POND we return no unsolvable task
        # NOTE(review): this returns None rather than unsolvable_sas_task();
        # confirm that every caller checks for a None result.
        return


    # HACK! Goals should be treated differently.
    if isinstance(task.goal, pddl.Conjunction):
        goal_list = task.goal.parts
    else:
        goal_list = [task.goal]
    for item in goal_list:
        assert isinstance(item, pddl.Literal)

    # Fact groups are computed on mod_task (with unknowns assumed true) so
    # that invariants cover the unknown facts as well.
    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            mod_task, atoms, reachable_action_params,
            partial_encoding=USE_PARTIAL_ENCODING)

    with timers.timing("Building STRIPS to SAS dictionary"):
        ranges, strips_to_sas = strips_to_sas_dictionary(
            groups, assert_partial=USE_PARTIAL_ENCODING)

    with timers.timing("Building dictionary for full mutex groups"):
        mutex_ranges, mutex_dict = strips_to_sas_dictionary(
            mutex_groups, assert_partial=False)

    if ADD_IMPLIED_PRECONDITIONS:
        with timers.timing("Building implied facts dictionary..."):
            implied_facts = build_implied_facts(strips_to_sas, groups,
                                                mutex_groups)
    else:
        implied_facts = {}

    with timers.timing("Building mutex information", block=True):
        mutex_key = build_mutex_key(strips_to_sas, mutex_groups)

    # Translation uses the ORIGINAL task's init/goal/formula data;
    # mod_task was only needed for the reachability analysis above.
    with timers.timing("Translating task", block=True):
        sas_task = translate_task(
            strips_to_sas, ranges, translation_key,
            mutex_dict, mutex_ranges, mutex_key,
            task.init, task.init_unknown, task.init_oneof,
            task.init_formula, goal_list, actions, observation_actions,
            axioms, task.use_min_cost_metric, implied_facts)

    print("%d effect conditions simplified" %
          simplified_effect_condition_counter)
    print("%d implied preconditions added" %
          added_implied_precondition_counter)

    if DETECT_UNREACHABLE:
        with timers.timing("Detecting unreachable propositions", block=True):
            try:
                simplify.filter_unreachable_propositions(sas_task)
            except simplify.Impossible:
                return unsolvable_sas_task("Simplified to trivially false goal")

    return sas_task
Beispiel #50
0
def translate_task(strips_to_sas, ranges, translation_key,
                   mutex_dict, mutex_ranges, mutex_key,
                   init, init_unknown, init_oneof, init_formula, goals,
                   actions, observation_actions, axioms, metric, implied_facts):
    """Assemble the final SASTask for a partially observable problem.

    Translates the initial state (known facts, unknown facts, oneof
    constraints and init formulae), the goal, the (observation) operators
    and the axioms from STRIPS facts to finite-domain variable/value
    pairs via strips_to_sas.

    Returns a sas_tasks.SASTask, or an unsolvable SAS task when the goal
    violates a mutex.
    """
    with timers.timing("Processing axioms", block=True):
        axioms, axiom_init, axiom_layer_dict = axiom_rules.handle_axioms(
            actions, axioms, goals)
    init = init + axiom_init
    #axioms.sort(key=lambda axiom: axiom.name)
    #for axiom in axioms:
    #  axiom.dump()

    if DUMP_TASK:
        # Remove init facts that don't occur in strips_to_sas: they're constant.
        # Materialize the filter: Python 3 filter objects are single-pass
        # iterators, which would break if dump_task iterates more than once.
        nonconstant_init = list(filter(strips_to_sas.get, init))
        dump_task(nonconstant_init, goals, actions, axioms, axiom_layer_dict)

    # Closed World Assumption: start with every variable index and strike
    # out those covered by a known or unknown init fact; the remaining
    # variables take their "none of those" value (ranges[var] - 1).
    false_facts = list(range(len(ranges)))
    for fact in init_unknown:
        pairs = strips_to_sas.get(fact, [])
        for pair in pairs:
            false_facts.remove(pair[0])

    facts = []
    for fact in init:
        assert fact not in init_unknown
        pairs = strips_to_sas.get(fact, [])
        for pair in pairs:
            false_facts.remove(pair[0])
        if pairs:
            facts = facts + pairs
    # BUG FIX: the original repeated `assert fact not in init_unknown` inside
    # this loop, but `fact` was the stale variable left over from the loop
    # above, so the assertion was vacuous. The loop only handles variable
    # indices, so there is nothing to check here.
    for var in false_facts:
        facts.append((var, ranges[var] - 1))
    facts_oneof = []
    for oneof in init_oneof:
        assert len(oneof) >= 2
        for fact in oneof:
            assert fact in init_unknown
        l = []
        for one in oneof:
            l = l + strips_to_sas.get(one, [])
        facts_oneof.append(l)

    # move to conditions.py?
    def translate_formula(formula, result, strips_to_sas, ranges, mutex_dict, mutex_ranges):
        # Recursively flatten an init formula into a token list:
        # "and(" / "or(" / ")" markers with (var, val) pair lists between.
        if isinstance(formula, pddl.conditions.Atom):
            assert formula in init_unknown
            result.append(strips_to_sas.get(formula, []))
        elif isinstance(formula, pddl.conditions.NegatedAtom):
            dict_list = translate_strips_conditions([formula], strips_to_sas, ranges, mutex_dict, mutex_ranges)
            assert len(dict_list) == 1
            # list() so all branches append list-like values rather than a
            # Python 3 dict view.
            result.append(list(dict_list[0].items()))
        elif isinstance(formula, pddl.conditions.Disjunction):
            result.append("or(")
            for part in formula.parts:
                translate_formula(part, result, strips_to_sas, ranges, mutex_dict, mutex_ranges)
            result.append(")")
        elif isinstance(formula, pddl.conditions.Conjunction):
            result.append("and(")
            for part in formula.parts:
                translate_formula(part, result, strips_to_sas, ranges, mutex_dict, mutex_ranges)
            result.append(")")
        else:
            assert False, "unexpected formula: %r" % (formula,)
    formulae = []
    for formula in init_formula:
        result = []
        translate_formula(formula, result, strips_to_sas, ranges, mutex_dict, mutex_ranges)
        formulae.append(result)
    init = sas_tasks.SASInit(facts, facts_oneof, formulae)

    goal_dict_list = translate_strips_conditions(goals, strips_to_sas, ranges,
                                                 mutex_dict, mutex_ranges)
    if goal_dict_list is None:
        # "None" is a signal that the goal is unreachable because it
        # violates a mutex.
        return unsolvable_sas_task("Goal violates a mutex")

    assert len(goal_dict_list) == 1, "Negative goal not supported"
    ## we could substitute the negative goal literal in
    ## normalize.substitute_complicated_goal, using an axiom. We currently
    ## don't do this, because we don't run into this assertion, if the
    ## negative goal is part of finite domain variable with only two
    ## values, which is most of the time the case, and hence refrain from
    ## introducing axioms (that are not supported by all heuristics)
    goal_pairs = list(goal_dict_list[0].items())
    goal = sas_tasks.SASGoal(goal_pairs)

    operators = translate_strips_operators(actions, strips_to_sas, ranges,
                                           mutex_dict, mutex_ranges,
                                           implied_facts)
    observation_operators = translate_strips_operators(observation_actions,
                                                       strips_to_sas, ranges,
                                                       mutex_dict, mutex_ranges,
                                                       implied_facts)
    axioms = translate_strips_axioms(axioms, strips_to_sas, ranges, mutex_dict,
                                     mutex_ranges)

    axiom_layers = [-1] * len(ranges)
    for atom, layer in axiom_layer_dict.items():
        assert layer >= 0
        [(var, val)] = strips_to_sas[atom]
        axiom_layers[var] = layer
    variables = sas_tasks.SASVariables(ranges, axiom_layers, translation_key)
    mutexes = [sas_tasks.SASMutexGroup(group) for group in mutex_key]
    return sas_tasks.SASTask(variables, mutexes, init, goal,
                             operators + observation_operators,
                             axioms, metric)
def pddl_to_sas(task):
    """Translate a PDDL task with state-dependent action costs (SDAC).

    Grounds the task, compiles cost functions via EVMDDs, and writes
    compiled PDDL files. NOTE(review): in its current form the function
    calls exit(0) after writing the PDDL files, so the SAS translation
    below that point is unreachable — presumably deliberate scaffolding;
    confirm before relying on the returned SAS task.
    """
    with timers.timing("Instantiating", block=True):
        (relaxed_reachable, atoms, actions, axioms,
         reachable_action_params) = instantiate.explore(task)

    # Transform logical terms of all cost functions into arithmetic terms.
    for a in actions:
        if isinstance(a.cost, pddl_parser.CostNode):
            a.cost.transform_logic()

    # writing value tuples to atoms
    predicate_dict = dict((p.name, p ) for p in task.predicates)
    for a in atoms:
        p = predicate_dict.get(a.predicate)
        if p and len(p.value_mapping) > 0:
            a.value = p.value_mapping.get(a.args)
    if not relaxed_reachable:
        return unsolvable_sas_task("No relaxed solution")
    # HACK! Goals should be treated differently.
    if isinstance(task.goal, pddl.Conjunction):
        goal_list = task.goal.parts
    else:
        goal_list = [task.goal]
    for item in goal_list:
        assert isinstance(item, pddl.Literal)

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key, atom_groups = fact_groups.compute_groups(
            task, atoms, reachable_action_params)

    with timers.timing("Building STRIPS to SAS dictionary"):
        ranges, strips_to_sas = strips_to_sas_dictionary(
            groups, assert_partial=options.use_partial_encoding)
    with timers.timing("Building dictionary for full mutex groups"):
        mutex_ranges, mutex_dict = strips_to_sas_dictionary(
            mutex_groups, assert_partial=False)

    if options.add_implied_preconditions:
        with timers.timing("Building implied facts dictionary..."):
            implied_facts = build_implied_facts(strips_to_sas, groups,
                                                mutex_groups)
    else:
        implied_facts = {}

    with timers.timing("Building mutex information", block=True):
        mutex_key = build_mutex_key(strips_to_sas, mutex_groups)
    if options.exp:
        actions = pddl_parser.transform_exp_actions(actions, mutex_groups)
    # Compile state-dependent costs into EVMDD form and emit compiled
    # PDDL files for the domain/problem pair.
    compiler = EVMDDActionCompiler()
    actions = compiler.evmdd_action_compilation(actions)
    pddl_writer = SdacPDDLWriter(compiler._fact_name_dict)
    pddl_writer.write_pddl_files(options.domain, options.task, actions)
    print("done!")
    # NOTE(review): everything below this exit(0) is unreachable.
    exit(0)
    task.inst_actions = actions
    with timers.timing("Translating task", block=True):
        sas_task = translate_task(
            strips_to_sas, ranges, translation_key,
            mutex_dict, mutex_ranges, mutex_key,
            task.init, goal_list, actions, axioms, task.use_min_cost_metric,
            implied_facts, atom_groups)
    print("%d effect conditions simplified" %
          simplified_effect_condition_counter)
    print("%d implied preconditions added" %
          added_implied_precondition_counter)
    if options.filter_unreachable_facts:
        with timers.timing("Detecting unreachable propositions", block=True):
            try:
                simplify.filter_unreachable_propositions(sas_task)
            except simplify.Impossible:
                return unsolvable_sas_task("Simplified to trivially false goal")
            except simplify.TriviallySolvable:
                return solvable_sas_task("Simplified to empty goal")
    atom_dict = dict((str(a), a) for a in atoms)

    # Keep only atom groups that correspond to a surviving SAS variable
    # (simplification above may have removed variables).
    new_atom_groups = []
    for i in range(0, len(sas_task.variables.atom_groups)):
        group_str = [str(fact) for fact in sas_task.variables.atom_groups[i]]
        for j in range(0, len(sas_task.variables.value_names)):
            if group_str == sas_task.variables.value_names[j]:
                new_atom_groups.append(sas_task.variables.atom_groups[i])
                break
    sas_task.variables.atom_groups = new_atom_groups
    # Sanity check: atom groups and value names must now align by index.
    for i in range(0, len(sas_task.variables.atom_groups)):
        group_str = [str(fact) for fact in sas_task.variables.atom_groups[i]]
        assert(group_str == sas_task.variables.value_names[i])

    # Transform atoms of all cost functions into sas notation.
    # Simplify cost function
    for op in sas_task.operators:
        if isinstance(op.cost, pddl_parser.CostNode):
            op.cost.to_sas(sas_task.variables.atom_groups, atom_dict, sas_task.variables.deleted_true_variables)
            op.cost = op.cost.get_simplified_function()
    return sas_task
Beispiel #52
0
def _explore(task, add_fluents=None):
    """Ground *task*, additionally treating *add_fluents* as fluent.

    BUG FIX: the original used a mutable default argument
    (add_fluents=set()), which is shared across calls; use None as the
    sentinel and create a fresh set per call instead.
    """
    if add_fluents is None:
        add_fluents = set()
    prog = pddl_to_prolog.translate(task, add_fluents)
    model = build_model.compute_model(prog)
    with timers.timing("Completing instantiation"):
        return instantiate(task, model, add_fluents)
Beispiel #53
0
def explore(task):
    """Ground *task*: translate it to a logic program, compute the
    program's canonical model, and instantiate the task from it."""
    logic_program = pddl_to_prolog.translate(task)
    canonical_model = build_model.compute_model(logic_program)
    with timers.timing("Completing instantiation"):
        return instantiate(task, canonical_model)
Beispiel #54
0
def get_groups(task):
    """Find invariants of *task* and turn them into useful fact groups.

    Returns the list produced by useful_groups() for the invariants
    found by find_invariants(), evaluated against the initial state.
    """
    with timers.timing("Finding invariants"):
        found_invariants = list(find_invariants(task))
    with timers.timing("Checking invariant weight"):
        groups = list(useful_groups(found_invariants, task.init))
    return groups
Beispiel #55
0
def default(domain_file, problem_file, output_task):
    """Parse and ground a PDDL pair and populate *output_task* with
    atoms, invariants (mutex groups of size >= 2), and deterministic
    propositional actions.

    Exits the process with status 2 when the goal is not relaxed-
    reachable. Records the parsing time on output_task.parsing_time.
    """
    parsing_timer = timers.Timer()
    print("Domain: %s Problem: %s" % (domain_file, problem_file))

    task = pddl.open(problem_file, domain_file)

    relaxed_reachable, atoms, actions, axioms, reachable_action_params = explore(task)
    print("goal relaxed reachable: %s" % relaxed_reachable)
    if not relaxed_reachable:
        print("No weak plan exists")
        sys.exit(2)

    print("%d atoms" % len(atoms))

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            task, atoms, reachable_action_params,
            partial_encoding=USE_PARTIAL_ENCODING)

    # Assign every atom an index and register it with the output task.
    index = 0
    atom_table = {}
    for atom in atoms:
        atom.index = index
        atom_table[atom.text()] = index
        output_task.add_atom(atom.text())
        index += 1

    print("Invariants %d" % len(mutex_groups))
    for group in mutex_groups:
        if len(group) >= 2:
            print("{%s}" % ", ".join(map(str, group)))
            output_task.add_invariant(encode(group, atom_table))

    print("Deterministic %d actions" % len(actions))
    nd_actions = {}
    for action in actions:
        nd_action = PropositionalDetAction(action.name, action.cost)
        nd_action.set_precondition(action.precondition, atom_table)
        nd_action.add_effect(action.add_effects, action.del_effects, atom_table)
        nd_actions[nd_action.name] = nd_action

    # Register each action with its precondition and effects.
    # (Removed dead code: the original also built text_prec/text_eff
    # debug strings that were never used.)
    index = 0
    for action in nd_actions.values():
        output_task.add_action(action.name)
        output_task.add_precondition(index, action.precondition)
        for eff in action.effects:
            output_task.add_effect(index, eff)
        index += 1
    output_task.set_domain_name(task.domain_name)
    output_task.set_problem_name(task.task_name)
    output_task.set_init(encode(task.init, atom_table))
    output_task.set_goal(encode(task.goal, atom_table))
    output_task.parsing_time = parsing_timer.report()
def compute_groups(task, atoms, reachable_action_params, actions):
    """Compute mutex (fact) groups for the grounded task.

    The algorithms to run are given by options.mutex as a '+'-separated
    list (e.g. "fd+h2"); their results are unioned. Order matters for
    the 'extend' algorithm, which extends the groups found so far.

    Returns (groups, mutex_groups, translation_key).
    Raises Exception for an unrecognized algorithm name.
    """
    import mutex
    import time

    t = time.time()
    groups = set()
    for mtype in options.mutex.split('+'):
        if mtype == 'fd':
            # Fast Downward's invariant synthesis, then instantiation.
            g = invariant_finder.get_groups(task, reachable_action_params)
            with timers.timing("Instantiating groups"):
                g = instantiate_groups(g, task, atoms)
            g = set(frozenset(x) for x in g)

        elif mtype == 'h2':
            g, _ = mutex.h2(task, atoms, actions)

        elif mtype == 'fa':
            g, _ = mutex.fa(task, atoms, actions)

        elif mtype == 'rfa':
            g, _ = mutex.rfa(task, atoms, actions)

        elif mtype == 'rfa-ilp':
            g, _ = mutex.fa(task, atoms, actions, rfa=True)

        elif mtype == 'rfa-ilp-c':
            g, _ = mutex.rfa_complete(task, atoms, actions)

        elif mtype == 'extend':
            # Extends the groups accumulated by the algorithms listed
            # before this one in options.mutex.
            g, _ = mutex.extend_mutexes(groups, task, atoms, actions)

        elif mtype == 'full':
            g, _ = mutex.full(task, atoms, actions, True)

        elif mtype.startswith('full'):
            # 'fullN' limits the exhaustive search to groups of size N.
            size = int(mtype[4:])
            g, _ = mutex.full(task, atoms, actions, True, size)

        else:
            # BUG FIX: error message previously misspelled "Uknown".
            raise Exception('Unknown mutex algorithm: ' + mtype)

        # Singleton groups carry no mutex information.
        g = set(x for x in g if len(x) > 1)
        groups |= g

    if options.mutex_max:
        groups = mutex.max_mutexes(groups)

    t = time.time() - t
    print('MUTEX TIME:', t)

    for m in groups:
        print('MUTEX:', ';'.join(sorted([str(x) for x in m])))
    for m in mutex.pair_mutexes(groups):
        print('MUTEX2:', ';'.join(sorted([str(x) for x in m])))

    if options.mutex_check >= 0:
        # Cross-check the computed groups against an exhaustive search
        # bounded by options.mutex_check.
        check, _ = mutex.full(task, atoms, actions, True, options.mutex_check)
        if check is not None:
            mutex.check_mutexes(check, groups)

    # Sort here already to get deterministic mutex groups.
    groups = sort_groups(groups)
    # TODO: I think that collect_all_mutex_groups should do the same thing
    #       as choose_groups with partial_encoding=False, so these two should
    #       be unified.
    with timers.timing("Collecting mutex groups"):
        mutex_groups = collect_all_mutex_groups(groups, atoms)
    with timers.timing("Choosing groups", block=True):
        groups = choose_groups(groups, atoms)
    groups = sort_groups(groups)
    with timers.timing("Building translation key"):
        translation_key = build_translation_key(groups)

    if DEBUG:
        for group in groups:
            if len(group) >= 2:
                print("{%s}" % ", ".join(map(str, group)))

    return groups, mutex_groups, translation_key
Beispiel #57
0
def fodet(domain_file, problem_file, output_task):
    """Parse, normalize and ground a PDDL pair (first-order determinized
    variant) and populate *output_task* with atoms, axioms, and
    deterministic propositional actions (including conditional effects
    and costs).

    Exits the process with status 2 when the goal is not relaxed-
    reachable. Records the parsing time on output_task.parsing_time.

    BUG FIXES vs. the original:
    - indentation mixed tabs and spaces (a TabError under Python 3);
      normalized to spaces.
    - nd_actions is a *list* of (name, action) pairs, but the original
      called nd_actions.iteritems() on it (AttributeError); iterate the
      list directly.
    - dict.iteritems() (Python 2 only) replaced by dict.items().
    """
    parsing_timer = timers.Timer()

    print("Domain: %s Problem: %s" % (domain_file, problem_file))

    with timers.timing("Parsing", True):
        task = pddl_file.open(
            domain_filename=domain_file, task_filename=problem_file)

    with timers.timing("Normalizing task"):
        normalize.normalize(task)

    relaxed_reachable, atoms, actions, axioms, reachable_action_params = explore(task)
    print("goal relaxed reachable: %s" % relaxed_reachable)
    if not relaxed_reachable:
        print("No plan exists")
        sys.exit(2)

    print("%d atoms" % len(atoms))

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            task, atoms, reachable_action_params)

    # Register atoms in sorted order so indices are deterministic.
    index = 0
    atom_table = {}

    atom_names = [atom.text() for atom in atoms]
    atom_names.sort()

    for atom in atom_names:
        atom_table[atom] = index
        output_task.add_atom(atom)
        index += 1

    print("Axioms %d" % len(axioms))
    for axiom in axioms:
        axiom.dump()
        output_task.add_axiom(encode(axiom.condition, atom_table),
                              encode([axiom.effect], atom_table))

    print("Deterministic %d actions" % len(actions))
    nd_actions = []
    for action in actions:
        #print( "action: %s cost: %d"%(action.name,action.cost) )
        nd_action = PropositionalDetAction(action.name, action.cost)
        nd_action.set_precondition(action.precondition, atom_table)
        nd_action.add_effect(action.add_effects, action.del_effects,
                             atom_table, atom_names, axioms)
        nd_actions.append((nd_action.name, nd_action))

    for name, _ in nd_actions:
        output_task.add_action(name)

    index = 0
    for action_name, action in nd_actions:
        output_task.add_precondition(index, action.precondition)
        for eff in action.effects:
            output_task.add_effect(index, eff)
        for cond, eff in action.cond_effs.items():
            output_task.add_cond_effect(index, list(cond), eff)
        output_task.set_cost(index, action.cost)
        index += 1
    output_task.set_domain_name(task.domain_name)
    output_task.set_problem_name(task.task_name)
    output_task.set_init(encode(task.init, atom_table))
    output_task.set_goal(encode(task.goal, atom_table))
    output_task.parsing_time = parsing_timer.report()
Beispiel #58
0
def default(domain_file, problem_file, output_task):
    """Parse, normalize and ground a PDDL pair and populate *output_task*
    with atoms, deterministic propositional actions (with negated-
    condition fluents compiled in), and mutex groups.

    Names are passed as UTF-8 encoded bytes to output_task — presumably
    a native-extension requirement; confirm against the binding.

    Exits the process with status 2 when the goal is not relaxed-
    reachable. Records the parsing time on output_task.parsing_time.

    BUG FIXES vs. the original:
    - indentation mixed tabs and spaces (a TabError under Python 3);
      normalized to spaces.
    - dict.iteritems() (Python 2 only) replaced by dict.items().
    """
    parsing_timer = timers.Timer()
    print("Domain: %s Problem: %s" % (domain_file, problem_file))

    with timers.timing("Parsing", True):
        task = pddl_file.open(
            domain_filename=domain_file, task_filename=problem_file)

    normalize.normalize(task)

    relaxed_reachable, atoms, actions, axioms, reachable_action_params = explore(task)
    print("goal relaxed reachable: %s" % relaxed_reachable)
    if not relaxed_reachable:
        print("No plan exists")
        sys.exit(2)

    print("%d atoms" % len(atoms))

    with timers.timing("Computing fact groups", block=True):
        groups, mutex_groups, translation_key = fact_groups.compute_groups(
            task, atoms, reachable_action_params)

    # Register atoms in sorted order so indices are deterministic.
    index = 0
    atom_table = {}

    atom_names = [atom.text() for atom in atoms]
    atom_names.sort()

    for atom in atom_names:
        atom_table[atom] = index
        output_task.add_atom(atom.encode('utf-8'))
        index += 1

    print("Axioms %d" % len(axioms))

    print("Deterministic %d actions" % len(actions))
    nd_actions = []
    for action in actions:
        #print( "action: %s cost: %d"%(action.name,action.cost) )
        nd_action = PropositionalDetAction(action.name, action.cost)
        nd_action.set_precondition(action.precondition, atom_table)
        nd_action.add_effect(action.add_effects, action.del_effects,
                             atom_table, atom_names, axioms)
        if len(nd_action.negated_conditions) > 0:
            output_task.notify_negated_conditions(nd_action.negated_conditions)
        nd_actions.append((nd_action.name, nd_action))

    output_task.create_negated_fluents()

    for name, _ in nd_actions:
        output_task.add_action(name.encode('utf-8'))

    index = 0
    for _, action in nd_actions:
        output_task.add_precondition(index, action.precondition)
        for eff in action.effects:
            output_task.add_effect(index, eff)
        for cond, eff in action.cond_effs.items():
            output_task.add_cond_effect(index, list(cond), eff)
        output_task.set_cost(index, action.cost)
        index += 1

    # MRJ: Mutex groups processing needs to go after negations are compiled away
    print("Invariants %d" % len(mutex_groups))
    for group in mutex_groups:
        if len(group) >= 2:
            output_task.add_mutex_group(encode(group, atom_table))

    output_task.set_domain_name(task.domain_name.encode('utf-8'))
    output_task.set_problem_name(task.task_name.encode('utf-8'))
    output_task.set_init(encode(task.init, atom_table))
    output_task.set_goal(encode(task.goal, atom_table))
    output_task.parsing_time = parsing_timer.report()