Example #1
def apply_operators(ele, operators, knowledge_base, epsilon):
    operator_output = None
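    # Try each operator in turn and return the output of the first one whose
    # effect pattern unifies with ele and whose conditions can be satisfied.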
    for operator in operators:
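        # Take one effect of the operator: effect[0] is the pattern matched
        # against ele, effect[1] is the expression executed below.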
        effect = list(operator.effects)[0]
        pattern = effect[0]
        # unify returns a mapping naming the values in the expression
        u_mapping = unify(pattern, ground(ele), {})
        if (u_mapping):
            condition_sub = [subst(u_mapping, x) for x in operator.conditions]

            value_map = next(
                knowledge_base.fc_query(condition_sub,
                                        max_depth=0,
                                        epsilon=epsilon))
            try:
                operator_output = execute_functions(subst(
                    value_map, effect[1]))
            except Exception:
                # Execution failed for this binding; try the next operator.
                continue

            return operator_output
Example #2
    def get_matches(self, x, constraints=None, epsilon=0.0):
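        """
        Yield each binding of the operator's arguments (operator.name[1:])
        that matches against the grounded facts of the example x.
        """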
        if self.operator is None:
            return

        grounded = [(ground(a), x[a]) for a in x if (isinstance(a, tuple))]
        index = build_index(grounded)

        for m in self.operator.match(index, epsilon=epsilon):
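            # Translate the matched bindings back into ungrounded element
            # names, one per argument in the operator header.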
            result = tuple(
                [unground(subst(m, ele)) for ele in self.operator.name[1:]])
            yield result
Example #3
    def get_matches(self, x, epsilon=0.0):
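        """
        Yield, for every hypothesis in the learner's h-set, each binding of
        the rule arguments that matches the grounded example x.
        """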

        grounded = self.ground_example(x)

        index = build_index(grounded)

        for h in self.learner.get_hset():
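            # Wrap each candidate hypothesis h in a rule Operator whose header
            # includes the initial args, so it can be matched against the index.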
            operator = Operator(tuple(('Rule', ) + self.args), h, [])

            for m in operator.match(index, epsilon=epsilon):
                result = tuple(
                    [unground(subst(m, ele)) for ele in operator.name[1:]])
                yield result
Example #4
    def explain_sai(self, skill, learner_dict, sai, knowledge_base, state):
        """
        Yield the variable mappings under which the skill's expression,
        evaluated against the knowledge base in the current state,
        reproduces the given SAI.
        """
        exp, iargs = skill
        skill_where = learner_dict['where']

        for match in skill_where.get_matches(state):
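            # Bind the expression's variables to this where-match, then
            # evaluate the grounded expression against the knowledge base.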
            mapping = {var: val for var, val in zip(get_vars(exp), match)}
            grounded_exp = subst(mapping, exp)
            rg_exp = tuple(
                eval_expression(grounded_exp, knowledge_base,
                                self.function_set, self.epsilon))

            if (rg_exp == sai):
                yield mapping
Example #5
    def request(self, state):
        tup = Tuplizer()
        flt = Flattener()
        state = flt.transform(tup.transform(state))

        new = {}
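        # Add derived features: an 'editable' flag for empty value fields and
        # an 'eq' relation between distinct fields with identical, non-empty
        # values.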
        for attr in state:
            if (isinstance(attr, tuple) and attr[0] == 'value'):
                new[('editable', attr[1])] = state[attr] == ''
                for attr2 in state:
                    if (isinstance(attr2, tuple) and attr2[0] == 'value'):
                        if (attr2 == attr or attr < attr2
                                or (state[attr] == "" or state[attr2] == "")):
                            continue
                        if (state[attr] == state[attr2]):
                            new[('eq', attr, attr2)] = True
        state.update(new)

        # for episode in range(self.max_episodes):
        while True:

            print("#########")
            print("NEW TRACE")
            print("#########")
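            # Build a planner over the grounded state; '?' characters in
            # string values are escaped as 'QM', presumably so they are not
            # parsed as variables.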
            kb = FoPlanner([(self.ground(a), state[a].replace('?', 'QM')
                             if isinstance(state[a], str) else state[a])
                            for a in state], functionsets[self.action_set])

            curr_state = {x[0]: x[1] for x in kb.facts}
            next_actions = [a for a in kb.fc_get_actions(epsilon=epsilon)]
            trace_actions = []
            depth = 0

            while depth < search_depth:
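                # Score every available action with the current Q estimates.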
                actions = [(self.Q.evaluate(curr_state, get_action_key(a)), a)
                           for a in next_actions]

                print("NEXT ACTION WEIGHTS")
                print(
                    sorted([(w, a[0].name[0]) for w, a in actions],
                           reverse=True))

                # operator, mapping, effects = weighted_choice(actions)
                operator, mapping, effects = max_choice(actions)
                # operator, mapping, effects = choice(action_space)

                self.last_state = curr_state
                self.last_action = get_action_key((operator, mapping, effects))
                trace_actions.append(subst(mapping, operator.name))

                for f in effects:
                    kb.add_fact(f)

                # if not terminal, then decrease reward
                # self.reward = -1
                self.reward = 0
                curr_state = {x[0]: x[1] for x in kb.facts}
                depth += 1

                # check if we're in a terminal state
                # if so, query oracle
                for f in effects:
                    f = self.unground(f)
                    if f[0] == 'sai':
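                        # An 'sai' effect marks a terminal state: package it
                        # as a selection/action/inputs response and ask the
                        # oracle for feedback.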
                        response = {}
                        response['label'] = str(trace_actions)
                        response['selection'] = f[1]
                        response['action'] = f[2]
                        response['inputs'] = {'value': f[3]}
                        # {a: rg_exp[3+i] for i, a in
                        #                       enumerate(input_args)}
                        # response['inputs'] = list(rg_exp[3:])
                        response['foas'] = []
                        print("EXECUTING ACTION", self.last_action)
                        print("Requesting oracle feedback")

                        return response

                # punish for failed search
                if depth >= search_depth:
                    # self.reward -= 3 * search_depth
                    curr_state = None
                    next_actions = []
                else:
                    # because we're not terminal we can compute next_actions
                    next_actions = [
                        a for a in kb.fc_get_actions(epsilon=epsilon,
                                                     must_match=effects)
                    ]

                self.Q.update(self.last_state, self.last_action, self.reward,
                              curr_state, next_actions)
Example #6
    def successors(self, node):
        h = node.state
        args, constraints, pset, neg, neg_mapping, gensym = node.extra

        all_args = set(s for x in h.union(constraints)
                       for s in extract_strings(x) if is_variable(s))

        if len(pset) == 0:
            return

        p, pm = choice(pset)
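        # Index one randomly chosen positive example (facts p, partial
        # mapping pm) so the hypothesis can be matched against it below.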
        p_index = build_index(p)

        operator = Operator(tuple(('Rule', ) + tuple(all_args)),
                            h.union(constraints), [])

        # operator = Operator(tuple(('Rule',) + args), h, [])

        found = False
        for m in operator.match(p_index, initial_mapping=pm):
            reverse_m = {m[a]: a for a in m}
            pos_partial = set([rename(reverse_m, x) for x in p])
            found = True
            break

        if not found:
            return

        n_index = build_index(neg)
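        # Match the hypothesis against the negative example as well; if no
        # match is found, this node yields no successors.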
        found = False
        for nm in operator.match(n_index, initial_mapping=neg_mapping):
            reverse_nm = {nm[a]: a for a in nm}
            neg_partial = set([rename(reverse_nm, x) for x in neg])
            found = True
            break

        if not found:
            return

        unique_pos = pos_partial - neg_partial
        unique_neg = neg_partial - pos_partial
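        # Literals that appear only in the positive partial match are the
        # candidates used to specialize the hypothesis below.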

        # Yield all minimum specializations of current vars
        for a in m:
            # TODO make sure m[a] is a minimum specialization
            sub_m = {a: m[a]}
            new_h = frozenset([subst(sub_m, ele) for ele in h])
            yield Node(new_h, node, ('specializing', (a, m[a])),
                       node.cost() + 1, node.extra)

        # Add Negations for all neg specializations
        # for a in nm:
        #     sub_nm = {a: nm[a]}
        #     new_nh = set()
        #     for ele in h:
        #         new = subst(sub_nm, ele)
        #         if new != ele and new not in h:
        #             new_nh.add(('not', new))
        #     new_h = h.union(new_nh)
        #     print("NEGATION SPECIALIZATION", new_nh)
        #     yield Node(new_h, node, ('negation specialization', (a, nm[a])),
        #                node.cost()+1, node.extra)

        # If there are existing variables, add only relations that share a
        # variable with them
        if len(all_args) > 0:
            added = set()
            for literal in unique_pos:
                if literal in h or literal in constraints:
                    continue
                args = set(s for s in extract_strings(literal)
                           if is_variable(s))
                if len(args.intersection(all_args)) > 0:
                    key = (literal[0], ) + tuple(
                        ele if is_variable(ele) else '?'
                        for ele in literal[1:])
                    if key in added:
                        continue
                    added.add(key)

                    literal = generalize_literal(literal, gensym)
                    new_h = h.union(frozenset([literal]))
                    yield Node(new_h, node, ('adding current', literal),
                               node.cost() + 1, node.extra)

        else:
            added = set()
            for literal in unique_pos:
                if literal in h or literal in constraints:
                    continue
                if literal[0] in added:
                    continue
                added.add(literal[0])
                literal = generalize_literal(literal, gensym)
                new_h = h.union(frozenset([literal]))
                yield Node(new_h, node, ('adding', literal),
                           node.cost() + 1, node.extra)