Example #1
    def get_matches(self, x, constraints=None, epsilon=0.0):
        """
        Yields the argument tuples that the learned operator matches against
        in the example x. The constraints argument is currently unused.
        """
        if self.operator is None:
            return

        # Ground the relational (tuple-keyed) facts in x and index them.
        grounded = [(ground(a), x[a]) for a in x if isinstance(a, tuple)]
        index = build_index(grounded)

        for m in self.operator.match(index, epsilon=epsilon):
            # Apply the binding m to the operator's argument names and
            # convert them back to ungrounded ('?'-prefixed) form.
            result = tuple(unground(subst(m, ele))
                           for ele in self.operator.name[1:])
            yield result
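
The variants below differ mainly in where the hypothesis comes from, but they all share the same grounding-and-indexing step. A minimal sketch of that step, assuming ground and build_index from the surrounding module are in scope and that relational facts are keyed by (relation, arg, ...) tuples (an assumption based on these examples, not something they confirm):

# Hypothetical example state; the fact encoding is an assumption.
x = {('on', 'block1', 'block2'): True,
     ('clear', 'block1'): True,
     'step-count': 3}

# Only tuple-keyed (relational) facts are grounded and indexed; other keys
# are ignored, exactly as in get_matches above.
grounded = [(ground(a), x[a]) for a in x if isinstance(a, tuple)]
index = build_index(grounded)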
Example #2
    def get_matches(self, x, epsilon=0.0):
        """
        Yields the argument tuples matched by any hypothesis in the
        learner's current hypothesis set.
        """
        # Ground the example's relational facts and index them for matching.
        grounded = self.ground_example(x)
        index = build_index(grounded)

        for h in self.learner.get_hset():
            # Build a rule from the hypothesis, including the initial args.
            operator = Operator(tuple(('Rule', ) + self.args), h, [])

            for m in operator.match(index, epsilon=epsilon):
                # Apply the binding m to the rule's argument names and
                # convert them back to ungrounded ('?'-prefixed) form.
                result = tuple(unground(subst(m, ele))
                               for ele in operator.name[1:])
                yield result
Example #3
    def check_match(self, t, x):
        """
        Returns True if the operator predicts a positive label (y == 1) for
        the tuple t in the example x, otherwise False.
        """
        if self.operator is None:
            return False

        # Ground the tuple ('?' variables become 'QM'-prefixed constants)
        # and bind it to the operator's argument names.
        t = tuple(ele.replace('?', "QM") for ele in t)
        mapping = {a: t[i] for i, a in enumerate(self.operator.name[1:])}

        # Ground the relational (tuple-keyed) facts in x and index them.
        grounded = [(ground(a), x[a]) for a in x if isinstance(a, tuple)]
        index = build_index(grounded)

        for m in self.operator.match(index, initial_mapping=mapping):
            return True
        return False
Example #4
    def get_matches(self, x, epsilon=0.0):
        """
        Yields each stored tuple that can still be matched against the
        example x under the current constraints.
        """
        # Ground the example's relational facts and index them for matching.
        grounded = self.ground_example(x)
        index = build_index(grounded)

        for t in self.tuples:
            # Bind the tuple's elements to the rule's arguments.
            mapping = {a: t[i] for i, a in enumerate(self.args)}
            operator = Operator(tuple(('Rule', ) + self.args),
                                frozenset().union(self.constraints), [])

            for m in operator.match(index,
                                    epsilon=epsilon,
                                    initial_mapping=mapping):
                # One match is enough; convert back to '?'-variable form.
                result = tuple(ele.replace("QM", '?') for ele in t)
                yield result
                break
Example #5
    def check_match(self, t, x):
        """
        Returns True if the tuple t has been stored and still matches the
        example x under the current constraints, otherwise False.
        """
        t = tuple(ground(ele) for ele in t)

        # Bind the tuple's elements to the rule's arguments.
        mapping = {a: t[i] for i, a in enumerate(self.args)}

        if t not in self.tuples:
            return False

        # Ground the example's relational facts and index them for matching.
        grounded = self.ground_example(x)
        index = build_index(grounded)

        # Build a rule from the constraints, including the initial args.
        operator = Operator(tuple(('Rule', ) + self.args),
                            frozenset().union(self.constraints), [])
        for m in operator.match(index, initial_mapping=mapping):
            return True
        return False
Example #6
def covers(h, x, initial_mapping):
    """
    Returns True if the hypothesis h covers the example x under the given
    initial variable bindings.
    """
    index = build_index(x)
    operator = Operator(tuple(['Rule']), h, [])
    for m in operator.match(index, initial_mapping=initial_mapping):
        return True
    return False
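
A minimal usage sketch for covers, assuming build_index and Operator from the surrounding module are in scope, that examples are lists of ((relation, args...), value) pairs as in the grounding code above, and that hypotheses are frozensets of literals with '?'-prefixed variables; these encodings are assumptions inferred from the other examples, not something this snippet confirms.

# Hypothetical toy data; the fact and literal encodings are assumptions.
x = [(('on', 'block1', 'block2'), True),
     (('on', 'block2', 'table'), True)]
h = frozenset([('on', '?a', '?b')])

# True if some binding of ?a/?b satisfies every literal in h.
print(covers(h, x, {}))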
Example #7
    def get_matches(self, x, epsilon=0.0):
        """
        Yields each stored tuple that can still be matched against the
        "flat_ungrounded" view of the example x.
        """
        x = x.get_view("flat_ungrounded")

        grounded = self.ground_example(x)
        index = build_index(grounded)

        for t in self.tuples:
            # Bind the tuple's elements to the rule's arguments.
            mapping = {a: t[i] for i, a in enumerate(self.args)}
            operator = Operator(tuple(('Rule', ) + self.args),
                                frozenset().union(self.constraints), [])

            for m in operator.match(index,
                                    epsilon=epsilon,
                                    initial_mapping=mapping):
                # One match is enough; convert back to '?'-variable form.
                result = tuple(ele.replace("QM", '?') for ele in t)
                yield result
                break
Example #8
    def is_specialization(self, s, h):
        """
        Takes two hypotheses s and h and returns True if s is a
        specialization of h. Note, it returns False if s and h are equal
        (s is not a specialization in this case).
        """
        if s == h:
            return False

        # Remove vars from s, so the unification isn't going in both
        # directions.
        s = set(remove_vars(l) for l in s)

        # Check if h matches s (then s specializes h).
        index = build_index(s)
        operator = Operator(tuple(['Rule']), h, [])
        for m in operator.match(index):
            return True
        return False
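
To make the direction of the check concrete, here is a hedged standalone sketch of the same idea with toy hypotheses; remove_vars, build_index, and Operator are assumed to be in scope and to behave as in the method above, and the literal shapes are assumptions.

# Hypothetical hypotheses: s binds ?y to a constant, so it should be a
# specialization of the more general h.
h = frozenset([('on', '?x', '?y')])
s = frozenset([('on', '?x', 'table')])

# As in is_specialization: strip s of its variables, then check whether the
# more general h still matches it.
s_ground = set(remove_vars(l) for l in s)
index = build_index(s_ground)
operator = Operator(tuple(['Rule']), h, [])
print(any(True for m in operator.match(index)))  # expected True if the
                                                 # encodings above are right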
Example #9
    def check_match(self, t, x):
        """
        Returns True if the tuple t has been stored and still matches the
        "flat_ungrounded" view of the example x, otherwise False.
        """
        x = x.get_view("flat_ungrounded")

        t = tuple(ground(ele) for ele in t)

        # Bind the tuple's elements to the rule's arguments.
        mapping = {a: t[i] for i, a in enumerate(self.args)}

        if t not in self.tuples:
            return False

        grounded = self.ground_example(x)
        index = build_index(grounded)

        # Build a rule from the constraints, including the initial args.
        operator = Operator(tuple(('Rule', ) + self.args),
                            frozenset().union(self.constraints), [])
        for m in operator.match(index, initial_mapping=mapping):
            return True
        return False
Example #10
    def check_match(self, t, x):
        """
        Returns True if any hypothesis in the learner's current hypothesis
        set matches the tuple t in the example x, otherwise False.
        """
        # Ground the tuple ('?' variables become 'QM'-prefixed constants)
        # and bind it to the rule's arguments.
        t = tuple(ele.replace('?', "QM") for ele in t)
        mapping = {a: t[i] for i, a in enumerate(self.args)}

        # Ground the example's relational facts and index them for matching.
        grounded = self.ground_example(x)
        index = build_index(grounded)

        for h in self.learner.get_hset():
            # Build a rule from the hypothesis, including the initial args.
            operator = Operator(tuple(('Rule', ) + self.args), h, [])
            for m in operator.match(index, initial_mapping=mapping):
                return True
        return False
Example #11
def optimize_clause(h, constraints, seed, pset, nset):
    """
    Optimizes the clause h over the positive (pset) and negative (nset)
    example sets, using simulated annealing over the literal choices derived
    from the seed example's bottom clause. Returns the best clause found.
    """
    c_length = clause_length(h)
    print('# POS', len(pset))
    print('# NEG', len(nset))
    print('length', c_length)

    p_covered, n_covered = test_coverage(h, constraints, pset, nset)
    p_uncovered = [p for p in pset if p not in p_covered]
    n_uncovered = [n for n in nset if n not in n_covered]
    print("P COVERED", len(p_covered))
    print("N COVERED", len(n_covered))
    initial_score = clause_score(clause_accuracy_weight, len(p_covered),
                                 len(p_uncovered), len(n_covered),
                                 len(n_uncovered), c_length)

    p, pm = seed
    pos_partial = list(compute_bottom_clause(p, pm))

    # TODO if we wanted we could add the introduction of new variables to the
    # get_variablizations function.

    # For each literal in the bottom clause, the candidate choices are: drop
    # it (None), keep it as is, or use one of its variablizations.
    possible_literals = {}
    for i, l in enumerate(pos_partial):
        possible_literals[i] = [None, l] + [v for v in get_variablizations(l)]
    partial_literals = set(
        [l for i in possible_literals for l in possible_literals[i]])

    # Literals in h that do not come from the bottom clause are mapped back
    # onto it using the first binding of h (plus constraints) against the
    # seed example; at least one such binding is assumed to exist.
    additional_literals = h - partial_literals

    if len(additional_literals) > 0:
        p_index = build_index(p)
        operator = Operator(tuple(('Rule', )), h.union(constraints), [])
        for add_m in operator.match(p_index, initial_mapping=pm):
            break
        additional_lit_mapping = {
            rename(add_m, l): l
            for l in additional_literals
        }
        for l in additional_lit_mapping:
            new_l = additional_lit_mapping[l]
            reverse_m = {pm[a]: a for a in pm}
            l = rename(reverse_m, l)
            if l not in pos_partial:
                raise ValueError(
                    "additional literal %s not found in pos_partial" % (l,))
            possible_literals[pos_partial.index(l)].append(new_l)

    reverse_pl = {
        l: (i, j)
        for i in possible_literals for j, l in enumerate(possible_literals[i])
    }

    # Encode h as a vector of choices: clause_vector[i] == j selects option j
    # for bottom-clause position i (0 means the literal is dropped).
    clause_vector = [0 for i in range(len(possible_literals))]
    for l in h:
        if l not in reverse_pl:
            raise ValueError("literal %s from h is missing from the "
                             "possible literals" % (l,))
        i, j = reverse_pl[l]
        clause_vector[i] = j
    clause_vector = tuple(clause_vector)

    flip_weights = [(len(possible_literals[i]) - 1, i)
                    for i in possible_literals]
    size = 1
    for w, _ in flip_weights:
        size *= (w + 1)
    print("SIZE OF SEARCH SPACE:", size)

    num_successors = sum([w for w, c in flip_weights])
    print("NUM SUCCESSORS", num_successors)
    # temp_length = num_successors
    temp_length = 10
    print("TEMP LENGTH", temp_length)
    print('INITIAL SCORE', initial_score)

    problem = ClauseOptimizationProblem(clause_vector,
                                        initial_cost=-1 * initial_score,
                                        extra=(possible_literals, flip_weights,
                                               constraints, pset, nset,
                                               len(p_uncovered),
                                               len(n_covered)))
    # Return the first (best) solution produced by the annealing search.
    # for sol in hill_climbing(problem):
    for sol in simulated_annealing(problem, temp_length=temp_length):
        print('FINAL SCORE', -1 * sol.cost())
        return build_clause(sol.state, possible_literals)
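
The clause_vector built above is the search state: position i selects one entry of possible_literals[i], with index 0 (None) meaning the literal is dropped. build_clause itself is not shown here, so the following toy decoding is only a hedged illustration of that encoding, not its actual implementation.

# Hypothetical literal options for a two-literal bottom clause.
possible_literals = {
    0: [None, ('on', '?x', '?y'), ('on', '?x', '?new1')],
    1: [None, ('clear', '?x')],
}

# The vector (2, 1) keeps a variablized first literal and the second literal;
# a 0 in either slot would drop that literal from the clause.
clause_vector = (2, 1)
clause = frozenset(possible_literals[i][j]
                   for i, j in enumerate(clause_vector)
                   if possible_literals[i][j] is not None)
print(clause)  # a frozenset with ('on', '?x', '?new1') and ('clear', '?x')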
Example #12
    def successors(self, node):
        """
        Generates successor nodes for the given search node, where each
        successor's state is a specialization of the node's hypothesis h.
        """
        h = node.state
        args, constraints, pset, neg, neg_mapping, gensym = node.extra

        # All variables currently appearing in h or the constraints.
        all_args = set(s for x in h.union(constraints)
                       for s in extract_strings(x) if is_variable(s))

        if len(pset) == 0:
            return

        p, pm = choice(pset)
        p_index = build_index(p)

        operator = Operator(tuple(('Rule', ) + tuple(all_args)),
                            h.union(constraints), [])

        # Find a binding of h against the sampled positive example and use it
        # to express that example in terms of h's variables.
        found = False
        for m in operator.match(p_index, initial_mapping=pm):
            reverse_m = {m[a]: a for a in m}
            pos_partial = set([rename(reverse_m, x) for x in p])
            found = True
            break

        if not found:
            return

        # Do the same against the negative example.
        n_index = build_index(neg)
        found = False
        for nm in operator.match(n_index, initial_mapping=neg_mapping):
            reverse_nm = {nm[a]: a for a in nm}
            neg_partial = set([rename(reverse_nm, x) for x in neg])
            found = True
            break

        if not found:
            return

        unique_pos = pos_partial - neg_partial
        unique_neg = neg_partial - pos_partial

        # Yield all minimum specializations of current vars.
        for a in m:
            # TODO make sure m[a] is a minimum specialization
            sub_m = {a: m[a]}
            new_h = frozenset([subst(sub_m, ele) for ele in h])
            yield Node(new_h, node, ('specializing', (a, m[a])),
                       node.cost() + 1, node.extra)

        # Note: specializations that add negated literals drawn from the
        # negative-example binding (unique_neg) are currently disabled.

        # If there are current vars, then add all relations from the positive
        # partial match that share a variable with them.
        if len(all_args) > 0:
            added = set()
            for literal in unique_pos:
                if literal in h or literal in constraints:
                    continue
                lit_args = set(s for s in extract_strings(literal)
                               if is_variable(s))
                if len(lit_args.intersection(all_args)) > 0:
                    key = (literal[0], ) + tuple(
                        ele if is_variable(ele) else '?'
                        for ele in literal[1:])
                    if key in added:
                        continue
                    added.add(key)

                    literal = generalize_literal(literal, gensym)
                    new_h = h.union(frozenset([literal]))
                    yield Node(new_h, node, ('adding current', literal),
                               node.cost() + 1, node.extra)

        else:
            # No variables yet: add one (generalized) literal per relation
            # name from the positive partial match.
            added = set()
            for literal in unique_pos:
                if literal in h or literal in constraints:
                    continue
                if literal[0] in added:
                    continue
                added.add(literal[0])
                literal = generalize_literal(literal, gensym)
                new_h = h.union(frozenset([literal]))
                yield Node(new_h, node, ('adding', literal),
                           node.cost() + 1, node.extra)