Code example #1
    def successors(self, node):
        h = node.state
        constraints, c_length, pset, nset, gensym = node.extra

        if len(pset) == 0:
            return

        p, pm = choice(pset)
        p_index = build_index(p)

        operator = Operator(tuple(('Rule', )), h.union(constraints), [])

        found = False
        for m in operator.match(p_index, initial_mapping=pm):
            reverse_m = {m[a]: a for a in m}
            pos_partial = set([rename(reverse_m, x) for x in p])
            found = True
            break

        if not found:
            return

        # specialize current variables using pset?
        for var in m:
            limited_m = {var: m[var]}
            new_h = frozenset([subst(limited_m, l) for l in h])

            new_pset, new_nset = test_coverage(new_h, constraints, pset, nset)
            new_c_length = c_length + 1
            score = self.score(len(new_pset), len(new_nset),
                               0.01 * new_c_length)

            yield Node(new_h,
                       node, ('specializing var', var, m[var]),
                       -1 * score,
                       extra=(constraints, new_c_length, new_pset, new_nset,
                              gensym))

        # add new literals from pset
        for l in pos_partial:
            if l not in h:
                l = generate_literal(l[0], len(l) - 1, gensym)
                # l = generalize_literal(l, gensym)

                new_h = h.union([l])

                new_pset, new_nset = test_coverage(new_h, constraints, pset,
                                                   nset)

                new_c_length = c_length + 1

                score = self.score(len(new_pset), len(new_nset),
                                   0.01 * new_c_length)

                yield Node(new_h,
                           node, ('add literal', l),
                           -1 * score,
                           extra=(constraints, new_c_length, new_pset,
                                  new_nset, gensym))
Code example #2
    def successors(self, node):
        h = node.state
        (constraints, c_length, p_uncovered, n_uncovered, pset, nset,
         gensym) = node.extra

        # remove literals
        for literal in h:
            removable = True
            for ele in literal[1:]:
                if not is_variable(ele):
                    removable = False
                    break
                if (count_occurances(ele, h) > 1):
                    removable = False
                    break

            if removable:
                new_h = frozenset(x for x in h if x != literal)
                new_pset, new_nset = test_coverage(new_h, constraints,
                                                   p_uncovered, n_uncovered)
                new_p_uncovered = [p for p in p_uncovered if p not in new_pset]
                new_n_uncovered = [n for n in n_uncovered if n not in new_nset]
                new_c_length = c_length - 1
                # new_c_length = clause_length(new_h)
                score = self.score(
                    len(pset) - len(new_p_uncovered),
                    len(nset) - len(new_n_uncovered), len(pset), len(nset),
                    new_c_length)

                yield Node(new_h,
                           node, ('remove literal', literal),
                           -1 * score,
                           extra=(constraints, new_c_length, new_p_uncovered,
                                  new_n_uncovered, pset, nset, gensym))

        # replace constants with variables.
        for literal in h:
            for new_l in get_variablizations(literal, gensym):
                new_h = frozenset([x if x != literal else new_l for x in h])
                new_pset, new_nset = test_coverage(new_h, constraints,
                                                   p_uncovered, n_uncovered)
                new_p_uncovered = [p for p in p_uncovered if p not in new_pset]
                new_n_uncovered = [n for n in n_uncovered if n not in new_nset]
                new_c_length = c_length - 1
                # new_c_length = clause_length(new_h)
                score = self.score(
                    len(pset) - len(new_p_uncovered),
                    len(nset) - len(new_n_uncovered), len(pset), len(nset),
                    new_c_length)

                yield Node(new_h,
                           node, ('variablize', literal, new_l),
                           -1 * score,
                           extra=(constraints, new_c_length, new_p_uncovered,
                                  new_n_uncovered, pset, nset, gensym))
Code example #3
def clause_vector_score(v, possible_literals, constraints, pset, nset):
    h = build_clause(v, possible_literals)
    l = clause_length(h)
    p_covered, n_covered = test_coverage(h, constraints, pset, nset)
    return clause_score(clause_accuracy_weight, len(p_covered),
                        len(pset) - len(p_covered), len(n_covered),
                        len(nset) - len(n_covered), l)
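The helper build_clause is not included in these excerpts. Based on how the clause vector and the possible_literals table are constructed in code example #7 (each position i holds [None, literal, variablizations...] and clause_vector[i] picks one entry), a minimal sketch might look like the following; this decoding is an assumption for illustration, not the library's actual implementation.

def build_clause(clause_vector, possible_literals):
    # Sketch under the assumption that position i of the vector selects entry
    # j from possible_literals[i]; entry 0 is None, meaning that literal is
    # dropped from the clause entirely.
    return frozenset(possible_literals[i][j]
                     for i, j in enumerate(clause_vector)
                     if possible_literals[i][j] is not None)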
Code example #4
    def ifit(self, t, x, y):
        """
        Incrementally updates the hypothesis with a new example: t binds the
        target arguments, x is the example state, and y is 1 (positive) or
        0 (negative).
        """
        mapping = {a: t[i] for i, a in enumerate(self.args)}

        if y == 1:
            self.pset.append((x, mapping))
        elif y == 0:
            self.nset.append((x, mapping))
        else:
            raise Exception("y must be 0 or 1")

        if self.h is None and y == 1:
            self.h = self.compute_bottom_clause(x, mapping)

        if self.h is not None:
            self.h = optimize_clause(self.h, self.constraints, self.pset,
                                     self.nset, lambda: self.gensym())
            c_length = clause_length(self.h)
            p_covered, n_covered = test_coverage(self.h, self.constraints,
                                                 self.pset, self.nset)
            p_uncovered = [p for p in self.pset if p not in p_covered]
            n_uncovered = [n for n in self.nset if n not in n_covered]
            score = clause_score(clause_accuracy_weight, len(p_covered),
                                 len(p_uncovered), len(n_covered),
                                 len(n_uncovered), c_length)

            print("OVERALL SCORE", score)
Code example #5
def generalize(h, constraints, pset, nset, gensym, depth_limit=10):
    """
    Searches for generalizations of h using branch and bound, scoring each
    candidate by its coverage of pset and nset and its clause length, and
    returns the first solution found.
    """
    c_length = clause_length(h)
    psubset, nsubset = test_coverage(h, constraints, pset, nset)
    p_uncovered = [p for p in pset if p not in psubset]
    n_uncovered = [n for n in nset if n not in nsubset]
    initial_score = clause_score(clause_accuracy_weight, len(psubset),
                                 len(nsubset), len(pset), len(nset), c_length)
    problem = GeneralizationProblem(h,
                                    initial_cost=-1 * initial_score,
                                    extra=(constraints, c_length, p_uncovered,
                                           n_uncovered, pset, nset, gensym))
    for sol in branch_and_bound(problem, depth_limit=depth_limit):
        print("SOLUTION FOUND", sol.state)
        return sol.state
Code example #6
def optimize_clause(h, constraints, pset, nset, gensym):
    """
    Searches over generalizations and specializations of h using simulated
    annealing, scoring candidates by coverage of pset and nset and clause
    length, and returns the first solution found.
    """
    c_length = clause_length(h)
    p_covered, n_covered = test_coverage(h, constraints, pset, nset)
    p_uncovered = [p for p in pset if p not in p_covered]
    n_uncovered = [n for n in nset if n not in n_covered]
    initial_score = clause_score(clause_accuracy_weight, len(p_covered),
                                 len(p_uncovered), len(n_covered),
                                 len(n_uncovered), c_length)
    print('INITIAL SCORE', initial_score)
    problem = ClauseOptimizationProblem(h,
                                        initial_cost=-1 * initial_score,
                                        extra=(constraints, c_length,
                                               p_covered, p_uncovered,
                                               n_covered, n_uncovered, gensym))
    for sol in simulated_annealing(problem):
        print("SOLUTION FOUND", sol.state)
        return sol.state
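clause_score itself is also not included in these excerpts. The calls in code examples #3, #4, and #6 pass a weight (clause_accuracy_weight), the covered/uncovered counts for positives and negatives, and the clause length, so a plausible reading is a length-penalized weighted accuracy; note that examples #2 and #5 pass arguments in a different order (covered counts followed by totals), so the exact signature evidently varies across versions. The body below is purely an assumption for illustration.

def clause_score(weight, n_pos_covered, n_pos_uncovered,
                 n_neg_covered, n_neg_uncovered, length):
    # Assumed sketch only: weighted accuracy (reward covered positives and
    # rejected negatives) minus a small penalty per unit of clause length.
    pos_total = n_pos_covered + n_pos_uncovered
    neg_total = n_neg_covered + n_neg_uncovered
    correct = weight * n_pos_covered + (1 - weight) * n_neg_uncovered
    total = weight * pos_total + (1 - weight) * neg_total
    accuracy = correct / total if total > 0 else 0.0
    return accuracy - 0.01 * length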
Code example #7
def optimize_clause(h, constraints, pset, nset):
    """
    Optimizes h by encoding it as a clause vector over the literal forms of a
    bottom clause and searching that space with simulated annealing; returns
    the first solution found, decoded back into a clause.
    """
    c_length = clause_length(h)
    p_covered, n_covered = test_coverage(h, constraints, pset, nset)
    p_uncovered = [p for p in pset if p not in p_covered]
    n_uncovered = [n for n in nset if n not in n_covered]
    initial_score = clause_score(clause_accuracy_weight, len(p_covered),
                                 len(p_uncovered), len(n_covered),
                                 len(n_uncovered), c_length)
    p, pm = choice(p_covered)
    pos_partial = list(compute_bottom_clause(p, pm))
    # print('POS PARTIAL', pos_partial)

    # TODO if we wanted we could add the introduction of new variables to the
    # get_variablizations function.
    possible_literals = {}
    for i, l in enumerate(pos_partial):
        possible_literals[i] = [None, l] + [v for v in get_variablizations(l)]
    partial_literals = set(
        [l for i in possible_literals for l in possible_literals[i]])

    additional_literals = h - partial_literals

    if len(additional_literals) > 0:
        p_index = build_index(p)
        operator = Operator(tuple(('Rule', )), h.union(constraints), [])
        for add_m in operator.match(p_index, initial_mapping=pm):
            break
        additional_lit_mapping = {
            rename(add_m, l): l
            for l in additional_literals
        }
        for l in additional_lit_mapping:
            new_l = additional_lit_mapping[l]
            print(pos_partial)
            print(add_m)
            print(l)
            print(new_l)
            possible_literals[pos_partial.index(l)].append(new_l)

    reverse_pl = {
        l: (i, j)
        for i in possible_literals for j, l in enumerate(possible_literals[i])
    }

    clause_vector = [0 for i in range(len(possible_literals))]
    for l in h:
        i, j = reverse_pl[l]
        clause_vector[i] = j
    clause_vector = tuple(clause_vector)

    flip_weights = [(len(possible_literals[i]) - 1, i)
                    for i in possible_literals]
    # size = 1
    # for w, _ in flip_weights:
    #     size *= (w + 1)
    # print("SIZE OF SEARCH SPACE:", size)

    num_successors = sum([w for w, c in flip_weights])
    temp_length = 2 * num_successors
    # print("TEMP LENGTH", temp_length)
    # print('INITIAL SCORE', initial_score)
    problem = ClauseOptimizationProblem(clause_vector,
                                        initial_cost=-1 * initial_score,
                                        extra=(possible_literals, flip_weights,
                                               constraints, pset, nset))
    # for sol in hill_climbing(problem):
    for sol in simulated_annealing(problem, temp_length=temp_length):
        # print("SOLUTION FOUND", sol.state)
        return build_clause(sol.state, possible_literals)
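To make the vector encoding above concrete, here is a small hypothetical illustration; the literals and variablization order are made up, only the shape of the encoding follows the code in this example.

# Hypothetical toy values, for illustration of the encoding only.
possible_literals = {
    0: [None, ('on', 'a', 'b'), ('on', '?x', 'b'), ('on', 'a', '?y')],
    1: [None, ('clear', 'a'), ('clear', '?x')],
}
clause_vector = (2, 0)  # position 0 -> ('on', '?x', 'b'); position 1 -> dropped
# Under the build_clause decoding sketched after code example #3, this vector
# would yield the clause {('on', '?x', 'b')}.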
Code example #8
    def gen_specializations(self, node):
        h = node.state
        (constraints, c_length, p_covered, p_uncovered, n_covered, n_uncovered,
         gensym) = node.extra

        if len(p_covered) == 0:
            return

        p, pm = choice(p_covered)
        p_index = build_index(p)

        operator = Operator(tuple(('Rule', )), h.union(constraints), [])

        found = False
        for m in operator.match(p_index, initial_mapping=pm):
            reverse_m = {m[a]: a for a in m}
            pos_partial = set([rename(reverse_m, x) for x in p])
            found = True
            break

        if not found:
            return

        # specialize current variables using pset?
        for var in m:
            limited_m = {var: m[var]}
            new_h = frozenset([subst(limited_m, l) for l in h])

            new_p_subset, new_n_subset = test_coverage(new_h, constraints,
                                                       p_covered, n_covered)
            new_p_covered = new_p_subset
            new_p_uncovered = p_uncovered + [
                p for p in p_covered if p not in new_p_subset
            ]
            new_n_covered = new_n_subset
            new_n_uncovered = n_uncovered + [
                n for n in n_covered if n not in new_n_subset
            ]
            new_c_length = c_length + 1
            score = self.score(len(new_p_covered), len(new_p_uncovered),
                               len(new_n_covered), len(new_n_uncovered),
                               new_c_length)

            yield Node(new_h,
                       None,
                       None,
                       -1 * score,
                       extra=(constraints, new_c_length, new_p_covered,
                              new_p_uncovered, new_n_covered, new_n_uncovered,
                              gensym))

        # add new literals from pset
        for l in pos_partial:
            if l not in h:
                l = generate_literal(l[0], len(l) - 1, gensym)
                # l = generalize_literal(l, gensym)

                new_h = h.union([l])

                new_p_subset, new_n_subset = test_coverage(
                    new_h, constraints, p_covered, n_covered)
                new_p_covered = new_p_subset
                new_p_uncovered = p_uncovered + [
                    p for p in p_covered if p not in new_p_subset
                ]
                new_n_covered = new_n_subset
                new_n_uncovered = n_uncovered + [
                    n for n in n_covered if n not in new_n_subset
                ]
                new_c_length = c_length + 1
                score = self.score(len(new_p_covered), len(new_p_uncovered),
                                   len(new_n_covered), len(new_n_uncovered),
                                   new_c_length)

                yield Node(new_h,
                           None,
                           None,
                           -1 * score,
                           extra=(constraints, new_c_length, new_p_covered,
                                  new_p_uncovered, new_n_covered,
                                  new_n_uncovered, gensym))
Code example #9
    def gen_generalizations(self, node):
        h = node.state
        (constraints, c_length, p_covered, p_uncovered, n_covered, n_uncovered,
         gensym) = node.extra

        # remove literals
        for literal in h:
            removable = True
            for ele in literal[1:]:
                if not is_variable(ele):
                    removable = False
                    break
                if (count_occurances(ele, h) > 1):
                    removable = False
                    break

            if removable:
                new_h = frozenset(x for x in h if x != literal)
                new_pc_subset, new_nc_subset = test_coverage(
                    new_h, constraints, p_uncovered, n_uncovered)
                new_p_covered = p_covered + new_pc_subset
                new_n_covered = n_covered + new_nc_subset
                new_p_uncovered = [
                    p for p in p_uncovered if p not in new_pc_subset
                ]
                new_n_uncovered = [
                    n for n in n_uncovered if n not in new_nc_subset
                ]
                new_c_length = c_length - 1
                score = self.score(len(new_p_covered), len(new_p_uncovered),
                                   len(new_n_covered), len(new_n_uncovered),
                                   new_c_length)

                yield Node(new_h,
                           None,
                           None,
                           -1 * score,
                           extra=(constraints, new_c_length, new_p_covered,
                                  new_p_uncovered, new_n_covered,
                                  new_n_uncovered, gensym))

        # replace constants with variables.
        for literal in h:
            for new_l in get_variablizations(literal, gensym):
                new_h = frozenset([x if x != literal else new_l for x in h])
                new_pc_subset, new_nc_subset = test_coverage(
                    new_h, constraints, p_uncovered, n_uncovered)
                new_p_covered = p_covered + new_pc_subset
                new_n_covered = n_covered + new_nc_subset
                new_p_uncovered = [
                    p for p in p_uncovered if p not in new_pc_subset
                ]
                new_n_uncovered = [
                    n for n in n_uncovered if n not in new_nc_subset
                ]
                new_c_length = c_length - 1
                score = self.score(len(new_p_covered), len(new_p_uncovered),
                                   len(new_n_covered), len(new_n_uncovered),
                                   new_c_length)

                yield Node(new_h,
                           None,
                           None,
                           -1 * score,
                           extra=(constraints, new_c_length, new_p_covered,
                                  new_p_uncovered, new_n_covered,
                                  new_n_uncovered, gensym))