Example no. 1
0
def get_optimal_refinement(**kwargs):
    initsol = kwargs['initsol']
    parent = kwargs['parent']
    prior = kwargs['prior']
    # 'added' holds the as_string value of each clause appended to 'refined';
    # it is used as a set of keys to filter out duplicates.
    refined,added = [],[] 
    if 'search_space_size' not in kwargs: 
        for c in parent.support:
            use_dict = analyze_use_3(parent,c,prior)
            use_3 = asp.find_all_optimal()
            print(use_3)
            if use_3 != []:
                (_,revisable) = functs.filter_retained_(use_3,use_dict)
                for (clause_index,clause) in revisable:
                    revcl = functs.form_revised_(clause_index,clause,use_dict,use_3)
                    revcl = structs.Clause(revcl,gl)
                    #refined.append((clause,revcl))
                    if revcl.as_string not in added:
                        added.append(revcl.as_string)
                        refined.append(revcl)
        if list(set(added)) == [initsol.as_string]:
            # Then we have a problem: all we could find is the initial refinement,
            # which does not subsume the support set. We'll try to split the initial
            # refinement. To do that, we repeat the above process, but this time we
            # increase the size of the fragment of the support set that is used as
            # the search space. We do that by calling this method again with an optional
            # argument that specifies how many support clauses shall be used
            # simultaneously as the search space:
            kwargs.update({'search_space_size':'2'})
            return get_optimal_refinement(**kwargs)        
            # this is buggy (and doesn't fully work):
            # opt = subsumption.find_minimal_subsuming_subset(refined,parent.support,found_initial=True)
        else:        
            opt = subsumption.find_minimal_subsuming_subset(refined,parent.support)        
            update_support((parent,opt))
            return opt
    else:
        # TODO: handle the case where a 'search_space_size' is supplied
        pass
Example no. 2
0
def incremental_kernel_search(**kwargs):
    gl.use_dict = {}
    is_new_example = kwargs['is_new_example'] 
    retained,new,refined = kwargs['retcl'],kwargs['newcl'],kwargs['refcl']
    prior_theory = [x for y in [retained,new,refined] for x in y]
    positive_count = gl.current_example_object.positive_count
    if is_new_example:
        generate_kernel(**kwargs)
        # revise the clauses in the prior theory one by one first
        for c in prior_theory: # TODO
            pass
        # generate new clauses from the Kernel Set, using iterative deepening
        # on the subsets of the Kernel Set, until a solution is found.
        #------------------------------------------------------------------------------
        # TODO: This strategy can be modified to return an approximate solution. This
        # may be useful in cases where a large effort is required to obtain a correct
        # hypothesis, or in cases where a correct hypothesis does not exist (noise).
        # A straightforward strategy towards this is to keep the best hypothesis found
        # within a max_iterations bound.
        #-------------------------------------------------------------------------------
        var_kernel = gl.current_var_kernel 
        found_solution = False
        already_found = []
        for i in range(1,len(var_kernel)+1):
            i_subsets = itertools.combinations(var_kernel, i)
            for subset_ in i_subsets: 
                subset = list(subset_)
                analyze_use_try(subset,[])  
                out = asp.ind(kernel_generalization=True)
                #==============================================================================
                # Test code. Try to add what you get from each subset to the bk in an effort
                # to further reduce solving time. 
                (use_2,use_3) = functs.split_use_2_3(out)
                (ok,use_head_body_map) = functs.head_body_use_atoms_filter(use_2)
                if ok:
                    new = form_new_clauses(use_head_body_map)
                    if new != []:
                        already_found.extend(new)
                        n = '\n\n'.join(map(lambda x: x.as_string_with_var_types, already_found))
                        write_to_file(gl.helper, n)
                else:
                    msg = 'Found a solution use(i,j) atom with no corresponding use(i,0) atom'
                    raise excps.Use_2_HeadNotAbducedException(msg,gl.logger)
                #===============================================================================
                pos,negs,score = get_score(out)
                print(str(i)+'-subsets',pos,negs,score)
                if score == positive_count:
                    #------------------
                    # Debug exit, commented out so that the solution found is actually processed:
                    # print('Found it'); sys.exit()
                    #------------------
                    found_solution = True
                    (use_2,use_3) = functs.split_use_2_3(out)
                    (ok,use_head_body_map) = functs.head_body_use_atoms_filter(use_2)
                    if ok:
                        new = form_new_clauses(use_head_body_map)
                    else:
                        msg = 'Found a solution use(i,j) atom with no corresponding use(i,0) atom'
                        raise excps.Use_2_HeadNotAbducedException(msg,gl.logger)
                    (retained,revisable) = functs.filter_retained_(use_3,gl.use_dict)
                    update_support(retained)
                    break # stop searching i-level subsets
            if found_solution:
                break # stop searching subsets   
        if not found_solution:
            print('unsat at kernel generalization')
            sys.exit()    
    else: # TODO
        pass    
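The TODO in incremental_kernel_search above suggests an anytime variant: rather than exiting when no subset of the Kernel Set reaches a perfect score, keep the best-scoring hypothesis found within an iteration budget. The sketch below shows one possible shape of that idea; score_subset is a hypothetical callable standing in for the analyze_use_try / asp.ind / get_score pipeline and is not part of the actual code.

import itertools

def best_effort_kernel_search(var_kernel, score_subset, positive_count, max_iterations=1000):
    # Iterative deepening over Kernel Set subsets; returns the best-scoring subset
    # found within max_iterations if no subset covers all positive examples.
    best_subset, best_score, iterations = None, -1, 0
    for i in range(1, len(var_kernel) + 1):
        for subset in itertools.combinations(var_kernel, i):
            iterations += 1
            score = score_subset(list(subset))
            if score > best_score:
                best_subset, best_score = list(subset), score
            if score == positive_count or iterations >= max_iterations:
                return best_subset, best_score  # exact solution found or budget exhausted
    return best_subset, best_score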
Example no. 3
0
def revise(**kwargs):
    hs,scs,cls = kwargs['heuristic_search'],kwargs['set_cover_search'],kwargs['clause_level_search']
    special_search = scs or cls
    if not special_search:
        gl.use_dict = {}
        is_new_example = kwargs['is_new_example'] 
        debug_mode = kwargs['debug'] if 'debug' in kwargs else False # keep this optional
        retained,new,refined = kwargs['retcl'],kwargs['newcl'],kwargs['refcl'] 
        if debug_mode: 
            import debug_utils
            prior_theory = debug_utils.py_load_from_file() 
        else:
            prior_theory = [retained,new,refined]
            prior_theory_ = [x for y in prior_theory for x in y]
            if is_new_example:
                #retained.extend(refined) # that's a patch because due to a bug previously refined clauses are left out  
                generate_kernel(**kwargs) 
                if gl.runargs["kernel-set-only"]: # We only need to generate a kernel set
                    sys.exit()
                var_kernel = gl.current_var_kernel
                if 'search_subsets' in kwargs:
                    search_kernel_by_subsets(prior_theory_)   
                elif 'incremental_solve' in kwargs and kwargs['incremental_solve']:
                    (solution,use_2,use_3) = incremental_solve(var_kernel,prior_theory_)
                else:
                    analyze_use_try(gl.current_var_kernel,prior_theory_)
                    out = asp.ind(kernel_generalization=True)
                    (use_2,use_3) = functs.split_use_2_3(out)
                    (ok,use_head_body_map) = functs.head_body_use_atoms_filter(use_2)
                    if ok:
                        new = form_new_clauses(use_head_body_map)
                    else:
                        msg = 'Found a solution use(i,j) atom with no corresponding use(i,0) atom'
                        raise excps.Use_2_HeadNotAbducedException(msg,gl.logger)

                (retained,revisable) = functs.filter_retained_(use_3,gl.use_dict)
                update_support(retained)
            else: # re-seeing past example
                #prior_theory = [retained,refined]
                # Only analyze the new clauses generated last. 
                # Update: this is messy... Analyze it all; there is no serious overhead.
                #analyze_use_try([],new,preserve=[x for y in prior_theory for x in y])
                analyze_use_try([],prior_theory_)
                out = asp.ind(recheck_hist_memory=True)
                (_,use_3) = functs.split_use_2_3(out) # no use/2 here
                (retained,revisable) = functs.filter_retained_(use_3,gl.use_dict) 
            revisable_ = []
            for (clause_index,clause) in revisable:
                #import debug_utils
                #debug_utils.check_on_previous()
                revcl = functs.form_revised_(clause_index,clause,gl.use_dict,use_3)
                revcl = structs.Clause(revcl,gl)
                revisable_.append((clause,revcl))
            incorrects = [inx[0] for inx in revisable_]    
            specialized = []    
            for (incorrect,one_solution) in revisable_: # need to find optimal refinement
                optimal_ref = one_solution
                check_prior = [z for z in prior_theory_ if z not in incorrects]
                if search_more(initsol=one_solution,parent=incorrect,prior=check_prior):
                    other_init_solutions = [x[1] for x in revisable_ if x[1] != one_solution]
                    check_prior.extend(other_init_solutions)
                    optimal_ref = get_optimal_refinement(initsol=one_solution,
                                                     parent=incorrect,
                                                     prior=check_prior)
                if isinstance(optimal_ref,list): # then it went through get_optimal_refinement because that returns a list  
                    specialized.extend(optimal_ref)
                else:
                    if isinstance(optimal_ref,structs.Clause): # then it's the initial solution, no support update yet
                        update_support((incorrect,optimal_ref))# this must be passed as a (parent,child) tuple
                        specialized.append(optimal_ref)
    
        #d = dict(zip(incorrects,specialized))
        if is_new_example:
            (n,r,s) = (new,retained,specialized)
        else:
            (n,r,s) = ([],retained,specialized)                    
    
        updateprior(n,r,s)
        return (n,r,s) 
    else:
        if kwargs['set_cover_search']:
            return set_cover_search(**kwargs)
        elif kwargs['heuristic_search']:
            return heuristic_search(**kwargs)
        else:
            #return incremental_kernel_search.incremental_search(**kwargs)
            return incremental_kernel_search(**kwargs)
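A hypothetical call illustrating how revise dispatches: the boolean flags select one of the specialised search routines, while the default path runs the full kernel-generalisation revision on the clauses passed via retcl, newcl and refcl. Concrete clause objects and the global state in gl are omitted, so this is only a usage sketch, not a runnable invocation against the real modules.

# Default path: no special search strategy, new example, empty prior theory.
(new_clauses, retained_clauses, specialized_clauses) = revise(
    heuristic_search=False,
    set_cover_search=False,
    clause_level_search=False,
    is_new_example=True,
    retcl=[], newcl=[], refcl=[],
)
# Setting set_cover_search=True instead delegates to set_cover_search(**kwargs)
# and returns its result unchanged.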