Example #1
def get_lemma_files_from_sentences(lemmas_name, sentences):

    sentences_files = []
    sentences_names = []

    import math
    # determine maximal number of digits
    digits = int(math.ceil(math.log10(len(sentences))))

    i = 1

    for lemma in sentences:
        name = lemmas_name + '_goal' + ('{0:0' + str(digits) + 'd}').format(i)

        filename = filemgt.get_full_path(
            name,
            folder=filemgt.read_config('ladr', 'folder'),
            ending=filemgt.read_config('ladr', 'ending'))
        output_file = open(filename, 'w')
        output_file.write('formulas(goals).\n')
        output_file.write(lemma + '\n')
        output_file.write('end_of_list.\n')
        output_file.close()
        sentences_files.append(filename)
        sentences_names.append(name)
        i += 1

    return (sentences_names, sentences_files)
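A note on the numbering scheme above: each goal index is zero-padded to a width derived from the total number of sentences. Below is a minimal standalone sketch of just that naming logic; the padded_goal_names helper and its arguments are illustrative and not part of the original API.

import math

def padded_goal_names(base, count):
    # zero-pad each goal index to the width used above; note that at exact
    # powers of ten (e.g. count=10) this yields one digit fewer than needed
    # for uniform padding, mirroring the behaviour of the code above
    digits = int(math.ceil(math.log10(count))) if count > 1 else 1
    return [base + '_goal' + ('{0:0' + str(digits) + 'd}').format(i)
            for i in range(1, count + 1)]

# padded_goal_names('lemmas', 12) -> ['lemmas_goal01', ..., 'lemmas_goal12']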
Example #2
def get_lemma_files_from_sentences (lemmas_name, sentences):

    sentences_files = []
    sentences_names = []

    import math
    # determine maximal number of digits
    digits = int(math.ceil(math.log10(len(sentences))))
    
    i = 1

    for lemma in sentences:
        name = lemmas_name + '_goal' + ('{0:0'+ str(digits) +  'd}').format(i)
        
        filename = filemgt.get_full_path(name, 
                              folder=filemgt.read_config('ladr','folder'), 
                              ending=filemgt.read_config('ladr','ending'))        
        output_file = open(filename, 'w')
        output_file.write('formulas(goals).\n')
        output_file.write(lemma + '\n')
        output_file.write('end_of_list.\n')
        output_file.close()
        sentences_files.append(filename)
        sentences_names.append(name)
        i += 1
    
    return (sentences_names, sentences_files)
Example #3
    def get_single_ladr_file (self, imports = None):
        """get the ClifModuleSet as a single file in LADR syntax."""

        # if the given imports are identical to the module's imports, treat it as if the module's imports were used
        if imports and set(self.imports).issubset(imports) and set(self.imports).issuperset(imports):
            imports = None

        # avoid redundant work if we already have the ladr file
        if not imports and len(self.p9_file_name)>0:
            return self.p9_file_name

        ending = ""
        if not imports:
            ending = filemgt.read_config('ladr','all_ending')
            name = self.module_name
        else:
            ending = filemgt.read_config('ladr','select_ending')
            name = imports[0].module_name
        # construct the final ending
        ending += filemgt.read_config('ladr','ending')
        
        p9_files = self.get_ladr_files(imports)

        p9_file_name = filemgt.get_full_path(name, 
                                           folder=filemgt.read_config('ladr','folder'), 
                                           ending=ending)
        if not imports:
            self.p9_file_name = p9_file_name
            
        #print "FILE NAME:" + self.p9_file_name
        # TODO: need to initialize self.replaceable_symbols
        ladr.cumulate_ladr_files(p9_files, p9_file_name)
        logging.getLogger(__name__).info("CREATED SINGLE LADR TRANSLATION: " + p9_file_name)
        return p9_file_name
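The file ending is assembled from two configuration values ('all_ending' or 'select_ending' plus the base 'ending'). With purely illustrative values, the composition works out like this; the actual strings come from the 'ladr' section of the configuration.

# illustrative values only; the real ones come from the 'ladr' config section
all_ending = '.all'
ladr_ending = '.p9'
ending = all_ending + ladr_ending       # -> '.all.p9'
p9_file_name = 'mymodule' + ending      # -> 'mymodule.all.p9'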
Example #4
    def get_single_tptp_file(self, imports=None):
        """translate the module and all imported modules to a single TPTP file."""

        # if the given imports are identical to the module's imports, treat it as if the module's imports were used
        if imports and set(self.imports).issubset(imports) and set(
                self.imports).issuperset(imports):
            imports = None

        ending = ""

        # avoid redundant work if we already have the tptp file and didn't add a lemma module
        if not imports:
            if len(self.tptp_file_name) > 0 and self.lemma_module is None:
                return self.tptp_file_name
            ending = filemgt.read_config('tptp', 'all_ending')
            name = self.module_name
        else:
            ending = filemgt.read_config('tptp', 'select_ending')
            name = imports[0].module_name
        # construct the final ending
        ending += filemgt.read_config('tptp', 'ending')

        tptp_file_name = filemgt.get_full_path(name,
                                               folder=filemgt.read_config(
                                                   'tptp', 'folder'),
                                               ending=ending)

        if not imports:
            self.tptp_file_name = tptp_file_name
            imports = self.get_imports().copy()

        tptp_sentences = []
        if self.lemma_module:
            imports.remove(self.lemma_module)
            tptp_sentences.append(self.lemma_module.tptp_sentence)

        files_to_translate = [i.clif_processed_file_name for i in imports]
        while None in files_to_translate:
            files_to_translate.remove(None)
        tptp_sentences.extend(
            clif.translate_sentences(files_to_translate, "TPTP"))
        tptp_file = open(tptp_file_name, 'w')
        tptp_file.writelines([t + "\n" for t in tptp_sentences])
        tptp_file.close()

        logging.getLogger(__name__).info("CREATED TPTP TRANSLATION: " +
                                         tptp_file_name)

        return tptp_file_name
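The while-loop that strips None entries above can be expressed as a single filtering comprehension; a standalone illustration with made-up file names:

names = ['a.clif', None, 'b.clif', None]        # stand-ins for clif_processed_file_name values
files_to_translate = [n for n in names if n is not None]   # -> ['a.clif', 'b.clif']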
Example #5
def get_paradox_cmd (imports,output_stem):
    """ we only care about the first element in the list of imports, which will we use as base name to obtain a single tptp file of the imports,
    which is the input for paradox."""
    args = []
    args.append(filemgt.read_config('paradox','command'))
    args.append('--time')
    args.append(filemgt.read_config('paradox','timeout'))
    args.append('--verbose')
    args.append('2')
    args.append('--model')
    args.append('--tstp')
    # append all tptp input files
    args.append(list(imports)[0].get_module_set(imports).get_single_tptp_file(imports))

    return (args, [])
Example #6
def get_p9_cmd(imports, output_stem, option_files=None):
    """get a formatted command to run Prover9 with options (timeout, etc.) set in the class instance."""

    args = []
    args.append(filemgt.read_config('prover9', 'command'))
    args.append('-t' + filemgt.read_config('prover9', 'timeout'))
    args.append('-f')
    # append all ladr input files
    for m in imports:
        args.append(m.get_p9_file_name())
    if option_files:
        for f in option_files:
            args.append(f)

    return (args, [])
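All of these command builders follow the same pattern: look the executable and its flags up in the configuration and return an (args, input_files) pair. Below is a self-contained sketch of that pattern with a stubbed configuration; the CONFIG values and the build_p9_args helper are assumptions for illustration, not part of the original module.

CONFIG = {('prover9', 'command'): 'prover9', ('prover9', 'timeout'): '600'}

def read_config(section, key):
    # stand-in for filemgt.read_config
    return CONFIG[(section, key)]

def build_p9_args(p9_files, option_files=()):
    args = [read_config('prover9', 'command'),
            '-t' + read_config('prover9', 'timeout'),
            '-f']
    args.extend(p9_files)        # all LADR input files
    args.extend(option_files)    # optional Prover9 options files
    return (args, [])

# build_p9_args(['a.p9', 'b.p9']) ->
#   (['prover9', '-t600', '-f', 'a.p9', 'b.p9'], [])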
Example #7
def get_p9_cmd (imports,output_stem, option_files = None):
    """get a formatted command to run Prover9 with options (timeout, etc.) set in the class instance."""

    args = []
    args.append(filemgt.read_config('prover9','command'))
    args.append('-t' + filemgt.read_config('prover9','timeout'))
    args.append('-f')
    # append all ladr input files
    for m in imports:
        args.append(m.get_p9_file_name())
    if option_files:
        for f in option_files:
            args.append(f)
            
    return (args, [])
Example #8
def get_paradox_cmd(imports, output_stem):
    """ we only care about the first element in the list of imports, which will we use as base name to obtain a single tptp file of the imports,
    which is the input for paradox."""
    args = []
    args.append(filemgt.read_config('paradox', 'command'))
    args.append('--time')
    args.append(filemgt.read_config('paradox', 'timeout'))
    args.append('--verbose')
    args.append('2')
    args.append('--model')
    args.append('--tstp')
    # append all tptp input files
    args.append(
        list(imports)[0].get_module_set(imports).get_single_tptp_file(imports))

    return (args, [])
Example #9
def get_m4_cmd(imports, output_stem):
    """get a formatted command to run Mace4 with options (timeout, etc.) set in the class instance."""

    args = []
    args.append(filemgt.read_config('mace4', 'command'))
    args.append('-v0')
    args.append('-t' + filemgt.read_config('mace4', 'timeout'))
    args.append('-s' + filemgt.read_config('mace4', 'timeout_per'))
    args.append('-n' + filemgt.read_config('mace4', 'start_size'))
    args.append('-N' + filemgt.read_config('mace4', 'end_size'))
    args.append('-f')
    # append all ladr input files
    for m in imports:
        args.append(m.get_p9_file_name())

    return (args, [])
Example #10
def get_p9_empty_optionsfile(p9_file_name, verbose=True):

    # currently one option file for all!!
    #print 'OPTIONS FILE - P9 file name = ' + p9_file_name
    options_file_name = os.path.join(
        os.path.dirname(p9_file_name),
        os.path.splitext(os.path.basename(p9_file_name))[0] +
        filemgt.read_config('prover9', 'options_ending'))

    #print 'OPTIONS FILE = ' + options_file_name

    ladr.options_files.append(options_file_name)

    if os.path.isfile(options_file_name):
        return options_file_name
    else:
        #    options_file_name = module_p9_file + '.options'
        options_file = open(options_file_name, 'w')
        options_file.write('clear(auto_denials).\n')
        if not verbose:
            options_file.write('clear(print_initial_clauses).\n')
            options_file.write('clear(print_kept).\n')
            options_file.write('clear(print_given).\n')
            #options_file.write('set(quiet).')
        options_file.close()
        return options_file_name
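With verbose=False, the options file written above contains exactly these Prover9 directives:

clear(auto_denials).
clear(print_initial_clauses).
clear(print_kept).
clear(print_given).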
Example #11
    def __init__(self, name, reasoner_type=None, reasoner_id=None):
        self.identifier = ''

        self.type = Reasoner.PROVER

        self.args = []

        self.positive_returncodes = []

        self.unknown_returncodes = []

        self.modules = []

        self.input_files = ''

        self.output_file = ''

        self.time = -1

        self.return_code = None

        self.output = None

        self.name = name
        if reasoner_type:
            self.type = reasoner_type
        if reasoner_id:
            self.identifier = reasoner_id
        else:
            self.identifier = name
        self.positive_returncodes = commands.get_positive_returncodes(
            self.name)
        self.unknown_returncodes = commands.get_unknown_returncodes(self.name)

        self.timeout = filemgt.read_config(self.name, 'timeout')
Example #12
    def run_simple_consistency_check (self, module_name = None, modules = None, options_files = None):
        """ test the input for consistency by trying to find a model or an inconsistency."""
        # want to create a subfolder for the output files
        outfile_stem = filemgt.get_full_path(self.module_name, 
                                            folder=filemgt.read_config('output','folder')) 
        
        if not module_name:
            module_name = self.module_name
        
        if not modules: 
            modules = self.imports  # use all imports as default set of modules
        
        reasoners = ReasonerSet() 
        reasoners.constructAllCommands(modules, outfile_stem)
        logging.getLogger(__name__).info("USING " + str(len(reasoners)) + " REASONERS: " + str([r.name for r in reasoners]))
        
        # run provers and modelfinders simultaneously and wait until one returns
        reasoners = process.raceProcesses(reasoners)

        # this captures our return code (consistent/inconsistent/unknown), not the reasoning processes' return codes
        (return_value, fastest_reasoner) = self.consolidate_results(reasoners)    

        if len(modules)==0:
            self.pretty_print_result(module_name + " (without imports)", return_value)
        else:
            self.pretty_print_result(module_name + " (with imports = " + str(modules) + ")", return_value)
        
        results = []
        results.append((tuple(modules), return_value, fastest_reasoner))
        #print str(results)
        return results
Example #13
def get_m4_cmd (imports,output_stem):
    """get a formatted command to run Mace4 with options (timeout, etc.) set in the class instance."""

    args = []
    args.append(filemgt.read_config('mace4','command'))
    args.append('-v0')
    args.append('-t' + filemgt.read_config('mace4','timeout'))
    args.append('-s' + filemgt.read_config('mace4','timeout_per'))
    args.append('-n' + filemgt.read_config('mace4','start_size'))
    args.append('-N' + filemgt.read_config('mace4','end_size'))
    args.append('-f')
    # append all ladr input files
    for m in imports:
        args.append(m.get_p9_file_name())
    
    return (args, [])
Example #14
    def __init__(self, name, reasoner_type=None, reasoner_id=None):
        self.identifier = ''
        
        self.type = Reasoner.PROVER
        
        self.args = []
        
        self.positive_returncodes = []
        
        self.unknown_returncodes = []
        
        self.modules = []
        
        self.input_files = ''
        
        self.output_file = ''
        
        self.time = -1
        
        self.return_code = None
        
        self.output = None

        self.name = name
        if reasoner_type:
            self.type = reasoner_type
        if reasoner_id:
            self.identifier = reasoner_id
        else:
            self.identifier = name
        self.positive_returncodes = commands.get_positive_returncodes(self.name)
        self.unknown_returncodes = commands.get_unknown_returncodes(self.name)

        self.timeout = filemgt.read_config(self.name,'timeout')
Example #15
def get_vampire_cmd (imports, output_stem):
    args = []
    args.append(filemgt.read_config('vampire','command'))
    args.append('--mode')
    args.append('casc')
    args.append('--proof')
    args.append('tptp')
    args.append('-t')
    args.append(filemgt.read_config('vampire','timeout'))
    # needed for Windows
    args.append('--input_file')
    args.append(list(imports)[0].get_module_set(imports).get_single_tptp_file(imports))
    logging.getLogger(__name__).debug("COMMAND FOR vampire IS " + str(args))
    # works for linux, not for Windows
    #return (args, [list(imports)[0].get_module_set(imports).get_single_tptp_file(imports)])
    
    return (args, [])
Example #16
    def constructCommand(self, modules, outfile_stem):
        """Construct the command to invoke the reasoner."""
        self.modules = modules
        self.output_file = outfile_stem + filemgt.read_config(
            self.name, 'ending')
        (self.args, self.input_files) = commands.get_system_command(
            self.name, modules, self.output_file)
        return self.args
Example #17
    def get_single_tptp_file (self, imports = None):
        """translate the module and all imported modules to a single TPTP file."""
        
        # if the given imports are identical to the module's imports, treat it as if the module's imports were used
        if imports and set(self.imports).issubset(imports) and set(self.imports).issuperset(imports):
            imports = None

        ending = ""

        # avoid redundant work if we already have the tptp file and didn't add a lemma module
        if not imports:
            if len(self.tptp_file_name)>0 and self.lemma_module is None: return self.tptp_file_name
            ending = filemgt.read_config('tptp','all_ending')
            name = self.module_name
        else:
            ending = filemgt.read_config('tptp','select_ending')
            name = imports[0].module_name
        # construct the final ending
        ending += filemgt.read_config('tptp','ending')

        tptp_file_name = filemgt.get_full_path(name, 
                                           folder=filemgt.read_config('tptp','folder'), 
                                           ending=ending)

        if not imports:
            self.tptp_file_name = tptp_file_name
            imports = self.get_imports().copy()
        
        tptp_sentences = []
        if self.lemma_module:
            imports.remove(self.lemma_module)
            tptp_sentences.append(self.lemma_module.tptp_sentence)

        files_to_translate = [i.clif_processed_file_name for i in imports]
        while None in files_to_translate:
            files_to_translate.remove(None)
        tptp_sentences.extend(clif.translate_sentences(files_to_translate, "TPTP"))
        tptp_file = open(tptp_file_name, 'w')
        tptp_file.writelines([t+"\n" for t in tptp_sentences])
        tptp_file.close()

        logging.getLogger(__name__).info("CREATED TPTP TRANSLATION: " + tptp_file_name)

        return tptp_file_name                
Example #18
    def detect_systems (self):
        """Read the active provers from the configuration file."""

        # local variables        
        provers = filemgt.read_config('active','provers').split(',')
        finders = filemgt.read_config('active','modelfinders').split(',')
        
        provers = [ s.strip() for s in provers ]
        finders = [ s.strip() for s in finders ]
        
        provers = filter(lambda x: len(x)>0, provers)
        finders = filter(lambda x: len(x)>0, finders)

        self.extend([Reasoner(r) for r in provers])
        self.extend([Reasoner(r, reasoner_type=Reasoner.MODEL_FINDER) for r in finders])

        logging.getLogger(__name__).debug("REASONER SET: " + str(provers+finders))
        
        return True
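The parsing above just splits a comma-separated configuration value, trims whitespace, and drops empty entries; a standalone illustration (the sample string is made up):

active = ' prover9, vampire,, '                 # made-up value of the 'active'/'provers' setting
provers = [s.strip() for s in active.split(',')]
provers = [s for s in provers if len(s) > 0]    # -> ['prover9', 'vampire']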
Example #19
def get_vampire_cmd(imports, output_stem):
    args = []
    args.append(filemgt.read_config('vampire', 'command'))
    args.append('--mode')
    args.append('casc')
    args.append('--proof')
    args.append('tptp')
    #    args.append('--latex_output')
    #    args.append(filemgt.read_config('vampire','latexfile'))
    args.append('-t')
    args.append(filemgt.read_config('vampire', 'timeout'))
    # needed for Windows
    args.append('--input_file')
    args.append(
        list(imports)[0].get_module_set(imports).get_single_tptp_file(imports))
    logging.getLogger(__name__).debug("COMMAND FOR vampire IS " + str(args))
    # works for linux, not for Windows
    #return (args, [list(imports)[0].get_module_set(imports).get_single_tptp_file(imports)])

    return (args, [])
Example #20
    def get_single_ladr_file(self, imports=None):
        """get the ClifModuleSet as a single file in LADR syntax."""

        # if the given imports are identical to the module's imports, treat it as if the module's imports were used
        if imports and set(self.imports).issubset(imports) and set(
                self.imports).issuperset(imports):
            imports = None

        # avoid redundant work if we already have the ladr file
        if not imports and len(self.p9_file_name) > 0:
            return self.p9_file_name

        ending = ""
        if not imports:
            ending = filemgt.read_config('ladr', 'all_ending')
            name = self.module_name
        else:
            ending = filemgt.read_config('ladr', 'select_ending')
            name = imports[0].module_name
        # construct the final ending
        ending += filemgt.read_config('ladr', 'ending')

        p9_files = self.get_ladr_files(imports)

        p9_file_name = filemgt.get_full_path(name,
                                             folder=filemgt.read_config(
                                                 'ladr', 'folder'),
                                             ending=ending)
        if not imports:
            self.p9_file_name = p9_file_name

        #print "FILE NAME:" + self.p9_file_name
        # TODO: need to initialize self.replaceable_symbols
        ladr.cumulate_ladr_files(p9_files, p9_file_name)
        logging.getLogger(__name__).info("CREATED SINGLE LADR TRANSLATION: " +
                                         p9_file_name)
        return p9_file_name
Example #21
def reformat_urls(lines):
	"""Delete URL prefixes from all import declarations.""" 
	lines = list(lines)
	prefixes = filemgt.read_config('cl','prefix').split(',')
	prefixes = [p.strip().strip('"') for p in prefixes]
	prefixes = sorted([p.strip() for p in prefixes], key=lambda s: len(s), reverse=True) 
	for i in range(0,len(lines)):
		for prefix in prefixes:
			if prefix in lines[i]:
				if not prefix.endswith('/'):
					prefix = prefix+'/'
				#print "replacing prefix: " + prefix + " in " + lines[i]
				lines[i] = lines[i].replace(prefix,'')
				#print lines[i]
	return lines
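A quick illustration of the prefix stripping; both the prefix and the import line are made-up values:

prefix = 'http://example.org/ontologies'         # hypothetical configured prefix
line = '(cl-imports http://example.org/ontologies/mereology.clif)'
if not prefix.endswith('/'):
    prefix = prefix + '/'
line = line.replace(prefix, '')                  # -> '(cl-imports mereology.clif)'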
Example #22
    def extract_p9_predicates_and_functions(self):

        #print 'extract predicates and functions'
        prover9args = 'prover9 -t 0 -f '

        for f in self.imports:
            prover9args += f.p9_file_name + ' '

        options_file = commands.get_p9_empty_optionsfile(
            self.get_p9_file_name(), verbose=False)
        prover9args += ' ' + options_file + ' '

        # would be better to create a temporary file or read the output stream directly
        temp_file = self.get_module_name() + '_order' + filemgt.read_config(
            'ladr', 'ending')
        prover9args += ' > ' + temp_file
        logging.getLogger(__name__).debug(prover9args)
        process.executeSubprocess(prover9args)

        order_file = open(temp_file, 'r')
        line = order_file.readline()
        predicates = None
        functions = None
        while line:
            if line.find('predicate_order') > -1:
                predicateline = line[line.find('predicate_order([') +
                                     len('predicate_order([') + 1:-4]
                predicates = predicateline.split()
                for i in range(len(predicates)):
                    predicates[i] = predicates[i].replace(',', '')
                line = order_file.readline()
                functionline = line[line.find('function_order([') +
                                    len('function_order([') + 1:-4]
                functions = functionline.split()
                for i in range(len(functions)):
                    functions[i] = functions[i].replace(',', '')
                break
            line = order_file.readline()

        order_file.close()
        #print 'temp file : ' + temp_file
        #print 'options file : ' + options_file
        os.remove(temp_file)
        os.remove(options_file)
        if predicates and functions:
            return (predicates, functions)
        else:
            return ([], [])
Example #23
    def run_module_consistency_check (self,module):
        """check a single module for consistency."""
        outfile_stem = filemgt.get_full_path(module.module_name, 
                                            folder=filemgt.read_config('output','folder')) 

        reasoners = ReasonerSet() 
        reasoners.constructAllCommands([module], outfile_stem)
        logging.getLogger(__name__).info("USING " + str(len(reasoners)) + " REASONERS: " + str([r.name for r in reasoners]))
        
        # run provers and modelfinders simultaneously and wait until one returns
        reasoners = process.raceProcesses(reasoners)

        (return_value, _) = self.consolidate_results(reasoners)    
        self.pretty_print_result(module.module_name, return_value)
        
        return return_value  
Example #24
def reformat_urls(lines):
    """Delete URL prefixes from all import declarations."""
    lines = list(lines)
    prefixes = filemgt.read_config('cl', 'prefix').split(',')
    prefixes = [p.strip().strip('"') for p in prefixes]
    prefixes = sorted([p.strip() for p in prefixes],
                      key=lambda s: len(s),
                      reverse=True)
    for i in range(0, len(lines)):
        for prefix in prefixes:
            if prefix in lines[i]:
                if not prefix.endswith('/'):
                    prefix = prefix + '/'
                # print "replacing prefix: " + prefix + " in " + lines[i]
                lines[i] = lines[i].replace(prefix, '')
                # print lines[i]
    return lines
Example #25
    def extract_p9_predicates_and_functions (self):
    
        #print 'extract predicates and functions'
        prover9args = 'prover9 -t 0 -f '
        
    
        for f in self.imports:
            prover9args += f.p9_file_name + ' '
        
        options_file = commands.get_p9_empty_optionsfile(self.get_p9_file_name(), verbose=False)
        prover9args += ' ' + options_file + ' '

        
        # would be better to create a temporary file or read the output stream directly
        temp_file = self.get_module_name() + '_order' + filemgt.read_config('ladr','ending')
        prover9args += ' > ' + temp_file
        logging.getLogger(__name__).debug(prover9args)
        process.executeSubprocess(prover9args)
        
        order_file = open(temp_file, 'r')
        line = order_file.readline()
        predicates = None
        functions = None
        while line:
            if line.find('predicate_order') > -1:
                predicateline = line[line.find('predicate_order([')+len('predicate_order([')+1:-4]
                predicates = predicateline.split()
                for i in range(len(predicates)):
                    predicates[i] = predicates[i].replace(',','')
                line = order_file.readline()
                functionline = line[line.find('function_order([')+len('function_order([')+1:-4]
                functions = functionline.split()
                for i in range(len(functions)):
                    functions[i] = functions[i].replace(',','')
                break
            line = order_file.readline()
            
        order_file.close()
        #print 'temp file : ' + temp_file
        #print 'options file : ' + options_file
        os.remove(temp_file)
        os.remove(options_file)
        if predicates and functions:
            return (predicates, functions)
        else:
            return ([], [])
Example #26
    def run_module_consistency_check(self, module):
        """check a single module for consistency."""
        outfile_stem = filemgt.get_full_path(module.module_name,
                                             folder=filemgt.read_config(
                                                 'output', 'folder'))

        reasoners = ReasonerSet()
        reasoners.constructAllCommands([module], outfile_stem)
        logging.getLogger(__name__).info("USING " + str(len(reasoners)) +
                                         " REASONERS: " +
                                         str([r.name for r in reasoners]))

        # run provers and modelfinders simultaneously and wait until one returns
        reasoners = process.raceProcesses(reasoners)

        (return_value, _) = self.consolidate_results(reasoners)
        self.pretty_print_result(module.module_name, return_value)

        return return_value
Example #27
    def run_simple_consistency_check(self,
                                     module_name=None,
                                     modules=None,
                                     options_files=None):
        """ test the input for consistency by trying to find a model or an inconsistency."""
        # want to create a subfolder for the output files
        outfile_stem = filemgt.get_full_path(self.module_name,
                                             folder=filemgt.read_config(
                                                 'output', 'folder'))

        if not module_name:
            module_name = self.module_name

        if not modules:
            modules = self.imports  # use all imports as default set of modules

        reasoners = ReasonerSet()
        reasoners.constructAllCommands(modules, outfile_stem)
        logging.getLogger(__name__).info("USING " + str(len(reasoners)) +
                                         " REASONERS: " +
                                         str([r.name for r in reasoners]))

        # run provers and modelfinders simultaneously and wait until one returns
        reasoners = process.raceProcesses(reasoners)

        # this captures our return code (consistent/inconsistent/unknown), not the reasoning processes' return codes
        (return_value, fastest_reasoner) = self.consolidate_results(reasoners)

        if len(modules) == 0:
            self.pretty_print_result(module_name + " (without imports)",
                                     return_value)
        else:
            self.pretty_print_result(
                module_name + " (with imports = " + str(modules) + ")",
                return_value)

        results = []
        results.append((tuple(modules), return_value, fastest_reasoner))
        #print str(results)
        return results
Example #28
def get_returncodes (name,type="positive_returncode"):
    code_list = filemgt.read_config(name,type) 
    codes = []
    if len(code_list)>0:
        codes = [ int(s.strip()) for s in code_list.split(',')]
    return codes
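For example, with an illustrative configuration value the parsing yields a list of integers:

code_list = '0, 101, 102'                       # made-up 'positive_returncode' value
codes = []
if len(code_list) > 0:
    codes = [int(s.strip()) for s in code_list.split(',')]
# codes -> [0, 101, 102]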
Example #29
def get_p9_empty_optionsfile (p9_file_name, verbose=True):

    # currently one option file for all!!
    #print 'OPTIONS FILE - P9 file name = ' + p9_file_name
    options_file_name = os.path.join(os.path.dirname(p9_file_name), 
                                     os.path.splitext(os.path.basename(p9_file_name))[0] + filemgt.read_config('prover9','options_ending'))
    
    #print 'OPTIONS FILE = ' + options_file_name

    ladr.options_files.append(options_file_name)

    if os.path.isfile(options_file_name):
        return options_file_name
    else:
    #    options_file_name = module_p9_file + '.options'
        options_file = open(options_file_name, 'w')
        options_file.write('clear(auto_denials).\n')
        if not verbose:
            options_file.write('clear(print_initial_clauses).\n')
            options_file.write('clear(print_kept).\n')
            options_file.write('clear(print_given).\n')
            #options_file.write('set(quiet).')
        options_file.close()
        return options_file_name
Example #30
def get_ladr_to_tptp_cmd(input_file_name, output_file_name):
    cmd = filemgt.read_config(
        'converters',
        'prover9-to-tptp') + ' < ' + input_file_name + ' > ' + output_file_name
    return cmd
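Because the returned command uses shell redirection (< and >), it has to be executed through a shell. A hedged sketch with placeholder names; the actual converter binary and file paths come from the configuration:

import subprocess
cmd = 'ladr_to_tptp < theory.p9 > theory.tptp'  # placeholder converter name and file paths
# subprocess.call(cmd, shell=True)              # shell=True is required for the redirection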
Example #31
def get_ladr_to_tptp_cmd (input_file_name, output_file_name):
    cmd = filemgt.read_config('converters','prover9-to-tptp') + ' < ' + input_file_name + ' > ' + output_file_name
    return cmd
Example #32
from src import filemgt
from tasks import *
import os, sys
from src.ClifModuleSet import ClifModuleSet
from tasks import clif_to_tptp

#global variables
filemgt.start_logging()
tempfolder = filemgt.read_config('converters', 'tempfolder')
ignores = [tempfolder]
ending = filemgt.read_config('cl','ending')
licence.print_terms()

if __name__ == '__main__':
    options = sys.argv
    options.reverse()
    options.pop()
    folder = options.pop()
    ladr_all(folder, options)


#    for directory, subdirs, files in os.walk(folder):
#        if any(ignore in directory for ignore in ignores):
#            pass
#        else:
#            for single_file in files:
#                if single_file.endswith(ending):
#                    filename = os.path.join(directory.replace('qs\\',''), single_file)
#                    print filename
#                    m = ClifModuleSet(filename)
#                    clif_to_ladr.ladr(filename, m, options)
Example #33
    def constructCommand(self, modules, outfile_stem):
        """Construct the command to invoke the reasoner."""
        self.modules = modules
        self.output_file = outfile_stem + filemgt.read_config(self.name, 'ending')
        (self.args, self.input_files) = commands.get_system_command(self.name, modules, self.output_file)
        return self.args
Example #34
def get_returncodes(name, type="positive_returncode"):
    code_list = filemgt.read_config(name, type)
    codes = []
    if len(code_list) > 0:
        codes = [int(s.strip()) for s in code_list.split(',')]
    return codes
Example #35
def nontrivially_consistent(filename, options=[]):
	(consistent, m) = check_consistency.consistent(filename, options)
	
	if consistent==None or consistent==True:  # no need to check nontrivial consistency if it is not consistent at all
		#m = ClifModuleSet(filename)
		definitional_modules = []
		if "-simple" in options:
			i = m.get_top_module()
			if "-defs" not in options or i.is_simple_definition():
				definitional_modules.append(i)
		else:
			for i in m.get_imports():
				if "-defs" not in options or i.is_simple_definition():
					definitional_modules.append(i)
		
		weak = "strong"
		defs = ""
		if "-weak" in options:
			weak = "weak"
		if "-defs" in options:
			defs = "definitional "
		print "\n+++++++++++++++++++++\nProving "+weak +" nontrivial consistency for all " + str(len(definitional_modules))  + " " + defs + "modules of "+ m.get_module_name() +":\n+++++++++++++++++++++"
		for n in definitional_modules:
			print n.module_name
		print "+++++++++++++++++++++\n"
		
		if len(definitional_modules)==0:
			print "NO DEFINITIONS FOUND TO CHECK NONTRIVIAL CONSISTENCY FOR."
		
		for i in definitional_modules:
			if "-defs" not in options or i.is_simple_definition():
				if "-defs" in options:
					if "-all" in options:
						defined_symbols = m.get_defined_nonlogical_symbols()
					else:
						defined_symbols = i.get_defined_symbols()
				else: # not just definitions
					if "-all" in options:
						defined_symbols = m.get_nonlogical_symbols()
					else:
						defined_symbols = i.get_nonlogical_symbols()

				symbol_string = ""
				for (symbol, arity) in defined_symbols:
					symbol_string += symbol + '('+ str(arity) + ') '

				print "\n+++++++++++++++++++++\nProving "+weak +" nontrivial consistency of nonlogical symbols " + symbol_string + " in module " + i.module_name + "\n+++++++++++++++++++++\n"
				
				#for (symbol, arity) in defined_symbols:
					#print "Symbol " + str(symbol) + " has arity " + str(arity)
				
				# need to create new CL file that imports the definition module and adds a sentence stating that n distinct elements in this relation exist

				module_name_modifier = "" 
				if "-all" in options:
					module_name_modifier += "_all"
				if "-weak" in options:
					module_name_modifier += "_weak"
				(module_name, path) = filemgt.get_path_with_ending_for_nontrivial_consistency_checks(i.module_name+module_name_modifier)

				now = datetime.datetime.now()
				
				clif_file = open(path, 'w')
				clif_file.write("/** AUTOMATICALLY CREATED BY MACLEOD ON " + now.strftime("%a %b %d %H:%M:%S %Y")+'**/\n\n')
				clif_file.write('(' + clif.CLIF_TEXT + ' ' + module_name + '\n\n')
				clif_file.write('(' + clif.CLIF_IMPORT + ' ' + i.module_name + filemgt.read_config('cl','ending') + ')\n\n')
				
				# constructing a sentence of the form:
				# (exists (x1 x2 ...)
				#    (and
				#        (SYMBOL x1 x2 ...)
				#        (not (= x1 x2))
				#        (not (= ...
				#  )  )
				#
				# The assumption here is that there must exist a possibility that all the participating elements are distinct. 
				# If this weren't the case, the n-ary predicate could be reduced to an (n-1)-ary predicate.  This may be overly simplistic, but works for most cases.
				# In particular, it fails if a binary relation is strictly reflexive, i.e. holds only for individual elements.
				# For predicates with n>2 this should probably be relaxed to:
				# every pairwise position of elements can be distinct.   
				for (symbol, arity) in defined_symbols:
					if arity>0:
						if "-weak" in options: # weak nontrivial consistency: each entity is independent from all others
							for n in range(arity):                            
								clif_file.write(construct_existential_sentence(symbol, arity, negation=False, all_distinct=False, position=n) + '\n\n')
								clif_file.write(construct_existential_sentence(symbol, arity, negation=True, all_distinct=False, position=n) + '\n\n')
								
						else: # strong nontrivial consistency: all participating entities have to be disjoint
							clif_file.write(construct_existential_sentence(symbol, arity, negation=False, all_distinct=True) + '\n\n')
							clif_file.write(construct_existential_sentence(symbol, arity, negation=True, all_distinct=True) + '\n\n')

				clif_file.write(')\n') # closing "cl-module"
					
				clif_file.close()
	
				check_consistency.consistent(path,options)            
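For orientation, the strong (all-distinct) sentence sketched in the comments above would, for a binary symbol P, look roughly like the following. This is an illustrative rendering only, not the literal output of construct_existential_sentence, which is not shown in this snippet:

(exists (x1 x2)
    (and
        (P x1 x2)
        (not (= x1 x2))))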
Example #36
from src import filemgt
from tasks import *
import os, sys
from src.ClifModuleSet import ClifModuleSet
from tasks import clif_to_tptp

#global variables
filemgt.start_logging()
tempfolder = filemgt.read_config('converters', 'tempfolder')
ignores = [tempfolder]
ending = filemgt.read_config('cl','ending')
licence.print_terms()

if __name__ == '__main__':
    options = sys.argv
    options.reverse()
    options.pop()
    folder = options.pop()
    tptp_all(folder, options)


#    for directory, subdirs, files in os.walk(folder):
#        if any(ignore in directory for ignore in ignores):
#            pass
#        else:
#            for single_file in files:
#                if single_file.endswith(ending):
#                    filename = os.path.join(directory.replace('qs\\',''), single_file)
#                    print filename
#                    m = ClifModuleSet(filename)
#                    clif_to_ladr.ladr(filename, m, options)
def nontrivially_consistent(filename, m, options=[]):
	(consistent, m) = check_consistency.consistent(filename, m, options)
	
	if consistent==None or consistent==True:  # no need to check nontrivial consistency if it is not consistent at all      
		#m = ClifModuleSet(filename)
		definitional_modules = []
		if "-simple" in options:
			i = m.get_top_module()
			if "-defs" not in options or i.is_simple_definition():
				definitional_modules.append(i)
		else:
			for i in m.get_imports():
				if "-defs" not in options or i.is_simple_definition():
					definitional_modules.append(i)
		
		weak = "strong"
		defs = ""
		if "-weak" in options:
			weak = "weak"
		if "-defs" in options:
			defs = "definitional "
		print "\n+++++++++++++++++++++\nProving "+weak +" nontrivial consistency for all " + str(len(definitional_modules))  + " " + defs + "modules of "+ m.get_module_name() +":\n+++++++++++++++++++++"
		for n in definitional_modules:
			print n.module_name
		print "+++++++++++++++++++++\n"
		
		if len(definitional_modules)==0:
			print "NO DEFINITIONS FOUND TO CHECK NONTRIVIAL CONSISTENCY FOR."
		
		for i in definitional_modules:
			if "-defs" not in options or i.is_simple_definition():
				if "-defs" in options:
					if "-all" in options:
						defined_symbols = m.get_defined_nonlogical_symbols()
					else:
						defined_symbols = i.get_defined_symbols()
				else: # not just definitions
					if "-all" in options:
						defined_symbols = m.get_nonlogical_symbols()
					else:
						defined_symbols = i.get_nonlogical_symbols()

				symbol_string = ""
				for (symbol, arity) in defined_symbols:
					symbol_string += symbol + '('+ str(arity) + ') '

				print "\n+++++++++++++++++++++\nProving "+weak +" nontrivial consistency of nonlogical symbols " + symbol_string + " in module " + i.module_name + "\n+++++++++++++++++++++\n"
				
				#for (symbol, arity) in defined_symbols:
					#print "Symbol " + str(symbol) + " has arity " + str(arity)
				
				# need to create new CL file that imports the definition module and adds a sentence stating that n distinct elements in this relation exist

				module_name_modifier = "" 
				if "-all" in options:
					module_name_modifier += "_all"
				if "-weak" in options:
					module_name_modifier += "_weak"
				(module_name, path) = filemgt.get_path_with_ending_for_nontrivial_consistency_checks(i.module_name+module_name_modifier)

				now = datetime.datetime.now()
				
				clif_file = open(path, 'w')
				clif_file.write("/** AUTOMATICALLY CREATED BY MACLEOD ON " + now.strftime("%a %b %d %H:%M:%S %Y")+'**/\n\n')
				clif_file.write('(' + clif.CLIF_TEXT + ' ' + module_name + '\n\n')
				clif_file.write('(' + clif.CLIF_IMPORT + ' ' + i.module_name + filemgt.read_config('cl','ending') + ')\n\n')
				
				# constructing a sentence of the form:
				# (exists (x1 x2 ...)
				#    (and
				#        (SYMBOL x1 x2 ...)
				#        (not (= x1 x2))
				#        (not (= ...
				#  )  )
				#
				# The assumption here is that there must exist a possibility that all the participating elements are distinct. 
				# If this weren't the case, the n-ary predicate could be reduced to an (n-1)-ary predicate.  This may be overly simplistic, but works for most cases.
				# In particular, it fails if a binary relation is strictly reflexive, i.e. holds only for individual elements.
				# For predicates with n>2 this should probably be relaxed to:
				# every pairwise position of elements can be distinct.   
				for (symbol, arity) in defined_symbols:
					if arity>0:
						if "-weak" in options: # weak nontrivial consistency: each entity is independent from all others
							for n in range(arity):                            
								clif_file.write(construct_existential_sentence(symbol, arity, negation=False, all_distinct=False, position=n) + '\n\n')
								clif_file.write(construct_existential_sentence(symbol, arity, negation=True, all_distinct=False, position=n) + '\n\n')
								
						else: # strong nontrivial consistency: all participating entities have to be disjoint
							clif_file.write(construct_existential_sentence(symbol, arity, negation=False, all_distinct=True) + '\n\n')
							clif_file.write(construct_existential_sentence(symbol, arity, negation=True, all_distinct=True) + '\n\n')

				clif_file.write(')\n') # closing "cl-module"
					
				clif_file.close()
				
				m2 = ClifModuleSet(path)
				check_consistency.consistent(path, m2, options=options)