def _run(self):
    """
    Runs the build process to produce the ontology documentation.
    """
    outfileinfos = self.getOutputFileInfos()

    # For in-source builds we must create the destination directory
    # ourselves; out-of-source builds get it from the BuildDirTarget
    # dependency.
    if self.config.getDoInSourceBuilds():
        docs_dir = os.path.dirname(outfileinfos[0].destpath)
        if not os.path.isdir(docs_dir):
            self._makeDirs(docs_dir)

    logger.info('Creating ontology documentation files...')

    ont = Ontology(self.mobt_reasoned.getOutputFilePath())

    # Generate one documentation file for each requested output format.
    for outfinfo in outfileinfos:
        # HTML output needs its supporting CSS and Javascript files in
        # the destination directory.
        if outfinfo.formatstr == 'html':
            target_dir = os.path.dirname(outfinfo.destpath)
            self._checkWebFiles(
                ['documentation_styles.css', 'navtree.js'], target_dir
            )

        documenter = Documenter(ont, getDocumentationWriter(outfinfo.formatstr))

        with open(self.config.getDocSpecificationFile()) as docspec, \
                open(outfinfo.destpath, 'w') as fout:
            documenter.document(docspec, fout)
def _run(self): """ Runs the inferencing pipeline. """ #self._retrieveAndCheckFilePaths() if self.srcpath != '': sourceont = Ontology(self.srcpath) else: sourceont = Ontology(JavaSystem.in) logger.info('Running reasoner and adding inferred axioms...') inf_types = self.config.getInferenceTypeStrs() annotate_inferred = self.config.getAnnotateInferred() preprocess_inverses = self.config.getPreprocessInverses() iaa = InferredAxiomAdder(sourceont, self.config.getReasonerStr()) if self.config.getExcludedTypesFile() != '': iaa.loadExcludedTypes(self.config.getExcludedTypesFile()) iaa.addInferredAxioms( inf_types, annotate_inferred, preprocess_inverses ) # Write the ontology to the output file or stdout. format_str = self.config.getOutputFormat() if self.outpath != '': logger.info('Writing compiled ontology to ' + self.outpath + '...') sourceont.saveOntology(self.outpath, format_str) else: sourceont.printOntology(format_str)
def _run(self):
    """
    Runs the build process and produces a compiled OWL ontology file.
    """
    # Get the imports modules IRIs from the imports build target.
    importinfos = self.ibt.getImportsInfo()

    self._retrieveAndCheckFilePaths()

    baseont = Ontology(self.base_ont_path)
    # Declare an import for every module reported by the imports target.
    for iinfo in importinfos:
        baseont.addImport(iinfo.iristr, True)

    # Write the updated base ontology to the output file.
    fileoutpath = self.getOutputFilePath()
    logger.info('Writing updated base ontology to ' + fileoutpath + '...')
    baseont.saveOntology(fileoutpath)
def _run(self):
    """
    Runs the build process and produces a new, modified version of the
    main OWL ontology file.
    """
    timer = BasicTimer()
    timer.start()

    self._retrieveAndCheckFilePaths()

    mainont = Ontology(self.obt.getOutputFilePath())

    if self.mergeimports:
        # Pull the axioms of every imported ontology directly into the
        # main ontology rather than keeping import statements.
        logger.info(
            'Merging all imported ontologies into the main ontology...')
        for importIRI in mainont.getImports():
            mainont.mergeOntology(importIRI, self.config.getAnnotateMerged())

    if self.prereason:
        logger.info('Running reasoner and adding inferred axioms...')
        iaa = InferredAxiomAdder(mainont, self.config.getReasonerStr())
        if self.config.getExcludedTypesFile() != '':
            iaa.loadExcludedTypes(self.config.getExcludedTypesFile())
        iaa.addInferredAxioms(
            self.config.getInferenceTypeStrs(),
            self.config.getAnnotateInferred(),
            self.config.getPreprocessInverses()
        )

    fileoutpath = self.getOutputFilePath()

    # Set the ontology IRI to the development IRI for the output path.
    mainont.setOntologyID(self.config.generateDevIRI(fileoutpath))

    # Write the ontology to the output file.
    logger.info('Writing compiled ontology to ' + fileoutpath + '...')
    mainont.saveOntology(fileoutpath, self.config.getOutputFormat())

    if self.mergeimports and self.prereason:
        msgtxt = 'Merged and reasoned '
    elif self.mergeimports:
        msgtxt = 'Merged '
    else:
        msgtxt = 'Reasoned '

    logger.info((msgtxt + 'ontology build completed in {0} s.\n').format(
        timer.stop()))
def _run(self):
    """
    Runs the imports build process and produces import module OWL files.
    """
    # For in-source builds we must create the output directory ourselves;
    # the BuildDirTarget dependency handles out-of-source builds.
    if self.config.getDoInSourceBuilds() and not os.path.isdir(self.outputdir):
        self._makeDirs(self.outputdir)

    for row in self.tablerows:
        termsfile_path = row['abs_tfilepath']
        # Rows without a terms file have nothing to build.
        if termsfile_path == '':
            continue

        if self.mbuilder.isBuildNeeded(row['IRI'], termsfile_path):
            logger.info('Building the {0} ({1}) import module.'.format(
                row['name'], row['IRI']))
            self.mbuilder.buildModule(row['IRI'], termsfile_path)
        else:
            logger.info(
                'The {0} ({1}) import module is already up to date.'.format(
                    row['name'], row['IRI']))
def _updateDownloadProgress(self, blocks_transferred, blocksize, filesize):
    """
    Instantiates and updates a console-based progress bar to indicate
    ontology download progress.  This method should be passed to the
    retrieve() method of URLOpenerWithErrorHandling.
    """
    #print blocks_transferred, blocksize, filesize
    if blocks_transferred == 0:
        # First callback: size the progress bar to the total number of
        # blocks in the download.
        blockcnt = int(math.ceil(float(filesize) / blocksize))
        self.progbar = ProgressBar(
            widgets=[
                Percentage(), '',
                Bar(marker='-', left='[', right=']'),
                ' ', ETA()
            ],
            maxval=blockcnt
        )
        logger.info('\nDownloading ' + self.sourceOntologyIRI)
        self.progbar.start()
    else:
        self.progbar.update(blocks_transferred)
        if blocks_transferred == self.progbar.maxval:
            self.progbar.finish()
            print
def _run(self):
    """
    Runs the build process to produce a new ontology release.
    """
    # generateBuildInfo() has already run: the base class guarantees that
    # _isBuildRequired() is called before this method.

    # Create the main release directory, if needed.
    if not os.path.isdir(self.release_dir):
        self._makeDirs(self.release_dir)

    # Create the released imports modules directory, if needed.
    if len(self.imports_fileinfos) > 0:
        imports_dir = os.path.dirname(self.imports_fileinfos[0].destpath)
        if not os.path.exists(imports_dir):
            self._makeDirs(imports_dir)

    # Create the release import module files.
    logger.info('Creating release import modules...')
    for finfo in self.imports_fileinfos:
        module = Ontology(finfo.sourcepath)
        module.setOntologyID(finfo.destIRI, finfo.versionIRI)
        module.saveOntology(finfo.destpath)

    # Create the release ontology files.
    logger.info('Creating release ontology files...')
    for finfo in self.ont_fileinfos:
        relont = Ontology(finfo.sourcepath)
        relont.setOntologyID(finfo.destIRI, finfo.versionIRI)

        # Point any explicitly imported modules at their released
        # version IRIs.
        for ifinfo in self.imports_fileinfos:
            if relont.hasImport(ifinfo.oldIRI):
                relont.updateImportIRI(ifinfo.oldIRI, ifinfo.versionIRI)

        relont.saveOntology(finfo.destpath, self.config.getOutputFormat())
def _run(self):
    """
    Checks for entailment errors in the main ontology.
    """
    ont = Ontology(self.obt.getOutputFilePath())

    timer = BasicTimer()
    logger.info('Checking for entailment errors...')
    timer.start()
    report = ont.checkEntailmentErrors(self.config.getReasonerStr())
    logger.info('Logical error check completed in {0} s'.format(
        timer.stop()))

    if report['is_consistent']:
        unsat_classes = report['unsatisfiable_classes']
        if len(unsat_classes) > 0:
            # Consistent but incoherent: report the unsatisfiable classes.
            iris = [ent.getIRI().toString() for ent in unsat_classes]
            classes_str = '<' + '>\n<'.join(iris) + '>'
            logger.info(
                '\nERROR: The ontology is consistent but incoherent '
                'because it contains one or more unsatisfiable classes. '
                'This usually indicates a modeling error. The following '
                'classes are unsatisfiable:\n' + classes_str + '\n')
        else:
            logger.info('\nThe ontology is consistent and coherent. No '
                'entailment problems were found.\n')
    else:
        logger.info(
            '\nERROR: The ontology is inconsistent (that is, it has no '
            'models). This is often caused by the presence of an '
            'individual (that is, a class instance) that is explicitly or '
            'implicitly a member of two disjoint classes. It might also '
            'indicate an underlying modeling error. Regardless, it is a '
            'serious problem because an inconsistent ontology cannot be '
            'used for logical inference.\n')
def _run(self):
    """
    Reads the source ontologies and looks for the search terms.

    Search terms are read from self.tfpath (or stdin if it is empty), and
    results are written as CSV rows to self.outpath (or stdout if it is
    empty).

    Fix: the input and output file handles were previously opened but
    never closed; they are now closed in a finally block.  The standard
    streams are never closed.
    """
    ef = EntityFinder()

    for search_ont in self.search_onts:
        logger.info('Reading source ontology {0}...'.format(search_ont))
        ontology = Ontology(search_ont)
        logger.info('Processing ontology entities...')
        ef.addOntologyEntities(ontology)

    if self.tfpath != '':
        termsin = open(self.tfpath)
    else:
        termsin = sys.stdin

    if self.outpath != '':
        logger.info('Writing search results to ' + self.outpath + '...')
        fout = open(self.outpath, 'w')
    else:
        fout = sys.stdout

    try:
        writer = csv.DictWriter(fout, fieldnames=[
            'Search term', 'Matching entity', 'Label(s)', 'Annotation',
            'Value', 'Match type', 'Definition(s)'
        ])
        writer.writeheader()

        # The row dict is reused: every key is overwritten for each result.
        row = {}
        for searchterm in termsin:
            searchterm = searchterm.strip()
            results = ef.findEntities(searchterm)
            for result in results:
                entity = result[0]
                row['Search term'] = searchterm
                row['Matching entity'] = str(entity.getIRI())
                row['Label(s)'] = ','.join(entity.getLabels())
                row['Annotation'] = result[1]
                row['Value'] = result[2]
                row['Definition(s)'] = ','.join(entity.getDefinitions())
                if result[3] == MATCH_FULL:
                    row['Match type'] = 'Full'
                else:
                    row['Match type'] = 'Partial'
                writer.writerow(row)
    finally:
        # Close only the handles this method opened; never the standard
        # streams.
        if termsin is not sys.stdin:
            termsin.close()
        if fout is not sys.stdout:
            fout.close()
def createProject(self):
    """
    Creates a new ontology project in the target directory.
    """
    # Copy in the template configuration file, customize it, and load
    # the resulting configuration settings.
    logger.info('Creating custom project configuration file...')
    projconfig = self._initConfig()

    logger.info('Generating project folder structure...')
    self._createProjectDirs(projconfig)

    logger.info('Creating initial source files...')
    self._createSourceFiles(projconfig)
def getReasoner(self, reasoner_name):
    """
    Returns an instance of a reasoner matching the value of the string
    "reasoner_name".  Supported values are "ELK", "HermiT", "Pellet", or
    "JFact" (the strings are not case sensitive).  ReasonerManager
    ensures that reasoner instances are effectively singletons (that is,
    subsequent requests for the same reasoner type return the same
    reasoner instance).

    reasoner_name: A string specifying the type of reasoner to
        instantiate.
    """
    reasoner_name = reasoner_name.lower().strip()

    if reasoner_name not in self.reasoners:
        owlont = self.getOntology().getOWLOntology()

        # Map each supported reasoner name to its log message and
        # factory class.
        factory_map = {
            'elk': ('Creating ELK reasoner...', ElkReasonerFactory),
            'hermit': ('Creating HermiT reasoner...', HermiTReasonerFactory),
            'pellet': ('Creating Pellet reasoner...', PelletReasonerFactory),
            'jfact': ('Creating JFact reasoner...', JFactFactory)
        }

        if reasoner_name not in factory_map:
            raise RuntimeError('Unrecognized DL reasoner name: ' +
                reasoner_name + '.')

        logmsg, factory_cls = factory_map[reasoner_name]
        logger.info(logmsg)
        self.reasoners[reasoner_name] = (
            factory_cls().createNonBufferingReasoner(owlont))

    return self.reasoners[reasoner_name]
def _run(self):
    """
    Runs the build process and produces a compiled OWL ontology file.

    The pipeline: collect import-module IRIs, add import declarations to
    the base ontology, parse every terms source file to define entities,
    process the deferred axioms, set the development IRI, and save the
    compiled ontology.
    """
    # We don't need to run _retrieveAndCheckFilePaths() here because the
    # base class ensures that _isBuildRequired() will always be called
    # prior to this method, so _retrieveAndCheckFilePaths() will have
    # already been run.
    timer = BasicTimer()
    timer.start()

    # Get the imports modules IRIs from the imports build target.
    importsIRIs = [info.iristr for info in self.ibt.getImportsInfo()]

    fileoutpath = self.getOutputFilePath()

    # Create the destination directory, if needed.  We only need to check
    # this for in-source builds, since the BuildDirTarget dependency will
    # take care of this for out-of-source builds.
    if self.config.getDoInSourceBuilds():
        destdir = os.path.dirname(fileoutpath)
        if not (os.path.isdir(destdir)):
            self._makeDirs(destdir)

    ontbuilder = OWLOntologyBuilder(self.base_ont_path)
    # Add an import declaration for each import module.
    for importIRI in importsIRIs:
        ontbuilder.getOntology().addImport(importIRI, True)

    # Process each source file.  In this step, entities and label
    # annotations are defined, but processing of all other axioms (e.g.,
    # text definitions, comments, equivalency axioms, subclass of axioms,
    # etc.) is deferred until after all input files have been read.  This
    # allows forward referencing of labels and term IRIs and means that
    # entity descriptions and source files can be processed in any
    # arbitrary order.
    for termsfile in self.termsfile_paths:
        with TableReaderFactory(termsfile) as reader:
            logger.info('Parsing ' + termsfile + '...')
            for table in reader:
                table.setRequiredColumns(REQUIRED_COLS)
                table.setOptionalColumns(OPTIONAL_COLS)

                for t_row in table:
                    # Rows flagged in the "Ignore" column are skipped.
                    if not (t_row['Ignore'].lower() in TRUE_STRS):
                        # Collapse all spaces in the "Type" string so
                        # that, e.g., "DataProperty" and "Data Property"
                        # will both work as expected.
                        typestr = t_row['Type'].lower().replace(' ', '')

                        if typestr == 'class':
                            ontbuilder.addOrUpdateClass(t_row)
                        elif typestr == 'dataproperty':
                            ontbuilder.addOrUpdateDataProperty(t_row)
                        elif typestr == 'objectproperty':
                            ontbuilder.addOrUpdateObjectProperty(t_row)
                        elif typestr == 'annotationproperty':
                            ontbuilder.addOrUpdateAnnotationProperty(t_row)
                        elif typestr == 'individual':
                            ontbuilder.addOrUpdateIndividual(t_row)
                        elif typestr == '':
                            raise EntityDescriptionError(
                                'The entity type (e.g., "class", "data '
                                'property") was not specified.', t_row)
                        else:
                            raise EntityDescriptionError(
                                'The entity type "' + t_row['Type'] +
                                '" is not supported.', t_row)

    # Define all deferred axioms from the source entity descriptions.
    logger.info('Defining all remaining entity axioms...')
    ontbuilder.processDeferredEntityAxioms(self.expanddefs)

    # Set the ontology IRI.
    ontIRI = self.config.generateDevIRI(fileoutpath)
    ontbuilder.getOntology().setOntologyID(ontIRI)

    # Write the ontology to the output file.
    logger.info('Writing compiled ontology to ' + fileoutpath + '...')
    ontbuilder.getOntology().saveOntology(fileoutpath,
        self.config.getOutputFormat())

    logger.info('Main ontology build completed in {0} s.\n'.format(
        timer.stop()))
def addInferredAxioms(self, inference_types, annotate=False, add_inverses=False):
    """
    Runs a reasoner on this ontology and adds the inferred axioms.

    inference_types: A list of strings specifying the kinds of inferred
        axioms to generate.  Valid values are detailed in the sample
        configuration file.
    annotate: If True, annotate inferred axioms to mark them as inferred.
    add_inverses: If True, inverse property assertions will be explicitly
        added to the ontology *prior* to running the reasoner.  This is
        useful for cases in which a reasoner that does not support
        inverses must be used (e.g., for runtime considerations) on an
        ontology with inverse property axioms.

    Raises RuntimeError if the ontology is logically inconsistent.
    """
    timer = BasicTimer()

    owlont = self.ont.getOWLOntology()
    ontman = self.ont.ontman
    df = self.ont.df
    # Snapshot of all axioms (imports closure included) before reasoning;
    # used later to de-duplicate the inferred axiom set.
    oldaxioms = owlont.getAxioms(ImportsEnum.INCLUDED)

    if add_inverses:
        logger.info('Generating inverse property assertions...')
        timer.start()
        self._addInversePropAssertions()
        logger.info(
            'Inverse property assertions generated in {0} s.'.format(
                timer.stop()))

    # Make sure that the ontology is consistent; otherwise, all inference
    # attempts will fail.
    logger.info('Checking whether the ontology is logically consistent...')
    timer.start()
    entcheck_res = self.ont.checkEntailmentErrors(self.reasoner_str)
    logger.info('Consistency check completed in {0} s.'.format(
        timer.stop()))
    if not (entcheck_res['is_consistent']):
        raise RuntimeError(
            'The ontology is inconsistent (that is, it has no models). '
            'This is often caused by the presence of an individual (that '
            'is, a class instance) that is explicitly or implicitly a '
            'member of two disjoint classes. It might also indicate an '
            'underlying modeling error. You must correct this problem '
            'before inferred axioms can be added to the ontology.')

    # The general approach is to first get the set of all axioms in the
    # ontology prior to reasoning so that this set can be used for
    # de-duplication later.  Then, inferred axioms are added to a new
    # ontology.  This makes it easy to compare explicit and inferred
    # axioms and to annotate inferred axioms.  Trivial axioms are removed
    # from the inferred axiom set, and the inferred axioms are merged into
    # the main ontology.
    logger.info('Generating inferred axioms...')
    timer.start()
    generators = self._getGeneratorsList(inference_types)
    iog = InferredOntologyGenerator(self.reasoner, generators)
    # Inferred axioms go into a separate, empty ontology first.
    inferredont = ontman.createOntology()
    iog.fillOntology(self.ont.df, inferredont)
    logger.info('Inferred axioms generated in {0} s.'.format(timer.stop()))

    logger.info(
        'Cleaning up redundant, trivial, and excluded axioms and merging '
        'with the main ontology...')
    timer.start()

    # Delete axioms in the inferred set that are explicitly stated in the
    # source ontology (or its imports closure).
    delaxioms = HashSet()
    for axiom in inferredont.getAxioms():
        if oldaxioms.contains(axiom):
            delaxioms.add(axiom)
    ontman.removeAxioms(inferredont, delaxioms)

    # Delete trivial axioms (e.g., subclass of owl:Thing, etc.).
    trivial_entities = [
        df.getOWLThing(), df.getOWLNothing(),
        df.getOWLTopDataProperty(), df.getOWLTopObjectProperty(),
        df.getOWLBottomDataProperty(), df.getOWLBottomObjectProperty()
    ]
    delaxioms.clear()
    for axiom in inferredont.getAxioms():
        for trivial_entity in trivial_entities:
            if axiom.containsEntityInSignature(trivial_entity):
                delaxioms.add(axiom)
                break
    ontman.removeAxioms(inferredont, delaxioms)

    # Find and remove excluded class/type assertions.  This is only
    # necessary if we added inferred class assertions.
    if 'types' in inference_types:
        excluded = self._getExcludedTypeAssertions(inferredont)
        ontman.removeAxioms(inferredont, excluded)

    if annotate:
        # Annotate all of the inferred axioms.  Each axiom is replaced by
        # an annotated copy of itself.
        annotprop = df.getOWLAnnotationProperty(self.INFERRED_ANNOT_IRI)
        annotval = df.getOWLLiteral('true')
        for axiom in inferredont.getAxioms():
            annot = df.getOWLAnnotation(annotprop, annotval)
            newaxiom = axiom.getAnnotatedAxiom(HashSet([annot]))
            ontman.removeAxiom(inferredont, axiom)
            ontman.addAxiom(inferredont, newaxiom)

    # Merge the inferred axioms into the main ontology.
    ontman.addAxioms(owlont, inferredont.getAxioms())

    # Find and remove redundant "subclass of" axioms.  This is only
    # necessary if we inferred the class hierarchy.
    if 'subclasses' in inference_types:
        redundants = self._getRedundantSubclassOfAxioms(owlont)
        ontman.removeAxioms(owlont, redundants)

    logger.info('Axiom clean up and merge completed in {0} s.'.format(
        timer.stop()))