Example #1
    def ExecuteReasoner(self):
        world = World()
        try:
            onto = world.get_ontology("file://" + self.input).load()

            # Pellet writes the inferred facts back into the same world
            sync_reasoner_pellet(world, infer_property_values=True)
            onto.save(file=self.output)
            return True
        except Exception:
            return False
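The same load → reason → save pipeline as a standalone sketch, assuming owlready2 (and a Java runtime for Pellet) is available; the paths are placeholders:

from owlready2 import World, sync_reasoner_pellet

world = World()
onto = world.get_ontology("file:///path/to/input.owl").load()  # placeholder input path
sync_reasoner_pellet(world, infer_property_values=True)        # inferred facts end up in the same world
onto.save(file="/path/to/output_inferred.owl")                 # placeholder output path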
Example #2
 def convert_worker(self, filename):
     db_path = os.path.join(
         self.db_dir, '.'.join(
             (os.path.splitext(os.path.basename(filename))[0], "sqlite3")))
     if not os.path.isfile(db_path):
         self.Print(cDebug.LEVEL_DEVELOPMENT, 'Convert: ' + db_path)
         my_world = World()
         my_world.set_backend(filename=db_path)
         my_world.get_ontology('file://' + filename).load()
         my_world.save()
         if self.remove_source:
             os.remove(filename)
     return db_path
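Once convert_worker has filled a SQLite quadstore, the file can be reopened later without re-parsing the OWL source; a minimal sketch with placeholder values (same pattern as Example #16 below):

from owlready2 import World

world = World()
world.set_backend(filename="/path/to/db/example.sqlite3")         # placeholder: file produced above
onto = world.get_ontology("http://example.org/onto.owl").load()   # placeholder: IRI of the ontology stored in the quadstore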
Example #3
def _read_from_source(filename: str) -> Ontology:
    name = os.path.basename(filename)
    _, ext = os.path.splitext(name)

    if ext == ".json":
        with open(filename) as json_file:
            data = json.load(json_file)

    elif ext == ".pkl":
        with open(filename, "rb") as f:
            data = pickle.load(f)

    elif ext == ".owl":
        world = World()

        try:
            ontology = world.get_ontology(filename)
            onto = ontology.load()
        except OwlReadyOntologyParsingError as ex:
            return Ontology(name, {}, filename=filename, error_msg=ex.args[0])

        with onto:
            data = _onto_to_tree(Thing, world)

    else:
        raise NotImplementedError(f"No readers for file {name}")

    assert isinstance(data, dict)
    return Ontology(name, data, filename=filename)
Example #4
def find_candidate_instances(w2v_vectors, tagged_words, input_onto, topn):
    candidate_instances = defaultdict(list)

    world = World()
    onto = world.get_ontology(input_onto).load()

    onto_classes = onto.classes()

    for onto_class in onto_classes:
        instances = [
            nlp.get_name_from_IRI(inst)
            for inst in onto.get_instances_of(onto_class)
        ]

        # keep only instances that have a word2vec vector (removing items while
        # iterating over the same list would skip elements)
        instances = [inst for inst in instances if inst in w2v_vectors.vocab]

        similar = find_by_cos_similarity(w2v_vectors, instances, onto_class,
                                         topn)

        similar = filter_by_pos(similar, instances, tagged_words)

        for s in similar[:]:
            if s[1] <= 0.42:
                similar.remove(s)

        candidate_instances[onto_class] = similar

    return candidate_instances
Example #5
def onto_get_var_limits():
    da_vars = {}
    mec_vars = {}
    # creating a new world to isolate the reasoning results
    new_world = World()
    # Loading our ontology
    onto = new_world.get_ontology(onto_dir_path).load()

    variables = onto.search(type=onto.Variable)
    for var in variables:
        try:
            if 'DA' in var.esVariableDe[0].get_name():
                da_vars[django_names[var.get_name()]] = {
                    'min': var.tieneValorMinimo,
                    'max': var.tieneValorMaximo
                }
#                print(var.esVariableDe[0].get_name())
            if 'MEC' in var.esVariableDe[0].get_name():
                mec_vars[django_names[var.get_name()]] = {
                    'min': var.tieneValorMinimo,
                    'max': var.tieneValorMaximo
                }
        except Exception as e:
            print("None", e)
    return da_vars, mec_vars
Example #6
 def sparql_query_owlready(self, query):
     """
     ""SELECT ?p WHERE {
       <http://www.semanticweb.org/jiba/ontologies/2017/0/test#ma_pizza> <http://www.semanticweb.org/jiba/ontologies/2017/0/test#price> ?p .
     }""
     """
     my_world = World()
     onto = my_world.get_ontology(
         join(self._onto.path, self._onto.owl_file + ".owl")).load()
     graph = my_world.as_rdflib_graph()
     return list(graph.query_owlready(query))
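A standalone sketch of the same pattern, assuming a local copy of the test ontology quoted in the docstring above (the file path is a placeholder):

from owlready2 import World

world = World()
world.get_ontology("file:///path/to/test.owl").load()  # placeholder path
graph = world.as_rdflib_graph()
rows = list(graph.query_owlready("""
    SELECT ?p WHERE {
      <http://www.semanticweb.org/jiba/ontologies/2017/0/test#ma_pizza>
          <http://www.semanticweb.org/jiba/ontologies/2017/0/test#price> ?p .
    }"""))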
Example #7
def populate_ontology(candidate_instances, input_onto, output_onto):
    world = World()

    onto = world.get_ontology(input_onto).load()

    for onto_class, instances in candidate_instances.items():

        print_class_and_similar(onto_class, instances)

        for inst in instances:
            _save_instance(onto, onto_class, inst)

    onto.save(file=output_onto)
Example #8
def get_ontology_from_local_file(filename: str = '',
                                 db_dir: str = '',
                                 db_dir_name: str = '',
                                 use_owl_world: bool = True) -> Ontology:
    filename_with_prefix = 'file://' + filename

    if use_owl_world:
        if not os.path.isdir(db_dir):
            ret = FL.CreateDir(db_dir)
        my_world = World()
        my_world.set_backend(filename=db_dir_name)

        return my_world.get_ontology(filename_with_prefix).load()
    else:
        return get_ontology(filename_with_prefix).load()
Example #9
    def __iter__(self) -> Iterator[Optional[Tuple[Ontology, Scene]]]:
        for index, scene in enumerate(self.variation_dimensions):
            self.iterations += 1
            if scene is not None:
                world = World(backend='sqlite', filename=':memory:', dbname=f"scene_db_{index:04}")
                with world.get_ontology(self.base_iri) as onto:
                    self.domain_factory(onto)
                    self.instantiate_scene(scene, onto)

                    try:
                        sync_reasoner_pellet(x=world,
                                             infer_data_property_values=True,
                                             infer_property_values=True,
                                             debug=self.debug)
                    except Exception as e:
                        onto.save("error.rdf.xml")
                        raise e
                    yield onto, scene
            else:
                yield None
            if self.max_tries is not None and self.iterations >= self.max_tries:
                break
Example #10
def reasoner(data):
    # print(data.shape)
    # print("Inside OntoParser-Reasoner")
    # creating a new world to isolate the reasoning results
    # ontos = {n: World().get_ontology(onto_dir_path).load()
    #          for n in range(data.shape[0])}

    new_world = World()
    # Loading the ontology
    onto = new_world.get_ontology(onto_dir_path).load()

    # Creating individuals of Lectura that will be used by the rules
    onto.Variable_Dil1_Entrada.tieneValorPropuesto = float(data[0])
    onto.Lectura_AGV_Entrada.tieneValorCensado = float(data[1])
    onto.Lectura_DQO_Entrada.tieneValorCensado = float(data[2])
    onto.Lectura_Biomasa_Salida.tieneValorCensado = float(data[3])
    onto.Lectura_DQO_Salida.tieneValorCensado = float(data[4])
    onto.Lectura_AGV_Salida.tieneValorCensado = float(data[5])

    onto.Variable_Dil2_Entrada.tieneValorPropuesto = float(data[7])
    onto.Lectura_Ace_Salida.tieneValorCensado = float(data[9])
    onto.Lectura_xa_Salida.tieneValorCensado = float(data[10])
    onto.Lectura_xm_Salida.tieneValorCensado = float(data[11])
    onto.Lectura_xh_Salida.tieneValorCensado = float(data[12])
    onto.Lectura_mox_Salida.tieneValorCensado = float(data[13])
    onto.Lectura_imec_Salida.tieneValorCensado = float(data[14])
    onto.Lectura_QH2_Salida.tieneValorCensado = float(data[15])

    # Apply the rules using pellet reasoner
    sync_reasoner_pellet(onto,
                         infer_data_property_values=True,
                         infer_property_values=True,
                         debug=0)

    # Get new states for each process
    infered_states = get_infered_states(onto)

    return json.dumps(infered_states), onto
Example #11
class OntologyManager:
    def __init__(self, iri="http://www.example.org/onto.owl"):
        self.typical_facts_list = list()
        self.a_box_members_list = list()
        self.scenarios_list = list()
        self.typical_members_list = list()
        self.cost_dict = dict()
        self.symptoms_dict = dict()
        self.my_world = World()
        self.big_world = World()
        self.onto = self.my_world.get_ontology(iri)

    def create_complementary_class(self, class_identifier):
        with self.onto:
            complementary_class = Not(class_identifier)
        return complementary_class

    def create_class(self, class_name):
        if not self.is_class_present(class_name):
            with self.onto:
                new_class = types.new_class(class_name, (Thing,))
            return new_class
        else:
            return self.get_class(class_name)

    def create_property(self, property_name):
        with self.onto:
            new_property = types.new_class(property_name, (ObjectProperty,))
        return new_property

    def add_sub_class(self, sub_class_identifier, super_class_identifier):
        with self.onto:
            sub_class_identifier.is_a.append(super_class_identifier)

    def add_member_to_class(self, member_name, class_identifier, symp: bool = False):
        self.a_box_members_list.append(AboxMember(class_identifier, member_name, symp))
        return class_identifier(member_name)

    def add_member_to_multiple_classes(self, member_identifier, class_list, symp: bool = False):
        for c in class_list:
            member_identifier.is_a.append(c)
            self.a_box_members_list.append(AboxMember(c, member_identifier.name, symp))

    # & is owlready's logical intersection operator.
    # r1 is an owlready object property
    # only means "for all" (universal restriction)
    # some means "there exists" (existential restriction)

    # noinspection PyUnresolvedReferences
    def add_typical_fact(self, t_class_identifier, class_identifier, probability="No probability"):
        with self.onto:
            t_class_identifier_1 = self.create_class(t_class_identifier.name + "1")
            t_class_intersection = self.create_class(
                "Intersection" + t_class_identifier.name + t_class_identifier_1.name)
            t_class_intersection.equivalent_to = [t_class_identifier & t_class_identifier_1]
            self.add_sub_class(t_class_intersection, class_identifier)

            r1 = self.create_property("r1")
            t_class_identifier_1.is_a.append(r1.only(Not(t_class_identifier) & t_class_identifier_1))

            not_t_class_identifier_1 = self.create_class("Not" + t_class_identifier_1.name)
            not_t_class_identifier_1.is_a.append(r1.some(t_class_identifier & t_class_identifier_1))

            self.typical_facts_list.append(TypicalFact(t_class_identifier, class_identifier, probability))

    # C and C1
    # The intersection makes the double membership explicit

    def set_as_typical_member(self, member_name, t_class_identifier, t_class_identifier_1):
        with self.onto:
            print("Membro tipico:")
            t_class_identifier(member_name)
            t_class_identifier_1(member_name)
            t_class_intersection = self.get_class("Intersection" + t_class_identifier.name + t_class_identifier_1.name)
            t_class_intersection(member_name)
            print(member_name + " is_a " + t_class_identifier.name)
            print(member_name + " is_a " + t_class_identifier_1.name)
            print(member_name + " is_a " + t_class_intersection.name)

    def is_class_present(self, class_name):
        if self.get_class(class_name) is not None:
            return True
        return False

    def get_class(self, class_name):
        return self.onto[class_name]

    def consistency(self, condition: bool = False):
        try:
            with self.onto:
                if condition:
                    sync_reasoner(self.my_world)
                    classi_incosistenti = list(self.my_world.inconsistent_classes())
                    if not len(classi_incosistenti) == 0:
                        return classi_incosistenti
                else:
                    sync_reasoner(self.big_world)
                return "The ontology is consistent"
        except OwlReadyInconsistentOntologyError:
            return "The ontology is inconsistent"

    def store_for_reasoning(self, member_name: str, class_id: object):
        self.symptoms_dict.update({class_id: member_name})

    def add_symptoms_to_kb(self):
        for class_sy, pname in self.symptoms_dict.items():
            class_c = self.create_class(class_sy.name)
            not_class_c = self.create_class("Not(" + class_sy.name + ")")
            class_c.equivalent_to = [Not(not_class_c)]
            self.add_member_to_class(pname, not_class_c, symp=True)
            print("Symptom added: " + pname + ": " + not_class_c.name)

    def save_base_world(self):
        self.onto.save("ontoBase.owl", format="ntriples")

    def create_new_world(self):
        self.onto.destroy()
        self.big_world = World()
        self.onto = self.big_world.get_ontology(
            "file://" + PATH_TO_ONTO + "//ontoBase.owl").load()     # .load(True, None, True)

    def show_classes_iri(self):
        for c in self.big_world.classes():
            print(str(c.name) + " is_a " + str(c.is_a))

    def show_members_in_classes(self):
        for c in self.big_world.classes():
            for m in c.instances():
                print(m.name + " member_of " + c.name)

    def show_classes_iri_my(self):
        for c in self.my_world.classes():
            print(str(c.name) + " is_a " + str(c.is_a))

    def show_members_in_classes_my(self):
        for c in self.my_world.classes():
            for m in c.instances():
                print(m.name + " member_of " + c.name)

    def show_scenarios(self):
        num_scenario = 1
        for s in self.scenarios_list:
            print("INIZIO SCENARIO " + str(num_scenario))
            record = ""
            if len(s.list_of_typical_members) == 0:
                print("Scenario vuoto" + "\n" + "Probabilità scenario: " + str(s.probability))
            else:
                for tm in s.list_of_typical_members:
                    record = record + "Typical(" + tm.t_class_identifier.name + ")" + "," + tm.member_name + "," \
                             + str(tm.probability) + "\n"
                record = record + "Probabilità scenario: " + str(s.probability)
                print(record)
            print("FINE SCENARIO " + str(num_scenario))
            print("\n")
            num_scenario = num_scenario + 1

    @staticmethod
    def show_a_specific_scenario(scenario):
        print("INIZIO SCENARIO")
        record = ""
        if len(scenario.list_of_typical_members) == 0:
            print("Scenario vuoto;" + "\nProbabilità scenario: " + str(scenario.probability))
        else:
            for tm in scenario.list_of_typical_members:
                record = record + tm.t_class_identifier.name + "," + tm.member_name + "," + str(tm.probability) + "; "
            record = record + "\nProbabilità scenario: " + str(scenario.probability)
            print(record)
        print("FINE SCENARIO")

    # TODO: methods that are never used; decide what to do with them
    @staticmethod
    def destroy_class(class_identifier):
        destroy_entity(class_identifier)

    @staticmethod
    def set_classes_as_disjoint(classes_identifier_list):
        AllDisjoint(classes_identifier_list)

    @staticmethod
    def remove_onto_file():
        if os.path.exists("ontoBase.owl"):
            os.remove("ontoBase.owl")
        else:
            print("The file does not exist")
Example #12
class OntologyInspector(object):
    def __init__(self, owl_file, path, endpoint=None, inspector=None):
        self.owl_file = owl_file.replace(".owl", "")
        self.path = path
        self._onto = GenericOntology(join(self.path, self.owl_file))
        self.world = World()
        self.world.get_ontology(join(self.path, self.owl_file + ".owl")).load()

        self.endpoint = endpoint
        self.inspector = inspector or SPARQLQueries(endpoint, self)

        self.list_labels = set()  # A unique set of ontology labels
        self.superclass_for_class = {}
        self.__load_subclasses__()
        self.__nsmap = {}  # mapping of namespaces

    @property
    def ontology(self):
        """ owlready2 ontology format """
        return self._onto.onto

    def reload_ontology(self):
        """ reload from disk """
        self._onto = GenericOntology(join(self.path, self.owl_file))

    def update_ontology(self, onto):
        """ owlready2 ontology format """
        self._onto.onto = onto

    def __get_owl_root_node__(self):
        try:
            from lxml import etree
        except ImportError:
            import xml.etree.cElementTree as etree
        owl_file = self.path + '/' + self.owl_file + ".owl"
        owl_root = etree.parse(owl_file).getroot()
        self.nsmap = owl_root.nsmap.copy()
        self.nsmap['xmlns'] = self.nsmap.pop(None)
        return owl_root

    def __load_subclasses__(self):
        owl_root = self.__get_owl_root_node__()
        for class_obj in owl_root.findall('{%s}Class' % owl_root.nsmap['owl']):
            onto_label = class_obj.get('{%s}about' % owl_root.nsmap['rdf'])
            self.list_labels.add(onto_label)
            subclass_of_obj = class_obj.find('{%s}subClassOf' %
                                             owl_root.nsmap['rdfs'])
            if subclass_of_obj is not None:
                superclass_label = subclass_of_obj.get('{%s}resource' %
                                                       owl_root.nsmap['rdf'])
                self.superclass_for_class[onto_label] = superclass_label

    def is_leaf_class(self, onto_label):
        """
        Checks if the ontology label provided (for instance http://dbpedia.org/ontology/SportsTeam) is a leaf in the DBpedia ontology tree or not
        It is a leaf if it is not super-class of any other class in the ontology
        @param onto_label: the ontology label
        @type onto_label: string
        @return: whether it is a leaf or not
        @rtype: bool
        """
        is_super_class = False
        for subclass, superclass in list(self.superclass_for_class.items()):
            if superclass == onto_label:
                is_super_class = True
                break
        if not is_super_class and onto_label not in self.list_labels:
            return None

        return not is_super_class

    def get_ontology_path(self, onto_label):
        '''
        Returns the path of ontology classes for the given ontology label (is-a relations)
        @param onto_label: the ontology label (could be http://dbpedia.org/ontology/SportsTeam or just SportsTeam)
        @type onto_label: str
        @return: list of ontology labels
        @rtype: list
        '''
        thing_label = '%sThing' % self.nsmap['owl']
        if onto_label == thing_label:
            return [thing_label]
        else:
            if self.nsmap[
                    'xmlns'] not in onto_label:  # To allow things like "SportsTeam instead of http://dbpedia.org/ontology/SportsTeam
                onto_label = self.nsmap['xmlns'] + onto_label

            if onto_label not in self.superclass_for_class:
                return []
            else:
                super_path = self.get_ontology_path(
                    self.superclass_for_class[onto_label])
                super_path.insert(0, onto_label)
                return super_path

    def get_depth(self, onto_label):
        '''
        Returns the depth in the ontology hierarchy for the given ontology label (is-a relations)
        @param onto_label: the ontology label (could be http://dbpedia.org/ontology/SportsTeam or just SportsTeam)
        @type onto_label: str
        @return: depth
        @rtype: int
        '''
        path = self.get_ontology_path(onto_label)
        return len(path)

    def search(self, *args, **kwargs):
        return self.ontology.search(*args, **kwargs)

    def search_one(self, *args, **kwargs):
        return self.ontology.search_one(*args, **kwargs)

    def search_by_iri(self, iri):
        return self.ontology.search(iri=iri)

    def search_by_type(self, typ):
        return self.ontology.search(type=typ)

    def search_by_subclass_of(self, subclass_of):
        return self.ontology.search(subclass_of=subclass_of)

    def search_by_is_a(self, is_a):
        return self.ontology.search(is_a=is_a)

    def as_sql(self, query, name=None, path=None):
        """ search triples and export result in sql db"""
        name = name or query
        with GenericSQLDatabase(name, path) as db:
            triples = self.inspector.triples(query)
            for triple in triples:
                thing = triple["subject"]["value"]
                prop = triple["predicate"]["value"]
                value = triple["object"]["value"]
                t = db.add_thing(thing)
                p = db.add_property(prop, value=value)
                t.properties.append(p)
        return db

    def from_sql(self, path):
        return self

    def hermit_reason(self):
        """ load from disk, reason and return owlready2 ontology format"""
        self.world = World()
        onto = self.world.get_ontology(join(self.path,
                                            self.owl_file + ".owl")).load()
        sync_reasoner_hermit(self.world)
        return onto

    def pellet_reason(self):
        """ load from disk, reason and return owlready2 ontology format"""
        self.world = World()
        onto = self.world.get_ontology(join(self.path,
                                            self.owl_file + ".owl")).load()

        sync_reasoner_pellet(self.world)
        return onto

    def EYE_reason(self, data, rules):
        '''
        data = """
            @prefix ppl: <http://example.org/people#>.
            @prefix foaf: <http://xmlns.com/foaf/0.1/>.

            ppl:Cindy foaf:knows ppl:John.
            ppl:Cindy foaf:knows ppl:Eliza.
            ppl:Cindy foaf:knows ppl:Kate.
            ppl:Eliza foaf:knows ppl:John.
            ppl:Peter foaf:knows ppl:John.
        """

        rules = """
            @prefix foaf: <http://xmlns.com/foaf/0.1/>.

            {
                ?personA foaf:knows ?personB.
            }
            =>
            {
                ?personB foaf:knows ?personA.
            }.
        """

        output = """
            PREFIX ppl: <http://example.org/people#>
            PREFIX foaf: <http://xmlns.com/foaf/0.1/>

            ppl:Cindy foaf:knows ppl:John.
            ppl:Cindy foaf:knows ppl:Eliza.
            ppl:Cindy foaf:knows ppl:Kate.
            ppl:Eliza foaf:knows ppl:John.
            ppl:Peter foaf:knows ppl:John.
            ppl:John foaf:knows ppl:Cindy.
            ppl:Eliza foaf:knows ppl:Cindy.
            ppl:Kate foaf:knows ppl:Cindy.
            ppl:John foaf:knows ppl:Eliza.
            ppl:John foaf:knows ppl:Peter.        """

        '''
        return EYE_rest(data, rules)
Example #13
class KnowledgeBase(AbstractKnowledgeBase):
    """ Knowledge Base Class representing Tbox and Abox along with the concept hierarchy """
    def __init__(self, path):
        super().__init__()
        self.path = path
        self.world = World()
        self.onto = self.world.get_ontology('file://' +
                                            self.path).load(reload=True)
        self.property_hierarchy = PropertyHierarchy(self.onto)
        self.name = self.onto.name
        self.parse()
        self._concept_generator = ConceptGenerator(
            concepts=self.uri_to_concept,
            thing=self.thing,
            nothing=self.nothing,
            onto=self.onto)

        self.describe()

    def instance_retrieval(self, c: Concept):
        if c.instances is None:
            return self._concept_generator.instance_retrieval(c)
        return c.instances

    def instance_retrieval_from_iterable(self, nodes: Iterable):
        return [self.instance_retrieval(n.concept) for n in nodes]

    def instance_retrieval_parallel_from_iterable(self, nodes: Iterable):
        """
        with multiprocessing.Pool(processes=4) as executor:
            instances = executor.map(self.concept_generator.instance_retrieval_node, nodes)
        return instances

        with concurrent.futures.ThreadPoolExecutor() as executor:
            instances = executor.map(self.concept_generator.instance_retrieval_node, nodes)
        return instances

        => The least efficient.
        with concurrent.futures.ProcessPoolExecutor() as executor:
            instances = executor.map(self.concept_generator.instance_retrieval_node, nodes)
        return instances
        """
        with multiprocessing.Pool(processes=4) as executor:
            instances = executor.map(
                self._concept_generator.instance_retrieval_node, nodes)
        return instances

    def clean(self):
        """
        Clear all stored values if there are any.
        @return:
        """

    def __concept_hierarchy_fill(self, owl_concept, our_concept):
        """
        our_concept can not be Nothing or Thing
        """
        has_sub_concept = False

        # 3. Get all sub concepts of input concept.
        for owlready_subclass_concept_A in owl_concept.descendants(
                include_self=False):
            if owlready_subclass_concept_A.name in [
                    'Nothing', 'Thing', '⊤', '⊥'
            ]:
                raise ValueError
            has_sub_concept = True
            # 3.2 Map them into the corresponding our Concept objects.
            subclass_concept_A = self.uri_to_concept[
                owlready_subclass_concept_A.iri]
            # 3.3. Add all our sub concepts into the top-down concept hierarchy.
            self.top_down_concept_hierarchy[our_concept].add(
                subclass_concept_A)
            self.down_top_concept_hierarchy[subclass_concept_A].add(
                our_concept)

        # 4. Get all super concepts of input concept.
        for owlready_superclass_concept_A in owl_concept.ancestors(
                include_self=False):
            if owlready_superclass_concept_A.name == 'Thing' and len(
                [i for i in owl_concept.ancestors(include_self=False)]) == 1:
                self.top_down_direct_concept_hierarchy[self.thing].add(
                    our_concept)
                self.down_top_direct_concept_hierarchy[our_concept].add(
                    self.thing)
            else:
                # 3.2 Map them into the corresponding our Concept objects.
                superclass_concept_A = self.uri_to_concept[
                    owlready_superclass_concept_A.iri]
                # 3.3. Add all our super concepts into the down-top concept hierarchy.
                self.down_top_concept_hierarchy[our_concept].add(
                    superclass_concept_A)
                self.top_down_concept_hierarchy[superclass_concept_A].add(
                    our_concept)

        # 4. If concept does not have any sub concept, then concept is a leaf concept.
        #  Every leaf concept is directly related to Nothing.
        if has_sub_concept is False:
            self.top_down_direct_concept_hierarchy[our_concept].add(
                self.nothing)
            self.down_top_direct_concept_hierarchy[self.nothing].add(
                our_concept)

    def __direct_concept_hierarchy_fill(self, owlready_concept_A, concept_A,
                                        onto):
        for owlready_direct_subclass_concept_A in owlready_concept_A.subclasses(
                world=onto.world):  # returns direct subclasses
            if owlready_concept_A == owlready_direct_subclass_concept_A:
                print(owlready_concept_A)
                print(owlready_direct_subclass_concept_A)
                raise ValueError
            direct_subclass_concept_A = self.uri_to_concept[
                owlready_direct_subclass_concept_A.iri]

            self.top_down_direct_concept_hierarchy[concept_A].add(
                direct_subclass_concept_A)
            self.down_top_direct_concept_hierarchy[
                direct_subclass_concept_A].add(concept_A)

    def __build_hierarchy(self, onto: Ontology) -> None:
        """
        Builds concept sub and super classes hierarchies.

        1) self.top_down_concept_hierarchy is a mapping from Concept objects to a set of Concept objects that are
        direct subclasses of given Concept object.

        2) self.down_top_concept_hierarchy is a mapping from Concept objects to set of Concept objects that are
        direct superclasses of given Concept object.
        """
        # 1. (Mapping from string URI to Class Expressions, Thing Concept, Nothing Concept
        self.uri_to_concept, self.thing, self.nothing = parse_tbox_into_concepts(
            onto)
        assert len(self.uri_to_concept) > 2

        assert self.thing.iri == 'http://www.w3.org/2002/07/owl#Thing'
        assert self.thing.name == '⊤'

        assert self.nothing.iri == 'http://www.w3.org/2002/07/owl#Nothing'
        assert self.nothing.name == '⊥'

        self.individuals = self.thing.instances
        self.down_top_concept_hierarchy[self.thing] = set()

        for IRI, concept_A in self.uri_to_concept.items(
        ):  # second loop over concepts in the execution,
            assert IRI == concept_A.iri
            try:
                assert len(onto.search(iri=IRI)) == 1
            except AssertionError:
                # Thing and Nothing is not added into hierarchy
                assert IRI in [
                    'http://www.w3.org/2002/07/owl#Thing',
                    'http://www.w3.org/2002/07/owl#Nothing'
                ]
                assert concept_A.name in ['⊤', '⊥']
                continue
            owlready_concept_A = onto.search(iri=concept_A.iri)[0]
            assert owlready_concept_A.iri == concept_A.iri
            self.__concept_hierarchy_fill(owlready_concept_A, concept_A)
            self.__direct_concept_hierarchy_fill(owlready_concept_A, concept_A,
                                                 onto)

            # All concepts are subsumed by Thing.
            self.top_down_concept_hierarchy[self.thing].add(concept_A)
            self.down_top_concept_hierarchy[concept_A].add(self.thing)

            # All concepts subsume Nothing.
            self.top_down_concept_hierarchy[concept_A].add(self.nothing)
            self.down_top_concept_hierarchy[self.nothing].add(concept_A)

        self.top_down_concept_hierarchy[self.thing].add(self.nothing)
        self.down_top_concept_hierarchy[self.nothing].add(self.thing)

        ################################################################################################################
        # Sanity checking
        # 1. Did we parse classes correctly ?
        owlready2_classes = {i.iri for i in onto.classes()}
        our_classes = {k for k, v in self.uri_to_concept.items()}
        try:
            assert our_classes.issuperset(owlready2_classes) and (
                our_classes.difference(owlready2_classes) == {
                    'http://www.w3.org/2002/07/owl#Thing',
                    'http://www.w3.org/2002/07/owl#Nothing'
                })
        except AssertionError:
            raise AssertionError('Assertion error => at superset checking.')

        try:
            # Thing subsumes all parsed concepts except itself.
            assert len(self.top_down_concept_hierarchy[self.thing]) == (
                len(our_classes) - 1)
            assert len(self.down_top_concept_hierarchy[self.nothing]) == (
                len(our_classes) - 1)
        except AssertionError:
            raise AssertionError(
                'Assertion error => at concept hierarchy checking.')

        # start from here
        try:
            assert len(self.down_top_concept_hierarchy[self.nothing]) == (
                len(our_classes) - 1)
            assert len(self.top_down_direct_concept_hierarchy[self.thing]) >= 1
        except AssertionError:
            raise AssertionError(
                'Assertion error => total number of parsed concept checking')

        # 2. Did we create top down direct concept hierarchy correctly ?
        for concept, direct_sub_concepts in self.top_down_direct_concept_hierarchy.items(
        ):
            for dsc in direct_sub_concepts:
                assert concept.instances.issuperset(dsc.instances)

        # 3. Did we create top down concept hierarchy correctly ?
        for concept, direct_sub_concepts in self.top_down_concept_hierarchy.items(
        ):
            for dsc in direct_sub_concepts:
                assert concept.instances.issuperset(dsc.instances)

        # 4. Did we create down top direct concept hierarchy correctly ?
        for concept, direct_super_concepts in self.down_top_direct_concept_hierarchy.items(
        ):
            for dsc in direct_super_concepts:
                assert concept.instances.issubset(dsc.instances)

        # 5. Did we create down top concept hierarchy correctly ?
        for concept, direct_super_concepts in self.down_top_concept_hierarchy.items(
        ):
            for dsc in direct_super_concepts:
                try:
                    assert concept.instances.issubset(dsc.instances)
                except AssertionError:
                    raise AssertionError('Subset error')

    def parse(self):
        """
        Top-down and bottom-up hierarchies are constructed from the owlready2.Ontology
        """
        self.__build_hierarchy(self.onto)

    # OPERATIONS
    def negation(self, concept: Concept) -> Concept:
        """ Return a Concept object that is a negation of given concept."""
        return self._concept_generator.negation(concept)

    def union(self, conceptA: Concept, conceptB: Concept) -> Concept:
        """Return a concept c == (conceptA OR conceptA)"""
        return self._concept_generator.union(conceptA, conceptB)

    def intersection(self, conceptA: Concept, conceptB: Concept) -> Concept:
        """Return a concept c == (conceptA AND conceptA)"""
        return self._concept_generator.intersection(conceptA, conceptB)

    def existential_restriction(self, concept: Concept, property_) -> Concept:
        """Return a concept c == (\exists R.C)"""
        return self._concept_generator.existential_restriction(
            concept, property_)

    def universal_restriction(self, concept: Concept, property_) -> Concept:
        """Return a concept c == (\forall R.C)"""
        return self._concept_generator.universal_restriction(
            concept, property_)

    @staticmethod
    def is_atomic(c: owlready2.entity.ThingClass):
        """
        Check whether input owlready2 concept object is atomic concept.
        This is a workaround
        @param c:
        @return:
        """
        assert isinstance(c, owlready2.entity.ThingClass)
        if '¬' in c.name and not (' ' in c.name):
            return False
        elif ' ' in c.name or '∃' in c.name or '∀' in c.name:
            return False
        else:
            return True

    def get_leaf_concepts(self, concept: Concept) -> Generator:
        """ Return : { x | (x subClassOf concept) AND not exist y: y subClassOf x )} """
        assert isinstance(concept, Concept)
        for leaf in self.concepts_to_leafs[concept]:
            yield leaf

    @parametrized_performance_debugger()
    def negation_from_iterables(self, s: Iterable) -> Generator:
        """ Return : { x | ( x \equiv not s} """
        assert isinstance(s, Iterable)
        for item in s:
            yield self._concept_generator.negation(item)

    # @parametrized_performance_debugger()
    def get_direct_sub_concepts(self, concept: Concept) -> Generator:
        """ Return : { x | ( x subClassOf concept )} """
        assert isinstance(concept, Concept)
        yield from self.top_down_direct_concept_hierarchy[concept]

    def get_all_sub_concepts(self, concept: Concept):
        """ Return : { x | ( x subClassOf concept ) OR ..."""
        assert isinstance(concept, Concept)
        yield from self.top_down_concept_hierarchy[concept]

    def get_direct_parents(self, concept: Concept) -> Generator:
        """ Return : { x | (concept subClassOf x)} """
        assert isinstance(concept, Concept)
        yield from self.down_top_direct_concept_hierarchy[concept]

    def get_parents(self, concept: Concept) -> Generator:
        """ Return : { x | (concept subClassOf x)} """
        yield from self.down_top_concept_hierarchy[concept]

    def most_general_existential_restrictions(self,
                                              concept: Concept) -> Generator:
        """ Return : { \exist.r.x | r \in MostGeneral r} """
        assert isinstance(concept, Concept)
        for prob in self.property_hierarchy.get_most_general_property():
            yield self._concept_generator.existential_restriction(
                concept, prob)

    def union_from_iterables(self, concept_a: Iterable, concept_b: Iterable):
        temp = set()
        seen = set()
        for i in concept_a:
            for j in concept_b:
                if (i.name, j.name) in seen:
                    continue

                u = self._concept_generator.union(i, j)
                seen.add((i.name, j.name))
                seen.add((j.name, i.name))
                temp.add(u)
        return temp

    def intersect_from_iterables(self, concept_a, concept_b):
        temp = set()
        seen = set()
        for i in concept_a:
            for j in concept_b:
                if (i.name, j.name) in seen:
                    continue

                and_ = self._concept_generator.intersection(i, j)
                seen.add((i.name, j.name))
                seen.add((j.name, i.name))
                temp.add(and_)
        return temp

    def most_general_universal_restrictions(self,
                                            concept: Concept) -> Generator:
        """ Return : { \forall.r.x | r \in MostGeneral r} """
        assert isinstance(concept, Concept)
        for prob in self.property_hierarchy.get_most_general_property():
            yield self._concept_generator.universal_restriction(concept, prob)
Example #14
 def __init__(self, data_dir_path):
     my_world = World()
     # path to the owl file is given here
     my_world.get_ontology("file://%s" % data_dir_path).load()
     sync_reasoner(my_world)  # reasoner is started and synchronized here
     self.graph = my_world.as_rdflib_graph()
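A brief usage sketch; the enclosing class is not shown above, so the name OwlGraph is hypothetical:

og = OwlGraph("/path/to/data.owl")   # hypothetical class name, placeholder path
rows = list(og.graph.query_owlready(
    "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
    "SELECT ?c WHERE { ?c a owl:Class . }"))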
Example #15
def update_onto_limits(var_boundaries):
    #    print("Updating boundaries")
    #    print(var_boundaries)
    # creating a new world to isolate the reasoning results
    new_world = World()
    # Loading our ontology
    onto = new_world.get_ontology(onto_dir_path).load()

    # Updating DA variables
    onto.Variable_Dil1_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_dil1'])
    onto.Variable_AGV_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_agv_in'])
    onto.Variable_DQO_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_dqo_in'])
    onto.Variable_Biomasa_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_biomasa_x'])
    onto.Variable_DQO_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_dqo_out'])
    onto.Variable_AGV_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_agv_out'])
    onto.Variable_Dil1_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_dil1'])
    onto.Variable_AGV_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_agv_in'])
    onto.Variable_DQO_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_dqo_in'])
    onto.Variable_Biomasa_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_biomasa_x'])
    onto.Variable_DQO_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_dqo_out'])
    onto.Variable_AGV_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_agv_out'])

    # Updating MEC variables
    onto.Variable_Dil2_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_dil2'])
    onto.Variable_Eapp_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_eapp'])
    onto.Variable_Ace_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_ace'])
    onto.Variable_xa_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_xa'])
    onto.Variable_xm_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_xm'])
    onto.Variable_xh_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_xh'])
    onto.Variable_mox_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_mox'])
    onto.Variable_imec_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_imec'])
    onto.Variable_QH2_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_qh2'])
    onto.Variable_Dil2_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_dil2'])
    onto.Variable_Eapp_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_eapp'])
    onto.Variable_Ace_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_ace'])
    onto.Variable_xa_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_xa'])
    onto.Variable_xm_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_xm'])
    onto.Variable_xh_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_xh'])
    onto.Variable_mox_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_mox'])
    onto.Variable_imec_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_imec'])
    onto.Variable_QH2_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_qh2'])

    onto.save(onto_dir_path, format="rdfxml")

    print("limits updated")
    print()
Example #16
def get_ontology_from_database(iri, db_dir_name, exclusive=True) -> Ontology:
    my_world = World()
    my_world.set_backend(filename=db_dir_name, exclusive=exclusive)
    return my_world.get_ontology(iri).load()
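A minimal usage sketch, assuming the SQLite quadstore already contains an ontology with the given IRI (both values are placeholders):

onto = get_ontology_from_database(
    iri="http://example.org/onto.owl",           # placeholder IRI stored in the quadstore
    db_dir_name="/path/to/quadstore.sqlite3",    # placeholder path to the SQLite file
    exclusive=False)                             # allow other processes to open the same file
print(list(onto.classes()))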