Example 1
 def __init__(self, iri="http://www.example.org/onto.owl"):
     self.typical_facts_list = list()
     self.a_box_members_list = list()
     self.scenarios_list = list()
     self.typical_members_list = list()
     self.cost_dict = dict()
     self.symptoms_dict = dict()
     self.my_world = World()
     self.big_world = World()
     self.onto = self.my_world.get_ontology(iri)
Example 2
def _read_from_source(filename: str) -> Ontology:
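    # Load ontology data from .json, .pkl or .owl; OWL files are parsed into a fresh owlready2 World.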
    name = os.path.basename(filename)
    _, ext = os.path.splitext(name)

    if ext == ".json":
        with open(filename) as json_file:
            data = json.load(json_file)

    elif ext == ".pkl":
        with open(filename, "rb") as f:
            data = pickle.load(f)

    elif ext == ".owl":
        world = World()

        try:
            ontology = world.get_ontology(filename)
            onto = ontology.load()
        except OwlReadyOntologyParsingError as ex:
            return Ontology(name, {}, filename=filename, error_msg=ex.args[0])

        with onto:
            data = _onto_to_tree(Thing, world)

    else:
        raise NotImplementedError(f"No readers for file {name}")

    assert isinstance(data, dict)
    return Ontology(name, data, filename=filename)
Example 3
def onto_get_var_limits():
    da_vars = {}
    mec_vars = {}
    # creating a new world to isolate the reasoning results
    new_world = World()
    # Loading our ontology
    onto = new_world.get_ontology(onto_dir_path).load()

    variables = onto.search(type=onto.Variable)
    for var in variables:
        try:
            if 'DA' in var.esVariableDe[0].get_name():
                da_vars[django_names[var.get_name()]] = {
                    'min': var.tieneValorMinimo,
                    'max': var.tieneValorMaximo
                }
            if 'MEC' in var.esVariableDe[0].get_name():
                mec_vars[django_names[var.get_name()]] = {
                    'min': var.tieneValorMinimo,
                    'max': var.tieneValorMaximo
                }
        except Exception as e:
            print("None", e)
    return da_vars, mec_vars
Example 4
 def hermit_reason(self):
     """ load from disk, reason and return owlready2 ontology format"""
     self.world = World()
     onto = self.world.get_ontology(join(self.path,
                                         self.owl_file + ".owl")).load()
     sync_reasoner_hermit(self.world)
     return onto
Example 5
def find_candidate_instances(w2v_vectors, tagged_words, input_onto, topn):
    candidate_instances = defaultdict(list)

    world = World()
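    # Load the ontology into its own World instead of owlready2's shared default_world.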
    onto = world.get_ontology(input_onto).load()

    onto_classes = onto.classes()

    for onto_class in onto_classes:
        instances = [
            nlp.get_name_from_IRI(inst)
            for inst in onto.get_instances_of(onto_class)
        ]

        # Keep only instances that actually have a word2vec vector
        # (filtering into a new list avoids skipping items while mutating the list).
        instances = [inst for inst in instances if inst in w2v_vectors.vocab]

        similar = find_by_cos_similarity(w2v_vectors, instances, onto_class,
                                         topn)

        similar = filter_by_pos(similar, instances, tagged_words)

        for s in similar[:]:
            if s[1] <= 0.42:
                similar.remove(s)

        candidate_instances[onto_class] = similar

    return candidate_instances
Example 6
    def ExecuteReasoner(self):
        # Load the input ontology into a fresh World, run Pellet, and save the inferred facts.
        world = World()
        try:
            world.get_ontology("file://" + self.input).load()

            sync_reasoner_pellet(world, infer_property_values=True)
            world.save(file=self.output)
            return True
        except Exception:
            return False
Example 7
 def sparql_query_owlready(self, query):
     """
     ""SELECT ?p WHERE {
       <http://www.semanticweb.org/jiba/ontologies/2017/0/test#ma_pizza> <http://www.semanticweb.org/jiba/ontologies/2017/0/test#price> ?p .
     }""
     """
     my_world = World()
     onto = my_world.get_ontology(
         join(self._onto.path, self._onto.owl_file + ".owl")).load()
     graph = my_world.as_rdflib_graph()
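     # query_owlready() returns owlready2 entities and plain Python values rather than raw rdflib terms.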
     return list(graph.query_owlready(query))
Example 8
def populate_ontology(candidate_instances, input_onto, output_onto):
    world = World()

    onto = world.get_ontology(input_onto).load()

    for onto_class, instances in candidate_instances.items():

        print_class_and_similar(onto_class, instances)

        for inst in instances:
            _save_instance(onto, onto_class, inst)

    onto.save(file=output_onto)
Example 9
 def convert_worker(self, filename):
     db_path = os.path.join(
         self.db_dir, '.'.join(
             (os.path.splitext(os.path.basename(filename))[0], "sqlite3")))
     if not os.path.isfile(db_path):
         self.Print(cDebug.LEVEL_DEVELOPMENT, 'Convert: ' + db_path)
         my_world = World()
         my_world.set_backend(filename=db_path)
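         # Loading the ontology now writes its triples into the SQLite-backed quadstore at db_path.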
         my_world.get_ontology('file://' + filename).load()
         my_world.save()
         if self.remove_source:
             os.remove(filename)
     return db_path
Example 10
    def __init__(self, owl_file, path, endpoint=None, inspector=None):
        self.owl_file = owl_file.replace(".owl", "")
        self.path = path
        self._onto = GenericOntology(join(self.path, self.owl_file))
        self.world = World()
        self.world.get_ontology(join(self.path, self.owl_file + ".owl")).load()

        self.endpoint = endpoint
        self.inspector = inspector or SPARQLQueries(endpoint, self)

        self.list_labels = set()  # A unique set of ontology labels
        self.superclass_for_class = {}
        self.__load_subclasses__()
        self.__nsmap = {}  # mapping of namespaces
Example 11
def get_ontology_from_local_file(filename: str = '',
                                 db_dir: str = '',
                                 db_dir_name: str = '',
                                 use_owl_world: bool = True) -> Ontology:
    filename_with_prefix = 'file://' + filename

    if use_owl_world:
        if not os.path.isdir(db_dir):
            FL.CreateDir(db_dir)
        my_world = World()
        my_world.set_backend(filename=db_dir_name)
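        # Persist this World's quadstore in the given SQLite file so later runs can reuse it.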

        return my_world.get_ontology(filename_with_prefix).load()
    else:
        return get_ontology(filename_with_prefix).load()
Example 12
    def __init__(self, path):
        super().__init__()
        self.path = path
        self.world = World()
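        # A dedicated World keeps this ontology separate from owlready2's default_world.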
        self.onto = self.world.get_ontology('file://' +
                                            self.path).load(reload=True)
        self.property_hierarchy = PropertyHierarchy(self.onto)
        self.name = self.onto.name
        self.parse()
        self._concept_generator = ConceptGenerator(
            concepts=self.uri_to_concept,
            thing=self.thing,
            nothing=self.nothing,
            onto=self.onto)

        self.describe()
Example 13
    def __init__(self,
                 path,
                 min_size_of_concept=1,
                 max_concept_size_ratio=1.0,
                 use_pellet=False):
        self.path = path
        self.onto = World().get_ontology(self.path).load(reload=True)

        if use_pellet:
            with self.onto:
                sync_reasoner_pellet(x=self.onto.world,
                                     infer_property_values=True,
                                     infer_data_property_values=True)

        self.name = self.onto.name
        self.concepts = dict()
        self.thing = None
        self.nothing = None

        # Next time, think about including this in Concepts.
        self.top_down_concept_hierarchy = defaultdict(set)
        self.top_down_direct_concept_hierarchy = defaultdict(set)
        self.down_top_concept_hierarchy = defaultdict(set)
        self.down_top_direct_concept_hierarchy = defaultdict(set)
        self.concepts_to_leafs = defaultdict(set)
        self.property_hierarchy = None
        self.parse()

        self.min_size_of_concept = min_size_of_concept

        self.max_size_of_concept = len(
            self.thing.instances) * max_concept_size_ratio

        self.role_log = dict()
        self.role_log_cardinality = dict()
        self.__concept_generator = ConceptGenerator(
            self,
            concepts=self.concepts,
            T=self.thing,
            Bottom=self.nothing,
            onto=self.onto,
            min_size_of_concept=self.min_size_of_concept,
            max_size_of_concept=self.max_size_of_concept)
Example 14
    def __iter__(self) -> Iterator[Optional[Tuple[Ontology, Scene]]]:
        for index, scene in enumerate(self.variation_dimensions):
            self.iterations += 1
            if scene is not None:
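                # A fresh in-memory SQLite world per scene keeps each reasoning run isolated.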
                world = World(backend='sqlite', filename=':memory:', dbname=f"scene_db_{index:04}")
                with world.get_ontology(self.base_iri) as onto:
                    self.domain_factory(onto)
                    self.instantiate_scene(scene, onto)

                    try:
                        sync_reasoner_pellet(x=world,
                                             infer_data_property_values=True,
                                             infer_property_values=True,
                                             debug=self.debug)
                    except Exception as e:
                        onto.save("error.rdf.xml")
                        raise e
                    yield onto, scene
            else:
                yield None
            if self.max_tries is not None and self.iterations >= self.max_tries:
                break
Example 15
def reasoner(data):
    # Create a new world to isolate the reasoning results
    new_world = World()
    # Loading the ontology
    onto = new_world.get_ontology(onto_dir_path).load()

    # Set the sensed/proposed values on the Lectura and Variable individuals used by the rules
    onto.Variable_Dil1_Entrada.tieneValorPropuesto = float(data[0])
    onto.Lectura_AGV_Entrada.tieneValorCensado = float(data[1])
    onto.Lectura_DQO_Entrada.tieneValorCensado = float(data[2])
    onto.Lectura_Biomasa_Salida.tieneValorCensado = float(data[3])
    onto.Lectura_DQO_Salida.tieneValorCensado = float(data[4])
    onto.Lectura_AGV_Salida.tieneValorCensado = float(data[5])

    onto.Variable_Dil2_Entrada.tieneValorPropuesto = float(data[7])
    onto.Lectura_Ace_Salida.tieneValorCensado = float(data[9])
    onto.Lectura_xa_Salida.tieneValorCensado = float(data[10])
    onto.Lectura_xm_Salida.tieneValorCensado = float(data[11])
    onto.Lectura_xh_Salida.tieneValorCensado = float(data[12])
    onto.Lectura_mox_Salida.tieneValorCensado = float(data[13])
    onto.Lectura_imec_Salida.tieneValorCensado = float(data[14])
    onto.Lectura_QH2_Salida.tieneValorCensado = float(data[15])

    # Apply the rules using pellet reasoner
    sync_reasoner_pellet(onto,
                         infer_data_property_values=True,
                         infer_property_values=True,
                         debug=0)

    # Get new states for each process
    infered_states = get_infered_states(onto)

    return json.dumps(infered_states), onto
Example 16
 def create_new_world(self):
     self.onto.destroy()
     self.big_world = World()
     self.onto = self.big_world.get_ontology(
         "file://" + PATH_TO_ONTO + "//ontoBase.owl").load()
Example 17
def update_onto_limits(var_boundaries):
    # Create a new world to isolate the reasoning results
    new_world = World()
    # Loading our ontology
    onto = new_world.get_ontology(onto_dir_path).load()

    # Updating DA variables
    onto.Variable_Dil1_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_dil1'])
    onto.Variable_AGV_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_agv_in'])
    onto.Variable_DQO_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_dqo_in'])
    onto.Variable_Biomasa_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_biomasa_x'])
    onto.Variable_DQO_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_dqo_out'])
    onto.Variable_AGV_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['da_agv_out'])
    onto.Variable_Dil1_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_dil1'])
    onto.Variable_AGV_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_agv_in'])
    onto.Variable_DQO_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_dqo_in'])
    onto.Variable_Biomasa_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_biomasa_x'])
    onto.Variable_DQO_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_dqo_out'])
    onto.Variable_AGV_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['da_agv_out'])

    # Updating MEC variables
    onto.Variable_Dil2_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_dil2'])
    onto.Variable_Eapp_Entrada.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_eapp'])
    onto.Variable_Ace_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_ace'])
    onto.Variable_xa_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_xa'])
    onto.Variable_xm_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_xm'])
    onto.Variable_xh_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_xh'])
    onto.Variable_mox_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_mox'])
    onto.Variable_imec_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_imec'])
    onto.Variable_QH2_Salida.tieneValorMinimo = float(
        var_boundaries.loc['min']['mec_qh2'])
    onto.Variable_Dil2_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_dil2'])
    onto.Variable_Eapp_Entrada.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_eapp'])
    onto.Variable_Ace_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_ace'])
    onto.Variable_xa_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_xa'])
    onto.Variable_xm_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_xm'])
    onto.Variable_xh_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_xh'])
    onto.Variable_mox_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_mox'])
    onto.Variable_imec_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_imec'])
    onto.Variable_QH2_Salida.tieneValorMaximo = float(
        var_boundaries.loc['max']['mec_qh2'])

    onto.save(onto_dir_path, format="rdfxml")

    print("limits updated")
    print()
Example 18
def get_ontology_from_database(iri, db_dir_name, exclusive=True) -> Ontology:
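    # Open the SQLite quadstore given by db_dir_name and load the ontology identified by iri from it.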
    my_world = World()
    my_world.set_backend(filename=db_dir_name, exclusive=exclusive)
    return my_world.get_ontology(iri).load()
Example 19
 def __init__(self, data_dir_path):
     my_world = World()
     # path to the owl file is given here
     my_world.get_ontology("file://%s" % data_dir_path).load()
     sync_reasoner(my_world)  # run the reasoner and add the inferred facts to the world
     self.graph = my_world.as_rdflib_graph()