Example no. 1
0
 def delete_datasets(self):
     """Remove every dataset in self.datasets from the triplestore and the database."""
     query_launcher = SparqlQueryLauncher(self.app, self.session)

     for current in self.datasets:
         # Drop the graph from the triplestore first, retrying up to
         # 3 times with a 1 second pause on failure
         if current.graph_name:
             Utils.redo_if_failure(self.log, 3, 1,
                                   query_launcher.drop_dataset,
                                   current.graph_name)
         # Then remove the database record
         current.delete_from_db()
Example no. 2
0
    def delete_datasets(self):
        """Remove the datasets from both the triplestore and the database.

        For each dataset the RDF graph is dropped (with retries), the
        database row is deleted, and the cached abstraction is invalidated.
        """
        query_launcher = SparqlQueryLauncher(self.app, self.session)
        explorer = TriplestoreExplorer(self.app, self.session)

        for current in self.datasets:
            # Drop the dataset graph from the triplestore, retrying up
            # to 3 times with a 1 second pause on failure
            if current.graph_name:
                Utils.redo_if_failure(self.log, 3, 1,
                                      query_launcher.drop_dataset,
                                      current.graph_name)
            # Remove the corresponding database entry
            current.delete_from_db()

            # Invalidate the cached abstraction (scope follows the dataset)
            explorer.uncache_abstraction(public=current.public)
Example no. 3
0
    def delete_user_rdf(self, username):
        """Drop all RDF graphs belonging to a user from the triplestore.

        NOTE(review): the previous docstring claimed DB, triplestore and
        filesystem deletion, but this method only drops triplestore
        graphs — DB/filesystem cleanup presumably happens elsewhere.

        Parameters
        ----------
        username : string
            Name of the user whose graphs are dropped
        """
        explorer = TriplestoreExplorer(self.app, self.session)
        launcher = SparqlQueryLauncher(self.app, self.session)

        # Drop every graph owned by the user, retrying each drop up to
        # 3 times with a 1 second pause on failure
        for user_graph in explorer.get_graph_of_user(username):
            Utils.redo_if_failure(self.log, 3, 1, launcher.drop_dataset, user_graph)
Example no. 4
0
    def integrate(self, public=False):
        """Integrate the file into the triplestore.

        Parameters
        ----------
        public : bool, optional
            If True, integrate into the public graph, otherwise private
        """
        query_launcher = SparqlQueryLauncher(self.app, self.session)
        explorer = TriplestoreExplorer(self.app, self.session)

        self.public = public

        upload_method = self.settings.get('triplestore', 'upload_method')

        # Parse the file content into an RDF graph
        self.graph_chunk.parse(self.path, format=self.type_dict[self.type])

        # Extract the metadata, then strip it from the data graph
        self.set_metadata()
        self.delete_metadata_location()

        # Store the metadata triples
        query_launcher.insert_data(self.graph_metadata, self.file_graph, metadata=True)

        if upload_method == "load":
            # Serialize the graph into a temporary file and bulk-load it,
            # retrying up to 5 times with a 5 second pause on failure
            temp_file_name = 'tmp_{}_{}.{}'.format(Utils.get_random_string(5),
                                                   self.name,
                                                   self.rdf_extention)
            Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                  self.graph_chunk, temp_file_name)
        else:
            # Otherwise use a SPARQL insert, with the same retry policy
            Utils.redo_if_failure(self.log, 5, 5, query_launcher.insert_data,
                                  self.graph_chunk, self.file_graph)

        # Invalidate the cached abstraction so it is rebuilt on next use
        explorer.uncache_abstraction(public=self.public)

        self.set_triples_number()
Example no. 5
0
    def integrate(self, dataset_id=None):
        """Integrate the file into the triplestore.

        Metadata is inserted first, then the content is inserted chunk by
        chunk (each chunk is flushed once it reaches max_chunk_size), and
        finally the abstraction and domain-knowledge triples are inserted.

        Parameters
        ----------
        dataset_id : optional
            Dataset identifier used to report integration progress in the
            database; when None, no progress is reported
        """
        sparql = SparqlQueryLauncher(self.app, self.session)

        # Insert metadata
        sparql.insert_data(self.get_metadata(), self.file_graph, metadata=True)

        content_generator = self.generate_rdf_content()

        # Insert content, flushing a chunk each time it grows large enough
        chunk_number = 0

        for _ in content_generator:

            if self.graph_chunk.ntriple >= self.max_chunk_size:

                if self.graph_chunk.percent and dataset_id:
                    self.update_percent_in_db(self.graph_chunk.percent,
                                              dataset_id)

                self._integrate_chunk(sparql, chunk_number)

                chunk_number += 1
                # Start a fresh chunk for the remaining content
                self.graph_chunk = RdfGraph(self.app, self.session)

        # Flush the last (possibly partial) chunk
        if self.graph_chunk.percent and dataset_id:
            self.update_percent_in_db(100, dataset_id)

        self._integrate_chunk(sparql, chunk_number)

        # Content is inserted, now insert abstraction and domain_knowledge
        self.set_rdf_abstraction_domain_knowledge()

        if self.method == 'load':

            temp_file_name = 'tmp_{}_{}_abstraction_domain_knowledge.{}'.format(
                Utils.get_random_string(5), self.name, self.rdf_extention)

            self.load_graph(self.graph_abstraction_dk, temp_file_name)
        else:
            # Insert
            sparql.insert_data(self.graph_abstraction_dk, self.file_graph)

        self.set_triples_number()

    def _integrate_chunk(self, sparql, chunk_number):
        """Send the current graph chunk to the triplestore.

        When self.method is 'load', the chunk is written to a temporary
        file and bulk-loaded; otherwise a SPARQL insert is used. Either
        way the operation is retried up to 5 times with a 5 second pause
        on failure.

        Parameters
        ----------
        sparql : SparqlQueryLauncher
            Query launcher used for the SPARQL insert path
        chunk_number : int
            Index of the chunk, used to build a unique temp file name
        """
        if self.method == 'load':
            # Write rdf into a tmpfile and load it
            temp_file_name = 'tmp_{}_{}_chunk_{}.{}'.format(
                Utils.get_random_string(5), self.name, chunk_number,
                self.rdf_extention)

            # Try to load data. If failure, wait 5 sec and retry 5 times
            Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                  self.graph_chunk, temp_file_name)
        else:
            # Try to insert data. If failure, wait 5 sec and retry 5 times
            Utils.redo_if_failure(self.log, 5, 5, sparql.insert_data,
                                  self.graph_chunk, self.file_graph)