Example no. 1
0
    def toggle_public(self, new_status):
        """Change public status of a dataset (triplestore and db)

        Parameters
        ----------
        new_status : bool
            True if public
        """
        sparql_query = SparqlQuery(self.app, self.session)
        explorer = TriplestoreExplorer(self.app, self.session)

        # The triplestore stores the flag as a literal "true"/"false"
        sparql_query.toggle_public(self.graph_name, "true" if new_status else "false")

        # Mirror the change into the relational database
        db = Database(self.app, self.session)
        sql = '''
        UPDATE datasets SET
        public=?
        WHERE user_id = ? AND id = ?
        '''
        db.execute_sql_query(sql, (new_status, self.session["user"]["id"], self.id))

        # Drop the cached abstraction so the new visibility takes effect
        explorer.uncache_abstraction()
Example no. 2
0
    def toggle_public(self, new_status, admin=False):
        """Change public status of a dataset (triplestore and db)

        Parameters
        ----------
        new_status : bool
            True if public
        """
        sparql_query = SparqlQuery(self.app, self.session)
        explorer = TriplestoreExplorer(self.app, self.session)

        # Triplestore keeps the flag as a literal "true"/"false"
        sparql_query.toggle_public(self.graph_name, "true" if new_status else "false")

        # An admin caller may toggle any dataset; everyone else is
        # restricted to rows owned by the current user.
        if admin and self.session['user']['admin']:
            where_query = ""
            query_params = (new_status, self.id)
        else:
            where_query = "AND user_id = ?"
            query_params = (new_status, self.id, self.session["user"]["id"])

        # Mirror the change into the relational database
        db = Database(self.app, self.session)
        sql = '''
        UPDATE datasets SET
        public=?
        WHERE id = ?
        {}
        '''.format(where_query)

        db.execute_sql_query(sql, query_params)

        # Drop the cached abstraction so the new visibility takes effect
        explorer.uncache_abstraction()
Example no. 3
0
    def delete_datasets(self):
        """Delete the datasets from the database and the triplestore."""
        launcher = SparqlQueryLauncher(self.app, self.session)
        explorer = TriplestoreExplorer(self.app, self.session)

        for ds in self.datasets:
            # Drop the graph from the triplestore; retry up to 3 times,
            # waiting 1s between attempts.
            if ds.graph_name:
                Utils.redo_if_failure(self.log, 3, 1, launcher.drop_dataset,
                                      ds.graph_name)

            # Remove the corresponding database row
            ds.delete_from_db()

            # Invalidate the cached abstraction for this dataset's scope
            explorer.uncache_abstraction(public=ds.public)
Example no. 4
0
    def integrate(self, public=False):
        """Integrate the file into the triplestore

        Parameters
        ----------
        public : bool, optional
            Integrate in private or public graph
        """
        launcher = SparqlQueryLauncher(self.app, self.session)
        explorer = TriplestoreExplorer(self.app, self.session)

        self.public = public

        upload_method = self.settings.get('triplestore', 'upload_method')

        # Parse the source file into an RDF graph
        self.graph_chunk.parse(self.path, format=self.type_dict[self.type])

        # Extract metadata, then strip its location info from the data
        self.set_metadata()
        self.delete_metadata_location()

        # Metadata goes in first
        launcher.insert_data(self.graph_metadata, self.file_graph, metadata=True)

        if upload_method == "load":
            # Serialize the graph to a temp file and bulk-load it
            temp_file_name = 'tmp_{}_{}.{}'.format(Utils.get_random_string(5),
                                                   self.name,
                                                   self.rdf_extention)

            # On failure, wait 5 seconds and retry, up to 5 times
            Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                  self.graph_chunk, temp_file_name)
        else:
            # SPARQL INSERT path — same retry policy as the load path
            Utils.redo_if_failure(self.log, 5, 5, launcher.insert_data,
                                  self.graph_chunk, self.file_graph)

        # Invalidate the cached abstraction now that content changed
        explorer.uncache_abstraction(public=self.public)

        self.set_triples_number()
Example no. 5
0
    def integrate(self, dataset_id=None):
        """Integrate the file into the triplestore

        Streams RDF content and flushes it in chunks of at most
        ``self.max_chunk_size`` triples, then inserts the
        abstraction/domain-knowledge graph.

        Parameters
        ----------
        dataset_id : optional
            When set, integration progress percentages are written to the
            database for this dataset.
        """
        sparql = SparqlQueryLauncher(self.app, self.session)
        tse = TriplestoreExplorer(self.app, self.session)

        # insert metadata
        self.set_metadata()
        sparql.insert_data(self.graph_metadata, self.file_graph, metadata=True)

        # NOTE(review): presumably the generator fills self.graph_chunk as a
        # side effect while iterating — confirm in generate_rdf_content()
        content_generator = self.generate_rdf_content()

        # Insert content
        chunk_number = 0

        for _ in content_generator:

            # Flush the accumulated chunk once it reaches the size threshold
            if self.graph_chunk.ntriple >= self.max_chunk_size:

                # Report progress only when a dataset row exists to update
                if self.graph_chunk.percent and dataset_id:
                    self.update_percent_in_db(self.graph_chunk.percent,
                                              dataset_id)

                if self.method == 'load':

                    # write rdf into a tmpfile and load it
                    temp_file_name = 'tmp_{}_{}_chunk_{}.{}'.format(
                        Utils.get_random_string(5), self.name, chunk_number,
                        self.rdf_extention)

                    # Try to load data. if failure, wait 5 sec and retry 5 time
                    Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                          self.graph_chunk, temp_file_name)
                else:
                    # Insert
                    # Try to insert data. if failure, wait 5 sec and retry 5 time
                    Utils.redo_if_failure(self.log, 5, 5, sparql.insert_data,
                                          self.graph_chunk, self.file_graph)

                # Start a fresh chunk for the next batch of triples
                chunk_number += 1
                self.graph_chunk = RdfGraph(self.app, self.session)

        # Load the last chunk (possibly smaller than max_chunk_size);
        # report 99% here, 100% only after abstraction is inserted below
        if self.graph_chunk.percent and dataset_id:
            self.update_percent_in_db(99, dataset_id)

        if self.method == 'load':
            temp_file_name = 'tmp_{}_{}_chunk_{}.{}'.format(
                Utils.get_random_string(5), self.name, chunk_number,
                self.rdf_extention)

            # Try to load data. if failure, wait 5 sec and retry 5 time
            Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                  self.graph_chunk, temp_file_name)
        else:
            # Insert
            # Try to insert data. if failure, wait 5 sec and retry 5 time
            Utils.redo_if_failure(self.log, 5, 5, sparql.insert_data,
                                  self.graph_chunk, self.file_graph)

        # Content is inserted, now insert abstraction and domain_knowledge
        self.set_rdf_abstraction_domain_knowledge()

        # Remove chached abstraction
        tse.uncache_abstraction(public=self.public)

        if self.method == 'load':

            temp_file_name = 'tmp_{}_{}_abstraction_domain_knowledge.{}'.format(
                Utils.get_random_string(5), self.name, self.rdf_extention)

            # No retry wrapper here, unlike the content chunks above
            self.load_graph(self.graph_abstraction_dk, temp_file_name)
        else:
            # Insert
            sparql.insert_data(self.graph_abstraction_dk, self.file_graph)

        # NOTE(review): called even when dataset_id is None, unlike the
        # guarded progress updates above — confirm update_percent_in_db
        # tolerates a None dataset_id
        self.update_percent_in_db(100, dataset_id)
        self.set_triples_number()
Example no. 6
0
    def clear_abstraction_cache(self):
        """Clear cache for all users"""
        # force=True clears the public abstraction cache for everyone
        explorer = TriplestoreExplorer(self.app, self.session)
        explorer.uncache_abstraction(public=True, force=True)