Exemplo n.º 1
0
def get_abstraction():
    """Get abstraction

    Returns
    -------
    json
        diskSpace: disk space occupied by the logged user (None if unlogged)
        abstraction: abstraction
        error: True if error, else False
        errorMessage: the error message of error, else an empty string
    """
    try:
        tse = TriplestoreExplorer(current_app, session)
        abstraction = tse.get_abstraction()
        files_utils = FilesUtils(current_app, session)
        disk_space = files_utils.get_size_occupied_by_user(
        ) if "user" in session else None
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        # BUG FIX: disk_space may be unassigned here if the exception was
        # raised before it was computed (e.g. in get_abstraction()), which
        # would turn the error response into a NameError. Report None instead.
        return jsonify({
            'diskSpace': None,
            'abstraction': [],
            'error': True,
            'errorMessage': str(e)
        }), 500
    return jsonify({
        'diskSpace': disk_space,
        'abstraction': abstraction,
        'error': False,
        'errorMessage': ''
    })
Exemplo n.º 2
0
def get_abstraction():
    """Get abstraction

    Returns
    -------
    json
        abstraction: abstraction
        error: True if error, else False
        errorMessage: the error message of error, else an empty string
    """
    try:
        # Unlogged users get an empty abstraction when public data is protected
        if "user" not in session and current_app.iniconfig.getboolean("askomics", "protect_public"):
            abstraction = {}
            disk_space = None
        else:
            explorer = TriplestoreExplorer(current_app, session)
            abstraction = explorer.get_abstraction()
            files_utils = FilesUtils(current_app, session)
            disk_space = files_utils.get_size_occupied_by_user() if "user" in session else None
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        error_payload = {
            'diskSpace': None,
            'abstraction': {},
            'error': True,
            'errorMessage': str(e)
        }
        return jsonify(error_payload), 500
    success_payload = {
        'diskSpace': disk_space,
        'abstraction': abstraction,
        'error': False,
        'errorMessage': ''
    }
    return jsonify(success_payload)
Exemplo n.º 3
0
def query():
    """Get start points

    Returns
    -------
    json
        startpoint: list of start points
        error: True if error, else False
        errorMessage: the error message of error, else an empty string
    """
    try:
        explorer = TriplestoreExplorer(current_app, session)
        handler = ResultsHandler(current_app, session)

        startpoints = explorer.get_startpoints()
        public_queries = handler.get_public_queries()
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        error_payload = {
            'startpoints': [],
            "publicQueries": [],
            'error': True,
            'errorMessage': str(e)
        }
        return jsonify(error_payload), 500

    payload = {
        'startpoints': startpoints,
        "publicQueries": public_queries,
        'error': False,
        'errorMessage': ''
    }
    return jsonify(payload)
Exemplo n.º 4
0
    def toggle_public(self, new_status, admin=False):
        """Change public status of a dataset (triplestore and db)

        Parameters
        ----------
        new_status : bool
            True if public
        admin : bool, optional
            When True and the current user is an admin, skip the ownership
            filter so any dataset can be toggled
        """
        # Toggle the status in the triplestore first
        sparql = SparqlQuery(self.app, self.session)
        tse = TriplestoreExplorer(self.app, self.session)
        sparql.toggle_public(self.graph_name, "true" if new_status else "false")

        # Admins may toggle any dataset; everyone else only their own
        if admin and self.session['user']['admin']:
            query_params = (new_status, self.id)
            where_query = ""
        else:
            query_params = (new_status, self.id, self.session["user"]["id"])
            where_query = "AND user_id = ?"

        # Mirror the change in the local database (parameterized query)
        database = Database(self.app, self.session)
        sql = '''
        UPDATE datasets SET
        public=?
        WHERE id = ?
        {}
        '''.format(where_query)

        database.execute_sql_query(sql, query_params)

        # Drop the cached abstraction so the new visibility is picked up
        tse.uncache_abstraction()
Exemplo n.º 5
0
    def toggle_public(self, new_status):
        """Change public status of a dataset (triplestore and db)

        Parameters
        ----------
        new_status : bool
            True if public
        """
        # Toggle the status in the triplestore first
        sparql = SparqlQuery(self.app, self.session)
        tse = TriplestoreExplorer(self.app, self.session)

        sparql.toggle_public(self.graph_name, "true" if new_status else "false")

        # Mirror the change in the local database (parameterized query)
        database = Database(self.app, self.session)
        sql = '''
        UPDATE datasets SET
        public=?
        WHERE user_id = ? AND id = ?
        '''
        params = (new_status, self.session["user"]["id"], self.id)
        database.execute_sql_query(sql, params)

        # Drop the cached abstraction so the new visibility is picked up
        tse.uncache_abstraction()
Exemplo n.º 6
0
    def delete_datasets(self):
        """delete the datasets from the database and the triplestore"""
        launcher = SparqlQueryLauncher(self.app, self.session)
        explorer = TriplestoreExplorer(self.app, self.session)

        for dataset in self.datasets:
            # Drop the graph from the triplestore, retrying up to 3 times
            if dataset.graph_name:
                Utils.redo_if_failure(self.log, 3, 1, launcher.drop_dataset,
                                      dataset.graph_name)
            # Remove the dataset row from the database
            dataset.delete_from_db()

            # Invalidate the cached abstraction for this dataset's visibility
            explorer.uncache_abstraction(public=dataset.public)
Exemplo n.º 7
0
    def delete_user_rdf(self, username):
        """Delete a user rdf graphs

        Delete in DB, TS and filesystem

        Parameters
        ----------
        username : string
            Username to delete
        """
        explorer = TriplestoreExplorer(self.app, self.session)
        launcher = SparqlQueryLauncher(self.app, self.session)
        # Drop every graph owned by the user, retrying each drop up to 3 times
        for user_graph in explorer.get_graph_of_user(username):
            Utils.redo_if_failure(self.log, 3, 1, launcher.drop_dataset, user_graph)
Exemplo n.º 8
0
    def update_base_url(self, old_url, new_url):
        """Update base url for all graphs

        Parameters
        ----------
        old_url : string
            Previous base url
        new_url : string
            New base url
        """
        explorer = TriplestoreExplorer(self.app, self.session)
        # Rewrite the base url in every known graph, one at a time
        for graph in explorer.get_all_graphs():
            explorer.update_base_url(graph, old_url, new_url)
Exemplo n.º 9
0
    def integrate(self, public=False):
        """Integrate the file into the triplestore

        Parameters
        ----------
        public : bool, optional
            Integrate in private or public graph
        """
        sparql = SparqlQueryLauncher(self.app, self.session)
        tse = TriplestoreExplorer(self.app, self.session)

        self.public = public

        upload_method = self.settings.get('triplestore', 'upload_method')

        # Parse the file into an RDF graph
        self.graph_chunk.parse(self.path, format=self.type_dict[self.type])

        # Extract metadata, strip it from the data, and insert it separately
        self.set_metadata()
        self.delete_metadata_location()
        sparql.insert_data(self.graph_metadata, self.file_graph, metadata=True)

        if upload_method == "load":
            # Dump the RDF into a temp file and load it.
            # On failure, wait 5 sec and retry up to 5 times.
            temp_file_name = 'tmp_{}_{}.{}'.format(Utils.get_random_string(5),
                                                   self.name,
                                                   self.rdf_extention)
            Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                  self.graph_chunk, temp_file_name)
        else:
            # Insert directly. On failure, wait 5 sec and retry up to 5 times.
            Utils.redo_if_failure(self.log, 5, 5, sparql.insert_data,
                                  self.graph_chunk, self.file_graph)

        # Invalidate the cached abstraction so the new data is visible
        tse.uncache_abstraction(public=self.public)

        self.set_triples_number()
Exemplo n.º 10
0
def query():
    """Get start points

    Returns
    -------
    json
        startpoint: list of start points
        error: True if error, else False
        errorMessage: the error message of error, else an empty string
    """
    try:
        # If public datasets and queries are protected, dont return anything to unlogged users
        if "user" not in session and current_app.iniconfig.getboolean("askomics", "protect_public"):
            startpoints = []
            public_queries = []
            public_form_queries = []
        else:
            explorer = TriplestoreExplorer(current_app, session)
            handler = ResultsHandler(current_app, session)
            startpoints = explorer.get_startpoints()
            public_queries = handler.get_public_queries()
            public_form_queries = handler.get_public_form_queries()

    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        error_payload = {
            'startpoints': [],
            "publicQueries": [],
            "publicFormQueries": [],
            'error': True,
            'errorMessage': str(e)
        }
        return jsonify(error_payload), 500

    payload = {
        'startpoints': startpoints,
        "publicQueries": public_queries,
        "publicFormQueries": public_form_queries,
        'error': False,
        'errorMessage': ''
    }
    return jsonify(payload)
Exemplo n.º 11
0
    def integrate(self, dataset_id=None):
        """Integrate the file into the triplestore

        Insert the generated RDF content in chunks of at most
        max_chunk_size triples, then insert the abstraction and domain
        knowledge graphs.

        Parameters
        ----------
        dataset_id : optional
            Dataset identifier used to record integration progress in the
            database; when None, no progress is recorded during chunking
        """
        sparql = SparqlQueryLauncher(self.app, self.session)
        tse = TriplestoreExplorer(self.app, self.session)

        # insert metadata
        self.set_metadata()
        sparql.insert_data(self.graph_metadata, self.file_graph, metadata=True)

        content_generator = self.generate_rdf_content()

        # Insert content chunk by chunk
        chunk_number = 0

        for _ in content_generator:

            # Flush the accumulated triples as soon as the chunk is full
            if self.graph_chunk.ntriple >= self.max_chunk_size:

                if self.graph_chunk.percent and dataset_id:
                    self.update_percent_in_db(self.graph_chunk.percent,
                                              dataset_id)

                if self.method == 'load':

                    # write rdf into a tmpfile and load it
                    temp_file_name = 'tmp_{}_{}_chunk_{}.{}'.format(
                        Utils.get_random_string(5), self.name, chunk_number,
                        self.rdf_extention)

                    # Try to load data. if failure, wait 5 sec and retry 5 time
                    Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                          self.graph_chunk, temp_file_name)
                else:
                    # Insert
                    # Try to insert data. if failure, wait 5 sec and retry 5 time
                    Utils.redo_if_failure(self.log, 5, 5, sparql.insert_data,
                                          self.graph_chunk, self.file_graph)

                chunk_number += 1
                # Start a fresh graph for the next chunk of triples
                self.graph_chunk = RdfGraph(self.app, self.session)

        # Load the last (possibly partial) chunk
        if self.graph_chunk.percent and dataset_id:
            self.update_percent_in_db(99, dataset_id)

        if self.method == 'load':
            temp_file_name = 'tmp_{}_{}_chunk_{}.{}'.format(
                Utils.get_random_string(5), self.name, chunk_number,
                self.rdf_extention)

            # Try to load data. if failure, wait 5 sec and retry 5 time
            Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                  self.graph_chunk, temp_file_name)
        else:
            # Insert
            # Try to insert data. if failure, wait 5 sec and retry 5 time
            Utils.redo_if_failure(self.log, 5, 5, sparql.insert_data,
                                  self.graph_chunk, self.file_graph)

        # Content is inserted, now insert abstraction and domain_knowledge
        self.set_rdf_abstraction_domain_knowledge()

        # Remove cached abstraction
        tse.uncache_abstraction(public=self.public)

        if self.method == 'load':

            temp_file_name = 'tmp_{}_{}_abstraction_domain_knowledge.{}'.format(
                Utils.get_random_string(5), self.name, self.rdf_extention)

            self.load_graph(self.graph_abstraction_dk, temp_file_name)
        else:
            # Insert
            sparql.insert_data(self.graph_abstraction_dk, self.file_graph)

        # NOTE(review): unlike the earlier percent updates, this final call is
        # not guarded by dataset_id — confirm update_percent_in_db handles None
        self.update_percent_in_db(100, dataset_id)
        self.set_triples_number()
Exemplo n.º 12
0
    def clear_abstraction_cache(self):
        """Clear cache for all users"""
        # Force-uncache the public abstraction, affecting every user
        explorer = TriplestoreExplorer(self.app, self.session)
        explorer.uncache_abstraction(public=True, force=True)