def test_meta_knowledge_graph(self):
    """Build a TrapiInterface from the fixture reasoners and verify that a
    meta knowledge graph can be retrieved.
    """
    interface = TrapiInterface(
        bkb_handler=self.bkb_handler,
        dynamic_reasoner=self.dynamic_reasoner,
        joint_reasoner=self.joint_reasoner,
    )
    meta_kg = interface.get_meta_knowledge_graph()
    # The original computed meta_kg but never asserted on it, so the test
    # could not fail on a bad return value; check the expected type.
    self.assertIsInstance(meta_kg, dict)
Code example #2
 def test_meta_knowledge_graph(self):
     """A default-constructed TrapiInterface should return its meta
     knowledge graph as a dict.
     """
     meta_kg = TrapiInterface().get_meta_knowledge_graph()
     self.assertIsInstance(meta_kg, dict)
Code example #3
    def _abort_with_logs(self, query_copy, queries, status, description=None,
                         interface_dict=None):
        """ Record a failed query and build its JSON error response.

            Collects log messages from the derived queries (and, when
            interfaces were already constructed, from each interface),
            stamps the status and optional description on the response,
            stores the transaction, and returns the serialized payload.
        """
        query_copy = self.add_logs_from_query_list(query_copy, queries)
        if interface_dict is not None:
            # Merge interface-level log messages into the response.
            for interface in interface_dict:
                query_copy.logger.add_logs(interface.logger.to_dict())
        query_copy.set_status(status)
        if description is not None:
            query_copy.set_description(description)
        self.add_transaction(query_copy)
        return JsonResponse(query_copy.to_dict())

    def get_response(self, query):
        """ Main function of the processor that handles primary logic for obtaining
            a cached or calculated query response.

            Pipeline: expand the batch query, normalize to preferred
            curies, conflate categories, expand ontological descendants
            and semantic operations, filter out inconsistent queries,
            then set up, build and run CHP queries on each
            disease-specific interface and merge the TRAPI responses.
            Every failure path records the transaction and returns a
            JSON error response via _abort_with_logs.
        """
        query_copy = query.get_copy()
        start_time = time.time()
        logger.info('Running query.')

        # Instantiate CHP TRAPI Interface
        interface = TrapiInterface(
            hosts_filename=self.chp_config.hosts_filename,
            num_processes_per_host=self.chp_config.num_processes_per_host,
            bkb_handler=self.chp_config.bkb_handler,
            joint_reasoner=self.chp_config.joint_reasoner,
            dynamic_reasoner=self.chp_config.dynamic_reasoner,
        )
        # Fetched once and reused below; the original queried it three times.
        # NOTE(review): assumes get_meta_knowledge_graph() is a pure getter
        # with no per-call side effects -- confirm.
        meta_knowledge_graph = interface.get_meta_knowledge_graph()

        # Expand
        expand_queries = self.expand_batch_query(query)
        # Normalize to Preferred Curies
        normalization_time = time.time()
        normalize_queries, normalization_map = self.normalize_to_preferred(
            expand_queries,
            meta_knowledge_graph=meta_knowledge_graph,
            with_normalization_map=True,
        )
        logger.info(
            'Normalization time: {} seconds.'.format(time.time() -
                                                     normalization_time))
        # Conflate
        conflation_time = time.time()
        conflate_queries = self.conflate_categories(
            normalize_queries,
            conflation_map=interface.get_conflation_map(),
        )
        logger.info('Conflation time: {} seconds.'.format(time.time() -
                                                          conflation_time))
        # Onto Expand
        onto_time = time.time()
        onto_queries = self.expand_supported_ontological_descendants(
            conflate_queries,
            curies_database=interface.get_curies(),
        )
        logger.info(
            'Ontological expansion time: {} seconds.'.format(time.time() -
                                                             onto_time))
        # Semantic Ops Expand
        semops_time = time.time()
        semops_queries = self.expand_with_semantic_ops(
            onto_queries,
            meta_knowledge_graph=meta_knowledge_graph,
        )
        logger.info('Sem ops time: {} seconds.'.format(time.time() -
                                                       semops_time))
        # Filter out inconsistent queries
        filter_time = time.time()
        consistent_queries, inconsistent_queries = self.filter_queries_inconsistent_with_meta_knowledge_graph(
            semops_queries,
            meta_knowledge_graph=meta_knowledge_graph,
            with_inconsistent_queries=True)
        logger.info('Consistency filter time: {} seconds.'.format(time.time() -
                                                                  filter_time))

        # Ensure that there are actually consistent queries that have been extracted
        if not consistent_queries:
            return self._abort_with_logs(
                query_copy,
                inconsistent_queries,
                'Bad request. See description.',
                'Could not extract any supported queries from query graph.',
            )

        logger.info(
            'Number of consistent queries derived from passed query: {}.'.
            format(len(consistent_queries)))
        # Get disease specific interfaces if a subdomain was not used
        try:
            interface_dict = self.setup_queries_based_on_disease_interfaces(
                consistent_queries)
        except ValueError as ex:
            return self._abort_with_logs(
                query_copy,
                consistent_queries,
                'Bad request. See description.',
                'Problem during setup. ' + str(ex),
            )

        # Setup for CHP inferencing
        try:
            setup_time = time.time()
            # disease_interface, not interface: avoid shadowing the
            # TrapiInterface instance above.
            for disease_interface, queries in interface_dict.items():
                disease_interface.setup_trapi_queries(queries)
            logger.info(
                'Trapi Interface setup time: {} seconds.'.format(time.time() -
                                                                 setup_time))
        except Exception as ex:
            return self._abort_with_logs(
                query_copy,
                consistent_queries,
                'Bad request. See description.',
                'Problem during interface setup. ' + str(ex),
                interface_dict=interface_dict,
            )

        # Build CHP queries
        try:
            build_time = time.time()
            for disease_interface in interface_dict:
                disease_interface.build_chp_queries()
            logger.info(
                'CHP query build time: {} seconds.'.format(time.time() -
                                                           build_time))
        except Exception as ex:
            return self._abort_with_logs(
                query_copy,
                consistent_queries,
                'Bad request. See description.',
                'Problem during CHP query building. ' + str(ex),
                interface_dict=interface_dict,
            )

        logger.info('Built Queries.')
        # Run queries
        try:
            reasoning_start_time = time.time()
            for disease_interface in interface_dict:
                disease_interface.run_chp_queries()
            logger.info('Completed Reasoning in {} seconds.'.format(
                time.time() - reasoning_start_time))
        except Exception as ex:
            error_response = self._abort_with_logs(
                query_copy,
                consistent_queries,
                'Unexpected error. See description.',
                'Problem during reasoning. ' + str(ex),
                interface_dict=interface_dict,
            )
            # Report critical error to logs (after the transaction is stored,
            # matching the original ordering).
            logger.critical('Error during reasoning. Check query: {}'.format(
                query_copy.id))
            return error_response

        # Construct Response
        responses = []
        for disease_interface in interface_dict:
            responses.extend(disease_interface.construct_trapi_responses())

        # Check if any responses came back
        if not responses:
            return self._abort_with_logs(
                query_copy,
                consistent_queries,
                'No results.',
                interface_dict=interface_dict,
            )

        # Add responses into database
        self.add_transactions(responses)

        # Construct merged response
        response = self.merge_responses(query_copy, responses)

        # Now merge all interface level log messages from each interface
        for disease_interface in interface_dict:
            response.logger.add_logs(disease_interface.logger.to_dict())

        # Unnormalize back to the curies the client originally sent.
        unnormalized_response = self.undo_normalization(
            response, normalization_map)

        logger.info('Constructed TRAPI response.')

        logger.info('Responded in {} seconds'.format(time.time() - start_time))
        unnormalized_response.set_status('Success')

        # Add workflow
        unnormalized_response.add_workflow("lookup")

        # Set the used biolink version
        unnormalized_response.biolink_version = TOOLKIT.get_model_version()

        # Add response to database
        self.add_transaction(unnormalized_response)

        return JsonResponse(unnormalized_response.to_dict())