def test_graph_sparql_bad(self):
     """SPARQL against an unreachable graph endpoint must raise URLError."""
     sparql = quote(
         "select ?g (count(*) as ?count) {graph ?g {?s ?p ?o}} group by ?g")
     store = GraphStore()
     with self.assertRaises(URLError):
         store._graph_sparql("default", sparql)
 def on_get(self, req, resp):
     """Handle GET requests listing the provenance graphs as JSON."""
     store = GraphStore()
     prov_graphs = store._prov_list()
     resp.data = json.dumps(prov_graphs, indent=1, sort_keys=True)
     resp.content_type = 'application/json'
     resp.status = falcon.HTTP_200
     app_logger.info('Finished operations on /graph/list/prov GET Request.')
 def _store_provenance(self, wf_base_uri):
     """Persist the workflow's provenance graph in the Graph Store."""
     # Each workflow context gets its own provenance graph (derived from
     # wf_base_uri), in addition to the global provenance graph.
     prov_graph_uri = create_uri(ATTXPROVURL, wf_base_uri)
     store = GraphStore()
     store._graph_add(prov_graph_uri, self.graph.serialize(format='turtle'))
 def on_post(self, req, resp, parsed):
     """Handle POST requests that add triples to a named graph."""
     store = GraphStore()
     outcome = store._graph_add(parsed['namedGraph'], parsed['triples'])
     resp.data = json.dumps(outcome)
     resp.content_type = 'application/json'
     resp.status = falcon.HTTP_200
     app_logger.info('Finished operations on /graph/update POST Request.')
 def on_post(self, req, resp, parsed):
     """Handle POST SPARQL query requests against a named graph."""
     store = GraphStore()
     query_result = store._graph_sparql(parsed['namedGraph'], parsed['query'])
     resp.data = str(query_result)
     resp.content_type = 'application/xml'  # for now just this type
     resp.status = falcon.HTTP_200
     app_logger.info('Finished operations on /graph/query POST Request.')
 def on_delete(self, req, resp):
     """Execution of the DELETE named graph request.

     Drops the graph identified by the ``uri`` query parameter from the
     Graph Store and answers with a plain-text 200 response.
     """
     graph_uri = req.get_param('uri')
     fuseki = GraphStore()
     fuseki._drop_graph(graph_uri)
     # BUG FIX: the original set 'plain/text', which is not a valid MIME
     # type; the correct media type is 'text/plain'.
     resp.content_type = 'text/plain'
     app_logger.info(
         'Deleted/DELETE graph with URI: {0}.'.format(graph_uri))
     resp.status = falcon.HTTP_200
 def test_graph_retrieve_None(self):
     """Retrieving a non-existent graph must yield None."""
     mock_url = "{0}/data?graph={1}".format(self.request_address,
                                            "http://test.com")
     responses.add(responses.GET, mock_url, status=404)
     store = GraphStore()
     self.assertIsNone(store._graph_retrieve("default"))
 def test_ping(self):
     """Health ping against the graph endpoint must report healthy."""
     ping_url = "http://localhost:3030/{0}/ping".format("$")
     responses.add(responses.GET, ping_url,
                   "2017-09-18T11:41:19.915+00:00",
                   status=200)
     store = GraphStore()
     self.assertTrue(store._graph_health())
 def on_get(self, req, resp):
     """Handle GET requests for a single named graph in Turtle format."""
     graph_uri = req.get_param('uri')
     store = GraphStore()
     graph_data = store._graph_retrieve(graph_uri)
     # Guard clause: a missing graph is reported as HTTP 410 Gone.
     if graph_data is None:
         raise falcon.HTTPGone()
     resp.data = str(graph_data)
     resp.content_type = 'text/turtle'
     app_logger.info('Retrieved: {0}.'.format(graph_uri))
     resp.status = falcon.HTTP_200
 def test_graph_retrieve_ttl(self):
     """Retrieve a specific graph and compare it to the Turtle fixture."""
     url = "http://data.hulib.helsinki.fi/attx/strategy"
     with open('tests/resources/graph_strategy.ttl') as fixture:
         graph_data = fixture.read()
     responses.add(responses.GET,
                   "{0}/data?graph={1}".format(self.request_address, url),
                   body=graph_data,
                   status=200)
     store = GraphStore()
     retrieved = store._graph_retrieve(
         "http://data.hulib.helsinki.fi/attx/strategy")
     assert (retrieved == graph_data)
 def test_graph_list(self):
     """Listing all graphs must match the JSON fixture response."""
     list_query = quote(
         "select ?g (count(*) as ?count) {graph ?g {?s ?p ?o}} group by ?g")
     with open('tests/resources/graph_list_request.json') as request_file:
         mocked_body = json.load(request_file)
     with open('tests/resources/graph_list_response.json') as response_file:
         expected_list = json.load(response_file)
     sparql_url = "{0}/sparql?query={1}".format(self.request_address,
                                                list_query)
     responses.add(responses.GET, sparql_url, json=mocked_body, status=200)
     store = GraphStore()
     assert (store._graph_list() == expected_list)
 def test_prov_list(self):
     """Listing provenance graphs must match the JSON fixture response."""
     list_query = quote(
         "select ?g {{graph ?g {{?s ?p ?o}} filter(regex(str(?g), '{0}'))}} group by ?g"
         .format(ATTXPROVURL))
     with open('tests/resources/prov_list_request.json') as request_file:
         mocked_body = json.load(request_file)
     with open('tests/resources/prov_list_response.json') as response_file:
         expected_list = json.load(response_file)
     sparql_url = "{0}/sparql?query={1}".format(self.request_address,
                                                list_query)
     responses.add(responses.GET, sparql_url, json=mocked_body, status=200)
     store = GraphStore()
     assert (store._prov_list() == expected_list)
 def test_graph_sparql(self):
     """Test a SPARQL query against a named graph with a mocked endpoint."""
     with open('tests/resources/graph_sparql.xml') as datafile:
         graph_data = datafile.read()
     list_query = "select ?g (count(*) as ?count) {graph ?g {?s ?p ?o}} group by ?g"
     url = "http://data.hulib.helsinki.fi/attx/strategy"
     # BUG FIX: the original template contained a stray literal '%s' and
     # only two numbered placeholders while three arguments were passed,
     # so list_query was silently dropped and the graph URI ended up in
     # the 'query' parameter. Use three placeholders instead.
     request_url = "{0}/query?default-graph-uri={1}&query={2}&output=xml&results=xml&format=xml".format(
         self.request_address, url, list_query)
     httpretty.register_uri(httpretty.GET,
                            request_url,
                            graph_data,
                            status=200,
                            content_type="application/sparql-results+xml")
     fuseki = GraphStore()
     result = fuseki._graph_sparql(url, list_query)
     assert (result == graph_data)
     httpretty.disable()
     httpretty.reset()
# ---- Example #14 ----
 def test_health_response(self):
     """Healthcheck endpoint must return the expected response body."""
     store = GraphStore()
     httpretty.register_uri(httpretty.GET,
                            "http://localhost:3030/{0}/ping".format("$"),
                            "2017-09-18T11:41:19.915+00:00",
                            status=200)
     httpretty.register_uri(httpretty.GET,
                            "http://localhost:7030/health",
                            status=200)
     expected = healthcheck_response("Running", store)
     result = self.simulate_get('/health')
     assert(result.content == expected)
     httpretty.disable()
     httpretty.reset()
# ---- Example #15 ----
 def test_health_message(self):
     """Healthcheck endpoint must report per-component statuses."""
     store = GraphStore()
     httpretty.register_uri(httpretty.GET,
                            "http://*****:*****@localhost:15672/api/aliveness-test/%2F",
                            body='{"status": "ok"}',
                            status=200)
     httpretty.register_uri(httpretty.GET,
                            "http://localhost:4304/health",
                            status=200)
     response = healthcheck_response("Running", store)
     result = self.simulate_get('/health')
     expected = {"provService": "Running", "messageBroker": "Running", "graphStore": "Not Running"}
     assert(expected == json.loads(response))
     assert(result.content == response)
     httpretty.disable()
     httpretty.reset()
    def test_graph_drop(self):
        """Drop a named graph and compare against the fixture response."""
        url = "http://data.hulib.helsinki.fi/attx/strategy"
        with open('tests/resources/graph_drop.txt') as fixture:
            graph_data = fixture.read()

        def drop_callback(request):
            """Return the fixture body for the mocked drop request."""
            return (200,
                    {'content-type': 'text/html',
                     'cache-control': "no-cache"},
                    graph_data)

        responses.add_callback(
            responses.POST,
            "{0}/update".format(self.request_address),
            callback=drop_callback,
            content_type="application/x-www-form-urlencoded",
        )
        store = GraphStore()
        assert (store._drop_graph(url) == graph_data)
    def _index_prov(self):
        """Index provenance graphs from the Graph Store into Elasticsearch."""
        store = GraphStore()
        prov_data = store._prov_list()
        if not prov_data['graphs']:
            app_logger.warning('There are no provenance graphs.')
            return
        # Map each graph's document type to its framed provenance output.
        documents = dict()
        for prov_graph in prov_data['graphs']:
            doc_type = str(prov_graph).split(
                "http://data.hulib.helsinki.fi/prov_", 1)[1]
            framed = json.loads(
                self._get_framed_provenance(prov_graph, doc_type))
            if str(framed["payload"]["status"]).lower() != "success":
                raise AssertionError("Frame operation did not succeed.")
            documents[doc_type] = framed["payload"][
                "framingServiceOutput"]["output"]
        self._do_bulk_index(documents)
        # NOTE: as in the original, the log line reports only the doc type
        # of the last graph processed.
        app_logger.info(
            'Indexed documents with doc type: {0}'.format(doc_type))
    def test_graph_add(self):
        """Add triples to a named graph and check the JSON response."""
        url = "http://data.hulib.helsinki.fi/attx/strategy"
        graph_data = "<http://example/egbook3> <http://purl.org/dc/elements/1.1/title>  \"This is an example title\""
        with open('tests/resources/graph_add_response.json') as fixture:
            response_data = json.load(fixture)

        def add_callback(request):
            """Return the fixture body for the mocked graph-add request."""
            return (200,
                    {'content-type': "application/json",
                     'cache-control': "no-cache"},
                    json.dumps(response_data))

        responses.add_callback(
            responses.POST,
            "{0}/data?graph={1}".format(self.request_address, url),
            callback=add_callback,
            content_type='text/turtle',
        )
        store = GraphStore()
        assert (store._graph_add(url, graph_data) == json.dumps(response_data))
 def _store_provenance_graph(self):
     """Persist the serialized graph into the global provenance graph."""
     store = GraphStore()
     store._graph_add(ATTXProv, self.graph.serialize(format='turtle'))
 def test_graph_stats_bad(self):
     """Graph statistics must raise ConnectionError when the store is down."""
     store = GraphStore()
     with self.assertRaises(ConnectionError):
         store._graph_statistics()
 def test_graph_retrieve_bad(self):
     """Graph retrieval must raise ConnectionError when the store is down."""
     store = GraphStore()
     with self.assertRaises(ConnectionError):
         store._graph_retrieve("default")
 def test_graph_add_bad(self):
     """Graph update must raise ConnectionError when the store is down."""
     store = GraphStore()
     with self.assertRaises(ConnectionError):
         store._graph_add("default", "")
 def test_graph_drop_bad(self):
     """Graph drop must raise ConnectionError when the store is down."""
     store = GraphStore()
     with self.assertRaises(ConnectionError):
         store._drop_graph("default")
 def test_prov_list_bad(self):
     """Provenance listing must raise ConnectionError when the store is down."""
     store = GraphStore()
     with self.assertRaises(ConnectionError):
         store._prov_list()