def virtuoso_client(loop):
    """Yield a SPARQLClient bound to a local Virtuoso with a unique scratch graph.

    A fresh graph IRI is generated per invocation so concurrent/consecutive
    runs cannot see each other's triples.  On teardown the graph is deleted
    (a 404 means it was never created and is ignored) and the client is
    always closed.
    """
    _virtuoso_client = SPARQLClient(
        "http://localhost:8890/sparql",
        update_endpoint=ENV.get("SPARQL_UPDATE_ENDPOINT"),
        crud_endpoint="http://localhost:8890/sparql-graph-crud",
        # unique 7-hex-digit suffix isolates this run's graph
        graph=IRI("http://aiosparql.org/%s" % uuid.uuid4().hex[:7]))
    yield _virtuoso_client
    try:
        loop.run_until_complete(_virtuoso_client.delete())
    except aiohttp.ClientResponseError as exc:
        # 404: the graph was never created during the test — nothing to clean
        if exc.status != 404:
            raise
    finally:
        # BUG FIX: previously close() was skipped when delete() raised a
        # non-404 error (or any unexpected exception), leaking the client's
        # underlying HTTP session.  finally guarantees the close runs.
        loop.run_until_complete(_virtuoso_client.close())
class IntegrationTestCase(AioHTTPTestCase):
    """Base class for integration tests of the muswarm-admin application.

    Runs the aiohttp app against a live SPARQL endpoint
    (http://database:8890/sparql) with a dedicated test graph, and cleans up
    both the graph and any Docker Compose projects created under /data
    between tests.
    """

    # Git repository used by default when a test creates a repository node.
    example_repo = \
        "https://github.com/big-data-europe/mu-swarm-ui-testing.git"
    # Read timeout (seconds) applied to the app's SPARQL client and to the
    # test's own database client.
    sparql_timeout = 5

    async def get_application(self):
        """Return a copy of the muswarm-admin app configured for this test."""
        app = copy(muswarmadmin.main.app)
        app.sparql_timeout = self.sparql_timeout
        return app

    async def scheduler_complete(self, key):
        """Wait for the ActionScheduler registered under ``key`` to finish.

        Raises:
            KeyError: if no scheduler exists for ``key`` (note that a
                scheduler is removed automatically after completion).
        """
        if key not in ActionScheduler.executers:
            raise KeyError(
                "ActionScheduler for key %s does not exist. "
                "HINT: the ActionScheduler is removed automatically after "
                "calling this function" % key)
        await ActionScheduler.executers[key].cancel()

    async def wait_scheduler(self, key, timeout=3):
        """Poll for up to ``timeout`` seconds (5 polls/s) until a scheduler
        appears for ``key``, then wait for it to complete."""
        for i in range(timeout * 5):
            if key in ActionScheduler.executers:
                break
            await asyncio.sleep(0.2)
        await self.scheduler_complete(key)

    def uuid4(self):
        """Return a random UUID as an upper-case hex string without dashes."""
        return str(uuid.uuid4()).replace("-", "").upper()

    def resource(self, type_, id):
        """Build the resource IRI for an object of ``type_`` with ``id``."""
        return (
            muswarmadmin.main.Application.base_resource +
            "%s/%s" % (type_, id)
        )

    def project_exists(self, project_name):
        """True if a Docker Compose project directory exists for the name."""
        return os.path.exists("/data/%s" % project_name)

    async def triple_exists(self, s=None, p=None, o=None):
        """ASK whether a triple matching (s, p, o) exists in the test graph.

        Any argument left as None becomes a wildcard variable in the query.
        """
        s = escape_any(s) if s is not None else "?s"
        p = escape_any(p) if p is not None else "?p"
        o = escape_any(o) if o is not None else "?o"
        result = await self.app.sparql.query(
            "ASK FROM {{graph}} WHERE { {{}} {{}} {{}} }", s, p, o)
        return result['boolean']

    async def prepare_triples(self, triples):
        """Insert triples via the test's own DB client (fixture setup path)."""
        await self.db.update(
            "INSERT DATA { GRAPH {{graph}} { {{}} } }", Triples(triples))

    async def insert_triples(self, triples):
        """Insert triples via the application's SPARQL client."""
        await self.app.sparql.update(
            "INSERT DATA { GRAPH {{graph}} { {{}} } }", Triples(triples))

    async def prepare_node(self, node):
        """Insert a single node via the test's DB client."""
        await self.prepare_triples([node])

    async def insert_node(self, node):
        """Insert a single node via the application's SPARQL client."""
        await self.insert_triples([node])

    async def remove_triples(self, s=None, p=None, o=None):
        """Delete all triples matching (s, p, o); None means wildcard."""
        s = escape_any(s) if s is not None else "?s"
        p = escape_any(p) if p is not None else "?p"
        o = escape_any(o) if o is not None else "?o"
        await self.app.sparql.update(
            """
            WITH {{graph}}
            DELETE { {{s}} {{p}} {{o}} }
            WHERE { {{s}} {{p}} {{o}} }""",
            s=s, p=p, o=o)

    async def describe(self, subject):
        """Return the DESCRIBE result for ``subject`` from the test graph."""
        return await self.app.sparql.query("DESCRIBE {{}} FROM {{graph}}",
                                           subject)

    async def create_drc_node(self, repository_iri=_sentinel,
                              location=_sentinel):
        """Create a DockerCompose node linked to a repository.

        When ``repository_iri`` is not given, a fresh repository is created
        first.  Returns ``(drc_iri, drc_id)``.
        """
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        else:
            s_repository_iri = str(repository_iri)
            # last path segment minus its final character — presumably
            # stripping the closing '>' of the IRI serialization; confirm
            repository_id = s_repository_iri.split('/')[-1][:-1]
        drc_text = dedent("""\
            version: "2"
            services:
              service1:
                image: busybox
                command: "sleep 60"
              service2:
                image: busybox
                command: "sleep 60"
            """)
        drc_id = self.uuid4()
        d_iri = IRI("http://stack-builder.big-data-europe.eu/resources/")
        drc_iri = d_iri + "%s/%s" % ("docker-composes", drc_id)
        drc_title = "stack_{}_drc_{}".format(repository_id, drc_id)
        drc_node = Node(drc_iri, {
            RDF.type: Stackbuilder.DockerCompose,
            Mu.uuid: drc_id,
            Dct.title: drc_title,
            Stackbuilder.text: drc_text
        })
        await self.insert_triples([
            drc_node,
            (repository_iri, SwarmUI.dockerComposeFile, drc_node),
        ])
        return (drc_iri, drc_id)

    async def create_repository(self, location=_sentinel):
        """Create a stack/repository node; returns (iri, id).

        ``location`` defaults to :attr:`example_repo`.
        """
        if location is _sentinel:
            location = self.example_repo
        repository_id = self.uuid4()
        repository_iri = self.resource("stacks", repository_id)
        await self.insert_node(Node(repository_iri, {
            RDF.type: Doap.Stack,
            Mu.uuid: repository_id,
            Doap.location: location,
        }))
        return (repository_iri, repository_id)

    async def create_pipeline(self, repository_iri=_sentinel,
                              location=_sentinel):
        """Create a pipeline linked to a repository and wait for its
        ActionScheduler to complete; returns (iri, id)."""
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        pipeline_id = self.uuid4()
        pipeline_iri = self.resource("pipeline-instances", pipeline_id)
        pipeline_node = Node(pipeline_iri, {
            RDF.type: SwarmUI.Pipeline,
            Mu.uuid: pipeline_id,
        })
        await self.insert_triples([
            pipeline_node,
            (repository_iri, SwarmUI.pipelines, pipeline_node),
        ])
        # inserting the pipeline triggers background work; block until done
        await self.scheduler_complete(pipeline_id)
        return (pipeline_iri, pipeline_id)

    async def get_services(self, project_name):
        """Return {service title: (service IRI, service uuid)} for the
        pipeline whose mu:uuid equals ``project_name``."""
        result = await self.app.sparql.query(
            """
            SELECT ?name ?service ?uuid
            FROM {{graph}}
            WHERE {
                ?pipeline mu:uuid {{}} ;
                  swarmui:services ?service .
                ?service mu:uuid ?uuid ;
                  dct:title ?name .
            }
            """, escape_any(project_name))
        return {
            x['name']['value']: (IRI(x['service']['value']),
                                 x['uuid']['value'])
            for x in result['results']['bindings']
        }

    async def prepare_database(self):
        """Wipe the test graph before each test."""
        await self.db.update("CLEAR GRAPH {{graph}}")

    def setUp(self):
        # Build the loop, DB client, app, server and HTTP client by hand
        # (instead of relying solely on AioHTTPTestCase) so the graph can be
        # cleared before the application starts.
        self.loop = setup_test_loop()
        self.db = SPARQLClient(endpoint="http://database:8890/sparql",
                               graph=IRI(ENV['MU_APPLICATION_GRAPH']),
                               loop=self.loop,
                               read_timeout=self.sparql_timeout)
        self.loop.run_until_complete(self.prepare_database())
        self.app = self.loop.run_until_complete(self.get_application())
        self.server = FixedPortTestServer(self.app)
        self.client = self.loop.run_until_complete(
            self._get_client(self.server))
        self.loop.run_until_complete(self.client.start_server())

    def tearDown(self):
        # Close the DB client, then let AioHTTPTestCase tear down the app,
        # then remove every Compose project left behind under /data.
        self.loop.run_until_complete(self.db.close())
        super().tearDown()
        for project_name in os.listdir("/data"):
            project_path = "/data/%s" % project_name
            subprocess.call(["docker-compose", "down"], cwd=project_path)
            shutil.rmtree(project_path)

    # NOTE: temporary fix, will be fixed with the next aiohttp release
    @asyncio.coroutine
    def _get_client(self, app):
        """Return a TestClient instance."""
        return TestClient(app, loop=self.loop)

    async def assertNode(self, subject, values):
        """Assert ``subject`` exists and, for each predicate in ``values``,
        has exactly one value equal to the expected one."""
        result = await self.describe(subject)
        self.assertTrue(result and result[subject])
        for p, o in values.items():
            found_values = [x['value'] for x in result[subject][p]]
            self.assertEqual(
                len(found_values), 1,
                "multiple predicates {} in node's subject {}: {!r}".format(
                    p, subject, found_values))
            self.assertEqual(
                found_values[0], o,
                "predicate {} in node {} has value {}, expected {}".format(
                    p, subject, found_values[0], o))

    async def assertStatus(self, subject, status):
        """Assert the node's swarmui:status equals ``status``."""
        await self.assertNode(subject, {SwarmUI.status: status})

    async def assertExists(self, s=None, p=None, o=None):
        """Assert a triple matching (s, p, o) exists; None means wildcard."""
        self.assertTrue(await self.triple_exists(s, p, o))

    async def assertNotExists(self, s=None, p=None, o=None):
        """Assert no triple matches (s, p, o); None means wildcard."""
        self.assertFalse(await self.triple_exists(s, p, o))
def jena_client(loop):
    """Yield a SPARQLClient for the local Jena Fuseki dataset.

    The client is closed on teardown once the consumer is done with it.
    """
    client = SPARQLClient("http://localhost:3030/ds")
    yield client
    loop.run_until_complete(client.close())
class IntegrationTestCase(AioHTTPTestCase):
    """Base class for integration tests of the muswarm-admin application.

    Runs the aiohttp app against a live SPARQL endpoint
    (http://database:8890/sparql) with a dedicated test graph, and cleans up
    both the graph and any Docker Compose projects created under /data
    between tests.
    """

    # Git repository used by default when a test creates a repository node.
    example_repo = \
        "https://github.com/big-data-europe/mu-swarm-ui-testing.git"
    # Read timeout (seconds) applied to the app's SPARQL client and to the
    # test's own database client.
    sparql_timeout = 5

    async def get_application(self):
        """Return a copy of the muswarm-admin app configured for this test."""
        app = copy(muswarmadmin.main.app)
        app.sparql_timeout = self.sparql_timeout
        return app

    async def scheduler_complete(self, key):
        """Wait for the ActionScheduler registered under ``key`` to finish.

        Raises:
            KeyError: if no scheduler exists for ``key`` (note that a
                scheduler is removed automatically after completion).
        """
        if key not in ActionScheduler.executers:
            raise KeyError(
                "ActionScheduler for key %s does not exist. "
                "HINT: the ActionScheduler is removed automatically after "
                "calling this function" % key)
        await ActionScheduler.executers[key].cancel()

    async def wait_scheduler(self, key, timeout=3):
        """Poll for up to ``timeout`` seconds (5 polls/s) until a scheduler
        appears for ``key``, then wait for it to complete."""
        for i in range(timeout * 5):
            if key in ActionScheduler.executers:
                break
            await asyncio.sleep(0.2)
        await self.scheduler_complete(key)

    def uuid4(self):
        """Return a random UUID as an upper-case hex string without dashes."""
        return str(uuid.uuid4()).replace("-", "").upper()

    def resource(self, type_, id):
        """Build the resource IRI for an object of ``type_`` with ``id``."""
        return (muswarmadmin.main.Application.base_resource +
                "%s/%s" % (type_, id))

    def project_exists(self, project_name):
        """True if a Docker Compose project directory exists for the name."""
        return os.path.exists("/data/%s" % project_name)

    async def triple_exists(self, s=None, p=None, o=None):
        """ASK whether a triple matching (s, p, o) exists in the test graph.

        Any argument left as None becomes a wildcard variable in the query.
        """
        s = escape_any(s) if s is not None else "?s"
        p = escape_any(p) if p is not None else "?p"
        o = escape_any(o) if o is not None else "?o"
        result = await self.app.sparql.query(
            "ASK FROM {{graph}} WHERE { {{}} {{}} {{}} }", s, p, o)
        return result['boolean']

    async def prepare_triples(self, triples):
        """Insert triples via the test's own DB client (fixture setup path)."""
        await self.db.update("INSERT DATA { GRAPH {{graph}} { {{}} } }",
                             Triples(triples))

    async def insert_triples(self, triples):
        """Insert triples via the application's SPARQL client."""
        await self.app.sparql.update(
            "INSERT DATA { GRAPH {{graph}} { {{}} } }", Triples(triples))

    async def prepare_node(self, node):
        """Insert a single node via the test's DB client."""
        await self.prepare_triples([node])

    async def insert_node(self, node):
        """Insert a single node via the application's SPARQL client."""
        await self.insert_triples([node])

    async def remove_triples(self, s=None, p=None, o=None):
        """Delete all triples matching (s, p, o); None means wildcard."""
        s = escape_any(s) if s is not None else "?s"
        p = escape_any(p) if p is not None else "?p"
        o = escape_any(o) if o is not None else "?o"
        await self.app.sparql.update("""
            WITH {{graph}}
            DELETE { {{s}} {{p}} {{o}} }
            WHERE { {{s}} {{p}} {{o}} }""", s=s, p=p, o=o)

    async def describe(self, subject):
        """Return the DESCRIBE result for ``subject`` from the test graph."""
        return await self.app.sparql.query("DESCRIBE {{}} FROM {{graph}}",
                                           subject)

    async def create_drc_node(self, repository_iri=_sentinel,
                              location=_sentinel):
        """Create a DockerCompose node linked to a repository.

        When ``repository_iri`` is not given, a fresh repository is created
        first.  Returns ``(drc_iri, drc_id)``.
        """
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        else:
            s_repository_iri = str(repository_iri)
            # last path segment minus its final character — presumably
            # stripping the closing '>' of the IRI serialization; confirm
            repository_id = s_repository_iri.split('/')[-1][:-1]
        drc_text = dedent("""\
            version: "2"
            services:
              service1:
                image: busybox
                command: "sleep 60"
              service2:
                image: busybox
                command: "sleep 60"
            """)
        drc_id = self.uuid4()
        d_iri = IRI("http://stack-builder.big-data-europe.eu/resources/")
        drc_iri = d_iri + "%s/%s" % ("docker-composes", drc_id)
        drc_title = "stack_{}_drc_{}".format(repository_id, drc_id)
        drc_node = Node(
            drc_iri, {
                RDF.type: Stackbuilder.DockerCompose,
                Mu.uuid: drc_id,
                Dct.title: drc_title,
                Stackbuilder.text: drc_text
            })
        await self.insert_triples([
            drc_node,
            (repository_iri, SwarmUI.dockerComposeFile, drc_node),
        ])
        return (drc_iri, drc_id)

    async def create_repository(self, location=_sentinel):
        """Create a stack/repository node; returns (iri, id).

        ``location`` defaults to :attr:`example_repo`.
        """
        if location is _sentinel:
            location = self.example_repo
        repository_id = self.uuid4()
        repository_iri = self.resource("stacks", repository_id)
        await self.insert_node(
            Node(
                repository_iri, {
                    RDF.type: Doap.Stack,
                    Mu.uuid: repository_id,
                    Doap.location: location,
                }))
        return (repository_iri, repository_id)

    async def create_pipeline(self, repository_iri=_sentinel,
                              location=_sentinel):
        """Create a pipeline linked to a repository and wait for its
        ActionScheduler to complete; returns (iri, id)."""
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        pipeline_id = self.uuid4()
        pipeline_iri = self.resource("pipeline-instances", pipeline_id)
        pipeline_node = Node(pipeline_iri, {
            RDF.type: SwarmUI.Pipeline,
            Mu.uuid: pipeline_id,
        })
        await self.insert_triples([
            pipeline_node,
            (repository_iri, SwarmUI.pipelines, pipeline_node),
        ])
        # inserting the pipeline triggers background work; block until done
        await self.scheduler_complete(pipeline_id)
        return (pipeline_iri, pipeline_id)

    async def get_services(self, project_name):
        """Return {service title: (service IRI, service uuid)} for the
        pipeline whose mu:uuid equals ``project_name``."""
        result = await self.app.sparql.query(
            """
            SELECT ?name ?service ?uuid
            FROM {{graph}}
            WHERE {
                ?pipeline mu:uuid {{}} ;
                  swarmui:services ?service .
                ?service mu:uuid ?uuid ;
                  dct:title ?name .
            }
            """, escape_any(project_name))
        return {
            x['name']['value']: (IRI(x['service']['value']),
                                 x['uuid']['value'])
            for x in result['results']['bindings']
        }

    async def prepare_database(self):
        """Wipe the test graph before each test."""
        await self.db.update("CLEAR GRAPH {{graph}}")

    def setUp(self):
        # Build the loop, DB client, app, server and HTTP client by hand
        # (instead of relying solely on AioHTTPTestCase) so the graph can be
        # cleared before the application starts.
        self.loop = setup_test_loop()
        self.db = SPARQLClient(endpoint="http://database:8890/sparql",
                               graph=IRI(ENV['MU_APPLICATION_GRAPH']),
                               loop=self.loop,
                               read_timeout=self.sparql_timeout)
        self.loop.run_until_complete(self.prepare_database())
        self.app = self.loop.run_until_complete(self.get_application())
        self.server = FixedPortTestServer(self.app)
        self.client = self.loop.run_until_complete(
            self._get_client(self.server))
        self.loop.run_until_complete(self.client.start_server())

    def tearDown(self):
        # Close the DB client, then let AioHTTPTestCase tear down the app,
        # then remove every Compose project left behind under /data.
        self.loop.run_until_complete(self.db.close())
        super().tearDown()
        for project_name in os.listdir("/data"):
            project_path = "/data/%s" % project_name
            subprocess.call(["docker-compose", "down"], cwd=project_path)
            shutil.rmtree(project_path)

    # NOTE: temporary fix, will be fixed with the next aiohttp release
    @asyncio.coroutine
    def _get_client(self, app):
        """Return a TestClient instance."""
        return TestClient(app, loop=self.loop)

    async def assertNode(self, subject, values):
        """Assert ``subject`` exists and, for each predicate in ``values``,
        has exactly one value equal to the expected one."""
        result = await self.describe(subject)
        self.assertTrue(result and result[subject])
        for p, o in values.items():
            found_values = [x['value'] for x in result[subject][p]]
            self.assertEqual(
                len(found_values), 1,
                "multiple predicates {} in node's subject {}: {!r}".format(
                    p, subject, found_values))
            self.assertEqual(
                found_values[0], o,
                "predicate {} in node {} has value {}, expected {}".format(
                    p, subject, found_values[0], o))

    async def assertStatus(self, subject, status):
        """Assert the node's swarmui:status equals ``status``."""
        await self.assertNode(subject, {SwarmUI.status: status})

    async def assertExists(self, s=None, p=None, o=None):
        """Assert a triple matching (s, p, o) exists; None means wildcard."""
        self.assertTrue(await self.triple_exists(s, p, o))

    async def assertNotExists(self, s=None, p=None, o=None):
        """Assert no triple matches (s, p, o); None means wildcard."""
        self.assertFalse(await self.triple_exists(s, p, o))