Ejemplo n.º 1
1
 async def start_server(self):
     """Start the wrapped test server, then build the SPARQL client session.

     Every endpoint-like entry in the stored client kwargs is rewritten to
     an absolute URL on the freshly started server before the client is
     constructed.
     """
     await self._server.start_server(loop=self._loop)
     client_kwargs = dict(self._client_kwargs)
     for key in ('endpoint', 'update_endpoint', 'crud_endpoint'):
         if client_kwargs.get(key):
             client_kwargs[key] = self.make_url(client_kwargs[key])
     self._session = SPARQLClient(loop=self._loop, **client_kwargs)
Ejemplo n.º 2
0
def virtuoso_client(loop):
    """Yield a SPARQLClient against a local Virtuoso with a random graph.

    On teardown the graph is deleted (a 404 from the server is tolerated,
    since the graph may never have been created) and the client is closed.
    """
    client = SPARQLClient(
        "http://localhost:8890/sparql",
        update_endpoint=ENV.get("SPARQL_UPDATE_ENDPOINT"),
        crud_endpoint="http://localhost:8890/sparql-graph-crud",
        graph=IRI("http://aiosparql.org/%s" % uuid.uuid4().hex[:7]))
    yield client
    try:
        loop.run_until_complete(client.delete())
    except aiohttp.ClientResponseError as exc:
        if exc.status != 404:
            raise
    loop.run_until_complete(client.close())
Ejemplo n.º 3
0
async def test_client_context(loop):
    """The async context-manager protocol closes the client and its session."""
    async with SPARQLClient(endpoint="http://example.org",
                            graph="http://example/graph") as sparql:
        # Inside the block both the client and its aiohttp session are open.
        assert not sparql.session.closed
        assert not sparql.closed
    # Leaving the block must have closed both.
    assert sparql.session.closed
    assert sparql.closed
Ejemplo n.º 4
0
    def setUp(self):
        """Build the test fixture: event loop, DB client, app, and test client."""
        self.loop = setup_test_loop()

        # Direct database client, used for fixture queries (prepare/clear),
        # separate from the client the application itself uses.
        self.db = SPARQLClient(endpoint="http://database:8890/sparql",
                               graph=IRI(ENV['MU_APPLICATION_GRAPH']),
                               loop=self.loop,
                               read_timeout=self.sparql_timeout)
        self.loop.run_until_complete(self.prepare_database())

        self.app = self.loop.run_until_complete(self.get_application())

        # Test server/client pair for the application under test.
        self.server = FixedPortTestServer(self.app)
        self.client = self.loop.run_until_complete(
            self._get_client(self.server))

        self.loop.run_until_complete(self.client.start_server())
async def pollAccumulate(count=0, client=None):
    """
    Poll the database until it is ready to answer queries.

    Recursively retries an ASK query, sleeping a random 1-5 seconds between
    attempts, up to ``ENV['POLL_RETRIES']`` times.

    :param count: current attempt number (used by the recursion).
    :param client: optional pre-built SPARQLClient; one is created from the
        MU_* environment variables when omitted.
    :return: True once the endpoint answers, False when retries are exhausted.
        The client is closed before either terminal return.
    """
    import asyncio

    if client is None:
        client = SPARQLClient(ENV['MU_SPARQL_ENDPOINT'],
                              graph=IRI(ENV['MU_APPLICATION_GRAPH']))
    if count >= int(ENV['POLL_RETRIES']):
        await client.close()
        return False
    try:
        result = await client.query("""
            ASK
            FROM {{graph}}
            WHERE {
                ?s ?p ?o
            }
            """)
    except ClientConnectionError:
        # Endpoint not accepting connections yet; back off without blocking
        # the event loop (time.sleep here would stall every other coroutine).
        await asyncio.sleep(randint(1, 5))
        return await pollAccumulate(count + 1, client)
    if not result:
        # logger.warn is deprecated; logging.warning is the supported name.
        logger.warning('SPARQL endpoint not yet ready')
        await asyncio.sleep(randint(1, 5))
        return await pollAccumulate(count + 1, client)
    logger.info('SPARQL endpoint is ready')
    await client.close()
    return True
Ejemplo n.º 6
0
 async def _create_client(self):
     """
     Build a SPARQLClient configured from SPARQL_* environment variables.

     NOTE(review): ``crud_endpoint`` reads ``ENV["SPARQL_UPDATE_ENDPOINT"]``
     even though its fallback is the graph-crud URL; this looks like a
     copy-paste of the ``update_endpoint`` key — confirm whether it should
     read a dedicated CRUD variable instead.
     """
     return SPARQLClient(ENV.get("SPARQL_ENDPOINT",
                                 "http://localhost:8890/sparql"),
                         update_endpoint=ENV.get("SPARQL_UPDATE_ENDPOINT"),
                         crud_endpoint=ENV.get(
                             "SPARQL_UPDATE_ENDPOINT",
                             "http://localhost:8890/sparql-graph-crud"),
                         graph=self.graph)
Ejemplo n.º 7
0
 def sparql(self):
     """
     The SPARQL client, created lazily on first access and cached on self.
     """
     try:
         return self._sparql
     except AttributeError:
         # First access: build and memoize the client.
         self._sparql = SPARQLClient(ENV['MU_SPARQL_ENDPOINT'],
                                     graph=IRI(ENV['MU_APPLICATION_GRAPH']),
                                     loop=self.loop,
                                     read_timeout=self.sparql_timeout)
         return self._sparql
Ejemplo n.º 8
0
 async def start_server(self):
     """Bring up the underlying server and attach a SPARQL client session.

     Relative endpoint values in the stored kwargs are resolved against the
     running server's base URL before constructing the client.
     """
     await self._server.start_server(loop=self._loop)
     resolved = dict(self._client_kwargs)
     for name in ("endpoint", "update_endpoint", "crud_endpoint"):
         value = resolved.get(name)
         if value:
             resolved[name] = self.make_url(value)
     self._session = SPARQLClient(loop=self._loop, **resolved)
Ejemplo n.º 9
0
async def run_loop(sparql_endpoint=None, debug=False):
    """
    Main event loop: stream Docker events and dispatch each to its handlers.

    :param sparql_endpoint: endpoint URL passed to SPARQLClient.
    :param debug: when True, handler modules are reloaded on every event.
    """
    sparql_context = SPARQLClient(sparql_endpoint)
    docker_args = kwargs_from_env()
    docker_context = APIClient(timeout=5, **docker_args)
    # NOTE(review): SPARQLClient is elsewhere used via ``async with``; confirm
    # it also supports the synchronous ``with`` protocol used here.
    with sparql_context as sparql, docker_context as docker:
        await run_on_startup_subroutines(docker, sparql)
        async for x in docker.events(decode=True):
            try:
                event = new_event(docker, x)
                # Run all matching handlers concurrently for this event.
                await asyncio.gather(
                    *(handler(event, sparql)
                      for handler in list_handlers(event, reload=debug)))
            except Exception:
                # A failing handler must not kill the event stream.
                logger.exception(
                    "An error occurred during a coroutine execution. "
                    "The loop will not be interrupted.")
    def setUp(self):
        """Prepare the fixture: loop, database client, app, server, client."""
        self.loop = setup_test_loop()

        # Database-level client for direct fixture manipulation.
        self.db = SPARQLClient(endpoint="http://database:8890/sparql",
                               graph=IRI(ENV['MU_APPLICATION_GRAPH']),
                               loop=self.loop,
                               read_timeout=self.sparql_timeout)
        self.loop.run_until_complete(self.prepare_database())

        self.app = self.loop.run_until_complete(self.get_application())

        # HTTP server/client pair wrapping the application under test.
        self.server = FixedPortTestServer(self.app)
        self.client = self.loop.run_until_complete(
            self._get_client(self.server))

        self.loop.run_until_complete(self.client.start_server())
 async def async_execute_query(self, query, post=False):
     """
     Run *query* against ``self.host_name`` and return the JSON response.

     Retries up to 10 times, pausing half a second between attempts.

     :param query: SPARQL query string.
     :param post: accepted for interface compatibility; currently unused.
     :raises Exception: re-raises the last query error when every retry fails
         (the original code raised NameError on ``result`` in that case).
     """
     import asyncio

     async with SPARQLClient(self.host_name) as client:
         max_tries = 10
         last_error = None
         for _attempt in range(max_tries):
             try:
                 # Success: return directly; the context manager closes
                 # the client on the way out.
                 return await client.query(query)
             except Exception as exc:
                 # Narrowed from a bare ``except:`` so KeyboardInterrupt /
                 # SystemExit still propagate.  Sleep asynchronously so we
                 # do not block the event loop between retries.
                 last_error = exc
                 await asyncio.sleep(0.5)
     raise last_error
Ejemplo n.º 12
0
async def sparql_client(sparql_endpoint: str):
    """Yield a SPARQLClient for *sparql_endpoint*, closing it afterwards."""
    client = SPARQLClient(sparql_endpoint)
    yield client
    await client.close()
Ejemplo n.º 13
0
async def get_sparql_client():
    """Yield a SPARQLClient built from the MU_* environment, then close it."""
    client = SPARQLClient(ENV['MU_SPARQL_ENDPOINT'],
                          graph=IRI(ENV['MU_APPLICATION_GRAPH']))
    yield client
    await client.close()
Ejemplo n.º 14
0
async def jena_client(loop):
    """Yield a SPARQLClient against the local Jena dataset, then close it."""
    client = SPARQLClient("http://localhost:3030/ds")
    yield client
    await client.close()
class IntegrationTestCase(AioHTTPTestCase):
    """Base class for integration tests of the mu-swarm-admin application.

    Provides a SPARQL database fixture (cleared before each test), a
    fixed-port HTTP test server around the app, and helpers to insert
    triples, wait for the app's ActionScheduler, and assert on graph state.
    """

    # Git repository used as the default stack location in tests.
    example_repo = \
        "https://github.com/big-data-europe/mu-swarm-ui-testing.git"
    # Read timeout (seconds) applied to the SPARQL clients in this fixture.
    sparql_timeout = 5

    async def get_application(self):
        """Return a copy of the main app with the test SPARQL timeout set."""
        app = copy(muswarmadmin.main.app)
        app.sparql_timeout = self.sparql_timeout
        return app

    async def scheduler_complete(self, key):
        """Cancel/await the ActionScheduler registered under *key*.

        Raises KeyError when no scheduler exists for *key* (it is removed
        automatically once awaited).
        """
        if key not in ActionScheduler.executers:
            raise KeyError(
                "ActionScheduler for key %s does not exist. "
                "HINT: the ActionScheduler is removed automatically after "
                "calling this function" % key)
        await ActionScheduler.executers[key].cancel()

    async def wait_scheduler(self, key, timeout=3):
        """Poll up to *timeout* seconds for *key*'s scheduler, then await it."""
        for i in range(timeout * 5):
            if key in ActionScheduler.executers:
                break
            await asyncio.sleep(0.2)
        await self.scheduler_complete(key)

    def uuid4(self):
        """Return an uppercase, dash-free UUID4 string."""
        return str(uuid.uuid4()).replace("-", "").upper()

    def resource(self, type_, id):
        """Build the resource IRI for *type_*/*id* under the app's base."""
        return (
            muswarmadmin.main.Application.base_resource + "%s/%s" % (type_, id)
        )

    def project_exists(self, project_name):
        """Return True if a project directory exists under /data."""
        return os.path.exists("/data/%s" % project_name)

    async def triple_exists(self, s=None, p=None, o=None):
        """ASK whether a triple matching the given s/p/o exists in the graph.

        Omitted positions are left as free variables.
        """
        s = escape_any(s) if s is not None else "?s"
        p = escape_any(p) if p is not None else "?p"
        o = escape_any(o) if o is not None else "?o"
        result = await self.app.sparql.query(
            "ASK FROM {{graph}} WHERE { {{}} {{}} {{}} }", s, p, o)
        return result['boolean']

    async def prepare_triples(self, triples):
        """Insert *triples* via the direct database client (fixture setup)."""
        await self.db.update(
            "INSERT DATA { GRAPH {{graph}} { {{}} } }", Triples(triples))

    async def insert_triples(self, triples):
        """Insert *triples* via the application's own SPARQL client."""
        await self.app.sparql.update(
            "INSERT DATA { GRAPH {{graph}} { {{}} } }", Triples(triples))

    async def prepare_node(self, node):
        """Insert a single node via the database client."""
        await self.prepare_triples([node])

    async def insert_node(self, node):
        """Insert a single node via the application's SPARQL client."""
        await self.insert_triples([node])

    async def remove_triples(self, s=None, p=None, o=None):
        """Delete every triple matching the given s/p/o pattern."""
        s = escape_any(s) if s is not None else "?s"
        p = escape_any(p) if p is not None else "?p"
        o = escape_any(o) if o is not None else "?o"
        await self.app.sparql.update(
            """
            WITH {{graph}}
            DELETE {
                {{s}} {{p}} {{o}}
            }
            WHERE {
                {{s}} {{p}} {{o}}
            }""", s=s, p=p, o=o)

    async def describe(self, subject):
        """DESCRIBE *subject* through the application's SPARQL client."""
        return await self.app.sparql.query("DESCRIBE {{}} FROM {{graph}}",
                                           subject)

    async def create_drc_node(self, repository_iri=_sentinel,
                              location=_sentinel):
        """Create a docker-compose node (and a repository if none given).

        Returns (drc_iri, drc_id).
        """
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        else:
            # Derive the repository id from the tail of the given IRI.
            s_repository_iri = str(repository_iri)
            repository_id = s_repository_iri.split('/')[-1][:-1]
        drc_text = dedent("""\
            version: "2"
            services:
              service1:
                image: busybox
                command: "sleep 60"
              service2:
                image: busybox
                command: "sleep 60"
            """)
        drc_id = self.uuid4()
        d_iri = IRI("http://stack-builder.big-data-europe.eu/resources/")
        drc_iri = d_iri + "%s/%s" % ("docker-composes", drc_id)
        drc_title = "stack_{}_drc_{}".format(repository_id, drc_id)
        drc_node = Node(drc_iri, {
            RDF.type: Stackbuilder.DockerCompose,
            Mu.uuid: drc_id,
            Dct.title: drc_title,
            Stackbuilder.text: drc_text
        })

        await self.insert_triples([
            drc_node,
            (repository_iri, SwarmUI.dockerComposeFile, drc_node),
        ])
        return (drc_iri, drc_id)

    async def create_repository(self, location=_sentinel):
        """Create a stack/repository node; returns (repository_iri, id)."""
        if location is _sentinel:
            location = self.example_repo
        repository_id = self.uuid4()
        repository_iri = self.resource("stacks", repository_id)
        await self.insert_node(Node(repository_iri, {
            RDF.type: Doap.Stack,
            Mu.uuid: repository_id,
            Doap.location: location,
        }))
        return (repository_iri, repository_id)

    async def create_pipeline(self, repository_iri=_sentinel,
                              location=_sentinel):
        """Create a pipeline node and wait for its scheduler to finish.

        Returns (pipeline_iri, pipeline_id).
        """
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        pipeline_id = self.uuid4()
        pipeline_iri = self.resource("pipeline-instances", pipeline_id)
        pipeline_node = Node(pipeline_iri, {
            RDF.type: SwarmUI.Pipeline,
            Mu.uuid: pipeline_id,
        })
        await self.insert_triples([
            pipeline_node,
            (repository_iri, SwarmUI.pipelines, pipeline_node),
        ])
        await self.scheduler_complete(pipeline_id)
        return (pipeline_iri, pipeline_id)

    async def get_services(self, project_name):
        """Map service titles to (service IRI, uuid) for a pipeline's services."""
        result = await self.app.sparql.query(
            """
            SELECT ?name ?service ?uuid
            FROM {{graph}}
            WHERE {
                ?pipeline mu:uuid {{}} ;
                  swarmui:services ?service .

                ?service mu:uuid ?uuid ;
                  dct:title ?name .
            }
            """, escape_any(project_name))
        return {
            x['name']['value']: (IRI(x['service']['value']),
                                 x['uuid']['value'])
            for x in result['results']['bindings']
        }

    async def prepare_database(self):
        """Clear the application graph before a test."""
        await self.db.update("CLEAR GRAPH {{graph}}")

    def setUp(self):
        """Build the fixture: loop, DB client, application, server, client."""
        self.loop = setup_test_loop()

        # Direct database client used for fixture setup/teardown queries.
        self.db = SPARQLClient(endpoint="http://database:8890/sparql",
                               graph=IRI(ENV['MU_APPLICATION_GRAPH']),
                               loop=self.loop,
                               read_timeout=self.sparql_timeout)
        self.loop.run_until_complete(self.prepare_database())

        self.app = self.loop.run_until_complete(self.get_application())

        self.server = FixedPortTestServer(self.app)
        self.client = self.loop.run_until_complete(
            self._get_client(self.server))

        self.loop.run_until_complete(self.client.start_server())

    def tearDown(self):
        """Close the DB client and remove every compose project under /data."""
        self.loop.run_until_complete(self.db.close())
        super().tearDown()
        for project_name in os.listdir("/data"):
            project_path = "/data/%s" % project_name
            subprocess.call(["docker-compose", "down"], cwd=project_path)
            shutil.rmtree(project_path)

    # NOTE: temporary fix, will be fixed with the next aiohttp release
    @asyncio.coroutine
    def _get_client(self, app):
        """Return a TestClient instance."""
        return TestClient(app, loop=self.loop)

    async def assertNode(self, subject, values):
        """Assert *subject* exists and each predicate has exactly one value."""
        result = await self.describe(subject)
        self.assertTrue(result and result[subject])
        for p, o in values.items():
            found_values = [x['value'] for x in result[subject][p]]
            self.assertEqual(
                len(found_values), 1,
                "multiple predicates {} in node's subject {}: {!r}".format(
                    p, subject, found_values))
            self.assertEqual(
                found_values[0], o,
                "predicate {} in node {} has value {}, expected {}".format(
                    p, subject, found_values[0], o))

    async def assertStatus(self, subject, status):
        """Assert *subject*'s swarmui:status equals *status*."""
        await self.assertNode(subject, {SwarmUI.status: status})

    async def assertExists(self, s=None, p=None, o=None):
        """Assert a triple matching s/p/o exists in the graph."""
        self.assertTrue(await self.triple_exists(s, p, o))

    async def assertNotExists(self, s=None, p=None, o=None):
        """Assert no triple matching s/p/o exists in the graph."""
        self.assertFalse(await self.triple_exists(s, p, o))
Ejemplo n.º 16
0
async def jena_client(jena_endpoint):
    """Yield a SPARQLClient for *jena_endpoint* and close it on teardown."""
    client = SPARQLClient(jena_endpoint)
    yield client
    await client.close()
Ejemplo n.º 17
0
class IntegrationTestCase(AioHTTPTestCase):
    """Base class for integration tests of the mu-swarm-admin application.

    Supplies a cleared SPARQL graph, a fixed-port test server around the
    app, and helpers for inserting triples, driving the ActionScheduler,
    and asserting on graph contents.
    """

    # Git repository used as the default stack location in tests.
    example_repo = \
        "https://github.com/big-data-europe/mu-swarm-ui-testing.git"
    # Read timeout (seconds) applied to the SPARQL clients in this fixture.
    sparql_timeout = 5

    async def get_application(self):
        """Return a copy of the main app with the test SPARQL timeout set."""
        app = copy(muswarmadmin.main.app)
        app.sparql_timeout = self.sparql_timeout
        return app

    async def scheduler_complete(self, key):
        """Cancel/await the ActionScheduler registered under *key*.

        Raises KeyError when no scheduler exists for *key* (it is removed
        automatically once awaited).
        """
        if key not in ActionScheduler.executers:
            raise KeyError(
                "ActionScheduler for key %s does not exist. "
                "HINT: the ActionScheduler is removed automatically after "
                "calling this function" % key)
        await ActionScheduler.executers[key].cancel()

    async def wait_scheduler(self, key, timeout=3):
        """Poll up to *timeout* seconds for *key*'s scheduler, then await it."""
        for i in range(timeout * 5):
            if key in ActionScheduler.executers:
                break
            await asyncio.sleep(0.2)
        await self.scheduler_complete(key)

    def uuid4(self):
        """Return an uppercase, dash-free UUID4 string."""
        return str(uuid.uuid4()).replace("-", "").upper()

    def resource(self, type_, id):
        """Build the resource IRI for *type_*/*id* under the app's base."""
        return (muswarmadmin.main.Application.base_resource + "%s/%s" %
                (type_, id))

    def project_exists(self, project_name):
        """Return True if a project directory exists under /data."""
        return os.path.exists("/data/%s" % project_name)

    async def triple_exists(self, s=None, p=None, o=None):
        """ASK whether a triple matching the given s/p/o exists in the graph.

        Omitted positions are left as free variables.
        """
        s = escape_any(s) if s is not None else "?s"
        p = escape_any(p) if p is not None else "?p"
        o = escape_any(o) if o is not None else "?o"
        result = await self.app.sparql.query(
            "ASK FROM {{graph}} WHERE { {{}} {{}} {{}} }", s, p, o)
        return result['boolean']

    async def prepare_triples(self, triples):
        """Insert *triples* via the direct database client (fixture setup)."""
        await self.db.update("INSERT DATA { GRAPH {{graph}} { {{}} } }",
                             Triples(triples))

    async def insert_triples(self, triples):
        """Insert *triples* via the application's own SPARQL client."""
        await self.app.sparql.update(
            "INSERT DATA { GRAPH {{graph}} { {{}} } }", Triples(triples))

    async def prepare_node(self, node):
        """Insert a single node via the database client."""
        await self.prepare_triples([node])

    async def insert_node(self, node):
        """Insert a single node via the application's SPARQL client."""
        await self.insert_triples([node])

    async def remove_triples(self, s=None, p=None, o=None):
        """Delete every triple matching the given s/p/o pattern."""
        s = escape_any(s) if s is not None else "?s"
        p = escape_any(p) if p is not None else "?p"
        o = escape_any(o) if o is not None else "?o"
        await self.app.sparql.update("""
            WITH {{graph}}
            DELETE {
                {{s}} {{p}} {{o}}
            }
            WHERE {
                {{s}} {{p}} {{o}}
            }""",
                                     s=s,
                                     p=p,
                                     o=o)

    async def describe(self, subject):
        """DESCRIBE *subject* through the application's SPARQL client."""
        return await self.app.sparql.query("DESCRIBE {{}} FROM {{graph}}",
                                           subject)

    async def create_drc_node(self,
                              repository_iri=_sentinel,
                              location=_sentinel):
        """Create a docker-compose node (and a repository if none given).

        Returns (drc_iri, drc_id).
        """
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        else:
            # Derive the repository id from the tail of the given IRI.
            s_repository_iri = str(repository_iri)
            repository_id = s_repository_iri.split('/')[-1][:-1]
        drc_text = dedent("""\
            version: "2"
            services:
              service1:
                image: busybox
                command: "sleep 60"
              service2:
                image: busybox
                command: "sleep 60"
            """)
        drc_id = self.uuid4()
        d_iri = IRI("http://stack-builder.big-data-europe.eu/resources/")
        drc_iri = d_iri + "%s/%s" % ("docker-composes", drc_id)
        drc_title = "stack_{}_drc_{}".format(repository_id, drc_id)
        drc_node = Node(
            drc_iri, {
                RDF.type: Stackbuilder.DockerCompose,
                Mu.uuid: drc_id,
                Dct.title: drc_title,
                Stackbuilder.text: drc_text
            })

        await self.insert_triples([
            drc_node,
            (repository_iri, SwarmUI.dockerComposeFile, drc_node),
        ])
        return (drc_iri, drc_id)

    async def create_repository(self, location=_sentinel):
        """Create a stack/repository node; returns (repository_iri, id)."""
        if location is _sentinel:
            location = self.example_repo
        repository_id = self.uuid4()
        repository_iri = self.resource("stacks", repository_id)
        await self.insert_node(
            Node(
                repository_iri, {
                    RDF.type: Doap.Stack,
                    Mu.uuid: repository_id,
                    Doap.location: location,
                }))
        return (repository_iri, repository_id)

    async def create_pipeline(self,
                              repository_iri=_sentinel,
                              location=_sentinel):
        """Create a pipeline node and wait for its scheduler to finish.

        Returns (pipeline_iri, pipeline_id).
        """
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        pipeline_id = self.uuid4()
        pipeline_iri = self.resource("pipeline-instances", pipeline_id)
        pipeline_node = Node(pipeline_iri, {
            RDF.type: SwarmUI.Pipeline,
            Mu.uuid: pipeline_id,
        })
        await self.insert_triples([
            pipeline_node,
            (repository_iri, SwarmUI.pipelines, pipeline_node),
        ])
        await self.scheduler_complete(pipeline_id)
        return (pipeline_iri, pipeline_id)

    async def get_services(self, project_name):
        """Map service titles to (service IRI, uuid) for a pipeline's services."""
        result = await self.app.sparql.query(
            """
            SELECT ?name ?service ?uuid
            FROM {{graph}}
            WHERE {
                ?pipeline mu:uuid {{}} ;
                  swarmui:services ?service .

                ?service mu:uuid ?uuid ;
                  dct:title ?name .
            }
            """, escape_any(project_name))
        return {
            x['name']['value']:
            (IRI(x['service']['value']), x['uuid']['value'])
            for x in result['results']['bindings']
        }

    async def prepare_database(self):
        """Clear the application graph before a test."""
        await self.db.update("CLEAR GRAPH {{graph}}")

    def setUp(self):
        """Build the fixture: loop, DB client, application, server, client."""
        self.loop = setup_test_loop()

        # Direct database client used for fixture setup/teardown queries.
        self.db = SPARQLClient(endpoint="http://database:8890/sparql",
                               graph=IRI(ENV['MU_APPLICATION_GRAPH']),
                               loop=self.loop,
                               read_timeout=self.sparql_timeout)
        self.loop.run_until_complete(self.prepare_database())

        self.app = self.loop.run_until_complete(self.get_application())

        self.server = FixedPortTestServer(self.app)
        self.client = self.loop.run_until_complete(
            self._get_client(self.server))

        self.loop.run_until_complete(self.client.start_server())

    def tearDown(self):
        """Close the DB client and remove every compose project under /data."""
        self.loop.run_until_complete(self.db.close())
        super().tearDown()
        for project_name in os.listdir("/data"):
            project_path = "/data/%s" % project_name
            subprocess.call(["docker-compose", "down"], cwd=project_path)
            shutil.rmtree(project_path)

    # NOTE: temporary fix, will be fixed with the next aiohttp release
    @asyncio.coroutine
    def _get_client(self, app):
        """Return a TestClient instance."""
        return TestClient(app, loop=self.loop)

    async def assertNode(self, subject, values):
        """Assert *subject* exists and each predicate has exactly one value."""
        result = await self.describe(subject)
        self.assertTrue(result and result[subject])
        for p, o in values.items():
            found_values = [x['value'] for x in result[subject][p]]
            self.assertEqual(
                len(found_values), 1,
                "multiple predicates {} in node's subject {}: {!r}".format(
                    p, subject, found_values))
            self.assertEqual(
                found_values[0], o,
                "predicate {} in node {} has value {}, expected {}".format(
                    p, subject, found_values[0], o))

    async def assertStatus(self, subject, status):
        """Assert *subject*'s swarmui:status equals *status*."""
        await self.assertNode(subject, {SwarmUI.status: status})

    async def assertExists(self, s=None, p=None, o=None):
        """Assert a triple matching s/p/o exists in the graph."""
        self.assertTrue(await self.triple_exists(s, p, o))

    async def assertNotExists(self, s=None, p=None, o=None):
        """Assert no triple matching s/p/o exists in the graph."""
        self.assertFalse(await self.triple_exists(s, p, o))
Ejemplo n.º 18
0
 def init_sparql(self, sparql_url, *args, **kwargs):
     """Attach a SPARQLClient for *sparql_url* to this instance.

     Extra ``*args``/``**kwargs`` are accepted but currently unused.
     """
     self.sparql = SPARQLClient(sparql_url)
Ejemplo n.º 19
0
async def async_get_results(endpoint_url, query):
    """Run *query* against *endpoint_url* and return the parsed result.

    The client is always closed, even when the query raises — the original
    code leaked the underlying HTTP session on a query failure.

    :param endpoint_url: SPARQL endpoint URL.
    :param query: SPARQL query string.
    :return: the query result as returned by ``SPARQLClient.query``.
    """
    sparql = SPARQLClient(endpoint_url)
    try:
        return await sparql.query(query)
    finally:
        await sparql.close()
Ejemplo n.º 20
0
def jena_client(loop):
    """Yield a SPARQLClient for the local Jena dataset; close it via *loop*."""
    client = SPARQLClient("http://localhost:3030/ds")
    yield client
    loop.run_until_complete(client.close())