async def test_query(self):
    """A templated SELECT is rendered against the graph and POSTed to
    the query endpoint; a failing query raises SPARQLRequestFailed."""
    data = Triples([("john", RDF.type, "doe"), ("john", "p", "o")])
    result = await self.client.query(
        """
        SELECT *
        FROM {{graph}}
        WHERE {
            {{}}
        }
        """,
        data,
    )
    # The request must go to the read (query) endpoint.
    self.assertEqual(result["path"], self.client_kwargs["endpoint"])
    self.assertIn("post", result)
    self.assertIn("query", result["post"])
    expected = dedent(
        """\
        PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>

        SELECT *
        FROM <http://mu.semte.ch/test-application>
        WHERE {
            john rdf:type "doe" ;
                p "o" .
        }"""
    )
    self.assertEqual(result["post"]["query"], expected)
    with self.assertRaises(SPARQLRequestFailed):
        await self.client.query("failure")
async def save_container_logs(client, container, since, sparql, base_concept):
    """
    Stream a container's log lines into the triple store.

    Iterates over the container's timestamped log lines (starting at
    ``since``) and inserts one log-line resource per line under
    ``base_concept``, until the stream ends (the container stopped).

    :param client: Docker client exposing an async ``logs`` stream.
    :param container: container ID (full hash; truncated for logging).
    :param since: only stream log lines emitted after this time.
    :param sparql: SPARQL client used for the INSERT updates.
    :param base_concept: IRI prefix under which log resources are created.
    """
    try:
        async for line in client.logs(container, stream=True,
                                      timestamps=True, since=since):
            # Docker prepends "<timestamp> " to each line when
            # timestamps=True; split it off the payload.
            timestamp, log = line.decode().split(" ", 1)
            timestamp = datetime_parser.parse(timestamp)
            uuid = uuid1(0)
            concept = base_concept + ("/log/%s" % uuid)
            logger.debug("Log into %s: %s", concept, log.strip())
            triples = Triples([
                (base_concept, SwarmUI.logLine, concept),
                Node(concept, {
                    Mu.uuid: uuid,
                    Dct.issued: timestamp,
                    Dct.title: log,
                }),
            ])
            # Fix: the update response was previously bound to an unused
            # local (`resp`); just await the insertion.
            await sparql.update(
                """
                WITH {{graph}}
                INSERT DATA {
                    {{}}
                }
                """, triples)
    finally:
        logger.info("Finished logging into %s (container %s is stopped)",
                    base_concept, container[:12])
async def test_update(self):
    """A templated INSERT is rendered against the graph and POSTed to
    the update endpoint; a failing update raises SPARQLRequestFailed."""
    data = Triples([("john", RDF.type, "doe"), ("john", "p", "o")])
    result = await self.client.update(
        """
        WITH {{graph}}
        INSERT DATA {
            {{}}
        }
        """,
        data,
    )
    # The request must go to the write (update) endpoint.
    self.assertEqual(result["path"], self.client_kwargs["update_endpoint"])
    self.assertIn("post", result)
    self.assertIn("update", result["post"])
    expected = dedent(
        """\
        PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>

        WITH <http://mu.semte.ch/test-application>
        INSERT DATA {
            john rdf:type "doe" ;
                p "o" .
        }"""
    )
    self.assertEqual(result["post"]["update"], expected)
    with self.assertRaises(SPARQLRequestFailed):
        await self.client.update("failure")
async def update_pipeline_services(self, subject):
    """
    Generate and insert the triples of the services of a Docker Compose
    project (pipeline) inside the database.
    """
    project_id = await self.get_resource_id(subject)
    compose_data = self.open_compose_data(project_id)
    new_triples = Triples()
    for svc in compose_data.services:
        svc_uuid = uuid4()
        svc_iri = RDFTerm(":%s" % svc_uuid)
        # Link the pipeline to the service, then describe the service.
        new_triples.append((subject, SwarmUI.services, svc_iri))
        svc_node = Node(svc_iri, {
            Mu.uuid: svc_uuid,
            Dct.title: svc['name'],
            SwarmUI.scaling: 0,
            RDF.type: SwarmUI.Service,
            SwarmUI.status: SwarmUI.Stopped,
        })
        new_triples.append(svc_node)
    # Remove any services previously registered for this pipeline.
    await self.sparql.update("""
        WITH {{graph}}
        DELETE {
            {{pipeline}} swarmui:services ?service .
            ?service ?p ?o .
        }
        WHERE {
            {{pipeline}} swarmui:services ?service .
            ?service ?p ?o .
        }
        """, pipeline=subject)
    # Insert the freshly generated service descriptions.
    await self.sparql.update("""
        PREFIX : {{services_iri}}
        INSERT DATA {
            GRAPH {{graph}} {
                {{triples}}
            }
        }""",
        services_iri=(self.base_resource + "services/"),
        triples=new_triples)
def test_triples(self):
    """Triples groups statements per subject when rendered, and
    ``indent`` matches textwrap.indent on the rendered form."""
    collection = Triples([("john", RDF.type, "doe")])
    collection.append(("john", "foo", "bar"))
    collection.extend([("jane", "hello", Literal("world", "en"))])
    rendered = dedent("""\
        john rdf:type "doe" ;
            foo "bar" .
        jane hello "world"@en .""")
    self.assertEqual(str(collection), rendered)
    self.assertEqual(collection.indent(" "), indent(str(collection), " "))
async def update_pipeline_services(self, subject):
    """
    Generate and insert the triples of the services of a Docker Compose
    project (pipeline) inside the database.
    """
    project_id = await self.get_resource_id(subject)
    data = self.open_compose_data(project_id)
    triples = Triples()
    for service in data.services:
        identifier = uuid4()
        iri = RDFTerm(":%s" % identifier)
        description = {
            Mu.uuid: identifier,
            Dct.title: service['name'],
            SwarmUI.scaling: 0,
            RDF.type: SwarmUI.Service,
            SwarmUI.status: SwarmUI.Stopped,
        }
        # One link triple from the pipeline plus the service node itself.
        triples.extend([(subject, SwarmUI.services, iri),
                        Node(iri, description)])
    # First clear the pipeline's existing service entries...
    await self.sparql.update("""
        WITH {{graph}}
        DELETE {
            {{pipeline}} swarmui:services ?service .
            ?service ?p ?o .
        }
        WHERE {
            {{pipeline}} swarmui:services ?service .
            ?service ?p ?o .
        }
        """, pipeline=subject)
    # ...then write the regenerated ones.
    await self.sparql.update("""
        PREFIX : {{services_iri}}
        INSERT DATA {
            GRAPH {{graph}} {
                {{triples}}
            }
        }""",
        services_iri=(self.base_resource + "services/"),
        triples=triples)
async def save_container_stats(client, container, since, sparql):
    """
    Stream a container's resource statistics into the triple store.

    For every stats sample emitted by the Docker stats stream, builds a
    graph of nodes (stats, per-interface network, pids, cpu/precpu usage
    and throttling, memory) and inserts them via ``sparql.update``.

    Docker stats API doc:
    https://docs.docker.com/engine/api/v1.26/#operation/ContainerStats
    """
    # NOTE(review): `since` is accepted but never used here — confirm
    # whether it should be forwarded to client.stats().
    stats = client.stats(container, decode=True)
    async for data in stats:
        # Pre-allocate one UUID per nested resource so the parent stats
        # node can reference its children by IRI before they are built.
        uuid_pids_stats = uuid1(0)
        uuid_cpu_stats_cpu_usage = uuid1(0)
        uuid_cpu_stats_throttling_data = uuid1(0)
        uuid_cpu_stats = uuid1(0)
        uuid_precpu_stats_cpu_usage = uuid1(0)
        uuid_precpu_stats_throttling_data = uuid1(0)
        uuid_precpu_stats = uuid1(0)
        uuid_memory_stats_stats = uuid1(0)
        uuid_memory_stats = uuid1(0)
        uuid = uuid1(0)
        # Root node of this sample; child nodes are linked by RDFTerm IRI.
        stats_node = Node(
            ':%s' % uuid, {
                'a': SwarmUI.Stats,
                'mu:uuid': uuid,
                'swarmui:read': datetime_parser.parse(data['read']),
                'swarmui:preread': datetime_parser.parse(data['preread']),
                'swarmui:pidsStats': RDFTerm(':%s' % uuid_pids_stats),
                'swarmui:numProcs': data['num_procs'],
                'swarmui:cpuStats': RDFTerm(':%s' % uuid_cpu_stats),
                'swarmui:precpuStats': RDFTerm(':%s' % uuid_precpu_stats),
                'swarmui:memoryStats': RDFTerm(':%s' % uuid_memory_stats),
                'swarmui:name': data['name'],
                'swarmui:id': data['id'],
            })
        triples = Triples([stats_node])
        # One Network node per interface, linked back onto the stats node.
        for if_, network in data['networks'].items():
            network_uuid = uuid1(0)
            network_node = Node(
                ':%s' % network_uuid, {
                    'a': SwarmUI.Network,
                    'mu:uuid': network_uuid,
                    'swarmui:interface': if_,
                    'swarmui:rxBytes': network['rx_bytes'],
                    'swarmui:rxPackets': network['rx_packets'],
                    'swarmui:rxErrors': network['rx_errors'],
                    'swarmui:rxDropped': network['rx_dropped'],
                    'swarmui:txBytes': network['tx_bytes'],
                    'swarmui:txPackets': network['tx_packets'],
                    'swarmui:txErrors': network['tx_errors'],
                    'swarmui:txDropped': network['tx_dropped'],
                })
            triples.append(network_node)
            stats_node.append(('swarmui:network', network_node))
        triples.extend([
            Node(
                ':%s' % uuid_pids_stats, {
                    'a': SwarmUI.PidsStats,
                    'mu:uuid': uuid_pids_stats,
                    'swarmui:current': data['pids_stats']['current'],
                }),
            # cpu_usage nodes use a list of pairs (not a dict) because
            # 'swarmui:percpuUsage' repeats once per CPU.
            Node(':%s' % uuid_cpu_stats_cpu_usage, [
                ('a', SwarmUI.CpuUsage),
                ('mu:uuid', uuid_cpu_stats_cpu_usage),
                ('swarmui:totalUsage',
                 data['cpu_stats']['cpu_usage']['total_usage']),
            ] + [('swarmui:percpuUsage', x)
                 for x in data['cpu_stats']
                 ['cpu_usage'].get('percpu_usage', [])] + [
                ('swarmui:usageInKernelmode',
                 data['cpu_stats']['cpu_usage']['usage_in_kernelmode']),
                ('swarmui:usageInUsermode',
                 data['cpu_stats']['cpu_usage']['usage_in_usermode']),
            ]),
            Node(
                ':%s' % uuid_cpu_stats_throttling_data, {
                    'a': SwarmUI.ThrottlingData,
                    'mu:uuid': uuid_cpu_stats_throttling_data,
                    'swarmui:periods':
                    data['cpu_stats']['throttling_data']['periods'],
                    'swarmui:throttledPeriods':
                    data['cpu_stats']['throttling_data']['throttled_periods'],
                    'swarmui:throttledTime':
                    data['cpu_stats']['throttling_data']['throttled_time'],
                }),
            Node(
                ':%s' % uuid_cpu_stats, {
                    'a': SwarmUI.CpuStats,
                    'mu:uuid': uuid_cpu_stats,
                    'swarmui:cpuUsage':
                    RDFTerm(':%s' % uuid_cpu_stats_cpu_usage),
                    'swarmui:systemCpuUsage':
                    data['cpu_stats']['system_cpu_usage'],
                    'swarmui:throttlingData':
                    RDFTerm(':%s' % uuid_cpu_stats_throttling_data),
                }),
            Node(':%s' % uuid_precpu_stats_cpu_usage, [
                ('a', SwarmUI.CpuUsage),
                ('mu:uuid', uuid_precpu_stats_cpu_usage),
                ('swarmui:totalUsage',
                 data['precpu_stats']['cpu_usage']['total_usage']),
            ] + [
                ('swarmui:percpuUsage', x)
                for x in data['precpu_stats']['cpu_usage'].get(
                    'percpu_usage', [])
            ] + [
                ('swarmui:usageInKernelmode',
                 data['precpu_stats']['cpu_usage']['usage_in_kernelmode']),
                ('swarmui:usageInUsermode',
                 data['precpu_stats']['cpu_usage']['usage_in_usermode']),
            ]),
            Node(
                ':%s' % uuid_precpu_stats_throttling_data, {
                    'a': SwarmUI.ThrottlingData,
                    'mu:uuid': uuid_precpu_stats_throttling_data,
                    'swarmui:periods':
                    data['precpu_stats']['throttling_data']['periods'],
                    'swarmui:throttledPeriods':
                    data['precpu_stats']['throttling_data']
                    ['throttled_periods'],
                    'swarmui:throttledTime':
                    data['precpu_stats']['throttling_data']['throttled_time'],
                }),
            Node(
                ':%s' % uuid_precpu_stats, {
                    'a': SwarmUI.PrecpuStats,
                    'mu:uuid': uuid_precpu_stats,
                    'swarmui:cpuUsage':
                    RDFTerm(':%s' % uuid_precpu_stats_cpu_usage),
                    # .get(): precpu_stats may lack system_cpu_usage on the
                    # first sample.
                    'swarmui:systemCpuUsage':
                    data['precpu_stats'].get('system_cpu_usage'),
                    'swarmui:throttlingData':
                    RDFTerm(':%s' % uuid_precpu_stats_throttling_data),
                }),
            Node(
                ':%s' % uuid_memory_stats_stats, {
                    'a': SwarmUI.Stats,
                    'mu:uuid': uuid_memory_stats_stats,
                    'swarmui:activeAnon':
                    data['memory_stats']['stats']['active_anon'],
                    'swarmui:activeFile':
                    data['memory_stats']['stats']['active_file'],
                    'swarmui:cache': data['memory_stats']['stats']['cache'],
                    'swarmui:dirty': data['memory_stats']['stats']['dirty'],
                    'swarmui:hierarchicalMemoryLimit':
                    data['memory_stats']['stats']['hierarchical_memory_limit'],
                    'swarmui:hierarchicalMemswLimit':
                    data['memory_stats']['stats']['hierarchical_memsw_limit'],
                    'swarmui:inactiveAnon':
                    data['memory_stats']['stats']['inactive_anon'],
                    'swarmui:inactiveFile':
                    data['memory_stats']['stats']['inactive_file'],
                    'swarmui:mappedFile':
                    data['memory_stats']['stats']['mapped_file'],
                    'swarmui:pgfault':
                    data['memory_stats']['stats']['pgfault'],
                    'swarmui:pgmajfault':
                    data['memory_stats']['stats']['pgmajfault'],
                    'swarmui:pgpgin': data['memory_stats']['stats']['pgpgin'],
                    'swarmui:pgpgout':
                    data['memory_stats']['stats']['pgpgout'],
                    'swarmui:rss': data['memory_stats']['stats']['rss'],
                    'swarmui:rssHuge':
                    data['memory_stats']['stats']['rss_huge'],
                    'swarmui:swap': data['memory_stats']['stats']['swap'],
                    'swarmui:totalActiveAnon':
                    data['memory_stats']['stats']['total_active_anon'],
                    'swarmui:totalActiveFile':
                    data['memory_stats']['stats']['total_active_file'],
                    'swarmui:totalCache':
                    data['memory_stats']['stats']['total_cache'],
                    'swarmui:totalDirty':
                    data['memory_stats']['stats']['total_dirty'],
                    'swarmui:totalInactiveAnon':
                    data['memory_stats']['stats']['total_inactive_anon'],
                    'swarmui:totalInactiveFile':
                    data['memory_stats']['stats']['total_inactive_file'],
                    'swarmui:totalMappedFile':
                    data['memory_stats']['stats']['total_mapped_file'],
                    'swarmui:totalPgfault':
                    data['memory_stats']['stats']['total_pgfault'],
                    'swarmui:totalPgmajfault':
                    data['memory_stats']['stats']['total_pgmajfault'],
                    'swarmui:totalPgpgin':
                    data['memory_stats']['stats']['total_pgpgin'],
                    'swarmui:totalPgpgout':
                    data['memory_stats']['stats']['total_pgpgout'],
                    'swarmui:totalRss':
                    data['memory_stats']['stats']['total_rss'],
                    'swarmui:totalRssHuge':
                    data['memory_stats']['stats']['total_rss_huge'],
                    'swarmui:totalSwap':
                    data['memory_stats']['stats']['total_swap'],
                    'swarmui:totalUnevictable':
                    data['memory_stats']['stats']['total_unevictable'],
                    'swarmui:totalWriteback':
                    data['memory_stats']['stats']['total_writeback'],
                    'swarmui:unevictable':
                    data['memory_stats']['stats']['unevictable'],
                    'swarmui:writeback':
                    data['memory_stats']['stats']['writeback'],
                }),
            Node(
                ':%s' % uuid_memory_stats, {
                    'a': SwarmUI.MemoryStats,
                    'mu:uuid': uuid_memory_stats,
                    'swarmui:usage': data['memory_stats']['usage'],
                    'swarmui:maxUsage': data['memory_stats']['max_usage'],
                    'swarmui:stats': RDFTerm(':%s' % uuid_memory_stats_stats),
                    'swarmui:limit': data['memory_stats']['limit'],
                }),
        ])
        # Persist this sample before awaiting the next one.
        # NOTE(review): loop placement reconstructed from the collapsed
        # source — the insert is assumed per-sample; confirm against VCS.
        await sparql.update(
            """
            PREFIX : <http://ontology.aksw.org/dockstats/>
            WITH {{graph}}
            INSERT DATA {
                {{}}
            }
            """, triples)
    logger.info("Finished logging stats (container %s is stopped)",
                container[:12])
async def insert_triples(self, triples):
    """Insert the given triples into the application graph."""
    template = "INSERT DATA { GRAPH {{graph}} { {{}} } }"
    await self.app.sparql.update(template, Triples(triples))
async def prepare_triples(self, triples):
    """Insert the given triples into the database graph."""
    payload = Triples(triples)
    await self.db.update("INSERT DATA { GRAPH {{graph}} { {{}} } }", payload)