Example #1
    async def prepare_database(self):
        await super().prepare_database()
        repository_id = self.uuid4()
        repository_iri = self.resource("stacks", repository_id)
        pipeline_id = self.uuid4()
        pipeline_iri = self.resource("pipeline-instances", pipeline_id)
        await self.prepare_node(
            Node(repository_iri, {
                RDF.type: Doap.Stack,
                Mu.uuid: repository_id,
                Doap.location: self.example_repo,
                SwarmUI.pipelines: Node(pipeline_iri, [
                    (RDF.type, SwarmUI.Pipeline),
                    (Mu.uuid, pipeline_id),
                    (SwarmUI.status, SwarmUI.Error),
                ]),
            }))

        subprocess.check_call(["git", "clone", self.example_repo, pipeline_id],
                              cwd="/data")

        self.pipeline_iri = pipeline_iri
        self.pipeline_id = pipeline_id
Example #2
async def save_container_logs(client, container, since, sparql, base_concept):
    """
    Iterates over the container's log lines and insert triples to the database
    until there is no more lines
    """
    try:
        async for line in client.logs(container, stream=True, timestamps=True,
                                      since=since):
            timestamp, log = line.decode().split(" ", 1)
            timestamp = datetime_parser.parse(timestamp)
            uuid = uuid1(0)
            concept = base_concept + ("/log/%s" % uuid)
            logger.debug("Log into %s: %s", concept, log.strip())
            triples = Triples([
                (base_concept, SwarmUI.logLine, concept),
                Node(concept, {
                    Mu.uuid: uuid,
                    Dct.issued: timestamp,
                    Dct.title: log,
                }),
            ])
            await sparql.update(
                """
                WITH {{graph}}
                INSERT DATA {
                    {{}}
                }
                """, triples)
    finally:
        logger.info("Finished logging into %s (container %s is stopped)",
                    base_concept, container[:12])
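
The docstring above describes the core loop: each log line becomes one swarmui:logLine link plus a small resource carrying the timestamp and text. Below is a minimal, self-contained sketch of that triple-building step. It assumes the Node/RDFTerm/Triples helpers come from aiosparql.syntax as in the tests further down; the ":containers/abc123" subject and the prefixed predicate names are made up for illustration and are not the service's real configuration.

from datetime import datetime
from uuid import uuid1

from aiosparql.syntax import Node, RDFTerm, Triples

# Illustrative subject and prefixes only; the real service derives
# base_concept from the container and uses its own namespace objects.
base_concept = RDFTerm(":containers/abc123")
uuid = uuid1(0)
concept = RDFTerm(":containers/abc123/log/%s" % uuid)
triples = Triples([
    (base_concept, "swarmui:logLine", concept),
    Node(concept, {
        "mu:uuid": uuid,
        "dct:issued": datetime.now(),
        "dct:title": "hello from the container\n",
    }),
])
# `triples` can then be interpolated into the positional {{}} placeholder of
# sparql.update(), exactly as in the function above.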
Example #3
    def test_node_in_node(self):
        node1 = Node(IRI("john"), [("foo", "bar")])
        node2 = Node(IRI("jane"), [("foo", "bar")])
        node3 = Node("parent", [("child1", node1), ("child2", node2),
                                ("foo", "bar")])
        self.assertEqual(
            str(node3),
            dedent("""\
            parent child1 <john> ;
                child2 <jane> ;
                foo "bar" .

            <jane> foo "bar" .

            <john> foo "bar" ."""),
        )
Example #4
    async def prepare_database(self):
        await super().prepare_database()
        self.node1_id = self.uuid4()
        self.node1_iri = self.resource("services", self.node1_id)
        await self.prepare_node(
            Node(self.pipeline_iri, {
                SwarmUI.services: Node(self.node1_iri, {
                    RDF.type: SwarmUI.Service,
                    Mu.uuid: self.node1_id,
                    SwarmUI.scaling: 3,  # an invalid value
                    SwarmUI.status: SwarmUI.Started,  # an invalid value
                    Dct.title: "service1",
                }),
            }))
Example #5
    async def prepare_database(self):
        await super().prepare_database()
        self.node2_id = self.uuid4()
        self.node2_iri = self.resource("services", self.node2_id)
        await self.prepare_node(
            Node(self.pipeline_iri, {
                SwarmUI.services: Node(self.node2_iri, {
                    RDF.type: SwarmUI.Service,
                    Mu.uuid: self.node2_id,
                    SwarmUI.scaling: 0,  # an invalid value
                    SwarmUI.status: SwarmUI.Stopped,  # an invalid value
                    Dct.title: "service2",
                }),
            }))
        subprocess.check_call(["docker-compose", "up", "-d", "service2"],
                              cwd="/data/%s" % self.pipeline_id)
Example #6
    async def create_repository(self, location=_sentinel):
        if location is _sentinel:
            location = self.example_repo
        repository_id = self.uuid4()
        repository_iri = self.resource("stacks", repository_id)
        await self.insert_node(
            Node(repository_iri, {
                RDF.type: Doap.Stack,
                Mu.uuid: repository_id,
                Doap.location: location,
            }))
        return (repository_iri, repository_id)
Example #7
    def test_node(self):
        node = Node("john", [
            ("_first", None),
            (RDF.type, "doe"),
            ("foo", "bar"),
            ("foo", "baz"),
            ("foo", None),
        ])
        self.assertEqual(
            str(node),
            dedent("""\
            john foo "bar" ;
                foo "baz" ;
                rdf:type "doe" ."""))
Example #8
    def test_escape_any(self):
        now = datetime.datetime.now()
        self.assertEqual(escape_any('foo\n\r\\bar'), r'"foo\n\r\\bar"')
        self.assertEqual(escape_any(now),
                         '"%s"^^xsd:dateTime' % now.isoformat())
        self.assertEqual(escape_any(now.date()),
                         '"%s"^^xsd:date' % now.date().isoformat())
        self.assertEqual(escape_any(now.time()),
                         '"%s"^^xsd:time' % now.time().isoformat())
        self.assertEqual(escape_any(True), 'true')
        self.assertEqual(escape_any(5), '5')
        self.assertEqual(escape_any(Decimal(5.5)), '5.5')
        self.assertEqual(escape_any(5.5), '"5.5"^^xsd:double')
        self.assertEqual(escape_any(RDFTerm("raw")), 'raw')
        self.assertEqual(escape_any(Node("subject", {})), 'subject')
        with self.assertRaises(TypeError):
            escape_any(int)
Example #9
    async def update_pipeline_services(self, subject):
        """
        Generate the triples for the services of a Docker Compose project
        (pipeline) and insert them into the database.
        """
        project_id = await self.get_resource_id(subject)
        data = self.open_compose_data(project_id)
        triples = Triples()
        for service in data.services:
            service_id = uuid4()
            service_iri = RDFTerm(":%s" % service_id)
            triples.append((subject, SwarmUI.services, service_iri))
            triples.append(
                Node(
                    service_iri, {
                        Mu.uuid: service_id,
                        Dct.title: service['name'],
                        SwarmUI.scaling: 0,
                        RDF.type: SwarmUI.Service,
                        SwarmUI.status: SwarmUI.Stopped,
                    }))
        await self.sparql.update("""
            WITH {{graph}}
            DELETE {
                {{pipeline}} swarmui:services ?service .

                ?service ?p ?o .
            }
            WHERE {
                {{pipeline}} swarmui:services ?service .

                ?service ?p ?o .
            }
            """,
                                 pipeline=subject)
        await self.sparql.update("""
            PREFIX : {{services_iri}}

            INSERT DATA {
                GRAPH {{graph}} {
                    {{triples}}
                }
            }""",
                                 services_iri=(self.base_resource +
                                               "services/"),
                                 triples=triples)
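
As the docstring says, the function regenerates a pipeline's service resources, so the update happens in two steps: a DELETE/WHERE first wipes whatever swarmui:services the pipeline currently points to (including all triples of those service resources), then an INSERT DATA writes the freshly generated ones under a local ":" prefix. The same pattern, reduced to a standalone sketch: the replace_services name and the simplified prefix handling are illustrative, and the client is assumed to be an already-configured aiosparql SPARQLClient.

from aiosparql.client import SPARQLClient
from aiosparql.syntax import Triples


async def replace_services(sparql: SPARQLClient, pipeline, triples: Triples,
                           services_iri):
    # Step 1: remove the old service resources linked to the pipeline.
    await sparql.update("""
        WITH {{graph}}
        DELETE {
            {{pipeline}} swarmui:services ?service .
            ?service ?p ?o .
        }
        WHERE {
            {{pipeline}} swarmui:services ?service .
            ?service ?p ?o .
        }
        """, pipeline=pipeline)
    # Step 2: insert the newly generated service triples.
    await sparql.update("""
        PREFIX : {{services_iri}}

        INSERT DATA {
            GRAPH {{graph}} {
                {{triples}}
            }
        }""", services_iri=services_iri, triples=triples)

Keeping the DELETE separate means the second query stays a pure INSERT DATA, which the templating can fill with a pre-rendered Triples block.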
Example #10
    async def create_pipeline(self,
                              repository_iri=_sentinel,
                              location=_sentinel):
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        pipeline_id = self.uuid4()
        pipeline_iri = self.resource("pipeline-instances", pipeline_id)
        pipeline_node = Node(pipeline_iri, {
            RDF.type: SwarmUI.Pipeline,
            Mu.uuid: pipeline_id,
        })
        await self.insert_triples([
            pipeline_node,
            (repository_iri, SwarmUI.pipelines, pipeline_node),
        ])
        await self.scheduler_complete(pipeline_id)
        return (pipeline_iri, pipeline_id)
Example #11
    async def create_drc_node(self,
                              repository_iri=_sentinel,
                              location=_sentinel):
        if repository_iri is _sentinel:
            repository_iri, repository_id = \
                await self.create_repository(location=location)
        else:
            s_repository_iri = str(repository_iri)
            repository_id = s_repository_iri.split('/')[-1][:-1]
        drc_text = dedent("""\
            version: "2"
            services:
              service1:
                image: busybox
                command: "sleep 60"
              service2:
                image: busybox
                command: "sleep 60"
            """)
        drc_id = self.uuid4()
        d_iri = IRI("http://stack-builder.big-data-europe.eu/resources/")
        drc_iri = d_iri + "%s/%s" % ("docker-composes", drc_id)
        drc_title = "stack_{}_drc_{}".format(repository_id, drc_id)
        drc_node = Node(
            drc_iri, {
                RDF.type: Stackbuilder.DockerCompose,
                Mu.uuid: drc_id,
                Dct.title: drc_title,
                Stackbuilder.text: drc_text
            })

        await self.insert_triples([
            drc_node,
            (repository_iri, SwarmUI.dockerComposeFile, drc_node),
        ])
        return (drc_iri, drc_id)
Example #12
async def save_container_stats(client, container, since, sparql):
    """
    Docker stats API doc:
    https://docs.docker.com/engine/api/v1.26/#operation/ContainerStats
    """
    stats = client.stats(container, decode=True)
    async for data in stats:
        uuid_pids_stats = uuid1(0)
        uuid_cpu_stats_cpu_usage = uuid1(0)
        uuid_cpu_stats_throttling_data = uuid1(0)
        uuid_cpu_stats = uuid1(0)
        uuid_precpu_stats_cpu_usage = uuid1(0)
        uuid_precpu_stats_throttling_data = uuid1(0)
        uuid_precpu_stats = uuid1(0)
        uuid_memory_stats_stats = uuid1(0)
        uuid_memory_stats = uuid1(0)
        uuid = uuid1(0)

        stats_node = Node(
            ':%s' % uuid, {
                'a': SwarmUI.Stats,
                'mu:uuid': uuid,
                'swarmui:read': datetime_parser.parse(data['read']),
                'swarmui:preread': datetime_parser.parse(data['preread']),
                'swarmui:pidsStats': RDFTerm(':%s' % uuid_pids_stats),
                'swarmui:numProcs': data['num_procs'],
                'swarmui:cpuStats': RDFTerm(':%s' % uuid_cpu_stats),
                'swarmui:precpuStats': RDFTerm(':%s' % uuid_precpu_stats),
                'swarmui:memoryStats': RDFTerm(':%s' % uuid_memory_stats),
                'swarmui:name': data['name'],
                'swarmui:id': data['id'],
            })

        triples = Triples([stats_node])

        for if_, network in data['networks'].items():
            network_uuid = uuid1(0)
            network_node = Node(
                ':%s' % network_uuid, {
                    'a': SwarmUI.Network,
                    'mu:uuid': network_uuid,
                    'swarmui:interface': if_,
                    'swarmui:rxBytes': network['rx_bytes'],
                    'swarmui:rxPackets': network['rx_packets'],
                    'swarmui:rxErrors': network['rx_errors'],
                    'swarmui:rxDropped': network['rx_dropped'],
                    'swarmui:txBytes': network['tx_bytes'],
                    'swarmui:txPackets': network['tx_packets'],
                    'swarmui:txErrors': network['tx_errors'],
                    'swarmui:txDropped': network['tx_dropped'],
                })
            triples.append(network_node)
            stats_node.append(('swarmui:network', network_node))

        triples.extend([
            Node(
                ':%s' % uuid_pids_stats, {
                    'a': SwarmUI.PidsStats,
                    'mu:uuid': uuid_pids_stats,
                    'swarmui:current': data['pids_stats']['current'],
                }),
            Node(':%s' % uuid_cpu_stats_cpu_usage, [
                ('a', SwarmUI.CpuUsage),
                ('mu:uuid', uuid_cpu_stats_cpu_usage),
                ('swarmui:totalUsage',
                 data['cpu_stats']['cpu_usage']['total_usage']),
            ] + [('swarmui:percpuUsage', x) for x in data['cpu_stats']
                 ['cpu_usage'].get('percpu_usage', [])] + [
                     ('swarmui:usageInKernelmode',
                      data['cpu_stats']['cpu_usage']['usage_in_kernelmode']),
                     ('swarmui:usageInUsermode',
                      data['cpu_stats']['cpu_usage']['usage_in_usermode']),
                 ]),
            Node(
                ':%s' % uuid_cpu_stats_throttling_data, {
                    'a':
                    SwarmUI.ThrottlingData,
                    'mu:uuid':
                    uuid_cpu_stats_throttling_data,
                    'swarmui:periods':
                    data['cpu_stats']['throttling_data']['periods'],
                    'swarmui:throttledPeriods':
                    data['cpu_stats']['throttling_data']['throttled_periods'],
                    'swarmui:throttledTime':
                    data['cpu_stats']['throttling_data']['throttled_time'],
                }),
            Node(
                ':%s' % uuid_cpu_stats, {
                    'a':
                    SwarmUI.CpuStats,
                    'mu:uuid':
                    uuid_cpu_stats,
                    'swarmui:cpuUsage':
                    RDFTerm(':%s' % uuid_cpu_stats_cpu_usage),
                    'swarmui:systemCpuUsage':
                    data['cpu_stats']['system_cpu_usage'],
                    'swarmui:throttlingData':
                    RDFTerm(':%s' % uuid_cpu_stats_throttling_data),
                }),
            Node(':%s' % uuid_precpu_stats_cpu_usage, [
                ('a', SwarmUI.CpuUsage),
                ('mu:uuid', uuid_precpu_stats_cpu_usage),
                ('swarmui:totalUsage',
                 data['precpu_stats']['cpu_usage']['total_usage']),
            ] + [
                ('swarmui:percpuUsage', x)
                for x in data['precpu_stats']['cpu_usage'].get(
                    'percpu_usage', [])
            ] + [
                ('swarmui:usageInKernelmode',
                 data['precpu_stats']['cpu_usage']['usage_in_kernelmode']),
                ('swarmui:usageInUsermode',
                 data['precpu_stats']['cpu_usage']['usage_in_usermode']),
            ]),
            Node(
                ':%s' % uuid_precpu_stats_throttling_data, {
                    'a':
                    SwarmUI.ThrottlingData,
                    'mu:uuid':
                    uuid_precpu_stats_throttling_data,
                    'swarmui:periods':
                    data['precpu_stats']['throttling_data']['periods'],
                    'swarmui:throttledPeriods':
                    data['precpu_stats']['throttling_data']
                    ['throttled_periods'],
                    'swarmui:throttledTime':
                    data['precpu_stats']['throttling_data']['throttled_time'],
                }),
            Node(
                ':%s' % uuid_precpu_stats, {
                    'a':
                    SwarmUI.PrecpuStats,
                    'mu:uuid':
                    uuid_precpu_stats,
                    'swarmui:cpuUsage':
                    RDFTerm(':%s' % uuid_precpu_stats_cpu_usage),
                    'swarmui:systemCpuUsage':
                    data['precpu_stats'].get('system_cpu_usage'),
                    'swarmui:throttlingData':
                    RDFTerm(':%s' % uuid_precpu_stats_throttling_data),
                }),
            Node(
                ':%s' % uuid_memory_stats_stats, {
                    'a':
                    SwarmUI.Stats,
                    'mu:uuid':
                    uuid_memory_stats_stats,
                    'swarmui:activeAnon':
                    data['memory_stats']['stats']['active_anon'],
                    'swarmui:activeFile':
                    data['memory_stats']['stats']['active_file'],
                    'swarmui:cache':
                    data['memory_stats']['stats']['cache'],
                    'swarmui:dirty':
                    data['memory_stats']['stats']['dirty'],
                    'swarmui:hierarchicalMemoryLimit':
                    data['memory_stats']['stats']['hierarchical_memory_limit'],
                    'swarmui:hierarchicalMemswLimit':
                    data['memory_stats']['stats']['hierarchical_memsw_limit'],
                    'swarmui:inactiveAnon':
                    data['memory_stats']['stats']['inactive_anon'],
                    'swarmui:inactiveFile':
                    data['memory_stats']['stats']['inactive_file'],
                    'swarmui:mappedFile':
                    data['memory_stats']['stats']['mapped_file'],
                    'swarmui:pgfault':
                    data['memory_stats']['stats']['pgfault'],
                    'swarmui:pgmajfault':
                    data['memory_stats']['stats']['pgmajfault'],
                    'swarmui:pgpgin':
                    data['memory_stats']['stats']['pgpgin'],
                    'swarmui:pgpgout':
                    data['memory_stats']['stats']['pgpgout'],
                    'swarmui:rss':
                    data['memory_stats']['stats']['rss'],
                    'swarmui:rssHuge':
                    data['memory_stats']['stats']['rss_huge'],
                    'swarmui:swap':
                    data['memory_stats']['stats']['swap'],
                    'swarmui:totalActiveAnon':
                    data['memory_stats']['stats']['total_active_anon'],
                    'swarmui:totalActiveFile':
                    data['memory_stats']['stats']['total_active_file'],
                    'swarmui:totalCache':
                    data['memory_stats']['stats']['total_cache'],
                    'swarmui:totalDirty':
                    data['memory_stats']['stats']['total_dirty'],
                    'swarmui:totalInactiveAnon':
                    data['memory_stats']['stats']['total_inactive_anon'],
                    'swarmui:totalInactiveFile':
                    data['memory_stats']['stats']['total_inactive_file'],
                    'swarmui:totalMappedFile':
                    data['memory_stats']['stats']['total_mapped_file'],
                    'swarmui:totalPgfault':
                    data['memory_stats']['stats']['total_pgfault'],
                    'swarmui:totalPgmajfault':
                    data['memory_stats']['stats']['total_pgmajfault'],
                    'swarmui:totalPgpgin':
                    data['memory_stats']['stats']['total_pgpgin'],
                    'swarmui:totalPgpgout':
                    data['memory_stats']['stats']['total_pgpgout'],
                    'swarmui:totalRss':
                    data['memory_stats']['stats']['total_rss'],
                    'swarmui:totalRssHuge':
                    data['memory_stats']['stats']['total_rss_huge'],
                    'swarmui:totalSwap':
                    data['memory_stats']['stats']['total_swap'],
                    'swarmui:totalUnevictable':
                    data['memory_stats']['stats']['total_unevictable'],
                    'swarmui:totalWriteback':
                    data['memory_stats']['stats']['total_writeback'],
                    'swarmui:unevictable':
                    data['memory_stats']['stats']['unevictable'],
                    'swarmui:writeback':
                    data['memory_stats']['stats']['writeback'],
                }),
            Node(
                ':%s' % uuid_memory_stats, {
                    'a': SwarmUI.MemoryStats,
                    'mu:uuid': uuid_memory_stats,
                    'swarmui:usage': data['memory_stats']['usage'],
                    'swarmui:maxUsage': data['memory_stats']['max_usage'],
                    'swarmui:stats': RDFTerm(':%s' % uuid_memory_stats_stats),
                    'swarmui:limit': data['memory_stats']['limit'],
                }),
        ])
        await sparql.update(
            """
            PREFIX : <http://ontology.aksw.org/dockstats/>

            WITH {{graph}}
            INSERT DATA {
                {{}}
            }
            """, triples)

    logger.info("Finished logging stats (container %s is stopped)",
                container[:12])
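
Every substructure of the stats document (pids_stats, cpu_stats, memory_stats, ...) gets its own UUID-named resource, and the parent node points to it through an RDFTerm reference rather than by nesting the child Node. Here is that linking pattern in miniature; the ":" prefix and the predicate names are made up for illustration.

from uuid import uuid1

from aiosparql.syntax import Node, RDFTerm, Triples

parent_uuid = uuid1(0)
child_uuid = uuid1(0)

# Child resource with its own subject...
child = Node(':%s' % child_uuid, {
    'mu:uuid': child_uuid,
    'swarmui:current': 3,
})
# ...referenced from the parent by subject only, so the parent stays flat.
parent = Node(':%s' % parent_uuid, {
    'mu:uuid': parent_uuid,
    'swarmui:pidsStats': RDFTerm(':%s' % child_uuid),
})
triples = Triples([parent, child])

Nesting the child Node directly (as in Example #1) would also emit its triples, but the reference style keeps each Node small enough to read when, as here, there are around ten linked resources per stats sample.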
Example #13
async def store_events(event: ContainerEvent, sparql: SPARQLClient):
    """
    Convert a Docker container event to triples and insert them into the
    database.
    """
    container = (await event.container) if event.status == "start" else None

    event_id = event.data.get("id", "")
    if event_id == "":
        return None

    _time = event.data.get("time", "")
    _timeNano = event.data.get("timeNano", "")
    _datetime = datetime.fromtimestamp(int(_time))

    event_id = "%s_%s" % (event_id, _timeNano)
    event_node = Node(
        DockEvent.__iri__ + event_id, {
            RDF.type: DockEventTypes.event,
            DockEvent.eventId: event_id,
            DockEvent.time: _time,
            DockEvent.timeNano: _timeNano,
            DockEvent.dateTime: _datetime,
        })

    event_type = event.data.get("Type", "")
    event_node.append((DockEvent.type, getattr(DockEventTypes, event_type)))

    event_action = event.data.get("Action", "")
    if ":" in event_action:
        event_action_type = event_action.split(":")[0]
        event_action_extra = event_action.split(":")[-1].strip()
        event_node.append((DockEvent.actionExtra, event_action_extra))
    else:
        event_action_type = event_action

    event_node.append(
        (DockEvent.action, getattr(DockEventActions, event_action_type)))

    if container is not None:
        container_id = "%s_%s" % (container["Id"], _timeNano)
        container_node = Node(
            DockContainer.__iri__ + container_id, {
                DockContainer.id: container["Id"],
                DockContainer.name: container["Name"],
            })
        for label, value in container["Config"]["Labels"].items():
            container_node.append(
                (DockContainer.label, "%s=%s" % (label, value)))
        for env_with_value in container["Config"]["Env"]:
            container_node.append((DockContainer.env, env_with_value))
        event_node.append((DockEvent.container, container_node))
        for name, network in \
                container["NetworkSettings"]["Networks"].items():
            network_id = "%s_%s" % (network["NetworkID"], _timeNano)
            network_node = Node(
                DockContainerNetwork.__iri__ + network_id, {
                    DockContainerNetwork.name: name,
                    DockContainerNetwork.id: network["NetworkID"],
                    DockContainerNetwork.ipAddress: network["IPAddress"],
                })
            if network.get("Links"):
                for link in network["Links"]:
                    network_node.append((DockEvent.link, link))
            container_node.append((DockContainer.network, network_node))

    actor = event.data.get("Actor", "")
    if actor != "":
        actor_id = actor.get("ID", "")
        actor_id = "%s_%s" % (actor_id, _timeNano)
        actor_node = Node(DockEventActor.__iri__ + actor_id, {
            DockEventActor.actorId: actor_id,
        })
        actor_attributes = actor.get("Attributes", {})
        actor_node.extend([
            (DockEventActor.image, actor_attributes.get("image", "")),
            (DockEventActor.name, actor_attributes.get("name", "")),
            (DockEventActor.nodeIpPort, actor_attributes.get("node.addr", "")),
            (DockEventActor.nodeId, actor_attributes.get("node.id", "")),
            (DockEventActor.nodeIp, actor_attributes.get("node.ip", "")),
            (DockEventActor.nodeName, actor_attributes.get("node.name", "")),
        ])
        event_node.append((DockEvent.actor, actor_node))

    _from = event.data.get("from", "")
    if _from != "":
        event_node.append((DockEvent.source, _from))

    await sparql.update(
        """
        INSERT DATA {
            GRAPH {{graph}} {
                {{}}
            }
        }
        """, event_node)