    def test_sequence_searcher_multi_sequence(self):
        """
        Test scenario:
         * search containing multiple sequence definitions
         * data containing 2 results of each where one is incomplete
         * test that the single incomplete result gets removed
        """
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
            ftmp.write(MULTI_SEQ_TEST)
            ftmp.close()
            s = FileSearcher()
            sdA = SequenceSearchDef(start=SearchDef(r"^sectionA (\d+)"),
                                    body=SearchDef(r"\d_\d"),
                                    end=SearchDef(r"^section\S+ (\d+)"),
                                    tag="seqA-search-test")
            sdB = SequenceSearchDef(start=SearchDef(r"^sectionB (\d+)"),
                                    body=SearchDef(r"\d_\d"),
                                    end=SearchDef(r"^section\S+ (\d+)"),
                                    tag="seqB-search-test")
            s.add_search_term(sdA, path=ftmp.name)
            s.add_search_term(sdB, path=ftmp.name)
            results = s.search()
            sections = results.find_sequence_sections(sdA)
            self.assertEqual(len(sections), 1)
            sections = results.find_sequence_sections(sdB)
            self.assertEqual(len(sections), 2)
            os.remove(ftmp.name)
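
The MULTI_SEQ_TEST fixture is defined elsewhere in the test module. Purely as an illustration of the shape it needs to have (this is not the real constant), the following data would yield one complete and one incomplete sectionA sequence plus two complete sectionB sequences:

# Hypothetical input data for the scenario above (NOT the real
# MULTI_SEQ_TEST fixture, just an illustration of its expected shape):
EXAMPLE_MULTI_SEQ = ("sectionA 1\n"   # starts seqA section 1
                     "1_1\n"          # body of seqA section 1
                     "sectionB 1\n"   # ends seqA section 1, starts seqB 1
                     "2_1\n"          # body of seqB section 1
                     "sectionB 2\n"   # ends seqB 1, starts seqB 2
                     "2_2\n"          # body of seqB section 2
                     "sectionA 2\n"   # ends seqB 2, starts a second seqA
                     "1_2\n")         # no end before EOF so this second,
                                      # incomplete seqA section is dropped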
Example #2
    def _get_version_info(self, daemon_type=None):
        """
        Returns a dict of ceph versions info for the provided daemon type. If
        no daemon type provided, version info is collected for all types and
        the resulting dict is keyed by daemon type otherwise it is keyed by
        version (and only versions for that daemon type.)
        """
        out = self.cli_cache['ceph_versions']
        if not out:
            return

        versions = {}
        s = FileSearcher()
        body = SearchDef(r"\s+\"ceph version (\S+) .+ (\S+) "
                         r"\(\S+\)\":\s+(\d)+,?$")
        if daemon_type is None:
            # all/any - start matches any so no seq ending needed
            sd = SequenceSearchDef(start=SearchDef(r"^\s+\"(\S+)\":\s+{"),
                                   body=body,
                                   tag='versions')
        else:
            start = SearchDef(r"^\s+\"({})\":\s+{{".format(daemon_type))
            sd = SequenceSearchDef(start=start,
                                   body=body,
                                   end=SearchDef(r"^\s+\"\S+\":\s+{"),
                                   tag='versions')

        s.add_search_term(sd, path=self.cli_cache['ceph_versions'])
        for section in s.search().find_sequence_sections(sd).values():
            _versions = {}
            for result in section:
                if result.tag == sd.start_tag:
                    _daemon_type = result.get(1)
                    versions[_daemon_type] = _versions
                elif result.tag == sd.body_tag:
                    version = result.get(1)
                    rname = result.get(2)
                    amount = result.get(3)
                    _versions[version] = {
                        'release_name': rname,
                        'count': int(amount)
                    }

        # If a specific daemon_type was provided, only return versions for
        # that type, otherwise return all.
        if daemon_type is not None:
            versions = versions.get(daemon_type)

        return versions
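
As a standalone sanity check of the body expression, here is how it behaves against an illustrative `ceph versions` line (the version, hash and count are made up):

# Quick check of the body expression against a made-up line from
# `ceph versions` output:
import re

sample = '        "ceph version 15.2.14 (abc123) octopus (stable)": 24,'
expr = (r"\s+\"ceph version (\S+) .+ (\S+) "
        r"\(\S+\)\":\s+(\d+),?$")
m = re.search(expr, sample)
# m.group(1) == '15.2.14', m.group(2) == 'octopus', m.group(3) == '24'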
Example #3
 def memory_searchdef(self):
     start = SearchDef([r"^Status of node '([^']*)'$",
                        r"^Status of node ([^']*) ...$"])
     body = SearchDef(r"^\s+\[{total,([0-9]+)}.+")
     end = SearchDef(r"^$")
     return SequenceSearchDef(start=start, body=body, end=end,
                              tag='memory')
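
A hedged illustration of the rabbitmqctl report content these expressions are aimed at, derived from the expressions themselves (node name and byte count are made up):

# Illustrative report excerpt (values made up):
#
#     Status of node 'rabbit@host1'     <- start (or, in other versions,
#                                          "Status of node rabbit@host1 ...")
#      [{total,1028557304},             <- body: captures the total bytes
#       {processes,...},
#       ...]
#                                       <- a blank line ends the section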
Example #4
    def _get_local_osds(self):
        if not self.cli_cache['ceph_volume_lvm_list']:
            return

        s = FileSearcher()
        sd = SequenceSearchDef(start=SearchDef(r"^=+\s+osd\.(\d+)\s+=+.*"),
                               body=SearchDef([
                                   r"\s+osd\s+(fsid)\s+(\S+)\s*",
                                   r"\s+(devices)\s+([\S]+)\s*"
                               ]),
                               tag="ceph-lvm")
        s.add_search_term(sd, path=self.cli_cache['ceph_volume_lvm_list'])
        local_osds = []
        for results in s.search().find_sequence_sections(sd).values():
            id = None
            fsid = None
            dev = None
            for result in results:
                if result.tag == sd.start_tag:
                    id = int(result.get(1))
                elif result.tag == sd.body_tag:
                    if result.get(1) == "fsid":
                        fsid = result.get(2)
                    elif result.get(1) == "devices":
                        dev = result.get(2)

            local_osds.append(CephOSD(id, fsid, dev))

        return local_osds
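
A hedged illustration of the `ceph-volume lvm list` output being parsed (the osd id, uuid and device are made up):

# Illustrative `ceph-volume lvm list` output (values made up):
#
#     ====== osd.1 =======
#
#       [block]       /dev/ceph-123/osd-block-123
#
#           block device    /dev/ceph-123/osd-block-123
#           osd fsid        f0b1c2d3-made-up-uuid
#           osd id          1
#           devices         /dev/vdb
#
# The start expression captures the osd id from the "====== osd.N ======="
# header and the body expressions capture the "osd fsid" and "devices"
# values used to build each CephOSD.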
    def test_sequence_searcher_section_start_end_same(self):
        """
        Test scenario:
         * multiple sections that end with start of the next
         * start def matches unique start
         * end def matches any start
        """
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
            ftmp.write(SEQ_TEST_7)
            ftmp.close()
            s = FileSearcher()
            sd = SequenceSearchDef(start=SearchDef(r"^section (2)"),
                                   body=SearchDef(r"\d_\d"),
                                   end=SearchDef(r"^section (\d+)"),
                                   tag="seq-search-test7")
            s.add_search_term(sd, path=ftmp.name)
            results = s.search()
            sections = results.find_sequence_sections(sd)
            self.assertEqual(len(sections), 1)
            for id in sections:
                for r in sections[id]:
                    if r.tag == sd.start_tag:
                        self.assertEqual(r.get(1), "2")
                    elif r.tag == sd.body_tag:
                        self.assertTrue(r.get(0) in ["2_1"])

            os.remove(ftmp.name)
Example #6
    def udev_bcache_devs(self):
        """ If bcache devices exist fetch information and return as a list. """
        if self._bcache_devs:
            return self._bcache_devs

        udevadm_info = self.cli.udevadm_info_exportdb()
        if not udevadm_info:
            return self._bcache_devs

        s = FileSearcher()
        sdef = SequenceSearchDef(start=SearchDef(r"^P: .+/(bcache\S+)"),
                                 body=SearchDef(r"^S: disk/by-uuid/(\S+)"),
                                 tag="bcacheinfo")
        s.add_search_term(sdef, utils.mktemp_dump('\n'.join(udevadm_info)))
        results = s.search()
        devs = []
        for section in results.find_sequence_sections(sdef).values():
            dev = {}
            for r in section:
                if r.tag == sdef.start_tag:
                    dev["name"] = r.get(1)
                else:
                    dev["by-uuid"] = r.get(1)

            devs.append(dev)

        self._bcache_devs = devs
        return self._bcache_devs
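
For context, a hedged sketch of the udevadm export-db records this is matching (device path and uuid are made up):

# Illustrative `udevadm info --export-db` record (values made up):
#
#     P: /devices/virtual/block/bcache0
#     N: bcache0
#     S: disk/by-uuid/31e56ccf-0000-0000-0000-000000000000
#     E: DEVNAME=/dev/bcache0
#
# The start expression captures the device name (e.g. "bcache0") from the
# "P:" line and the body expression captures the uuid from the
# "S: disk/by-uuid/..." symlink line.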
Example #7
 def connections_searchdef(self):
     start = SearchDef([r"^Connections:$",
                        r"^Listing connections ...$"])
     # Again, the user and protocol columns are inverted
     # between 3.6.x and 3.8.x so we have to catch both and
     # decide.
     body = SearchDef(r"^<(rabbit[^>.]*)(?:[.][0-9]+)+>.+(?:[A-Z]+\s+{[\d,]+}\s+(\S+)|\d+\s+{[\d,]+}\s+\S+\s+(\S+)).+{\"connection_name\",\"([^:]+):\d+:.+$")  # pylint: disable=C0301  # noqa
     end = SearchDef(r"^$")
     return SequenceSearchDef(start=start, body=body, end=end,
                              tag='connections')
Example #8
 def queues_searchdef(self):
     start = SearchDef([r"^Queues on ([^:]+):",
                        (r"^Listing queues for vhost ([^:]+) "
                         r"...")])
     # NOTE: we don't use a list for the body here because
     # we need to know which expression matched so that we
     # can know in which order to retrieve the columns since
     # their order is inverted between 3.6.x and 3.8.x
     body = SearchDef(r"^(?:<([^.\s]+)[.0-9]+>\s+(\S+)|"
                      r"(\S+)\s+(?:\S+\s+){4}<([^.\s]+)[.0-9]"
                      r"+>)\s+.+")
     end = SearchDef(r"^$")
     return SequenceSearchDef(start=start, body=body, end=end,
                              tag='queues')
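
To make the version-dependent column order concrete, here is a hedged sketch of the two line shapes the body expression distinguishes, derived from the expression itself (queue and node names are made up):

# Two illustrative queue listing formats (names made up):
#
#     <rabbit@host1.3.1234.0>    my_queue    ...
#       -> groups 1 and 2 are populated (node, then queue name)
#
#     my_queue    c1 c2 c3 c4    <rabbit@host1.3.1234.0>    ...
#       -> groups 3 and 4 are populated (queue name, then node)
#
# Checking which pair of groups matched tells the caller which column
# order, and therefore which output format, it is dealing with.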
Example #9
    def stats(self):
        """ Get ip link info for the interface. """
        counters = self.cache_load()
        if counters:
            return counters

        s = FileSearcher()
        seqdef = SequenceSearchDef(
                    # match start of interface
                    start=SearchDef(IP_IFACE_NAME_TEMPLATE.format(self.name)),
                    # match body of interface
                    body=SearchDef(r".+"),
                    # match next interface or EOF
                    end=SearchDef([IP_IFACE_NAME, IP_EOF]),
                    tag="ifaces")
        f_ip_link_show = mktemp_dump(''.join(self.cli_helper.ip_link()))
        s.add_search_term(seqdef, path=f_ip_link_show)
        results = s.search()
        os.unlink(f_ip_link_show)
        stats_raw = []
        for section in results.find_sequence_sections(seqdef).values():
            for result in section:
                if result.tag == seqdef.body_tag:
                    stats_raw.append(result.get(0))

        if not stats_raw:
            return {}

        # NOTE: we only expect one match
        counters = {}
        for i, line in enumerate(stats_raw):
            ret = re.compile(r"\s+([RT]X):\s+.+").findall(line)
            if ret:
                rxtx = ret[0].lower()
                ret = re.compile(r"\s*([a-z]+)\s*").findall(line)
                if ret:
                    for j, column in enumerate(ret):
                        value = int(stats_raw[i + 1].split()[j])
                        if column in ['packets', 'dropped', 'errors',
                                      'overrun']:
                            if rxtx not in counters:
                                counters[rxtx] = {}

                            counters[rxtx][column] = value

        if counters:
            self.cache_save(counters)
            return counters

        return {}
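
A hedged illustration of the `ip -s link` output this parses (interface name and counters are made up):

# Illustrative `ip -s link show <iface>` output (values made up):
#
#     2: ens3: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 ... state UP ...
#         link/ether fa:16:3e:00:00:01 brd ff:ff:ff:ff:ff:ff
#         RX: bytes  packets  errors  dropped overrun mcast
#         1024000    2000     0       0       0       0
#         TX: bytes  packets  errors  dropped carrier collsns
#         512000     1000     0       0       0       0
#
# The first findall identifies the "RX:"/"TX:" header lines, the second
# pulls the lower-case column names out of that header, and the values are
# then read positionally from the following line, keeping only the
# packets/dropped/errors/overrun counters.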
    def test_sequence_searcher_overlapping_incomplete(self):
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
            ftmp.write(SEQ_TEST_3)
            ftmp.close()
            s = FileSearcher()
            sd = SequenceSearchDef(
                start=SearchDef(r"^(a\S*) (start\S*) point\S*"),
                body=SearchDef(r"leads to"),
                end=SearchDef(r"^an (ending)$"),
                tag="seq-search-test3")
            s.add_search_term(sd, path=ftmp.name)
            results = s.search()
            sections = results.find_sequence_sections(sd)
            self.assertEqual(len(sections), 1)
            for id in sections:
                for r in sections[id]:
                    if r.tag == sd.start_tag:
                        self.assertEqual(r.get(1), "another")
                    elif r.tag == sd.end_tag:
                        self.assertEqual(r.get(1), "ending")

            os.remove(ftmp.name)
    def test_sequence_searcher_multiple_sections(self):
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
            ftmp.write(SEQ_TEST_5)
            ftmp.close()
            s = FileSearcher()
            sd = SequenceSearchDef(
                start=SearchDef(r"^(a\S*) (start\S*) point\S*"),
                body=SearchDef(r"value is (\S+)"),
                end=SearchDef(r"^$"),
                tag="seq-search-test5")
            s.add_search_term(sd, path=ftmp.name)
            results = s.search()
            sections = results.find_sequence_sections(sd)
            self.assertEqual(len(sections), 2)
            for id in sections:
                for r in sections[id]:
                    if r.tag == sd.start_tag:
                        self.assertEqual(r.get(1), "another")
                    elif r.tag == sd.body_tag:
                        self.assertTrue(r.get(1) in ["3", "4"])
                    elif r.tag == sd.end_tag:
                        self.assertEqual(r.get(0), "")

            os.remove(ftmp.name)
Example #12
    def _get_interfaces(self, namespaces=False):
        """
        Get all interfaces in ip address show.

        @param namespaces: if set to True will get interfaces from all
        namespaces on the host.
        @return: list of NetworkPort objects for each interface found.
        """
        interfaces = []

        interfaces_raw = self.cache_load(namespaces=namespaces)
        if interfaces_raw:
            for iface in interfaces_raw:
                interfaces.append(NetworkPort(**iface))

            return interfaces

        interfaces_raw = []
        seq = SequenceSearchDef(start=SearchDef(IP_IFACE_NAME),
                                body=SearchDef([IP_IFACE_V4_ADDR,
                                                IP_IFACE_V6_ADDR,
                                                IP_IFACE_HW_ADDR,
                                                IP_IFACE_VXLAN_INFO]),
                                tag='ip_addr_show')
        search_obj = FileSearcher()
        if namespaces:
            for ns in self.cli.ip_netns():
                ns_name = ns.partition(" ")[0]
                ip_addr = self.cli.ns_ip_addr(namespace=ns_name)
                path = mktemp_dump('\n'.join(ip_addr))
                search_obj.add_search_term(seq, path)
        else:
            path = mktemp_dump('\n'.join(self.cli.ip_addr()))
            search_obj.add_search_term(seq, path)

        if not search_obj.paths:
            log.debug("no network info found (namespaces=%s)", namespaces)
            return []

        r = search_obj.search()
        for path in search_obj.paths:
            # this file is no longer needed so we can delete it
            os.unlink(path)
            sections = r.find_sequence_sections(seq, path).values()
            for section in sections:
                addrs = []
                encap_info = None
                hwaddr = None
                name = None
                state = None
                for result in section:
                    if result.tag == seq.start_tag:
                        name = result.get(1)
                        state = result.get(2)
                    elif result.tag == seq.body_tag:
                        if result.get(1) in ['inet', 'inet6']:
                            addrs.append(result.get(2))
                        elif result.get(1) in ['vxlan']:
                            encap_info = {result.get(1): {
                                              'id': result.get(2),
                                              'local_ip': result.get(3),
                                              'dev': result.get(4)}}
                        else:
                            hwaddr = result.get(2)

                interfaces_raw.append({'name': name, 'addresses': addrs,
                                       'hwaddr': hwaddr, 'state': state,
                                       'encap_info': encap_info})

        self.cache_save(interfaces_raw, namespaces=namespaces)
        for iface in interfaces_raw:
            interfaces.append(NetworkPort(**iface))

        return interfaces
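
For reference, a hedged sketch of the `ip address show` output that the IP_IFACE_* expressions (not shown here) presumably match (interface name and addresses are made up):

# Illustrative `ip address show` output (values made up):
#
#     2: ens3: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 ... state UP ...
#         link/ether fa:16:3e:00:00:01 brd ff:ff:ff:ff:ff:ff
#         inet 10.0.0.5/24 brd 10.0.0.255 scope global dynamic ens3
#         inet6 fe80::f816:3eff:fe00:1/64 scope link
#
# Assuming IP_IFACE_NAME captures the interface name and state from the
# header line, and the body expressions capture the inet/inet6 addresses,
# the link/ether hw address and any vxlan details, each matched section
# yields one NetworkPort entry.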
Example #13
    def _load_event_definitions(self):
        """
        Load event search definitions from yaml.

        An event is identified using between one and two expressions. If it
        requires a start and end to be considered complete, these can be
        specified as a pair; otherwise we can match on a single line.
        Note that multi-line events can overlap, which is why we don't use a
        SequenceSearchDef (we use core.analytics.LogEventStats instead).
        """
        plugin = YDefsLoader('events').load_plugin_defs()
        if not plugin:
            return

        group_name = self._yaml_defs_group
        log.debug("loading defs for subgroup=%s", group_name)
        group_defs = plugin.get(group_name)
        group = YDefsSection(group_name, group_defs)
        log.debug("sections=%s, events=%s",
                  len(group.branch_sections),
                  len(group.leaf_sections))

        for event in group.leaf_sections:
            results_passthrough = bool(event.passthrough_results)
            log.debug("event: %s", event.name)
            log.debug("input: %s (command=%s)", event.input.path,
                      event.input.command is not None)
            log.debug("passthrough: %s", results_passthrough)

            section_name = event.parent.name
            # this is hopefully unique enough to allow two events from
            # different sections to have the same name without clobbering
            # each other's results.
            search_tag = "{}.{}".format(section_name, event.name)

            # if this is a multiline event (has a start and end), append
            # this to the tag so that it can be used with
            # core.analytics.LogEventStats.
            search_meta = {'searchdefs': [], 'datasource': None,
                           'passthrough_results': results_passthrough}

            if event.expr:
                hint = None
                if event.hint:
                    hint = event.hint.value

                search_meta['searchdefs'].append(
                    SearchDef(event.expr.value, tag=search_tag, hint=hint))
            elif event.start:
                if (event.body or
                        (event.end and not results_passthrough)):
                    log.debug("event '%s' search is a sequence", event.name)
                    sd_start = SearchDef(event.start.expr)

                    sd_end = None
                    # explicit end is optional for sequence definition
                    if event.end:
                        sd_end = SearchDef(event.end.expr)

                    sd_body = None
                    if event.body:
                        sd_body = SearchDef(event.body.expr)

                    # NOTE: we don't use hints here
                    sequence_def = SequenceSearchDef(start=sd_start,
                                                     body=sd_body,
                                                     end=sd_end,
                                                     tag=search_tag)
                    search_meta['searchdefs'].append(sequence_def)
                    search_meta['is_sequence'] = True
                elif (results_passthrough and
                      (event.start and event.end)):
                    # start and end required for core.analytics.LogEventStats
                    search_meta['searchdefs'].append(
                        SearchDef(event.start.expr,
                                  tag="{}-start".format(search_tag),
                                  hint=event.start.hint))
                    search_meta['searchdefs'].append(
                        SearchDef(event.end.expr,
                                  tag="{}-end".format(search_tag),
                                  hint=event.end.hint))
                else:
                    log.debug("unexpected search definition passthrough=%s "
                              "body provided=%s, end provided=%s",
                              results_passthrough, event.body is not None,
                              event.end is not None)
            else:
                log.debug("invalid search definition for event '%s' in "
                          "section '%s'", event, event.parent.name)
                continue

            datasource = event.input.path
            if section_name not in self.__event_defs:
                self.__event_defs[section_name] = {}

            search_meta['datasource'] = datasource
            self.__event_defs[section_name][event.name] = search_meta
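
Finally, a rough sketch of what an events yaml definition might look like, inferred purely from the attributes accessed above; the real key names and schema may differ:

# Hypothetical events yaml (key names inferred from the code above and may
# not match the real schema):
#
#     mygroup:
#       mysection:
#         my-single-line-event:
#           expr: '.+ ERROR (\S+)'
#           hint: 'ERROR'
#         my-sequence-event:
#           start:
#             expr: '^Starting operation (\S+)'
#           body:
#             expr: '^\s+progress'
#           end:
#             expr: '^Finished operation'
#
# A lone 'expr' becomes a single SearchDef; a start plus body (and optional
# end) becomes a SequenceSearchDef; and a start/end pair with passthrough
# results enabled is used with core.analytics.LogEventStats.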