Esempio n. 1
0
 def get_data(self, **kwargs):
     """
     VRF interfaces report: one section per non-global VRF, listing
     each managed object and its subinterfaces in that VRF.
     """
     data = []
     # Walk VRFs alphabetically, skipping the global table (RD 0:0)
     for vrf in VRF.objects.all().order_by("name"):
         if vrf.rd == "0:0":
             continue  # Skip global
         rows = []
         for fwd_inst in ForwardingInstance.objects.filter(type="VRF",
                                                           name=vrf.name):
             subifs = sorted(
                 iface.name
                 for iface in SubInterface.objects.filter(
                     forwarding_instance=fwd_inst.id).only("name")
             )
             if subifs:
                 rows.append([fwd_inst.managed_object.name,
                              ", ".join(subifs)])
         if rows:
             # Section header carries the VRF identity and state
             data.append(
                 SectionRow(name="VRF %s, RD: %s [%s]" %
                            (vrf.name, vrf.rd, vrf.state.name)))
             data += rows
     return self.from_dataset(
         title=self.title,
         columns=[_("Managed. Object"),
                  _("Interfaces")],
         data=data)
Esempio n. 2
0
    def get_data(self, **kwargs):
        """
        Physical dimensions report: for every vendor, list managed
        models with width/height/depth, rack units and weight.
        """
        data = []
        for vendor in Vendor.objects.order_by("name"):
            rows = []
            for model in ObjectModel.objects.filter(
                    vendor=vendor.id,
                    data__management__managed=True).order_by("name"):
                units = model.get_data("rackmount", "units")
                # Render rack units as e.g. "2U", empty when absent
                ru = "%sU" % units if units else ""
                weight = model.get_data("weight", "weight")
                if weight:
                    weight = str(weight)
                    # "+" is appended when weight is flagged recursive
                    if model.get_data("weight", "is_recursive"):
                        weight += "+"
                else:
                    weight = ""
                rows.append((
                    model.name,
                    model.get_data("dimensions", "width") or "",
                    model.get_data("dimensions", "height") or "",
                    model.get_data("dimensions", "depth") or "",
                    ru,
                    weight
                ))
            if rows:
                data.append(SectionRow(name=vendor.name))
                data += rows

        return self.from_dataset(
            title=self.title,
            columns=["Model", "W", "H", "D", "RU", "Weight (kg)"],
            data=data,
            enumerate=True)
Esempio n. 3
0
    def get_data(self, **kwargs):
        """
        Part number cross-reference: for every vendor, dump the four
        part_no* and four asset_part_no* slots of each model.
        """
        data = []
        for vendor in Vendor.objects.order_by("name"):
            data.append(SectionRow(name=vendor.name))
            for model in ObjectModel.objects.filter(vendor=vendor.id):
                # Slots 0..3 of both part number families, then identity
                row = [model.get_data("asset", "part_no%d" % n)
                       for n in range(4)]
                row += [model.get_data("asset", "asset_part_no%d" % n)
                        for n in range(4)]
                row += [model.name, model.description]
                data.append(row)

        return self.from_dataset(
            title=self.title,
            columns=[
                "0", "1", "2", "3",
                "0", "1", "2", "3",
                "Name",
                "Description"
            ],
            data=data, enumerate=True
        )
Esempio n. 4
0
def reduce_switchport(task):
    """
    Aggregate completed switchport map tasks into a single report:
    one section per managed object, one row per interface.
    """
    from noc.lib.app.simplereport import Report, TableSection,\
        SectionRow, TableColumn
    from noc.lib.text import list_to_ranges
    # Collect rows from every completed ("C") map task
    data = []
    for map_task in task.maptask_set.filter(status="C"):
        mo = map_task.managed_object
        data.append(SectionRow("%s (%s)" % (mo.name, mo.profile_name)))
        for rec in map_task.script_result:
            data.append([
                rec["interface"],
                rec.get("description", ""),
                rec["status"],
                rec.get("untagged", ""),
                list_to_ranges(rec.get("tagged", [])),
                ", ".join(rec.get("members", []))
            ])
    # Assemble the report
    report = Report()
    report.append_section(
        TableSection(name="",
                     columns=["Interface", "Description",
                              TableColumn("Status", format="bool"),
                              "Untagged", "Tagged", "Members"],
                     data=data))
    return report
Esempio n. 5
0
 def get_data(self, **kwargs):
     """
     Validator catalogue: one section per registered validator showing
     its scopes, description, handler path and tags. Object-scoped
     validators are listed first, then interface-scoped ones.
     """
     object_rows = {}
     interface_rows = {}
     for name in validator_registry.validators:
         validator = validator_registry.validators[name]
         scopes = []
         if validator.is_object():
             scopes += ["OBJECT"]
         if validator.is_interface():
             scopes += ["INTERFACE"]
         rows = [
             SectionRow("[%s] %s" % (", ".join(scopes), validator.TITLE)),
             ["Description", validator.DESCRIPTION],
             ["Handler", "%s.%s" % (validator.__module__,
                                    validator.__name__)],
             ["Tags", ", ".join(validator.TAGS or [])]
         ]
         # A validator may appear under both scopes
         if validator.is_object():
             object_rows[validator.TITLE] = rows
         if validator.is_interface():
             interface_rows[validator.TITLE] = rows
     result = []
     for title in sorted(object_rows):
         result += object_rows[title]
     for title in sorted(interface_rows):
         result += interface_rows[title]
     return self.from_dataset(title=self.title, columns=["", ""],
                              data=result)
Esempio n. 6
0
    def get_data(self, request, pool=None, selector=None, report_type=None, **kwargs):
        """
        Report managed objects whose credential discovery failed.

        One row per (object, failure class): identity, the credentials
        in use, availability, and the decoded discovery problem.

        NOTE(review): ``selector`` and ``report_type`` are accepted but
        never used in this method -- confirm whether that is intended.
        """
        data = []
        columns, columns_desr = [], []
        # (label, expression) pairs; the coded strings are
        # ReportModelFilter selector expressions for each failure class
        r_map = [
            (_("Not Available"), "2is1.3isp1.3is1"),
            (_("Failed to guess CLI credentials"), "2is1.3isp0.2isp1"),
            (_("Failed to guess SNMP community"), "2is1.3isp1.3is2.1isp1")
        ]
        for x, y in r_map:
            columns += [y]
            columns_desr += [x]

        mos = ManagedObject.objects.filter()
        if pool:
            mos = mos.filter(pool=pool)
            data += [SectionRow(name=pool.name)]
        # Non-superusers only see objects in their administrative domains
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        mos = list(mos.values_list("id", flat=True).order_by("id"))
        mos_s = set(mos)
        report = ReportModelFilter()
        result = report.proccessed(",".join(columns))

        # Build id -> hostname and id -> discovery-result lookup tables
        mo_hostname = ReportObjectsHostname1(sync_ids=mos)
        mo_hostname = mo_hostname.get_dictionary()
        d_result = ReportDiscoveryResult(sync_ids=mos)
        d_result = d_result.get_dictionary()
        for col in columns:
            # Objects matching this failure class, limited to our id set
            for mo_id in result[col.strip()].intersection(mos_s):
                mo = ManagedObject.get_by_id(mo_id)
                problem = self.decode_problem(d_result.get(mo_id))
                # No recorded problem and no discovery result at all
                if not problem and mo_id not in d_result:
                    problem = "Discovery disabled"
                data += [(
                    mo.name,
                    mo.address,
                    mo.administrative_domain.name,
                    mo.profile.name,
                    mo_hostname.get(mo.id, ""),
                    mo.auth_profile if mo.auth_profile else "",
                    # Credentials come from the auth profile when one is set
                    mo.auth_profile.user if mo.auth_profile else mo.user,
                    mo.auth_profile.snmp_ro if mo.auth_profile else mo.snmp_ro,
                    _("No") if not mo.get_status() else _("Yes"),
                    columns_desr[columns.index(col)],
                    problem
                )]

        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"), _("Address"), _("Administrative Domain"), _("Profile"), _("Hostname"),
                _("Auth Profile"), _("Username"), _("SNMP Community"),
                _("Avail"), _("Error"), _("Error Detail")
            ],
            data=data)
Esempio n. 7
0
    def get_data(self, request, **kwargs):
        """
        Report groups of managed objects whose DiscoveryID records share
        the same discovered MACs, with a best-effort guess at the cause
        of each duplicate group.
        """
        data = []
        # Find object with equal ID
        # Group DiscoveryID documents by their "macs" value and keep
        # only values occurring more than once (duplicates)
        find = DiscoveryID._get_collection().aggregate([{
            "$group": {
                "_id": "$macs",
                "count": {
                    "$sum": 1
                }
            }
        }, {
            "$match": {
                "count": {
                    "$gt": 1
                }
            }
        }])

        for f in find:
            # DiscoveryID.objects.filter(chassis_mac=f["_id"])
            if not f["_id"]:
                # Empty DiscoveryID
                continue
            data_c = []
            reason = "Other"

            # All discovery records claiming the first MAC of this group
            for r in DiscoveryID._get_collection().find({"macs": f["_id"][0]},
                                                        {
                                                            "_id": 0,
                                                            "object": 1
                                                        }):
                # ManagedObject.get_by_id(o)
                mo = ManagedObject.get_by_id(r["object"])
                if len(data_c) > 0:
                    # Heuristic: same address as previous row -> object is
                    # duplicated; differing is_managed -> object was moved
                    if mo.address == data_c[-1][1]:
                        reason = _("Duplicate MO")
                    elif not mo.is_managed == data_c[-1][3]:
                        reason = _("MO is move")

                data_c += [(mo.name, mo.address, mo.profile.name,
                            mo.is_managed)]

            # Section header: the shared MAC plus the guessed reason
            data += [SectionRow(name="%s %s" % (f["_id"][0], reason))]
            data += data_c

        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                _("Profile"),
                _("is managed")
            ],
            data=data,
        )
Esempio n. 8
0
    def get_data(self, days, include_prefixes,
                 include_addresses, **kwargs):
        """
        IPAM audit report: changes to prefixes and/or addresses over the
        last *days* days, grouped into one section per calendar day
        (newest first).
        """
        since = datetime.date.today() - datetime.timedelta(days=days)
        scope = []
        if include_prefixes:
            scope += ["ip.Prefix"]
        if include_addresses:
            scope += ["ip.Address"]
        op_labels = {
            "C": "Create",
            "U": "Modify",
            "M": "Modify",
            "D": "Delete"
        }
        rows = []
        current_day = None
        for entry in AuditTrail.objects.filter(
                timestamp__gte=since,
                model_id__in=scope).order_by("-timestamp"):
            day = entry.timestamp.date()
            if day != current_day:
                # Entries are sorted by time, so a date change starts
                # a new section
                current_day = day
                rows.append(SectionRow(day.isoformat()))
            model = self.MODELS[entry.model_id]
            if entry.object:
                try:
                    obj = unicode(model.objects.get(id=int(entry.object)))
                except model.DoesNotExist:
                    # Referenced object has since been deleted
                    obj = "UNKNOWN?"
            else:
                obj = "?"
            # Render each field change, skipping no-op None -> None pairs
            changes = [
                "%s: %s -> %s" % (c.field, c.old, c.new)
                for c in entry.changes
                if not (c.old is None and c.new is None)
            ]
            rows.append((
                self.to_json(entry.timestamp),
                entry.user,
                op_labels[entry.op],
                obj,
                self.format_detail("\n".join(changes))
            ))

        return self.from_dataset(
            title=self.title,
            columns=["Time", "User", "Action", "Object", "Detail"],
            data=rows)
Esempio n. 9
0
    def get_data(self, request, **kwargs):
        """
        Link count summary per pool: how many managed objects have
        0/1/2/3+ links, with percentages and per-class detail links.
        """
        columns, columns_desr = [], []

        # (label, expression) pairs; the coded strings are
        # ReportModelFilter selector expressions for each link count class
        r_map = [
            (_("All polling"),
             "2is1.6is1.7a2"),  # "Is Managed, object type defined"
            (_("0"), "2is1.6is1.7a2.3hs0"),  # "Has 0 Links w type defined"
            (_("1"), "2is1.6is1.3hs2"),  # "Has 1 links"
            (_("2"), "2is1.6is1.3hs3"),  # "Has 2 links"
            (_("More 3"), "2is1.6is1.3hs4"),  # "Has more 3 links"
        ]
        for x, y in r_map:
            columns += [y]
            columns_desr += [x]
        report = ReportModelFilter()
        result = report.proccessed(",".join(columns))

        # NOTE(review): summary is accumulated but never used afterwards
        summary = defaultdict(int)
        data = []
        # url = "/sa/reportstat/repstat_download/?report=%s"
        # Detail link template, filled with (filter expression, pool name)
        url = "/sa/reportobjectdetail/download/?" + "&".join([
            "o_format=xlsx",
            "columns=object_name,object_address,object_profile,object_status,profile_name,admin_domain,segment",
            "detail_stat=%s&pool=%s",
        ])
        for p in Pool.objects.filter().order_by("name"):
            m = []
            moss = set(
                ManagedObject.objects.filter(pool=p).values_list("id",
                                                                 flat=True))
            for col in columns:
                # Count of this pool's objects matching the filter class
                m += [len(result[col.strip()].intersection(moss))]
                summary[col] += m[-1]
            data += [SectionRow(name=p.name)]
            data += [(x, y, self.calc_percent(x, y),
                      url % (columns[columns_desr.index(x)], p.name))
                     for x, y in zip(columns_desr, m)]
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Links count"),
                _("MO Count"),
                _("Percent at All"),
                TableColumn(_("Detail"), format="url"),
            ],
            data=data,
        )
Esempio n. 10
0
    def get_data(self, **kwargs):
        """
        Prefix usage report: address utilisation per prefix, grouped
        into one section per VRF.
        """
        from django.db import connection

        data = []
        current_vrf = None
        cursor = connection.cursor()
        cursor.execute(self.QUERY)
        for vrf, rd, afi, prefix, description, used in cursor:
            if current_vrf != vrf:
                # Rows arrive ordered by VRF: open a new section on change
                data.append(SectionRow("%s (%s)" % (vrf, rd)))
                current_vrf = vrf
            p = IP.prefix(prefix)
            if afi == "4":
                total = p.size
                if p.mask < 31 and total - used >= 2:
                    # Network and broadcast addresses are not assignable
                    total = p.size - 2
                free = total - used
                percent = used * 100 / total
            elif afi == "6":
                if p.mask >= 96:
                    total = 2 ** (128 - p.mask)
                    free = total - used
                    percent = used * 100 / total
                else:
                    # Short IPv6 prefixes: utilisation is not meaningful
                    total = "-"
                    free = "-"
                    percent = "-"
            data.append([prefix, description, used, free, total, percent])
        return self.from_dataset(
            title=self.title,
            columns=[
                "Prefix", "Description",
                TableColumn("IP Used", align="right", format="numeric"),
                TableColumn("IP Free", align="right", format="numeric"),
                TableColumn("IP Total", align="right", format="numeric"),
                TableColumn("% Used", align="right", format="percent")
            ],
            data=data)
Esempio n. 11
0
    def get_data(self, profile):
        """
        List event classification rules applicable to *profile*, one
        section per rule with its event class and match patterns.
        """
        def profile_pattern(rule):
            # Value regex of the rule's "profile" pattern, if present
            for p in rule.patterns:
                if p.key_re in ("profile", "^profile$"):
                    return p.value_re
            return None

        data = []
        for rule in EventClassificationRule.objects.order_by("preference"):
            pattern = profile_pattern(rule)
            if pattern and not re.search(pattern, profile):
                continue  # Rule does not match this profile
            data.append(SectionRow("%s (%s)" % (rule.name,
                                                rule.preference)))
            data.append(["Event Class", rule.event_class.name])
            data += [[p.key_re, p.value_re] for p in rule.patterns]
        return self.from_dataset(title=self.title,
                                 columns=["Key RE", "Value RE"],
                                 data=data)
Esempio n. 12
0
 def get_data(self, **kwargs):
     """
     Tile cache coverage: per active map and zoom level, the cached
     tile count versus the theoretical maximum for that zoom.
     """
     data = []
     for m in Map.objects.filter(is_active=True).order_by("name"):
         data.append(SectionRow(m.name))
         for zoom in range(MIN_ZOOM, MAX_ZOOM + 1):
             cached = TileCache.objects.filter(map=m.id,
                                               zoom=zoom).count()
             # A zoom level holds 2^z x 2^z = 4^z tiles
             max_tiles = 4 ** zoom
             data.append([zoom, cached, max_tiles,
                          cached * 100.0 / max_tiles])
     return self.from_dataset(title=self.title,
                              columns=[
                                  TableColumn("Zoom", align="right"),
                                  TableColumn("Tiles",
                                              align="right",
                                              format="integer"),
                                  TableColumn("Max. Tiles",
                                              align="right",
                                              format="integer"),
                                  TableColumn("%",
                                              align="right",
                                              format="percent")
                              ],
                              data=data)
Esempio n. 13
0
 def get_data(self, **kwargs):
     """
     Metric settings overview: per model, each object with its active
     and inactive metric sets.
     """
     per_model = defaultdict(list)
     for ms in MetricSettings.objects.all():
         active = "; ".join(s.metric_set.name
                            for s in ms.metric_sets if s.is_active)
         inactive = "; ".join(s.metric_set.name
                              for s in ms.metric_sets if not s.is_active)
         per_model[ms.model_id].append([unicode(ms.get_object()),
                                        active, inactive])
     data = []
     for model_id in sorted(per_model):
         data.append(SectionRow("Model: %s" % model_id))
         # Rows inside a section are ordered by object name
         data += sorted(per_model[model_id], key=lambda row: row[0])
     return self.from_dataset(
         title=self.title,
         columns=[
             "Object",
             "Active Metric Sets",
             "Inactive Metric Sets"
         ],
         data=data
     )
Esempio n. 14
0
    def get_data(self,
                 request,
                 pool=None,
                 obj_profile=None,
                 selector=None,
                 avail_status=None,
                 profile_check_only=None,
                 failed_scripts_only=None,
                 filter_pending_links=None,
                 filter_none_objects=None,
                 filter_view_other=None,
                 **kwargs):
        """
        Discovery problems report: one row per managed object and
        discovery method, showing the problem recorded by the last box
        discovery job.

        :param request: HTTP request (access-control checks)
        :param pool: Pool to report on; defaults to the first pool
        :param obj_profile: Restrict to this object profile
        :param selector: When set, use the selector's query instead of
            the pool filter
        :param avail_status: Restrict to available objects only
        :param profile_check_only: Only credential/profile-guess problems
        :param failed_scripts_only: Only script failures, excluding
            credential-guess problems
        :param filter_pending_links: Hide link-discovery methods that
            may legitimately stay pending
        :param filter_none_objects: Skip entries with an empty problem
        :param filter_view_other: Only generic-profile objects whose
            profile has ping disabled
        """
        data = []
        match = None
        # Remote error codes -> human readable descriptions
        code_map = {
            "1": "Unknown error",
            "10000": "Unspecified CLI error",
            "10005": "Connection refused",
            "10001": "Authentication failed",
            "10002": "No super command defined",
            "10003": "No super privileges",
            "10004": "SSH Protocol error"
        }

        if not pool:
            pool = Pool.objects.filter()[0]
        data += [SectionRow(name="Report by %s" % pool.name)]
        if selector:
            mos = ManagedObject.objects.filter(selector.Q)
        else:
            mos = ManagedObject.objects.filter(pool=pool, is_managed=True)

        # Non-superusers only see their administrative domains
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if obj_profile:
            mos = mos.filter(object_profile=obj_profile)
        if filter_view_other:
            # Generic-profile objects whose object profile has ping disabled
            mnp_in = list(
                ManagedObjectProfile.objects.filter(enable_ping=False))
            mos = mos.filter(profile=Profile.objects.get(
                name=GENERIC_PROFILE)).exclude(object_profile__in=mnp_in)
        # NOTE(review): this value is shadowed by the loop variable below
        # and never used as-is
        discovery = "noc.services.discovery.jobs.box.job.BoxDiscoveryJob"
        mos_id = list(mos.values_list("id", flat=True))
        if avail_status:
            # NOTE(review): computed but never used afterwards; the
            # avail filter is applied via avail_only= below
            avail = ObjectStatus.get_statuses(mos_id)

        if profile_check_only:
            # Any credential-suggest or profile/version detection problem
            match = {
                "$or": [{
                    "job.problems.suggest_cli": {
                        "$exists": True
                    }
                }, {
                    "job.problems.suggest_snmp": {
                        "$exists": True
                    }
                }, {
                    "job.problems.profile.": {
                        "$regex": "Cannot detect profile"
                    }
                }, {
                    "job.problems.version.": {
                        "$regex": "Remote error code 1000[1234]"
                    }
                }]
            }

        elif failed_scripts_only:
            # Non-empty problems that are not credential suggestions
            match = {
                "$and": [{
                    "job.problems": {
                        "$exists": "true",
                        "$ne": {}
                    }
                }, {
                    "job.problems.suggest_snmp": {
                        "$exists": False
                    }
                }, {
                    "job.problems.suggest_cli": {
                        "$exists": False
                    }
                }]
            }
        elif filter_view_other:
            match = {"job.problems.suggest_snmp": {"$exists": False}}

        rdp = ReportDiscoveryProblem(mos, avail_only=avail_status, match=match)
        exclude_method = []
        if filter_pending_links:
            # Link discovery methods that may legitimately stay pending
            exclude_method += ["lldp", "lacp", "cdp", "huawei_ndp"]

        for discovery in rdp:
            mo = ManagedObject.get_by_id(discovery["key"])
            # Iterate problems, skipping excluded discovery methods
            for method in ifilterfalse(lambda x: x in exclude_method,
                                       discovery["job"][0]["problems"]):
                problem = discovery["job"][0]["problems"][method]
                if filter_none_objects and not problem:
                    continue
                if isinstance(problem, dict) and "" in problem:
                    # Unwrap the default ("") entry of a problem dict
                    problem = problem.get("", "")
                if "Remote error code" in problem:
                    # Replace the trailing numeric code with its description
                    problem = code_map.get(problem.split(" ")[-1], problem)
                if isinstance(problem, six.string_types):
                    # Keep the report row single-line
                    problem = problem.replace("\n", " ").replace("\r", " ")

                data += [(mo.name, mo.address,
                          mo.profile.name, mo.administrative_domain.name,
                          _("Yes") if mo.get_status() else _("No"),
                          discovery["st"].strftime("%d.%m.%Y %H:%M")
                          if "st" in discovery else "", method, problem)]

        return self.from_dataset(title=self.title,
                                 columns=[
                                     _("Managed Object"),
                                     _("Address"),
                                     _("Profile"),
                                     _("Administrative Domain"),
                                     _("Avail"),
                                     _("Last successful discovery"),
                                     _("Discovery"),
                                     _("Error")
                                 ],
                                 data=data)
Esempio n. 15
0
 def get_data(self, **kwargs):
     """
     Capacity summary: managed object counts per profile, interface
     counts per profile, link counts per discovery method, and the
     resulting discovery job load.
     """
     data = [SectionRow("Managed Objects")]
     rows = []
     box_jobs = 0
     box_jobs_sec = 0.0
     periodic_jobs = 0
     periodic_jobs_sec = 0.0
     for profile in ManagedObjectProfile.objects.all():
         count = ManagedObject.objects.filter(
             object_profile=profile).count()
         rows.append([profile.name, count])
         # Accumulate the discovery job load implied by this profile
         if profile.enable_box_discovery:
             box_jobs += count
             box_jobs_sec += float(count) / profile.box_discovery_interval
         if profile.enable_periodic_discovery:
             periodic_jobs += count
             periodic_jobs_sec += float(
                 count) / profile.periodic_discovery_interval
     data += sorted(rows, key=lambda row: -row[1])
     # Interface summary; "-" row counts interfaces with no profile match
     rows = []
     data.append(SectionRow("Interfaces"))
     unclassified = Interface.objects.count()
     for profile in InterfaceProfile.objects.all():
         n = Interface.objects.filter(profile=profile).count()
         rows.append([profile.name, n])
         unclassified -= n
     data.append(["-", unclassified])
     data += sorted(rows, key=lambda row: -row[1])
     # Links per discovery method
     data.append(SectionRow("Links"))
     agg = Link._get_collection().aggregate([{
         "$group": {
             "_id": "$discovery_method",
             "count": {
                 "$sum": 1
             }
         }
     }, {
         "$sort": {
             "count": -1
         }
     }])
     rows = [(x["_id"], x["count"]) for x in agg]
     data += sorted(rows, key=lambda row: -row[1])
     # Discovery job load computed above
     data.append(SectionRow("Discovery jobs summary"))
     data.append(["Box", box_jobs])
     data.append(["Periodic", periodic_jobs])
     data.append(SectionRow("Jobs per second"))
     data.append(["Box", box_jobs_sec])
     data.append(["Periodic", periodic_jobs_sec])
     return self.from_dataset(title=self.title,
                              columns=[
                                  "",
                                  TableColumn("Count",
                                              align="right",
                                              format="integer",
                                              total="sum",
                                              total_label="Total")
                              ],
                              data=data)
Esempio n. 16
0
def switchport_validation_reduce(task):
    """
    Reduce handler for switchport validation.

    Compares configured switchport VLANs against VLANs actually observed in
    the MAC address table and reports ports whose configuration allows VLANs
    never seen on the wire.

    :param task: ReduceTask instance whose map tasks ran
        ``get_mac_address_table`` and ``get_switchport``
    :return: Report instance on success, error string otherwise
    """
    from noc.lib.app.simplereport import Report, TableSection, SectionRow
    from noc.lib.text import list_to_ranges

    switchports = {}  # object -> interface -> (description, set of vlans)
    macs = {}  # object -> interface -> set of vlans
    # Collect data from completed ("C") map tasks only
    for mt in task.maptask_set.filter(status="C"):
        o = mt.managed_object
        if mt.map_script.endswith(".get_mac_address_table"):
            # Populate macs: VLANs actually seen per interface
            macs[o] = {}
            for m in mt.script_result:
                for i in m["interfaces"]:
                    if i not in macs[o]:
                        macs[o][i] = set()
                    macs[o][i].add(m["vlan_id"])
        elif mt.map_script.endswith(".get_switchport"):
            # Populate switchports: configured VLANs per interface
            switchports[o] = {}
            for p in mt.script_result:
                if not p["status"]:
                    # Skip ports in shutdown
                    continue
                i = p["interface"]
                if i not in switchports[o]:
                    switchports[o][i] = (p.get("description", ""), set())
                if "untagged" in p and p["untagged"]:
                    switchports[o][i][1].add(p["untagged"])
                if p["tagged"]:
                    switchports[o][i][1].update(p["tagged"])
        else:
            raise Exception("Inconsistent map task")
    if not macs or not switchports:
        return "Failed to retrieve the data!!!"
    # Process data
    data = []
    for o in switchports:
        if o not in macs or not macs[o]:
            continue
        # Find inconsistent ports
        inconsistent_ports = []  # (port, description, switchport vlans, excessive vlans)
        for i in switchports[o]:
            if i not in macs[o]:
                # No MAC data for the port at all
                inconsistent_ports += [(i, switchports[o][i][0],
                                        switchports[o][i][1], None)]
            else:
                # Remove intersection: VLANs configured but never observed
                v = switchports[o][i][1] - macs[o][i]
                if v:
                    inconsistent_ports += [(i, switchports[o][i][0],
                                            switchports[o][i][1], v)]
        # Add to data if inconsistent ports found
        if inconsistent_ports:
            data += [SectionRow(o.name)]
            # key= sort by port name replaces the Python2-only
            # cmp-function form, which is invalid on Python 3
            data += [(p, d, list_to_ranges(v),
                      list_to_ranges(e) if e is not None else "No MACs found")
                     for p, d, v, e in sorted(inconsistent_ports,
                                              key=lambda x: x[0])]
    if not data:
        return "Failed to retrieve data!!!"
    # Build report
    r = Report()
    r.append_section(
        TableSection("",
                     columns=[
                         "Port", "Description", "Switchport VLANs",
                         "Excessive VLANs"
                     ],
                     data=data))
    return r
Esempio n. 17
0
File: views.py Progetto: nbashev/noc
    def get_data(self, request, report_type=None, **kwargs):
        """
        Object-status summary report.

        For every pool, count managed objects matching each status selector
        and emit one row per selector with a drill-down XLSX download URL,
        followed by a grand-total "Summary" section.

        :param request: HTTP request object (part of the report API)
        :param report_type: reserved by the report API; not used here
        :return: dataset with PP / Status / Quantity / Percent / Detail columns
        """
        columns, columns_descr = [], []
        # Hierarchical paragraph numbers for the "PP" column,
        # one per selector in r_map (same order).
        sc_code = [
            "1.1",
            "1.2",
            "1.2.1",
            "1.2.1.1",
            "1.2.2",
            "1.2.2.1",
            "1.2.2.2",
            "1.2.2.2.1",
            "1.2.2.2.2",
            "1.2.2.2.2.1",
            "1.2.3",
        ]
        # (human-readable status, selector expression for ReportModelFilter)
        r_map = [
            (_("Not Managed"), "1is1"),
            (_("Is Managed"), "2is1"),
            (_("Is Managed, object type defined"), "2is1.6is1.3isp0"),
            (_("Is Managed, object type defined bad CLI Credential"),
             "2is1.6is1.3isp0.2isp1"),
            (_("Is Managed, object type undefined"), "2is1.6is1.3isp1"),
            (_("Is Managed, object type undefined not ping response"),
             "2is1.6is1.3isp1.3is1"),
            (_("Is Managed, object type undefined has ping response"),
             "2is1.6is1.3isp1.3is2"),
            (
                _("Is Managed, object type undefined bad SNMP Credential"),
                "2is1.6is1.3isp1.3is2.1isp1",
            ),
            (
                _("Is Managed, object type undefined for various reasons"),
                "2is1.6is1.3isp1.3is2.1isp0",
            ),
            (_("Is Managed, object type Profile is not know"),
             "2is1.6is1.9a1.3is2.4isp1"),
            (_("Is monitoring, object type undefined, only availablity check"),
             "2is1.6is2"),
        ]

        for descr, selector in r_map:
            columns += [selector]
            columns_descr += [descr]
        report = ReportModelFilter()
        # selector expression -> set of matching managed object ids
        result = report.proccessed(",".join(columns))

        summary = defaultdict(int)
        data = []
        # Drill-down URL template: detail_stat carries the selector,
        # pool the pool name (empty in the summary section)
        url = "/sa/reportobjectdetail/download/?" + "&".join([
            "o_format=xlsx",
            "columns=object_name,object_address,object_profile,object_status,profile_name,admin_domain,segment",
            "detail_stat=%s&pool=%s",
        ])
        for p in Pool.objects.filter().order_by("name"):
            m = []
            moss = set(
                ManagedObject.objects.filter(pool=p).values_list("id",
                                                                 flat=True))
            for col in columns:
                m += [len(result[col.strip()].intersection(moss))]
                summary[col] += m[-1]
            data += [SectionRow(name=p.name)]
            data += [(sc, x, y, self.calc_percent(x, y),
                      url % (columns[columns_descr.index(x)], p.name))
                     for sc, x, y in zip(sc_code, columns_descr, m)]
            # Fixed: pad the row to all five columns so the empty Percent
            # and Detail cells stay aligned with the header.
            data += [("1.2.2.2.2.2",
                      _("Is Managed, objects not processed yet"), 0, "", "")]
        data += [SectionRow(name="Summary")]
        summary = [summary[k] for k in columns]
        data += [(sc, x, y, self.calc_percent(x, y),
                  url % (columns[columns_descr.index(x)], ""))
                 for sc, x, y in zip(sc_code, columns_descr, summary)]
        # Fixed: same five-column padding as the per-pool rows above
        data += [("1.2.2.2.2.2", _("Is Managed, objects not processed yet"), 0,
                  "", "")]
        columns = [
            _("PP"),
            _("Status"),
            _("Quantity"),
            _("Percent"),
            TableColumn(_("Detail"), format="url"),
        ]

        return self.from_dataset(title=self.title, columns=columns, data=data)
Esempio n. 18
0
    def get_data(self, request, **kwargs):
        """
        Link-count distribution per pool.

        Aggregates noc.links by managed object to get a link count per
        object, then, for every pool (except the service pool "P0001"),
        shows how many polled objects have 0, 1, 2 or more than 2 links.

        :param request: HTTP request object (part of the report API)
        :return: dataset with Links count / MO Count / Percent columns
        """
        data = []
        # link count per managed object, read from a secondary replica
        value = get_db()["noc.links"].with_options(
            read_preference=ReadPreference.SECONDARY_PREFERRED).aggregate([{
                "$unwind":
                "$interfaces"
            }, {
                "$lookup": {
                    "from": "noc.interfaces",
                    "localField": "interfaces",
                    "foreignField": "_id",
                    "as": "int"
                }
            }, {
                "$group": {
                    "_id": "$int.managed_object",
                    "count": {
                        "$sum": 1
                    }
                }
            }])
        # bucket -> set of managed object ids; bucket 3 means "more than 2"
        count = {0: set(), 1: set(), 2: set(), 3: set()}
        ap = AuthProfile.objects.filter(name__startswith="TG")
        for v in value:
            if v["count"] > 2:
                count[3].add(v["_id"][0])
                continue
            if not v["_id"]:
                self.logger.warning("No IDS in response query")
                continue
            count[v["count"]].add(v["_id"][0])

        for p in Pool.objects.order_by("name"):
            if p.name == "P0001":
                continue
            data += [SectionRow(name=p.name)]
            smos = set(
                ManagedObject.objects.filter(pool=p, is_managed=True).exclude(
                    profile=Profile.get_by_name(GENERIC_PROFILE)).exclude(
                        auth_profile__in=ap).values_list('id', flat=True))
            # percent per object; avoid division by zero on empty pools
            all_p = 100.0 / len(smos) if len(smos) else 1.0
            data += [("All polling", len(smos))]
            for c in count:
                n = len(count[c].intersection(smos))
                if c == 3:
                    data += [("More 3", n,
                              "%.2f %%" % round(n * all_p, 2))]
                    continue
                # Fixed: the precision argument belongs inside round();
                # the original had round(x) plus a stray trailing 2 that
                # became a bogus fourth tuple element.
                data += [(c, n, "%.2f %%" % round(n * all_p, 2))]

            # 0 links = all polled minus objects with 1, 2 or >2 links;
            # replace the raw 0-bucket row with the recomputed one
            s0 = len(smos) - sum([d[1] for d in data[-3:]])
            data.pop(-4)
            data.insert(-3, (0, s0, "%.2f %%" % round(s0 * all_p, 2)))

        return self.from_dataset(
            title=self.title,
            columns=[_("Links count"),
                     _("MO Count"),
                     _("Percent at All")],
            data=data)
Esempio n. 19
0
    def get_data(self,
                 request,
                 interval=1,
                 repo_format=0,
                 from_date=None,
                 to_date=None,
                 **kwargs):
        """
        TT-system interaction report built from ClickHouse "span" records.

        Two output formats:

        * default -- per server/service request counts and latency
          quantiles (ms);
        * ``repo_format == "1"`` -- per-error detail rows for TT
          creation/closing calls.

        :param interval: days back from now, used when from_date is absent
        :param repo_format: "1" selects the error-detail format
        :param from_date: start date, "%d.%m.%Y"
        :param to_date: end date, "%d.%m.%Y"
        """
        # Date Time Block
        if from_date:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        elif interval:
            from_date = datetime.datetime.now() - datetime.timedelta(
                days=int(interval))
        else:
            from_date = datetime.datetime.now() - datetime.timedelta(days=1)

        if to_date:
            to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y")
            if from_date == to_date:
                # Degenerate one-day range: include the whole day
                to_date = from_date + datetime.timedelta(days=1)
        elif interval:
            to_date = from_date + datetime.timedelta(days=int(interval))
        else:
            to_date = from_date + datetime.timedelta(days=1)
        # Summary-format columns (default)
        columns = [
            _("Server"),
            _("Service"),
            _("Request count"),
            _("Success request count"),
            _("Failed request count"),
            _("Success request (%)"),
            _("Q1 (ms)"),
            _("Q2 (ms)"),
            _("Q3 (ms)"),
            _("p95 (ms)"),
            _("max (ms)")
        ]
        if repo_format == "1":
            # Detail-format columns
            columns = [
                _("Timestamp"),
                _("Server"),
                _("Service"),
                _("Managed object"),
                _("TT ID"),
                _("Error code"),
                _("Error text")
            ]
        ts_from_date = time.mktime(from_date.timetuple())
        ts_to_date = time.mktime(to_date.timetuple())

        tt_systems = TTSystem.objects.filter().scalar("name")
        # Managed Object block

        # q1: latency quantiles (ms) per server/service
        q1 = """select server, service, count(), round(quantile(0.25)(duration), 0)/1000 as q1,
                                        round(quantile(0.5)(duration), 0)/1000 as q2,
                                        round(quantile(0.75)(duration), 0)/1000 as q3,
                                        round(quantile(0.95)(duration),0)/1000 as p95,
                                        round(max(duration),0)/1000 as max from span where %s
                                        group by server, service"""

        # q2: request counts per server/service/error code
        q2 = """select server, service, error_code, count(), avg(duration)
                from span where %s group by server, service, error_code"""

        # q3: failed TT create/close spans for the detail format
        q3 = """select ts, server, service, in_label, in_label, error_code, error_text from span
                where service IN ('create_massive_damage_outer', 'change_massive_damage_outer_close') and
                      error_code <> 0 and %s"""

        q_where = ["server IN ('%s')" % "', '".join(tt_systems)]
        # q_where = ["managed_object IN (%s)" % ", ".join(mo_bi_dict.keys())]
        q_where += [
            "(date >= toDate(%d)) AND (ts >= toDateTime(%d) AND ts <= toDateTime(%d))"
            % (ts_from_date, ts_from_date, ts_to_date)
        ]
        r = []
        ch = connection()
        if repo_format == "1":
            # TT id (tail of escalation_tt) -> archived alarm escalated to it
            aa = {
                aa.escalation_tt.split(":")[-1]: aa
                for aa in ArchivedAlarm.objects.filter(
                    clear_timestamp__gte=from_date,
                    clear_timestamp__lte=to_date,
                    escalation_tt__exists=True)
            }
            query = q3 % " and ".join(q_where)
            for row in ch.execute(query):
                if row[2] in ["create_massive_damage_outer"]:
                    # TT creation span: resolve managed object by tt_system_id
                    row[2] = u"Создание ТТ"
                    try:
                        row[3] = ManagedObject.objects.get(
                            tt_system_id=int(row[3]))
                        row[4] = ""
                    except ManagedObject.DoesNotExist:
                        pass
                    except ManagedObject.MultipleObjectsReturned:
                        # Several objects share the id; prefer the managed one
                        row[3] = ManagedObject.objects.get(tt_system_id=int(
                            row[3]),
                                                           is_managed=True)
                        row[4] = ""
                elif row[2] in ["change_massive_damage_outer_close"]:
                    # TT closing span: resolve object via the archived alarm
                    row[2] = u"Закрытие ТТ"
                    row[4] = row[3]
                    row[3] = aa[
                        row[3]].managed_object if row[3] in aa else row[3]
                else:
                    continue
                r += [row]
        else:
            query = q1 % " and ".join(q_where)
            # (server, service) ->
            # [total, success, failed, % placeholder, q1, q2, q3, p95, max]
            tt_s = {}
            for row in ch.execute(query):
                tt_s[(row[0], row[1])] = [row[2]] + [0, 0, 0] + row[3:]
            query = q2 % " and ".join(q_where)
            for row in ch.execute(query):
                # error_code "0" counts as success, others sum into failures
                if row[2] == "0":
                    tt_s[(row[0], row[1])][1] = row[3]
                else:
                    tt_s[(row[0], row[1])][2] += int(row[3])

            r += [
                SectionRow(name="Report from %s to %s" %
                           (from_date.strftime("%d.%m.%Y %H:%M"),
                            to_date.strftime("%d.%m.%Y %H:%M")))
            ]
            for line in sorted(tt_s, key=lambda x: x[0]):
                data = list(line)
                data += tt_s[line]
                # Fill the "Success request (%)" placeholder:
                # success count (data[3]) over total count (data[2])
                data[5] = round((float(data[3]) / float(data[2])) * 100.0, 2)
                r += [data]

        return self.from_dataset(title=self.title,
                                 columns=columns,
                                 data=r,
                                 enumerate=True)
Esempio n. 20
0
    def get_data(self,
                 request,
                 pool=None,
                 obj_profile=None,
                 filter_ignore_iface=True,
                 **kwargs):
        """
        Pending-links report: interfaces whose discovered neighbor could
        not be linked, with a direction marker per problem type.

        :param pool: restrict to managed objects in this pool
        :param obj_profile: optional object-profile filter
        :param filter_ignore_iface: accepted for API compatibility; not
            referenced in the body
        """
        # Extract remote chassis id / system name out of the stringified
        # remote_id payload of "Remote object is not found" problems
        rn = re.compile(
            r"'remote_chassis_id': u'(?P<rem_ch_id>\S+)'.+'remote_system_name': u'(?P<rem_s_name>\S+)'",
            re.IGNORECASE,
        )
        # Problem text -> short direction marker shown in the report
        problem = {
            "Not found iface on remote": "->",
            "Not found local iface on remote": "<-",
            "Remote object is not found": "X",
        }
        data = []
        # (remote chassis id, remote system name) -> occurrence count
        not_found = defaultdict(int)
        # (local name, local address) -> occurrence count
        local_on_remote = defaultdict(int)
        # Get all managed objects visible to the requesting user
        mos = ManagedObject.objects.filter(is_managed=True, pool=pool)

        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if obj_profile:
            mos = mos.filter(object_profile=obj_profile)
        mos_id = {mo.id: mo for mo in mos}
        report = ReportPendingLinks(
            list(mos_id),
            ignore_profiles=list(
                InterfaceProfile.objects.filter(discovery_policy="I")),
        )
        problems = report.out
        for mo_id in problems:
            # Fixed: dict.get(key, default) always evaluated the default,
            # issuing a needless DB lookup for every already-cached object
            mo = mos_id[mo_id] if mo_id in mos_id \
                else ManagedObject.get_by_id(mo_id)
            for iface in problems[mo_id]:
                data += [(
                    mo.name,
                    mo.address,
                    mo.profile.name,
                    mo.administrative_domain.name,
                    iface,
                    problem[problems[mo_id][iface]["problem"]],
                    problems[mo_id][iface]["remote_id"],
                )]
                p_text = problems[mo_id][iface]["problem"]
                if p_text == "Remote object is not found":
                    match = rn.findall(problems[mo_id][iface]["remote_id"])
                    if match:
                        not_found[match[0]] += 1
                elif p_text == "Not found iface on remote":
                    local_on_remote[(mo.name, mo.address)] += 1
        # Frequent offenders (more than 4 occurrences) summarized at the end
        data += [SectionRow(name="Summary information on u_object")]
        for c in not_found:
            if not_found[c] > 4:
                data += [c]
        data += [SectionRow(name="Summary information on agg")]
        for c in local_on_remote:
            if local_on_remote[c] > 4:
                data += [c]
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                _("Profile"),
                _("Administrative domain"),
                _("Interface"),
                _("Direction"),
                _("Remote Object")
            ],
            data=data,
        )
Esempio n. 21
0
def reduce(task):
    """
    Reduce handler to show version report.

    Builds two sections: per-object vendor/platform/version rows grouped
    by administrative domain, and a version summary ordered by count.

    :param task: ReduceTask instance
    :return: Report instance
    """
    from noc.lib.app.simplereport import Report, TableSection, SectionRow
    # Fetch data
    ad = {}  # administrative domain name -> list of rows
    summary = {}  # (vendor, platform, version) -> count
    attrs = {}  # attribute name -> count of objects exposing it
    # First pass - count summary and attributes (completed tasks only)
    for mt in task.maptask_set.all():
        adn = mt.managed_object.administrative_domain.name
        if adn not in ad:
            ad[adn] = []
        if mt.status == "C":
            r = mt.script_result
            # Update summary
            key = (r["vendor"], r["platform"], r["version"])
            summary[key] = summary.get(key, 0) + 1
            # Update attributes count
            if "attributes" in r:
                for k in r["attributes"]:
                    attrs[k] = attrs.get(k, 0) + 1
    # Placeholder tail for failed tasks: vendor, platform, version
    # plus one "-" per known attribute
    a_fail = ("-", ) * (len(attrs) + 3)
    # Attributes ordered most-common-first; key= replaces the
    # Python2-only cmp-function form of sorted()
    a_list = sorted(attrs, key=lambda k: attrs[k], reverse=True)
    # Second pass - fill data
    for mt in task.maptask_set.all():
        adn = mt.managed_object.administrative_domain.name
        r = mt.script_result
        if mt.status == "C":
            # Completed tasks
            s = (mt.managed_object.name, r["vendor"], r["platform"],
                 r["version"])
            if "attributes" in r:
                s += tuple([r["attributes"].get(k, "-") for k in a_list])
            else:
                s += ("-", ) * len(a_list)
            ad[adn] += [s]
        else:
            # Failed tasks
            ad[adn] += [(mt.managed_object.name, ) + a_fail]
    # Prepare data: one section per administrative domain,
    # rows sorted by object name
    data = []
    for adn in sorted(ad):
        data += [SectionRow(name=adn)]
        data += sorted(ad[adn], key=lambda row: row[0])
    # Build report
    report = Report()
    # Object versions (column header typo "Plaform" fixed)
    t = TableSection(name="result",
                     columns=["Object", "Vendor", "Platform", "Version"] +
                     a_list,
                     data=data,
                     enumerate=True)
    report.append_section(t)
    # Version summary, most frequent version first
    summary = sorted([(vp[0], vp[1], vp[2], c) for vp, c in summary.items()],
                     key=lambda row: row[3],
                     reverse=True)
    t = TableSection(name="summary",
                     columns=["Vendor", "Platform", "Version", "Count"],
                     data=summary,
                     enumerate=True)
    report.append_section(t)
    return report
Esempio n. 22
0
    def get_data(self,
                 request,
                 interval=1,
                 from_date=None,
                 to_date=None,
                 skip_avail=False,
                 skip_zero_avail=False,
                 filter_zero_access=False,
                 **kwargs):
        """
        Availability report per managed object for [from_date, to_date).

        :param interval: days back from now, used when from_date is empty;
            may arrive as a string request parameter
        :param from_date: "%d.%m.%Y" start date
        :param to_date: "%d.%m.%Y" end date (a full day is always included)
        :param skip_avail: keep only objects that have availability data
        :param skip_zero_avail: forwarded to get_availability
        :param filter_zero_access: drop objects whose client ports are all
            operationally down
        """
        if not from_date:
            # Fixed: coerce interval to int -- it may be a string request
            # parameter (sibling reports already use int(interval))
            from_date = datetime.datetime.now() - datetime.timedelta(
                days=int(interval))
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")

        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            to_date = datetime.datetime.strptime(
                to_date, "%d.%m.%Y") + datetime.timedelta(days=1)

        # object id -> (availability %, unavailable seconds, outage count)
        a = self.get_availability(start_date=from_date,
                                  stop_date=to_date,
                                  skip_zero_avail=skip_zero_avail)
        rb = self.get_reboots(start_date=from_date, stop_date=to_date)
        r = [SectionRow("Report from %s to %s" % (from_date, to_date))]
        mos = ManagedObject.objects.filter(is_managed=True)

        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if skip_avail:
            mos = mos.filter(id__in=list(a))
        mos_id = list(mos.order_by("id").values_list("id", flat=True))
        if filter_zero_access:
            iface_p = InterfaceProfile.objects.get(name="Клиентский порт")
            match = {"profile": iface_p.id, "managed_object": {"$in": mos_id}}
            # Objects where no client port is operationally up
            pipeline = [
                {
                    "$match": match
                },
                {
                    "$group": {
                        "_id": "$managed_object",
                        "count": {
                            "$sum": 1
                        },
                        "m": {
                            "$max": "$oper_status"
                        },
                    }
                },
                {
                    "$match": {
                        "m": False
                    }
                },
                {
                    "$project": {
                        "_id": True
                    }
                },
            ]
            data = (get_db()["noc.interfaces"].with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED).aggregate(
                    pipeline))
            data = [d["_id"] for d in data]
            mos = mos.exclude(id__in=data)

        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
        mo_hostname = mo_hostname.get_dictionary()
        for o in mos:
            # Objects without measurements default to 100% / no outages
            s = [
                o.administrative_domain.name,
                o.name,
                mo_hostname.get(o.id, ""),
                o.address,
                o.profile.name,
                round(a.get(o.id, (100.0, 0, 0))[0], 2),
            ]
            s.extend(a.get(o.id, (100.0, 0, 0))[1:])
            s.append(rb[o.id] if o.id in rb else 0)
            r += [s]
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Adm. Domain"),
                _("Managed Object"),
                _("Hostname"),
                _("Address"),
                _("Profile"),
                _("Avail"),
                _("Total unavail (sec)"),
                _("Count outages"),
                _("Reboots"),
            ],
            data=r,
            enumerate=True,
        )
Esempio n. 23
0
    def get_data(self, days, date_from, date_to, include_prefixes, search_ip,
                 search_prefix, search_user, include_addresses, **kwargs):
        """
        IPAM audit-trail report, newest first, sectioned by date.

        :param days: look-back window when no explicit date range is given
        :param date_from: range start (inclusive)
        :param date_to: range end (the whole day is included)
        :param include_prefixes: include ip.Prefix changes
        :param include_addresses: include ip.Address changes
        :param search_ip: restrict to one address object
        :param search_prefix: restrict to one prefix object
        :param search_user: case-insensitive user filter
        """
        # Audit op code -> action label; built once, not per row
        op_names = {"C": "Create", "U": "Modify", "M": "Modify", "D": "Delete"}
        scope = []
        if include_prefixes:
            scope += ["ip.Prefix"]
        if include_addresses:
            scope += ["ip.Address"]
        last = None
        r = []
        if date_from and date_to:
            audit_trail = AuditTrail.objects.filter(
                timestamp__gte=date_from,
                timestamp__lte=date_to + datetime.timedelta(days=1),
                model_id__in=scope,
            ).order_by("-timestamp")
        else:
            dt = datetime.date.today() - datetime.timedelta(days=days)
            audit_trail = AuditTrail.objects.filter(
                timestamp__gte=dt, model_id__in=scope).order_by("-timestamp")
        if search_ip:
            try:
                audit_trail = audit_trail.filter(
                    object=str(Address.objects.get(address=search_ip).id))
            except Address.DoesNotExist:
                audit_trail = audit_trail.none()
        if search_prefix:
            try:
                audit_trail = audit_trail.filter(
                    object=str(Prefix.objects.get(prefix=search_prefix).id))
            except Prefix.DoesNotExist:
                audit_trail = audit_trail.none()
        if search_user:
            audit_trail = audit_trail.filter(user__iexact=search_user)
        for rec in audit_trail:
            d = rec.timestamp.date()
            if d != last:
                # Records are ordered by timestamp, so a new date
                # starts a new report section
                last = d
                r += [SectionRow(d.isoformat())]
            model = self.MODELS[rec.model_id]
            if rec.object:
                try:
                    obj = smart_text(model.objects.get(id=int(rec.object)))
                except model.DoesNotExist:
                    obj = "UNKNOWN?"
            else:
                obj = "?"
            # Render field-level changes, skipping no-op entries
            chg = []
            for c in rec.changes:
                if c.old is None and c.new is None:
                    continue
                chg += ["%s: %s -> %s" % (c.field, c.old, c.new)]
            r += [(
                self.to_json(rec.timestamp),
                rec.user,
                op_names[rec.op],
                obj,
                self.format_detail("\n".join(chg)),
            )]

        return self.from_dataset(
            title=self.title,
            columns=["Time", "User", "Action", "Object", "Detail"],
            data=r)
Esempio n. 24
0
File: views.py Progetto: nbashev/noc
    def get_data(self, request, pool=None, filter_dup_macs=False, **kwargs):
        """
        Report managed objects sharing the same discovered MAC addresses.

        :param pool: show only groups containing at least one object from
            this pool (matched by pool name)
        :param filter_dup_macs: hide groups whose MAC is on the
            "duplicated" MAC blacklist
        """
        data = []
        # Group objects by shared MACs: one document per set of object ids
        # that have at least one MAC in common
        find = DiscoveryID._get_collection().aggregate(
            [
                {
                    "$unwind": "$macs"
                },
                {
                    "$group": {
                        "_id": "$macs",
                        "count": {
                            "$sum": 1
                        },
                        "mo": {
                            "$push": "$object"
                        }
                    }
                },
                {
                    "$match": {
                        "count": {
                            "$gt": 1
                        }
                    }
                },
                {
                    "$group": {
                        "_id": "$mo",
                        "macs": {
                            "$push": "$_id"
                        }
                    }
                },
            ],
            allowDiskUse=True,
        )

        for f in find:
            if not f["_id"]:
                # Empty DiscoveryID
                continue
            data_c = []
            pool_c = set()
            reason = "Other"
            for mo in ManagedObject.objects.filter(id__in=f["_id"]):
                pool_c.add(mo.pool.name)
                data_c.append((mo.name, mo.address, mo.profile.name,
                               mo.pool.name, mo.is_managed))
            # Fixed: classification compares the first two objects, so it
            # needs at least two rows; the original guard (> 0) raised
            # IndexError on data_c[1] when only one object remained.
            if len(data_c) > 1:
                if data_c[0][1] == data_c[1][1]:
                    # Same address -> duplicated managed-object record
                    reason = _("Duplicate MO")
                elif data_c[0][4] != data_c[1][4]:
                    # is_managed flags differ -> object was moved
                    reason = _("MO is move")
            if pool and pool not in pool_c:
                continue
            if reason == "Other" and MACBlacklist.is_banned_mac(
                    f["macs"][0], is_duplicated=True):
                if filter_dup_macs:
                    continue
                data += [
                    SectionRow(name="%s %s (%s)" %
                               (MAC(f["macs"][0]), reason, "On duplicated"))
                ]
            else:
                data += [
                    SectionRow(name="%s %s" % (MAC(f["macs"][0]), reason))
                ]
            data += data_c

        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                _("Profile"),
                _("Pool"),
                _("is managed")
            ],
            data=data,
        )
Esempio n. 25
0
File: views.py Progetto: nbashev/noc
    def get_data(self, **kwargs):
        """
        Build a lines-of-code report, grouped per installed "noc." module.

        For each module a section row is emitted, followed by rows counting
        Python / HTML / test lines for: models, migrations, management
        commands, templates, applications, SA profiles (only for the "sa"
        module) and all remaining files ("Other").

        :returns: dataset built by ``self.from_dataset`` with columns
            Type, Name, Python, HTML, Tests
        """
        def lines(path):
            """
            Count lines in the file at *path*.

            NOTE: counts split-on-newline segments, so a trailing newline
            adds one to the total — kept as-is so historic report totals
            stay comparable.
            """
            with open(path) as f:
                return len(f.read().split("\n"))

        def dir_loc(path):
            """
            Recursively count lines of code under *path*.

            .py files in any directory whose path contains a "tests"
            component count as tests; other .py files as Python; .html
            files as HTML. A non-existent *path* yields (0, 0, 0), since
            os.walk simply produces nothing.

            :returns: (py_loc, html_loc, tests_loc) tuple of ints
            """
            py_loc = 0
            html_loc = 0
            tests_loc = 0
            for dirpath, dirnames, filenames in os.walk(path):
                in_tests = "tests" in dirpath.split(os.sep)
                for f in filenames:
                    # Use a distinct name: the original shadowed the *path*
                    # parameter here, which was confusing and fragile.
                    fp = os.path.join(dirpath, f)
                    if f.endswith(".py"):
                        if in_tests:
                            tests_loc += lines(fp)
                        else:
                            py_loc += lines(fp)
                    elif f.endswith(".html"):
                        html_loc += lines(fp)
            return py_loc, html_loc, tests_loc

        data = []
        # Scan every installed application belonging to the "noc" namespace
        for m in [m for m in settings.INSTALLED_APPS if m.startswith("noc.")]:
            m = m[4:]  # strip the "noc." prefix -> on-disk directory name
            module_name = __import__("noc.%s" % m, {}, {},
                                     ["MODULE_NAME"]).MODULE_NAME
            data += [SectionRow(module_name)]
            # Scan models: either a single models.py or a models/ package
            models_path = os.path.join(m, "models.py")
            if os.path.exists(models_path):
                # Single module
                py_loc = lines(models_path)
                mn = "models.py"
            else:
                # Package directory. Guard with isdir(): a module may have
                # neither form, and os.listdir() on a missing directory
                # would otherwise abort the whole report with an OSError.
                py_loc = 0
                models_path = os.path.join(m, "models")
                mn = "models/"
                if os.path.isdir(models_path):
                    for f in os.listdir(models_path):
                        if f.endswith(".py"):
                            py_loc += lines(os.path.join(models_path, f))
            _, _, tests_loc = dir_loc(os.path.join(m, "tests"))
            data += [["Model", mn, py_loc, 0, tests_loc]]
            # Scan migrations
            py_loc, _, _ = dir_loc(os.path.join(m, "migrations"))
            data += [["Migrations", "", py_loc, 0, 0]]
            # Scan management commands (one row per command file)
            for dirpath, dirnames, filenames in os.walk(
                    os.path.join(m, "management", "commands")):
                for f in filenames:
                    if f.endswith(".py") and f != "__init__.py":
                        py_loc = lines(os.path.join(dirpath, f))
                        # f[:-3] drops the ".py" suffix for the row name
                        data += [["Management", f[:-3], py_loc, 0, 0]]
            # Scan templates
            py_loc, html_loc, _ = dir_loc(os.path.join(m, "templates"))
            data += [["Templates", "", py_loc, html_loc, 0]]
            # Scan applications. Guard with isdir(): not every module ships
            # an apps/ directory, and the unguarded os.listdir() used to
            # raise OSError and kill the report.
            apps_root = os.path.join(m, "apps")
            app_names = (os.listdir(apps_root)
                         if os.path.isdir(apps_root) else [])
            for app in [d for d in app_names if not d.startswith(".")]:
                app_path = os.path.join(apps_root, app)
                if not os.path.isdir(app_path):
                    continue
                py_loc = 0
                html_loc = 0
                tests_loc = 0
                for dirpath, dirnames, filenames in os.walk(app_path):
                    # NOTE: substring match, so any path component starting
                    # with "tests" matches — preserved from the original.
                    if os.sep + "tests" in dirpath:
                        for f in filenames:
                            if f.endswith(".py"):
                                tests_loc += lines(os.path.join(dirpath, f))
                    else:
                        for f in filenames:
                            if f.endswith(".py"):
                                py_loc += lines(os.path.join(dirpath, f))
                            elif f.endswith(".html"):
                                html_loc += lines(os.path.join(dirpath, f))
                data += [[
                    "Application",
                    "%s.%s" % (m, app), py_loc, html_loc, tests_loc
                ]]
            # Scan SA profiles (the "sa" module only)
            if m == "sa":
                for d in glob.glob("sa/profiles/*/*"):
                    if not os.path.isdir(d):
                        continue
                    pp = d.split(os.sep)
                    # Profile name is "<vendor>.<os>", the last two path parts
                    profile = ".".join(pp[-2:])
                    py_loc, html_loc, tests_loc = dir_loc(d)
                    data += [["Profile", profile, py_loc, html_loc, tests_loc]]
            # Scan everything not counted by the sections above
            py_loc = 0
            for dirpath, dirnames, filenames in os.walk(m):
                if (os.sep + "tests" in dirpath
                        or os.sep + "templates" in dirpath
                        or os.sep + "apps" in dirpath
                        or os.sep + "management" in dirpath
                        or os.sep + "migrations" in dirpath):
                    continue
                for f in filenames:
                    if f.endswith(".py") and f != "models.py":
                        py_loc += lines(os.path.join(dirpath, f))
            data += [["Other", "", py_loc, 0, 0]]
        return self.from_dataset(
            title=self.title,
            columns=[
                "Type",
                TableColumn("Name", total_label="Total"),
                TableColumn("Python",
                            format="numeric",
                            align="right",
                            total="sum"),
                TableColumn("HTML",
                            format="numeric",
                            align="right",
                            total="sum"),
                TableColumn("Tests",
                            format="numeric",
                            align="right",
                            total="sum"),
            ],
            data=data,
        )