def get_data(self, request, **kwargs):
    def get_row(p):
        r = [p.vrf.name, p.prefix, p.state.name,
             smart_text(p.asn), smart_text(p.vc) if p.vc else ""]
        for f in cf:
            v = getattr(p, f.name)
            r += [v if v is not None else ""]
        r += [p.description, p]
        return r

    # Build the ORM filter from non-empty keyword arguments
    q = {}
    for k in kwargs:
        v = kwargs[k]
        if v:
            if k == "description":
                q[k + "__icontains"] = v
            else:
                q[k] = v
    columns = ["VRF", "Prefix", "State", "AS", "VC"]
    cf = CustomField.table_fields("ip_prefix")
    for f in cf:
        if f.type == "bool":
            columns += [TableColumn(f.label, format="bool")]
        else:
            columns += [f.label]
    columns += ["Description", TableColumn(_("Tags"), format="tags")]
    data = [
        get_row(p)
        for p in Prefix.objects.filter(**q)
        .exclude(prefix="0.0.0.0/0")
        .exclude(prefix="::/0")
        .order_by("vrf__name", "prefix")
        .select_related()
    ]
    return self.from_dataset(title=self.title, columns=columns, data=data)
def get_data(self, **kwargs):
    default_ids = [
        c.id
        for c in EventClass.objects.filter(name__startswith="Unknown | ")
    ]
    count = ActiveEvent.objects.count()
    not_classified = ActiveEvent.objects.filter(
        event_class__in=default_ids).count()
    classified = count - not_classified
    quality = classified * 100 / count if count else 100
    data = [
        ["Active Events", classified, count, quality],
    ]
    return self.from_dataset(
        title=self.title,
        columns=[
            "",
            TableColumn("Classified", format="integer", align="right"),
            TableColumn("Total", format="integer", align="right"),
            TableColumn("Quality", format="percent", align="right"),
        ],
        data=data,
    )
def get_data(self, **kwargs):
    def get_count(cn):
        collection = Collection(cn)
        total = collection.model.objects.count()
        builtin = len(collection.get_items())
        local = total - builtin
        return [builtin, local, total]

    data = [
        ["Alarm Classes"] + get_count("fm.alarmclasses"),
        ["Event Classes"] + get_count("fm.eventclasses"),
        ["Classification Rules"] + get_count("fm.eventclassificationrules"),
    ]
    return self.from_dataset(
        title=self.title,
        columns=[
            "",
            TableColumn("Builtin", align="right", format="integer"),
            TableColumn("Local", align="right", format="integer"),
            TableColumn("Total", align="right", format="integer"),
        ],
        data=data,
    )
def get_data(self, request, **kwargs):
    data = []
    # MIB, Last Updated, Entries, Depends on, Used by
    for m in MIB.objects.order_by("name"):
        ec = MIBData.objects.filter(mib=m.id).count()
        data += [
            [
                m.name,
                m.last_updated,
                ec,
                ", ".join(m.depends_on),
                ", ".join(x.name for x in m.depended_by),
            ]
        ]
    return self.from_dataset(
        title=self.title,
        columns=[
            TableColumn("MIB", total_label="Total:"),
            TableColumn("Last Updated", format="date"),
            TableColumn("Entries", align="right", format="integer",
                        total="sum"),
            TableColumn("Depends on"),
            TableColumn("Used by"),
        ],
        data=data,
        enumerate=True,
    )
def get_data(self, **kwargs): return self.from_query( title="IPAM Summary", columns=[ "VRF", "RD", TableColumn("Prefixes", align="right", format="integer", total="sum"), TableColumn("Addresses", align="right", format="integer", total="sum"), ], query=""" SELECT vrf.name, vrf.rd, (SELECT COUNT(*) FROM ip_prefix WHERE vrf_id=vrf.id) AS prefixes, (SELECT COUNT(*) FROM ip_address WHERE vrf_id=vrf.id) AS addresses FROM ip_vrf vrf ORDER BY 1 """, enumerate=True, )
def get_data(self, request):
    def ppower(prefix):
        # /24-equivalents covered by the prefix: 2 ** (24 - mask)
        m = int(prefix.split("/")[1])
        if m <= powermask:
            return 2 ** (powermask - m)
        return 0

    powermask = 24
    r = []  # (description, asn, import filter, cone power, uniq power)
    peers = {}  # peer id -> peer
    cone_powers = {}  # peer id -> power
    uniq_powers = {}  # peer id -> power
    prefixes = {}  # prefix -> set(peer ids)
    for p in Peer.objects.filter(status="A").exclude(import_filter="ANY"):
        peers[p.id] = p
        cone_powers[p.id] = 0
        for cp in WhoisCache.resolve_as_set_prefixes(p.import_filter,
                                                     optimize=True):
            # Get powers
            cone_powers[p.id] += ppower(cp)
            # Assign to prefixes
            for i in IP.prefix(cp).iter_cover(powermask):
                pfx = i.prefix
                try:
                    prefixes[pfx].add(p.id)
                except KeyError:
                    prefixes[pfx] = set([p.id])
    # Calculate unique powers
    for pfx in prefixes:
        pfx_peers = prefixes[pfx]
        if len(pfx_peers) == 1:
            # Unique
            peer = list(pfx_peers)[0]
            try:
                uniq_powers[peer] += 1
            except KeyError:
                uniq_powers[peer] = 1
    # Build result
    for peer_id in peers:
        p = peers[peer_id]
        r += [(p.description, "AS%d" % p.remote_asn, p.import_filter,
               cone_powers.get(peer_id, 0), uniq_powers.get(peer_id, 0))]
    r = sorted(r, key=lambda x: -x[4])
    return self.from_dataset(
        title=self.title,
        columns=[
            "Peer", "ASN", "Import Filter",
            TableColumn("Cone Power", format="numeric", align="right"),
            TableColumn("Uniq. Cone Power", format="numeric",
                        align="right"),
        ],
        data=r,
    )
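# A minimal standalone sketch of the prefix-power arithmetic used above,
# assuming the same convention: a prefix with mask m <= 24 covers
# 2 ** (24 - m) /24 blocks, and anything more specific than /24 counts
# as zero. The helper name `prefix_power` is illustrative, not part of
# the report code.
def prefix_power(prefix, powermask=24):
    mask = int(prefix.split("/")[1])
    if mask <= powermask:
        return 2 ** (powermask - mask)
    return 0

assert prefix_power("10.0.0.0/8") == 65536    # a /8 spans 2**16 /24 blocks
assert prefix_power("192.168.0.0/24") == 1
assert prefix_power("192.168.0.0/30") == 0    # more specific than /24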
def get_data(self, days, **kwargs):
    return self.from_query(
        title=self.title,
        columns=[
            "Domain",
            TableColumn("Expired", format="bool"),
            TableColumn("Paid Till", format="date"),
        ],
        query="""
            SELECT name, paid_till <= 'now'::date, paid_till
            FROM dns_dnszone
            WHERE paid_till IS NOT NULL
                AND 'now'::date >= (paid_till - '%d days'::interval)
            ORDER BY paid_till
        """ % days)
def get_data(self, duration, **kwargs):
    now = datetime.datetime.now()
    d = datetime.timedelta(seconds=int(duration))
    b = now - d
    outages = defaultdict(list)
    otime = defaultdict(int)
    q = Q(start__gte=b) | Q(stop__gte=b) | Q(stop__exists=False)
    for o in Outage.objects.filter(q):
        start = max(o.start, b)
        stop = o.stop if o.stop else now
        outages[o.object] += [o]
        # Accumulate downtime across all outages of the object
        otime[o.object] += total_seconds(stop - start)
    td = total_seconds(d)
    # Load managed objects in chunks
    mos = list(otime)
    chunk = 500
    mo = {}
    while mos:
        for o in ManagedObject.objects.filter(id__in=mos[:chunk]):
            mo[o.id] = o
        mos = mos[chunk:]
    r = []
    for o in sorted(otime, key=lambda x: -otime[x]):
        m = mo.get(o)
        if not m:
            continue  # Hanging outage
        dt = otime[o]
        downtime = "%02d:%02d:%02d" % ((dt // 3600) % 24,
                                       (dt // 60) % 60, dt % 60)
        if dt >= 86400:
            downtime = "%dd %s" % (dt // 86400, downtime)
        r += [(m.name, m.profile_name, m.platform, m.is_managed,
               m.get_status(), downtime, float(td - dt) * 100 / td,
               len(outages[o]))]
    return self.from_dataset(
        title=self.title,
        columns=[
            "Object", "Profile", "Platform",
            TableColumn("Managed", format="bool"),
            TableColumn("Status", format="bool"),
            TableColumn("Downtime", align="right"),
            TableColumn("Availability", align="right", format="percent"),
            TableColumn("Downs", align="right", format="integer"),
        ],
        data=r,
        enumerate=True,
    )
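# A standalone sketch of the downtime/availability arithmetic above,
# assuming dt and td are plain second counts (names are illustrative).
def format_downtime(dt):
    s = "%02d:%02d:%02d" % ((dt // 3600) % 24, (dt // 60) % 60, dt % 60)
    if dt >= 86400:
        s = "%dd %s" % (dt // 86400, s)
    return s

assert format_downtime(3661) == "01:01:01"
assert format_downtime(90061) == "1d 01:01:01"
# Availability over a window of td seconds with dt seconds of downtime:
assert float(86400 - 864) * 100 / 86400 == 99.0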
def get_data(self, **kwargs):
    # profile -> (syslog, snmp, other)
    r = {p: [0, 0, 0] for p in profile_loader.iter_profiles()}
    for rule in EventClassificationRule.objects.all():
        profile = None
        source = None
        for p in rule.patterns:
            if p.key_re in ("^profile$", "profile"):
                profile = p.value_re
            elif p.key_re in ("^source$", "source"):
                source = p.value_re
            if profile and source:
                break
        for p in r:
            if not profile or re.search(profile, p):
                d = r[p]
                if source in ("syslog", "^syslog$"):
                    d[0] += 1
                elif source in ("SNMP Trap", "^SNMP Trap$"):
                    d[1] += 1
                else:
                    d[2] += 1
    # Build data
    data = [(p, v[0], v[1], v[2], v[0] + v[1] + v[2])
            for p, v in six.iteritems(r)]
    data = sorted(data, key=lambda x: -x[4])
    return self.from_dataset(
        title=self.title,
        columns=[
            "Profile",
            TableColumn("Syslog", align="right", format="integer",
                        total="sum"),
            TableColumn("SNMP Traps", align="right", format="integer",
                        total="sum"),
            TableColumn("Other", align="right", format="integer",
                        total="sum"),
            TableColumn("Total", align="right", format="integer",
                        total="sum"),
        ],
        data=data,
    )
def reduce_switchport(task):
    from noc.lib.app.simplereport import Report, TableSection, \
        SectionRow, TableColumn
    from noc.lib.text import list_to_ranges

    # Prepare data
    data = []
    for mt in task.maptask_set.filter(status="C"):
        data += [SectionRow("%s (%s)" % (
            mt.managed_object.name, mt.managed_object.profile_name))]
        for r in mt.script_result:
            data += [[
                r["interface"],
                r.get("description", ""),
                r["status"],
                r.get("untagged", ""),
                list_to_ranges(r.get("tagged", [])),
                ", ".join(r.get("members", [])),
            ]]
    # Prepare report
    r = Report()
    r.append_section(TableSection(
        name="",
        columns=[
            "Interface", "Description",
            TableColumn("Status", format="bool"),
            "Untagged", "Tagged", "Members",
        ],
        data=data,
    ))
    return r
def get_data(self, **kwargs):
    data = {}  # (vendor, part_no) -> [description, count]
    for c in UnknownModel._get_collection().find():
        vendor = c["vendor"]
        if isinstance(c["vendor"], list):
            # Fix for bad vendor code in DB
            vendor = c["vendor"][0]
        k = (vendor, c["part_no"])
        if k in data:
            data[k][1] += 1
        else:
            data[k] = [c["description"], 1]
    data = sorted(((k[0], k[1], data[k][0], data[k][1]) for k in data),
                  key=lambda x: -x[3])
    return self.from_dataset(
        title=self.title,
        columns=[
            "Vendor", "Part No", "Description",
            TableColumn("Count", format="numeric", align="right",
                        total="sum"),
        ],
        data=data,
    )
def get_data(self, **kwargs):
    c = EventClass.objects.filter(name="Unknown | SNMP Trap").first()
    # Rework using aggregate: count the trap OIDs found in event
    # variables and check whether they are recognized
    pipeline = [
        {"$match": {"event_class": c.id}},
        {"$project": {"vars": 1}},
        {"$group": {"_id": "$vars.trap_oid", "count": {"$sum": 1}}},
    ]
    oids = ActiveEvent._get_collection().aggregate(pipeline)
    d = [(e["_id"], MIB.get_name(e["_id"]), e["count"]) for e in oids]
    data = [(o, n, cnt) for o, n, cnt in d
            if self.rx_unclassified.search(n)]
    return self.from_dataset(
        title=self.title,
        columns=[
            "OID", "Name",
            TableColumn("Count", format="integer", align="right",
                        total="sum"),
        ],
        data=data,
    )
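# The pipeline above groups active events by vars.trap_oid with a $sum
# counter. A client-side equivalent of that grouping, assuming events
# are plain dicts (all names here are illustrative):
from collections import Counter

def count_trap_oids(events):
    return Counter(e["vars"]["trap_oid"] for e in events)

events = [
    {"vars": {"trap_oid": "1.3.6.1.6.3.1.1.5.3"}},
    {"vars": {"trap_oid": "1.3.6.1.6.3.1.1.5.3"}},
    {"vars": {"trap_oid": "1.3.6.1.6.3.1.1.5.4"}},
]
assert count_trap_oids(events)["1.3.6.1.6.3.1.1.5.3"] == 2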
def get_data(self, vrf, afi, prefix, **kwargs):
    def get_row(p):
        r = [p.prefix, p.state.name, smart_text(p.vc) if p.vc else ""]
        for f in cf:
            v = getattr(p, f.name)
            r += [v if v is not None else ""]
        r += [p.description, p]
        return r

    cf = CustomField.table_fields("ip_prefix")
    cfn = {f.name: f for f in cf}
    # Prepare columns
    columns = ["Prefix", "State", "VC"]
    for f in cf:
        columns += [f.label]
    columns += ["Description", TableColumn(_("Tags"), format="tags")]
    # Prepare query
    q = Q()
    for k in kwargs:
        v = kwargs[k]
        if k in cfn and v is not None and v != "":
            q &= Q(**{str(k): v})
    return self.from_dataset(
        title=_(
            "Allocated blocks in VRF %(vrf)s (IPv%(afi)s), %(prefix)s"
            % {"vrf": vrf.name, "afi": afi, "prefix": prefix.prefix}
        ),
        columns=columns,
        data=[get_row(p)
              for p in prefix.children_set.filter(q).order_by("prefix")],
        enumerate=True,
    )
def get_data(self, request, repo="config", days=1, **kwargs):
    data = []
    baseline = datetime.datetime.now() - datetime.timedelta(days=days)
    if repo == "config":
        mos = ManagedObject.objects.filter()
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(
                    request.user))
        mos = dict(mos.values_list("id", "name"))
        config_db = get_db()["noc.gridvcs.config.files"].with_options(
            read_preference=ReadPreference.SECONDARY_PREFERRED)
        pipeline = [
            {"$match": {"ts": {"$gte": baseline}}},
            {"$group": {"_id": "$object", "last_ts": {"$max": "$ts"}}},
            {"$sort": {"_id": 1}},
        ]
        for value in config_db.aggregate(pipeline):
            if value["_id"] not in mos:
                continue
            data += [(mos[value["_id"]], value["last_ts"])]
    else:
        oc = Object.get_object_class(repo)
        data = [(o, o.last_modified)
                for o in oc.objects.filter(
                    last_modified__gte=baseline
                ).order_by("-last_modified")]
    return self.from_dataset(
        title="%s: %s in %d days" % (self.title, repo, days),
        columns=[
            "Object",
            TableColumn(_("Last Changed"), format="datetime"),
        ],
        data=data,
        enumerate=True,
    )
def get_data(self, report_type, **kwargs): if report_type == "class": # Summary by class columns = ["Event Class"] data = self.get_by_event_class() elif report_type == "object": # Summary by object columns = ["Managed Object"] data = self.get_by_object() elif report_type == "profile": # Summary by profile columns = ["Profile"] data = self.get_by_profile() elif report_type == "status": # Summary by event status columns = ["Status"] data = self.get_by_status() else: raise Exception("Invalid report type: %s" % report_type) for r, t in report_types: if r == report_type: title = self.title + ": " + t break columns += [ TableColumn("Quantity", align="right", total="sum", format="integer") ] return self.from_dataset(title=title, columns=columns, data=data, enumerate=True)
def get_data(self, vrf, afi, prefix, **kwargs):
    def get_row(p, level=0):
        s = "--" * level
        r = [s + p.prefix, p.state.name,
             smart_text(p.vc) if p.vc else ""]
        for f in cf:
            v = getattr(p, f.name)
            r += [v if v is not None else ""]
        r += [p.description, p]
        return r

    def get_info(prefix, level=0):
        data = [get_row(prefix, level)]
        for c in prefix.children_set.order_by("prefix"):
            data += get_info(c, level + 1)
        return data

    cf = CustomField.table_fields("ip_prefix")
    # Prepare columns
    columns = ["Prefix", "State", "VC"]
    for f in cf:
        columns += [f.label]
    columns += ["Description", TableColumn(_("Tags"), format="tags")]
    data = get_info(prefix)
    return self.from_dataset(
        title=_(
            "All allocated blocks in VRF %(vrf)s (IPv%(afi)s), %(prefix)s"
            % {"vrf": vrf.name, "afi": afi, "prefix": prefix.prefix}
        ),
        columns=columns,
        data=data,
        enumerate=True,
    )
def get_data(self, **kwargs):
    c = EventClass.objects.filter(name="Unknown | SNMP Trap").first()
    pipeline = [
        {"$match": {"event_class": c.id}},
        {"$project": {"vars": 1}},
        {"$group": {"_id": "$vars.trap_oid", "count": {"$sum": 1}}},
    ]
    oids = ActiveEvent._get_collection().aggregate(pipeline)
    data = [(e["_id"], MIB.get_name(e["_id"]), e["count"]) for e in oids]
    data = sorted(data, key=lambda x: -x[2])
    return self.from_dataset(
        title=self.title,
        columns=[
            "OID", "Name",
            TableColumn("Count", format="integer", align="right",
                        total="sum"),
        ],
        data=data,
    )
def get_data(self, request, **kwargs):
    def get_row(a):
        r = [
            a.vrf.name,
            a.prefix.prefix,
            a.address,
            a.state.name,
            smart_text(a.fqdn) if a.fqdn else "",
        ]
        for f in cf:
            v = getattr(a, f.name)
            r += [v if v is not None else ""]
        r += [a.description, a]
        return r

    def get_or_none(classmodel, **kwargs):
        # filter() never raises DoesNotExist; an empty queryset is
        # returned when nothing matches
        try:
            return classmodel.objects.filter(**kwargs)
        except classmodel.DoesNotExist:
            return None

    q = {}
    for k in kwargs:
        v = kwargs[k]
        if v:
            if k in ["description", "fqdn", "name"]:
                q[k + "__icontains"] = v
            elif k == "prefix":
                q[k + "__in"] = get_or_none(Prefix, prefix=v)
            elif k == "managed_object":
                q[k + "__in"] = get_or_none(ManagedObject, name=v)
            else:
                q[k] = v
    columns = ["VRF", "Prefix", "Address", "State", "FQDN"]
    cf = CustomField.table_fields("ip_address")
    for f in cf:
        if f.type == "bool":
            columns += [TableColumn(f.label, format="bool")]
        else:
            columns += [f.label]
    columns += ["Description", TableColumn(_("Tags"), format="tags")]
    data = [
        get_row(a)
        for a in Address.objects.filter(**q)
        .order_by("vrf__name", "address")
        .select_related()
    ]
    return self.from_dataset(title=self.title, columns=columns, data=data)
def get_data(self, **kwargs):
    from django.db import connection

    data = []
    last_vrf = None
    c = connection.cursor()
    c.execute(self.QUERY)
    for vrf, rd, afi, prefix, description, used in c:
        if last_vrf != vrf:
            data += [SectionRow("%s (%s)" % (vrf, rd))]
            last_vrf = vrf
        p = IP.prefix(prefix)
        if afi == "4":
            total = p.size
            if p.mask < 31 and total - used >= 2:
                # Exclude network and broadcast addresses
                total = p.size - 2
            free = total - used
            percent = used * 100 / total
        elif afi == "6":
            if p.mask >= 96:
                total = 2 ** (128 - p.mask)
                free = total - used
                percent = used * 100 / total
            else:
                total = "-"
                free = "-"
                percent = "-"
        data += [[prefix, description, used, free, total, percent]]
    return self.from_dataset(
        title=self.title,
        columns=[
            "Prefix", "Description",
            TableColumn("IP Used", align="right", format="numeric"),
            TableColumn("IP Free", align="right", format="numeric"),
            TableColumn("IP Total", align="right", format="numeric"),
            TableColumn("% Used", align="right", format="percent"),
        ],
        data=data,
    )
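# A worked example of the IPv4 usable-address rule applied above: a
# prefix of mask m holds 2 ** (32 - m) addresses, and for masks shorter
# than /31 the network and broadcast addresses are excluded.
mask = 24
total = 2 ** (32 - mask)           # 256 addresses in a /24
usable = total - 2                 # 254 once network/broadcast excluded
used = 100
assert usable - used == 154        # free
assert used * 100 // usable == 39  # integer percent used, as in the report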
def get_data(self, **kwargs): return self.from_query( title=self.title, columns=[ "Table", "Tablespace", TableColumn("Pages", align="right", format="integer", total="sum"), TableColumn("Records", align="right", format="integer", total="sum"), TableColumn("Size", align="right", format="size", total="sum"), ], query=""" SELECT c.relname,t.spcname,c.relpages,c.reltuples,c.relpages*8192 FROM pg_class c LEFT JOIN pg_tablespace t ON (t.oid=c.reltablespace) WHERE c.relkind='r' AND c.relname NOT LIKE 'pg_%%' AND c.relname NOT LIKE 'sql_%%' ORDER BY c.relpages DESC,c.reltuples DESC """, )
def get_data(self, **kwargs):
    def get_profile_scripts(p):
        return ", ".join(sorted(p.scripts.keys()))

    r = sorted(profile_registry.classes.items(), key=lambda x: x[0])
    return self.from_dataset(
        title=self.title,
        columns=[
            "Vendor", "OS",
            TableColumn("Telnet", format="bool"),
            TableColumn("SSH", format="bool"),
            TableColumn("HTTP", format="bool"),
            TableColumn("VC Provisioning", format="bool"),
            "Scripts",
        ],
        data=[
            x.split(".") + [
                TELNET in c.supported_schemes,
                SSH in c.supported_schemes,
                HTTP in c.supported_schemes,
                "sync_vlans" in c.scripts,
                get_profile_scripts(c),
            ]
            for x, c in r
        ],
        enumerate=True,
    )
def get_data(self, vrf, afi, prefix, **kwargs):
    p = IP.prefix(prefix.prefix)
    allocated = [IP.prefix(a.prefix) for a in prefix.children_set.all()]
    free = list(p.iter_free(allocated))
    data = []  # Stays empty for non-IPv4 address families
    if afi == "4":
        allocated_30 = [a for a in allocated if a.mask == 30]
        allocated_size = sum(a.size for a in allocated)
        allocated_30_size = sum(a.size for a in allocated_30)
        free_size = sum(a.size for a in free)
        total = p.size
        data = [
            ("Allocated addresses", allocated_size,
             float(allocated_size) * 100 / float(total)),
            (".... in /30", allocated_30_size,
             float(allocated_30_size) * 100 / float(total)),
            ("Free addresses", free_size,
             float(free_size) * 100 / float(total)),
            ("Total addresses", total, 100.0),
        ]
        a_s = len(allocated)
        if a_s:
            avg_allocated_size = allocated_size / a_s
            avg_allocated_mask = 32 - int(
                math.ceil(math.log(avg_allocated_size, 2)))
            data += [
                ("Average allocated block", avg_allocated_size, ""),
                ("Average allocated mask", avg_allocated_mask, ""),
            ]
    return self.from_dataset(
        title=_("Summary for VRF %(vrf)s (IPv%(afi)s): %(prefix)s") % {
            "vrf": vrf.name,
            "afi": afi,
            "prefix": p.prefix,
        },
        columns=[
            "",
            TableColumn(_("Size"), format="numeric", align="right"),
            TableColumn(_("%"), format="percent", align="right"),
        ],
        data=data,
    )
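# The "average allocated mask" above inverts the size formula: a block
# of size s needs mask 32 - ceil(log2(s)). A quick check of that
# arithmetic (the helper name is illustrative):
import math

def avg_mask(avg_size):
    return 32 - int(math.ceil(math.log(avg_size, 2)))

assert avg_mask(256) == 24   # a /24 holds 256 addresses
assert avg_mask(300) == 23   # 300 addresses need at least a /23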
def get_data(self, **kwargs):
    c = EventClass.objects.filter(name="Unknown | SNMP Trap").first()
    oids = ActiveEvent.objects.filter(event_class=c.id).exec_js(self.c_f)
    d = [(o, MIB.get_name(o), cnt) for o, cnt in oids.items()]
    data = [(o, n, cnt) for o, n, cnt in d
            if self.rx_unclassified.search(n)]
    return self.from_dataset(
        title=self.title,
        columns=[
            "OID", "Name",
            TableColumn("Count", format="integer", align="right",
                        total="sum"),
        ],
        data=data,
    )
def get_data(self, **kwargs):
    return self.from_query(
        title=self.title,
        columns=[
            "NS",
            TableColumn("Master", format="integer", align="right"),
            TableColumn("Slave", format="integer", align="right"),
        ],
        query="""
            SELECT ns.name,
                (
                    SELECT COUNT(*)
                    FROM dns_dnszoneprofile_masters pm
                        JOIN dns_dnszone z
                            ON (z.profile_id = pm.dnszoneprofile_id)
                    WHERE pm.dnsserver_id = ns.id
                        AND z.is_auto_generated
                ) AS masters,
                (
                    SELECT COUNT(*)
                    FROM dns_dnszoneprofile_slaves ps
                        JOIN dns_dnszone z
                            ON (z.profile_id = ps.dnszoneprofile_id)
                    WHERE ps.dnsserver_id = ns.id
                        AND z.is_auto_generated
                ) AS slaves
            FROM dns_dnsserver ns
            ORDER BY 2 DESC, 1 ASC
        """)
def get_data(self, **kwargs):
    data = []
    for m in Map.objects.filter(is_active=True).order_by("name"):
        data += [SectionRow(m.name)]
        for zoom in range(MIN_ZOOM, MAX_ZOOM + 1):
            tcc = TileCache.objects.filter(map=m.id, zoom=zoom).count()
            mt = 2 ** (2 * zoom)
            data += [[zoom, tcc, mt, tcc * 100.0 / mt]]
    return self.from_dataset(
        title=self.title,
        columns=[
            TableColumn("Zoom", align="right"),
            TableColumn("Tiles", align="right", format="integer"),
            TableColumn("Max. Tiles", align="right", format="integer"),
            TableColumn("%", align="right", format="percent"),
        ],
        data=data,
    )
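# The maximum tile count per zoom level used above follows the standard
# slippy-map rule: a 2**z by 2**z grid, i.e. 2 ** (2 * z) tiles total.
for z, expected in [(0, 1), (1, 4), (2, 16), (10, 1048576)]:
    assert 2 ** (2 * z) == expected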
def get_data(self, **kwargs):
    a1 = self.get_availability(1)
    a7 = self.get_availability(7)
    a30 = self.get_availability(30)
    r = []
    for o in ManagedObject.objects.filter(is_managed=True):
        r += [(o.administrative_domain.name, o.name, o.profile_name,
               o.platform, o.address,
               a1.get(o.id, 100), a7.get(o.id, 100), a30.get(o.id, 100))]
    return self.from_dataset(
        title=self.title,
        columns=[
            "Adm. Domain", "Object", "Profile", "Platform", "IP",
            TableColumn("24h", align="right", format="percent"),
            TableColumn("7d", align="right", format="percent"),
            TableColumn("30d", align="right", format="percent"),
        ],
        data=r,
    )
def get_data(self, interval, **kwargs):
    interval = int(interval)
    ts = datetime.datetime.now() - datetime.timedelta(days=interval)
    pipeline = [
        {"$match": {"ts": {"$gte": ts}}},
        {"$group": {"_id": "$object", "count": {"$sum": 1}}},
        {"$sort": {"count": -1}},
    ]
    data = list(Reboot._get_collection().aggregate(pipeline))
    # Resolve managed object names in chunks of 500 ids
    ids = [x["_id"] for x in data]
    mo_names = {}  # mo id -> mo name
    cursor = connection.cursor()
    while ids:
        chunk = [str(x) for x in ids[:500]]
        ids = ids[500:]
        cursor.execute("""
            SELECT id, name
            FROM sa_managedobject
            WHERE id IN (%s)""" % ", ".join(chunk))
        mo_names.update(dict(cursor))
    data = [(mo_names.get(x["_id"], "---"), x["count"]) for x in data]
    return self.from_dataset(
        title=self.title,
        columns=[
            "Managed Object",
            TableColumn("Reboots", align="right", format="numeric",
                        total="sum"),
        ],
        data=data,
        enumerate=True,
    )
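# A minimal sketch of the chunked-lookup pattern above: resolving a
# large id list in fixed-size batches keeps each SQL IN (...) clause
# bounded. `fetch_names` stands in for the cursor round-trip and is
# illustrative, not part of the report code.
def resolve_in_chunks(ids, fetch_names, chunk=500):
    names = {}
    while ids:
        names.update(fetch_names(ids[:chunk]))
        ids = ids[chunk:]
    return names

fake_db = {1: "sw-01", 2: "sw-02", 3: "sw-03"}
resolved = resolve_in_chunks(
    [1, 2, 3],
    lambda batch: {i: fake_db[i] for i in batch if i in fake_db},
    chunk=2,
)
assert resolved == fake_db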
def get_data(self, request, **kwargs):
    columns, columns_descr = [], []
    r_map = [
        (_("All polling"), "2is1.6is1.7a2"),  # Is managed, object type defined
        (_("0"), "2is1.6is1.7a2.3hs0"),  # Has 0 links, object type defined
        (_("1"), "2is1.6is1.3hs2"),  # Has 1 link
        (_("2"), "2is1.6is1.3hs3"),  # Has 2 links
        (_("More than 3"), "2is1.6is1.3hs4"),  # Has 3 or more links
    ]
    for x, y in r_map:
        columns += [y]
        columns_descr += [x]
    report = ReportModelFilter()
    result = report.proccessed(",".join(columns))
    summary = defaultdict(int)
    data = []
    # url = "/sa/reportstat/repstat_download/?report=%s"
    url = "/sa/reportobjectdetail/download/?" + "&".join([
        "o_format=xlsx",
        "columns=object_name,object_address,object_profile,"
        "object_status,profile_name,admin_domain,segment",
        "detail_stat=%s&pool=%s",
    ])
    for p in Pool.objects.filter().order_by("name"):
        m = []
        moss = set(
            ManagedObject.objects.filter(pool=p).values_list("id",
                                                             flat=True))
        for col in columns:
            m += [len(result[col.strip()].intersection(moss))]
            summary[col] += m[-1]
        data += [SectionRow(name=p.name)]
        data += [(x, y, self.calc_percent(x, y),
                  url % (columns[columns_descr.index(x)], p.name))
                 for x, y in zip(columns_descr, m)]
    return self.from_dataset(
        title=self.title,
        columns=[
            _("Links count"),
            _("MO Count"),
            _("Percent at All"),
            TableColumn(_("Detail"), format="url"),
        ],
        data=data,
    )
def get_data(self, report_type, **kwargs):
    if report_type == "profile":
        # By profile
        columns = ["Profile"]
        query = """SELECT profile_name, COUNT(*)
            FROM sa_managedobject
            GROUP BY 1
            ORDER BY 2 DESC"""
    elif report_type == "domain":
        # By administrative domain
        columns = ["Administrative Domain"]
        query = """SELECT a.name, COUNT(*)
            FROM sa_managedobject o
                JOIN sa_administrativedomain a
                    ON (o.administrative_domain_id = a.id)
            GROUP BY 1
            ORDER BY 2 DESC"""
    elif report_type == "domain-profile":
        # By administrative domain and profile
        columns = ["Administrative Domain", "Profile"]
        query = """SELECT d.name, profile_name, COUNT(*)
            FROM sa_managedobject o
                JOIN sa_administrativedomain d
                    ON (o.administrative_domain_id = d.id)
            GROUP BY 1, 2"""
    elif report_type == "tag":
        # By tag
        columns = ["Tag"]
        query = """SELECT t.name, COUNT(*)
            FROM tagging_tag t
                JOIN tagging_taggeditem ti ON (t.id = ti.tag_id)
                JOIN django_content_type c ON (ti.content_type_id = c.id)
            WHERE c.app_label = 'sa'
                AND c.model = 'managedobject'
            GROUP BY 1
            ORDER BY 1 DESC"""
    else:
        raise Exception("Invalid report type: %s" % report_type)
    # Fall back to the bare title when report_type has no description
    title = self.title
    for r, t in report_types:
        if r == report_type:
            title = self.title + ": " + t
            break
    columns += [
        TableColumn("Quantity", align="right", total="sum",
                    format="integer")
    ]
    return self.from_query(title=title, columns=columns, query=query,
                           enumerate=True)
def get_data(self, **kwargs):
    db = get_db()
    data = db.noc.links.group(
        key={"discovery_method": True},
        condition={},
        initial={"count": 0},
        reduce="function(doc, prev) { prev.count += 1; }",
    )
    data = [(r["discovery_method"], int(r["count"])) for r in data]
    data = sorted(data, key=lambda x: -x[1])
    return self.from_dataset(
        title=self.title,
        columns=[
            "Method",
            TableColumn("Count", align="right", format="integer",
                        total="sum", total_label="Total"),
        ],
        data=data,
    )
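# group() is deprecated in MongoDB (removed in 4.2); the same tally can
# be expressed as an aggregation pipeline. A sketch of the equivalent
# $group stage, assuming the same noc.links collection:
pipeline = [
    {"$group": {"_id": "$discovery_method", "count": {"$sum": 1}}},
    {"$sort": {"count": -1}},
]
# data = [(r["_id"], r["count"]) for r in db.noc.links.aggregate(pipeline)]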