class VCPrefixes(Cache):
    """
    Cache: VC -> sorted list of IPv4/IPv6 prefixes configured on
    subinterfaces carrying the VC's l1 tag.
    """
    cache_id = "vc_vcprefixes"
    ttl = config.getint("cache", "vc_vcprefixes")

    @classmethod
    def get_key(cls, vc):
        # Accept either a VC instance or a raw id
        return vc.id if hasattr(vc, "id") else int(vc)

    @classmethod
    def find(cls, vc):
        """Collect prefixes for *vc* (instance or id)."""
        if not hasattr(vc, "id"):
            vc = VC.objects.get(id=int(vc))
        mo_ids = vc.vc_domain.managedobject_set.values_list("id", flat=True)
        v4_prefixes = set()
        v6_prefixes = set()
        # @todo: Exact match on vlan_ids
        qs = SubInterface.objects.filter(
            Q(managed_object__in=mo_ids) &
            Q(vlan_ids=vc.l1) &
            (Q(enabled_afi=["IPv4"]) | Q(enabled_afi=["IPv6"]))
        ).only("enabled_afi", "ipv4_addresses", "ipv6_addresses")
        for si in qs:
            if "IPv4" in si.enabled_afi:
                for a in si.ipv4_addresses:
                    v4_prefixes.add(IP.prefix(a).first)
            if "IPv6" in si.enabled_afi:
                for a in si.ipv6_addresses:
                    v6_prefixes.add(IP.prefix(a).first)
        # IPv4 prefixes first, then IPv6, each group sorted
        result = [str(x.first) for x in sorted(v4_prefixes)]
        result.extend(str(x.first) for x in sorted(v6_prefixes))
        return result
def execute(self):
    """
    Check DNS zone paid-till dates and send notifications for zones
    which are already expired or will expire within the configured window.
    """
    def format_table(rows):
        # Render (name, date) pairs as an aligned two-column table
        width = max(len(name) for name, _ in rows)
        line = "%%%ds | %%s" % width
        out = [line % ("Domain", "Expiration date")]
        out += [line % (name, DateFormat(till).format(date_format))
                for name, till in rows]
        return "\n".join(out)
    #
    from noc.main.models import SystemNotification
    from noc.dns.models import DNSZone
    date_format = config.get("main", "date_format")
    now = datetime.date.today()
    ## Check expired soon domains
    days = config.getint("dns", "warn_before_expired_days")
    window = [now + datetime.timedelta(days=1),
              now + datetime.timedelta(days=days)]
    soon_expired = [
        (z.name, z.paid_till)
        for z in DNSZone.objects.filter(
            paid_till__isnull=False,
            paid_till__range=window).order_by("paid_till")
    ]
    if soon_expired:
        SystemNotification.notify(
            "dns.domain_expiration_warning",
            subject="%d domains to be expired in %d days" % (
                len(soon_expired), days),
            body="Following domains are to be expired in %d days:\n" % days +
                 format_table(soon_expired)
        )
    ## Check expired domains
    expired = [
        (z.name, z.paid_till)
        for z in DNSZone.objects.filter(
            paid_till__isnull=False,
            paid_till__lte=now).order_by("paid_till")
    ]
    if expired:
        SystemNotification.notify(
            "dns.domain_expired",
            subject="%d domains are expired" % len(expired),
            body="Following domains are expired:\n" + format_table(expired)
        )
    return True
def __init__(self, *args, **kwargs):
    """
    Set up the application: parse enabled UI themes from config
    and read login-restriction settings.
    """
    ExtApplication.__init__(self, *args, **kwargs)
    #
    # Parse themes
    self.default_theme = config.get("customization", "default_theme")
    self.themes = {}  # id -> {name: , css:}
    for opt in config.options("themes"):
        if not opt.endswith(".name"):
            continue
        theme_id = opt[:-5]
        name_key = "%s.name" % theme_id
        enabled_key = "%s.enabled" % theme_id
        # Theme is registered only when both options exist and it is enabled
        enabled = (config.has_option("themes", name_key) and
                   config.has_option("themes", enabled_key) and
                   config.getboolean("themes", enabled_key))
        if enabled:
            self.themes[theme_id] = {
                "id": theme_id,
                "name": config.get("themes", name_key).strip(),
                "css": "/static/pkg/extjs/packages/ext-theme-%s/build/resources/ext-theme-%s-all.css" % (theme_id, theme_id),
                "js": "/static/pkg/extjs/packages/ext-theme-%s/build/ext-theme-%s.js" % (theme_id, theme_id)
            }
    # Login restrictions
    self.restrict_to_group = self.get_group(
        config.get("authentication", "restrict_to_group"))
    self.single_session_group = self.get_group(
        config.get("authentication", "single_session_group"))
    self.mutual_exclusive_group = self.get_group(
        config.get("authentication", "mutual_exclusive_group"))
    self.idle_timeout = config.getint("authentication", "idle_timeout")
def resolve_as_set_prefixes(cls, as_set, optimize=None):
    """
    Resolve *as_set* into a collection of prefixes.

    :param optimize: True - force prefix-list optimization,
        False - never optimize, None - decide from config and
        the number of resolved prefixes.
    """
    prefixes = cls._resolve_as_set_prefixes(as_set)
    want_optimize = config.getboolean("peer", "prefix_list_optimization")
    threshold = config.getint("peer", "prefix_list_optimization_threshold")
    # Auto mode: optimize only when enabled and the list is large enough
    auto = (optimize is None and want_optimize and
            len(prefixes) >= threshold)
    if optimize or auto:
        return set(optimize_prefix_list(prefixes))
    return prefixes
def resolve_as_set_prefixes_maxlen(cls, as_set, optimize=None):
    """
    Generate prefixes for as-sets.
    Returns a list of (prefix, min length, max length)
    """
    prefixes = cls._resolve_as_set_prefixes(as_set)
    pl_optimize = config.getboolean("peer", "prefix_list_optimization")
    threshold = config.getint("peer", "prefix_list_optimization_threshold")
    max_len = config.getint("peer", "max_prefix_length")
    use_optimization = optimize or (
        optimize is None and pl_optimize and len(prefixes) >= threshold)
    if use_optimization:
        # Optimization is enabled: collapse ranges, keep per-prefix max length
        return [(p.prefix, p.mask, m)
                for p, m in optimize_prefix_list_maxlen(prefixes)
                if p.mask <= max_len]
    # Optimization is disabled: emit each prefix with min == max length
    parsed = sorted(IP.prefix(p) for p in prefixes)
    return [(x.prefix, x.mask, x.mask) for x in parsed if x.mask <= max_len]
class ManagedObjectSelectorObjectsIds(Cache):
    """
    Managed Object's selector -> list of object ids
    """
    cache_id = "sa_managedobjectselector_object_ids"
    ttl = config.getint("cache", "sa_managedobjectselector_object_ids")

    @classmethod
    def get_key(cls, selector):
        # Selector instance and raw id are both accepted
        return selector.id if hasattr(selector, "id") else int(selector)

    @classmethod
    def find(cls, selector):
        """Return the set of managed-object ids matched by *selector*."""
        if not hasattr(selector, "id"):
            # Normalize raw id to a selector instance
            selector = ManagedObjectSelector.objects.get(id=int(selector))
        ids = selector.managed_objects.values_list("id", flat=True)
        return set(ids)
class VCInterfacesCount(Cache):
    """
    Cache: VC -> number of subinterfaces bound to the VC's l1 tag
    (untagged, tagged or by vlan_ids).
    """
    cache_id = "vc_vcinterfacescount"
    ttl = config.getint("cache", "vc_vcinterfacescount")

    @classmethod
    def get_key(cls, vc):
        # Accept either a VC instance or a raw id
        return vc.id if hasattr(vc, "id") else int(vc)

    @classmethod
    def find(cls, vc):
        """Count matching subinterfaces for *vc* (instance or id)."""
        if not hasattr(vc, "id"):
            vc = VC.objects.get(id=int(vc))
        mo_ids = vc.vc_domain.managedobject_set.values_list("id", flat=True)
        vlan = vc.l1
        # Bridge subinterfaces carrying the tag, or any subinterface
        # listing it in vlan_ids
        tag_match = (Q(untagged_vlan=vlan, enabled_afi=["BRIDGE"]) |
                     Q(tagged_vlans=vlan, enabled_afi=["BRIDGE"]) |
                     Q(vlan_ids=vlan))
        return SubInterface.objects.filter(
            Q(managed_object__in=mo_ids) & tag_match).count()
class ProbeApplication(ExtDocApplication):
    """
    PMProbe application
    """
    title = "Probe"
    menu = "Setup | Probes"
    model = Probe
    query_fields = ["name"]
    # Refresh-loop budget: how often to sample the clock and the
    # wall-time limit (seconds) before giving up on refreshing
    REFRESH_CHUNK = config.getint("pm", "expired_refresh_chunk")
    REFRESH_TIMEOUT = config.getint("pm", "expired_refresh_timeout")

    @view(url="^(?P<name>[^/]+)/(?P<instance>\d+)/config/$",
          method=["GET"],
          validate={"last": DateTimeParameter(required=False)},
          access="config", api=True)
    def api_config(self, request, name, instance, last=None):
        """
        Get full probe configuration

        Returns configs for the given probe instance, refreshing expired
        ones first (time-bounded), wiping configs marked deleted
        (changed == expire), and honoring an optional *last* ISO
        timestamp to return only changed configs.
        """
        probe = self.get_object_or_404(Probe, name=name)
        # Only the probe's own user may fetch its configuration
        if not probe.user or request.user.id != probe.user.id:
            return self.response_forbidden()
        instance = int(instance)
        if instance >= probe.n_instances:
            return self.response_not_found("Invalid instance")
        probe_id = str(probe.id)
        now = datetime.datetime.now()
        # Refresh expired configs
        t0 = time.time()
        nr = 0
        dt = 0
        stopped = False
        for pc in ProbeConfig.objects.filter(probe_id=probe_id,
                                             instance_id=instance,
                                             expire__lt=now):
            pc.refresh()
            nr += 1
            if nr % self.REFRESH_CHUNK:
                # Check execution time
                # NOTE(review): this truthiness test runs the clock check on
                # every iteration EXCEPT exact multiples of REFRESH_CHUNK;
                # `nr % self.REFRESH_CHUNK == 0` (check once per chunk) was
                # probably intended -- confirm before changing
                dt = time.time() - t0
                if dt > self.REFRESH_TIMEOUT:
                    # Out of time budget: stop refreshing, serve what we have
                    self.logger.info(
                        "%d configs has been refreshed in %s seconds. Giving up",
                        nr, dt)
                    stopped = True
                    break
        if nr and not stopped:
            self.logger.info("%d configs has been refreshed in %s seconds.",
                             nr, dt)
        # Get configs
        q = {"probe_id": probe_id, "instance_id": instance}
        if last:
            # Accept ISO timestamps with or without fractional seconds
            fmt = "%Y-%m-%dT%H:%M:%S.%f" if "." in last else "%Y-%m-%dT%H:%M:%S"
            last = datetime.datetime.strptime(last, fmt)
            q["changed"] = {"$gte": last}
        # NOTE(review): this local `config` shadows the module-level
        # settings `config` used in the class body above
        config = [{
            "uuid": pc["uuid"],
            "handler": pc["handler"],
            "interval": pc["interval"],
            "metrics": [{
                "metric": m["metric"],
                "metric_type": m["metric_type"],
                "thresholds": m["thresholds"],
                "convert": m["convert"],
                "scale": m["scale"],
                "collectors": {
                    "policy": m["collectors"]["policy"],
                    "write_concern": m["collectors"]["write_concern"],
                    "collectors": [{
                        "proto": c["proto"],
                        "address": c["address"],
                        "port": c["port"]
                    } for c in m["collectors"]["collectors"]]
                }
            } for m in pc["metrics"]],
            "config": pc["config"],
            "managed_object": pc.get("managed_object", None),
            "changed": pc["changed"].isoformat(),
            "expire": pc["expire"].isoformat()
        } for pc in ProbeConfig._get_collection().find(q)]
        if config:
            expire = min(c["expire"] for c in config)
            # Wipe out deleted configs
            # (changed == expire marks a config as deleted)
            deleted = [
                c["uuid"] for c in config
                if c["changed"] == c["expire"]
            ]
            if deleted:
                ProbeConfig.objects.filter(uuid__in=deleted).delete()
        else:
            expire = None
        return {
            "now": now.isoformat(),
            "last": last.isoformat() if last else None,
            "expire": expire,
            "config": config
        }

    @view(url="^(?P<name>[^/]+)/(?P<instance>\d+)/feed/$",
          method=["POST"],
          validate={
              "thresholds": DictListParameter(
                  attrs={
                      "managed_object": IntParameter(),
                      "metric": StringParameter(),
                      "metric_type": StringParameter(),
                      "ts": IntParameter(),
                      "value": FloatParameter(),
                      "old_state": StringParameter(),
                      "new_state": StringParameter()
                  })
          },
          access="config", api=True)
    def api_fmfeed(self, request, name, instance, thresholds):
        """
        Feed threshold-crossing reports into the NewEvent collection
        as one unordered bulk insert.
        """
        if thresholds:
            cnt = itertools.count()
            batch = NewEvent._get_collection().initialize_unordered_bulk_op()
            for t in thresholds:
                # Sequence id: current unix time + 32-bit wrapping counter
                seq = struct.pack("!II", int(time.time()),
                                  cnt.next() & 0xFFFFFFFFL)
                batch.insert({
                    "timestamp": datetime.datetime.fromtimestamp(t["ts"]),
                    "managed_object": t["managed_object"],
                    "raw_vars": {
                        "source": "system",
                        "metric": t["metric"],
                        "metric_type": t["metric_type"],
                        "value": str(t["value"]),
                        "old_state": t["old_state"],
                        "new_state": t["new_state"]
                    },
                    "seq": Binary(seq)
                })
            # Write concern 0: fire-and-forget insert
            batch.execute(0)
class ReportForm(forms.Form):
    """Report parameters: look-ahead window in days."""
    # Default look-ahead comes from the dns.warn_before_expired_days
    # config option
    days = forms.IntegerField(
        initial=config.getint("dns", "warn_before_expired_days"))
import time import datetime ## NOC modules from noc.settings import config, IS_TEST from noc.gis.models import TileCache, Area from noc.gis.mapxml import map_to_xml from noc.gis.geo import xy_to_ll, ll_to_xy, TS, MIN_ZOOM, MAX_ZOOM ## Third-party modules try: import mapnik2 except ImportError, why: if not IS_TEST: raise ImportError(*why) # Render additional N tiles around areas PAD_TILES = config.getint("gis", "tilecache_padding") class TileWorker(object): def __init__(self, map, instance, queue, xml): self.map = map self.label = "%s::%d" % (map.name, instance) self.queue = queue self.log("Running TileWorker") self.xml = xml def render_tile(self, zoom, x, y): t0 = time.time() tl = "(zoom=%s x=%s y=%s)" % (zoom, x, y) self.log("Rendering tile %s" % tl) # Convert tile index to LatLong (EPSG:4326)
class SyncApplication(ExtDocApplication):
    """
    Sync application
    """
    title = "Sync"
    menu = "Setup | Sync"
    glyph = "refresh"
    model = Sync
    query_fields = ["name"]
    # Refresh-loop budget: how often to sample the clock and the
    # wall-time limit (seconds) before giving up on refreshing
    REFRESH_CHUNK = config.getint("sync", "expired_refresh_chunk")
    REFRESH_TIMEOUT = config.getint("sync", "expired_refresh_timeout")

    @view(url="^(?P<name>[^/]+)/(?P<instance>\d+)/config/$",
          method=["GET"],
          validate={"last": DateTimeParameter(required=False)},
          access="config", api=True)
    def api_config(self, request, name, instance, last=None):
        """
        Get full sync configuration

        Returns cached configs for the given sync instance, refreshing
        expired ones first (time-bounded), wiping configs marked deleted
        (changed == expire), and honoring an optional *last* ISO
        timestamp to return only changed configs.
        """
        sync = self.get_object_or_404(Sync, name=name)
        # Only the sync's own user may fetch its configuration
        if request.user.id != sync.user.id:
            return self.response_forbidden()
        instance = int(instance)
        if instance >= sync.n_instances:
            return self.response_not_found("Invalid instance")
        sync_id = str(sync.id)
        now = datetime.datetime.now()
        # Refresh expired configs
        t0 = time.time()
        nr = 0
        dt = 0
        stopped = False
        for sc in SyncCache.objects.filter(sync_id=sync_id,
                                           instance_id=instance,
                                           expire__lt=now):
            sc.refresh()
            nr += 1
            # Check execution time once per REFRESH_CHUNK refreshes
            # (fixed: the old `nr % REFRESH_CHUNK` truthiness test checked
            # on every iteration except chunk multiples)
            if not nr % self.REFRESH_CHUNK:
                dt = time.time() - t0
                if dt > self.REFRESH_TIMEOUT:
                    # Out of time budget: stop refreshing, serve what we have.
                    # Fixed: "Giving up" was previously logged after the loop
                    # even on normal completion (cf. ProbeApplication)
                    self.logger.info(
                        "%d configs has been refreshed in %s seconds. Giving up",
                        nr, dt)
                    stopped = True
                    break
        if nr and not stopped:
            dt = time.time() - t0
            self.logger.info("%d configs has been refreshed in %s seconds.",
                             nr, dt)
        # Get configs
        qs = SyncCache.objects.filter(sync_id=sync_id,
                                      instance_id=instance,
                                      expire__gt=now)
        if last:
            # Accept ISO timestamps with or without fractional seconds
            fmt = "%Y-%m-%dT%H:%M:%S.%f" if "." in last else "%Y-%m-%dT%H:%M:%S"
            last = datetime.datetime.strptime(last, fmt)
            qs = qs.filter(changed__gte=last)
        config = [{
            "uuid": sc.uuid,
            "type": sc.model_id,
            "data": sc.data,
            "changed": sc.changed.isoformat(),
            "expire": sc.expire.isoformat()
        } for sc in qs]
        if config:
            expire = min(c["expire"] for c in config)
            # Wipe out deleted configs
            # (changed == expire marks a config as deleted)
            deleted = [
                c["uuid"] for c in config
                if c["changed"] == c["expire"]
            ]
            if deleted:
                SyncCache.objects.filter(uuid__in=deleted).delete()
        else:
            expire = None
        return {
            "now": now.isoformat(),
            "last": last.isoformat() if last else None,
            "expire": expire,
            "config": config
        }