Example #1
 def lookup(self, oid):
     """
     Convert oid to symbolic name and vise versa
     :param oid:
     :return:
     """
     if self.rx_oid.match(oid):
         # oid -> name
         name = MIB.get_name(oid)
         oid = oid
     else:
         name = oid
         oid = MIB.get_oid(name)
     if oid and name:
         return {"status": True, "oid": oid, "name": name}
     return {"status": False}
Example #2
 def get_data(self, **kwargs):
     c = EventClass.objects.filter(name="Unknown | SNMP Trap").first()
     pipeline = [{
         "$match": {
             "event_class": c.id
         }
     }, {
         "$project": {
             "vars": 1
         }
     }, {
         "$group": {
             "_id": "$vars.trap_oid",
             "count": {
                 "$sum": 1
             }
         }
     }]
     oids = ActiveEvent._get_collection().aggregate(pipeline)
     data = [(e["_id"], MIB.get_name(e["_id"]), e["count"]) for e in oids]
     data = sorted(data, key=lambda x: -x[2])
     return self.from_dataset(title=self.title,
                              columns=[
                                  "OID", "Name",
                                  TableColumn("Count",
                                              format="integer",
                                              align="right",
                                              total="sum")
                              ],
                              data=data)
Example #3
 def get_data(self, **kwargs):
     c = EventClass.objects.filter(name="Unknown | SNMP Trap").first()
     # TODO: rework using aggregate(). The function counts the number of OIDs
     # in alarm variables and checks whether they are recognized
     pipeline = [{
         "$match": {
             "event_class": c.id
         }
     }, {
         "$project": {
             "vars": 1
         }
     }, {
         "$group": {
             "_id": "$vars.trap_oid",
             "count": {
                 "$sum": 1
             }
         }
     }]
     oids = ActiveEvent._get_collection().aggregate(pipeline)
     d = [(e["_id"], MIB.get_name(e["_id"]), e["count"]) for e in oids]
     print(d)
     data = [(o, n, c) for o, n, c in d if self.rx_unclassified.search(n)]
     return self.from_dataset(title=self.title,
                              columns=[
                                  "OID", "Name",
                                  TableColumn("Count",
                                              format="integer",
                                              align="right",
                                              total="sum")
                              ],
                              data=data)
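
The $group stage above simply counts events per trap OID before the unclassified names are filtered out. A tiny, self-contained equivalent in plain Python over invented sample documents:

    from collections import Counter

    # Invented sample events; only the vars.trap_oid field matters here
    events = [
        {"vars": {"trap_oid": "1.3.6.1.6.3.1.1.5.3"}},
        {"vars": {"trap_oid": "1.3.6.1.6.3.1.1.5.4"}},
        {"vars": {"trap_oid": "1.3.6.1.6.3.1.1.5.3"}},
    ]
    counts = Counter(e["vars"]["trap_oid"] for e in events)
    # Matches the aggregate() output shape {"_id": <trap_oid>, "count": <n>}
    print([{"_id": oid, "count": n} for oid, n in counts.most_common()])
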
Example #4
 def get_events(self, options):
     """
     Generator returning active events
     """
     c = ActiveEvent.objects.all()
     trap_oid = None
     syslog_re = None
     profile = options["profile"]
     if options["event"]:
         c = c.filter(id=ObjectId(options["event"]))
     if options["object"]:
         try:
             o = ManagedObject.objects.get(name=options["object"])
         except ManagedObject.DoesNotExist:
             self.die("Object not found: %s" % options["object"])
         c = c.filter(managed_object=o.id)
     if options["selector"]:
         try:
             s = ManagedObjectSelector.objects.get(name=options["selector"])
         except ManagedObjectSelector.DoesNotExist:
             self.die("Selector not found: %s" % options["selector"])
         c = c.filter(
             managed_object__in=[mo.id for mo in s.managed_objects])
     if options["class"]:
         o = EventClass.objects.filter(name=options["class"]).first()
         if not o:
             self.die("Event class not found: %s" % options["class"])
         c = c.filter(event_class=o.id)
     if options["trap"]:
         if is_oid(options["trap"]):
             trap_oid = options["trap"]
         else:
             trap_oid = MIB.get_oid(options["trap"])
             if trap_oid is None:
                 self.die("Cannot find OID for %s" % options["trap"])
         c = c.filter(raw_vars__source="SNMP Trap")
     if options["syslog"]:
         try:
             syslog_re = re.compile(options["syslog"], re.IGNORECASE)
         except Exception as e:
             self.die("Invalid RE: %s" % str(e))
         c = c.filter(raw_vars__source="syslog")
     for e in c:
         if profile:
             if not e.managed_object.profile == Profile[profile]:
                 continue
         if trap_oid:
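             # 1.3.6.1.6.3.1.1.4.1.0 is snmpTrapOID.0 (SNMPv2-MIB), the varbind identifying the trap type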
             if ("source" in e.raw_vars
                     and e.raw_vars["source"] == "SNMP Trap"
                     and "1.3.6.1.6.3.1.1.4.1.0" in e.raw_vars
                     and e.raw_vars["1.3.6.1.6.3.1.1.4.1.0"] == trap_oid):
                 yield e
         elif syslog_re:
             if ("source" in e.raw_vars and e.raw_vars["source"] == "syslog"
                     and "message" in e.raw_vars
                     and syslog_re.search(e.raw_vars["message"])):
                 yield e
         else:
             yield e
Example #5
 def handle(self, paths, profile, format, progress=False, *args, **options):
     assert profile_loader.get_profile(
         profile), "Invalid profile: %s" % profile
     t0 = time.time()
     ruleset = RuleSet()
     ruleset.load()
     self.print("Ruleset load in %.2fms" % ((time.time() - t0) * 1000))
     reader = getattr(self, "read_%s" % format, None)
     assert reader, "Invalid format %s" % format
     self.managed_object = ManagedObject(id=1,
                                         name="test",
                                         address="127.0.0.1",
                                         profile_name=profile)
     t0 = time.time()
     stats = defaultdict(int)
     total = 0
     for p in paths:
         if not os.path.isfile(p):
             continue
         for f in iter_open(p):
             for event in reader(f):
                 e_vars = event.raw_vars.copy()
                 if event.source == "SNMP Trap":
                     e_vars.update(MIB.resolve_vars(event.raw_vars))
                 rule, r_vars = ruleset.find_rule(event, e_vars)
                 stats[rule.event_class.name] += 1
                 total += 1
                 if progress and total % 1000 == 0:
                     self.print("%d records processed" % total)
     dt = time.time() - t0
     self.print("%d events processed in %.2fms (%.fevents/sec)" %
                (total, dt * 1000, float(total) / dt))
     if stats:
         # Prepare statistics
         s_data = sorted([(k, stats[k]) for k in stats],
                         key=operator.itemgetter(1),
                         reverse=True)
         s_total = sum(stats[k] for k in stats if not self.is_ignored(k))
         data = [["Events", "%", "Event class"]]
         for ecls, qty in s_data:
             data += [[
                 str(qty),
                 "%3.2f%%" % (float(stats[ecls] * 100) / float(total)), ecls
             ]]
         # Calculate classification quality
         data += [[
             "",
             "%3.2f%%" % (float(s_total * 100) / total),
             "Classification Quality"
         ]]
         # Ruleset hit rate
         rs_rate = float(metrics["rules_checked"].value) / float(total)
         data += [["", "%.2f" % rs_rate, "Rule checks per event"]]
         # Dump table
         self.print("Event classes summary:")
         self.print(format_table([4, 6, 10], data))
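
A quick worked example of the two summary figures computed above, with made-up numbers:

    # Made-up figures for illustration only
    total = 2000          # events processed
    s_total = 1840        # events in non-ignored classes
    rules_checked = 9000  # metrics["rules_checked"].value

    print("%3.2f%%" % (float(s_total * 100) / total))      # 92.00% classification quality
    print("%.2f" % (float(rules_checked) / float(total)))  # 4.50 rule checks per event
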
Example #6
def test_event(ruleset, event):
    e, expected_class, expected_vars = event
    e_vars = e.raw_vars.copy()
    if e.source == "SNMP Trap":
        e_vars.update(MIB.resolve_vars(e.raw_vars))
    rule, r_vars = ruleset.find_rule(e, e_vars)
    assert rule is not None, "Cannot find matching rule"
    assert rule.event_class == expected_class, "Mismatched event class %s vs %s" % (
        rule.event_class.name, expected_class.name)
    ruleset.eval_vars(e, rule.event_class, e_vars)
    assert e_vars == expected_vars, "Mismatched vars"
Example #7
 def api_upload(self, request):
     left = {}  # name -> data
     for f in request.FILES:
         left[f] = request.FILES[f]
     errors = {}
     while len(left):
         n = len(left)
         for name in list(left):  # iterate over a copy; entries may be removed below
             with temporary_file(left[name].read()) as path:
                 try:
                     MIB.load(path)
                     del left[name]
                     if name in errors:
                         del errors[name]
                 except MIB.MIBRequiredException as x:
                     errors[name] = "%s requires MIBs %s" % (x.mib,
                                                             x.requires_mib)
         if len(left) == n:
             # Failed to upload anything, stopping
             break
Example #8
 def handle_make_collection(self, mib_name, bump=False, *args, **kwargs):
     if len(mib_name) != 1:
         self.print("Specify one MIB")
         self.die("")
     # Get MIB
     mib = MIB.get_by_name(mib_name[0])
     if not mib:
         self.print("MIB not found: %s" % mib_name[0])
         self.die("")
     # Prepare MIB data
     mib_data = list(
         sorted(
             [{
                 "oid": dd.oid,
                 "name": dd.name,
                 "description": dd.description,
                 "syntax": dd.syntax,
             } for dd in MIBData.objects.filter(mib=mib.id)] + [{
                 "oid": dd.oid,
                 "name": next(a for a in dd.aliases if a.startswith(mib.name + "::")),
                 "description": dd.description,
                 "syntax": dd.syntax,
             } for dd in MIBData.objects.filter(aliases__startswith="%s::" % mib.name)],
             key=lambda x: x["oid"],
         ))
     # Prepare MIB
     if mib.last_updated:
         last_updated = mib.last_updated.strftime("%Y-%m-%d")
     else:
         last_updated = "1970-01-01"
     version = mib.version
     if bump:  # Bump to next version
         version += 1
     data = {
         "name": mib.name,
         "description": mib.description,
         "last_updated": last_updated,
         "version": version,
         "depends_on": mib.depends_on,
         "typedefs": mib.typedefs,
         "data": mib_data,
     }
     # Serialize and write
     with self.open_output(kwargs.get("output")) as f:
         f(ujson.dumps(data))
Example #9
 def create_mib(self, data):
     # Deserialize
     d = ujson.loads(data)
     # Create MIB
     mib = MIB(name=d["name"], description=d["description"],
               last_updated=self.decode_date(d["last_updated"]),
               version=d.get("version", 0),
               depends_on=d["depends_on"])
     mib.save()
     # Upload
     if d["data"]:
         mib.load_data(d["data"])
Example #10
 def handle_make_cmib(self, mib_name, *args, **kwargs):
     if len(mib_name) != 1:
         self.print("Specify one MIB")
         self.die("")
     # Get MIB
     mib = MIB.get_by_name(mib_name[0])
     if not mib:
         self.print("MIB not found: %s" % mib_name[0])
         self.die("")
     # Build cmib
     year = datetime.date.today().year
     r = [
         "# -*- coding: utf-8 -*-",
         "# ----------------------------------------------------------------------",
         "# %s" % mib,
         "#     Compiled MIB",
         "#     Do not modify this file directly",
         "#     Run ./noc mib make-cmib instead",
         "# ----------------------------------------------------------------------",
         "# Copyright (C) 2007-%s The NOC Project" % year,
         "# See LICENSE for details",
         "# ----------------------------------------------------------------------",
         "",
         "# MIB Name",
         'NAME = "%s"' % mib,
         "",
         "# Metadata",
         'LAST_UPDATED = "%s"' % mib.last_updated.isoformat().split("T")[0],
         'COMPILED = "%s"' % datetime.date.today().isoformat(),
         "",
         "# MIB Data: name -> oid",
         "MIB = {",
     ]
     r += [
         ",\n".join('    "%s": "%s"' % (md.name, md.oid) for md in sorted(
             MIBData.objects.filter(mib=mib.id),
             key=lambda x: [int(y) for y in x.oid.split(".")],
         ))
     ]
     r[-1] += ","
     r += ["}", ""]
     data = "\n".join(r) + "\n"
     with self.open_output(kwargs.get("output")) as f:
         f(data)
Example #11
File: views.py  Project: 0pt1on/noc
 def api_event(self, request, id):
     event = get_event(id)
     if not event:
         return self.response_not_found()
     d = self.instance_to_dict(event)
     dd = dict(
         (v, None)
         for v in (
             "body",
             "symptoms",
             "probable_causes",
             "recommended_actions",
             "log",
             "vars",
             "resolved_vars",
             "raw_vars",
         )
     )
     if event.status in ("A", "S"):
         dd["body"] = event.body
         dd["symptoms"] = event.event_class.symptoms
         dd["probable_causes"] = event.event_class.probable_causes
         dd["recommended_actions"] = event.event_class.recommended_actions
         # Fill vars
         left = set(event.vars)
         vars = []
         for ev in event.event_class.vars:
             if ev.name in event.vars:
                 vars += [(ev.name, event.vars[ev.name], ev.description)]
                 left.remove(ev.name)
         vars += [(v, event.vars[v], None) for v in sorted(left)]
         dd["vars"] = vars
         # Fill resolved vars
         vars = []
         is_trap = event.raw_vars.get("source") == "SNMP Trap"
         for v in sorted(event.resolved_vars):
             desc = None
             if is_trap and "::" in v:
                 desc = MIB.get_description(v)
             vars += [(v, event.resolved_vars[v], desc)]
         dd["resolved_vars"] = vars
     dd["raw_vars"] = sorted(event.raw_vars.items())
     # Managed object properties
     mo = event.managed_object
     d["managed_object_address"] = mo.address
     d["managed_object_profile"] = mo.profile.name
     d["managed_object_platform"] = mo.platform.name if mo.platform else ""
     d["managed_object_version"] = mo.version.version if mo.version else ""
     d["segment"] = mo.segment.name
     d["segment_id"] = str(mo.segment.id)
     d["tags"] = mo.tags
     # Log
     if event.log:
         dd["log"] = [
             {
                 "timestamp": self.to_json(l.timestamp),
                 "from_status": l.from_status,
                 "to_status": l.to_status,
                 "message": l.message,
             }
             for l in event.log
         ]
     #
     d.update(dd)
     # Get alarms
     if event.status in ("A", "S"):
         alarms = []
         for a_id in event.alarms:
             a = get_alarm(a_id)
             if not a:
                 continue
             if a.opening_event == event.id:
                 role = "O"
             elif a.closing_event == event.id:
                 role = "C"
             else:
                 role = ""
             alarms += [
                 {
                     "id": str(a.id),
                     "status": a.status,
                     "alarm_class": str(a.alarm_class.id),
                     "alarm_class__label": a.alarm_class.name,
                     "subject": a.subject,
                     "role": role,
                     "timestamp": self.to_json(a.timestamp),
                 }
             ]
         d["alarms"] = alarms
     # Apply plugins
     if event.status in ("A", "S") and event.event_class.plugins:
         plugins = []
         for p in event.event_class.plugins:
             if p.name in self.plugins:
                 plugin = self.plugins[p.name]
                 dd = plugin.get_data(event, p.config)
                 if "plugins" in dd:
                     plugins += dd["plugins"]
                     del dd["plugins"]
                 d.update(dd)
         if plugins:
             d["plugins"] = plugins
     elif event.status == "F":
         # Enable traceback plugin for failed events
         d["traceback"] = event.traceback
         d["plugins"] = [("NOC.fm.event.plugins.Traceback", {})]
     return d
Example #12
 def api_test(self, request):
     q = self.deserialize(request.raw_post_data)
     errors = []
     patterns = []
     result = False
     # Get data
     data = {}
     vars = {}
     required_vars = set()
     r_patterns = []
     event_class = None
     subject = None
     body = None
     if "data" in q:
         if is_objectid(q["data"]):
             event = get_event(q["data"])
             if event:
                 data = event.raw_vars.copy()
                 data["profile"] = event.managed_object.profile.name
                 data["source"] = event.source
             else:
                 errors += ["Event not found: %s" % q["data"]]
         else:
             # Decode json
             try:
                 e = self.deserialize(q["data"])
             except Exception:
                 errors += ["Cannot decode JSON"]
                 e = None
             if isinstance(e, list):
                 e = e[0]
             if not isinstance(e, dict) or "raw_vars" not in e:
                 errors += ["Invalid JSON data"]
             else:
                 data = e["raw_vars"]
                 if "profile" in e:
                     data["profile"] = e["profile"]
                 if "source" in e:
                     data["source"] = e["source"]
         if data.get("source") == "SNMP Trap":
             # Resolve MIBs
             data.update(MIB.resolve_vars(data))
     # Check event class
     if "event_class" in q:
         event_class = self.get_object_or_404(EventClass,
                                              id=q["event_class"])
         for v in event_class.vars:
             if v.required:
                 required_vars.add(v.name)
                 vars[v.name] = "MISSED!"
     # Check patterns
     if "patterns" in q:
         for p in q["patterns"]:
             if "key_re" in p and "value_re" in p:
                 k = None
                 v = None
                 try:
                     k = re.compile(p["key_re"])
                 except re.error as why:
                     errors += [
                         "Invalid key regular expression <<<%s>>>: %s" %
                         (p["key_re"], why)
                     ]
                 try:
                     v = re.compile(p["value_re"])
                 except re.error as why:
                     errors += [
                         "Invalid value regular expression <<<%s>>>: %s" %
                         (p["value_re"], why)
                     ]
                 if k and v:
                     patterns += [(k, v)]
     # Try to match rule
     if patterns and not errors:
         s_patterns = []
         i_patterns = []
         for pkey, pvalue in patterns:
             matched = False
             for k in data:
                 k_match = pkey.search(k)
                 if k_match:
                     v_match = pvalue.search(data[k])
                     if v_match:
                         # Line match
                         # Update vars
                         v = {}
                         v.update(k_match.groupdict())
                         v.update(v_match.groupdict())
                         vars.update(v)
                         # Save patterns
                         s_patterns += [{
                             "status": True,
                             "key": k,
                             "value": data[k],
                             "key_re": pkey.pattern,
                             "value_re": pvalue.pattern,
                             "vars": [{"key": k, "value": v[k]} for k in v]
                         }]
                     else:
                         i_patterns += [{
                             "status": False,
                             "key": k,
                             "value": data[k],
                             "key_re": pkey.pattern,
                             "value_re": pvalue.pattern,
                             "vars": {}
                         }]
                     matched = True
                     break
             if not matched:
                 i_patterns += [{
                     "status": False,
                     "key": None,
                     "value": None,
                     "key_re": pkey.pattern,
                     "value_re": pvalue.pattern,
                     "vars": {}
                 }]
         if s_patterns and not i_patterns:
             result = True
         r_patterns = s_patterns + i_patterns
     # Calculate rule variables
     if "vars" in q and q["vars"]:
         for v in q["vars"]:
             if v["value"].startswith("="):
                 # Evaluate
                 try:
                     vars[v["name"]] = eval(v["value"][1:], {}, vars)
                 except Exception as why:
                     errors += [
                         "Error when evaluating '%s': %s" % (v["name"], why)
                     ]
             else:
                 vars[v["name"]] = v["value"]
     # Check required variables
     for rvars in required_vars:
         if rvars not in vars:
             errors += ["Missed required variable: %s" % rvars]
     # Fill event class template
     if event_class:
         # lang = "en"
         ctx = Context(vars)
         subject = Template(event_class.subject_template).render(ctx)
         body = Template(event_class.body_template).render(ctx)
     # Check expression
     r = {"result": result}
     if errors:
         r["errors"] = errors
     if vars:
         r["vars"] = [{"key": k, "value": vars[k]} for k in vars]
     if r_patterns:
         r["patterns"] = r_patterns
     if subject:
         r["subject"] = subject
     if body:
         r["body"] = body
     return r
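
The pattern-matching loop above pairs a key regex with a value regex and merges the named groups of both matches into vars. A self-contained sketch of that mechanism with invented data and patterns:

    import re

    # Invented raw_vars-style data and patterns for illustration
    data = {"source": "SNMP Trap", "1.3.6.1.2.1.2.2.1.1.12": "12"}
    pkey = re.compile(r"^1\.3\.6\.1\.2\.1\.2\.2\.1\.1\.(?P<row>\d+)$")
    pvalue = re.compile(r"^(?P<ifindex>\d+)$")

    vars = {}
    for k in data:
        k_match = pkey.search(k)
        if not k_match:
            continue
        v_match = pvalue.search(data[k])
        if v_match:
            # Named groups from both regexes become rule variables
            vars.update(k_match.groupdict())
            vars.update(v_match.groupdict())
            break
    print(vars)  # {'row': '12', 'ifindex': '12'}
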
Example #13
    async def classify_event(self, event, data):
        """
        Perform event classification.
        Classification steps are:

        1. Format SNMP values according to MIB definitions (for SNMP events only)
        2. Find matching classification rule
        3. Calculate rule variables

        :param event: Event to classify
        :type event: NewEvent
        :returns: Classification status (CR_*)
        """
        metrics[E_SRC_METRICS.get(event.source, E_SRC_OTHER)] += 1
        is_unknown = False
        #
        pre_event = data.pop("$event", None)
        # Resolve MIB variables for SNMP Traps
        resolved_vars = {"profile": event.managed_object.profile.name}
        # Store event variables
        event.raw_vars = data
        if event.source == E_SRC_SNMP_TRAP:
            resolved_vars.update(MIB.resolve_vars(event.raw_vars))
        event.resolved_vars = resolved_vars
        # Get matched event class
        if pre_event:
            # Event is preprocessed, get class and variables
            event_class_name = pre_event.get("class")
            event_class = EventClass.get_by_name(event_class_name)
            if not event_class:
                self.logger.error(
                    "[%s|%s|%s] Failed to process event: Invalid event class '%s'",
                    event.id,
                    event.managed_object.name,
                    event.managed_object,
                    event_class_name,
                )
                metrics[CR_FAILED] += 1
                return  # Drop malformed message
            event.event_class = event_class
            event.vars = pre_event.get("vars", {})
        else:
            # Prevent unclassified events flood
            if self.check_unclassified_syslog_flood(event):
                return
            # Find matched event class
            c_vars = event.raw_vars.copy()
            c_vars.update({
                k: smart_text(fm_unescape(resolved_vars[k]))
                for k in resolved_vars
            })
            rule, vars = self.ruleset.find_rule(event, c_vars)
            if rule is None:
                # Something went wrong.
                # No default rule found. Exit immediately
                self.logger.error("No default rule found. Exiting")
                os._exit(1)
            if rule.to_drop:
                # Silently drop event if declared by action
                self.logger.info(
                    "[%s|%s|%s] Dropped by action",
                    event.id,
                    event.managed_object.name,
                    event.managed_object.address,
                )
                metrics[CR_DELETED] += 1
                return
            if rule.is_unknown_syslog:
                # Append to codebook
                msg = event.raw_vars.get("message", "")
                cb = self.get_msg_codebook(msg)
                o_id = event.managed_object.id
                if o_id not in self.unclassified_codebook:
                    self.unclassified_codebook[o_id] = []
                cbs = [cb] + self.unclassified_codebook[o_id]
                cbs = cbs[:self.unclassified_codebook_depth]
                self.unclassified_codebook[o_id] = cbs
            self.logger.debug(
                "[%s|%s|%s] Matching rule: %s",
                event.id,
                event.managed_object.name,
                event.managed_object.address,
                rule.name,
            )
            event.event_class = rule.event_class
            # Calculate rule variables
            event.vars = self.ruleset.eval_vars(event, event.event_class, vars)
            message = "Classified as '%s' by rule '%s'" % (
                event.event_class.name, rule.name)
            event.log += [
                EventLog(
                    timestamp=datetime.datetime.now(),
                    from_status="N",
                    to_status="A",
                    message=message,
                )
            ]
            is_unknown = rule.is_unknown
        # Event class found, process according to rules
        self.logger.info(
            "[%s|%s|%s] Event class: %s (%s)",
            event.id,
            event.managed_object.name,
            event.managed_object.address,
            event.event_class.name,
            event.vars,
        )
        # Deduplication
        if self.deduplicate_event(event):
            return
        # Suppress repeats
        if self.suppress_repeats(event):
            return
        # Activate event
        event.expires = event.timestamp + datetime.timedelta(
            seconds=event.event_class.ttl)
        event.save()
        # Fill deduplication filter
        self.dedup_filter.register(event)
        # Fill suppress filter
        self.suppress_filter.register(event)
        # Call handlers
        if self.call_event_handlers(event):
            return
        # Additionally check link events
        if await self.check_link_event(event):
            return
        # Call triggers
        if self.call_event_triggers(event):
            return
        # Finally dispose event to further processing by correlator
        if event.to_dispose:
            await self.dispose_event(event)
        if is_unknown:
            metrics[CR_UNKNOWN] += 1
        elif pre_event:
            metrics[CR_PREPROCESSED] += 1
        else:
            metrics[CR_CLASSIFIED] += 1
Example #14
 def api_test(self, request):
     q = self.deserialize(request.raw_post_data)
     errors = []
     patterns = []
     result = False
     # Get data
     data = {}
     vars = {}
     required_vars = set()
     r_patterns = []
     event_class = None
     subject = None
     body = None
     if "data" in q:
         if is_objectid(q["data"]):
             event = get_event(q["data"])
             if event:
                 data = event.raw_vars.copy()
                 data["profile"] = event.managed_object.profile_name
             else:
                 errors += ["Event not found: %s" % q["data"]]
         else:
             # Decode json
             try:
                 e = self.deserialize(q["data"])
             except Exception:
                 errors += ["Cannot decode JSON"]
                 e = None
             if isinstance(e, list):
                 e = e[0]
             if not isinstance(e, dict) or "raw_vars" not in e:
                 errors += ["Invalid JSON data"]
             else:
                 data = e["raw_vars"]
                 if "profile" in e:
                     data["profile"] = e["profile"]
         if data.get("source") == "SNMP Trap":
             # Resolve MIBs
             data.update(MIB.resolve_vars(data))
     # Check event class
     if "event_class" in q:
         event_class = self.get_object_or_404(EventClass,
                                              id=q["event_class"])
         for v in event_class.vars:
             if v.required:
                 required_vars.add(v.name)
                 vars[v.name] = "MISSED!"
     # Check patterns
     if "patterns" in q:
         for p in q["patterns"]:
             if "key_re" in p and "value_re" in p:
                 k = None
                 v = None
                 try:
                     k = re.compile(p["key_re"])
                 except re.error as why:
                     errors += [
                         "Invalid key regular expression <<<%s>>>: %s" %
                         (p["key_re"], why)
                     ]
                 try:
                     v = re.compile(p["value_re"])
                 except re.error as why:
                     errors += [
                         "Invalid value regular expression <<<%s>>>: %s" %
                         (p["value_re"], why)
                     ]
                 if k and v:
                     patterns += [(k, v)]
Example #15
 def compile(self, data):
     """
     Compile MIB, upload to database and store MIB file
     :param data: MIB text
     :return:
     """
     if not config.path.smilint or not os.path.exists(config.path.smilint):
         return {
             "status": False,
             "msg": "smilint is missed",
             "error": ERR_MIB_TOOL_MISSED
         }
     if not config.path.smidump or not os.path.exists(config.path.smidump):
         return {
             "status": False,
             "msg": "smidump is missed",
             "error": ERR_MIB_TOOL_MISSED
         }
     # Put data to temporary file
     with temporary_file(data) as tmp_path:
         # Pass MIB through smilint to detect missed modules
         self.logger.debug(
             "Pass MIB through smilint to detect missed modules")
         f = subprocess.Popen([config.path.smilint, "-m", tmp_path],
                              stderr=subprocess.PIPE,
                              env=self.SMI_ENV).stderr
         for l in f:
             match = self.rx_module_not_found.search(l.strip())
             if match:
                 return {
                     "status": False,
                     "msg": "Required MIB missed: %s" % match.group(1),
                     "code": ERR_MIB_MISSED,
                 }
         self.logger.debug("Convert MIB to python module and load")
         # Convert MIB to python module and load
         with temporary_file() as py_path:
             subprocess.check_call(
                 [
                     config.path.smidump, "-k", "-q", "-f", "python", "-o",
                     py_path, tmp_path
                 ],
                 env=self.SMI_ENV,
             )
             with open(py_path) as f:
                 p_data = unicode(f.read(), "ascii",
                                  "ignore").encode("ascii")
             with open(py_path, "w") as f:
                 f.write(p_data)
             m = imp.load_source("mib", py_path)
         # NOW we can deduce module name
         mib_name = m.MIB["moduleName"]
         # Check module dependencies
         depends_on = {}  # MIB Name -> Object ID
         self.logger.debug("Check module dependencies: %s",
                           m.MIB.get("imports", ""))
         if "imports" in m.MIB:
             for i in m.MIB["imports"]:
                 if "module" not in i:
                     continue
                 rm = i["module"]
                 if rm in depends_on:
                     continue
                 md = MIB.get_by_name(rm)
                 if md is None:
                     return {
                         "status": False,
                         "msg": "Required MIB missed: %s" % rm,
                         "code": ERR_MIB_MISSED,
                     }
                 depends_on[rm] = md
         # Get MIB latest revision date
         try:
             last_updated = datetime.datetime.strptime(
                 sorted([x["date"]
                         for x in m.MIB[mib_name]["revisions"]])[-1],
                 "%Y-%m-%d %H:%M")
         except ValueError:
             last_updated = datetime.datetime(year=1970, month=1, day=1)
         self.logger.debug("Extract MIB typedefs")
         # Extract MIB typedefs
         typedefs = {}
         if "typedefs" in m.MIB:
             for t in m.MIB["typedefs"]:
                 typedefs[t] = MIB.parse_syntax(m.MIB["typedefs"][t])
         # Check mib already uploaded
         mib_description = m.MIB[mib_name].get("description", None)
         mib = MIB.objects.filter(name=mib_name).first()
         if mib is not None:
             mib.description = mib_description
             mib.last_updated = last_updated
             mib.depends_on = sorted(depends_on)
             mib.typedefs = typedefs
             mib.save()
             # Delete all MIB Data
             mib.clean()
         else:
             # Create MIB
             mib = MIB(
                 name=mib_name,
                 description=mib_description,
                 last_updated=last_updated,
                 depends_on=sorted(depends_on),
                 typedefs=typedefs,
             )
             mib.save()
         # Upload MIB data
         cdata = []
         for i in ["nodes", "notifications"]:
             if i in m.MIB:
                 cdata += [{
                     "name": "%s::%s" % (mib_name, node),
                     "oid": v["oid"],
                     "description": v.get("description"),
                     "syntax": v["syntax"]["type"] if "syntax" in v else None,
                 } for node, v in six.iteritems(m.MIB[i])]
         mib.load_data(cdata)
         # Move file to permanent place
         safe_rewrite(self.get_path(mib_name), data)
     return {"status": True, "mib": mib_name}
Example #16
File: mib.py  Project: nbashev/noc
    def handle_make_cmib(self, mib_name, *args, **kwargs):
        def has_worth_hint(syntax):
            if not syntax:
                return False
            hint = syntax.get("display_hint")
            if not hint:
                return False
            base_type = syntax["base_type"]
            if base_type == "Integer32" and hint == "d":
                return False
            if base_type == "OctetString" and hint == "255a":
                return False
            return True

        if len(mib_name) != 1:
            self.print("Specify one MIB")
            self.die("")
        # Get MIB
        mib = MIB.get_by_name(mib_name[0])
        if not mib:
            self.print("MIB not found: %s" % mib_name[0])
            self.die("")
        # Build cmib
        year = datetime.date.today().year
        r = [
            "# ----------------------------------------------------------------------",
            "# %s" % mib,
            "# Compiled MIB",
            "# Do not modify this file directly",
            "# Run ./noc mib make-cmib instead",
            "# ----------------------------------------------------------------------",
            "# Copyright (C) 2007-%s The NOC Project" % year,
            "# See LICENSE for details",
            "# ----------------------------------------------------------------------",
            "",
            "# MIB Name",
            'NAME = "%s"' % mib,
            "",
            "# Metadata",
            'LAST_UPDATED = "%s"' % mib.last_updated.isoformat().split("T")[0],
            'COMPILED = "%s"' % datetime.date.today().isoformat(),
            "",
            "# MIB Data: name -> oid",
            "MIB = {",
        ]
        mib_data = list(
            sorted(
                MIBData.objects.filter(mib=mib.id),
                key=lambda x: [int(y) for y in x.oid.split(".")],
            ))
        r += [
            "\n".join('    "%s": "%s",' % (md.name, md.oid) for md in mib_data)
        ]
        r += ["}", "", "DISPLAY_HINTS = {"]
        r += [
            "\n".join('    "%s": ("%s", "%s"),  # %s' %
                      (md.oid, md.syntax["base_type"],
                       md.syntax["display_hint"], md.name) for md in mib_data
                      if has_worth_hint(md.syntax))
        ]
        r += ["}", ""]
        data = "\n".join(r)
        with self.open_output(kwargs.get("output")) as f:
            f(data)