Example #1
    def execute(self, args):
        seq_file = args["<seq-file>"]
        reduction = utils.cast(args["--reduction"], float, 0.5)
        threshold = utils.cast(args["--threshold"], float, None)
        filters = [re.compile(pattern) for pattern in args["--filter"]]
        output = args["--output"]
        if not output:
            output = os.path.join(os.getcwd(), "scm-%s" % str(utils.get_utc()))

        LOG.info("Loading alert rules from sequence file")
        with open(seq_file, "r") as f:
            data = json.loads(f.read())
        raw_rules = data["calculatedRules"]
        LOG.info("Loaded %i alert rules" % len(raw_rules))

        if filters:
            LOG.info("Filtering alert rules by regex")
            raw_rules = alerts.filter_rules_by_regex(raw_rules, filters)
            LOG.info("Obtained %i alert rules after filtering" %
                     len(raw_rules))

        LOG.info("Building SCM structure")
        causality_map = scm.SymptomCausalityMap.from_rules(raw_rules)

        LOG.info("Reducing SCM structure")
        causality_map.reduce(reduction, threshold)

        LOG.info("Plotting SCM")
        plotter.GraphPlot(causality_map, "Symptom Causality Map",
                          output).plot()
        LOG.info("Output saved to: %s" % output)
Example #2
    def execute(self, args):
        queries = args["--query"]

        now = utils.get_utc()
        start = int(args["--start"] or now - 500)
        end = int(args["--end"] or now)
        step = int(args["--step"] or 10)

        exp_start = utils.cast(args["--exp-start"], int)
        exp_duration = int(args["--exp-duration"] or 300)
        exp_offset = int(args["--exp-offset"] or 120)

        if exp_start:
            start = exp_start - exp_offset
            end = exp_start + exp_duration + exp_offset

        prom_client = prometheus.PrometheusClient.get()
        metric_fetcher = metrics.MetricFetcher(prom_client)

        results = []
        for query in queries:
            results.extend(metric_fetcher.fetch(query, start, end, step))

        corr_matrix = correlation.CorrelationMatrix(results).compute()
        plotter.CorrelationMatrixPlot(corr_matrix).plot()
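
Several snippets rely on utils.cast to turn an optional string option into a typed value with a fallback. The helper itself is not shown in these examples, so the sketch below is only a guess at its behavior, inferred from the call sites (value, target type, optional default); the project's actual implementation may differ.

# Guessed behavior of utils.cast, inferred from how it is called above.
# Returns the default when the option was not supplied, otherwise casts
# the raw string to the requested type.
def cast(value, to_type, default=None):
    if value is None:
        return default
    return to_type(value)

cast("0.4", float, 0.5)   # -> 0.4
cast(None, float, 0.5)    # -> 0.5
cast(None, int)           # -> None, e.g. an omitted --exp-start
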
Example #3
    def execute(self, args):
        seq_file = args["<seq-file>"]
        filters = [re.compile(pattern) for pattern in args["--filter"]]
        pairwise = args["--pairwise"]
        output_dir = args["--output-dir"]
        if not output_dir:
            output_dir = os.path.join(os.getcwd(), str(utils.get_utc()))

        LOG.info("Loading alerts from sequence file")
        with open(seq_file, "r") as f:
            data = json.loads(f.read())
        raw_alerts = data["events"]
        first_timestamp = float(data["firstTimestamp"])
        raw_rules = data.get("calculatedRules", [])
        LOG.info("Loaded %i alerts" % len(raw_alerts))
        LOG.info("Loaded %i alert_rules" % len(raw_rules))

        if filters:
            LOG.info("Filtering alerts and rules by regex")
            raw_alerts = alerts.filter_alerts_by_regex(raw_alerts, filters)
            raw_rules = alerts.filter_rules_by_regex(raw_rules, filters)
            LOG.info("Obtained %i alerts after filtering" % len(raw_alerts))
            LOG.info("Obtained %i alert rules after filtering" %
                     len(raw_rules))

        LOG.info("Normalizing alerts data")
        # Shift each alert's relative timestamp by firstTimestamp and group
        # the resulting absolute timestamps by event type.
        normalized_alerts = defaultdict(list)
        for raw_alert in raw_alerts:
            timestamp = int(float(raw_alert["timestamp"]) + first_timestamp)
            normalized_alerts[raw_alert["eventType"]].append(timestamp)

        LOG.info("Plotting all alerts")
        utils.mkdir(output_dir)

        output = os.path.join(output_dir, "alerts")
        plotter.TimelinePlot(normalized_alerts,
                             "Alerts co-occurrence",
                             output=output).plot()

        if pairwise:
            LOG.info("Plotting alert combinations")
            for raw_rule in raw_rules:
                id1 = raw_rule["trigger"]["event"]
                id2 = raw_rule["response"]["event"]
                data = raw_rule["data"]
                alerts_subset = {k: normalized_alerts[k] for k in [id1, id2]}
                desc = "Offset: %.2f\n" % (data["Offset"])
                desc += "Mutual information: %.2f" % data["Mutual Information"]
                output = os.path.join(output_dir, "%s-%s-alerts" % (id1, id2))
                plotter.TimelinePlot(alerts_subset,
                                     "Alerts co-occurrence",
                                     desc=desc,
                                     output=output).plot()
        LOG.info("Output saved to: %s" % output_dir)
Example #4
    def execute(self, args):
        start = utils.cast(args["--start"], int, None)
        end = utils.cast(args["--end"], int, utils.get_utc())
        converter_name = args["--converter"]
        output = args["--output"]
        if not output:
            output = os.path.join(os.getcwd(),
                                  "alerts-%s.json" % str(utils.get_utc()))

        LOG.info("Fetching alerts")
        orca_client = orca.OpenRCAClient.get()
        raw_alerts = orca_client.get_alerts(deleted=True)
        if start and end:
            raw_alerts = alerts.filter_by_time(raw_alerts, start, end)
        LOG.info("Collected %i alert instances" % len(raw_alerts))

        LOG.info("Converting using converter: %s" % converter_name)
        if converter_name:
            converter = alerts.get_converter(converter_name)
            processed_alerts = converter.convert(raw_alerts)

        with open(output, "w") as f:
            f.write(json.dumps(processed_alerts))
        LOG.info("Output saved to: %s" % output)
Example #5
    def execute(self, args):
        title = args["<title>"]
        query = args["<query>"]

        now = utils.get_utc()
        start = int(args["--start"] or now - 500)
        end = int(args["--end"] or now)
        step = int(args["--step"] or 10)

        exp_start = utils.cast(args["--exp-start"], int)
        exp_duration = int(args["--exp-duration"] or 300)
        exp_offset = int(args["--exp-offset"] or 120)

        if exp_start:
            start = exp_start - exp_offset
            end = exp_start + exp_duration + exp_offset

        ymin = utils.cast(args["--ymin"], float)
        ymax = utils.cast(args["--ymax"], float)

        xmarkers = [float(xval) for xval in args["--xmarker"]]
        ymarkers = [float(yval) for yval in args["--ymarker"]]

        output_dir = args.get("--output-dir") or os.getcwd()

        LOG.info("Dumping query '%s', start: %s, end: %s, step: %s", query,
                 start, end, step)
        prom_client = prometheus.PrometheusClient.get()
        metric_fetcher = metrics.MetricFetcher(prom_client)
        results = metric_fetcher.fetch(query, start, end, step)

        LOG.info("Plotting metrics...")
        subplots = []
        for result in results:
            subplots.append(
                plotter.TimeseriesPlot(*result,
                                       ymin=ymin,
                                       ymax=ymax,
                                       xmarkers=xmarkers,
                                       ymarkers=ymarkers))

        grid_plotter = plotter.GridPlot(title, subplots)
        grid_plotter.plot()
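
The experiment-window logic shared by examples #2 and #5 overrides --start and --end whenever --exp-start is given: the queried range covers the experiment duration plus an offset of padding on each side. A quick worked example with the defaults from the snippet above and a made-up experiment start:

# Worked example of the window arithmetic above (exp_start is hypothetical;
# duration and offset are the defaults from the snippet: 300 s and 120 s).
exp_start, exp_duration, exp_offset = 1700000000, 300, 120
start = exp_start - exp_offset                # 1699999880
end = exp_start + exp_duration + exp_offset   # 1700000420
# Total window: 120 s lead-in + 300 s experiment + 120 s tail = 540 s.
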
Example #6
    def execute(self, args):
        seq_file = args["<seq-file>"]
        filters = [re.compile(pattern) for pattern in args["--filter"]]
        output_dir = args["--output-dir"]
        if not output_dir:
            output_dir = os.path.join(os.getcwd(), str(utils.get_utc()))

        LOG.info("Loading alert rules from sequence file")
        with open(seq_file, "r") as f:
            data = json.loads(f.read())
        raw_rules = data["calculatedRules"]
        LOG.info("Loaded %i alert rules" % len(raw_rules))

        if filters:
            LOG.info("Filtering alert rules by regex")
            raw_rules = alerts.filter_rules_by_regex(raw_rules, filters)
            LOG.info("Obtained %i alert rules after filtering" %
                     len(raw_rules))

        LOG.info("Generating PDFs for alert combinations")
        utils.mkdir(output_dir)

        for raw_rule in raw_rules:
            id1 = raw_rule["trigger"]["event"]
            id2 = raw_rule["response"]["event"]
            kde_params = raw_rule["data"]["Kde"]["param"][0]

            dist = distribution.KDEDistribution()
            dist.fit(kde_params)

            subtitle = "%s\n%s" % (id1, id2)
            output = os.path.join(output_dir, "%s-%s-pdf" % (id1, id2))
            plotter.PDFPlot(dist,
                            kde_params,
                            "Event Co-occurrence PDF",
                            subtitle=subtitle,
                            output=output).plot()
        LOG.info("Output saved to: %s" % output_dir)
Example #7
    def execute(self, args):
        query = args["<query>"]

        now = utils.get_utc()
        start = int(args["--start"] or now - 600)
        end = int(args["--end"] or now)
        step = int(args["--step"] or 10)

        ymin = utils.cast(args["--ymin"], float)
        ymax = utils.cast(args["--ymax"], float)

        xmarkers = [float(xval) for xval in args["--xmarker"]]
        ymarkers = [float(yval) for yval in args["--ymarker"]]

        prom_client = prometheus.PrometheusClient.get()
        metric_fetcher = metrics.MetricFetcher(prom_client)

        LOG.info("Dumping query '%s', start: %s, end: %s, step: %s", query,
                 start, end, step)
        results = metric_fetcher.fetch(query, start, end, step)

        subplots = []
        LOG.info("Detecting changepoints...")
        for result in results:
            cps = cpd.ChangepointDetector().detect(result[2])
            del cps[-1]
            subplots.append(
                plotter.ChangepointPlot(*result,
                                        cps,
                                        ymin=ymin,
                                        ymax=ymax,
                                        xmarkers=xmarkers,
                                        ymarkers=ymarkers))

        LOG.info("Plotting changepoints...")
        grid_plotter = plotter.GridPlot("Changepoint detection", subplots)
        grid_plotter.plot()
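
The del cps[-1] line above is easiest to read if cpd.ChangepointDetector follows the convention of libraries such as ruptures, whose predict() methods return breakpoint indices that always end with the length of the signal; that trailing index marks the end of the series rather than a real changepoint, so it is discarded before plotting. This is an assumption about the project-internal detector; the sketch below only demonstrates the convention with ruptures directly.

# Demonstration of the trailing-breakpoint convention using the ruptures
# library (assumed here to be representative of cpd.ChangepointDetector).
import numpy as np
import ruptures as rpt

signal = np.concatenate([np.zeros(50), np.ones(50)])  # one shift at index 50
bkps = rpt.Pelt(model="rbf").fit(signal).predict(pen=5)
print(bkps)                # e.g. [50, 100] -- the last entry is len(signal)
changepoints = bkps[:-1]   # equivalent to the `del cps[-1]` above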