Example #1
def itic(mauka_message: protobuf.mauka_pb2.MaukaMessage,
         segment_threshold: float,
         logger=None,
         opq_mongo_client: mongo.OpqMongoClient = None):
    """
    Computes the ITIC region for a given waveform.
    :param mauka_message: A mauka message whose payload carries the Vrms values to analyze.
    :param segment_threshold: Threshold for segmentation
    :param logger: Optional logger to use to print information
    :param opq_mongo_client: Optional DB client to re-use (otherwise new one will be created)
    :return: None; any ITIC incidents found are stored to Mongo.
    """
    mongo_client = mongo.get_default_client(opq_mongo_client)
    # Nothing to do if the payload contains no data.
    if len(mauka_message.payload.data) == 0:
        return

    segments = analysis.segment(mauka_message.payload.data, segment_threshold)

    if logger is not None:
        logger.debug("Calculating ITIC with {} segments.".format(
            len(segments)))

    for segment in segments:
        start_idx = segment[0]
        end_idx = segment[1] + 1
        subarray = mauka_message.payload.data[start_idx:end_idx]
        mean_rms = numpy.mean(subarray)

        itic_enum = itic_region(mean_rms, analysis.c_to_ms(len(subarray)))

        if itic_enum == IticRegion.NO_INTERRUPTION:
            continue
        else:
            incident_start_timestamp_ms = mauka_message.payload.start_timestamp_ms + analysis.c_to_ms(
                start_idx)
            incident_end_timestamp_ms = mauka_message.payload.start_timestamp_ms + analysis.c_to_ms(
                end_idx)
            if itic_enum is IticRegion.PROHIBITED:
                incident_classification = mongo.IncidentClassification.ITIC_PROHIBITED
            else:
                incident_classification = mongo.IncidentClassification.ITIC_NO_DAMAGE

            mongo.store_incident(mauka_message.event_id, mauka_message.box_id,
                                 incident_start_timestamp_ms,
                                 incident_end_timestamp_ms,
                                 mongo.IncidentMeasurementType.VOLTAGE,
                                 mean_rms - 120.0, [incident_classification],
                                 [], {}, mongo_client)
            if logger is not None:
                logger.debug(
                    "Found ITIC incident [{}] from event {} and box {}".format(
                        itic_enum, mauka_message.event_id,
                        mauka_message.box_id))
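
The example assumes an IticRegion enum defined elsewhere in the plugin. A minimal sketch consistent with the three members referenced above (any additional members in the real module are not shown here):

import enum

class IticRegion(enum.Enum):
    """ITIC curve regions used to classify a (duration, Vrms) pair."""
    NO_INTERRUPTION = "NO_INTERRUPTION"
    NO_DAMAGE = "NO_DAMAGE"
    PROHIBITED = "PROHIBITED"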
Example #2
def box_triggering_thresholds(box_ids: typing.Set[str],
                              opq_mongo_client: typing.Optional[mongo.OpqMongoClient] = None) -> typing.List[
                                  typing.Dict[str, float]]:
    """
    Gets the box triggering thresholds for the provided boxes. Will use the override if found, otherwise the default.
    :param box_ids: The box ids to get triggering thresholds for.
    :param opq_mongo_client: An optional OPQ mongo client.
    :return: A list of thresholds for each box.
    """
    mongo_client = mongo.get_default_client(opq_mongo_client)
    triggering_thresholds: TriggeringType = mongo_client.makai_config_collection.find_one()["triggering"]
    triggering_overrides: typing.Dict[str, TriggeringOverrideType] = {}

    for override in triggering_thresholds["triggering_overrides"]:
        triggering_overrides[override["box_id"]] = override

    thresholds = []

    for box_id in box_ids:
        if box_id not in triggering_overrides:
            thresholds.append({
                "box_id": box_id,
                "ref_f": triggering_thresholds["default_ref_f"],
                "ref_v": triggering_thresholds["default_ref_v"],
                "threshold_percent_f_low": triggering_thresholds["default_threshold_percent_f_low"],
                "threshold_percent_f_high": triggering_thresholds["default_threshold_percent_f_high"],
                "threshold_percent_v_low": triggering_thresholds["default_threshold_percent_v_low"],
                "threshold_percent_v_high": triggering_thresholds["default_threshold_percent_v_high"],
                "threshold_percent_thd_high": triggering_thresholds["default_threshold_percent_thd_high"],
            })
        else:
            triggering_override = triggering_overrides[box_id]
            thresholds.append({
                "box_id": box_id,
                "ref_f": triggering_override["ref_f"],
                "ref_v": triggering_override["ref_v"],
                "threshold_percent_f_low": triggering_override["threshold_percent_f_low"],
                "threshold_percent_f_high": triggering_override["threshold_percent_f_high"],
                "threshold_percent_v_low": triggering_override["threshold_percent_v_low"],
                "threshold_percent_v_high": triggering_override["threshold_percent_v_high"],
                "threshold_percent_thd_high": triggering_override["threshold_percent_thd_high"],
            })
    return thresholds
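
For reference, the document returned by makai_config_collection.find_one() must contain a "triggering" sub-document with the keys read above. The shape below is inferred from those lookups; the numeric values are illustrative placeholders, not real deployment defaults:

EXAMPLE_MAKAI_CONFIG = {
    "triggering": {
        "default_ref_f": 60.0,
        "default_ref_v": 120.0,
        "default_threshold_percent_f_low": 0.5,
        "default_threshold_percent_f_high": 0.5,
        "default_threshold_percent_v_low": 5.0,
        "default_threshold_percent_v_high": 5.0,
        "default_threshold_percent_thd_high": 5.0,
        "triggering_overrides": [
            {
                "box_id": "1001",
                "ref_f": 60.0,
                "ref_v": 120.0,
                "threshold_percent_f_low": 1.0,
                "threshold_percent_f_high": 1.0,
                "threshold_percent_v_low": 2.5,
                "threshold_percent_v_high": 2.5,
                "threshold_percent_thd_high": 3.0,
            }
        ],
    }
}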
Example #3
def itic(mauka_message: protobuf.mauka_pb2.MaukaMessage,
         segment_threshold: float,
         itic_plugin: typing.Optional['IticPlugin'] = None,
         opq_mongo_client: typing.Optional[mongo.OpqMongoClient] = None) -> typing.List[int]:
    """
    Computes the ITIC region for a given waveform.
    :param itic_plugin: An instance of this plugin.
    :param mauka_message: A mauka message.
    :param segment_threshold: Threshold for segmentation
    :param opq_mongo_client: Optional DB client to re-use (otherwise new one will be created)
    :return: A list of incident ids for any ITIC incidents stored.
    """
    mongo_client = mongo.get_default_client(opq_mongo_client)
    # An empty payload cannot be segmented; note it for debugging.
    if len(mauka_message.payload.data) == 0:
        maybe_debug(itic_plugin, "Bad payload data length: %d" % len(mauka_message.payload.data))

    maybe_debug(itic_plugin, "Preparing to get segments for %d Vrms values" % len(mauka_message.payload.data))
    # segments = analysis.segment(mauka_message.payload.data, segment_threshold)
    try:
        segments = analysis.segment_array(numpy.array(list(mauka_message.payload.data)))
    except Exception as exception:
        itic_plugin.logger.error("Error segmenting data for ITIC plugin: %s", str(exception))
        segments = []

    if len(segments) == 0:
        maybe_debug(itic_plugin, "No segments found. Ignoring")
        return []

    maybe_debug(itic_plugin, "Calculating ITIC with {} segments.".format(len(segments)))

    incident_ids = []
    for i, segment in enumerate(segments):
        try:
            segment_len = analysis.c_to_ms(len(segment))
            start_t = analysis.c_to_ms(sum(len(seg) for seg in segments[:i]))
            end_t = start_t + segment_len
            mean_rms = segment.mean()
            maybe_debug(itic_plugin, "start=%f end=%f mean=%f" % (start_t, end_t, mean_rms))

            itic_enum = itic_region(mean_rms, segment_len)

            if itic_enum == IticRegion.NO_INTERRUPTION:
                maybe_debug(itic_plugin, "NO_INTERRUPTION")
                continue
            else:
                incident_start_timestamp_ms = mauka_message.payload.start_timestamp_ms + start_t
                incident_end_timestamp_ms = mauka_message.payload.start_timestamp_ms + end_t
                if itic_enum is IticRegion.PROHIBITED:
                    maybe_debug(itic_plugin, "PROHIBITED")
                    incident_classification = mongo.IncidentClassification.ITIC_PROHIBITED
                else:
                    maybe_debug(itic_plugin, "NO_DAMAGE")
                    incident_classification = mongo.IncidentClassification.ITIC_NO_DAMAGE

                incident_id = mongo.store_incident(
                    itic_plugin.request_next_available_incident_id(),
                    mauka_message.payload.event_id,
                    mauka_message.payload.box_id,
                    incident_start_timestamp_ms,
                    incident_end_timestamp_ms,
                    mongo.IncidentMeasurementType.VOLTAGE,
                    mean_rms - 120.0,
                    [incident_classification],
                    [],
                    {},
                    mongo_client)

                maybe_debug(itic_plugin, "Stored incident")

                maybe_debug(itic_plugin,
                            "Found ITIC incident [{}] from event {} and box {}".format(
                                itic_enum,
                                mauka_message.event_id,
                                mauka_message.box_id))

                incident_ids.append(incident_id)
        except Exception as exception:
            itic_plugin.logger.error("Error storing ITIC incident: %s", str(exception))

    return incident_ids
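
maybe_debug is referenced above but not shown. A minimal sketch consistent with its usage (emit a debug message only when a plugin instance with a logger is actually available):

def maybe_debug(itic_plugin: typing.Optional['IticPlugin'], msg: str) -> None:
    # Only log when the optional plugin instance (and hence its logger) exists.
    if itic_plugin is not None and itic_plugin.logger is not None:
        itic_plugin.logger.debug(msg)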
Example #4
def frequency_incident_classifier(
        event_id: int,
        box_id: str,
        windowed_frequencies: numpy.ndarray,
        box_event_start_ts: int,
        freq_ref: float,
        freq_var_high: float,
        freq_var_low: float,
        freq_interruption: float,
        window_size: int,
        max_lull: int,
        opq_mongo_client: mongo.OpqMongoClient = None,
        logger=None):
    """
    Classifies a frequency incident as a Sag, Swell, or Interruption. Creates a Mongo Incident document
    :param event_id: Makai Event ID
    :param box_id: Box reporting event
    :param windowed_frequencies: High fidelity frequency measurements of windows
    :param box_event_start_ts: start timestamp of event in milliseconds
    :param freq_ref: the reference frequency
    :param freq_var_high: the threshold amount of variation to trigger a frequency swell incident
    :param freq_var_low: the threshold amount of variation to trigger a frequency sag incident
    :param freq_interruption: the frequency to trigger a frequency interruption incident
    :param window_size: The number of samples per window
    :param max_lull: Maximum number of consecutive non-matching windows tolerated before a running incident is closed
    :param opq_mongo_client: Optional DB client to re-use (otherwise new one will be created)
    :param logger: Optional logger to use to print information
    """

    mongo_client = mongo.get_default_client(opq_mongo_client)
    window_duration_ms = (window_size / constants.SAMPLE_RATE_HZ) * 1000
    running_incident = False
    incident_start_ts = box_event_start_ts
    incident_variations = []
    incidents = []
    lull_variations = []
    lull_incidents = []
    lull_count = 0

    if logger is not None:
        logger.debug("Calculating frequency with {} segments.".format(
            len(windowed_frequencies)))
        logger.debug("window_duration_ms: {}".format(window_duration_ms))

    for idx, freq in enumerate(windowed_frequencies):
        # check whether there is a frequency variation and if so what type
        curr_incident, curr_variation = frequency_variation(
            freq, freq_ref, freq_var_high, freq_var_low, freq_interruption)
        if running_incident != curr_incident:
            if lull_count == max_lull or running_incident is False:  # start of a new incident and/or end of an incident
                if running_incident:  # make and store incident doc if end of incident
                    incident_end_ts = (idx - max_lull) * window_duration_ms + box_event_start_ts
                    if logger is not None:
                        logger.debug(
                            "Found Frequency incident [{}] from event {} and box {}"
                            .format(running_incident, event_id, box_id))
                    incidents.append({
                        "event_id": event_id,
                        "box_id": box_id,
                        "incident_start_ts": incident_start_ts,
                        "incident_end_ts": incident_end_ts,
                        "incident_type": mongo.IncidentMeasurementType.FREQUENCY,
                        "avg_deviation": numpy.average(incident_variations),
                        "incident_classifications": [running_incident],
                        "annotations": [],
                        "metadata": {},
                        "mongo_client": mongo_client
                    })

                incident_variations = [curr_variation]
                incident_start_ts = idx * window_duration_ms + box_event_start_ts
                running_incident = curr_incident
                lull_count = 0
                lull_incidents = []
                lull_variations = []
            else:
                lull_incidents.append(curr_incident)
                lull_variations.append(curr_variation)
                lull_count += 1
        else:
            incident_variations = incident_variations + lull_variations
            incident_variations.append(curr_variation)
            lull_count = 0
            lull_incidents = []
            lull_variations = []

    # ensure if there is any frequency variation at the end of the event then it is still saved
    if running_incident:  # make and store incident doc
        incident_end_ts = (len(windowed_frequencies) - lull_count) * window_duration_ms + box_event_start_ts
        if logger is not None:
            logger.debug(
                "Found Frequency incident [{}] from event {} and box {}".
                format(running_incident, event_id, box_id))
        incidents.append({
            "event_id": event_id,
            "box_id": box_id,
            "incident_start_ts": incident_start_ts,
            "incident_end_ts": incident_end_ts,
            "incident_type": mongo.IncidentMeasurementType.FREQUENCY,
            "avg_deviation": numpy.average(incident_variations),
            "incident_classifications": [running_incident],
            "annotations": [],
            "metadata": {},
            "mongo_client": mongo_client
        })
    return incidents
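
frequency_variation is used above but not shown. Its contract, as implied by the calling code, is to return a pair of (incident classification or False, deviation from the reference frequency). A hedged sketch of that contract; the IncidentClassification member names and the comparison against freq_interruption are assumptions for illustration:

def frequency_variation(freq: float, freq_ref: float, freq_var_high: float,
                        freq_var_low: float, freq_interruption: float):
    # Deviation from the reference frequency; averaged by the caller as avg_deviation.
    variation = freq - freq_ref
    if freq <= freq_interruption:
        return mongo.IncidentClassification.FREQUENCY_INTERRUPTION, variation
    if variation >= freq_var_high:
        return mongo.IncidentClassification.FREQUENCY_SWELL, variation
    if variation <= -freq_var_low:
        return mongo.IncidentClassification.FREQUENCY_SAG, variation
    # Variation too small to count as an incident.
    return False, variation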
Example #5
def transient_incident_classifier(event_id: int, box_id: str, raw_waveform: numpy.ndarray, box_event_start_ts: int,
                                  configs: dict, opq_mongo_client: mongo.OpqMongoClient = None, logger=None):
    """
    Classifies transient waveform. Creates a Mongo Incident document
    :param event_id: Makai Event ID
    :param box_id: Box reporting event
    :param raw_waveform: Raw voltage waveform samples for the box event
    :param box_event_start_ts: Start timestamp of the event in milliseconds
    :param configs: Plugin configuration values (filter order, cutoff frequency, noise floor, max lull, ...)
    :param opq_mongo_client: Optional DB client to re-use (otherwise new one will be created)
    :param logger: Optional logger to use to print information
    :return: list of the classified incidents
    """

    mongo_client = mongo.get_default_client(opq_mongo_client)

    incidents = []
    meta = {}
    incident_classifications = []
    incident_flag = False

    waveforms = waveform_filter(raw_waveform, configs['filter_order'], configs['filter_cutoff_frequency'])
    candidate_transient_windows = transient_sliding_window(waveforms["filtered_waveform"], configs["noise_floor"],
                                                           configs["max_lull_ms"])

    if logger is not None:
        logger.debug("Calculating transients with {} segments.".format(len(candidate_transient_windows)))

    for window in candidate_transient_windows:
        windowed_waveforms = {"fundamental_waveform": waveforms["fundamental_waveform"][window[0]: window[1] + 1],
                              "filtered_waveform": waveforms["filtered_waveform"][window[0]: window[1] + 1],
                              "raw_waveform": waveforms["raw_waveform"][window[0]: window[1] + 1]}
        incident_start_ts = int(window[0] / constants.SAMPLES_PER_MILLISECOND + box_event_start_ts)
        incident_end_ts = int(incident_start_ts + (window[1] - window[0]) / constants.SAMPLES_PER_MILLISECOND)

        impulsive = impulsive_classifier(windowed_waveforms["filtered_waveform"], configs)
        if impulsive[0]:
            meta.update(impulsive[1])
            incident_classifications.append("IMPULSIVE_TRANSIENT")
            incident_flag = True

        else:
            arcing = arcing_classifier(windowed_waveforms["filtered_waveform"], configs)
            if arcing[0]:
                meta.update(arcing[1])
                incident_classifications.append("ARCING_TRANSIENT")
                incident_flag = True

            else:
                oscillatory = oscillatory_classifier(windowed_waveforms["filtered_waveform"], configs)
                if oscillatory[0]:
                    meta.update(oscillatory[1])
                    incident_classifications.append("OSCILLATORY_TRANSIENT")
                    incident_flag = True

                else:
                    periodic_notching = periodic_notching_classifier(windowed_waveforms["filtered_waveform"],
                                                                     windowed_waveforms["fundamental_waveform"],
                                                                     configs)
                    if periodic_notching[0]:
                        meta.update(periodic_notching[1])
                        incident_classifications.append("PERIODIC_NOTCHING_TRANSIENT")
                        incident_flag = True

        multiple_zero_xing = multiple_zero_xing_classifier(windowed_waveforms,
                                                           configs)
        if multiple_zero_xing[0]:
            meta.update(multiple_zero_xing[1])
            incident_classifications.append("MULTIPLE_ZERO_CROSSING_TRANSIENT")
            incident_flag = True

        if incident_flag:
            incidents.append({"event_id": event_id,
                              "box_id": box_id,
                              "incident_start_ts": incident_start_ts,
                              "incident_end_ts": incident_end_ts,
                              "incident_type": mongo.IncidentMeasurementType.TRANSIENT,
                              "max_deviation": numpy.max(numpy.abs(windowed_waveforms["filtered_waveform"])),
                              "incident_classifications": incident_classifications,
                              "annotations": [],
                              "metadata": meta,
                              "mongo_client": mongo_client})

        incident_flag = False
        incident_classifications = []
        meta = {}

    return incidents
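
Each returned dict carries everything needed to persist one incident. A hedged usage sketch that iterates the list and passes the fields positionally in the argument order shown in Example #1 (the exact store_incident signature may differ between versions of the mongo module):

def store_transient_incidents(incidents):
    # Persist each classified transient incident to Mongo.
    for incident in incidents:
        mongo.store_incident(incident["event_id"],
                             incident["box_id"],
                             incident["incident_start_ts"],
                             incident["incident_end_ts"],
                             incident["incident_type"],
                             incident["max_deviation"],
                             incident["incident_classifications"],
                             incident["annotations"],
                             incident["metadata"],
                             incident["mongo_client"])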