def battery_marker(raw_stream_id: uuid, stream_name: str, owner_id, dd_stream_name, CC: CerebralCortex, config: dict,
                   start_time=None, end_time=None):
    """
    This algorithm uses battery percentages to decide whether device was powered-off or battery was low.
    All the labeled data (st, et, label) with its metadata are then stored in a datastore.
    :param raw_stream_id:
    :param CC:
    :param config:
    """

    try:
        # using stream_id, data-diagnostic-stream-id, and owner id to generate a unique stream ID for battery-marker
        battery_marker_stream_id = uuid.uuid3(uuid.NAMESPACE_DNS, str(raw_stream_id + dd_stream_name + owner_id))

        stream_days = get_stream_days(raw_stream_id, battery_marker_stream_id, CC)

        for day in stream_days:
            stream = CC.get_datastream(raw_stream_id, data_type=DataSet.COMPLETE, day=day)

            if len(stream.data) > 0:
                windowed_data = window(stream.data, config['general']['window_size'], True)
                results = process_windows(windowed_data, stream_name, config)

                merged_windows = merge_consective_windows(results)
                if len(merged_windows) > 0:
                    input_streams = [{"owner_id": owner_id, "id": str(raw_stream_id), "name": stream_name}]
                    output_stream = {"id": battery_marker_stream_id, "name": dd_stream_name,
                                     "algo_type": config["algo_type"]["battery_marker"]}
                    labelled_windows = mark_windows(battery_marker_stream_id, merged_windows, CC, config)
                    metadata = get_metadata(dd_stream_name, input_streams, config)
                    store(labelled_windows, input_streams, output_stream, metadata, CC, config)
    except Exception as e:
        print(e)
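The process_windows and mark_windows helpers used above are not included in this listing. The sketch below shows one plausible per-window battery classifier; the function name, label strings, and config key are assumptions for illustration, not the project's actual helper.

from collections import OrderedDict


def classify_battery_windows(windowed_data: OrderedDict, config: dict) -> OrderedDict:
    """Illustrative per-window battery classifier (assumed labels and config key)."""
    results = OrderedDict()
    for key, window in windowed_data.items():
        samples = [float(dp.sample) for dp in window]  # assumes DataPoint-like objects
        if not samples or max(samples) <= 0:
            results[key] = "POWERED-OFF"                                     # placeholder label
        elif min(samples) <= config["battery_marker"]["low_battery_level"]:  # assumed config key
            results[key] = "LOW-BATTERY"                                     # placeholder label
        else:
            results[key] = "BATTERY-OKAY"                                    # placeholder label
    return results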
def mobile_app_availability_marker(raw_stream_id: uuid, stream_name: str, owner_id, dd_stream_name, CC: CerebralCortex,
                                   config: dict, start_time=None, end_time=None):
    """
    This algorithm uses phone battery percentages to decide whether mobile app was available or unavailable.
    Theoretically, phone battery data shall be collected 24/7.
    :param raw_stream_id:
    :param CC:
    :param config:
    """

    try:
        # using stream_id, data-diagnostic-stream-id, and owner id to generate a unique stream ID for the app-availability-marker
        app_availability_marker_stream_id = uuid.uuid3(uuid.NAMESPACE_DNS, str(
            raw_stream_id + dd_stream_name + owner_id + "mobile app availability marker"))

        stream_days = get_stream_days(raw_stream_id, app_availability_marker_stream_id, CC)

        for day in stream_days:
            stream = CC.get_datastream(raw_stream_id, data_type=DataSet.COMPLETE, day=day)
            if len(stream.data) > 0:
                windowed_data = window(stream.data, config['general']['window_size'], True)
                results = process_windows(windowed_data, config)

                merged_windows = merge_consective_windows(results)
                if len(merged_windows) > 0:
                    input_streams = [{"owner_id": owner_id, "id": str(raw_stream_id), "name": stream_name}]
                    output_stream = {"id": app_availability_marker_stream_id, "name": dd_stream_name,
                                     "algo_type": config["algo_type"]["app_availability_marker"]}
                    metadata = get_metadata(dd_stream_name, input_streams, config)
                    store(merged_windows, input_streams, output_stream, metadata, CC, config)

    except Exception as e:
        print(e)
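For orientation, a hypothetical call to one of these markers might look like the following; CC is assumed to be an already-initialized CerebralCortex instance, config an already-loaded data-diagnostic configuration dict, and the stream names and IDs below are placeholders rather than values from this codebase.

import uuid

owner_id = str(uuid.uuid4())        # placeholder participant id
raw_stream_id = str(uuid.uuid4())   # placeholder phone-battery stream id

mobile_app_availability_marker(raw_stream_id,
                               "PHONE--BATTERY",                     # placeholder stream name
                               owner_id,
                               "DATA-DIAGNOSTIC--APP-AVAILABILITY",  # placeholder output stream name
                               CC, config)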
def phone_screen_touch_marker(raw_stream_id: uuid,
                              raw_stream_name: str,
                              owner_id,
                              dd_stream_name,
                              CC: CerebralCortex,
                              config: dict,
                              start_time=None,
                              end_time=None):
    """
    This is not part of core data diagnostic suite.
    It only calculates how many screen touches are there.
    :param raw_stream_id:
    :param CC:
    :param config:
    """

    try:
        # using stream_id, data-diagnostic-stream-id, and owner id to generate a unique stream ID for the screen-touch-marker
        screen_touch_stream_id = uuid.uuid3(
            uuid.NAMESPACE_DNS,
            str(raw_stream_id + dd_stream_name + owner_id +
                "mobile phone screen touch marker"))

        stream_days = get_stream_days(raw_stream_id, screen_touch_stream_id,
                                      CC)

        for day in stream_days:
            stream = CC.get_datastream(raw_stream_id,
                                       data_type=DataSet.COMPLETE,
                                       day=day,
                                       start_time=start_time,
                                       end_time=end_time)
            if len(stream.data) > 0:
                windowed_data = window(stream.data,
                                       config['general']['window_size'], True)
                results = process_windows(windowed_data)

                merged_windows = merge_consective_windows(results)
                if len(merged_windows) > 0:
                    input_streams = [{
                        "owner_id": owner_id,
                        "id": str(raw_stream_id),
                        "name": raw_stream_name
                    }]
                    output_stream = {
                        "id": screen_touch_stream_id,
                        "name": dd_stream_name,
                        "algo_type": config["algo_type"]["app_availability_marker"]
                    }
                    metadata = get_metadata(dd_stream_name, input_streams,
                                            config)
                    store(merged_windows, input_streams, output_stream,
                          metadata, CC, config)

    except Exception as e:
        print(e)
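Here process_windows is called with no extra arguments, so it presumably just summarizes each window. A minimal sketch of a touch-count-per-window helper is shown below as an assumption, not the actual implementation.

from collections import OrderedDict


def count_touches_per_window(windowed_data: OrderedDict) -> OrderedDict:
    """Illustrative helper: map each (start_time, end_time) window to its touch-event count."""
    results = OrderedDict()
    for key, window in windowed_data.items():
        results[key] = len(window)  # assumes one DataPoint per reported screen touch
    return results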
def sensor_failure_marker(attachment_marker_stream_id: uuid, mshrv_accel_id: uuid, mshrv_gyro_id: uuid, wrist: str,
                          owner_id: uuid, dd_stream_name, CC: CerebralCortex, config: dict):
    """
    Label a window as packet-loss if received packets are less than the expected packets.
    All the labeled data (st, et, label) with its metadata are then stored in a datastore.
    :param stream_id:
    :param CC_obj:
    :param config:
    """

    # using stream_id, data-diagnostic-stream-id, and owner id to generate a unique stream ID for the sensor-failure-marker
    sensor_failure_stream_id = uuid.uuid3(uuid.NAMESPACE_DNS, str(
        attachment_marker_stream_id + dd_stream_name + owner_id + "SENSOR FAILURE MARKER"))

    stream_days = get_stream_days(attachment_marker_stream_id, sensor_failure_stream_id, CC)

    try:
        for day in stream_days:
            # load stream data to be diagnosed
            attachment_marker_stream = CC.get_datastream(attachment_marker_stream_id, day, data_type=DataSet.COMPLETE)
            results = OrderedDict()
            if attachment_marker_stream.data:
                for marker_window in attachment_marker_stream.data:
                    if "MOTIONSENSE-ON-BODY" not in marker_window.sample:
                        continue
                    mshrv_accel_stream = CC.get_datastream(mshrv_accel_id, day, data_type=DataSet.ONLY_DATA,
                                                           start_time=marker_window.start_time,
                                                           end_time=marker_window.end_time)
                    mshrv_gyro_stream = CC.get_datastream(mshrv_gyro_id, day, data_type=DataSet.ONLY_DATA,
                                                          start_time=marker_window.start_time,
                                                          end_time=marker_window.end_time)

                    results_accel = process_windows(mshrv_accel_stream, config)
                    results_gyro = process_windows(mshrv_gyro_stream, config)

                    key = (marker_window.start_time, marker_window.end_time)

                    # if one sensor reports data while the other does not, label the window as a sensor failure
                    if results_accel > 0 and results_gyro < 1:
                        sample = "MOTIONSENSE-HRV-" + str(wrist) + "-ACCELEROMETER-FAILURE"
                        results.setdefault(key, []).append(DataPoint(marker_window.start_time, marker_window.end_time, sample))
                    elif results_accel < 1 and results_gyro > 0:
                        sample = "MOTIONSENSE-HRV-" + str(wrist) + "-GYRO-FAILURE"
                        results.setdefault(key, []).append(DataPoint(marker_window.start_time, marker_window.end_time, sample))

                merged_windows = merge_consective_windows(results)

                if len(results) > 0:
                    input_streams = [{"owner_id": owner_id, "id": str(attachment_marker_stream_id),
                                      "name": attachment_marker_stream.name}]
                    output_stream = {"id": sensor_failure_stream_id, "name": dd_stream_name,
                                     "algo_type": config["algo_type"]["sensor_failure"]}
                    metadata = get_metadata(dd_stream_name, input_streams, config)
                    store(merged_windows, input_streams, output_stream, metadata, CC, config)
    except Exception as e:
        print(e)
def attachment_marker(stream_id: uuid,
                      CC_obj: CerebralCortex,
                      config: dict,
                      start_time=None,
                      end_time=None):
    """
    Label sensor data as sensor-on-body, sensor-off-body, or improper-attachment.
    All the labeled data (st, et, label) with its metadata are then stored in a datastore.
    :param stream_id: UUID
    :param CC_obj: CerebralCortex object
    :param config: Data diagnostics configurations
    """

    stream = CC_obj.get_datastream(stream_id,
                                   data_type=DataSet.COMPLETE,
                                   start_time=start_time,
                                   end_time=end_time)

    results = OrderedDict()
    threshold_val = None
    stream_name = stream._name

    if stream_name == config["stream_names"]["autosense_ecg"]:
        threshold_val = config['attachment_marker']['ecg_on_body']
        label_on = config['labels']['ecg_on_body']
        label_off = config['labels']['ecg_off_body']
    elif stream_name == config["stream_names"]["autosense_rip"]:
        threshold_val = config['attachment_marker']['rip_on_body']
        label_on = config['labels']['rip_on_body']
        label_off = config['labels']['rip_off_body']
    else:
        raise ValueError("Incorrect sensor type.")

    windowed_data = window(stream.data, config['general']['window_size'],
                           False)

    for key, data in windowed_data.items():
        # remove outliers from a window data
        normal_values = outlier_detection(data)

        if stat.variance(normal_values) < threshold_val:
            results[key] = label_off
        else:
            results[key] = label_on

    merged_windows = merge_consective_windows(results)
    input_streams = [{"id": str(stream_id), "name": stream_name}]
    store(input_streams, merged_windows, CC_obj, config,
          config["algo_names"]["attachment_marker"])
def sensor_availability(raw_stream_id: uuid, stream_name: str, owner_id: uuid,
                        dd_stream_name, phone_physical_activity,
                        CC: CerebralCortex, config: dict):
    """
    Mark missing data as a wireless disconnection if a participant walks away from the phone or sensor.
    :param raw_stream_id:
    :param stream_name:
    :param owner_id:
    :param dd_stream_name:
    :param phone_physical_activity:
    :param CC:
    :param config:
    """

    # using stream_id, data-diagnostic-stream-id, and owner id to generate a unique stream ID for the sensor-unavailability marker
    wireless_marker_stream_id = uuid.uuid3(
        uuid.NAMESPACE_DNS, str(raw_stream_id + dd_stream_name + owner_id))

    stream_days = get_stream_days(raw_stream_id, wireless_marker_stream_id, CC)

    for day in stream_days:
        # load stream data to be diagnosed
        raw_stream = CC.get_datastream(raw_stream_id,
                                       day,
                                       data_type=DataSet.COMPLETE)
        if len(raw_stream.data) > 0:

            windowed_data = window(raw_stream.data,
                                   config['general']['window_size'], True)
            results = process_windows(windowed_data, day, CC,
                                      phone_physical_activity, config)
            merged_windows = merge_consective_windows(results)

            if len(merged_windows) > 0:
                input_streams = [{
                    "owner_id": owner_id,
                    "id": str(raw_stream_id),
                    "name": stream_name
                }]
                output_stream = {
                    "id": wireless_marker_stream_id,
                    "name": dd_stream_name,
                    "algo_type": config["algo_type"]["sensor_unavailable_marker"]
                }
                metadata = get_metadata(dd_stream_name, input_streams, config)
                store(merged_windows, input_streams, output_stream, metadata,
                      CC, config)
def packet_loss_marker(stream_id: uuid, CC_obj: CerebralCortex, config: dict, start_time=None, end_time=None):
    """
    Label a window as packet-loss if received packets are less than the expected packets.
    All the labeled data (st, et, label) with its metadata are then stored in a datastore.
    :param stream_id:
    :param CC_obj:
    :param config:
    """
    stream = CC_obj.get_datastream(stream_id, data_type=DataSet.COMPLETE, start_time=start_time, end_time=end_time)
    name = stream._name
    results = OrderedDict()

    if name == config["sensor_types"]["autosense_ecg"]:
        sampling_rate = config["sampling_rate"]["ecg"]
        threshold_val = config["packet_loss_marker"]["ecg_acceptable_packet_loss"]
        label = config["labels"]["ecg_packet_loss"]
        windowed_data = window(stream.data, config['general']['window_size'], False)
    elif name == config["sensor_types"]["autosense_rip"]:
        sampling_rate = config["sampling_rate"]["rip"]
        threshold_val = config["packet_loss_marker"]["rip_acceptable_packet_loss"]
        label = config["labels"]["rip_packet_loss"]
        windowed_data = window(stream.data, config['general']['window_size'], False)
    elif name == config["sensor_types"]["motionsense_accel"]:
        sampling_rate = config["sampling_rate"]["motionsense"]
        threshold_val = config["packet_loss_marker"]["motionsense_acceptable_packet_loss"]
        label = config["labels"]["motionsense_packet_loss"]
        motionsense_accel_magni = magnitude(stream)
        windowed_data = window(motionsense_accel_magni.data, config['general']['window_size'], False)
    else:
        raise ValueError("Incorrect sensor type.")

    for key, data in windowed_data.items():

        available_packets = len(data)
        expected_packets = sampling_rate * config['general']['window_size']

        if (available_packets / expected_packets) < threshold_val:
            results[key] = label

    merged_windows = merge_consective_windows(results)
    input_streams = [{"id": str(stream_id), "name": name}]
    store(input_streams, merged_windows, CC_obj, config, config["algo_names"]["packet_loss_marker"])
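As a worked example of the packet-loss test above (the sampling rate, window size, and threshold below are assumed values, not ones taken from the project's configuration):

sampling_rate = 64        # Hz, assumed ECG rate
window_size = 60          # seconds, assumed window size
threshold_val = 0.80      # assumed acceptable fraction of expected packets

available_packets = 3000
expected_packets = sampling_rate * window_size    # 3840

ratio = available_packets / expected_packets      # ~0.78
if ratio < threshold_val:
    print("window labeled as packet loss")        # 0.78 < 0.80, so this window is labeled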
def battery_marker(stream_id: uuid,
                   CC_obj: CerebralCortex,
                   config: dict,
                   start_time=None,
                   end_time=None):
    """
    This algorithm uses battery percentages to decide whether phone was powered-off or battery was low.
    All the labeled data (st, et, label) with its metadata are then stored in a datastore.
    :param stream_id:
    :param CC_obj:
    :param config:
    """
    results = OrderedDict()

    # stream = CC_obj.get_datastream(stream_id, data_type="all")

    stream = CC_obj.get_datastream(stream_id,
                                   data_type=DataSet.COMPLETE,
                                   start_time=start_time,
                                   end_time=end_time)
    windowed_data = window(stream.data, config['general']['window_size'], True)

    name = stream._name

    for key, data in windowed_data.items():
        dp = []
        for k in data:
            dp.append(float(k.sample))

        if name == config["sensor_types"]["phone_battery"]:
            results[key] = phone_battery(dp, config)
        elif name == config["sensor_types"]["motionsense_battery"]:
            results[key] = motionsense_battery(dp, config)
        elif name == config["sensor_types"]["autosense_battery"]:
            results[key] = autosense_battery(dp, config)
        else:
            raise ValueError("Incorrect sensor type.")

    merged_windows = merge_consective_windows(results)
    input_streams = [{"id": str(stream_id), "name": name}]
    store(input_streams, merged_windows, CC_obj, config,
          config["algo_names"]["battery_marker"])
def attachment_marker(raw_stream_id: uuid, stream_name: str, owner_id: uuid,
                      dd_stream_name, CC: CerebralCortex, config: dict):
    """
    Label sensor data as sensor-on-body, sensor-off-body, or improper-attachment.
    All the labeled data (st, et, label) with its metadata are then stored in a datastore.
    """
    # TODO: quality streams could be multiple so find the one computed with CC
    # using stream_id, data-diagnostic-stream-id, and owner id to generate a unique stream ID for the attachment-marker
    attachment_marker_stream_id = uuid.uuid3(
        uuid.NAMESPACE_DNS, str(raw_stream_id + dd_stream_name + owner_id))

    stream_days = get_stream_days(raw_stream_id, attachment_marker_stream_id,
                                  CC)

    for day in stream_days:
        # load stream data to be diagnosed
        raw_stream = CC.get_datastream(raw_stream_id,
                                       day,
                                       data_type=DataSet.COMPLETE)

        if len(raw_stream.data) > 0:
            windowed_data = window(raw_stream.data,
                                   config['general']['window_size'], True)
            results = process_windows(windowed_data, config)
            merged_windows = merge_consective_windows(results)

            input_streams = [{
                "owner_id": owner_id,
                "id": str(raw_stream_id),
                "name": stream_name
            }]
            output_stream = {
                "id": attachment_marker_stream_id,
                "name": dd_stream_name,
                "algo_type": config["algo_type"]["attachment_marker"]
            }
            metadata = get_metadata(dd_stream_name, input_streams, config)
            store(merged_windows, input_streams, output_stream, metadata, CC,
                  config)
def wireless_disconnection(stream_id: uuid,
                           CC_obj: CerebralCortex,
                           config: dict,
                           start_time=None,
                           end_time=None):
    """
    Analyze whether a sensor was unavailable due to a wireless disconnection
    or due to sensor powered off. This method automatically loads related
    accelerometer streams of an owner. All the labeled data (st, et, label)
    with its metadata are then stored in a datastore.
    Note: If an owner owns more than one accelerometer (for example, more
    than one motionsense accelerometer) then this might not work.
    :param stream_id: stream_id should be of "battery-powered-off"
    :param CC_obj:
    :param config:
    """

    results = OrderedDict()
    threshold = 0

    stream_info = CC_obj.get_datastream(stream_id,
                                        data_type=DataSet.ONLY_METADATA,
                                        start_time=start_time,
                                        end_time=end_time)

    owner_id = stream_info._owner
    name = stream_info._name
    stream_name = stream_info._name

    if name == config["sensor_types"]["autosense_ecg"]:
        threshold = config['sensor_unavailable_marker']['ecg']
        label = config['labels']['autosense_unavailable']
    elif name == config["sensor_types"]["autosense_rip"]:
        threshold = config['sensor_unavailable_marker']['rip']
        label = config['labels']['autosense_unavailable']
    elif name == config["sensor_types"]["motionsense_accel"]:
        threshold = config['sensor_unavailable_marker']['motionsense']
        label = config['labels']['motionsense_unavailable']
    else:
        raise ValueError("Incorrect sensor type.")

    battery_off_data = CC_obj.get_datastream(stream_id,
                                             data_type=DataSet.ONLY_DATA,
                                             start_time=start_time,
                                             end_time=end_time)

    if battery_off_data:
        if name == config["sensor_types"]["motionsense_accel"]:
            motionsense_accel_stream_id = CC_obj.get_stream_id_by_owner_id(
                owner_id, config["sensor_types"]["motionsense_accel"], "id")
            input_streams = [{"id": str(stream_id), "name": str(stream_name)},
                             {"id": str(motionsense_accel_stream_id),
                              "name": config["sensor_types"]["motionsense_accel"]}]
        else:
            x = CC_obj.get_stream_id_by_owner_id(
                owner_id, config["sensor_types"]["autosense_accel_x"])
            y = CC_obj.get_stream_id_by_owner_id(
                owner_id, config["sensor_types"]["autosense_accel_y"])
            z = CC_obj.get_stream_id_by_owner_id(
                owner_id, config["sensor_types"]["autosense_accel_z"])
            input_streams = [{"id": str(stream_id), "name": stream_name},
                             {"id": str(x), "name": config["sensor_types"]["autosense_accel_x"]},
                             {"id": str(y), "name": config["sensor_types"]["autosense_accel_y"]},
                             {"id": str(z), "name": config["sensor_types"]["autosense_accel_z"]}]

        for dp in battery_off_data:
            if dp.start_time != "" and dp.end_time != "":
                # get a window prior to a battery powered off
                start_time = dp.start_time - timedelta(
                    seconds=config['general']['window_size'])
                end_time = dp.start_time
                if name == config["sensor_types"]["motionsense_accel"]:
                    motionsense_accel_xyz = CC_obj.get_datastream(
                        motionsense_accel_stream_id,
                        start_time=start_time,
                        end_time=end_time,
                        data_type=DataSet.COMPLETE)
                    magnitudeValStream = magnitude(motionsense_accel_xyz)
                    magnitudeVals = []
                    for mv in magnitudeValStream.data:
                        magnitudeVals.append(mv.sample)

                else:
                    autosense_acc_x = CC_obj.get_datastream(
                        x,
                        start_time=start_time,
                        end_time=end_time,
                        data_type=DataSet.ONLY_DATA)
                    autosense_acc_y = CC_obj.get_datastream(
                        y,
                        start_time=start_time,
                        end_time=end_time,
                        data_type=DataSet.ONLY_DATA)
                    autosense_acc_z = CC_obj.get_datastream(
                        z,
                        start_time=start_time,
                        end_time=end_time,
                        data_type=DataSet.ONLY_DATA)

                    magnitudeVals = autosense_calculate_magnitude(
                        autosense_acc_x, autosense_acc_y, autosense_acc_z)

                if np.var(magnitudeVals) > threshold:
                    key = (dp.start_time, dp.end_time)
                    results[key] = label

        merged_windows = merge_consective_windows(results)
        store(input_streams, merged_windows, CC_obj, config,
              config["algo_names"]["sensor_unavailable_marker"])
def wireless_disconnection(stream_id: uuid, stream_name: str, owner_id: uuid,
                           CC_obj: CerebralCortex, config: dict):
    """
    Analyze whether a sensor was unavailable due to a wireless disconnection
    or due to sensor powered off. This method automatically loads related
    accelerometer streams of an owner. All the labeled data (st, et, label)
    with its metadata are then stored in a datastore.
    Note: If an owner owns more than one accelerometer (for example, more
    than one motionsense accelerometer) then this might not work.
    :param stream_id: stream_id should be of "battery-powered-off"
    :param CC_obj:
    :param config:
    """

    results = OrderedDict()

    stream_end_time = CC_obj.get_stream_start_end_time(stream_id)["end_time"]
    day = stream_end_time

    # load stream data to be diagnosed
    stream = CC_obj.get_datastream(stream_id, day, data_type=DataSet.COMPLETE)
    windowed_data = window(stream.data, config['general']['window_size'], True)

    owner_id = stream._owner
    stream_name = stream._name

    windowed_data = filter_battery_off_windows(stream_id, stream_name,
                                               windowed_data, owner_id, config,
                                               CC_obj)

    threshold = config['sensor_unavailable_marker']['autosense']
    label = config['labels']['autosense_unavailable']

    if windowed_data:
        # prepare input streams metadata
        x = all_stream_ids_names[config["stream_names"]["autosense_accel_x"]]
        y = all_stream_ids_names[config["stream_names"]["autosense_accel_y"]]
        z = all_stream_ids_names[config["stream_names"]["autosense_accel_z"]]

        input_streams = [{
            "id": str(stream_id),
            "name": stream_name
        }, {
            "id": str(x),
            "name": config["stream_names"]["autosense_accel_x"]
        }, {
            "id": str(y),
            "name": config["stream_names"]["autosense_accel_y"]
        }, {
            "id": str(z),
            "name": config["stream_names"]["autosense_accel_z"]
        }]

        for dp in windowed_data:
            if not dp.data and dp.start_time != "" and dp.end_time != "":
                start_time = dp.start_time - timedelta(
                    seconds=config['general']['window_size'])
                end_time = dp.start_time

                autosense_accel_x = CC_obj.get_datastream(
                    x,
                    start_time=start_time,
                    end_time=end_time,
                    data_type=DataSet.ONLY_DATA)
                autosense_accel_y = CC_obj.get_datastream(
                    y,
                    start_time=start_time,
                    end_time=end_time,
                    data_type=DataSet.ONLY_DATA)
                autosense_accel_z = CC_obj.get_datastream(
                    z,
                    start_time=start_time,
                    end_time=end_time,
                    data_type=DataSet.ONLY_DATA)

                magnitudeVals = magnitude_autosense_v1(autosense_accel_x,
                                                       autosense_accel_y,
                                                       autosense_accel_z)

                if np.var(magnitudeVals) > threshold:
                    key = (dp.start_time, dp.end_time)
                    results[key] = label

        merged_windows = merge_consective_windows(results)
        store(input_streams, merged_windows, CC_obj, config,
              config["algo_names"]["sensor_unavailable_marker"])
def packet_loss_marker(raw_stream_id: uuid, stream_name: str, owner_id: uuid,
                       dd_stream_name, CC: CerebralCortex, config: dict):
    """
    Label a window as packet-loss if received packets are less than the expected packets.
    All the labeled data (st, et, label) with its metadata are then stored in a datastore.
    :param raw_stream_id:
    :param CC_obj:
    :param config:
    """

    # using stream_id, data-diagnostic-stream-id, and owner id to generate a unique stream ID for the packet-loss-marker
    packetloss_marker_stream_id = uuid.uuid3(
        uuid.NAMESPACE_DNS, str(raw_stream_id + dd_stream_name + owner_id))

    stream_days = get_stream_days(raw_stream_id, packetloss_marker_stream_id,
                                  CC)

    if stream_name == config["stream_names"]["autosense_ecg"]:
        sampling_rate = config["sampling_rate"]["ecg"]
        threshold_val = config["packet_loss_marker"][
            "ecg_acceptable_packet_loss"]
        label = config["labels"]["ecg_packet_loss"]
    elif stream_name == config["stream_names"]["autosense_rip"]:
        sampling_rate = config["sampling_rate"]["rip"]
        threshold_val = config["packet_loss_marker"][
            "rip_acceptable_packet_loss"]
        label = config["labels"]["rip_packet_loss"]
    elif stream_name == config["stream_names"][
            "motionsense_hrv_accel_right"] or stream_name == config[
                "stream_names"]["motionsense_hrv_accel_left"]:
        sampling_rate = config["sampling_rate"]["motionsense_accel"]
        threshold_val = config["packet_loss_marker"][
            "motionsense_accel_acceptable_packet_loss"]
        label = config["labels"]["motionsense_gyro_packet_loss"]
    elif stream_name == config["stream_names"][
            "motionsense_hrv_gyro_right"] or stream_name == config[
                "stream_names"]["motionsense_hrv_gyro_left"]:
        sampling_rate = config["sampling_rate"]["motionsense_gyro"]
        threshold_val = config["packet_loss_marker"][
            "motionsense_gyro_acceptable_packet_loss"]
        label = config["labels"]["motionsense_gyro_packet_loss"]

    for day in stream_days:
        # load stream data to be diagnosed
        stream = CC.get_datastream(raw_stream_id,
                                   day,
                                   data_type=DataSet.COMPLETE)

        if len(stream.data) > 0:

            windowed_data = window(stream.data,
                                   config['general']['window_size'], True)

            results = process_windows(windowed_data, sampling_rate,
                                      threshold_val, label, config)
            merged_windows = merge_consective_windows(results)
            if len(merged_windows) > 0:
                input_streams = [{
                    "owner_id": owner_id,
                    "id": str(raw_stream_id),
                    "name": stream_name
                }]
                output_stream = {
                    "id": packetloss_marker_stream_id,
                    "name": dd_stream_name,
                    "algo_type": config["algo_type"]["packet_loss_marker"]
                }
                metadata = get_metadata(dd_stream_name, input_streams, config)
                store(merged_windows, input_streams, output_stream, metadata,
                      CC, config)
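The per-day packet_loss_marker above delegates the window-level decision to process_windows; a minimal sketch of that helper, using the same expected-versus-received packet logic as the earlier single-stream variant, is shown below. The helper name and return convention are assumptions.

from collections import OrderedDict


def packet_loss_per_window(windowed_data, sampling_rate, threshold_val, label, config):
    """Illustrative window-level packet-loss labeling (mirrors the earlier single-stream variant)."""
    results = OrderedDict()
    expected_packets = sampling_rate * config['general']['window_size']
    for key, window in windowed_data.items():
        if expected_packets > 0 and (len(window) / expected_packets) < threshold_val:
            results[key] = label
    return results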