import uuid
from collections import OrderedDict

# DataPoint, DataSet, CerebralCortex, get_stream_days, process_windows,
# merge_consective_windows, get_metadata, and store are provided by the
# surrounding CerebralCortex data-diagnostic package.


def sensor_failure_marker(attachment_marker_stream_id: uuid.UUID, mshrv_accel_id: uuid.UUID,
                          mshrv_gyro_id: uuid.UUID, wrist: str, owner_id: uuid.UUID,
                          dd_stream_name: str, CC: CerebralCortex, config: dict):
    """
    Label a window as a sensor failure when one of the two MotionSense HRV sensors
    (accelerometer or gyroscope) shows failure windows while the other does not.
    All labeled windows (start time, end time, label) and their metadata are then
    stored in the datastore.
    :param attachment_marker_stream_id: ID of the attachment-marker stream to diagnose
    :param mshrv_accel_id: MotionSense HRV accelerometer stream ID
    :param mshrv_gyro_id: MotionSense HRV gyroscope stream ID
    :param wrist: which wrist the sensor is worn on
    :param owner_id: stream owner's ID
    :param dd_stream_name: name of the data-diagnostic output stream
    :param CC: CerebralCortex instance
    :param config: data-diagnostic configuration
    """

    # combine the input stream ID, the data-diagnostic stream name, and the owner ID
    # into a deterministic stream ID for the sensor-failure marker
    sensor_failure_stream_id = uuid.uuid3(
        uuid.NAMESPACE_DNS,
        str(attachment_marker_stream_id) + dd_stream_name + str(owner_id) + "SENSOR FAILURE MARKER")

    stream_days = get_stream_days(attachment_marker_stream_id, sensor_failure_stream_id, CC)

    try:
        for day in stream_days:
            # load stream data to be diagnosed
            attachment_marker_stream = CC.get_datastream(attachment_marker_stream_id, day, data_type=DataSet.COMPLETE)
            results = OrderedDict()
            if attachment_marker_stream.data:
                for marker_window in attachment_marker_stream.data:
                    if "MOTIONSENSE-ON-BODY" in marker_window.sample:
                        mshrv_accel_stream = CC.get_datastream(mshrv_accel_id, day, data_type=DataSet.ONLY_DATA,
                                                               start_time=marker_window.start_time,
                                                               end_time=marker_window.end_time)
                        mshrv_gyro_stream = CC.get_datastream(mshrv_gyro_id, day, data_type=DataSet.ONLY_DATA,
                                                              start_time=marker_window.start_time,
                                                              end_time=marker_window.end_time)

                        results_accel = process_windows(mshrv_accel_stream, config)
                        results_gyro = process_windows(mshrv_gyro_stream, config)

                        key = (marker_window.start_time, marker_window.end_time)

                        # if only one of the two sensors shows failure windows (per the
                        # configured threshold), label that sensor as failed
                        if results_accel > 0 and results_gyro < 1:
                            sample = "MOTIONSENSE-HRV-" + str(wrist) + "-ACCELEROMETER-FAILURE"
                            results.setdefault(key, []).append(
                                DataPoint(marker_window.start_time, marker_window.end_time, sample))
                        elif results_accel < 1 and results_gyro > 0:
                            sample = "MOTIONSENSE-HRV-" + str(wrist) + "-GYRO-FAILURE"
                            results.setdefault(key, []).append(
                                DataPoint(marker_window.start_time, marker_window.end_time, sample))

                if len(results) > 0:
                    # merge adjacent windows carrying the same label before storing
                    merged_windows = merge_consective_windows(results)
                    input_streams = [{"owner_id": owner_id, "id": str(attachment_marker_stream_id),
                                      "name": attachment_marker_stream.name}]
                    output_stream = {"id": str(sensor_failure_stream_id), "name": dd_stream_name,
                                     "algo_type": config["algo_type"]["sensor_failure"]}
                    metadata = get_metadata(dd_stream_name, input_streams, config)
                    store(merged_windows, input_streams, output_stream, metadata, CC, config)
    except Exception as e:
        print(e)
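
A minimal invocation sketch, assuming a configured CerebralCortex deployment; every UUID, the wrist value, and the stream name below are hypothetical placeholders, and the config dict is a stand-in for the real data-diagnostic YAML:

# hypothetical usage of sensor_failure_marker; all IDs below are placeholders
CC = CerebralCortex("cerebralcortex.yml",
                    master="local[*]",
                    name="Data Diagnostic App",
                    time_zone="US/Central")
config = {"algo_type": {"sensor_failure": "sensor_failure_marker"}}  # stand-in config

sensor_failure_marker(
    attachment_marker_stream_id=uuid.UUID("00000000-0000-0000-0000-000000000001"),
    mshrv_accel_id=uuid.UUID("00000000-0000-0000-0000-000000000002"),
    mshrv_gyro_id=uuid.UUID("00000000-0000-0000-0000-000000000003"),
    wrist="LEFT",
    owner_id=uuid.UUID("00000000-0000-0000-0000-000000000004"),
    dd_stream_name="DATA-DIAGNOSTIC-SENSOR-FAILURE-MARKER",
    CC=CC,
    config=config)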
Example #2
    def test_DataPoint(self):
        ts = datetime.datetime.now()
        dp = DataPoint(start_time=ts, end_time=ts, sample={'Foo': 123}, metadata={'label': 'good'})
        self.assertDictEqual(dp.sample, {'Foo': 123})
        self.assertEqual(dp.start_time, ts)
        self.assertEqual(dp.end_time, ts)
        self.assertEqual(dp.metadata, {'label': 'good'})
Example #3
    @classmethod
    def setUpClass(cls):
        configuration_file = os.path.join(os.path.dirname(__file__),
                                          '../../../cerebralcortex.yml')
        cls.CC = CerebralCortex(configuration_file,
                                master="local[*]",
                                name="Data Diagnostic App",
                                time_zone="US/Central")
        cls.config = Configuration(
            filepath="../data_diagnostic/data_diagnostic_config.yml").config

        # build 480 battery samples, one second apart, in blocks of roughly 60
        # rows alternating between normal, dead, and low battery levels
        cls.sample_battery_data = []
        for row in range(1, 481):
            if row < 61:
                battery = 87.0
            elif row < 120:
                battery = 0.0
            elif row < 240:
                battery = 87.0
            elif row < 300:
                battery = 7.0
            elif row < 360:
                battery = 0.0
            else:
                battery = 60.0

            tz = pytz.timezone("US/Central")
            start_time = tz.localize(
                datetime.fromtimestamp(
                    int(round((time.time() + row) * 1000)) / 1e3))

            dp = DataPoint(start_time=start_time, sample=battery)
            cls.sample_battery_data.append(dp)
        cls.window_size = 60
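
For context, a minimal sketch of how a fixture like this is typically consumed; the helper below is hypothetical (not part of the test class) and simply chunks the samples into windows of cls.window_size, flagging windows dominated by zero readings:

def flag_dead_battery_windows(datapoints, window_size):
    # chunk the DataPoints into fixed-size windows and flag windows whose
    # battery samples are mostly 0.0 (phone likely off or battery dead)
    flags = []
    for i in range(0, len(datapoints), window_size):
        window = datapoints[i:i + window_size]
        zero_fraction = sum(1 for dp in window if dp.sample == 0.0) / len(window)
        flags.append(zero_fraction > 0.5)
    return flags

# e.g. flag_dead_battery_windows(cls.sample_battery_data, cls.window_size)
# would flag the 0.0 blocks built above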
Example #4
    @data.setter
    def data(self, value):
        # rebuild each incoming DataPoint so it carries this stream's identifier
        result = []
        for dp in value:
            result.append(
                DataPoint(self._identifier, dp.start_time, dp.end_time,
                          dp.sample))
        self._data = result
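
The @data.setter decorator above and the getter below follow the conventional Python property pairing; the excerpt omits both, so treat them as assumptions about the surrounding class:

@property
def data(self):
    # plain accessor on the same class; the setter above normalizes incoming
    # DataPoints so each carries this stream's identifier
    return self._data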
Example #5
def data_quality_led(windowed_data):
    """
    Compute a data-quality label for each window of LED data.

    :param windowed_data: a datastream windowed into a collection of windows
    :return: a list of window labels, one DataPoint per window
    """
    dps = []
    for key, window in windowed_data.items():
        # key is the (start_time, end_time) pair identifying the window
        quality_results = compute_quality(window)
        dps.append(DataPoint(key[0], key[1], quality_results))

    return dps
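
A minimal usage sketch for data_quality_led, assuming windows keyed by (start_time, end_time) tuples as the loop above expects; compute_quality and the sample values are stand-ins:

import datetime

t0 = datetime.datetime(2017, 4, 24, 0, 0, 0)
t1 = t0 + datetime.timedelta(seconds=60)

# windowed_data maps (start_time, end_time) -> the raw LED samples in that window
windowed_data = {(t0, t1): [0.41, 0.39, 0.40]}

labels = data_quality_led(windowed_data)
# -> [DataPoint(t0, t1, <whatever compute_quality returns>)]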
Example #6
    def test_06_store_stream(self):
        identifier = "6db98dfb-d6e8-4b27-8d55-95b20fa0f754"
        owner = "06634264-56bc-4c92-abd7-377dbbad79dd"
        name = "data-store-test"
        data_descriptor = {}
        execution_context = json.loads(
            '{"execution_context": {"algorithm": {"method": "cerebralcortex.data_processor.data_diagnostic.BatteryDataMarker"}}}'
        )
        annotations = {}
        datapoints = []
        stream_type = "datastream"
        start_time = datetime.datetime(2017, 4, 24, 0, 0, 1)
        end_time = datetime.datetime(2017, 4, 24, 0, 0, 2)
        localtz = timezone('US/Central')
        start_time = localtz.localize(start_time)
        end_time = localtz.localize(end_time)
        sample = {'Foo3': 123}

        dp1 = DataPoint(start_time=start_time,
                        end_time=end_time,
                        sample=sample)

        datapoints.append(dp1)

        ds = DataStream(identifier, owner, name, data_descriptor,
                        execution_context, annotations, stream_type,
                        start_time, end_time, datapoints)

        self.CC.save_datastream(ds)
        stream = self.CC.get_datastream(identifier, data_type=DataSet.COMPLETE)
        self.assertEqual(stream._identifier, identifier)
        self.assertEqual(stream._owner, owner)
        self.assertEqual(stream._name, name)
        self.assertEqual(stream._data_descriptor, data_descriptor)
        self.assertEqual(stream._execution_context, execution_context)
        self.assertEqual(stream._annotations, annotations)
        self.assertEqual(stream._datastream_type, stream_type)

        self.assertEqual(stream.data[0].start_time, start_time)
        self.assertEqual(stream.data[0].end_time, end_time)
        self.assertEqual(stream.data[0].sample, sample)
Example #7
    def test_07_stream_filter(self):
        identifier_anno = "6db98dfb-d6e8-4b27-8d55-95b20fa0f750"
        identifier_data = "6db98dfb-d6e8-4b27-8d55-95b20fa0f751"
        owner_id = "06634264-56bc-4c92-abd7-377dbbad79dd"
        name_anno = "data-store-test-annotation"
        name_data = "data-store-test-data"
        data_descriptor = {}
        execution_context_anno = json.loads(
            '{"execution_context": {"algorithm": {"method": "test.data_store.annotation.filter"}}}'
        )
        execution_context_data = json.loads(
            '{"execution_context": {"algorithm": {"method": "test.data_store.data.filter"}}}'
        )
        annotations_data = json.loads(
            '[{"name": "test-case","identifier": "6db98dfb-d6e8-4b27-8d55-95b20fa0f750"}]'
        )
        annotations_anno = {}
        datapoints_anno = []
        datapoints_data = []

        # register (or look up) the annotation stream's metadata
        result_anno = Metadata(self.CC).is_id_created(owner_id, name_anno,
                                                      execution_context_anno)
        if result_anno["status"] != "new":
            identifier_anno = result_anno["id"]

        Metadata(self.CC).store_stream_info(
            identifier_anno, owner_id, name_anno, data_descriptor,
            execution_context_anno, annotations_anno, "annotations",
            datetime.datetime(2017, 4, 24, 0, 0, 1),
            datetime.datetime(2017, 4, 24, 0, 0, 5), result_anno["status"])

        # register (or look up) the data stream's metadata
        result_data = Metadata(self.CC).is_id_created(owner_id, name_data,
                                                      execution_context_data)
        if result_data["status"] != "new":
            identifier_data = result_data["id"]

        Metadata(self.CC).store_stream_info(
            identifier_data, owner_id, name_data, data_descriptor,
            execution_context_data, annotations_data, "datastream",
            datetime.datetime(2017, 4, 24, 0, 0, 1),
            datetime.datetime(2017, 4, 24, 0, 0, 5), result_data["status"])

        for i in range(0, 5):
            if (i % 2 == 0):
                sample_anno = 'good'
            else:
                sample_anno = 'bad'
            sample_data = [i, i + 2, i + 3]
            start_time_anno = datetime.datetime(2017, 4, 24, 0, 0, i)
            end_time_anno = datetime.datetime(2017, 4, 24, 0, 0, (5 + i))

            start_time_data = datetime.datetime(2017, 4, 24, 0, 0, i)
            end_time_data = datetime.datetime(2017, 4, 24, 0, 0, (3 + i))

            localtz = timezone('US/Central')
            start_time_anno = localtz.localize(start_time_anno)
            end_time_anno = localtz.localize(end_time_anno)
            start_time_data = localtz.localize(start_time_data)
            end_time_data = localtz.localize(end_time_data)

            datapoints_anno.append(
                DataPoint(start_time=start_time_anno,
                          end_time=end_time_anno,
                          sample=sample_anno))
            datapoints_data.append(
                DataPoint(start_time=start_time_data,
                          end_time=end_time_data,
                          sample=sample_data))

        ds_anno = DataStream(uuid.UUID(identifier_anno), owner_id, name_anno,
                             data_descriptor, execution_context_anno,
                             annotations_anno, "annotations", start_time_anno,
                             end_time_anno, datapoints_anno)

        # the data stream carries the annotation link so filter_stream can
        # resolve "test-case" to the annotation stream above
        ds_data = DataStream(uuid.UUID(identifier_data), owner_id, name_data,
                             data_descriptor, execution_context_data,
                             annotations_data, "datastream", start_time_data,
                             end_time_data, datapoints_data)

        self.CC.save_datastream(ds_anno)
        self.CC.save_datastream(ds_data)

        filtered_stream = self.CC.filter_stream(identifier_data, "test-case",
                                                "good")

        self.assertEqual(len(filtered_stream), 5)

        for i in range(0, 5):
            sample_data = [i, i + 2, i + 3]
            start_time_data = datetime.datetime(2017, 4, 24, 0, 0, i)
            end_time_data = datetime.datetime(2017, 4, 24, 0, 0, (3 + i))
            start_time_data = localtz.localize(start_time_data)
            end_time_data = localtz.localize(end_time_data)

            self.assertEqual(filtered_stream[i].start_time, start_time_data)
            self.assertEqual(filtered_stream[i].end_time, end_time_data)
            self.assertEqual(filtered_stream[i].sample, sample_data)
Example #8
    def test_DataPoint_None(self):
        dp = DataPoint()
        self.assertIsNone(dp.start_time)
        self.assertIsNone(dp.end_time)
        self.assertIsNone(dp.sample)
        self.assertIsNone(dp.metadata)
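
Taken together, the constructor calls in these examples imply a DataPoint shape along the following lines; this is a sketch of that implied interface, not the library's actual definition (some examples also pass a leading identifier argument):

class DataPoint:
    # minimal shape implied by the tests above; the real CerebralCortex class
    # carries additional behavior
    def __init__(self, start_time=None, end_time=None, sample=None, metadata=None):
        self.start_time = start_time
        self.end_time = end_time
        self.sample = sample
        self.metadata = metadata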