def mobile_app_availability_marker(raw_stream_id: uuid, stream_name: str, owner_id,
                                   dd_stream_name, CC: CerebralCortex, config: dict):
    """
    Mark windows of phone-battery data as mobile-app available/unavailable.

    Phone battery data is expected to be collected 24/7, so windows derived
    from it indicate whether the mobile app was running at all.

    :param raw_stream_id: id of the raw phone-battery stream to diagnose
    :param stream_name: name of the raw stream (recorded in output metadata)
    :param owner_id: id of the stream owner
    :param dd_stream_name: name of the data-diagnostic output stream
    :param CC: CerebralCortex connection object
    :param config: data-diagnostic configuration dict
    """
    try:
        # Derive a deterministic output-stream id from the raw stream id,
        # diagnostic stream name and owner. str() each component so this
        # works whether the ids arrive as uuid.UUID objects or as strings
        # (the original `raw_stream_id + ...` raised TypeError for UUIDs).
        app_availability_marker_stream_id = uuid.uuid3(
            uuid.NAMESPACE_DNS,
            str(raw_stream_id) + str(dd_stream_name) + str(owner_id) +
            "MOBILE APP AVAILABILITY MARKER")

        # Only process days that have not already been diagnosed.
        stream_days = get_stream_days(raw_stream_id,
                                      app_availability_marker_stream_id, CC)
        for day in stream_days:
            stream = CC.get_stream(raw_stream_id, day=day,
                                   data_type=DataSet.COMPLETE)
            if len(stream.data) > 0:
                windowed_data = window(stream.data,
                                       config['general']['window_size'], True)
                results = process_windows(windowed_data, config)
                merged_windows = merge_consective_windows(results)
                if len(merged_windows) > 0:
                    input_streams = [{"owner_id": owner_id,
                                      "id": str(raw_stream_id),
                                      "name": stream_name}]
                    output_stream = {
                        "id": app_availability_marker_stream_id,
                        "name": dd_stream_name,
                        "algo_type": config["algo_type"]["app_availability_marker"]
                    }
                    metadata = get_metadata(dd_stream_name, input_streams, config)
                    store(merged_windows, input_streams, output_stream,
                          metadata, CC, config)
    except Exception as e:
        # Best-effort batch job: report the failure and continue rather
        # than abort the whole diagnostic run.
        print(e)
def phone_screen_touch_marker(raw_stream_id: uuid, raw_stream_name: str, owner_id,
                              dd_stream_name, CC: CerebralCortex, config: dict,
                              start_time=None, end_time=None):
    """
    Count phone screen touches per window and store the merged windows.

    Not part of the core data-diagnostic suite; it only tallies screen
    touches.

    :param raw_stream_id: id of the raw screen-touch stream
    :param raw_stream_name: name of the raw stream (recorded in metadata)
    :param owner_id: id of the stream owner
    :param dd_stream_name: name of the data-diagnostic output stream
    :param CC: CerebralCortex connection object
    :param config: data-diagnostic configuration dict
    :param start_time: optional lower bound passed through to the data query
    :param end_time: optional upper bound passed through to the data query
    """
    try:
        # Deterministic output-stream id; str() each component so UUID
        # objects and strings both work (plain `+` on UUIDs raised
        # TypeError, which the broad except silently swallowed).
        screen_touch_stream_id = uuid.uuid3(
            uuid.NAMESPACE_DNS,
            str(raw_stream_id) + str(dd_stream_name) + str(owner_id) +
            "mobile phone screen touch marker")

        # Only process days that have not already been diagnosed.
        stream_days = get_stream_days(raw_stream_id, screen_touch_stream_id, CC)
        for day in stream_days:
            stream = CC.get_datastream(raw_stream_id,
                                       data_type=DataSet.COMPLETE,
                                       day=day,
                                       start_time=start_time,
                                       end_time=end_time)
            if len(stream.data) > 0:
                windowed_data = window(stream.data,
                                       config['general']['window_size'], True)
                results = process_windows(windowed_data)
                merged_windows = merge_consective_windows(results)
                if len(merged_windows) > 0:
                    input_streams = [{"owner_id": owner_id,
                                      "id": str(raw_stream_id),
                                      "name": raw_stream_name}]
                    # NOTE(review): algo_type reuses the
                    # "app_availability_marker" key — looks like a
                    # copy-paste from mobile_app_availability_marker;
                    # confirm a screen-touch algo_type exists in config.
                    output_stream = {
                        "id": screen_touch_stream_id,
                        "name": dd_stream_name,
                        "algo_type": config["algo_type"]["app_availability_marker"]
                    }
                    metadata = get_metadata(dd_stream_name, input_streams, config)
                    store(merged_windows, input_streams, output_stream,
                          metadata, CC, config)
    except Exception as e:
        # Best-effort batch job: report the failure and continue.
        print(e)
def battery_marker(raw_stream_id: uuid, stream_name: str, user_id,
                   dd_stream_name, CC: CerebralCortex, config: dict):
    """
    Label battery-percentage windows as powered-off or battery-low.

    All labeled windows (start, end, label) are stored with their metadata
    in the datastore.

    :param raw_stream_id: id of the raw battery stream to diagnose
    :param stream_name: name of the raw stream (used for per-stream rules
        in process_windows and recorded in metadata)
    :param user_id: id of the stream owner
    :param dd_stream_name: name of the data-diagnostic output stream
    :param CC: CerebralCortex connection object
    :param config: data-diagnostic configuration dict
    """
    try:
        # Deterministic output-stream id; str() each component so UUID
        # objects and strings both work (plain `+` on UUIDs raised
        # TypeError, which the broad except silently swallowed).
        battery_marker_stream_id = uuid.uuid3(
            uuid.NAMESPACE_DNS,
            str(raw_stream_id) + str(dd_stream_name) + str(user_id) +
            "BATTERY MARKER")

        # Only process days that have not already been diagnosed.
        stream_days = get_stream_days(raw_stream_id, battery_marker_stream_id, CC)
        for day in stream_days:
            stream = CC.get_stream(raw_stream_id, day=day,
                                   data_type=DataSet.COMPLETE)
            if len(stream.data) > 0:
                windowed_data = window(stream.data,
                                       config['general']['window_size'], True)
                results = process_windows(windowed_data, stream_name, config)
                merged_windows = merge_consective_windows(results)
                if len(merged_windows) > 0:
                    input_streams = [{"owner_id": user_id,
                                      "id": str(raw_stream_id),
                                      "name": stream_name}]
                    output_stream = {
                        "id": battery_marker_stream_id,
                        "name": dd_stream_name,
                        "algo_type": config["algo_type"]["battery_marker"]
                    }
                    # Attach powered-off / battery-low labels before storing.
                    labelled_windows = mark_windows(battery_marker_stream_id,
                                                    merged_windows, CC, config)
                    metadata = get_metadata(dd_stream_name, input_streams, config)
                    store(labelled_windows, input_streams, output_stream,
                          metadata, CC, config)
    except Exception as e:
        # Best-effort batch job: report the failure and continue.
        print(e)
def sensor_availability(raw_stream_id: uuid, stream_name: str, owner_id: uuid,
                        dd_stream_name, phone_physical_activity,
                        CC: CerebralCortex, config: dict):
    """
    Mark missing data as wireless disconnection when a participant walks
    away from the phone or sensor.

    :param raw_stream_id: id of the raw sensor stream to diagnose
    :param stream_name: name of the raw stream (recorded in metadata)
    :param owner_id: id of the stream owner
    :param dd_stream_name: name of the data-diagnostic output stream
    :param phone_physical_activity: physical-activity stream used to decide
        whether a gap is a disconnection
    :param CC: CerebralCortex connection object
    :param config: data-diagnostic configuration dict
    """
    # Deterministic output-stream id; str() each component so UUID objects
    # and strings both work (plain `+` on UUIDs raises TypeError — and this
    # function has no try/except to hide it).
    wireless_marker_stream_id = uuid.uuid3(
        uuid.NAMESPACE_DNS,
        str(raw_stream_id) + str(dd_stream_name) + str(owner_id))

    # Only process days that have not already been diagnosed.
    stream_days = get_stream_days(raw_stream_id, wireless_marker_stream_id, CC)
    for day in stream_days:
        # Load the stream data to be diagnosed.
        raw_stream = CC.get_stream(raw_stream_id, day=day,
                                   data_type=DataSet.COMPLETE)
        if len(raw_stream.data) > 0:
            windowed_data = window(raw_stream.data,
                                   config['general']['window_size'], True)
            results = process_windows(windowed_data, day, CC,
                                      phone_physical_activity, config)
            merged_windows = merge_consective_windows(results)
            if len(merged_windows) > 0:
                input_streams = [{"owner_id": owner_id,
                                  "id": str(raw_stream_id),
                                  "name": stream_name}]
                output_stream = {
                    "id": wireless_marker_stream_id,
                    "name": dd_stream_name,
                    "algo_type": config["algo_type"]["sensor_unavailable_marker"]
                }
                metadata = get_metadata(dd_stream_name, input_streams, config)
                store(merged_windows, input_streams, output_stream,
                      metadata, CC, config)
def attachment_marker(raw_stream_id: uuid, stream_name: str, owner_id: uuid,
                      dd_stream_name, CC: CerebralCortex, config: dict):
    """
    Label sensor data as sensor-on-body, sensor-off-body, or
    improper-attachment.

    All labeled windows (start, end, label) are stored with their metadata
    in the datastore.

    :param raw_stream_id: id of the raw sensor stream to diagnose
    :param stream_name: name of the raw stream (recorded in metadata)
    :param owner_id: id of the stream owner
    :param dd_stream_name: name of the data-diagnostic output stream
    :param CC: CerebralCortex connection object
    :param config: data-diagnostic configuration dict
    """
    # TODO: quality streams could be multiple so find the one computed with CC
    # Deterministic output-stream id; str() each component so UUID objects
    # and strings both work (plain `+` on UUIDs raises TypeError).
    attachment_marker_stream_id = uuid.uuid3(
        uuid.NAMESPACE_DNS,
        str(raw_stream_id) + str(dd_stream_name) + str(owner_id) +
        "ATTACHMENT MARKER")

    # Only process days that have not already been diagnosed.
    stream_days = get_stream_days(raw_stream_id, attachment_marker_stream_id, CC)
    for day in stream_days:
        # Load the stream data to be diagnosed.
        raw_stream = CC.get_stream(raw_stream_id, day=day,
                                   data_type=DataSet.COMPLETE)
        if len(raw_stream.data) > 0:
            windowed_data = window(raw_stream.data,
                                   config['general']['window_size'], True)
            results = process_windows(windowed_data, config)
            merged_windows = merge_consective_windows(results)
            # Skip the store when no windows were produced — consistent
            # with the other diagnostic markers (the original stored
            # unconditionally).
            if len(merged_windows) > 0:
                input_streams = [{"owner_id": owner_id,
                                  "id": str(raw_stream_id),
                                  "name": stream_name}]
                output_stream = {
                    "id": attachment_marker_stream_id,
                    "name": dd_stream_name,
                    "algo_type": config["algo_type"]["attachment_marker"]
                }
                metadata = get_metadata(dd_stream_name, input_streams, config)
                store(merged_windows, input_streams, output_stream,
                      metadata, CC, config)
def sensor_failure_marker(attachment_marker_stream_id: uuid, mshrv_accel_id: uuid,
                          mshrv_gyro_id: uuid, wrist: str, owner_id: uuid,
                          dd_stream_name, CC: CerebralCortex, config: dict):
    """
    Label sensor-on-body windows in which the accelerometer or gyroscope
    produced no usable data as a sensor failure.

    All labeled windows (start, end, label) are stored with their metadata
    in the datastore.

    :param attachment_marker_stream_id: id of the attachment-marker stream
        whose on-body windows are inspected
    :param mshrv_accel_id: id of the MotionSense HRV accelerometer stream
    :param mshrv_gyro_id: id of the MotionSense HRV gyroscope stream
    :param wrist: which wrist the sensor is worn on (embedded in the label)
    :param owner_id: id of the stream owner
    :param dd_stream_name: name of the data-diagnostic output stream
    :param CC: CerebralCortex connection object
    :param config: data-diagnostic configuration dict
    """
    # Deterministic output-stream id; str() each component so UUID objects
    # and strings both work (plain `+` on UUIDs raised TypeError).
    sensor_failure_stream_id = uuid.uuid3(
        uuid.NAMESPACE_DNS,
        str(attachment_marker_stream_id) + str(dd_stream_name) +
        str(owner_id) + "SENSOR FAILURE MARKER")

    # Only process days that have not already been diagnosed.
    stream_days = get_stream_days(attachment_marker_stream_id,
                                  sensor_failure_stream_id, CC)
    try:
        for day in stream_days:
            # Load the attachment-marker windows to be inspected.
            attachment_marker_stream = CC.get_stream(
                attachment_marker_stream_id, day, data_type=DataSet.COMPLETE)
            results = OrderedDict()
            if attachment_marker_stream.data:
                for marker_window in attachment_marker_stream.data:
                    # Only inspect windows where the sensor was on-body.
                    if "MOTIONSENSE-ON-BODY" in marker_window.sample:
                        mshrv_accel_stream = CC.get_stream(
                            mshrv_accel_id, day,
                            start_time=marker_window.start_time,
                            end_time=marker_window.end_time,
                            data_type=DataSet.ONLY_DATA)
                        mshrv_gyro_stream = CC.get_stream(
                            mshrv_gyro_id, day,
                            start_time=marker_window.start_time,
                            end_time=marker_window.end_time,
                            data_type=DataSet.ONLY_DATA)

                        results_accel = process_windows(mshrv_accel_stream, config)
                        results_gyro = process_windows(mshrv_gyro_stream, config)

                        key = marker_window.start_time, marker_window.end_time

                        # NOTE(review): sample labels keep the original
                        # "MOTIONSENE" spelling — downstream consumers may
                        # match on it; confirm before correcting. Also
                        # confirm the accel/gyro condition pairing: the
                        # accel-failure label fires when accel>0 and
                        # gyro<1, which looks inverted.
                        # setdefault fixes the original KeyError on a
                        # fresh OrderedDict (the bug silently dropped
                        # every failure window via the broad except).
                        if results_accel > 0 and results_gyro < 1:
                            sample = ("MOTIONSENE-HRV-" + str(wrist) +
                                      "ACCELEROMETER-FAILURE")
                            results.setdefault(key, []).append(
                                DataPoint(marker_window.start_time,
                                          marker_window.end_time, sample))
                        elif results_accel < 1 and results_gyro > 0:
                            sample = ("MOTIONSENE-HRV-" + str(wrist) +
                                      "GYRO-FAILURE")
                            results.setdefault(key, []).append(
                                DataPoint(marker_window.start_time,
                                          marker_window.end_time, sample))

                merged_windows = merge_consective_windows(results)
                if len(results) > 0:
                    input_streams = [{"owner_id": owner_id,
                                      "id": str(attachment_marker_stream_id),
                                      "name": attachment_marker_stream.name}]
                    output_stream = {
                        "id": sensor_failure_stream_id,
                        "name": dd_stream_name,
                        "algo_type": config["algo_type"]["sensor_failure"]
                    }
                    metadata = get_metadata(dd_stream_name, input_streams, config)
                    store(merged_windows, input_streams, output_stream,
                          metadata, CC, config)
    except Exception as e:
        # Best-effort batch job: report the failure and continue.
        print(e)
def packet_loss_marker(raw_stream_id: uuid, stream_name: str, owner_id: uuid,
                       dd_stream_name, CC: CerebralCortex, config: dict):
    """
    Label a window as packet-loss if received packets are fewer than the
    expected packets for the stream's sampling rate.

    All labeled windows (start, end, label) are stored with their metadata
    in the datastore.

    :param raw_stream_id: id of the raw sensor stream to diagnose
    :param stream_name: name of the raw stream; selects the sampling rate,
        acceptable-loss threshold and label from config
    :param owner_id: id of the stream owner
    :param dd_stream_name: name of the data-diagnostic output stream
    :param CC: CerebralCortex connection object
    :param config: data-diagnostic configuration dict
    :raises ValueError: if stream_name matches no known stream in config
    """
    # Deterministic output-stream id; str() each component so UUID objects
    # and strings both work (plain `+` on UUIDs raises TypeError).
    packetloss_marker_stream_id = uuid.uuid3(
        uuid.NAMESPACE_DNS,
        str(raw_stream_id) + str(dd_stream_name) + str(owner_id) +
        "PACKET LOSS MARKER")

    # Only process days that have not already been diagnosed.
    stream_days = get_stream_days(raw_stream_id,
                                  packetloss_marker_stream_id, CC)

    # Select per-stream sampling rate, acceptable-loss threshold and label.
    if stream_name == config["stream_names"]["autosense_ecg"]:
        sampling_rate = config["sampling_rate"]["ecg"]
        threshold_val = config["packet_loss_marker"]["ecg_acceptable_packet_loss"]
        label = config["labels"]["ecg_packet_loss"]
    elif stream_name == config["stream_names"]["autosense_rip"]:
        sampling_rate = config["sampling_rate"]["rip"]
        threshold_val = config["packet_loss_marker"]["rip_acceptable_packet_loss"]
        label = config["labels"]["rip_packet_loss"]
    elif stream_name in (config["stream_names"]["motionsense_hrv_accel_right"],
                         config["stream_names"]["motionsense_hrv_accel_left"]):
        sampling_rate = config["sampling_rate"]["motionsense_accel"]
        threshold_val = config["packet_loss_marker"][
            "motionsense_accel_acceptable_packet_loss"]
        # NOTE(review): the accel branch uses the *gyro* packet-loss label —
        # looks like a copy-paste; confirm whether config defines a
        # "motionsense_accel_packet_loss" label before changing.
        label = config["labels"]["motionsense_gyro_packet_loss"]
    elif stream_name in (config["stream_names"]["motionsense_hrv_gyro_right"],
                         config["stream_names"]["motionsense_hrv_gyro_left"]):
        sampling_rate = config["sampling_rate"]["motionsense_gyro"]
        threshold_val = config["packet_loss_marker"][
            "motionsense_gyro_acceptable_packet_loss"]
        label = config["labels"]["motionsense_gyro_packet_loss"]
    else:
        # Fail fast with a clear message; the original fell through and
        # crashed later with NameError on unbound locals.
        raise ValueError(
            "packet_loss_marker does not support stream: " + str(stream_name))

    for day in stream_days:
        # Load the stream data to be diagnosed.
        stream = CC.get_stream(raw_stream_id, day=day,
                               data_type=DataSet.COMPLETE)
        if len(stream.data) > 0:
            windowed_data = window(stream.data,
                                   config['general']['window_size'], True)
            results = process_windows(windowed_data, sampling_rate,
                                      threshold_val, label, config)
            merged_windows = merge_consective_windows(results)
            if len(merged_windows) > 0:
                input_streams = [{"owner_id": owner_id,
                                  "id": str(raw_stream_id),
                                  "name": stream_name}]
                output_stream = {
                    "id": packetloss_marker_stream_id,
                    "name": dd_stream_name,
                    "algo_type": config["algo_type"]["packet_loss_marker"]
                }
                metadata = get_metadata(dd_stream_name, input_streams, config)
                store(merged_windows, input_streams, output_stream,
                      metadata, CC, config)