def one_loop(hparam):
    """Run one hyper-parameter configuration end-to-end: load and validate the
    diary-annotated collection, label sleep with the HR algorithm and two
    adapted van Hees variants, compare all labels pairwise, and write the
    per-day results to ``<output_path>/exp_<exp_id>.csv``.

    ``hparam`` is a flat tuple (unpacked below), which keeps the function
    usable with e.g. ``multiprocessing.Pool.map`` over a parameter grid.
    """
    exp_id, data_path, diary_path, start_hour, end_hour, hr_quantile, hr_merge_blocks, hr_min_window_length, hr_volarity, output_path = hparam

    print("Q:", hr_quantile, "W:", hr_min_window_length, "T", hr_merge_blocks)

    exp = load_experiment(data_path, diary_path, start_hour)
    # Sentinel for epochs without activity: slightly below zero, so the
    # min_activity_threshold=0 flag below marks exactly these epochs invalid.
    exp.fill_no_activity(-0.0001)

    # --- Validation: flag bad epochs/days, then drop flagged data. ---
    va = Validator(exp)
    va.remove_wearables_without_diary()
    va.flag_epoch_physical_activity_less_than(min_activity_threshold=0)
    va.flag_epoch_null_cols(col_list=["hyp_act_x"])
    va.flag_day_max_nonwearing(max_non_wear_minutes_per_day=3 * 60)
    va.flag_day_if_invalid_epochs_larger_than(max_invalid_minutes_per_day=5 * 60)
    va.flag_day_without_diary()
    n_removed_days = va.remove_flagged_days()
    print("Removed %d days (non wearing)." % n_removed_days)
    n_users = va.remove_wearables_without_valid_days()
    print("Removed %d wearables." % n_users)

    # --- Sleep labelling with three algorithms, one output column each. ---
    sbd = SleepBoudaryDetector(exp)
    # 1) Heart-rate based labelling (the hyper-parameters under search).
    sbd.detect_sleep_boundaries(
        strategy="hr",
        output_col="hyp_sleep_period_hr",
        hr_quantile=hr_quantile,
        hr_volarity_threshold=hr_volarity,
        hr_rolling_win_in_minutes=5,
        hr_sleep_search_window=(start_hour, end_hour),
        hr_min_window_length_in_minutes=hr_min_window_length,
        hr_volatility_window_in_minutes=10,
        hr_merge_blocks_gap_time_in_min=hr_merge_blocks,
        hr_sleep_only_in_sleep_search_window=True,
        hr_only_largest_sleep_period=True)
    # 2) Adapted van Hees driven by triaxial activity (no angle columns).
    sbd.detect_sleep_boundaries(strategy="adapted_van_hees",
                                output_col="hyp_sleep_period_vanhees",
                                angle_cols=[],
                                angle_use_triaxial_activity=True,
                                angle_start_hour=start_hour,
                                angle_quantile=0.1,
                                angle_minimum_len_in_minutes=30,
                                angle_merge_tolerance_in_minutes=60)
    # 3) Adapted van Hees driven by precomputed pitch/roll angle columns.
    sbd.detect_sleep_boundaries(strategy="adapted_van_hees",
                                output_col="hyp_sleep_period_vanheespr",
                                angle_cols=["pitch", "roll"],
                                angle_use_triaxial_activity=False,
                                angle_start_hour=start_hour,
                                angle_quantile=0.1,
                                angle_minimum_len_in_minutes=30,
                                angle_merge_tolerance_in_minutes=60)

    # --- Per-wearable evaluation. ---
    df_acc = []
    mses = {}
    cohens = {}
    print("Calculating evaluation measures...")
    for w in exp.get_all_wearables():
        if w.data.empty:
            print("Data for PID %s is empty!" % w.get_pid())
            continue
        # Epoch-level 0/1 sleep labels from each source.
        sleep = {}
        sleep["diary"] = w.data[w.diary_sleep].astype(int)
        sleep["hr"] = w.data["hyp_sleep_period_hr"].astype(int)
        sleep["vanhees"] = w.data["hyp_sleep_period_vanhees"].astype(int)
        sleep["vanheespr"] = w.data["hyp_sleep_period_vanheespr"].astype(int)
        if sleep["diary"].shape[0] == 0:
            continue
        # Pairwise epoch-level agreement between all labelling sources.
        for comb in [
                "diary_hr", "diary_vanhees", "hr_vanhees", "diary_vanheespr",
                "hr_vanheespr", "vanhees_vanheespr"
        ]:
            a, b = comb.split("_")
            mses[comb] = mean_squared_error(sleep[a], sleep[b])
            cohens[comb] = cohen_kappa_score(sleep[a], sleep[b])
        # Per-day total sleep time (TST) from each labelling source.
        tst_diary = w.get_total_sleep_time_per_day(based_on_diary=True)
        tst_hr = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        tst_vanhees = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        tst_vanheespr = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheespr")
        # Per-day sleep onset times.
        onset_diary = w.get_onset_sleep_time_per_day(based_on_diary=True)
        onset_diary.name = "onset_diary"
        onset_hr = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        onset_hr.name = "onset_hr"
        onset_vanhees = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        onset_vanhees.name = "onset_vanhees"
        onset_vanheespr = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheespr")
        onset_vanheespr.name = "onset_vanheespr"
        # Per-day sleep offset times.
        offset_diary = w.get_offset_sleep_time_per_day(based_on_diary=True)
        offset_diary.name = "offset_diary"
        offset_hr = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        offset_hr.name = "offset_hr"
        offset_vanhees = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        offset_vanhees.name = "offset_vanhees"
        offset_vanheespr = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheespr")
        offset_vanheespr.name = "offset_vanheespr"
        # One row per day, columns = onsets/offsets/TSTs of every source.
        df_res = pd.concat(
            (onset_hr, onset_diary, onset_vanhees, onset_vanheespr, offset_hr,
             offset_diary, offset_vanhees, offset_vanheespr, tst_diary, tst_hr,
             tst_vanhees, tst_vanheespr),
            axis=1)
        df_res["pid"] = w.get_pid()
        # Broadcast the wearable-level agreement metrics onto every day row.
        for comb in [
                "diary_hr", "diary_vanhees", "hr_vanhees", "diary_vanheespr",
                "hr_vanheespr", "vanhees_vanheespr"
        ]:
            df_res["mse_" + comb] = mses[comb]
            df_res["cohens_" + comb] = cohens[comb]
        # View signals
        # w.change_start_hour_for_experiment_day(0)
        # w.view_signals(["activity", "hr", "sleep", "diary"],
        #                sleep_cols=["hyp_sleep_period_vanhees", "hyp_sleep_period_hr", "hyp_sleep_period_vanheespr"])
        # View signals
        # v = Viewer(w)
        # v.view_signals(["sleep"],
        #                sleep_cols=["hyp_sleep_period_hr", "hyp_sleep_period_vanheespr", "hyp_sleep_period_vanhees"],
        #                #alphas={'sleep': 0.3}
        #                )
        df_acc.append(df_res)
        # NOTE(review): exp_id is incremented once per wearable, yet only the
        # final value is written to the "exp_id" column below — confirm this
        # per-wearable increment is intended.
        exp_id += 1

    # Concatenate all wearables and tag rows with the configuration used.
    df_acc = pd.concat(df_acc)
    df_acc["exp_id"] = exp_id
    df_acc["quantile"] = hr_quantile
    df_acc["window_lengths"] = hr_min_window_length
    df_acc["time_merge_blocks"] = hr_merge_blocks
    df_acc.to_csv(os.path.join(output_path, "exp_%d.csv" % (exp_id)),
                  index=False)
if __name__ == "__main__": file_path = "../data/small_collection_mesa/*.csv" #file_path = "../data/collection_mesa_actigraphy/*.csv" diary_path = "../data/diaries/mesa_diary.csv" start_hour = 15 end_hour = 15 exp = setup_experiment(file_path, diary_path, start_hour) exp.fill_no_activity(-0.0001) # # nwd = NonWearingDetector(exp) # nwd.detect_non_wear(strategy="choi", wearing_col="hyp_wearing_choi") # # # TODO: fix bug when annotation_merge_tolerance_in_minutes < 0 sbd = SleepBoudaryDetector(exp) sbd.detect_sleep_boundaries(strategy="annotation", output_col="sleep_period_annotation", annotation_col="interval_sleep", annotation_merge_tolerance_in_minutes=30, annotation_only_largest_sleep_period=True) va = Validator(exp) va.flag_epoch_physical_activity_less_than(min_activity_threshold=0) va.flag_epoch_null_cols(col_list=["hyp_act_x"]) va.flag_epoch_nonwearing("hyp_wearing_choi") va.flag_day_sleep_length_less_than(sleep_period_col="sleep_period_annotation", min_sleep_in_minutes=3*60) # n_removed_days = va.remove_flagged_days() # print("Removed %d days (short sleep)." % n_removed_days) va.flag_day_sleep_length_more_than(sleep_period_col="sleep_period_annotation", max_sleep_in_minutes=12 * 60) # n_removed_days = va.remove_flagged_days()
def one_loop(hparam):
    """Run one hyper-parameter configuration on the multi-sensor collection:
    validate data, label sleep with HR (full-day and night-window) and three
    adapted van Hees variants (non-dominant wrist, dominant wrist, thigh),
    compare each against the diary, and write per-day results to
    ``<output_path>/exp_<exp_id>.csv``.

    ``hparam`` is a flat tuple (unpacked below) so the function can be mapped
    over a parameter grid, e.g. with ``multiprocessing.Pool``.
    """
    exp_id, data_path, diary_path, start_hour, end_hour, hr_quantile, hr_merge_blocks, hr_min_window_length, \
        hr_volarity, start_night, end_night, output_path = hparam

    print("Q:", hr_quantile, "L:", hr_min_window_length, "G", hr_merge_blocks)

    exp = load_experiment(data_path, diary_path, start_hour)
    exp.fill_no_activity(-10000)  # Maybe this needs to be changed or removed

    # --- Validation: flag bad epochs/days, then drop flagged data. ---
    va = Validator(exp)
    va.remove_wearables_without_diary()
    # Require pitch/roll angles from all three sensor placements.
    va.flag_epoch_null_cols([
        "pitch_mean_ndw", "roll_mean_ndw", "pitch_mean_dw", "roll_mean_dw",
        "pitch_mean_thigh", "roll_mean_thigh"
    ])
    va.flag_epoch_physical_activity_less_than(
        min_activity_threshold=-1000
    )  # Maybe this needs to be changed or removed
    va.flag_epoch_null_cols(col_list=["hyp_act_x"])
    va.flag_day_max_nonwearing(max_non_wear_minutes_per_day=3 * 60)
    va.flag_day_if_invalid_epochs_larger_than(max_invalid_minutes_per_day=5 * 60)
    va.flag_day_without_diary()
    n_removed_days = va.remove_flagged_days()
    print("Removed %d days (non wearing)." % n_removed_days)
    n_users = va.remove_wearables_without_valid_days()
    print("Removed %d wearables." % n_users)

    # --- Sleep labelling: 2 HR windows + 3 van Hees sensor placements. ---
    sbd = SleepBoudaryDetector(exp)
    # HR labelling searching the whole experiment day.
    sbd.detect_sleep_boundaries(
        strategy="hr",
        output_col="hyp_sleep_period_hrfullday",
        hr_quantile=hr_quantile,
        hr_volarity_threshold=hr_volarity,
        hr_volatility_window_in_minutes=10,
        hr_rolling_win_in_minutes=5,
        hr_sleep_search_window=(start_hour, end_hour),
        hr_min_window_length_in_minutes=hr_min_window_length,
        hr_merge_blocks_gap_time_in_min=hr_merge_blocks,
        hr_sleep_only_in_sleep_search_window=True,
        hr_only_largest_sleep_period=True,
    )
    # HR labelling restricted to the night window.
    sbd.detect_sleep_boundaries(
        strategy="hr",
        output_col="hyp_sleep_period_hrnight",
        hr_quantile=hr_quantile,
        hr_volarity_threshold=hr_volarity,
        hr_volatility_window_in_minutes=10,
        hr_rolling_win_in_minutes=5,
        hr_sleep_search_window=(start_night, end_night),
        hr_min_window_length_in_minutes=hr_min_window_length,
        hr_merge_blocks_gap_time_in_min=hr_merge_blocks,
        hr_sleep_only_in_sleep_search_window=True,
        hr_only_largest_sleep_period=True,
    )
    # Adapted van Hees on the non-dominant wrist angles.
    sbd.detect_sleep_boundaries(
        strategy="adapted_van_hees",
        output_col="hyp_sleep_period_vanheesndw",
        angle_cols=["pitch_mean_ndw", "roll_mean_ndw"],
        angle_start_hour=start_hour,
        angle_quantile=0.1,
        angle_minimum_len_in_minutes=30,
        angle_merge_tolerance_in_minutes=60,
        angle_only_largest_sleep_period=True,  # This was missing
    )
    # Adapted van Hees on the dominant wrist angles.
    sbd.detect_sleep_boundaries(
        strategy="adapted_van_hees",
        output_col="hyp_sleep_period_vanheesdw",
        angle_cols=["pitch_mean_dw", "roll_mean_dw"],
        angle_start_hour=start_hour,
        angle_quantile=0.1,
        angle_minimum_len_in_minutes=30,
        angle_merge_tolerance_in_minutes=60,
        angle_only_largest_sleep_period=True,  # This was missing
    )
    # Adapted van Hees on the thigh angles.
    sbd.detect_sleep_boundaries(
        strategy="adapted_van_hees",
        output_col="hyp_sleep_period_vanheesthigh",
        angle_cols=["pitch_mean_thigh", "roll_mean_thigh"],
        angle_start_hour=start_hour,
        angle_quantile=0.1,
        angle_minimum_len_in_minutes=30,
        angle_merge_tolerance_in_minutes=60,
        angle_only_largest_sleep_period=True,
    )

    # --- Per-wearable evaluation against the diary. ---
    df_acc = []
    mses = {}
    cohens = {}
    print("Calculating evaluation measures...")
    for w in exp.get_all_wearables():
        # Epoch-level 0/1 sleep labels from each source.
        sleep = {}
        sleep["diary"] = w.data[w.diary_sleep].astype(int)
        sleep["hrfullday"] = w.data["hyp_sleep_period_hrfullday"].astype(int)
        sleep["hrnight"] = w.data["hyp_sleep_period_hrnight"].astype(int)
        sleep["vanheesdw"] = w.data["hyp_sleep_period_vanheesdw"].astype(int)
        sleep["vanheesndw"] = w.data["hyp_sleep_period_vanheesndw"].astype(int)
        sleep["vanheesthigh"] = w.data["hyp_sleep_period_vanheesthigh"].astype(
            int)
        if sleep["diary"].shape[0] == 0:
            continue
        # Epoch-level agreement of each algorithm with the diary.
        for comb in [
                "diary_hrfullday", "diary_hrnight", "diary_vanheesndw",
                "diary_vanheesdw", "diary_vanheesthigh"
        ]:
            a, b = comb.split("_")
            mses[comb] = mean_squared_error(sleep[a], sleep[b])
            cohens[comb] = cohen_kappa_score(sleep[a], sleep[b])
        # Per-day total sleep time for each source.
        tst_diary = w.get_total_sleep_time_per_day(based_on_diary=True)
        tst_hrfullday = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hrfullday")
        tst_hrnight = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hrnight")
        tst_vanheesndw = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesndw")
        tst_vanheesdw = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesdw")
        tst_vanheesthigh = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesthigh")
        # Per-day sleep onset times.
        onset_diary = w.get_onset_sleep_time_per_day(based_on_diary=True)
        onset_diary.name = "onset_diary"
        onset_hrfullday = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hrfullday")
        onset_hrfullday.name = "onset_hrfullday"
        onset_hrnight = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hrnight")
        onset_hrnight.name = "onset_hrnight"
        onset_vanheesndw = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesndw")
        onset_vanheesndw.name = "onset_vanheesndw"
        onset_vanheesdw = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesdw")
        onset_vanheesdw.name = "onset_vanheesdw"
        onset_vanheesthigh = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesthigh")
        onset_vanheesthigh.name = "onset_vanheesthigh"
        # Per-day sleep offset times.
        offset_diary = w.get_offset_sleep_time_per_day(based_on_diary=True)
        offset_diary.name = "offset_diary"
        offset_hrfullday = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hrfullday")
        offset_hrfullday.name = "offset_hrfullday"
        offset_hrnight = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hrnight")
        offset_hrnight.name = "offset_hrnight"
        offset_vanheesndw = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesndw")
        offset_vanheesndw.name = "offset_vanheesndw"
        offset_vanheesdw = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesdw")
        offset_vanheesdw.name = "offset_vanheesdw"
        offset_vanheesthigh = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheesthigh")
        offset_vanheesthigh.name = "offset_vanheesthigh"
        # One row per day, columns = onsets/offsets/TSTs of every source.
        df_res = pd.concat(
            (onset_diary, onset_hrfullday, onset_hrnight, onset_vanheesndw,
             onset_vanheesdw, onset_vanheesthigh, offset_diary,
             offset_hrfullday, offset_hrnight, offset_vanheesndw,
             offset_vanheesdw, offset_vanheesthigh, tst_diary, tst_hrfullday,
             tst_hrnight, tst_vanheesndw, tst_vanheesdw, tst_vanheesthigh),
            axis=1)
        df_res["pid"] = w.get_pid()
        # Broadcast the wearable-level agreement metrics onto every day row.
        for comb in [
                "diary_hrfullday", "diary_hrnight", "diary_vanheesndw",
                "diary_vanheesdw", "diary_vanheesthigh"
        ]:
            df_res["mse_" + comb] = mses[comb]
            df_res["cohens_" + comb] = cohens[comb]
        # View signals
        # w.view_signals(["sleep", "diary"], sleep_cols=["hyp_sleep_period_vanheesthigh"], others=["pitch_mean_thigh", "roll_mean_thigh"])
        # w.view_signals(["sleep", "diary"], sleep_cols=["hyp_sleep_period_vanheesndw", "hyp_sleep_period_vanheesthigh"],
        #                others=["pitch_mean_thigh", "roll_mean_thigh", "hyp_invalid"])
        # v = Viewer(w)
        # v.view_signals(["sleep", "diary"],
        #                sleep_cols=["hyp_sleep_period_hrfullday", "hyp_sleep_period_hrnight",
        #                            "hyp_sleep_period_vanheesndw", "hyp_sleep_period_vanheesdw",
        #                            "hyp_sleep_period_vanheesthigh"],
        #                colors=["green", "black", "blue", "orange", "yellow", "pink", "purple"],
        #                #alphas={'sleep': 0.3}
        #                )
        df_acc.append(df_res)
        # NOTE(review): exp_id is incremented once per wearable, yet only the
        # final value is written to the "exp_id" column below — confirm this
        # per-wearable increment is intended.
        exp_id += 1

    # Concatenate all wearables and tag rows with the configuration used.
    df_acc = pd.concat(df_acc)
    df_acc["exp_id"] = exp_id
    df_acc["quantile"] = hr_quantile
    df_acc["window_lengths"] = hr_min_window_length
    df_acc["time_merge_blocks"] = hr_merge_blocks
    df_acc.to_csv(os.path.join(output_path, "exp_%d.csv" % (exp_id)),
                  index=False)
def one_loop(hparam):
    """Run one hyper-parameter configuration on a PSG-annotated MESA
    collection: validate data, label sleep from the PSG annotation and with
    the HR algorithm, compare diary/PSG/HR pairwise, and write per-day
    results to ``<output_path>/exp_<exp_id>.csv``.

    ``hparam`` is a flat tuple (unpacked below), making the function
    mappable over a parameter grid.
    """
    exp_id, file_path, diary_path, start_hour, end_hour, hr_quantile, hr_merge_blocks, hr_min_window_length, output_path = hparam

    exp = setup_experiment(file_path, diary_path, start_hour)
    # Sentinel for epochs without activity: slightly below zero so the
    # min_activity_threshold=0 flag below marks exactly these epochs invalid.
    exp.fill_no_activity(-0.0001)

    print("Q:", hr_quantile, "W:", hr_min_window_length, "T", hr_merge_blocks)

    # --- Validation: flag bad epochs/days, then drop flagged data. ---
    va = Validator(exp)
    va.remove_wearables_without_diary()
    va.flag_epoch_physical_activity_less_than(min_activity_threshold=0)
    va.flag_epoch_null_cols(col_list=["hyp_act_x"])
    va.flag_day_max_nonwearing(max_non_wear_minutes_per_day=3*60)
    va.flag_day_if_invalid_epochs_larger_than(max_invalid_minutes_per_day=5 * 60)
    va.flag_day_without_diary()
    n_removed_days = va.remove_flagged_days()
    print("Removed %d days (non wearing)." % n_removed_days)
    n_users = va.remove_wearables_without_valid_days()
    print("Removed %d wearables." % n_users)

    # --- Sleep labelling: PSG annotation as reference + HR algorithm. ---
    sbd = SleepBoudaryDetector(exp)
    sbd.detect_sleep_boundaries(strategy="annotation",
                                output_col="hyp_sleep_period_psg",
                                annotation_col="hyp_annotation",
                                annotation_merge_tolerance_in_minutes=300)
    sbd.detect_sleep_boundaries(strategy="hr",
                                output_col="hyp_sleep_period_hr",
                                hr_quantile=hr_quantile,
                                hr_volarity_threshold=5,
                                hr_rolling_win_in_minutes=5,
                                hr_sleep_search_window=(start_hour, end_hour),
                                hr_min_window_length_in_minutes=hr_min_window_length,
                                hr_volatility_window_in_minutes=10,
                                hr_merge_blocks_gap_time_in_min=hr_merge_blocks,
                                hr_sleep_only_in_sleep_search_window=True,
                                hr_only_largest_sleep_period=True)

    # --- Per-wearable evaluation. ---
    df_acc = []
    mses = {}
    cohens = {}
    print("Calculating evaluation measures...")
    for w in tqdm(exp.get_all_wearables()):
        if w.data.empty:
            print("Data for PID %s is empty!" % w.get_pid())
            continue
        # Epoch-level 0/1 sleep labels from each source.
        sleep = {}
        sleep["diary"] = w.data[w.diary_sleep].astype(int)
        sleep["hr"] = w.data["hyp_sleep_period_hr"].astype(int)
        sleep["psg"] = w.data["hyp_sleep_period_psg"].astype(int)
        # Pairwise epoch-level agreement between the three sources.
        for comb in ["diary_hr", "diary_psg", "psg_hr"]:
            a, b = comb.split("_")
            mses[comb] = mean_squared_error(sleep[a], sleep[b])
            cohens[comb] = cohen_kappa_score(sleep[a], sleep[b])
        # Per-day total sleep time, onset and offset for each source.
        tst_diary = w.get_total_sleep_time_per_day(based_on_diary=True)
        tst_psg = w.get_total_sleep_time_per_day(sleep_col="hyp_sleep_period_psg")
        tst_hr = w.get_total_sleep_time_per_day(sleep_col="hyp_sleep_period_hr")
        onset_diary = w.get_onset_sleep_time_per_day(based_on_diary=True)
        onset_diary.name = "onset_diary"
        onset_psg = w.get_onset_sleep_time_per_day(sleep_col="hyp_sleep_period_psg")
        onset_psg.name = "onset_psg"
        onset_hr = w.get_onset_sleep_time_per_day(sleep_col="hyp_sleep_period_hr")
        onset_hr.name = "onset_hr"
        offset_diary = w.get_offset_sleep_time_per_day(based_on_diary=True)
        offset_diary.name = "offset_diary"
        offset_psg = w.get_offset_sleep_time_per_day(sleep_col="hyp_sleep_period_psg")
        offset_psg.name = "offset_psg"
        offset_hr = w.get_offset_sleep_time_per_day(sleep_col="hyp_sleep_period_hr")
        offset_hr.name = "offset_hr"
        # One row per day, columns = onsets/offsets/TSTs of every source.
        df_res = pd.concat((onset_hr, onset_psg, onset_diary,
                            offset_hr, offset_psg, offset_diary,
                            tst_psg, tst_hr, tst_diary), axis=1)
        df_res["pid"] = w.get_pid()
        # Broadcast the wearable-level agreement metrics onto every day row.
        for comb in ["diary_hr", "diary_psg", "psg_hr"]:
            df_res["mse_" + comb] = mses[comb]
            df_res["cohens_" + comb] = cohens[comb]
        # View signals
        # w.view_signals(["sleep", "diary"],
        #                others=["hyp_annotation", "mean_hr"],
        #                sleep_cols=["hyp_sleep_period_psg", "hyp_sleep_period_hr"],
        #                frequency="30S"
        #                )
        df_acc.append(df_res)

    # Concatenate all wearables and tag rows with the configuration used.
    df_acc = pd.concat(df_acc)
    df_acc["exp_id"] = exp_id
    df_acc["quantile"] = hr_quantile
    df_acc["window_lengths"] = hr_min_window_length
    df_acc["time_merge_blocks"] = hr_merge_blocks
    df_acc.to_csv(os.path.join(output_path, "exp_%d.csv" % (exp_id)),
                  index=False)
#print(w.data.head(10)) #Define parameters fo HR-based sleep algorithm hr_quantile = 0.4 hr_min_window_length = 60 hr_merge_blocks = 180 hr_volarity = 5 #Time to consider as start and end of each experiment day - if equal the sleep labelling occurs #over the entire 24 hours start_hour = 18 end_hour = 18 # Label sleep using HypnosPy HR algorithms sbd = SleepBoudaryDetector(w) sbd.detect_sleep_boundaries( strategy="hr", output_col="hyp_sleep_period_hr", hr_quantile=hr_quantile, hr_volarity_threshold=hr_volarity, hr_rolling_win_in_minutes=5, hr_sleep_search_window=(start_hour, end_hour), hr_min_window_length_in_minutes=hr_min_window_length, hr_volatility_window_in_minutes=10, hr_merge_blocks_gap_time_in_min=hr_merge_blocks, hr_sleep_only_in_sleep_search_window=True, hr_only_largest_sleep_period=True) #Plot sleep labels together with HR and acitivty signals
va.flag_day_without_diary()

# Accounting for removed days and subjects (referred to as wearables).
n_removed_days = va.remove_flagged_days()
print("Removed %d days (non wearing)." % n_removed_days)
n_users = va.remove_wearables_without_valid_days()
print("Removed %d wearables." % n_users)

# ### 5. Sleep Labelling
# Using 3 algorithms:
# * HypnosPy - HR-based
# * Adapted van Hees - angle-based
# * Adapted van Hees - angle-based, using pitch and roll
# (FIX: the lines above were raw markdown-cell text embedded in the code,
# which is a syntax error in a .py file — converted to comments.)
sbd = SleepBoudaryDetector(exp)
# 1) Heart-rate based labelling.
sbd.detect_sleep_boundaries(strategy="hr",
                            output_col="hyp_sleep_period_hr",
                            hr_quantile=hr_quantile,
                            hr_volarity_threshold=hr_volarity,
                            hr_rolling_win_in_minutes=5,
                            hr_sleep_search_window=(start_hour, end_hour),
                            hr_min_window_length_in_minutes=hr_min_window_length,
                            hr_volatility_window_in_minutes=10,
                            hr_merge_blocks_gap_time_in_min=hr_merge_blocks,
                            hr_sleep_only_in_sleep_search_window=True,
                            hr_only_largest_sleep_period=True)
# 2) Adapted van Hees driven by triaxial activity (no angle columns).
sbd.detect_sleep_boundaries(strategy="adapted_van_hees",
                            output_col="hyp_sleep_period_vanhees",
                            angle_cols=[],
                            angle_use_triaxial_activity=True,
                            angle_start_hour=start_hour,
                            angle_quantile=0.1,
                            angle_minimum_len_in_minutes=30,
                            angle_merge_tolerance_in_minutes=60)
# 3) Adapted van Hees driven by precomputed pitch/roll angle columns.
# FIX: this call was truncated mid-argument-list in the source; completed it
# to match the identical pitch/roll van Hees call earlier in this file.
sbd.detect_sleep_boundaries(strategy="adapted_van_hees",
                            output_col="hyp_sleep_period_vanheespr",
                            angle_cols=["pitch", "roll"],
                            angle_use_triaxial_activity=False,
                            angle_start_hour=start_hour,
                            angle_quantile=0.1,
                            angle_minimum_len_in_minutes=30,
                            angle_merge_tolerance_in_minutes=60)
def one_loop(hparam):
    """Run one hyper-parameter configuration on a PSG-annotated collection
    (no diary): label sleep from the PSG annotation, the HR algorithm and the
    triaxial adapted van Hees variant, compare them pairwise, and write
    per-day results to ``<output_path>/exp_<exp_id>.csv``.

    ``hparam`` is a flat tuple (unpacked below), making the function
    mappable over a parameter grid.
    """
    exp_id, file_path, start_hour, end_hour, hr_quantile, hr_merge_blocks, hr_min_window_length, output_path = hparam

    exp = load_experiment(file_path, start_hour)
    # Sentinel for epochs without activity.
    exp.fill_no_activity(-0.0001)

    print("Q:", hr_quantile, "L:", hr_min_window_length, "G", hr_merge_blocks)

    # --- Sleep labelling: PSG annotation as reference + two algorithms. ---
    sbd = SleepBoudaryDetector(exp)
    sbd.detect_sleep_boundaries(strategy="annotation",
                                annotation_col="hyp_annotation",
                                output_col="hyp_sleep_period_psg",
                                annotation_merge_tolerance_in_minutes=300)
    sbd.detect_sleep_boundaries(
        strategy="hr",
        output_col="hyp_sleep_period_hr",
        hr_quantile=hr_quantile,
        hr_volarity_threshold=5,
        hr_rolling_win_in_minutes=5,
        hr_sleep_search_window=(start_hour, end_hour),
        hr_min_window_length_in_minutes=hr_min_window_length,
        hr_volatility_window_in_minutes=10,
        hr_merge_blocks_gap_time_in_min=hr_merge_blocks,
        hr_sleep_only_in_sleep_search_window=True,
        hr_only_largest_sleep_period=True)
    sbd.detect_sleep_boundaries(strategy="adapted_van_hees",
                                output_col="hyp_sleep_period_vanhees",
                                angle_cols=[],
                                angle_use_triaxial_activity=True,
                                angle_start_hour=start_hour,
                                angle_quantile=0.1,
                                angle_minimum_len_in_minutes=30,
                                angle_merge_tolerance_in_minutes=60)

    # --- Per-wearable evaluation. ---
    df_acc = []
    mses = {}
    cohens = {}
    print("Calculating evaluation measures...")
    for w in tqdm(exp.get_all_wearables()):
        # Epoch-level 0/1 sleep labels from each source.
        sleep = {}
        sleep["psg"] = w.data["hyp_sleep_period_psg"].astype(int)
        sleep["hr"] = w.data["hyp_sleep_period_hr"].astype(int)
        sleep["vanhees"] = w.data["hyp_sleep_period_vanhees"].astype(int)
        # Pairwise epoch-level agreement between the three sources.
        for comb in ["psg_hr", "psg_vanhees", "hr_vanhees"]:
            a, b = comb.split("_")
            mses[comb] = mean_squared_error(sleep[a], sleep[b])
            cohens[comb] = cohen_kappa_score(sleep[a], sleep[b])
        # Per-day total sleep time, onset and offset for each source.
        tst_psg = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_psg")
        tst_hr = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        tst_vanhees = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        onset_psg = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_psg")
        onset_psg.name = "onset_psg"
        onset_hr = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        onset_hr.name = "onset_hr"
        onset_vanhees = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        onset_vanhees.name = "onset_vanhees"
        offset_psg = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_psg")
        offset_psg.name = "offset_psg"
        offset_hr = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        offset_hr.name = "offset_hr"
        offset_vanhees = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        offset_vanhees.name = "offset_vanhees"
        # One row per day, columns = onsets/offsets/TSTs of every source.
        df_res = pd.concat(
            (onset_hr, onset_psg, onset_vanhees, offset_hr, offset_psg,
             offset_vanhees, tst_psg, tst_hr, tst_vanhees),
            axis=1)
        df_res["pid"] = w.get_pid()
        # Broadcast the wearable-level agreement metrics onto every day row.
        for comb in ["psg_hr", "psg_vanhees", "hr_vanhees"]:
            df_res["mse_" + comb] = mses[comb]
            df_res["cohens_" + comb] = cohens[comb]
        df_acc.append(df_res)

    # Concatenate all wearables and tag rows with the configuration used.
    df_acc = pd.concat(df_acc)
    df_acc["exp_id"] = exp_id
    df_acc["quantile"] = hr_quantile
    df_acc["window_lengths"] = hr_min_window_length
    df_acc["time_merge_blocks"] = hr_merge_blocks
    df_acc.to_csv(os.path.join(output_path, "exp_%d.csv" % exp_id),
                  index=False)
def investigate_one_wearable(wid):
    """Interactively inspect one MESA actigraphy wearable: label sleep from
    the "interval_sleep" annotation, run the validation report, visualize the
    signals before and after removing invalid/non-consecutive days, and print
    removal statistics.

    :param wid: MESA subject id used to build the input CSV filename.
    """
    start_hour = 15
    min_consecutive_days = 5

    w0 = setup_experiment(
        MESAPreProcessing,
        '../data/collection_mesa_actigraphy/mesa-sleep-' + wid + '.csv',
        start_hour)
    # Sentinel for epochs without activity (slightly below zero so the
    # min_activity_threshold=0 flag below catches them).
    w0.fill_no_activity(-0.0001)
    w0.overall_stats()

    print("Sleep Boundary Detector...")
    sbd = SleepBoudaryDetector(w0)
    sbd.detect_sleep_boundaries(strategy="annotation",
                                output_col="sleep_period_annotation",
                                annotation_col="interval_sleep",
                                annotation_merge_tolerance_in_minutes=120,
                                annotation_only_largest_sleep_period=True)

    print("Changing experiment_day representation to ml_sequence")
    exp_day_column = 'ml_sequence'
    # FIX: this was called on the undefined name `exp` (NameError at runtime);
    # the object created in this function is `w0`, as the commented-out
    # variant further below also shows.
    w0.create_day_sleep_experiment_day(sleep_col="sleep_period_annotation",
                                       new_col=exp_day_column)

    # --- Validation: flag bad epochs/days and report before removing. ---
    va = Validator(w0)
    va.flag_epoch_physical_activity_less_than(min_activity_threshold=0)
    va.flag_epoch_null_cols(col_list=["hyp_act_x"])
    va.flag_day_sleep_length_less_than(
        sleep_period_col="sleep_period_annotation", min_sleep_in_minutes=3 * 30)
    va.flag_day_sleep_length_more_than(
        sleep_period_col="sleep_period_annotation", max_sleep_in_minutes=12 * 60)
    va.flag_day_max_nonwearing(max_non_wear_minutes_per_day=3 * 10)
    va.flag_day_if_valid_epochs_smaller_than(valid_minutes_per_day=16 * 60)
    va.validation_report()

    # Visualize before removal, with validation flags overlaid.
    v = Viewer(w0)
    v.view_signals(["activity", "sleep"],
                   sleep_cols=["sleep_period_annotation"],
                   text=["validation"],
                   alphas={'sleep': 0.3})

    # Remove invalid data, then require a minimum run of consecutive days.
    n_removed_wearables = va.remove_wearables_without_valid_days()
    va.flag_day_if_not_enough_consecutive_days(min_consecutive_days)
    n_removed_days = va.remove_flagged_days()
    n_removed_wearables = va.remove_wearables_without_valid_days()

    # Visualize again after removal.
    v = Viewer(w0)
    v.view_signals(["activity", "sleep"],
                   sleep_cols=["sleep_period_annotation"],
                   text=["validation"],
                   alphas={'sleep': 0.3})

    # NOTE(review): the wearable count is printed twice and reflects only the
    # second removal pass (the first pass's count is overwritten above).
    print("Removed %d wearables." % n_removed_wearables)
    print("Removed %d days that are not consecutive." % n_removed_days)
    print("Removed %d wearables." % n_removed_wearables)

    # # Setting day to ml representation -> days may not be of fixed lengths.
    # print("Changing experiment_day representation to ml_sequence")
    # exp_day_column = 'ml_sequence'
    # w0.create_day_sleep_experiment_day(sleep_col="sleep_period_annotation", new_col=exp_day_column)
    # v = Viewer(w0)
    # v.view_signals_ml_format(["activity", "sleep"],
    #                          sleep_cols=["sleep_period_annotation"],
    #                          alphas={'sleep': 0.3})
    w0.overall_stats()