Beispiel #1
0
    def test_valid_invalid_days(self):
        """Days stay valid until check_valid_days runs; then day 5 is flagged invalid."""
        wearable = self.w_5day_invalid5hours

        # Nothing has been flagged yet, so no day is invalid and day 5 is valid.
        self.assertSetEqual(wearable.get_invalid_days(), set())
        self.assertSetEqual(wearable.get_valid_days(), {5})

        # Now force a day to become invalid via non-wear detection + validation.
        tsp = TimeSeriesProcessing(wearable)
        tsp.detect_non_wear(strategy="choi2011")
        tsp.check_valid_days(min_activity_threshold=0, max_non_wear_minutes_per_day=60)

        self.assertSetEqual(wearable.get_invalid_days(), {5})
    # Former CLI flags, kept for reference; their defaults are now hard-coded below.
    # parser.add_argument('--hr_quantile', type=float, default=0.40)
    # parser.add_argument('--hr_merge_blocks', type=int, default=300)
    # parser.add_argument('--hr_min_window_length', type=int, default=40)
    # parser.add_argument('--hr_volarity', type=int, default=5)
    # HR volatility threshold (note: "volarity" is the spelling used by the project API).
    hr_volarity = 5
    exp_id = 0
    # Hyper-parameter grids for the van Hees sweep; the commented lists show the
    # full ranges that were narrowed down to a single value each.
    quantiles = [0.42]  # [0.35, 0.375, 0.4, 0.425, 0.45, 0.475, 0.5, 0.525]
    window_lengths = [
        45
    ]  # [25, 27.5, 30, 32.5, 35, 37.5, 40, 42.5, 45, 47.5, 50]
    time_merge_blocks = [60]  # [60, 120, 180, 240, 300, 360]
    # Hour at which an "experiment day" starts (15:00).
    start_hour = 15

    # Run van hees once
    exp = load_experiment(data_path, diary_path)
    tsp = TimeSeriesProcessing(exp)
    # Sentinel fill so periods with no recorded activity stay numeric.
    tsp.fill_no_activity(-0.0001)
    tsp.detect_non_wear(strategy="none")
    tsp.check_valid_days(min_activity_threshold=0,
                         max_non_wear_minutes_per_day=180,
                         check_sleep_period=False)
    tsp.drop_invalid_days()

    tsp.detect_sleep_boundaries(strategy="adapted_van_hees",
                                output_col="hyp_sleep_period_vanhees",
                                angle_start_hour=start_hour)
    # Caches so the van Hees pass above is computed once and reused across runs.
    cached_van_hees = {}
    cached_van_hees_tst = {}
    cached_van_hees_sleeponset = {}
    cached_van_hees_sleepoffset = {}
    cached_diary_tst = {}
def one_loop(hparam):
    """Evaluate one van Hees hyper-parameter configuration against a fixed HR baseline.

    Parameters
    ----------
    hparam : tuple
        (exp_id, data_path, diary_path, start_hour, end_hour,
         vh_quantile, vh_merge_blocks, vh_min_window_length)

    Side effects
    ------------
    Writes ``exp_<exp_id>.csv`` into the module-level ``output_path``.
    Also reads the module-level ``hr_volarity`` threshold.
    """
    exp_id, data_path, diary_path, start_hour, end_hour, vh_quantile, vh_merge_blocks, vh_min_window_length = hparam

    print("Q:", vh_quantile, "W:", vh_min_window_length, "T", vh_merge_blocks)

    exp = load_experiment(data_path, diary_path, start_hour)

    tsp = TimeSeriesProcessing(exp)
    # Sentinel fill so periods with no recorded activity stay numeric.
    tsp.fill_no_activity(-0.0001)
    tsp.detect_non_wear(strategy="none")
    tsp.check_valid_days(min_activity_threshold=0,
                         max_non_wear_minutes_per_day=180,
                         check_sleep_period=False,
                         check_diary=True)
    tsp.drop_invalid_days()

    # HR baseline runs with FIXED parameters (quantile 0.35, 32.5-min window,
    # 240-min merge gap); only the van Hees parameters vary per configuration.
    tsp.detect_sleep_boundaries(strategy="hr",
                                output_col="hyp_sleep_period_hr",
                                hr_quantile=0.35,
                                hr_volarity_threshold=hr_volarity,
                                hr_rolling_win_in_minutes=5,
                                hr_sleep_search_window=(start_hour, end_hour),
                                hr_min_window_length_in_minutes=32.50,
                                hr_volatility_window_in_minutes=10,
                                hr_merge_blocks_gap_time_in_min=240,
                                hr_sleep_only_in_sleep_search_window=True,
                                hr_only_largest_sleep_period=True)

    # van Hees variant driven by triaxial activity.
    tsp.detect_sleep_boundaries(
        strategy="adapted_van_hees",
        output_col="hyp_sleep_period_vanhees",
        angle_cols=[],
        angle_use_triaxial_activity=True,
        angle_start_hour=start_hour,
        angle_quantile=vh_quantile,
        angle_minimum_len_in_minutes=vh_min_window_length,
        angle_merge_tolerance_in_minutes=vh_merge_blocks)

    # van Hees variant driven by pitch/roll angle columns.
    tsp.detect_sleep_boundaries(
        strategy="adapted_van_hees",
        output_col="hyp_sleep_period_vanheespr",
        angle_cols=["pitch", "roll"],
        angle_use_triaxial_activity=False,
        angle_start_hour=start_hour,
        angle_quantile=vh_quantile,
        angle_minimum_len_in_minutes=vh_min_window_length,
        angle_merge_tolerance_in_minutes=vh_merge_blocks)

    # Pairwise comparisons reported per wearable (was duplicated inline twice).
    combos = [
        "diary_hr", "diary_vanhees", "hr_vanhees", "diary_vanheespr",
        "hr_vanheespr", "vanhees_vanheespr"
    ]

    df_acc = []
    mses = {}
    cohens = {}

    print("Calculating evaluation measures...")
    for w in exp.get_all_wearables():

        if w.data.empty:
            print("Data for PID %s is empty!" % w.get_pid())
            continue

        sleep = {}
        sleep["diary"] = w.data[w.diary_sleep].astype(int)
        sleep["hr"] = w.data["hyp_sleep_period_hr"].astype(int)
        sleep["vanhees"] = w.data["hyp_sleep_period_vanhees"].astype(int)
        sleep["vanheespr"] = w.data["hyp_sleep_period_vanheespr"].astype(int)

        if sleep["diary"].shape[0] == 0:
            continue

        for comb in combos:
            a, b = comb.split("_")
            mses[comb] = mean_squared_error(sleep[a], sleep[b])
            cohens[comb] = cohen_kappa_score(sleep[a], sleep[b])

        # Per-day total sleep time for each method.
        tst_diary = w.get_total_sleep_time_per_day(based_on_diary=True)
        tst_hr = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        tst_vanhees = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        tst_vanheespr = w.get_total_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheespr")

        # Per-day sleep onset for each method.
        onset_diary = w.get_onset_sleep_time_per_day(based_on_diary=True)
        onset_diary.name = "onset_diary"
        onset_hr = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        onset_hr.name = "onset_hr"
        onset_vanhees = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        onset_vanhees.name = "onset_vanhees"
        onset_vanheespr = w.get_onset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheespr")
        onset_vanheespr.name = "onset_vanheespr"

        # Per-day sleep offset for each method.
        offset_diary = w.get_offset_sleep_time_per_day(based_on_diary=True)
        offset_diary.name = "offset_diary"
        offset_hr = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_hr")
        offset_hr.name = "offset_hr"
        offset_vanhees = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanhees")
        offset_vanhees.name = "offset_vanhees"
        offset_vanheespr = w.get_offset_sleep_time_per_day(
            sleep_col="hyp_sleep_period_vanheespr")
        offset_vanheespr.name = "offset_vanheespr"

        df_res = pd.concat(
            (onset_hr, onset_diary, onset_vanhees, onset_vanheespr, offset_hr,
             offset_diary, offset_vanhees, offset_vanheespr, tst_diary, tst_hr,
             tst_vanhees, tst_vanheespr),
            axis=1)

        df_res["pid"] = w.get_pid()
        for comb in combos:
            df_res["mse_" + comb] = mses[comb]
            df_res["cohens_" + comb] = cohens[comb]

        # View signals
        # w.change_start_hour_for_experiment_day(0)
        # w.view_signals(["activity", "hr", "sleep", "diary"],
        #                sleep_cols=["hyp_sleep_period_hr", "hyp_sleep_period_vanhees", "hyp_sleep_period_vanheespr"])

        df_acc.append(df_res)

    exp_id += 1

    if not df_acc:
        # pd.concat([]) raises ValueError; nothing to write for this run.
        print("No valid wearables for exp %d; skipping CSV." % exp_id)
        return

    df_acc = pd.concat(df_acc)
    df_acc["exp_id"] = exp_id
    df_acc["quantile"] = vh_quantile
    df_acc["window_lengths"] = vh_min_window_length
    df_acc["time_merge_blocks"] = vh_merge_blocks

    df_acc.to_csv(os.path.join(output_path, "exp_%d.csv" % (exp_id)),
                  index=False)
def one_loop(hparam):
    """Evaluate one adapted-van-Hees configuration against PSG annotations.

    Parameters
    ----------
    hparam : tuple
        (exp_id, file_path, start_hour, end_hour, quantile, merge_blocks,
         min_window_length)

    Side effects
    ------------
    Writes ``exp_<exp_id>.csv`` into the module-level ``output_path``.
    """
    exp_id, file_path, start_hour, end_hour, quantile, merge_blocks, min_window_length = hparam

    exp = load_experiment(file_path, start_hour)

    print("Q:", quantile, "W:", min_window_length, "T", merge_blocks)

    tsp = TimeSeriesProcessing(exp)

    # Sentinel fill so periods with no recorded activity stay numeric.
    tsp.fill_no_activity(-0.0001)
    # Ground truth: sleep periods derived from PSG annotations.
    tsp.detect_sleep_boundaries(strategy="annotation", output_col="hyp_sleep_period_psg",
                                annotation_merge_tolerance_in_minutes=300)

    tsp.detect_sleep_boundaries(strategy="adapted_van_hees", output_col="hyp_sleep_period_vanhees", angle_cols=[],
                                angle_use_triaxial_activity=True, angle_start_hour=start_hour, angle_quantile=quantile,
                                angle_minimum_len_in_minutes=min_window_length,
                                angle_merge_tolerance_in_minutes=merge_blocks)

    df_acc = []
    mses = {}
    cohens = {}

    print("Calculating evaluation measures...")
    for w in tqdm(exp.get_all_wearables()):

        # Skip wearables with no data (the diary-based loop in this project
        # does the same); .astype(int) below would otherwise fail.
        if w.data.empty:
            print("Data for PID %s is empty!" % w.get_pid())
            continue

        sleep = {}
        sleep["psg"] = w.data["hyp_sleep_period_psg"].astype(int)
        sleep["vanhees"] = w.data["hyp_sleep_period_vanhees"].astype(int)

        for comb in ["psg_vanhees"]:
            a, b = comb.split("_")
            mses[comb] = mean_squared_error(sleep[a], sleep[b])
            cohens[comb] = cohen_kappa_score(sleep[a], sleep[b])

        # Per-day total sleep time, onset, and offset for each method.
        tst_psg = w.get_total_sleep_time_per_day(sleep_col="hyp_sleep_period_psg")
        tst_vanhees = w.get_total_sleep_time_per_day(sleep_col="hyp_sleep_period_vanhees")

        onset_psg = w.get_onset_sleep_time_per_day(sleep_col="hyp_sleep_period_psg")
        onset_psg.name = "onset_psg"
        onset_vanhees = w.get_onset_sleep_time_per_day(sleep_col="hyp_sleep_period_vanhees")
        onset_vanhees.name = "onset_vanhees"

        offset_psg = w.get_offset_sleep_time_per_day(sleep_col="hyp_sleep_period_psg")
        offset_psg.name = "offset_psg"
        offset_vanhees = w.get_offset_sleep_time_per_day(sleep_col="hyp_sleep_period_vanhees")
        offset_vanhees.name = "offset_vanhees"

        df_res = pd.concat((onset_psg, onset_vanhees,
                            offset_psg, offset_vanhees,
                            tst_psg, tst_vanhees), axis=1)

        df_res["pid"] = w.get_pid()
        for comb in ["psg_vanhees"]:
            df_res["mse_" + comb] = mses[comb]
            df_res["cohens_" + comb] = cohens[comb]

        # View signals
        # w.view_signals(["sleep"],
        #                others=["hyp_annotation", "hr", "hyp_act_x", "hyp_act_y", "hyp_act_z"],
        #                sleep_cols=["hyp_sleep_period_psg", "hyp_sleep_period_hr", "hyp_sleep_period_vanhees"],
        #                frequency="30S"
        #                )

        df_acc.append(df_res)

    if not df_acc:
        # pd.concat([]) raises ValueError; nothing to write for this run.
        print("No valid wearables for exp %d; skipping CSV." % exp_id)
        return

    df_acc = pd.concat(df_acc)
    df_acc["exp_id"] = exp_id
    df_acc["quantile"] = quantile
    df_acc["window_lengths"] = min_window_length
    df_acc["time_merge_blocks"] = merge_blocks

    df_acc.to_csv(os.path.join(output_path, "exp_%d.csv" % (exp_id)), index=False)
Beispiel #5
0
    # Configure an Experiment
    exp = Experiment()

    file_path = "./data/small_collection_hchs/*"

    # Iterates over a set of files in a directory.
    # Unfortunately, we have to do it manually with RawProcessing because we are modifying the annotations
    for file in glob(file_path):
        pp = ActiwatchSleepData(file,
                                col_for_datetime="time",
                                col_for_pid="pid")
        w = Wearable(pp)  # Creates a wearable from a pp object
        exp.add_wearable(w)

    tsp = TimeSeriesProcessing(exp)

    # Sentinel fill so periods with no recorded activity stay numeric.
    tsp.fill_no_activity(-0.0001)
    # NOTE(review): other snippets in this codebase use strategy="choi2011" —
    # confirm "choi" is a valid strategy name here.
    tsp.detect_non_wear(strategy="choi")

    # Require at least 5 consecutive days of data.
    tsp.check_consecutive_days(5)
    print("Valid days:", tsp.get_valid_days())
    print("Invalid days:", tsp.get_invalid_days())

    # Sleep boundaries from annotations; search starts at 18:00.
    tsp.detect_sleep_boundaries(strategy="annotation",
                                annotation_hour_to_start_search=18)
    tsp.invalidate_day_if_no_sleep()
    print("Valid days:", tsp.get_valid_days())

    tsp.check_valid_days(min_activity_threshold=0,
                         max_non_wear_minutes_per_day=180)
Beispiel #6
0
    # HR volatility threshold (note: "volarity" is the spelling used by the project API).
    hr_volarity = 5
    exp_id = 0
    # Hyper-parameter grids; the commented lists show the full sweep ranges.
    quantiles = [0.05] #, 0.10, 0.15, 0.20, 0.25]
    window_lengths = [25] #, 27.5, 30, 32.5, 35, 37.5, 40, 42.5, 45, 47.5, 50]
    time_merge_blocks = [30] #, 60, 90, 120, 180]
    # Hour at which an "experiment day" starts (15:00).
    start_hour = 15

    # All exp_<id>.csv outputs go to a throwaway directory.
    with tempfile.TemporaryDirectory() as output_path:
        for quantile in quantiles:
            for merge_blocks in time_merge_blocks:
                for min_window_length in window_lengths:

                    exp = load_experiment(data_path, diary_path)

                    tsp = TimeSeriesProcessing(exp)
                    # Sentinel fill so periods with no recorded activity stay numeric.
                    tsp.fill_no_activity(-0.0001)
                    tsp.detect_non_wear(strategy="none")
                    tsp.check_valid_days(min_activity_threshold=0, max_non_wear_minutes_per_day=180,
                                         check_sleep_period=False)
                    tsp.drop_invalid_days()
                    # van Hees variant driven by dominant-wrist pitch/roll means.
                    tsp.detect_sleep_boundaries(strategy="adapted_van_hees", output_col="hyp_sleep_period_vanhees",
                                                angle_cols=["pitch_mean_dw", "roll_mean_dw"],
                                                angle_start_hour=start_hour, angle_quantile=quantile,
                                                angle_minimum_len_in_minutes=min_window_length,
                                                angle_merge_tolerance_in_minutes=merge_blocks)

                    # Don't change the intervals below: we're using 1.5, 3 and 6.
                    # Removed the -1 when creating the wearable
                    pa = PhysicalActivity(exp, 1.5, 3, 6)
                    pa.generate_pa_columns()
def one_loop(hparam):
    """Evaluate one HR-based sleep-boundary configuration against the sleep diary.

    Parameters
    ----------
    hparam : tuple
        (exp_id, data_path, diary_path, start_hour, end_hour,
         hr_quantile, hr_merge_blocks, hr_min_window_length)

    Side effects
    ------------
    Writes ``exp_<exp_id>.csv`` into the module-level ``output_path``.
    Also reads the module-level ``hr_volarity`` threshold.
    """
    exp_id, data_path, diary_path, start_hour, end_hour, hr_quantile, hr_merge_blocks, hr_min_window_length = hparam

    print("Q:", hr_quantile, "L:", hr_min_window_length, "G", hr_merge_blocks)

    exp = load_experiment(data_path, diary_path, start_hour)

    tsp = TimeSeriesProcessing(exp)
    # NOTE(review): positive fill (+0.0001) and min_activity_threshold=-100000
    # differ from the sibling loops (-0.0001 / 0) — confirm this is intended.
    tsp.fill_no_activity(0.0001)
    tsp.detect_non_wear(strategy="none")
    tsp.check_valid_days(min_activity_threshold=-100000, max_non_wear_minutes_per_day=180, check_sleep_period=False,
                         check_diary=True)
    tsp.drop_invalid_days()

    tsp.detect_sleep_boundaries(strategy="hr", output_col="hyp_sleep_period_hr", hr_quantile=hr_quantile,
                                hr_volarity_threshold=hr_volarity, hr_rolling_win_in_minutes=5,
                                hr_sleep_search_window=(start_hour, end_hour),
                                hr_min_window_length_in_minutes=hr_min_window_length,
                                hr_volatility_window_in_minutes=10, hr_merge_blocks_gap_time_in_min=hr_merge_blocks,
                                hr_sleep_only_in_sleep_search_window=True, hr_only_largest_sleep_period=True)

    # tsp.detect_sleep_boundaries(strategy="adapted_van_hees", output_col="hyp_sleep_period_vanheesndw",
    #                             vanhees_cols=["pitch_mean_ndw", "roll_mean_ndw"], vanhees_start_hour=vanhees_start_hour,
    #                             vanhees_quantile=vanhees_quantile, vanhees_minimum_len_in_minutes=vanhees_window_length,
    #                             vanhees_merge_tolerance_in_minutes=vanhees_time_merge_block)

    sm = SleepMetrics(exp)
    # NOTE(review): the sleepEfficiency result is immediately overwritten by the
    # SRI result and never reported — either store/print both or drop the first call.
    sm_results = sm.get_sleep_quality(strategy="sleepEfficiency", sleep_period_col="hyp_sleep_period_hr",
                                      wake_col="hyp_sleep_period_hr")
    sm_results = sm.get_sleep_quality(strategy="sri", sleep_period_col=None,
                                      wake_col="hyp_sleep_period_hr")

    print(sm_results)

    df_acc = []
    mses = {}
    cohens = {}

    #print("Calculating evaluation measures...")
    for w in exp.get_all_wearables():

        diary_sleep = w.data[w.diary_sleep].astype(int)
        hr_sleep = w.data["hyp_sleep_period_hr"].astype(int)

        if diary_sleep.shape[0] == 0 or hr_sleep.shape[0] == 0:
            continue

        mses["diary_hr"] = mean_squared_error(diary_sleep, hr_sleep)
        cohens["diary_hr"] = cohen_kappa_score(diary_sleep, hr_sleep)

        # Per-day total sleep time, onset, and offset for diary and HR methods.
        tst_diary = w.get_total_sleep_time_per_day(based_on_diary=True)
        tst_hr = w.get_total_sleep_time_per_day(sleep_col="hyp_sleep_period_hr")

        onset_diary = w.get_onset_sleep_time_per_day(based_on_diary=True)
        onset_diary.name = "onset_diary"
        onset_hr = w.get_onset_sleep_time_per_day(sleep_col="hyp_sleep_period_hr")
        onset_hr.name = "onset_hr"

        offset_diary = w.get_offset_sleep_time_per_day(based_on_diary=True)
        offset_diary.name = "offset_diary"
        offset_hr = w.get_offset_sleep_time_per_day(sleep_col="hyp_sleep_period_hr")
        offset_hr.name = "offset_hr"

        df_res = pd.concat((onset_hr, onset_diary,
                            offset_hr, offset_diary,
                            tst_diary, tst_hr), axis=1)

        df_res["pid"] = w.get_pid()
        for comb in ["diary_hr"]:
            df_res["mse_" + comb] = mses[comb]
            df_res["cohens_" + comb] = cohens[comb]

        # View signals
        # w.change_start_hour_for_experiment_day(0)
        # NOTE(review): this plotting call runs for EVERY wearable; the sibling
        # loops keep it commented out — confirm it belongs in batch runs.
        w.view_signals(["activity", "hr", "sleep", "diary"], sleep_cols=["hyp_sleep_period_hr"])

        df_acc.append(df_res)

    exp_id += 1

    if not df_acc:
        # pd.concat([]) raises ValueError; nothing to write for this run.
        print("No valid wearables for exp %d; skipping CSV." % exp_id)
        return

    df_acc = pd.concat(df_acc)
    df_acc["exp_id"] = exp_id
    df_acc["quantile"] = hr_quantile
    df_acc["window_lengths"] = hr_min_window_length
    df_acc["time_merge_blocks"] = hr_merge_blocks

    df_acc.to_csv(os.path.join(output_path, "exp_%d.csv" % (exp_id)), index=False)