# Example #1
# 0
def main_source_training(source_dataset, target_dataset, target_subject, model, params, weights_exp, eval_mode):
    """Pre-train a model on the multi-source data (target subject excluded) and save its weights.

    :param source_dataset: name of the source dataset
    :param target_dataset: name of the target dataset
    :param target_subject: subject excluded from the source data during preprocessing
    :param model: model name
    :param params: hyperparameter dict; "hist" is divided by the sampling frequency
    :param weights_exp: experiment name used to build the weights file path
    :param eval_mode: evaluation mode forwarded to make_predictions_tl
    """
    history_samples = params["hist"] // freq
    weights_file = compute_weights_file(model, source_dataset, target_dataset, target_subject, weights_exp)

    # preprocess the pooled source data, leaving the target subject out
    train_sets, valid_sets, test_sets, _scalers = preprocessing_source_multi(
        source_dataset, target_dataset, target_subject, ph_f, history_samples, day_len_f)

    # fit on the source data and persist the trained weights for later transfer
    make_predictions_tl(target_subject, model, params, ph_f, train_sets, valid_sets, test_sets,
                        eval_mode=eval_mode, fit=True, save_model_file=weights_file)
    def _compute_features(self, target_subject, split):
        """
        Compute the features for a dataset excluding the given target subject, for a given split
        :param target_subject: name or number of target subject to exclude during computation
        :param split: number of the split
        :return: features, domains (class of subject)
        """
        # preprocess the source data with the target subject left out
        train, valid, test, _ = preprocessing_source_multi(self.source_dataset, self.target_dataset, target_subject,
                                                           self.ph, self.hist, self.day_len)
        weights_path = self._compute_file_path(target_subject)

        # build the model for this split and extract validation-set features
        split_model = self.Model(target_subject, self.ph, self.params, train[split], valid[split], test[split])
        features, _ = split_model.extract_features("valid", weights_path)

        # keep only the "domain" column (index 1 of the ["y", "domain"] selection)
        domains = valid[split].loc[:, ["y", "domain"]].values[:, 1]
        return features, domains
# Example #3
# 0
    def __init__(self, source_dataset, target_dataset, target_subject,
                 exp_name, model_name, params):
        """Store the experiment configuration, resolve the model and its parameters
        by name, and preprocess the multi-source data once up front.

        :param source_dataset: name of the source dataset
        :param target_dataset: name of the target dataset
        :param target_subject: subject excluded from the source data
        :param exp_name: name of the experiment
        :param model_name: name used to locate the model class
        :param params: name/spec used to locate the hyperparameters
        """
        # experiment identity
        self.exp_name = exp_name
        self.target_subject = target_subject
        self.target_dataset = target_dataset
        self.source_dataset = source_dataset

        # resolve model class and hyperparameters from their names
        self.model_name = model_name
        self.Model = locate_model(model_name)
        self.params = locate_params(params)

        # timing constants; "hist" is given in minutes and converted to samples
        constants = misc.constants
        self.ph = constants.ph_f
        self.day_len = constants.day_len_f
        self.hist = self.params["hist"] // constants.freq

        # preprocess once and keep the splits (scalers are discarded)
        splits = preprocessing_source_multi(
            self.source_dataset, self.target_dataset, self.target_subject,
            self.ph, self.hist, self.day_len)
        self.train, self.valid, self.test, _ = splits
# Example #4
# 0
def end_to_end(source_dataset, target_dataset, target_subject, model, params, weights_exp, eval_mode, exp,
               plot):
    """Run the full transfer-learning pipeline: source pre-training, global evaluation
    on the target subject, then fine-tuning and a final evaluation.

    :param source_dataset: name of the source dataset
    :param target_dataset: name of the target dataset
    :param target_subject: subject used as the transfer target
    :param model: model name
    :param params: hyperparameter dict; "hist" is divided by the sampling frequency
    :param weights_exp: experiment name used to build the weights file path
    :param eval_mode: evaluation mode forwarded to make_predictions_tl
    :param exp: experiment name forwarded to evaluation
    :param plot: plotting flag forwarded to evaluation
    :return: result of evaluation after fine-tuning ("target_finetuning")
    """
    hist_samples = params["hist"] // freq
    weights_file = compute_weights_file(model, source_dataset, target_dataset, target_subject, weights_exp)

    # 1) pre-train on the pooled source data (target subject excluded) and save the weights
    src_train, src_valid, src_test, _ = preprocessing_source_multi(source_dataset, target_dataset, target_subject,
                                                                   ph_f, hist_samples, day_len_f)
    make_predictions_tl(target_subject, model, params, ph_f, src_train, src_valid, src_test,
                        eval_mode=eval_mode, fit=True, save_model_file=weights_file)

    # 2) evaluate the pre-trained model on the target subject without any fitting
    train, valid, test, scalers = preprocessing(target_dataset, target_subject, ph_f, hist_samples, day_len_f)
    global_results = make_predictions_tl(target_subject, model, params, ph_f, train, valid, test,
                                         weights_file=weights_file, eval_mode=eval_mode, fit=False,
                                         save_model_file=None)
    evaluation(global_results, scalers, source_dataset, target_dataset, target_subject, model, params, exp, plot,
               "target_global")

    # 3) fine-tune on the target subject's data and evaluate again
    finetune_results = make_predictions_tl(target_subject, model, params, ph_f, train, valid, test,
                                           weights_file=weights_file, eval_mode=eval_mode, fit=True,
                                           save_model_file=None)
    return evaluation(finetune_results, scalers, source_dataset, target_dataset, target_subject, model, params, exp,
                      plot, "target_finetuning")