Code example #1
def train_X_to_Cy(args, dataset_kwargs, model_kwargs):
    """Train a model that maps inputs X jointly to concepts C and labels y."""

    dataloaders, datasets, dataset_sizes = load_data_from_different_splits(
        **dataset_kwargs)

    # ---- Model fitting ----
    model = ModelXtoCY(model_kwargs)
    results = model.fit(dataloaders=dataloaders, dataset_sizes=dataset_sizes)

    # ---- Save results ----
    save_model_results(model, results, args, dataset_kwargs, model_kwargs)
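
Both dataset_kwargs and model_kwargs are passed straight through to load_data_from_different_splits and the ModelXtoCY constructor, so their exact contents are defined elsewhere in the project. A minimal invocation sketch follows; every key and value below is an illustrative assumption rather than the project's real interface, except 'num_epochs', which code example #3 also reads from model_kwargs.

import argparse

# Placeholder arguments for illustration only.
args = argparse.Namespace(name='x_to_cy_run')  # hypothetical experiment name
dataset_kwargs = {'batch_size': 64}            # hypothetical data-loading options
model_kwargs = {'num_epochs': 100}             # 'num_epochs' is later read by code example #3

train_X_to_Cy(args, dataset_kwargs, model_kwargs)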
Code example #2
def train_X_to_C(args, dataset_kwargs, model_kwargs):
    """Train a model that maps inputs X to concepts C (optionally the SENN variant)."""

    dataloaders, datasets, dataset_sizes = load_data_from_different_splits(
        **dataset_kwargs)

    # ---- Model fitting ----
    if args.use_senn_model:
        model = ModelXtoC_SENN(model_kwargs)
    else:
        model = ModelXtoC(model_kwargs)
    results = model.fit(dataloaders=dataloaders, dataset_sizes=dataset_sizes)

    # ---- Save results ----
    save_model_results(model, results, args, dataset_kwargs, model_kwargs)
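
The only difference from code example #1 is the args.use_senn_model branch, which swaps in the SENN variant of the X-to-C model. If that flag comes from a command-line parser, it would typically be a boolean switch; a hedged sketch, where the parser wiring is an assumption and only the attribute name use_senn_model comes from the code above:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--use_senn_model', action='store_true',
                    help='Use ModelXtoC_SENN instead of ModelXtoC for the X -> C stage')
args = parser.parse_args(['--use_senn_model'])  # as if passed on the command line
print(args.use_senn_model)                      # True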
Code example #3
def hyperparameter_optimization(args, dataset_kwargs, model_kwargs):
    """Grid-search model hyperparameters, training and scoring every candidate.

    Relies on module-level imports (json, copy, os, numpy as np, itertools.product)
    and on the model classes and helpers defined elsewhere in this file.
    """

    dataloaders, datasets, dataset_sizes = load_data_from_different_splits(
        **dataset_kwargs)

    # Parse the JSON-encoded search space and any extra settings passed on the command line
    args.hyperopt_params = json.loads(args.hyperopt_params)
    args.hyperopt_additional = json.loads(
        args.hyperopt_additional) if args.hyperopt_additional else {}
    if args.hyperopt_model == 'X_to_y':
        model_class = ModelXtoY
    elif args.hyperopt_model == 'X_to_Cy':
        model_class = ModelXtoCY
    elif args.hyperopt_model == 'X_to_C_to_y':
        model_class = ModelXtoCtoY
    elif args.hyperopt_model == 'X_to_Chat__Chat_to_y':
        model_class = ModelXtoC
    else:
        raise ValueError('Unknown hyperopt_model: %s' % args.hyperopt_model)

    # ---- Generate candidate parameters ----
    keys = list(args.hyperopt_params.keys())
    values = list(args.hyperopt_params.values())
    if args.hyperopt_search == 'random':
        raise NotImplementedError()
    elif args.hyperopt_search == 'grid':
        # One tuple of values per candidate configuration (Cartesian product)
        candidate_parameters = list(product(*values))
    else:
        raise ValueError('Unknown hyperopt_search: %s' % args.hyperopt_search)

    def recursive_set_attr(kwargs, key, value):
        # `key` is a dotted parameter name already split into parts; descend the
        # nested kwargs dicts and set the innermost entry,
        # e.g. ['a', 'b'] -> kwargs['a']['b'] = value.
        if len(key) == 1:
            kwargs[key[0]] = value
            return
        recursive_set_attr(kwargs[key[0]], key[1:], value)

    def convert_params_to_kwargs(names, parameters, model_kwargs):
        # Return a deep copy of model_kwargs with each dotted parameter name set to its candidate value
        model_kwargs_new = copy.deepcopy(model_kwargs)
        for name, parameter in zip(names, parameters):
            recursive_set_attr(model_kwargs_new, name.split('.'), parameter)
        return model_kwargs_new

    def get_exp_name(model_name, cand_id, trial_id, keys, params):
        string = 'opt/%s_Cand%d_Trial%d' % (model_name, cand_id, trial_id)
        for key, param in zip(keys, params):
            string += '_%s@%s' % (key, param)
        return string

    # ---- Run evaluations for each candidate parameters ----
    N_exps = len(candidate_parameters) * args.hyperopt_n_repeats
    print('Running a total of %d params X %d repeats = %d experiments' %
          (len(candidate_parameters), args.hyperopt_n_repeats, N_exps))
    print('Estimated time: %d H' % (N_exps * EST_TIME_PER_EXP))
    candidate_scores = []
    for i, parameters in enumerate(candidate_parameters):
        print(' ------ Evaluating candidate %d/%d ------' %
              (i + 1, len(candidate_parameters)))
        scores = []
        for j in range(args.hyperopt_n_repeats):
            print(' ---------- Trial %d ----------' % (j + 1))
            exp_name = os.path.join(
                args.name,
                get_exp_name(args.hyperopt_model, i + 1, j + 1, keys,
                             parameters))
            model_kwargs_param = convert_params_to_kwargs(
                keys, parameters, model_kwargs)
            model = model_class(model_kwargs_param)
            results = model.fit(dataloaders=dataloaders,
                                dataset_sizes=dataset_sizes)
            save_model_results(model,
                               results,
                               args,
                               dataset_kwargs,
                               model_kwargs_param,
                               exp_name=exp_name)

            if args.hyperopt_model == 'X_to_Chat__Chat_to_y':
                print(' ----- Training Chat_to_y -----')
                extra_params = {
                    'pretrained_path':
                    os.path.join(OUTPUTS_DIR, exp_name, 'model_weights.pth'),
                    'front_fc_layers_to_freeze':
                    args.hyperopt_additional['front_fc_layers_to_freeze'],
                    'fc_layers':
                    args.hyperopt_additional['fc_layers'],
                    'y_fc_name':
                    args.hyperopt_additional['y_fc_name']
                }
                model_kwargs_param_new = copy.deepcopy(model_kwargs_param)
                model_kwargs_param_new.update(extra_params)
                model = ModelXtoChat_ChatToY(model_kwargs_param_new)
                results = model.fit(dataloaders=dataloaders,
                                    dataset_sizes=dataset_sizes)
                save_model_results(model, results, args, dataset_kwargs,
                                   model_kwargs_param_new)

            # Use the last-epoch validation result as the candidate's score
            score = results[model_kwargs['num_epochs'] -
                            1][args.hyperopt_score_metric]
            if args.hyperopt_negate_score:
                # Flip the sign for metrics where lower is better, so argmax still picks the best
                score *= -1.
            scores.append(score)
        candidate_scores.append(np.mean(scores))

    # ---- Report the best hyperparameter ----
    print(' ------ Results ------')
    print(' Parameter names: %s' % (str(keys)))
    best_idx = np.argmax(candidate_scores)
    for i, (score, parameters) in enumerate(
            zip(candidate_scores, candidate_parameters)):
        best = '[Best] ' if i == best_idx else ''
        print('   Score: %.3f %s| Parameters: %s ' %
              (score, best, str(parameters)))
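
The search itself is a plain grid search: args.hyperopt_params is a JSON object mapping parameter names to lists of candidate values, itertools.product expands those lists into one tuple per configuration, and convert_params_to_kwargs writes each tuple back into a copy of model_kwargs, treating dots in a parameter name as keys into nested dictionaries. A self-contained sketch of that expansion logic, using hypothetical parameter names (the real names depend on what the model constructors accept):

import copy
import json
from itertools import product

# Hypothetical search space: one flat key and one dotted key into a nested dict.
hyperopt_params = json.loads('{"num_epochs": [50, 100], "optim.lr": [0.001, 0.01]}')

keys = list(hyperopt_params.keys())
values = list(hyperopt_params.values())
candidates = list(product(*values))  # [(50, 0.001), (50, 0.01), (100, 0.001), (100, 0.01)]

def set_dotted(kwargs, dotted_key, value):
    # Mirrors recursive_set_attr: descend nested dicts along the dotted path, then assign.
    parts = dotted_key.split('.')
    for part in parts[:-1]:
        kwargs = kwargs[part]
    kwargs[parts[-1]] = value

base_model_kwargs = {'num_epochs': 10, 'optim': {'lr': 0.1}}  # illustrative defaults
for candidate in candidates:
    kwargs_candidate = copy.deepcopy(base_model_kwargs)
    for key, value in zip(keys, candidate):
        set_dotted(kwargs_candidate, key, value)
    print(kwargs_candidate)
# First line printed: {'num_epochs': 50, 'optim': {'lr': 0.001}}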