def custom2(param, comb_degree=3):
    """Run combined-sample experiments for sizes 1..comb_degree.

    For each combination size: load the data, drop the "PA" subject,
    apply the configured split method, run the deep-learning
    configuration experiment, and save its result.
    """
    print(f"{dt()} :: Experiments Initialize")
    for combine_size in range(1, comb_degree + 1):
        print(f"{dt()} :: {combine_size} sample experiments")
        param.nb_combine = combine_size
        raw = loader.data_loader(param, target=combine_size)
        filtered = preprocessing.del_subject(param, raw, target="PA")
        train, test, nb_class, nb_people = preprocessing.chosen_method(
            param=param, comb=combine_size, datasets=filtered)
        deep_learning_experiment_configuration(
            param, train, test, [nb_class, nb_people])
        ds.save_result(param)
def cropping(param):
    """Build one cropping-network model per train/test split.

    Loads the datasets, normalizes all sequences to a common length,
    shifts column -2 from 1-based to 0-based and sorts each array by it
    (presumably a tag/label column — TODO confirm), then runs
    model_compactor.model_setting for every split.
    """
    print(f"{dt()} :: Cropping Network Initialize")
    datasets = loader.create_loader(param)
    data_list = preprocessing.normalize_all_of_length(param, datasets)
    for i, data in enumerate(data_list):
        # Make column -2 zero-based, then order rows by it.
        data[:, -2] = data[:, -2] - 1
        data_list[i] = data[data[:, -2].argsort()]
    param.nb_modal = 3
    train, test, nb_class, nb_people = preprocessing.chosen_method(
        param=param, comb=1, datasets=data_list)
    nb_repeat = len(train)
    for repeat in range(nb_repeat):
        # model_setting is invoked for its side effects; the original bound
        # its return value to an unused local (`model`), removed here.
        model_compactor.model_setting(
            param, train[repeat], test[repeat], [nb_class, nb_people])
    print('Done?')
def experiment(param, comb_degree=5):
    """Dispatch combination experiments by model family.

    For each combination size, routes to the deep-learning configuration,
    custom, vector, or ensemble pipeline depending on param.model_name /
    param.object, then saves the result.

    NOTE(review): comb_degree is unused — the sizes are hard-coded to
    [2, 4]; confirm whether range(1, comb_degree + 1) was intended.
    NOTE(review): a second `def experiment` appears later in this file and
    shadows this definition at import time — confirm which one is live.
    """
    print(f"{dt()} :: Experiments Initialize")
    for nb_combine in [2, 4]:
        print(f"{dt()} :: {nb_combine} sample experiments")
        param.nb_combine = nb_combine
        model_info = model_compactor.model_info
        if (param.model_name in model_info['dl']
                or param.model_name in model_info['c_dl']):
            # 'dl' and 'c_dl' share the same loading/splitting; only the
            # experiment entry point differs.
            datasets = loader.data_loader(param, target=nb_combine)
            train, test, nb_class, nb_people = preprocessing.chosen_method(
                param=param, comb=nb_combine, datasets=datasets)
            if param.model_name in model_info['dl']:
                deep_learning_experiment_configuration(
                    param, train, test, [nb_class, nb_people])
            else:
                deep_learning_experiment_custom(
                    param, train, test, [nb_class, nb_people])
            ds.save_result(param)
        elif param.model_name in model_info['v_dl']:
            # Vector models load from vector_loader and always use comb=1.
            datasets = loader.vector_loader(param)
            train, test, nb_class, nb_people = preprocessing.chosen_method(
                param=param, comb=1, datasets=datasets)
            deep_learning_experiment_vector(
                param, train, test, [nb_class, nb_people])
            ds.save_result(param)
        elif param.object == 'ensemble':
            datasets = loader.data_loader(param, nb_combine)
            train, test, nb_class, nb_people = preprocessing.chosen_method(
                param=param, comb=nb_combine, datasets=datasets)
            deep_learning_experiment_ensemble(
                param, train, test, [nb_class, nb_people])
            ds.save_result(param)
def experiment(param, comb_degree=5):
    """Run classification experiments over sample-combination sizes.

    Dispatches to the deep-learning or machine-learning configuration
    pipeline based on param.model_name. The guard below skips every size
    except nb_combine == 1, so only that case is actually processed.
    """
    print(f"{dt()} :: Experiments Initialize")
    for nb_combine in range(1, comb_degree + 1):
        print(f"{dt()} :: {nb_combine} sample experiments")
        param.nb_combine = nb_combine
        if nb_combine != 1:
            continue
        loaded = loader.data_loader(param, target=nb_combine)
        train, test, nb_class, nb_people = preprocessing.chosen_method(
            param=param, comb=nb_combine, datasets=loaded)
        dl_models = model_compactor.model_info['dl']
        ml_models = model_compactor.model_info['ml']
        if param.model_name in dl_models:
            deep_learning_experiment_configuration(
                param, train, test, [nb_class, nb_people])
            ds.save_result(param)
        elif param.model_name in ml_models:
            machine_learning_experiment_configuration(
                param, train, test, [nb_class, nb_people])
def _convert_nb_repeat(param, nb_people):
    """Return how many train/test splits exist for param.method.

    NOTE(review): the 'people' case compares with `==` while the others
    use `in` — preserved as found; confirm which is intended.

    Raises:
        ValueError: for an unrecognized method (the original fell through
        and crashed later with UnboundLocalError).
    """
    if param.method == method_select['people']:
        return nb_people
    if param.method in method_select['repeat']:
        return 20
    if param.method in method_select["CrossValidation"]:
        return param.collect["CrossValidation"] * 5
    if param.method in method_select['specific']:
        return 5
    raise ValueError(f"unknown method: {param.method!r}")


def convert(param):
    """Export train/test splits as MATLAB .mat files.

    For every split, the subject id ("people") and class tag are appended
    as two extra columns to each modality's data matrix; the modalities
    are collected into dicts keyed by sensor name and written to
    ../Result/Convert/matfile/{train,test}/<repeat>.mat via savemat.
    """
    print(f"{dt()} :: Convert Initialize")
    for nb_combine in range(1, 5):
        print(f"{dt()} :: {nb_combine} sample experiments")
        param.nb_combine = nb_combine
        # Only the single-sample case is exported; other sizes are skipped.
        if nb_combine != 1:
            continue
        datasets = loader.data_loader(param, target=nb_combine)
        train, test, nb_class, nb_people = preprocessing.chosen_method(
            param=param, comb=nb_combine, datasets=datasets)
        nb_repeat = _convert_nb_repeat(param, nb_people)
        for repeat in range(nb_repeat):
            print(
                f"{dt()} :: {repeat + 1}/{nb_repeat} convert target progress")
            tartr = train[repeat]
            tarte = test[repeat]
            if param.datatype == "type":
                # "type" data carries 1-based tags; shift to 0-based.
                tartr["tag"] -= 1
                tarte["tag"] -= 1
            # Two label columns per sample: 0 = subject id, 1 = class tag.
            tr_label = np.zeros([len(tartr["people"]), 2])
            te_label = np.zeros([len(tarte["people"]), 2])
            tr_label[:, 0] = tartr["people"]
            tr_label[:, 1] = tartr["tag"]
            te_label[:, 0] = tarte["people"]
            te_label[:, 1] = tarte["tag"]
            tr_data = []
            te_data = []
            for idx in range(param.nb_modal):
                tartr[f"data_{idx}"] = np.hstack(
                    [tartr[f"data_{idx}"], tr_label])
                tarte[f"data_{idx}"] = np.hstack(
                    [tarte[f"data_{idx}"], te_label])
                tr_data.append(tartr[f"data_{idx}"])
                te_data.append(tarte[f"data_{idx}"])
            train_dict = dict()
            test_dict = dict()
            if param.nb_modal in (3, 7):
                # NOTE(review): the original's 3- and 7-modal branches were
                # identical and both used this 3-name list, so with
                # nb_modal == 7 only the first three modalities are exported
                # (zip truncates) — confirm that is intended.
                datatype = ['pressure', 'acc', 'gyro']
                train_dict = dict(zip(datatype, tr_data))
                test_dict = dict(zip(datatype, te_data))
            save_dir = '../Result/Convert'
            folder_name = 'matfile'
            file_name = f'{repeat}.mat'
            train_dir = os.path.join(save_dir, folder_name, 'train')
            test_dir = os.path.join(save_dir, folder_name, 'test')
            # makedirs replaces the original incremental mkdir loops and
            # also creates save_dir itself when missing.
            os.makedirs(train_dir, exist_ok=True)
            os.makedirs(test_dir, exist_ok=True)
            savemat(os.path.join(train_dir, file_name), train_dict)
            savemat(os.path.join(test_dir, file_name), test_dict)