def rename_frames(paths, json_path, get_fun=False):
    # NOTE(review): the original `def` line was lost when this file was
    # collapsed onto one line; the signature is reconstructed from the
    # free names used in the body (paths, json_path, get_fun) and from the
    # __main__ call `rename_frames(paths, "rename")` — confirm against VCS.
    """Train and evaluate an LR voting ensemble on the datasets at *paths*.

    Builds a feature-renaming helper from the JSON file at *json_path*.
    If *get_fun* is truthy, returns that helper instead of running the
    experiment; otherwise trains the ensemble, runs a vote, and prints
    the classification report.
    """
    datasets = ens.read_dataset(paths["common"], paths["binary"])
    helper = get_renam_fun(json_path)
    if get_fun:
        return helper
    # Renaming is currently disabled; datasets pass through unchanged.
    new_datasets = datasets  # [helper(data_i) for data_i in datasets]
    votes = ens.Votes(learn.train_ens(new_datasets, clf="LR"))
    result_i = votes.voting(False)
    result_i.report()


def get_renam_fun(json_path):
    """Return a function that copies a dataset's features under new names.

    The mapping old-name -> new-name is loaded from *json_path* via
    read_rename().  The returned helper builds a fresh feats.Feats()
    whose keys are the renamed features.
    """
    rename = read_rename(json_path)

    def helper(data_i):
        feat_i = feats.Feats()
        for name_i, rename_i in rename.items():
            print((rename_i, name_i))
            feat_i[rename_i] = data_i[name_i]
        return feat_i

    return helper


if __name__ == "__main__":
    dataset = "3DHOI"
    dir_path = ".."
    paths = exp.basic_paths(dataset, dir_path, "dtw", None)
    paths["common"] = ["../3DHOI/1D_CNN/feats"]
    rename_frames(paths, "rename")
#    rename=random_cat("../3DHOI/1D_CNN/feats")
#    save_rename("rename",rename)
# NOTE(review): orphaned statement — its enclosing function header was lost
# when the file was collapsed onto one line; `loss_name`, `result_dict` and
# `out_i` are undefined at module scope, so importing this module raises
# NameError.  Restore the original enclosing def from version control.
exp.result_exp(loss_name, result_dict, out_i)


def weight_desc(result_dict, eps=0.02):
    """Annotate result keys with the number and indices of selected classifiers.

    *result_dict* maps a name to a ``(result, weights)`` pair, where
    *weights* is a numpy array of per-classifier weights.  Classifiers
    with weight > *eps* count as selected.  Returns a new dict keyed by
    "name,n_selected,[indices]" with the same result values.
    """
    weight_dict = {}
    for name_i, pair_i in result_dict.items():
        result_i, weights_i = pair_i
        s_clf = (weights_i > eps)
        n_clf = weights_i[s_clf].shape[0]
        # np.where on the boolean mask directly (equivalent to `== True`).
        s_clf = str(np.where(s_clf)[0])
        new_name_i = "%s,%d,%s" % (name_i, n_clf, s_clf)
        weight_dict[new_name_i] = result_i
    return weight_dict


def single_exp(paths, loss_type, out_path, p=0.5, k=10):
    """Run one weight-optimization experiment and save its confusion matrix.

    *loss_type* selects the loss from {"MSE", "gasen", "Comb"}.
    With p < 1.0 the cross-validation is wrapped in a k-fold median
    validator.  Prints the report and writes the confusion matrix to
    *out_path*.
    """
    loss_dict = {"MSE": MSE, "gasen": gasen.Corl, "Comb": Comb}
    valid = auc.CrossVal(p)
    if p < 1.0:
        valid = auc.MedianaVal(valid, k=k)
    optim = OptimWeights(loss_dict[loss_type], valid)
    result = optim(paths)[0]
    result.report()
    result.get_cf(out_path)


if __name__ == "__main__":
    dataset = "MHAD"
    dir_path = "../../ICSS"  # %s" % dataset
    paths = exp.basic_paths(dataset, dir_path, "dtw", "ens/feats")
    paths["common"].append("%s/%s/1D_CNN/feats" % (dir_path, dataset))
    print(paths)
#    optim=auc_exp(paths,"MHAD")
    single_exp(paths, "Comb", "cf/%s" % dataset, p=1.0)