def get_vids(cf, dataset) -> Dict[str, List[Vid_daly]]:
    """Group video ids of `dataset` into named splits.

    'train'/'val' come from splitting a validation subset off the train
    data (controlled by the `dataset.val_split.*` config values), while
    'trainval' and 'test' are the dataset's own 'train'/'test' splits.
    """
    fraction = cf['dataset.val_split.fraction']
    nsamplings = cf['dataset.val_split.nsamplings']
    seed = cf['dataset.val_split.seed']
    val_vids, train_vids = split_off_validation_set(
            dataset, fraction, nsamplings, seed)
    vgroup: Dict[str, List[Vid_daly]] = {}
    vgroup['train'] = train_vids
    vgroup['val'] = val_vids
    vgroup['trainval'] = get_daly_split_vids(dataset, 'train')
    vgroup['test'] = get_daly_split_vids(dataset, 'test')
    return vgroup
def eval_daly_object(workfolder, cfg_dict, add_args):
    """Evaluation code with hacks.

    Sets up config defaults, loads the DALY dataset, converts the chosen
    subset into a datalist via the hacky converter, then evaluates the
    model selected by the foldname hack with the shared eval routine.
    """
    out, = snippets.get_subfolders(workfolder, ['out'])
    cfg = snippets.YConfig(cfg_dict)
    # Layer in the shared detectron, test-time, and object-hack defaults
    _set_defcfg_detectron(cfg)
    _set_defcfg_detectron_test(cfg)
    _set_defcfg_object_hacks(cfg)
    # NOTE(review): line structure of this config string was reconstructed
    # from a whitespace-mangled source — confirm against the original file
    cfg.set_deftype("""
    what_to_eval: [~, str]
    eval_hacks:
        model_to_eval: ['what', ['what', 'what+foldname']]
    """)
    cf = cfg.parse()
    # Keys under the 'd2.' prefix are forwarded as detectron2 additions
    cf_add_d2 = cfg.without_prefix('d2.')
    # DALY Dataset
    dataset = Dataset_daly_ocv()
    dataset.populate_from_folder(cf['dataset.cache_folder'])
    split_label = cf['dataset.subset']
    split_vids = get_daly_split_vids(dataset, split_label)
    datalist: Datalist = simplest_daly_to_datalist_v2(dataset, split_vids)
    cls_names, datalist_converter = \
        _datalist_hacky_converter(cf, dataset)
    datalist = datalist_converter(datalist)
    TEST_DATASET_NAME = 'daly_objaction_test'
    # Model path possibly rewritten according to 'eval_hacks.model_to_eval'
    model_to_eval = _eval_foldname_hack(cf)
    _eval_routine(cf, cf_add_d2, out, cls_names, TEST_DATASET_NAME,
            datalist, model_to_eval)
def train_daly_object(workfolder, cfg_dict, add_args):
    """Train object detection on DALY via the shared train routine.

    Loads the dataset, maps it through the hacky datalist converter for
    the o100 object categories, and hands everything to _train_routine.
    """
    out, = snippets.get_subfolders(workfolder, ['out'])
    cfg = snippets.YConfig(cfg_dict)
    _set_defcfg_detectron(cfg)
    _set_defcfg_detectron_train(cfg)
    _set_defcfg_object_hacks(cfg)
    cf = cfg.parse()
    # Detectron2 additions, rescaled for the configured number of GPUs
    d2_additions = cfg.without_prefix('d2.')
    d2_additions = d2dict_gpu_scaling(cf, d2_additions, cf['num_gpus'])
    dataset = Dataset_daly_ocv()
    dataset.populate_from_folder(cf['dataset.cache_folder'])
    o100_objects, category_map = get_category_map_o100(dataset)
    # Sanity check: the o100 mapping is expected to yield 16 categories
    assert len(o100_objects) == 16
    vids = get_daly_split_vids(dataset, cf['dataset.subset'])
    datalist: Datalist = simplest_daly_to_datalist_v2(dataset, vids)
    cls_names, converter = _datalist_hacky_converter(cf, dataset)
    datalist = converter(datalist)
    TRAIN_DATASET_NAME = 'daly_objaction_train'
    _train_routine(cf, d2_additions, out, cls_names,
            TRAIN_DATASET_NAME, datalist, add_args)
def resolve_dataset_tubes(cf):
    """Load the DALY dataset and its GT tubes, restricted to the config split.

    Returns (dataset, split video ids, ground-truth tubes as an AV_dict).
    """
    dataset = Dataset_daly_ocv(cf['dataset.mirror'])
    dataset.populate_from_folder(cf['dataset.cache_folder'])
    split_vids: List[Vid_daly] = get_daly_split_vids(
            dataset, cf['dataset.subset'])
    # Ground-truth tubes, filtered down to the selected videos
    dgt_tubes: Dict[I_dgt, T_dgt] = get_daly_gt_tubes(dataset)
    dgt_tubes = dtindex_filter_split(dgt_tubes, split_vids)
    av_gt_tubes: AV_dict[T_dgt] = push_into_avdict(dgt_tubes)
    return dataset, split_vids, av_gt_tubes
def train_daly_action(workfolder, cfg_dict, add_args):
    """Train action detection on DALY (pfadet datalist conversion).

    Class names are the dataset's action names; the heavy lifting is
    delegated to the shared _train_routine.
    """
    out, = snippets.get_subfolders(workfolder, ['out'])
    cfg = snippets.YConfig(cfg_dict)
    _set_defcfg_detectron(cfg)
    _set_defcfg_detectron_train(cfg)
    cf = cfg.parse()
    # Detectron2 additions, rescaled for the configured number of GPUs
    d2_additions = d2dict_gpu_scaling(
            cf, cfg.without_prefix('d2.'), cf['num_gpus'])
    dataset = Dataset_daly_ocv()
    dataset.populate_from_folder(cf['dataset.cache_folder'])
    vids = get_daly_split_vids(dataset, cf['dataset.subset'])
    datalist: Datalist = daly_to_datalist_pfadet(dataset, vids)
    cls_names = dataset.action_names
    TRAIN_DATASET_NAME = 'daly_pfadet_train'
    _train_routine(cf, d2_additions, out, cls_names,
            TRAIN_DATASET_NAME, datalist, add_args)