def main(params):
    """ PIPELINE for new detections: prepare the experiment folder, load a
    pre-trained classifier and run center detection over all listed images,
    exporting per-image statistics to CSV.

    :param {str: str} params: experiment parameters (paths, nb_jobs, ...)
    """
    logging.info('running...')
    params = run_train.prepare_experiment_folder(params, FOLDER_EXPERIMENT)
    tl_expt.set_experiment_logger(params['path_expt'])
    logging.info('COMPUTER: \n%s', repr(os.uname()))
    logging.info(tl_expt.string_dict(params, desc='PARAMETERS'))

    tl_expt.create_subfolders(params['path_expt'], LIST_SUBFOLDER)

    path_csv = os.path.join(params['path_expt'], NAME_CSV_TRIPLES)
    df_paths = get_csv_triplets(params['path_list'], path_csv,
                                params['path_images'], params['path_segms'],
                                force_reload=FORCE_RERUN)

    dict_classif = seg_clf.load_classifier(params['path_classif'])
    params_clf = dict_classif['params']
    params_clf.update(params)
    # FIX: log the dict that was actually updated (params_clf), not params
    logging.info(tl_expt.string_dict(params_clf, desc='UPDATED PARAMETERS'))

    # perform detection on all new images, dumping intermediate results so a
    # crash mid-run does not lose already-computed rows
    df_stat = pd.DataFrame()
    tqdm_bar = tqdm.tqdm(total=len(df_paths))
    if params['nb_jobs'] > 1:
        wrapper_detection = partial(load_compute_detect_centers,
                                    params=params_clf,
                                    path_classif=params['path_classif'],
                                    path_output=params['path_expt'])
        mproc_pool = mproc.Pool(params['nb_jobs'])
        for dict_center in mproc_pool.imap_unordered(wrapper_detection,
                                                     df_paths.iterrows()):
            df_stat = df_stat.append(dict_center, ignore_index=True)
            df_stat.to_csv(os.path.join(params['path_expt'],
                                        NAME_CSV_TRIPLES_TEMP))
            tqdm_bar.update()
        mproc_pool.close()
        mproc_pool.join()
    else:
        classif = dict_classif['clf_pipeline']
        for idx_row in df_paths.iterrows():
            dict_center = load_compute_detect_centers(
                idx_row, params_clf, classif, path_output=params['path_expt'])
            df_stat = df_stat.append(dict_center, ignore_index=True)
            df_stat.to_csv(os.path.join(params['path_expt'],
                                        NAME_CSV_TRIPLES_TEMP))
            tqdm_bar.update()
    # FIX: close the progress bar so it does not corrupt later terminal output
    tqdm_bar.close()

    df_stat.set_index(['image'], inplace=True)
    df_stat.to_csv(os.path.join(params['path_expt'], NAME_CSV_TRIPLES))
    logging.info('STATISTIC: \n %s', repr(df_stat.describe()))

    logging.info('DONE')
def main(params):
    """ PIPELINE for new detections: set up the experiment folder, load the
    trained classifier, run detection over every listed image (sequential or
    parallel via WrapExecuteSequence) and export per-image statistics.

    :param {str: str} paths:
    """
    logging.info('running...')
    params = run_train.prepare_experiment_folder(params, FOLDER_EXPERIMENT)
    # run_train.check_pathes_patterns(paths)
    path_expt = params['path_expt']
    tl_expt.set_experiment_logger(path_expt)
    logging.info('COMPUTER: \n%s', repr(os.uname()))
    logging.info(tl_expt.string_dict(params, desc='PARAMETERS'))

    tl_expt.create_subfolders(path_expt, LIST_SUBFOLDER)

    # build (or reload) the list of image/segmentation triples to process
    csv_triples = os.path.join(path_expt, NAME_CSV_TRIPLES)
    df_paths = get_csv_triplets(params['path_list'], csv_triples,
                                params['path_images'], params['path_segms'],
                                force_reload=FORCE_RERUN)

    # merge the classifier's stored parameters with the current ones
    classif_dict = seg_clf.load_classifier(params['path_classif'])
    clf_params = classif_dict['params']
    clf_params.update(params)
    logging.info(tl_expt.string_dict(params, desc='UPDATED PARAMETERS'))

    # run detection per image; dump intermediate CSV after each result
    df_stat = pd.DataFrame()
    detect_fn = partial(load_compute_detect_centers,
                        params=clf_params,
                        path_classif=params['path_classif'],
                        path_output=path_expt)
    executor = tl_expt.WrapExecuteSequence(detect_fn, df_paths.iterrows(),
                                           nb_jobs=params['nb_jobs'])
    path_tmp = os.path.join(path_expt, NAME_CSV_TRIPLES_TEMP)
    for center_info in executor:
        df_stat = df_stat.append(center_info, ignore_index=True)
        df_stat.to_csv(path_tmp)

    # final export indexed by image name
    df_stat.set_index(['image'], inplace=True)
    df_stat.to_csv(os.path.join(path_expt, NAME_CSV_TRIPLES))
    logging.info('STATISTIC: \n %s', repr(df_stat.describe()))
    logging.info('DONE')