                               params['path_centers'], FORCE_RELOAD)
    df_eval = df_paths.copy(deep=True)
    # run each configured detection stage and export the cumulative results
    for stage in params['stages']:
        df_eval = evaluate_detection_stage(df_eval, stage,
                                           params['path_infofile'],
                                           params['path_expt'],
                                           params['nb_workers'])
        if not df_eval.empty and 'image' in df_eval.columns:
            df_eval.set_index('image', inplace=True)
        df_eval.to_csv(os.path.join(params['path_expt'], NAME_CSV_TRIPLES_STAT))
        gc.collect()
        time.sleep(1)

    # export overall summary statistics of the evaluation
    if not df_eval.empty:
        df_stat = df_eval.describe().transpose()
        logging.info('STATISTIC: \n %r', df_stat)
        df_stat.to_csv(os.path.join(params['path_expt'], NAME_CSV_STATISTIC))


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    logging.info('running...')

    params = run_train.arg_parse_params(DEFAULT_PARAMS)
    main(params)

    logging.info('DONE')
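# --- Sketch (not part of the original script) --------------------------------
# Minimal, self-contained illustration of the statistics export used above:
# DataFrame.describe().transpose() yields one row per numeric column with
# count/mean/std/min/quartiles/max, which is then written to CSV. The column
# names and the output file below are illustrative assumptions only.
import pandas as pd

_df_eval = pd.DataFrame({'precision': [0.90, 0.80, 0.95],
                         'recall': [0.70, 0.85, 0.90]})
_df_stat = _df_eval.describe().transpose()
_df_stat.to_csv('statistic_example.csv')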
    tqdm_bar = tqdm.tqdm(total=len(df_paths))
    if params['nb_jobs'] > 1:
        # parallel branch: fix the constant arguments of the worker and stream
        # per-row dicts through the pool, collecting results as they finish
        wrapper_clustering = partial(cluster_points_draw_export, params=params,
                                     path_out=params['path_output'])
        pool = mproc.Pool(params['nb_jobs'])
        for dict_center in pool.imap_unordered(
                wrapper_clustering,
                (dict(row) for idx, row in df_paths.iterrows())):
            df_paths_new = df_paths_new.append(dict_center, ignore_index=True)
            tqdm_bar.update()
        pool.close()
        pool.join()
    else:
        # sequential fallback for a single worker
        for dict_row in (dict(row) for idx, row in df_paths.iterrows()):
            dict_center = cluster_points_draw_export(dict_row, params,
                                                     params['path_output'])
            df_paths_new = df_paths_new.append(dict_center, ignore_index=True)
            tqdm_bar.update()

    df_paths_new.set_index('image', inplace=True)
    df_paths_new.to_csv(path_cover)

    logging.info('DONE')


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    params = run_train.arg_parse_params(PARAMS)
    main(params)
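# --- Sketch (not part of the original scripts) --------------------------------
# The parallel branch above follows a common pattern: fix the constant
# arguments of the worker with functools.partial and stream per-row dicts
# through multiprocessing.Pool.imap_unordered so results are collected as soon
# as any worker finishes. The names below (process_row, the toy DataFrame) are
# illustrative assumptions, not part of the original code.
import multiprocessing as mproc
from functools import partial

import pandas as pd


def process_row(row, scale):
    """Toy worker returning a dict that can be collected into a results frame."""
    return {'image': row['image'], 'value': row['value'] * scale}


if __name__ == '__main__':
    df = pd.DataFrame({'image': ['a', 'b', 'c'], 'value': [1, 2, 3]})
    worker = partial(process_row, scale=10)
    with mproc.Pool(2) as pool:
        results = list(pool.imap_unordered(
            worker, (dict(row) for _, row in df.iterrows())))
    df_out = pd.DataFrame(results).set_index('image')
    print(df_out)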