def test_launch_calls_error():
    """
    Run several calls to an erroring function through a multiprocessing.Pool.
    """
    with pytest.raises(subprocess.CalledProcessError):
        parallel.launch_calls(raise_exception, [1, 1, 1, 1], 2,
                              subprocess.CalledProcessError(1, "failcmd"),
                              tilewise=False)
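# The test above assumes a module-level worker that always fails. That helper
# is not shown here; the sketch below is a hypothetical stand-in whose only
# guaranteed property is matching how the test calls it (one item from the
# argument list plus the forwarded CalledProcessError instance).
import subprocess


def raise_exception(*args):
    """Hypothetical failing worker: re-raise a forwarded exception, or a
    generic CalledProcessError if none was passed."""
    for arg in args:
        if isinstance(arg, BaseException):
            raise arg
    raise subprocess.CalledProcessError(1, "failcmd")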
def main(user_cfg):
    """
    Launch the s2p pipeline with the parameters given in a json file.

    Args:
        user_cfg: user config dictionary
    """
    common.print_elapsed_time.t0 = datetime.datetime.now()
    initialization.build_cfg(user_cfg)
    initialization.make_dirs()

    # multiprocessing setup
    nb_workers = multiprocessing.cpu_count()  # nb of available cores
    if cfg['max_processes'] is not None:
        nb_workers = cfg['max_processes']

    tw, th = initialization.adjust_tile_size()
    tiles_txt = os.path.join(cfg['out_dir'], 'tiles.txt')
    tiles = initialization.tiles_full_info(tw, th, tiles_txt, create_masks=True)
    if not tiles:
        print('ERROR: the ROI is not seen in two images or is totally masked.')
        sys.exit(1)

    # initialisation: write the list of tilewise json files to outdir/tiles.txt
    with open(tiles_txt, 'w') as f:
        for t in tiles:
            print(t['json'], file=f)

    n = len(cfg['images'])
    tiles_pairs = [(t, i) for i in range(1, n) for t in tiles]
    timeout = cfg['timeout']

    # local-pointing step:
    print('correcting pointing locally...')
    parallel.launch_calls(pointing_correction, tiles_pairs, nb_workers,
                          timeout=timeout)

    # global-pointing step:
    print('correcting pointing globally...')
    global_pointing_correction(tiles)
    common.print_elapsed_time()

    # rectification step:
    print('rectifying tiles...')
    parallel.launch_calls(rectification_pair, tiles_pairs, nb_workers,
                          timeout=timeout)

    # matching step:
    print('running stereo matching...')
    if cfg['max_processes_stereo_matching'] is not None:
        nb_workers_stereo = cfg['max_processes_stereo_matching']
    else:
        nb_workers_stereo = nb_workers
    parallel.launch_calls(stereo_matching, tiles_pairs, nb_workers_stereo,
                          timeout=timeout)

    if n > 2:
        # disparity-to-height step:
        print('computing height maps...')
        parallel.launch_calls(disparity_to_height, tiles_pairs, nb_workers,
                              timeout=timeout)

        print('computing local pairwise height offsets...')
        parallel.launch_calls(mean_heights, tiles, nb_workers, timeout=timeout)

        # global-mean-heights step:
        print('computing global pairwise height offsets...')
        global_mean_heights(tiles)

        # heights-to-ply step:
        print('merging height maps and computing point clouds...')
        parallel.launch_calls(heights_to_ply, tiles, nb_workers,
                              timeout=timeout)
    else:
        # triangulation step:
        print('triangulating tiles...')
        parallel.launch_calls(disparity_to_ply, tiles, nb_workers,
                              timeout=timeout)

    # local-dsm-rasterization step:
    print('computing DSM by tile...')
    parallel.launch_calls(plys_to_dsm, tiles, nb_workers, timeout=timeout)

    # global-dsm-rasterization step:
    print('computing global DSM...')
    global_dsm(tiles)
    common.print_elapsed_time()

    # cleanup
    common.garbage_cleanup()
    common.print_elapsed_time(since_first_call=True)
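# main() takes the already-parsed user configuration dictionary, not the json
# file itself. A minimal usage sketch (the file name "config.json" is an
# assumption; the set of required keys depends on the s2p version):
import json

with open("config.json") as f:
    user_cfg = json.load(f)

main(user_cfg)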
def tiles_full_info(tw, th, tiles_txt, create_masks=False):
    """
    List the tiles to process and prepare their output directory structure.

    Most of the time is spent discarding tiles that are masked by water
    (according to the exogenous DEM).

    Returns:
        a list of dictionaries. Each dictionary contains the image coordinates
        and the output directory path of a tile.
    """
    rpc = cfg['images'][0]['rpcm']
    roi_msk = cfg['images'][0]['roi']
    cld_msk = cfg['images'][0]['cld']
    wat_msk = cfg['images'][0]['wat']
    rx = cfg['roi']['x']
    ry = cfg['roi']['y']
    rw = cfg['roi']['w']
    rh = cfg['roi']['h']

    # list of dictionaries (one for each non-masked tile)
    tiles = []

    # list tiles coordinates
    tiles_coords, neighborhood_coords_dict = compute_tiles_coordinates(
        rx, ry, rw, rh, tw, th)

    if create_masks or not os.path.exists(tiles_txt):
        print('\ndiscarding masked tiles...')
        images_sizes = []
        for img in cfg['images']:
            with rasterio.open(img['img'], 'r') as f:
                images_sizes.append(f.shape)

        # compute all masks in parallel as numpy arrays
        tiles_usefulnesses = parallel.launch_calls(is_this_tile_useful,
                                                   tiles_coords,
                                                   cfg['max_processes'],
                                                   images_sizes,
                                                   tilewise=False,
                                                   timeout=cfg['timeout'])

        # discard useless tiles from neighborhood_coords_dict
        discarded_tiles = set(x for x, (b, _) in zip(tiles_coords,
                                                     tiles_usefulnesses)
                              if not b)
        for k, v in neighborhood_coords_dict.items():
            neighborhood_coords_dict[k] = list(set(v) - discarded_tiles)

        for coords, usefulness in zip(tiles_coords, tiles_usefulnesses):

            useful, mask = usefulness
            if not useful:
                continue

            tile = create_tile(coords, neighborhood_coords_dict)
            tiles.append(tile)

            # make tiles directories and store json configuration dumps
            common.mkdir_p(tile['dir'])
            for i in range(1, len(cfg['images'])):
                common.mkdir_p(os.path.join(tile['dir'], 'pair_{}'.format(i)))

            # save a json dump of the tile configuration
            tile_cfg = copy.deepcopy(cfg)
            x, y, w, h = tile['coordinates']
            for img in tile_cfg['images']:
                img.pop('rpcm', None)
            tile_cfg['roi'] = {'x': x, 'y': y, 'w': w, 'h': h}
            tile_cfg['full_img'] = False
            tile_cfg['max_processes'] = 1
            tile_cfg['neighborhood_dirs'] = tile['neighborhood_dirs']
            tile_cfg['out_dir'] = '../../..'

            with open(os.path.join(cfg['out_dir'], tile['json']), 'w') as f:
                json.dump(tile_cfg, f, indent=2,
                          default=workaround_json_int64)

            # save the mask
            common.rasterio_write(os.path.join(tile['dir'], 'mask.png'),
                                  mask.astype(np.uint8))
    else:
        if len(tiles_coords) == 1:
            tiles.append(create_tile(tiles_coords[0],
                                     neighborhood_coords_dict))
        else:
            with open(tiles_txt, 'r') as f_tiles:
                for config_json in f_tiles:
                    tile = {}
                    with open(os.path.join(cfg['out_dir'],
                                           config_json.rstrip(os.linesep)),
                              'r') as f_config:
                        tile_cfg = json.load(f_config)
                    roi = tile_cfg['roi']
                    coords = roi['x'], roi['y'], roi['w'], roi['h']
                    tiles.append(create_tile(coords,
                                             neighborhood_coords_dict))

    return tiles
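# The json.dump call above needs the workaround_json_int64 fallback because
# the tile configuration may contain numpy integers (e.g. ROI coordinates),
# which the json module cannot encode. The actual helper is not shown here;
# the sketch below is an assumed minimal implementation handling only numpy
# scalar types.
import numpy as np


def workaround_json_int64(obj):
    """Fallback for json.dump: convert numpy scalars to plain Python numbers."""
    if isinstance(obj, np.integer):
        return int(obj)
    if isinstance(obj, np.floating):
        return float(obj)
    raise TypeError("{} is not JSON serializable".format(type(obj)))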
def main(user_cfg):
    """
    Launch the s2p pipeline with the parameters given in a json file.

    Args:
        user_cfg: user config dictionary
    """
    common.print_elapsed_time.t0 = datetime.datetime.now()
    initialization.build_cfg(user_cfg)
    initialization.make_dirs()

    # multiprocessing setup
    nb_workers = multiprocessing.cpu_count()  # nb of available cores
    if cfg['max_processes'] is not None:
        nb_workers = cfg['max_processes']

    tw, th = initialization.adjust_tile_size()
    tiles_txt = os.path.join(cfg['out_dir'], 'tiles.txt')
    tiles = initialization.tiles_full_info(tw, th, tiles_txt, create_masks=True)

    # initialisation step:
    # write the list of json files to outdir/tiles.txt
    with open(tiles_txt, 'w') as f:
        for t in tiles:
            f.write(t['json'] + os.linesep)

    n = len(cfg['images'])
    tiles_pairs = [(t, i) for i in range(1, n) for t in tiles]

    # local-pointing step:
    print('correcting pointing locally...')
    parallel.launch_calls(pointing_correction, tiles_pairs, nb_workers)

    # global-pointing step:
    print('correcting pointing globally...')
    global_pointing_correction(tiles)
    common.print_elapsed_time()

    # rectification step:
    print('rectifying tiles...')
    parallel.launch_calls(rectification_pair, tiles_pairs, nb_workers)

    # matching step:
    print('running stereo matching...')
    parallel.launch_calls(stereo_matching, tiles_pairs, nb_workers)

    if n > 2 and cfg['triangulation_mode'] == 'pairwise':
        # disparity-to-height step:
        print('computing height maps...')
        parallel.launch_calls(disparity_to_height, tiles_pairs, nb_workers)

        print('computing local pairwise height offsets...')
        parallel.launch_calls(mean_heights, tiles, nb_workers)

        # global-mean-heights step:
        print('computing global pairwise height offsets...')
        global_mean_heights(tiles)

        # heights-to-ply step:
        print('merging height maps and computing point clouds...')
        parallel.launch_calls(heights_to_ply, tiles, nb_workers)
    else:
        # triangulation step:
        print('triangulating tiles...')
        if cfg['triangulation_mode'] == 'geometric':
            parallel.launch_calls(multidisparities_to_ply, tiles, nb_workers)
        elif cfg['triangulation_mode'] == 'pairwise':
            parallel.launch_calls(disparity_to_ply, tiles, nb_workers)
        else:
            raise ValueError("possible values for 'triangulation_mode' are "
                             "'pairwise' or 'geometric'")

    # local-dsm-rasterization step:
    print('computing DSM by tile...')
    parallel.launch_calls(plys_to_dsm, tiles, nb_workers)

    # global-dsm-rasterization step:
    print('computing global DSM...')
    global_dsm(tiles)
    common.print_elapsed_time()

    # cleanup
    common.garbage_cleanup()
    common.print_elapsed_time(since_first_call=True)
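# Every step above is dispatched through parallel.launch_calls. The stand-in
# below only illustrates the calling convention inferred from these main()
# variants (tuples in the argument list are unpacked into positional
# arguments, extra positional arguments are forwarded to every call); it is an
# assumption, not the actual s2p implementation, which also handles timeouts,
# progress reporting and error propagation.
from multiprocessing import Pool


def launch_calls_sketch(fun, list_of_args, nb_workers, *extra_args,
                        tilewise=True, timeout=None):
    """Simplified stand-in: run fun over list_of_args in a process pool."""
    # 'tilewise' is kept only to mirror the observed signature; its effect is
    # not reproduced here. The 'timeout' default is an assumption; this sketch
    # only uses it to bound each result retrieval.
    with Pool(processes=nb_workers) as pool:
        async_results = []
        for args in list_of_args:
            args = args if isinstance(args, tuple) else (args,)
            async_results.append(pool.apply_async(fun, args + extra_args))
        return [r.get(timeout) for r in async_results]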