Example #1
def tilewise_wrapper(fun, *args, **kwargs):
    """
    """
    if not cfg['debug']:  # redirect stdout and stderr to log file
        f = open(kwargs['stdout'], 'a')
        sys.stdout = f
        sys.stderr = f

    try:
        out = fun(*args)
    except Exception:
        print("Exception in %s" % fun.__name__)
        traceback.print_exc()
        raise

    common.garbage_cleanup()
    if not cfg['debug']:  # close logs
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        f.close()

    return out
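
For context, a minimal sketch of how such a wrapper might be invoked on a single tile. The tile function and paths below are placeholders invented for illustration; in the pipeline entry points further down, it is the per-tile steps (pointing_correction, stereo_matching, etc.) that get run this way, and cfg, sys and common are assumed to be available at module level as in the snippet above.

# hypothetical standalone use, with a trivial stand-in for a real tile step
def count_pixels(tile):
    return 42  # placeholder for actual per-tile work

out = tilewise_wrapper(count_pixels, {'dir': 'tile_0_0'},
                       stdout=os.path.join('tile_0_0', 'stdout.log'))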
Example #2
def main(user_cfg):
    """
    Recompute the s2p tile geometry for the config file
    and produce an svg representing the tiles

    Args:
        user_cfg: user config dictionary
    """
    common.print_elapsed_time.t0 = datetime.datetime.now()
    initialization.build_cfg(user_cfg)

    tw, th = initialization.adjust_tile_size()
    tiles_txt = os.path.join(cfg['out_dir'], 'tiles.txt')
    tiles = initialization.tiles_full_info(tw, th, tiles_txt)

    # generate svg tile map
    svg_path = os.path.join(cfg['out_dir'], 'tiles.svg')
    write_svg_tilemap(svg_path, cfg, tiles)

    print("\n\n    svg tilemap saved in: %s\n" % svg_path)


    # cleanup
    common.garbage_cleanup()
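
This entry point, like the pipeline entry points below, takes a plain dictionary. A minimal sketch of driving it from a JSON file; the file name is a placeholder, not part of the snippet above:

import json

with open('config.json') as f:   # placeholder path to an s2p config file
    user_cfg = json.load(f)
main(user_cfg)                   # builds cfg, recomputes the tiles, writes tiles.svg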
Example #3
def main(user_cfg):
    """
    Launch the s2p pipeline with the parameters given in a json file.

    Args:
        user_cfg: user config dictionary
    """
    common.print_elapsed_time.t0 = datetime.datetime.now()
    initialization.build_cfg(user_cfg)
    initialization.make_dirs()

    # multiprocessing setup
    nb_workers = multiprocessing.cpu_count()  # nb of available cores
    if cfg['max_processes'] is not None:
        nb_workers = cfg['max_processes']

    tw, th = initialization.adjust_tile_size()
    tiles_txt = os.path.join(cfg['out_dir'], 'tiles.txt')
    tiles = initialization.tiles_full_info(tw,
                                           th,
                                           tiles_txt,
                                           create_masks=True)
    if not tiles:
        print('ERROR: the ROI is not seen in two images or is totally masked.')
        sys.exit(1)

    # initialisation: write the list of tilewise json files to outdir/tiles.txt
    with open(tiles_txt, 'w') as f:
        for t in tiles:
            print(t['json'], file=f)

    n = len(cfg['images'])
    tiles_pairs = [(t, i) for i in range(1, n) for t in tiles]
    timeout = cfg['timeout']

    # local-pointing step:
    print('correcting pointing locally...')
    parallel.launch_calls(pointing_correction,
                          tiles_pairs,
                          nb_workers,
                          timeout=timeout)

    # global-pointing step:
    print('correcting pointing globally...')
    global_pointing_correction(tiles)
    common.print_elapsed_time()

    # rectification step:
    print('rectifying tiles...')
    parallel.launch_calls(rectification_pair,
                          tiles_pairs,
                          nb_workers,
                          timeout=timeout)

    # matching step:
    print('running stereo matching...')
    if cfg['max_processes_stereo_matching'] is not None:
        nb_workers_stereo = cfg['max_processes_stereo_matching']
    else:
        nb_workers_stereo = nb_workers
    parallel.launch_calls(stereo_matching,
                          tiles_pairs,
                          nb_workers_stereo,
                          timeout=timeout)

    if n > 2:
        # disparity-to-height step:
        print('computing height maps...')
        parallel.launch_calls(disparity_to_height,
                              tiles_pairs,
                              nb_workers,
                              timeout=timeout)

        print('computing local pairwise height offsets...')
        parallel.launch_calls(mean_heights, tiles, nb_workers, timeout=timeout)

        # global-mean-heights step:
        print('computing global pairwise height offsets...')
        global_mean_heights(tiles)

        # heights-to-ply step:
        print('merging height maps and computing point clouds...')
        parallel.launch_calls(heights_to_ply,
                              tiles,
                              nb_workers,
                              timeout=timeout)
    else:
        # triangulation step:
        print('triangulating tiles...')
        parallel.launch_calls(disparity_to_ply,
                              tiles,
                              nb_workers,
                              timeout=timeout)

    # local-dsm-rasterization step:
    print('computing DSM by tile...')
    parallel.launch_calls(plys_to_dsm, tiles, nb_workers, timeout=timeout)

    # global-dsm-rasterization step:
    print('computing global DSM...')
    global_dsm(tiles)
    common.print_elapsed_time()

    # cleanup
    common.garbage_cleanup()
    common.print_elapsed_time(since_first_call=True)
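
To make the tiles_pairs construction concrete: each tile is paired with every secondary image index, so the pairwise steps run len(tiles) * (n - 1) tilewise calls. A small illustration with toy stand-ins for the tile dictionaries:

tiles = [{'json': 't1.json'}, {'json': 't2.json'}]
n = 3
tiles_pairs = [(t, i) for i in range(1, n) for t in tiles]
# -> [({'json': 't1.json'}, 1), ({'json': 't2.json'}, 1),
#     ({'json': 't1.json'}, 2), ({'json': 't2.json'}, 2)]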
Example #4
def main(user_cfg):
    """
    Launch the s2p pipeline with the parameters given in a json file.

    Args:
        user_cfg: user config dictionary
    """
    common.print_elapsed_time.t0 = datetime.datetime.now()
    initialization.build_cfg(user_cfg)
    initialization.make_dirs()

    # multiprocessing setup
    nb_workers = multiprocessing.cpu_count()  # nb of available cores
    if cfg['max_processes'] is not None:
        nb_workers = cfg['max_processes']

    tw, th = initialization.adjust_tile_size()
    tiles_txt = os.path.join(cfg['out_dir'], 'tiles.txt')
    tiles = initialization.tiles_full_info(tw,
                                           th,
                                           tiles_txt,
                                           create_masks=True)

    # initialisation step:
    # Write the list of json files to outdir/tiles.txt
    with open(tiles_txt, 'w') as f:
        for t in tiles:
            print(t['json'], file=f)

    n = len(cfg['images'])
    tiles_pairs = [(t, i) for i in range(1, n) for t in tiles]

    # local-pointing step:
    print('correcting pointing locally...')
    parallel.launch_calls(pointing_correction, tiles_pairs, nb_workers)

    # global-pointing step:
    print('correcting pointing globally...')
    global_pointing_correction(tiles)
    common.print_elapsed_time()

    # rectification step:
    print('rectifying tiles...')
    parallel.launch_calls(rectification_pair, tiles_pairs, nb_workers)

    # matching step:
    print('running stereo matching...')
    parallel.launch_calls(stereo_matching, tiles_pairs, nb_workers)

    if n > 2 and cfg['triangulation_mode'] == 'pairwise':
        # disparity-to-height step:
        print('computing height maps...')
        parallel.launch_calls(disparity_to_height, tiles_pairs, nb_workers)

        print('computing local pairwise height offsets...')
        parallel.launch_calls(mean_heights, tiles, nb_workers)

        # global-mean-heights step:
        print('computing global pairwise height offsets...')
        global_mean_heights(tiles)

        # heights-to-ply step:
        print('merging height maps and computing point clouds...')
        parallel.launch_calls(heights_to_ply, tiles, nb_workers)
    else:
        # triangulation step:
        print('triangulating tiles...')
        if cfg['triangulation_mode'] == 'geometric':
            parallel.launch_calls(multidisparities_to_ply, tiles, nb_workers)
        elif cfg['triangulation_mode'] == 'pairwise':
            parallel.launch_calls(disparity_to_ply, tiles, nb_workers)
        else:
            raise ValueError(
                "possible values for 'triangulation_mode': 'pairwise' or 'geometric'"
            )

    # local-dsm-rasterization step:
    print('computing DSM by tile...')
    parallel.launch_calls(plys_to_dsm, tiles, nb_workers)

    # global-dsm-rasterization step:
    print('computing global DSM...')
    global_dsm(tiles)
    common.print_elapsed_time()

    # cleanup
    common.garbage_cleanup()
    common.print_elapsed_time(since_first_call=True)
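
For reference, a sketch of a user_cfg covering only the keys this entry point reads after initialization.build_cfg merges it into cfg. The values are illustrative placeholders, the exact per-image keys are an assumption, and a real s2p configuration needs more fields (ROI, output resolution, etc.) than shown here:

user_cfg = {
    'out_dir': 's2p_output',                 # where tiles.txt and the DSM are written
    'images': [{'img': 'img_1.tif'},         # at least two images; more than two plus
               {'img': 'img_2.tif'}],        # 'pairwise' mode enables height-map merging
    'max_processes': 4,                      # None -> use multiprocessing.cpu_count()
    'triangulation_mode': 'pairwise',        # or 'geometric'
}
main(user_cfg)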