Example 1
def test_simple_rasterization_dataset_2():
    """
    Test simple_rasterization_dataset on cloud1_ref.nc converted to EPSG 32630
    Configuration 2: no xstart, ystart, xsize, ysize values
    """

    cloud = xr.open_dataset(
        absolute_data_path("input/intermediate_results/cloud1_ref.nc"))
    color = xr.open_dataset(
        absolute_data_path("input/intermediate_results/data1_ref_clr.nc"))

    utm = projection.points_cloud_conversion_dataset(cloud, 32630)

    xstart = None
    ystart = None
    xsize = None
    ysize = None
    resolution = 0.5

    raster = rasterization.simple_rasterization_dataset([utm], resolution,
                                                        32630, [color], xstart,
                                                        ystart, xsize, ysize,
                                                        0.3, 3)

    # Uncomment to update references
    # raster.to_netcdf(
    #     absolute_data_path('ref_output/rasterization_res_ref_2.nc'),
    # )

    raster_ref = xr.open_dataset(
        absolute_data_path("ref_output/rasterization_res_ref_2.nc"))
    assert_same_datasets(raster, raster_ref, atol=1.e-10, rtol=1.e-10)
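The two trailing positional arguments (0.3 and 3) are easy to misread. Below is a sketch of the same call written with keyword arguments; the names sigma and radius are assumptions about simple_rasterization_dataset's signature, not something this example confirms:

    # Hypothetical keyword form of the call above; sigma/radius names assumed.
    raster = rasterization.simple_rasterization_dataset(
        [utm], resolution, 32630, [color],
        xstart=xstart, ystart=ystart, xsize=xsize, ysize=ysize,
        sigma=0.3,  # assumed name: width of the gaussian weighting
        radius=3)   # assumed name: neighborhood radius, in pixels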
Example 2
def rasterization_wrapper(clouds_and_colors, resolution, epsg, **kwargs):
    """
    Wrapper for rasterization step.

    This function allow to convert a list of cloud to correct EPSG and rasterize it with associated colors.

    :param clouds_and_colors: list of tuple (cloud, colors)
    :type clouds_and_colors: list of pair of xarray
    :param resolution: resolution of DSM to produce (in meter, degree... [depends on epsg code])
    :type resolution: float
    :param  epsg_code: epsg code for the CRS of the output DSM
    :type epsg_code: int
    :return: digital surface model + projected colors
    :rtype: xarray 2d tuple
    """
    # Unpack list of clouds from tuple, and project them to correct EPSG if
    # needed
    clouds = [v[0]['ref'] for v in clouds_and_colors]

    # Unpack list of colors alike
    colors = [v[1]['ref'] for v in clouds_and_colors]

    # Add clouds and colors computed from the secondary disparity map
    if 'sec' in clouds_and_colors[0][0]:
        cloud_sec = [v[0]['sec'] for v in clouds_and_colors]
        clouds.extend(cloud_sec)

        color_sec = [v[1]['sec'] for v in clouds_and_colors]
        colors.extend(color_sec)

    # Call simple_rasterization
    return rasterization.simple_rasterization_dataset(clouds, resolution, epsg,
                                                      colors, **kwargs)
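A minimal usage sketch for this wrapper, with hypothetical inputs: each (cloud, colors) pair is a tuple of dicts keyed by 'ref' (and optionally 'sec') holding xarray datasets, as the unpacking above expects. The variables cloud_ref_1, color_ref_1, etc. are placeholders:

    # Hypothetical datasets; in practice these come from the stereo step.
    pair_1 = ({'ref': cloud_ref_1}, {'ref': color_ref_1})
    pair_2 = ({'ref': cloud_ref_2}, {'ref': color_ref_2})

    dsm = rasterization_wrapper([pair_1, pair_2], resolution=0.5, epsg=32630)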
Example 3
def test_simple_rasterization_dataset_2():
    """
    Test simple_rasterization_dataset with the test cloud cloud1_ref_epsg_32630.nc
    Configuration 2: no xstart, ystart, xsize, ysize values
    """

    cloud = xr.open_dataset(
        absolute_data_path(
            "input/rasterization_input/cloud1_ref_epsg_32630.nc"))
    color = xr.open_dataset(
        absolute_data_path("input/intermediate_results/data1_ref_clr.nc"))

    xstart = None
    ystart = None
    xsize = None
    ysize = None
    resolution = 0.5

    raster = rasterization.simple_rasterization_dataset([cloud], resolution,
                                                        32630, [color], xstart,
                                                        ystart, xsize, ysize,
                                                        0.3, 3)

    # Uncomment to update references
    # raster.to_netcdf(
    #     absolute_data_path('ref_output/rasterization_res_ref_2.nc'),
    # )

    raster_ref = xr.open_dataset(
        absolute_data_path("ref_output/rasterization_res_ref_2.nc"))
    assert_same_datasets(raster, raster_ref, atol=1.e-10, rtol=1.e-10)
Example 4
def rasterization_wrapper(clouds_and_colors, resolution, epsg, **kwargs):
    """
    Wrapper for the rasterization step:
    - Convert a list of clouds to the correct EPSG
    - Rasterize them with associated colors

    :param clouds_and_colors: list of tuples (cloud, colors)
    :type clouds_and_colors: list of pairs of xarray datasets
    :param resolution: Produced DSM resolution (meter, degree [EPSG dependent])
    :type resolution: float
    :param epsg: EPSG code for the CRS of the output DSM
    :type epsg: int
    :return: digital surface model + projected colors
    :rtype: xarray 2d tuple
    """
    # Unpack list of clouds from tuple, and project them to correct EPSG if
    # needed
    clouds = [v[0][cst.STEREO_REF] for v in clouds_and_colors]

    # Unpack list of colors alike
    colors = [v[1][cst.STEREO_REF] for v in clouds_and_colors]

    # Add clouds and colors computed from the secondary disparity map
    if cst.STEREO_SEC in clouds_and_colors[0][0]:
        cloud_sec = [v[0][cst.STEREO_SEC] for v in clouds_and_colors]
        clouds.extend(cloud_sec)

        color_sec = [v[1][cst.STEREO_SEC] for v in clouds_and_colors]
        colors.extend(color_sec)

    # Call simple_rasterization
    return rasterization.simple_rasterization_dataset(clouds, resolution, epsg,
                                                      colors, **kwargs)
Example 5
def test_simple_rasterization_multiple_datasets():
    """
    Test simple_rasterization_dataset with a list of datasets
    """
    cloud = xr.open_dataset(
        absolute_data_path("input/intermediate_results/cloud1_ref.nc"))
    color = xr.open_dataset(
        absolute_data_path("input/intermediate_results/data1_ref_clr.nc"))

    utm = projection.points_cloud_conversion_dataset(cloud, 32630)

    utm1 = utm.isel(row=range(0, 60))
    utm2 = utm.isel(row=range(60, 120))

    color1 = color.isel(row=range(0, 60))
    color2 = color.isel(row=range(60, 120))

    xstart = 1154790
    ystart = 4927552
    xsize = 114
    ysize = 112
    resolution = 0.5

    raster = rasterization.simple_rasterization_dataset([utm1, utm2],
                                                        resolution, 32630,
                                                        [color1, color2],
                                                        xstart, ystart, xsize,
                                                        ysize, 0.3, 3)

    # Uncomment to update reference
    # raster.to_netcdf(
    #     absolute_data_path('ref_output/rasterization_multiple_res_ref.nc'),
    # )

    raster_ref = xr.open_dataset(
        absolute_data_path("ref_output/rasterization_multiple_res_ref.nc"))
    assert_same_datasets(raster, raster_ref, atol=1.e-10, rtol=1.e-10)
Example 6
def run(in_json: params.input_configuration_type,
        out_dir: str,
        epi_step: int = 30,
        region_size: int = 500,
        disparity_margin: float = 0.02,
        epipolar_error_upper_bound: float = 10.,
        epipolar_error_maximum_bias: float = 0.,
        elevation_delta_lower_bound: float = -1000.,
        elevation_delta_upper_bound: float = 1000.,
        mode: str = "local_dask",
        nb_workers: int = 4,
        walltime: str = "00:59:00",
        check_inputs: bool = False):
    """
    Main function of the prepare subcommand

    This function will perform the following steps:

    1. Compute stereo-rectification grids for the input pair
    2. Compute all possible sift matches in epipolar geometry
    3. Derive an optimal disparity range to explore from the matches
    4. Derive a bilinear correction model of the stereo-rectification grid for the right image in order to minimize epipolar error
    5. Apply correction to right grid
    6. Export left and corrected right grid

    :param in_json: dictionary describing input data (see README.md for format)
    :param out_dir: Directory where all outputs will be written, including a content.json file describing its content
    :param epi_step: Step of the epipolar grid to compute (in pixels in epipolar geometry)
    :param region_size: Size of regions used for sift matching
    :param disparity_margin: Percent of the disparity range width to add at each end as security margin
    :param epipolar_error_upper_bound: Upper bound of expected epipolar error (in pixels)
    :param epipolar_error_maximum_bias: Maximum bias for epipolar error (in pixels)
    :param elevation_delta_lower_bound: Lower bound for elevation delta with respect to the initial DEM (in meters)
    :param elevation_delta_upper_bound: Upper bound for elevation delta with respect to the initial DEM (in meters)
    :param mode: Parallelization mode
    :param nb_workers: Number of dask workers to use for the sift matching step
    :param walltime: Walltime of the dask workers
    :param check_inputs: enable inputs consistency checking
    """
    out_dir = os.path.abspath(out_dir)
    # Ensure that outdir exists
    try:
        os.makedirs(out_dir)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(out_dir):
            pass
        else:
            raise

    utils.add_log_file(out_dir, 'prepare')

    if not check_inputs:
        logging.warning(
            'Inputs consistency will not be checked. To enable inputs '
            'checking, add \'--check_inputs\' to your command line')

    # Check configuration dict
    config = utils.check_json(in_json, params.input_configuration_schema)

    # Retrieve static parameters (sift and low res dsm)
    static_params = static_cfg.get_cfg()

    # Initialize output json dict
    out_json = {
        params.input_section_tag: config,
        params.preprocessing_section_tag: {
            params.preprocessing_version_tag: utils.get_version(),
            params.preprocessing_parameters_section_tag: {
                params.epi_step_tag: epi_step,
                params.disparity_margin_tag: disparity_margin,
                params.epipolar_error_upper_bound_tag:
                epipolar_error_upper_bound,
                params.epipolar_error_maximum_bias_tag:
                epipolar_error_maximum_bias,
                params.elevation_delta_lower_bound_tag:
                elevation_delta_lower_bound,
                params.elevation_delta_upper_bound_tag:
                elevation_delta_upper_bound
            },
            params.static_params_tag: static_params[static_cfg.prepare_tag],
            params.preprocessing_output_section_tag: {}
        }
    }

    # Read input parameters
    img1 = config[params.img1_tag]
    img2 = config[params.img2_tag]
    srtm_dir = config[params.srtm_dir_tag]

    nodata1 = config.get(params.nodata1_tag, None)
    nodata2 = config.get(params.nodata2_tag, None)
    mask1 = config.get(params.mask1_tag, None)
    mask2 = config.get(params.mask2_tag, None)
    color1 = config.get(params.color1_tag, None)

    if check_inputs:
        logging.info('Checking inputs consistency')
        if utils.rasterio_get_nb_bands(
                img1) != 1 or utils.rasterio_get_nb_bands(img2) != 1:
            raise Exception('{} and {} are not mono-band images'.format(
                img1, img2))
        if mask1 is not None:
            if utils.rasterio_get_size(img1) != utils.rasterio_get_size(mask1):
                raise Exception(
                    'The image {} and the mask {} do not have the same size'.
                    format(img1, mask1))
        if mask2 is not None:
            if utils.rasterio_get_size(img2) != utils.rasterio_get_size(mask2):
                raise Exception(
                    'The image {} and the mask {} do not have the same size'.
                    format(img2, mask2))

        if not utils.otb_can_open(img1):
            raise Exception(
                'Problem while opening image {} with the otb'.format(img1))
        if not utils.otb_can_open(img2):
            raise Exception(
                'Problem while opening image {} with the otb'.format(img2))

        with rio.open(img1) as im:
            trans = im.transform
            if trans.e < 0:
                logging.warning(
                    '{} seems to have an incoherent pixel size. '
                    'Input images have to be in sensor geometry.'.format(img1))

        with rio.open(img2) as im:
            trans = im.transform
            if trans.e < 0:
                logging.warning(
                    '{} seems to have an incoherent pixel size. '
                    'Input images have to be in sensor geometry.'.format(img2))

    # Check that the envelopes intersect one another
    logging.info("Computing images envelopes and their intersection")
    shp1 = os.path.join(out_dir, "left_envelope.shp")
    shp2 = os.path.join(out_dir, "right_envelope.shp")
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.left_envelope_tag] = shp1
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.right_envelope_tag] = shp2
    preprocessing.image_envelope(img1, shp1, srtm_dir)
    preprocessing.image_envelope(img2, shp2, srtm_dir)

    poly1, epsg1 = utils.read_vector(shp1)
    poly2, epsg2 = utils.read_vector(shp2)

    inter_poly, (inter_xmin, inter_ymin, inter_xmax, inter_ymax) = \
        tiling.ground_polygon_from_envelopes(poly1, poly2, epsg1, epsg2, epsg1)

    out_envelopes_intersection = os.path.join(out_dir,
                                              'envelopes_intersection.gpkg')
    utils.write_vector([inter_poly], out_envelopes_intersection, epsg1)

    conf_out_dict = out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag]
    conf_out_dict[
        params.envelopes_intersection_tag] = out_envelopes_intersection
    conf_out_dict[params.envelopes_intersection_bb_tag] = [
        inter_xmin, inter_ymin, inter_xmax, inter_ymax
    ]

    if check_inputs:
        logging.info('Checking DEM coverage')
        dem_useful_polygon, dem_coverage = projection.compute_dem_intersection_with_poly(
            srtm_dir, inter_poly, epsg1)

        if dem_coverage < 100.0:
            logging.warning(
                'The input DEM covers {}% of the useful zone'.format(
                    int(dem_coverage)))

    # Generate rectification grids
    logging.info("Generating epipolar rectification grid ...")
    grid1, grid2, epipolar_size_x, epipolar_size_y, alt_to_disp_ratio, stereogrid_pipeline = pipelines.build_stereorectification_grid_pipeline(
        img1, img2, srtm_dir, epi_step)
    # we want disp_to_alt_ratio = resolution/(B/H), in m.pixel^-1
    disp_to_alt_ratio = 1 / alt_to_disp_ratio

    # Export grids to numpy
    left_grid_as_array = np.copy(
        stereogrid_pipeline["stereo_app"].GetVectorImageAsNumpyArray(
            "io.outleft"))
    right_grid_as_array = np.copy(
        stereogrid_pipeline["stereo_app"].GetVectorImageAsNumpyArray(
            "io.outright"))
    grid_origin = stereogrid_pipeline["stereo_app"].GetImageOrigin(
        "io.outleft")
    grid_spacing = stereogrid_pipeline["stereo_app"].GetImageSpacing(
        "io.outleft")
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.epipolar_size_x_tag] = epipolar_size_x
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.epipolar_size_y_tag] = epipolar_size_y
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.epipolar_origin_x_tag] = grid_origin[0]
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.epipolar_origin_y_tag] = grid_origin[1]
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.epipolar_spacing_x_tag] = grid_spacing[0]
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.epipolar_spacing_y_tag] = grid_spacing[1]
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.disp_to_alt_ratio_tag] = disp_to_alt_ratio

    logging.info("Size of epipolar images: {}x{} pixels".format(
        epipolar_size_x, epipolar_size_y))
    logging.info(
        "Disparity to altitude factor: {} m/pixel".format(disp_to_alt_ratio))

    logging.info("Sparse matching ...")
    nb_threads = int(os.environ.get('OMP_NUM_THREADS', '1'))

    # Compute the full range needed for sparse matching
    disp_lower_bound = elevation_delta_lower_bound / disp_to_alt_ratio
    disp_upper_bound = elevation_delta_upper_bound / disp_to_alt_ratio

    disparity_range_width = disp_upper_bound - disp_lower_bound
    logging.info(
        "Full disparity range width for sparse matching: {} pixels".format(
            disparity_range_width))
    disparity_range_center = (elevation_delta_upper_bound +
                              elevation_delta_lower_bound) / (
                                  2 * disp_to_alt_ratio)
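    # Illustrative example (numbers assumed, not from the source): with
    # disp_to_alt_ratio = 0.7 m/pixel and elevation bounds of -1000 m and
    # +1000 m, the disparity bounds are about -1428.6 and +1428.6 pixels,
    # i.e. a full range width of about 2857 pixels centered on 0.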

    # Compute the number of offsets to consider in order to explore the full range
    nb_splits = 1 + int(math.floor(float(disparity_range_width) / region_size))
    actual_region_size = int(
        math.ceil((region_size + disparity_range_width) / nb_splits))
    actual_range = nb_splits * actual_region_size
    actual_range_start = disparity_range_center - actual_range / 2 + region_size / 2
    logging.info(
        "Disparity range will be explored in {} regions of size {}, starting at {} pixels"
        .format(nb_splits, actual_region_size, actual_range_start))
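    # Illustrative example (continuing the assumed numbers above): with
    # region_size = 500 and disparity_range_width = 2857, we get
    # nb_splits = 1 + floor(2857 / 500) = 6,
    # actual_region_size = ceil((500 + 2857) / 6) = 560,
    # actual_range = 6 * 560 = 3360, and
    # actual_range_start = 0 - 3360 / 2 + 500 / 2 = -1430.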

    regions = tiling.split(0, 0, epipolar_size_x, epipolar_size_y, region_size,
                           region_size)

    logging.info("Number of splits to process for sparse matching: {}".format(
        len(regions)))

    cluster = None
    client = None

    # TODO: prepare mp mode
    # Use dask
    use_dask = {"local_dask": True, "pbs_dask": True}
    if mode not in use_dask.keys():
        raise NotImplementedError('{} mode is not implemented'.format(mode))

    if mode == "local_dask":
        cluster, client = start_local_cluster(nb_workers)
    else:
        cluster, client = start_cluster(nb_workers, walltime, out_dir)

    # Write temporary grid
    tmp1 = os.path.join(out_dir, "tmp1.tif")
    preprocessing.write_grid(left_grid_as_array, tmp1, grid_origin,
                             grid_spacing)
    tmp2 = os.path.join(out_dir, "tmp2.tif")
    preprocessing.write_grid(right_grid_as_array, tmp2, grid_origin,
                             grid_spacing)

    # Compute margins for right region
    margins = [
        int(
            math.floor(epipolar_error_upper_bound +
                       epipolar_error_maximum_bias)),
        int(
            math.floor(epipolar_error_upper_bound +
                       epipolar_error_maximum_bias)),
        int(
            math.floor(epipolar_error_upper_bound +
                       epipolar_error_maximum_bias)),
        int(math.ceil(epipolar_error_upper_bound +
                      epipolar_error_maximum_bias))
    ]

    logging.info(
        "Margins added to right region for matching: {}".format(margins))

    # Matching tasks as delayed objects
    delayed_matches = []
    for left_region in regions:
        for offset in range(nb_splits):
            offset_ = actual_range_start + offset * actual_region_size
            # Pad region to include margins for right image
            right_region = [
                left_region[0] + offset_, left_region[1],
                left_region[0] + offset_ + actual_region_size, left_region[3]
            ]

            # Pad with margin and crop to largest region
            right_region = tiling.crop(
                tiling.pad(right_region,
                           margins), [0, 0, epipolar_size_x, epipolar_size_y])

            # Avoid empty regions
            if not tiling.empty(right_region):

                delayed_matches.append(
                    dask.delayed(matching_wrapper)(left_region, right_region,
                                                   img1, img2, tmp1, tmp2,
                                                   mask1, mask2, nodata1,
                                                   nodata2, epipolar_size_x,
                                                   epipolar_size_y))

    # Transform delayed tasks to future
    logging.info("Submitting {} tasks to dask".format(len(delayed_matches)))
    future_matches = client.compute(delayed_matches)

    # Initialize output matches array
    matches = np.empty((0, 4))

    # Wait for all matching tasks to be completed
    for future, result in tqdm(as_completed(future_matches, with_results=True),
                               total=len(future_matches),
                               desc="Performing matching ..."):
        matches = np.concatenate((matches, result))

    raw_nb_matches = matches.shape[0]

    logging.info(
        "Raw number of matches found: {} matches".format(raw_nb_matches))

    # Export matches
    logging.info("Writing raw matches file")
    raw_matches_array_path = os.path.join(out_dir, "raw_matches.npy")
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.raw_matches_tag] = raw_matches_array_path
    np.save(raw_matches_array_path, matches)

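    # Note: each match row reads [x_left, y_left, x_right, y_right] (implied
    # by the code below): the epipolar error is the difference of the y
    # coordinates, and the disparity is x_right - x_left.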
    # Filter matches that are out of margin
    if epipolar_error_maximum_bias == 0:
        epipolar_median_shift = 0
    else:
        epipolar_median_shift = np.median(matches[:, 3] - matches[:, 1])

    matches = matches[((matches[:, 3] - matches[:, 1]) -
                       epipolar_median_shift) >= -epipolar_error_upper_bound]
    matches = matches[((matches[:, 3] - matches[:, 1]) -
                       epipolar_median_shift) <= epipolar_error_upper_bound]

    matches_discarded_message = "{} matches discarded because their epipolar error is greater \
than --epipolar_error_upper_bound = {} pix".format(
        raw_nb_matches - matches.shape[0], epipolar_error_upper_bound)

    if epipolar_error_maximum_bias != 0:
        matches_discarded_message += " considering a shift of {} pix".format(
            epipolar_median_shift)

    logging.info(matches_discarded_message)

    filtered_nb_matches = matches.shape[0]

    matches = matches[matches[:, 2] - matches[:, 0] >= disp_lower_bound]
    matches = matches[matches[:, 2] - matches[:, 0] <= disp_upper_bound]

    logging.info(
        "{} matches discarded because they fall outside of disparity range defined by --elevation_delta_lower_bound = {} m and --elevation_delta_upper_bound = {} m : [{} pix., {} pix.]"
        .format(filtered_nb_matches - matches.shape[0],
                elevation_delta_lower_bound, elevation_delta_upper_bound,
                disp_lower_bound, disp_upper_bound))

    # Retrieve number of matches
    nb_matches = matches.shape[0]

    # Check if we have enough matches
    # TODO: we could also make it a warning and continue with uncorrected grid
    # and default disparity range
    if nb_matches < 100:
        logging.critical(
            "Insufficient number of matches found (< 100), cannot safely estimate epipolar error correction and disparity range"
        )
        # stop cluster
        stop_cluster(cluster, client)
        # Exit immediately
        return

    logging.info(
        "Number of matches kept for epipolar error correction: {} matches".
        format(nb_matches))

    # Remove temporary files
    os.remove(tmp1)
    os.remove(tmp2)

    # Compute epipolar error
    epipolar_error = matches[:, 1] - matches[:, 3]
    logging.info(
        "Epipolar error before correction: mean = {:.3f} pix., standard deviation = {:.3f} pix., max = {:.3f} pix."
        .format(np.mean(epipolar_error), np.std(epipolar_error),
                np.max(np.fabs(epipolar_error))))

    # Compute correction for the right grid
    logging.info("Generating correction for right epipolar grid ...")
    corrected_right_grid, corrected_matches, in_stats, out_stats = preprocessing.correct_right_grid(
        matches, right_grid_as_array, grid_origin, grid_spacing)

    corrected_epipolar_error = (corrected_matches[:, 1] -
                                corrected_matches[:, 3])

    logging.info(
        "Epipolar error after correction: mean = {:.3f} pix., standard deviation = {:.3f} pix., max = {:.3f} pix."
        .format(np.mean(corrected_epipolar_error),
                np.std(corrected_epipolar_error),
                np.max(np.fabs(corrected_epipolar_error))))

    # TODO: add stats in content.json

    out_left_grid = os.path.join(out_dir, "left_epipolar_grid.tif")
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.left_epipolar_grid_tag] = out_left_grid
    preprocessing.write_grid(left_grid_as_array, out_left_grid, grid_origin,
                             grid_spacing)

    # Export corrected right grid
    out_right_grid = os.path.join(out_dir, "right_epipolar_grid.tif")
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.right_epipolar_grid_tag] = out_right_grid
    preprocessing.write_grid(corrected_right_grid, out_right_grid, grid_origin,
                             grid_spacing)

    # Export uncorrected right grid
    logging.info("Writing uncorrected right grid")
    out_right_grid_uncorrected = os.path.join(
        out_dir, "right_epipolar_grid_uncorrected.tif")
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.
            right_epipolar_uncorrected_grid_tag] = out_right_grid_uncorrected
    preprocessing.write_grid(right_grid_as_array, out_right_grid_uncorrected,
                             grid_origin, grid_spacing)

    # Compute the disparity range (we filter matches that are too far off the
    # epipolar lines after correction)
    corrected_std = np.std(corrected_epipolar_error)

    corrected_matches = corrected_matches[
        np.fabs(corrected_epipolar_error) < 3 * corrected_std]
    logging.info(
        "{} matches discarded because their epipolar error is greater than 3*stdev of epipolar error after correction (3*stddev = {:.3f} pix.)"
        .format(nb_matches - corrected_matches.shape[0], 3 * corrected_std))

    logging.info(
        "Number of matches kept for disparity range estimation: {} matches".
        format(corrected_matches.shape[0]))

    dmin, dmax = preprocessing.compute_disparity_range(
        corrected_matches,
        static_cfg.get_disparity_outliers_rejection_percent())
    margin = abs(dmax - dmin) * disparity_margin
    dmin -= margin
    dmax += margin
    logging.info(
        "Disparity range with margin: [{:.3f} pix., {:.3f} pix.] (margin = {:.3f} pix.)"
        .format(dmin, dmax, margin))
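    # Illustrative example (numbers assumed): with dmin = -20 pix.,
    # dmax = 10 pix. and disparity_margin = 0.02, the margin is
    # abs(10 - (-20)) * 0.02 = 0.6 pix., giving [-20.6 pix., 10.6 pix.].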
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.minimum_disparity_tag] = dmin
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.maximum_disparity_tag] = dmax

    logging.info(
        "Equivalent range in meters: [{:.3f} m, {:.3f} m] (margin = {:.3f} m)".
        format(dmin * disp_to_alt_ratio, dmax * disp_to_alt_ratio,
               margin * disp_to_alt_ratio))

    # Export matches
    logging.info("Writing matches file")
    matches_array_path = os.path.join(out_dir, "matches.npy")
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.matches_tag] = matches_array_path
    np.save(matches_array_path, corrected_matches)

    # Now compute low resolution DSM and its initial DEM counterpart

    # First, triangulate matches
    logging.info("Generating low resolution DSM from matches")
    points_cloud_from_matches = stereo.triangulate_matches(
        out_json, corrected_matches)

    # Then define the size of the lower res DSM to rasterize
    low_res_dsm_params = static_cfg.get_low_res_dsm_params()
    lowres_dsm_resolution = getattr(
        low_res_dsm_params,
        static_cfg.low_res_dsm_resolution_in_degree_tag)  # Value in degree
    lowres_dsm_sizex = int(
        math.ceil((inter_xmax - inter_xmin) / lowres_dsm_resolution))
    lowres_dsm_sizey = int(
        math.ceil((inter_ymax - inter_ymin) / lowres_dsm_resolution))
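    # Illustrative example (numbers assumed): an intersection spanning 0.5
    # degree in x at a resolution of 0.000833 degree (~3 arcsec) gives
    # lowres_dsm_sizex = ceil(0.5 / 0.000833) = 601.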
    lowres_dsm = rasterization.simple_rasterization_dataset(
        [points_cloud_from_matches],
        lowres_dsm_resolution,
        4326,
        color_list=None,
        xstart=inter_xmin,
        ystart=inter_ymax,
        xsize=lowres_dsm_sizex,
        ysize=lowres_dsm_sizey)

    lowres_dsm_file = os.path.join(
        out_dir, "lowres_dsm_from_matches.nc")  # TODO: add proper CRS info
    lowres_dsm.to_netcdf(lowres_dsm_file)
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.lowres_dsm_tag] = lowres_dsm_file

    # Now read the exact same grid on initial DEM
    lowres_initial_dem = preprocessing.read_lowres_dem(
        srtm_dir,
        startx=inter_xmin,
        starty=inter_ymax,
        sizex=lowres_dsm_sizex,
        sizey=lowres_dsm_sizey,
        resolution=lowres_dsm_resolution)
    lowres_initial_dem_file = os.path.join(out_dir, "lowres_initial_dem.nc")
    lowres_initial_dem.to_netcdf(lowres_initial_dem_file)
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.lowres_initial_dem_tag] = lowres_initial_dem_file

    # also write the difference
    lowres_elevation_difference_file = os.path.join(
        out_dir, "lowres_elevation_diff.nc")
    lowres_dsm_diff = lowres_initial_dem - lowres_dsm
    (lowres_dsm_diff).to_netcdf(lowres_elevation_difference_file)
    out_json[params.preprocessing_section_tag][
        params.preprocessing_output_section_tag][
            params.
            lowres_elevation_difference_tag] = lowres_elevation_difference_file

    # Now, estimate a correction to align DSM on the lowres initial DEM
    splines = None
    if lowres_dsm_sizex > getattr(low_res_dsm_params, static_cfg.low_res_dsm_min_sizex_for_align_tag) and \
                    lowres_dsm_sizey > getattr(low_res_dsm_params, static_cfg.low_res_dsm_min_sizey_for_align_tag):

        logging.info(
            "Estimating correction between low resolution DSM and initial DEM")

        # First, we estimate direction of acquisition time for both images
        vec1 = preprocessing.get_time_ground_direction(img1, dem=srtm_dir)
        vec2 = preprocessing.get_time_ground_direction(img2, dem=srtm_dir)
        time_direction_vector = (vec1 + vec2) / 2

        def display_angle(vec):
            return 180 * math.atan2(vec[1], vec[0]) / math.pi

        logging.info(
            "Time direction average azimuth: {}° (img1: {}°, img2: {}°)".
            format(display_angle(time_direction_vector), display_angle(vec1),
                   display_angle(vec2)))

        origin = [
            float(lowres_dsm_diff.x[0].values),
            float(lowres_dsm_diff.y[0].values)
        ]
        out_json[params.preprocessing_section_tag][
            params.preprocessing_output_section_tag][
                params.time_direction_line_origin_x_tag] = origin[0]
        out_json[params.preprocessing_section_tag][
            params.preprocessing_output_section_tag][
                params.time_direction_line_origin_y_tag] = origin[1]
        out_json[params.preprocessing_section_tag][
            params.preprocessing_output_section_tag][
                params.
                time_direction_line_vector_x_tag] = time_direction_vector[0]
        out_json[params.preprocessing_section_tag][
            params.preprocessing_output_section_tag][
                params.
                time_direction_line_vector_y_tag] = time_direction_vector[1]

        # Then we estimate the correction splines
        splines = preprocessing.lowres_initial_dem_splines_fit(
            lowres_dsm,
            lowres_initial_dem,
            origin,
            time_direction_vector,
            ext=getattr(low_res_dsm_params, static_cfg.low_res_dsm_ext_tag),
            order=getattr(low_res_dsm_params, static_cfg.low_res_dsm_order_tag))

    else:
        logging.warning(
            "Low resolution DSM is not large enough (minimum size is 100x100) to estimate correction to fit initial DEM, skipping ..."
        )

    if splines is not None:
        # Save model to file
        lowres_dem_splines_fit_file = os.path.join(
            out_dir, "lowres_dem_splines_fit.pck")
        with open(lowres_dem_splines_fit_file, 'wb') as f:
            pickle.dump(splines, f)
            out_json[params.preprocessing_section_tag][
                params.preprocessing_output_section_tag][
                    params.
                    lowres_dem_splines_fit_tag] = lowres_dem_splines_fit_file

            logging.info(
                "Generating corrected low resolution DSM from matches")

            # Estimate correction on point cloud from matches
            points_cloud_from_matches_z_correction = splines(
                preprocessing.project_coordinates_on_line(
                    points_cloud_from_matches.x, points_cloud_from_matches.y,
                    origin, time_direction_vector))

            # Estimate disparity correction
            points_cloud_disp_correction = points_cloud_from_matches_z_correction / disp_to_alt_ratio

            # Correct matches disparity
            z_corrected_matches = corrected_matches
            z_corrected_matches[:, 2] -= points_cloud_disp_correction[:, 0]

            # Triangulate and rasterize again
            corrected_points_cloud_from_matches = stereo.triangulate_matches(
                out_json, z_corrected_matches)

            corrected_lowres_dsm = rasterization.simple_rasterization_dataset(
                [corrected_points_cloud_from_matches],
                lowres_dsm_resolution,
                corrected_points_cloud_from_matches.attrs['epsg'],
                xstart=inter_xmin,
                ystart=inter_ymax,
                xsize=lowres_dsm_sizex,
                ysize=lowres_dsm_sizey)

            # Write corrected lowres dsm
            corrected_lowres_dsm_file = os.path.join(
                out_dir, "corrected_lowres_dsm_from_matches.nc"
            )  # TODO: add proper CRS info
            corrected_lowres_dsm.to_netcdf(corrected_lowres_dsm_file)
            out_json[params.preprocessing_section_tag][
                params.preprocessing_output_section_tag][
                    params.
                    corrected_lowres_dsm_tag] = corrected_lowres_dsm_file

            # also write the difference
            corrected_lowres_elevation_difference_file = os.path.join(
                out_dir, "corrected_lowres_elevation_diff.nc")
            corrected_lowres_dsm_diff = lowres_initial_dem - corrected_lowres_dsm
            (corrected_lowres_dsm_diff
             ).to_netcdf(corrected_lowres_elevation_difference_file)
            out_json[params.preprocessing_section_tag][
                params.preprocessing_output_section_tag][
                    params.
                    corrected_lowres_elevation_difference_tag] = corrected_lowres_elevation_difference_file

    # Write the output json
    try:
        utils.check_json(out_json, params.preprocessing_content_schema)
    except CheckerError as e:
        logging.warning(
            "content.json does not comply with schema: {}".format(e))

    out_json_path = os.path.join(out_dir, "content.json")
    params.write_preprocessing_content_file(out_json, out_json_path)

    # stop cluster
    stop_cluster(cluster, client)
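A minimal sketch of how this prepare entry point might be invoked. The in_json keys below ("img1", "img2", "srtm_dir") are assumptions inferred from the params.img1_tag, params.img2_tag and params.srtm_dir_tag reads above; the actual schema is defined by params.input_configuration_schema, which is not shown here:

    # Hypothetical input configuration; key names are assumptions.
    in_json = {
        "img1": "left_image.tif",
        "img2": "right_image.tif",
        "srtm_dir": "/path/to/srtm_tiles",
    }

    run(in_json,
        out_dir="prepare_out",
        epi_step=30,
        mode="local_dask",
        nb_workers=4,
        check_inputs=True)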