Example #1
def lookup(path_ds,
           path_out_ds,
           from_field='Value',
           to_field='',
           overwrite=True):
    """

    :param path_ds:
    :param path_out_ds:
    :param from_field:
    :param to_field:
    :param overwrite:
    :return:
    """
    if overwrite and os.path.isfile(path_out_ds):
        os.remove(path_out_ds)

    val_from = get_values_rat_column(path_ds, name_col=from_field)
    val_to = get_values_rat_column(path_ds, name_col=to_field)

    dict_reclass = dict(zip(val_from, val_to))
    gp.reclassify_dataset_uri(path_ds,
                              dict_reclass,
                              path_out_ds,
                              out_datatype=get_dataset_datatype(path_ds),
                              out_nodata=gp.get_nodata_from_uri(path_ds))
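
A minimal usage sketch for lookup(); the file paths and the 'NewValue' column name are hypothetical, and the input raster is assumed to carry a raster attribute table containing both columns.

# Hypothetical usage: remap a categorical raster via two columns of its
# raster attribute table (paths and the to_field name are made up).
lookup('landcover.tif',
       'landcover_remapped.tif',
       from_field='Value',
       to_field='NewValue')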
Example #2
def compute_viewshed_uri(in_dem_uri, out_viewshed_uri, in_structure_uri,
                         curvature_correction, refr_coeff, args):
    """ Compute the viewshed as it is defined in ArcGIS where the inputs are:

        -in_dem_uri: URI to input surface raster
        -out_viewshed_uri: URI to the output raster
        -in_structure_uri: URI to a point shapefile that contains the location
        of the observers and the viewshed radius in (negative) meters
        -curvature_correction: flag for the curvature of the earth. Either
        FLAT_EARTH or CURVED_EARTH. Not used yet.
        -refr_coeff: refraction coefficient between 0 (max effect) and 1 (no
        effect). Default is 0.13.
        -args: the full model arguments dictionary, passed through to
        compute_viewshed."""

    # Extract cell size from input DEM
    cell_size = geoprocessing.get_cell_size_from_uri(in_dem_uri)

    # Extract nodata
    nodata = geoprocessing.get_nodata_from_uri(in_dem_uri)

    ## Build I and J arrays, and save them to disk
    rows, cols = geoprocessing.get_row_col_from_uri(in_dem_uri)
    I, J = np.meshgrid(range(rows), range(cols), indexing='ij')
    # Base path uri
    base_uri = os.path.split(out_viewshed_uri)[0]
    I_uri = os.path.join(base_uri, 'I.tif')
    J_uri = os.path.join(base_uri, 'J.tif')
    #I_uri = geoprocessing.temporary_filename()
    #J_uri = geoprocessing.temporary_filename()
    geoprocessing.new_raster_from_base_uri(in_dem_uri, I_uri, 'GTiff', \
        -32768., gdal.GDT_Float32, fill_value = -32768.)
    I_raster = gdal.Open(I_uri, gdal.GA_Update)
    I_band = I_raster.GetRasterBand(1)
    I_band.WriteArray(I)
    I_band = None
    I_raster = None
    geoprocessing.new_raster_from_base_uri(in_dem_uri, J_uri, 'GTiff', \
        -32768., gdal.GDT_Float32, fill_value = -32768.)
    J_raster = gdal.Open(J_uri, gdal.GA_Update)
    J_band = J_raster.GetRasterBand(1)
    J_band.WriteArray(J)
    J_band = None
    J_raster = None
    # Extract the input raster geotransform
    GT = geoprocessing.get_geotransform_uri(in_dem_uri)

    # Open the input URI and extract the numpy array
    input_raster = gdal.Open(in_dem_uri)
    input_array = input_raster.GetRasterBand(1).ReadAsArray()
    input_raster = None

    # Create a raster from base before passing it to viewshed
    visibility_uri = out_viewshed_uri  #geoprocessing.temporary_filename()
    geoprocessing.new_raster_from_base_uri(in_dem_uri, visibility_uri, 'GTiff', \
        255, gdal.GDT_Byte, fill_value = 255)

    # Call the non-uri version of viewshed.
    #compute_viewshed(in_dem_uri, visibility_uri, in_structure_uri,
    compute_viewshed(input_array, visibility_uri, in_structure_uri, cell_size,
                     rows, cols, nodata, GT, I_uri, J_uri,
                     curvature_correction, refr_coeff, args)
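
A hedged call sketch for compute_viewshed_uri(); every path is a placeholder, and because compute_viewshed itself is not shown here, whatever extra keys it expects in the args dictionary are unknown and left empty.

# Hypothetical call: curvature_correction is not used yet per the docstring,
# and 0.13 is the default refraction coefficient it mentions.
compute_viewshed_uri('dem.tif',
                     'viewshed.tif',
                     'observers.shp',
                     'FLAT_EARTH',
                     0.13,
                     args={})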
Example #3
def reclassify_quantile_dataset_uri(dataset_uri, quantile_list, dataset_out_uri,
                                    datatype_out, nodata_out):
    """Reclassify a raster into quantile classes.

    Each pixel is assigned the index of the first quantile break (computed
    from the quantile_list percentiles) that it does not exceed; nodata
    pixels map to nodata_out.
    """

    nodata_ds = geoprocessing.get_nodata_from_uri(dataset_uri)

    memory_file_uri = geoprocessing.temporary_filename()
    memory_array = geoprocessing.load_memory_mapped_array(
        dataset_uri, memory_file_uri)
    memory_array_flat = memory_array.reshape((-1, ))

    quantile_breaks = [0]
    for quantile in quantile_list:
        quantile_breaks.append(
            scipy.stats.scoreatpercentile(memory_array_flat, quantile,
                                          (0.0, np.amax(memory_array_flat))))
        LOGGER.debug('quantile %f: %f', quantile, quantile_breaks[-1])

    def reclass(value):
        if value == nodata_ds:
            return nodata_out
        else:
            for new_value, quantile_break in enumerate(quantile_breaks):
                if value <= quantile_break:
                    return new_value
        raise ValueError, "Value was not within quantiles."

    cell_size = geoprocessing.get_cell_size_from_uri(dataset_uri)

    geoprocessing.vectorize_datasets([dataset_uri],
                                     reclass,
                                     dataset_out_uri,
                                     datatype_out,
                                     nodata_out,
                                     cell_size,
                                     "union",
                                     dataset_to_align_index=0)

    geoprocessing.calculate_raster_stats_uri(dataset_out_uri)
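
A short usage sketch for reclassify_quantile_dataset_uri(); the paths are placeholders, and gdal.GDT_Byte with 255 as nodata is only an illustrative choice of output type.

# Hypothetical usage: bin a raster into quantile classes; nodata pixels
# become 255 in the output.
from osgeo import gdal

reclassify_quantile_dataset_uri('viewshed.tif',
                                [25, 50, 75, 100],
                                'viewshed_quality.tif',
                                gdal.GDT_Byte,
                                255)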
Example #4
# Modify the axes within the colorbar
cbar.ax.set_yticklabels(['< -1', '0', '> 1'])  # vertically oriented colorbar

# Show it.
#plt.show()

import random

from osgeo import gdal
import pygeoprocessing.geoprocessing as gp

inputs = [geotiff_filename]


def multiply_by_random_int(input_array):
    random_int = random.randint(1, 300)
    return input_array * random_int


vectorize_output_uri = 'vectorize_output_' + str(random.randint(
    1, 1000000)) + '.tif'
nodata_out = gp.get_nodata_from_uri(inputs[0])
pixel_size_out = gp.get_cell_size_from_uri(inputs[0])

gp.vectorize_datasets(inputs,
                      multiply_by_random_int,
                      vectorize_output_uri,
                      gdal.GDT_Float32,
                      nodata_out,
                      pixel_size_out,
                      'intersection',
                      vectorize_op=False,
                      assert_datasets_projected=False)
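
Note on the call above: with vectorize_op=False, this older pygeoprocessing API treats multiply_by_random_int as already vectorized and passes it whole numpy arrays instead of wrapping it with numpy.vectorize for per-pixel calls, which is why a plain array multiply suffices; assert_datasets_projected=False skips the check that every input raster has a defined projection.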
Example #5
def execute(args):
    """DOCSTRING"""
    LOGGER.info("Start Scenic Quality Model")

    #create copy of args
    aq_args = args.copy()

    #validate input
    LOGGER.debug("Validating parameters.")
    dem_cell_size = geoprocessing.get_cell_size_from_uri(args['dem_uri'])
    LOGGER.debug("DEM cell size: %f" % dem_cell_size)
    if "cell_size" in aq_args:
        if aq_args['cell_size'] < dem_cell_size:
            raise ValueError, "The cell size cannot be downsampled below %f" % dem_cell_size
    else:
        aq_args['cell_size'] = dem_cell_size

    intermediate_dir = os.path.join(aq_args['workspace_dir'], 'intermediate')
    if not os.path.isdir(intermediate_dir):
        os.makedirs(intermediate_dir)

    output_dir = os.path.join(aq_args['workspace_dir'], 'output')
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    #local variables
    LOGGER.debug("Setting local variables.")
    z_factor = 1
    curvature_correction = aq_args['refraction']

    #intermediate files
    aoi_dem_uri = os.path.join(intermediate_dir, "aoi_dem.shp")
    aoi_pop_uri = os.path.join(intermediate_dir, "aoi_pop.shp")

    viewshed_dem_uri = os.path.join(intermediate_dir, "dem_vs.tif")
    viewshed_dem_reclass_uri = os.path.join(intermediate_dir, "dem_vs_re.tif")

    pop_clip_uri = os.path.join(intermediate_dir, "pop_clip.tif")
    pop_prj_uri = os.path.join(intermediate_dir, "pop_prj.tif")
    pop_vs_uri = os.path.join(intermediate_dir, "pop_vs.tif")

    viewshed_reclass_uri = os.path.join(intermediate_dir, "vshed_bool.tif")
    viewshed_polygon_uri = os.path.join(intermediate_dir, "vshed.shp")

    #outputs
    viewshed_uri = os.path.join(output_dir, "vshed.tif")
    viewshed_quality_uri = os.path.join(output_dir, "vshed_qual.tif")
    pop_stats_uri = os.path.join(output_dir, "populationStats.html")
    overlap_uri = os.path.join(output_dir, "vp_overlap.shp")

    #determining best data type for viewshed
    features = get_count_feature_set_uri(aq_args['structure_uri'])
    if features < 2**16:
        viewshed_type = gdal.GDT_UInt16
        viewshed_nodata = (2**16) - 1
    elif features < 2**32:
        viewshed_type = gdal.GDT_UInt32
        viewshed_nodata = (2**32) - 1
    else:
        raise ValueError, "Too many structures."

    #clip DEM by AOI and reclass
    LOGGER.info("Clipping DEM by AOI.")

    LOGGER.debug("Projecting AOI for DEM.")
    dem_wkt = geoprocessing.get_dataset_projection_wkt_uri(aq_args['dem_uri'])
    geoprocessing.reproject_datasource_uri(aq_args['aoi_uri'], dem_wkt,
                                           aoi_dem_uri)

    LOGGER.debug("Clipping DEM by projected AOI.")
    LOGGER.debug("DEM: %s, AIO: %s", aq_args['dem_uri'], aoi_dem_uri)
    geoprocessing.clip_dataset_uri(aq_args['dem_uri'], aoi_dem_uri,
                                   viewshed_dem_uri, False)

    LOGGER.info(
        "Reclassifying DEM to account for water at sea-level and resampling to specified cell size."
    )
    LOGGER.debug(
        "Reclassifying DEM so negative values zero and resampling to save on computation."
    )

    nodata_dem = geoprocessing.get_nodata_from_uri(aq_args['dem_uri'])

    def no_zeros(value):
        if value == nodata_dem:
            return nodata_dem
        elif value < 0:
            return 0
        else:
            return value

    geoprocessing.vectorize_datasets([viewshed_dem_uri], no_zeros,
                                     viewshed_dem_reclass_uri,
                                     get_data_type_uri(viewshed_dem_uri),
                                     nodata_dem, aq_args["cell_size"], "union")

    #calculate viewshed
    LOGGER.info("Calculating viewshed.")
    compute_viewshed_uri(viewshed_dem_reclass_uri, viewshed_uri,
                         aq_args['structure_uri'], curvature_correction,
                         aq_args['refraction'], aq_args)

    LOGGER.info("Ranking viewshed.")
    #rank viewshed
    quantile_list = [25, 50, 75, 100]
    LOGGER.debug('reclassify input %s', viewshed_uri)
    LOGGER.debug('reclassify output %s', viewshed_quality_uri)
    reclassify_quantile_dataset_uri(viewshed_uri, quantile_list,
                                    viewshed_quality_uri, viewshed_type,
                                    viewshed_nodata)

    if "pop_uri" in args:
        #tabulate population impact
        LOGGER.info("Tabulating population impact.")
        LOGGER.debug("Tabulating unaffected population.")
        nodata_pop = geoprocessing.get_nodata_from_uri(aq_args["pop_uri"])
        LOGGER.debug("The no data value for the population raster is %s.",
                     str(nodata_pop))
        nodata_viewshed = geoprocessing.get_nodata_from_uri(viewshed_uri)
        LOGGER.debug("The no data value for the viewshed raster is %s.",
                     str(nodata_viewshed))

        #clip population
        LOGGER.debug("Projecting AOI for population raster clip.")
        pop_wkt = geoprocessing.get_dataset_projection_wkt_uri(
            aq_args['pop_uri'])
        geoprocessing.reproject_datasource_uri(aq_args['aoi_uri'], pop_wkt,
                                               aoi_pop_uri)

        LOGGER.debug("Clipping population raster by projected AOI.")
        geoprocessing.clip_dataset_uri(aq_args['pop_uri'], aoi_pop_uri,
                                       pop_clip_uri, False)

        #reproject clipped population
        LOGGER.debug("Reprojecting clipped population raster.")
        vs_wkt = geoprocessing.get_dataset_projection_wkt_uri(viewshed_uri)
        reproject_dataset_uri(pop_clip_uri, vs_wkt, pop_prj_uri,
                              get_data_type_uri(pop_clip_uri))

        #align and resample population
        def copy(value1, value2):
            if value2 == nodata_viewshed:
                return nodata_pop
            else:
                return value1

        LOGGER.debug("Resampling and aligning population raster.")
        geoprocessing.vectorize_datasets([pop_prj_uri, viewshed_uri], copy,
                                         pop_vs_uri,
                                         get_data_type_uri(pop_prj_uri),
                                         nodata_pop, aq_args["cell_size"],
                                         "intersection",
                                         ["bilinear", "bilinear"], 1)

        pop = gdal.Open(pop_vs_uri)
        pop_band = pop.GetRasterBand(1)
        vs = gdal.Open(viewshed_uri)
        vs_band = vs.GetRasterBand(1)

        affected_pop = 0
        unaffected_pop = 0
        for row_index in range(vs_band.YSize):
            pop_row = pop_band.ReadAsArray(0, row_index, pop_band.XSize, 1)
            vs_row = vs_band.ReadAsArray(0, row_index, vs_band.XSize,
                                         1).astype(np.float64)

            pop_row[pop_row == nodata_pop] = 0.0
            vs_row[vs_row == nodata_viewshed] = -1

            affected_pop += np.sum(pop_row[vs_row > 0])
            unaffected_pop += np.sum(pop_row[vs_row == 0])

        pop_band = None
        pop = None
        vs_band = None
        vs = None

        table = """
        <html>
        <title>Marine InVEST</title>
        <center><H1>Scenic Quality Model</H1><H2>(Visual Impact from Objects)</H2></center>
        <br><br><HR><br>
        <H2>Population Statistics</H2>

        <table border="1", cellpadding="0">
        <tr><td align="center"><b>Number of Features Visible</b></td><td align="center"><b>Population (estimate)</b></td></tr>
        <tr><td align="center">None visible<br> (unaffected)</td><td align="center">%i</td>
        <tr><td align="center">1 or more<br>visible</td><td align="center">%i</td>
        </table>
        </html>
        """

        outfile = open(pop_stats_uri, 'w')
        outfile.write(table % (unaffected_pop, affected_pop))
        outfile.close()

    #perform overlap analysis
    LOGGER.info("Performing overlap analysis.")

    LOGGER.debug("Reclassifying viewshed")

    nodata_vs_bool = 0

    def non_zeros(value):
        if value == nodata_vs_bool:
            return nodata_vs_bool
        elif value > 0:
            return 1
        else:
            return nodata_vs_bool

    geoprocessing.vectorize_datasets([viewshed_uri], non_zeros,
                                     viewshed_reclass_uri, gdal.GDT_Byte,
                                     nodata_vs_bool, aq_args["cell_size"],
                                     "union")

    if "overlap_uri" in aq_args:
        LOGGER.debug("Copying overlap analysis features.")
        geoprocessing.copy_datasource_uri(aq_args["overlap_uri"], overlap_uri)

        LOGGER.debug("Adding id field to overlap features.")
        id_name = "investID"
        add_id_feature_set_uri(overlap_uri, id_name)

        LOGGER.debug("Add area field to overlap features.")
        area_name = "overlap"
        add_field_feature_set_uri(overlap_uri, area_name, ogr.OFTReal)

        LOGGER.debug("Count overlapping pixels per area.")
        values = geoprocessing.aggregate_raster_values_uri(
            viewshed_reclass_uri, overlap_uri, id_name,
            ignore_nodata=True).total

        def calculate_percent(feature):
            if feature.GetFieldAsInteger(id_name) in values:
                return (values[feature.GetFieldAsInteger(id_name)] * \
                aq_args["cell_size"]) / feature.GetGeometryRef().GetArea()
            else:
                return 0

        LOGGER.debug("Set area field values.")
        set_field_by_op_feature_set_uri(overlap_uri, area_name,
                                        calculate_percent)
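
A sketch of the args dictionary that execute() reads, with placeholder paths; only keys actually referenced in the function are listed.

# Hypothetical model arguments (paths are placeholders).
args = {
    'workspace_dir': 'scenic_quality_workspace',
    'dem_uri': 'dem.tif',
    'aoi_uri': 'aoi.shp',
    'structure_uri': 'structures.shp',
    'refraction': 0.13,
    # 'cell_size': 90.0,           # optional; may not be smaller than the DEM cell size
    # 'pop_uri': 'population.tif', # optional; enables the population impact table
    # 'overlap_uri': 'overlap.shp' # optional; enables the overlap analysis
}
execute(args)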