Example #1
def longLat2Elevation(long,lat):
    if not os.path.isfile('elevation_map/SLC-DEM.tif'):
        elevation.clip(bounds=(-112.5, 40.5, -111.5, 41), output='elevation_map/SLC-DEM.tif')
        elevation.clear()
    
    gdal.UseExceptions()
    elevData = gdal.Open('elevation_map/SLC-DEM.tif')
    band = elevData.GetRasterBand(1)
    elevs = band.ReadAsArray()
    dataInfo = elevData.GetGeoTransform()
    initLong = dataInfo[0]
    initLat = dataInfo[3]
    dLong = dataInfo[1]
    dLat = dataInfo[5]
    nLat = elevs.shape[0]
    nLong = elevs.shape[1]
    gridLongs = [initLong+dLong*i for i in range(nLong)]
    gridLats = [initLat+dLat*i for i in range(nLat)]
    f = interpolate.interp2d(gridLongs, gridLats, elevs, kind='linear')
    endLong = initLong + (nLong-1)* dLong
    endLat = initLat + (nLat-1)* dLat
#    sio.savemat('elevationMap.mat', {'elevs':elevs,'gridLongs':gridLongs,'gridLats':gridLats,'initLong':initLong,'initLat':initLat,'endLong':endLong,'endLat':endLat})
    el = []
    for i in range(long.shape[0]):
        lo = long[i, 0]
        la = lat[i, 0]
        assert(lo>=initLong and lo<=endLong), "The longitude is out of bound for elevation look-up!"
        assert(la<=initLat and la>=endLat), "The latitude is out of bound for elevation look-up!"
        el += [f(lo, la)[0]]
        
    return (np.matrix(el).T)/1000.
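As a usage sketch (not part of the original source), longLat2Elevation expects column vectors of coordinates inside the clipped bounds and returns elevations in kilometres:

import numpy as np

# hypothetical call with two points near Salt Lake City (inside the clipped bounds)
lons = np.matrix([[-112.0], [-111.9]])
lats = np.matrix([[40.7], [40.8]])
elev_km = longLat2Elevation(lons, lats)  # (n, 1) matrix of elevations in km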
Example #2
def clip(bounds, reference, **kwargs):
    if not bounds and not reference:
        raise click.BadOptionUsage(
            "One of --bounds or --reference must be supplied.")
    if not bounds:
        bounds = spatial.import_bounds(reference)
    elevation.clip(bounds, **kwargs)
Example #3
def elevation_dem(lon, lat, crs=DEF_CRS, product='SRTM1',
                  resampling=Resampling.nearest, nodata=DEM_NODATA, min_resol=1.0e-8):
    """ Set elevation in meters for every point.

    Parameter:
        product (str, optional): Digital Elevation Model to use with elevation
            package. Options: 'SRTM1' (30m), 'SRTM3' (90m). Default: 'SRTM1'
        resampling (rasterio.warp.Resampling, optional): resampling
            function used for reprojection from DEM to centroids' CRS. Default:
            nearest.
        nodata (int, optional): value to use in DEM no data points.
        min_resol (float, optional): if centroids are points, minimum
            resolution in lat and lon to use to interpolate DEM data. Default: 1.0e-8
    """
    import elevation

    bounds = lon.min(), lat.min(), lon.max(), lat.max()
    LOGGER.debug('Setting elevation of points with bounds %s.', str(bounds))
    rows, cols, ras_trans = pts_to_raster_meta(bounds, min(get_resolution(lat, lon, min_resol)))

    bounds += np.array([-.05, -.05, .05, .05])
    elevation.clip(bounds, output=TMP_ELEVATION_FILE, product=product,
                   max_download_tiles=MAX_DEM_TILES_DOWN)
    dem_mat = np.zeros((rows, cols))
    with rasterio.open(TMP_ELEVATION_FILE, 'r') as src:
        reproject(source=src.read(1), destination=dem_mat,
                  src_transform=src.transform, src_crs=src.crs,
                  dst_transform=ras_trans, dst_crs=crs,
                  resampling=resampling,
                  src_nodata=src.nodata, dst_nodata=nodata)

    # search nearest neighbor of each point
    x_i = ((lon - ras_trans[2]) / ras_trans[0]).astype(int)
    y_i = ((lat - ras_trans[5]) / ras_trans[4]).astype(int)
    return dem_mat[y_i, x_i]
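A minimal usage sketch, assuming the module-level constants referenced above (DEF_CRS, DEM_NODATA, TMP_ELEVATION_FILE, MAX_DEM_TILES_DOWN) and helpers are defined; the coordinate values are illustrative:

import numpy as np

# hypothetical call: 1-D arrays of point coordinates in lon/lat (EPSG:4326)
lon = np.array([12.45, 12.50, 12.55])
lat = np.array([41.85, 41.90, 41.95])
elev_m = elevation_dem(lon, lat)  # elevation in metres at each point (nearest-neighbour lookup)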
Example #4
    def from_online_srtm_database(bounds, path_to_dem_file=DEFAULT_OUTPUT, product="SRTM1", margin=0,
                                  no_data_value=-32768):
        """ Import DEM tile from SRTM3 or SRTM1 online dataset

        Based on the "elevation" module. Note that, at the moment, the SRTM3
        product does not seem to work properly.
        :param bounds:
        :param path_to_dem_file:
        :param product: "SRTM1" or "SRTM3"
        :param margin: margin (in %) around DEM
        :param no_data_value: no data filling value (default = -32768)
        :return:
        """
        from subprocess import CalledProcessError
        import elevation

        try:
            check_string(product, {'SRTM1', 'SRTM3'})
        except ValueError as e:
            raise DigitalElevationModelError("Invalid product name '%s': %s" % (product, e))

        try:
            elevation.clip(bounds, output=path_to_dem_file, margin="%s" % (margin/100), product=product)
        except CalledProcessError as e:
            raise DigitalElevationModelError("Internal subprocess error: %s" % e)
        except (ValueError, TypeError, KeyError) as e:
            raise DigitalElevationModelError("Invalid input argument: %s" % e)

        # Return instance of DigitalElevationModel
        return DigitalElevationModel(path_to_dem_file, no_data_value=no_data_value)
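An illustrative call (assumed, not from the original repository); bounds follow the (west, south, east, north) convention and margin is given in percent:

# hypothetical: download a 30 m SRTM1 tile around Rome into rome_dem.tif
dem = DigitalElevationModel.from_online_srtm_database(
    bounds=(12.35, 41.80, 12.65, 42.00),
    path_to_dem_file="rome_dem.tif",
    product="SRTM1",
    margin=2,
)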
Example #5
 def download(self, cleanup=True):
     """Download the SRTM data in GeoTIFF format"""
     dpath = os.path.dirname(self.tiffdata)
     if not os.path.isdir(dpath):
         print('Creating path', dpath)
         os.makedirs(dpath)
     elevation.clip(self.bounds, product=self.product, output=self.tiffdata)
     if cleanup:
         elevation.clean()
Example #6
    def download_area(self, srtm_model=1):
        if self.interactive:
            input_information = self._get_input_data()
        else:
            input_information = [self.data_dict[srtm_model], self.destination_points, self.destination_folder]

        elevation.clip(product=input_information[0], bounds=tuple(input_information[1]), output=input_information[2])
        # clean up stale temporary files and fix the cache in the event of a server error
        elevation.clean()
Example #7
 def elevation_grid(self):
     """
     Property storing the grid containing the elevation data.
     """
     if not hasattr(self, '_elevation_grid'):
         dem_path = os.path.join(os.getcwd(), 'DEM.tif')
         elevation.clip(bounds=self.bounds, output=dem_path)
         self._elevation_grid = np.array(rd.LoadGDAL(dem_path))
         os.remove('DEM.tif')
     return self._elevation_grid
Example #8
    def set_elevation(self,
                      product='SRTM1',
                      resampling=None,
                      nodata=DEM_NODATA,
                      min_resol=1.0e-8):
        """ Set elevation in meters for every pixel or point.

        Parameter:
            product (str, optional): Digital Elevation Model to use with elevation
                package. Options: 'SRTM1' (30m), 'SRTM3' (90m). Default: 'SRTM1'
            resampling (rasterio.warp.Resampling, optional): resampling
                function used for reprojection from DEM to centroids' CRS. Default:
                average if raster and nearest if points.
            nodata (int, optional): value to use in DEM no data points.
            min_resol (float, optional): if centroids are points, minimum
                resolution in lat and lon to use to interpolate DEM data. Default: 1.0e-8
        """
        import elevation
        bounds = np.array(self.total_bounds)
        if self.meta:
            LOGGER.debug('Setting elevation of raster with bounds %s.',
                         str(self.total_bounds))
            rows, cols = self.shape
            ras_trans = self.meta['transform']

            if resampling is None:
                resampling = Resampling.average

            bounds += np.array([-.05, -.05, .05, .05])
            elevation.clip(bounds,
                           output=TMP_ELEVATION_FILE,
                           product=product,
                           max_download_tiles=MAX_DEM_TILES_DOWN)
            dem_mat = np.zeros((rows, cols))
            with rasterio.open(TMP_ELEVATION_FILE, 'r') as src:
                reproject(source=src.read(1),
                          destination=dem_mat,
                          src_transform=src.transform,
                          src_crs=src.crs,
                          dst_transform=ras_trans,
                          dst_crs=self.crs,
                          resampling=resampling,
                          src_nodata=src.nodata,
                          dst_nodata=nodata)
            self.elevation = dem_mat.flatten()
        else:
            self.elevation = elevation_dem(self.lon, self.lat, self.crs,
                                           product, resampling, nodata,
                                           min_resol)
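A hedged usage sketch: set_elevation is a method of a centroids object (the surrounding class is not shown here), so the call below is an assumption about how it is used:

# hypothetical: fill centroids.elevation from the 90 m SRTM3 product
centroids.set_elevation(product='SRTM3')
print(centroids.elevation.shape)  # one elevation value per pixel or point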
Example #9
def read_dem(bounds, res):
    """
    Function to read in a DEM from SRTM and interpolate it onto a dolphyn mesh. This function uses the python package
    'elevation' (http://elevation.bopen.eu/en/stable/) and the gdal libraries.

    I will assume you want the 30 m resolution SRTM model.

    :param bounds: west, south, east, north coordinates
    :param res: mesh resolution passed to generate_mesh
    :return u_n, lx, ly, mesh, V: the elevation interpolated onto the dolphyn mesh, the domain lengths, the mesh and the function space
    """
    west, south, east, north = bounds

    # Create a temporary file to store the DEM and go get it using elevation
    dem_path = 'tmp.tif'
    output = os.getcwd() + '/' + dem_path
    elv.clip(bounds=bounds, output=output, product='SRTM1')

    # read in the DEM into a numpy array
    gdal_data = gdal.Open(output)
    data_array = gdal_data.ReadAsArray().astype(float)  # np.float is deprecated/removed in newer NumPy

    # The DEM is 30 m per pixel, so let's make arrays for x and y at 30 m
    ny, nx = np.shape(data_array)
    lx = nx * 30
    ly = ny * 30

    x, y = np.meshgrid(np.linspace(0, lx / ly, nx), np.linspace(0, 1, ny))

    # Create mesh and define function space
    domain = Rectangle(Point(0, 0), Point(lx / ly, 1))
    mesh = generate_mesh(domain, res)
    V = FunctionSpace(mesh, 'P', 1)
    u_n = Function(V)

    # Get the global coordinates
    gdim = mesh.geometry().dim()
    gc = V.tabulate_dof_coordinates().reshape((-1, gdim))

    # Interpolate elevation into the initial condition
    elevation = interpolate.griddata((x.flatten(), y.flatten()),
                                     data_array.flatten(),
                                     (gc[:, 0], gc[:, 1]),
                                     method='nearest')
    u_n.vector()[:] = elevation

    # remove tmp DEM
    os.remove(output)

    return u_n, lx, ly, mesh, V
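An illustrative call of read_dem (hypothetical values); bounds are (west, south, east, north) in degrees and res is the mesh resolution handed to generate_mesh:

# hypothetical: a small SRTM1 tile meshed at resolution 64
u_n, lx, ly, mesh, V = read_dem(bounds=(-111.8, 40.4, -111.6, 40.6), res=64)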
Example #10
def getTif(minX, maxX, minY, maxY, fname):

    if minX > maxX or minY > maxY:
        print('')
        print('**Error: Check Your latitude and longitude bounds!**')
        print('')
    elif round(maxX - minX, 3) != round(maxY - minY, 3):
        print('')
        print('**Error: Your bounds must be a square region!**')
        print('')
    else:
        dem_path = fname
        output = os.getcwd() + dem_path
        elevation.clip(bounds=(minX, minY, maxX, maxY), output=output)
        elevation.clean()
        return (output, minX, maxX, minY, maxY)
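A short example call (values are hypothetical); the bounds must form a square in degrees and fname is appended to the current working directory:

# hypothetical: a 0.5 x 0.5 degree square around Salt Lake City written to <cwd>/SLC.tif
result = getTif(minX=-112.25, maxX=-111.75, minY=40.50, maxY=41.00, fname='/SLC.tif')
if result is not None:
    output, minX, maxX, minY, maxY = result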
Example #11
    def export(
        self,
        region_name: str = "kenya",
        product: str = "SRTM3",
        max_download_tiles: int = 15,
    ) -> None:
        """
        Export SRTM topography data

        Arguments
        ----------
        region_name: str = 'kenya'
            The region to download. Must be one of the regions in the
            region_lookup dictionary
        product: {'SRTM1', 'SRTM3'} = 'SRTM3'
            The product to download the data from
        max_download_tiles: int = 15
            By default, the elevation package doesn't allow more than 9
            tiles to be downloaded. Kenya is 12 tiles - this increases the
            limit to allow Kenya to be downloaded
        """

        region = region_lookup[region_name]

        output_tif = self.output_folder / f"{region_name}.tif"

        if not output_tif.exists():
            print(f"Downloading tiles. Saving as tif to {output_tif}")
            try:
                elevation.clip(  # type: ignore
                    bounds=self._region_to_tuple(region),
                    output=output_tif.resolve().as_posix(),
                    product=product,
                    max_download_tiles=max_download_tiles,
                    margin="1",
                )
            except Exception as e:
                print(e)

            elevation.clean()  # type: ignore

        output_nc = self.output_folder / f"{region_name}.nc"

        if not output_nc.exists():
            print(f"Converting {output_tif} to NetCDF format ({output_nc})")
            self._tif_to_nc(output_tif, output_nc)
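A usage sketch (assumption): export is a method of an exporter object whose output_folder, region_lookup and _region_to_tuple/_tif_to_nc helpers are defined elsewhere in the project:

# hypothetical: download the SRTM3 tiles covering Kenya and convert them to NetCDF
exporter.export(region_name="kenya", product="SRTM3", max_download_tiles=15)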
Example #12
def download(bounds, output_path):
    """Download DEM for a given set of bounds.

    Parameters
    ----------
    bounds : tuple of float
        Bounds in lat/lon coordinates.
    output_path : str
        Path to output file.

    Returns
    -------
    output_path : str
        Path to output file.
    """
    if not os.path.isfile(output_path):
        elevation.clip(bounds, output_path, margin='5%')
    return output_path
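A minimal example call (hypothetical values); bounds use the elevation convention (west, south, east, north) in lon/lat degrees and the file is only downloaded once:

# hypothetical: cache a DEM for a small area and reuse it on later calls
dem_file = download(bounds=(5.0, 45.0, 5.5, 45.5), output_path='/tmp/dem_45N_5E.tif')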
Example #13
    def export(
        self,
        region_name: str = "kenya",
        product: str = "SRTM3",
        max_download_tiles: int = 15,
    ) -> None:
        r"""Export SRTM topography data

        :param region_name: Defines a geographical subset of the downloaded data to be used.
            Should be one of the regions defined in src.utils.region_lookup.
            Default = ``"kenya"``.
        :param product: One of ``{"SRTM1", "SRTM3"}``, the product to download the data from.
            Default = ``"SRTM3"``.
        :param max_download_tiles: By default, the elevation package doesn't allow more than 9
            tiles to be downloaded. Kenya is 12 tiles - this increases the limit to allow
            Kenya to be downloaded. Default = ``15``.
        """

        region = region_lookup[region_name]

        output_tif = self.output_folder / f"{region_name}.tif"

        if not output_tif.exists():
            print(f"Downloading tiles. Saving as tif to {output_tif}")
            try:
                elevation.clip(  # type: ignore
                    bounds=self._region_to_tuple(region),
                    output=output_tif.resolve().as_posix(),
                    product=product,
                    max_download_tiles=max_download_tiles,
                    margin="1",
                )
            except Exception as e:
                print(e)

            elevation.clean()  # type: ignore

        output_nc = self.output_folder / f"{region_name}.nc"

        if not output_nc.exists():
            print(f"Converting {output_tif} to NetCDF format ({output_nc})")
            self._tif_to_nc(output_tif, output_nc)
Example #14
def get_ned_elevation_raster(extent, res=250, **kwargs):
    """
    Wrapper for elevation.clip that will generate a GeoTIFF file for a 
    given project extent.
    :param output: Path to output file. Existing files will be overwritten.
    :param cache_dir: Root of the DEM cache folder.
    :param product: DEM product choice.
    """

    PRODUCT = {"30": elevation.PRODUCTS[0], "250": elevation.PRODUCTS[1]}

    logger.debug("Fetching NED raster data for project region extent")

    if kwargs.get("output") is None:
        raise AttributeError("output= argument is required by " +
                             "elevation.clip() and cannot be None")
    if os.path.isfile(kwargs.get("output")):
        logger.warning("Warning : It looks like output=" +
                       kwargs.get("output") + " already exists... skipping.")

    else:
        elevation.clip(bounds=extent, product=PRODUCT[str(res)], **kwargs)
        elevation.clean()
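An example call (values are illustrative, and output is assumed to be required as stated in the docstring):

import os

# hypothetical: fetch the 250 m (SRTM3) product for a project extent
get_ned_elevation_raster(
    extent=(-93.0, 44.5, -92.5, 45.0),
    res=250,
    output=os.path.join(os.getcwd(), 'project_dem.tif'),
)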
Example #15
    def process_dem(self, global_dem=''):
        ''' Download DEM from AWS, calculate slope
        '''
        # Download DEM

        if not os.path.exists(self.dem_file) and global_dem == '':
            tPrint("Downloading DEM")
            elevation.clip(bounds=self.inD.total_bounds, max_download_tiles=90000, output=self.dem_file, product='SRTM3')

        if not os.path.exists(self.dem_file) and not global_dem == '':
            tPrint("Downloading DEM")
            rMisc.clipRaster(rasterio.open(global_dem), self.inD, self.dem_file)
            
        # Calculate slope
        if not os.path.exists(self.slope_file) and os.path.exists(self.dem_file):
            tPrint("Calculating slope")
            in_dem = rasterio.open(self.dem_file)
            in_dem_data = in_dem.read()
            beau  = richdem.rdarray(in_dem_data[0,:,:], no_data=in_dem.meta['nodata'])
            slope = richdem.TerrainAttribute(beau, attrib='slope_riserun')
            meta = in_dem.meta.copy()
            meta.update(dtype = slope.dtype)
            with rasterio.open(self.slope_file, 'w', **meta) as outR:
                outR.write_band(1, slope)
Example #16
def main():
    #######  load user configurable parameters here    #######
    # Check user defined configuration file

    if len(sys.argv) == 1:
        print(
            'ERROR: wind_mapper.py requires one argument [configuration file] (i.e. wind_mapper.py '
            'param_existing_DEM.py)')
        exit(-1)

    # Get name of configuration file/module
    configfile = sys.argv[-1]

    # Load in configuration file as module
    X = importlib.machinery.SourceFileLoader('config', configfile)
    X = X.load_module()

    # Resolution of WindNinja simulations (in m)
    res_wind = X.res_wind

    # path to Wind Ninja executable

    # default path assumes we are running out of pip or we have a symlink @ ./bin/WindNinja_cli
    wn_exe = os.path.join(
        os.path.dirname(
            os.path.abspath(__file__)), 'bin', 'WindNinja_cli')

    if hasattr(X, 'wn_exe'):
        wn_exe = X.wn_exe

    if not os.path.exists(wn_exe):
        print('ERROR: Invalid path for WindNinja_cli. Consider specifying a `wn_exe` config option or confirm it is correct.')
        print(f'Path = {wn_exe}')
        exit(-1)

    environ["WINDNINJA_DATA"] = os.path.join(os.path.dirname(wn_exe), '..', 'share', 'windninja')

    # Parameter for atmospheric stability in Wind Ninja mass conserving (default value)
    alpha = 1

    # Number of wind direction categories (every 360/ncat degrees)
    ncat = 4
    if hasattr(X, 'ncat'):
        ncat = X.ncat

    if ncat < 1:
        print('ERROR ncat must be > 0 ')
        exit(-1)

    use_existing_dem = True

    lat_min = -9999
    lon_min = -9999
    lat_max = -9999
    lon_max = -9999
    if hasattr(X, 'use_existing_dem'):
        use_existing_dem = X.use_existing_dem
    if use_existing_dem:
        dem_filename = X.dem_filename
    else:
        lat_min = X.lat_min
        lat_max = X.lat_max
        lon_min = X.lon_min
        lon_max = X.lon_max

    if not use_existing_dem:
        if lat_min == -9999 or lon_min == -9999 or lat_max == -9999 or lon_max == -9999:
            print('Coordinates of the bounding box must be specified to download SRTM DEM.')
            exit(-1)

    # Method to compute average wind speed used to derive the transfer function
    wind_average = 'mean_tile'
    targ_res = 1000
    if hasattr(X, 'wind_average'):
        wind_average = X.wind_average
        if wind_average == 'grid':
            targ_res = X.targ_res

    list_options_average = ['mean_tile', 'grid']
    if wind_average not in list_options_average:
        print('wind average must be "mean_tile" or "grid"')

        exit(-1)

    if targ_res < 0:
        print('Target resolution must be > 0')
        exit(-1)

    # output to the specific directory, instead of the root dir of the calling python script
    user_output_dir = os.getcwd() + '/' + configfile[:-3] + '/'  # use the config filename as output path

    if hasattr(X, 'user_output_dir'):
        user_output_dir = X.user_output_dir
        if user_output_dir[-1] != os.path.sep:
            user_output_dir += os.path.sep

    # Delete previous dir (if exists)
    if os.path.isdir(user_output_dir):
        shutil.rmtree(user_output_dir, ignore_errors=True)

    # make new output dir
    os.makedirs(user_output_dir)

    # Setup file containing WN configuration
    nworkers = os.cpu_count() or 1

    # on linux we can ensure that we respect cpu affinity
    if 'sched_getaffinity' in dir(os):
        nworkers = len(os.sched_getaffinity(0))

    # ensure correct formatting on the output
    fic_config = F"""num_threads = {nworkers}  
initialization_method = domainAverageInitialization 
units_mesh_resolution = m 
input_speed = 10.0 
input_speed_units = mps 
input_wind_height = 40.0 
units_input_wind_height = m 
output_wind_height = 40.0 
units_output_wind_height = m 
output_speed_units = mps 
vegetation = grass 
diurnal_winds = false 
write_goog_output = false 
write_shapefile_output = false 
write_ascii_output = true 
write_farsite_atm = false """

    if hasattr(X, 'fic_config_WN'):
        fic_config_WN = X.fic_config_WN

        if not os.path.exists(fic_config_WN):
            print('ERROR: Invalid path for cli_massSolver.cfg given in `fic_config_WN` config options.')
            exit(-1)
    else:
        fic_config_WN = os.path.join(user_output_dir, 'default_cli_massSolver.cfg')
        with open(fic_config_WN, 'w') as fic_file:
            fic_file.write(fic_config)

    # we need to make sure we pick up the right paths to all the gdal scripts
    gdal_prefix = ''
    try:
        gdal_prefix = subprocess.run(["gdal-config", "--prefix"], stdout=subprocess.PIPE).stdout.decode()
        gdal_prefix = gdal_prefix.replace('\n', '')
        gdal_prefix += '/bin/'
    except:
        raise Exception(""" ERROR: Could not find gdal-config, please ensure it is installed and on $PATH """)

    # Wind direction increment
    delta_wind = 360. / ncat

    # List of variables to transform from asc into tif
    var_transform = ['ang', 'vel']
    if wind_average == 'grid':
        list_tif_2_vrt = ['U', 'V', 'spd_up_' + str(targ_res)]
    elif wind_average == 'mean_tile':
        list_tif_2_vrt = ['U', 'V', 'spd_up_tile']

    # Optimal size for wind ninja
    nres = 600

    # Additional grid points to ensure correct tile overlap
    nadd = 25

    # Define DEM file to use for WN
    fic_download = user_output_dir + 'ref-DEM.tif'

    name_utm = 'ref-DEM-utm'
    fic_utm = user_output_dir + '/' + name_utm + '.tif'

    if use_existing_dem:

        # if we are using a user-provided dem, ensure there are no NoData values that border the
        # DEM which will cause issues

        # mask data values
        exec_str = """%sgdal_calc.py -A %s --outfile %s --NoDataValue 0 --calc="1*(A>0)" """ % (gdal_prefix,
            dem_filename, user_output_dir + 'out.tif')
        subprocess.check_call([exec_str], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

        # convert to shp file
        exec_str = """%sgdal_polygonize.py -8 -b 1 -f "ESRI Shapefile" %s %s/pols """ % (gdal_prefix,
            user_output_dir + 'out.tif', user_output_dir)
        subprocess.check_call([exec_str], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

        # clip original with the shpfile
        exec_str = """%sgdalwarp -of GTiff -cutline %s/pols/out.shp -crop_to_cutline -dstalpha %s %s """ % (gdal_prefix,
            user_output_dir, dem_filename, fic_utm)
        subprocess.check_call([exec_str], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

        shutil.rmtree("%s/pols" % user_output_dir)
        os.remove("%s/out.tif" % user_output_dir)

    else:

        # Properties of the bounding box
        delta_lat = lat_max - lat_min
        delta_lon = lon_max - lon_min

        fac = 0.1  # Expansion factor to make sure that the downloaded SRTM tile is large enough

        lon_mid = (lon_min + lon_max) / 2.
        lat_mid = (lat_min + lat_max) / 2.

        # Download reference SRTM data
        elevation.clip(bounds=(
            lon_min - delta_lon * fac, lat_min - delta_lat * fac, lon_max + delta_lon * fac, lat_max + delta_lat * fac),
            output=fic_download)

        # Get corresponding UTM zone (center of the zone to extract)
        nepsg_utm = int(32700 - round((45 + lat_mid) / 90, 0) * 100 + round((183 + lon_mid) / 6, 0))
        srs_out = osr.SpatialReference()
        srs_out.ImportFromEPSG(nepsg_utm)

        # Get bounding box to extract in utm using pyproj
        WGS84 = Proj(init='EPSG:4326')
        inp = Proj(init='EPSG:' + str(nepsg_utm))
        xmin, ymin = transform(WGS84, inp, lon_min, lat_min)
        xmax, ymax = transform(WGS84, inp, lon_max, lat_max)

        # Extract a rectangular region of interest in utm at 30 m
        exec_str = '%sgdalwarp %s %s -overwrite -dstnodata -9999 -t_srs "%s" -te %.30f %.30f %.30f %.30f  -tr %.30f ' \
                   '%.30f -r bilinear '
        com_string = exec_str % (gdal_prefix, fic_download, fic_utm, srs_out.ExportToProj4(), xmin, ymin, xmax, ymax, 30, 30)
        subprocess.check_call([com_string], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

    # Get information on the projected file
    ds = gdal.Open(fic_utm)
    band = ds.GetRasterBand(1)
    gt = ds.GetGeoTransform()
    xmin = gt[0]
    ymax = gt[3]

    pixel_width = gt[1]
    pixel_height = -gt[5]

    xmax = xmin + pixel_width * ds.RasterXSize
    ymin = ymax - pixel_height * ds.RasterYSize

    lenx = band.XSize * pixel_width
    leny = band.YSize * pixel_height
    len_wn = res_wind * nres

    # Number of Wind Ninja tiles
    nopt_x = int(lenx // len_wn + 1)
    nopt_y = int(leny // len_wn + 1)

    nx = band.XSize / nopt_x
    ny = band.YSize / nopt_y

    if nopt_x == 1 and nopt_y == 1:
        # DEM is small enough for WN
        name_tmp = 'tmp_0_0'
        fic_tmp = user_output_dir + name_tmp + ".tif"
        shutil.copy(fic_utm, fic_tmp)
    else:
        # Split the DEM into smaller DEM for Wind Ninja
        for i in range(0, nopt_x):
            for j in range(0, nopt_y):

                xbeg = xmin + i * nx * pixel_width - nadd * pixel_width
                ybeg = ymin + j * ny * pixel_height - nadd * pixel_height

                delx = nx * pixel_width + 2 * nadd * pixel_width
                dely = ny * pixel_height + 2 * nadd * pixel_height

                if i == 0.:
                    xbeg = xmin
                if i == 0. or i == (nopt_x - 1):
                    delx = nx * pixel_width + nadd * pixel_width

                if j == 0.:
                    ybeg = ymin
                if j == 0. or j == (nopt_y - 1):
                    dely = ny * pixel_height + nadd * pixel_height

                name_tmp = 'tmp_' + str(i) + "_" + str(j)
                fic_tmp = user_output_dir + name_tmp + ".tif"
                clip_tif(fic_utm, fic_tmp, xbeg, xbeg + delx, ybeg, ybeg + dely, gdal_prefix)

    # Build WindNinja wind maps
    x_y_wdir = itertools.product(range(0, nopt_x),
                                 range(0, nopt_y),
                                 np.arange(0, 360., delta_wind))
    x_y_wdir = [p for p in x_y_wdir]

    for d in x_y_wdir:
        i,j,k = d
        dir_tmp = user_output_dir + 'tmp_dir' + "_" + str(i) + "_" + str(j)
        if not os.path.isdir(dir_tmp):
            os.makedirs(dir_tmp)

    print(f'Running WindNinja on {len(x_y_wdir)} combinations of direction and sub-area. Please be patient...')
    with futures.ProcessPoolExecutor(max_workers=nworkers) as executor:
        res = list(tqdm(executor.map(partial(call_WN_1dir, gdal_prefix, user_output_dir, fic_config_WN,
                                             list_tif_2_vrt, nopt_x, nopt_y, nx, ny,
                                             pixel_height, pixel_width, res_wind, targ_res, var_transform, wind_average,
                                             wn_exe,
                                             xmin, ymin), x_y_wdir), total=len(x_y_wdir)))

    print('Building VRTs...')
    # Loop on wind direction to build reference vrt file to be used by mesher
    nwind = np.arange(0, 360., delta_wind)
    with tqdm(total=len(nwind)) as pbar:
        for wdir in nwind:
            for var in list_tif_2_vrt:
                name_vrt = user_output_dir + name_utm + '_' + str(int(wdir)) + '_' + var + '.vrt'
                cmd = "find " + user_output_dir[0:-1] + " -type f -name '*_" + str(int(wdir)) + "_10_" + str(
                    res_wind) + "m_" + var + "*.tif' -exec " + gdal_prefix+ "gdalbuildvrt " + name_vrt + " {} +"
                subprocess.check_call([cmd], stdout=subprocess.PIPE,
                                      shell=True)
            pbar.update(1)
Example #17
def clip(bounds, reference, **kwargs):
    if not bounds and not reference:
        raise click.BadOptionUsage("One of --bounds or --reference must be supplied.")
    if not bounds:
        bounds = spatial.import_bounds(reference)
    elevation.clip(bounds, **kwargs)
Example #18
elif(resolution == 'Twenty'):
	angstep = 20

log = ''
current_path = os.getcwd()

# IMPORT MAP
if(topo_source == 'SRTM'):

	aux_lon = np.array([lon1, lon2])
	aux_lat = np.array([lat1, lat2])
	lon1 = min(aux_lon)
	lon2 = max(aux_lon)
	lat1 = min(aux_lat)
	lat2 = max(aux_lat)
	elevation.clip(bounds=(lon1, lat1, lon2, lat2), output = current_path + '/' + run_name + '.tif')

# READ MAP
if(topo_source == 'SRTM'):
	fp = run_name + '.tif'
	image = tifffile.imread(fp)
	elevation.clean()
	Topography = np.array(image)

	Topography_Sea = Topography + 0.0
	Topography_Sea[ Topography_Sea[:,:] <= 0] = -1.0 * np.sqrt(-1.0 * Topography_Sea[ Topography_Sea[:,:] <= 0])
	Topography_Sea[ Topography_Sea[:,:] > 0] =  np.nan
	Topography_Sea = Topography_Sea * -1.0

	Topography  = (Topography  + abs(Topography)) / 2.0
	cells_lon = Topography.shape[1]
Example #19
    # Call the parser to parse the arguments.
    args = parser.parse_args()

    output_img = os.path.abspath(args.output)
    tmp_dir = os.path.abspath(args.tmp)
    if not os.path.exists(tmp_dir):
        os.mkdir(tmp_dir)

    sensorFact = ARCSISensorFactory()
    sensor_cls_obj = sensorFact.getSensorClassFromName(args.sensor, False,
                                                       None)
    sensor_cls_obj.extractHeaderParameters(args.inputheader, None)
    image_bbox_latlon = sensor_cls_obj.getBBOXLatLon()

    bounds_ext = (image_bbox_latlon[0] - args.buffer,
                  image_bbox_latlon[2] - args.buffer,
                  image_bbox_latlon[1] + args.buffer,
                  image_bbox_latlon[3] + args.buffer)
    print(bounds_ext)
    try:
        elevation.clip(bounds=bounds_ext,
                       output=output_img,
                       max_download_tiles=args.limit,
                       cache_dir=tmp_dir)
    except Exception as e:
        print(
            "An error has occurred when downloading and processing the DEM data. Try re-running as data is cached."
        )
        raise e
    elevation.clean()
Example #20
def clip(**kwargs):
    elevation.clip(**kwargs)
Example #21
lat_start, lat_end = lat[0], lat[1]
lon_start, lon_end = lon[0], lon[1]

names = ['Østerild']

makeMap(lon_start, lon_end, lat_start, lat_end, names, [x1d, x2d], [y1d, y2d])

import sys
sys.path.append("/Users/lalc/dev/ext-libs")

import elevation

from elevation import cli
cli.selfcheck()

import elevation
import os
import rasterio
from rasterio.transform import from_bounds, from_origin
from rasterio.warp import reproject, Resampling

bounds = np.array([lon[0], lat[0], lon[1], lat[1]])

bounds = [-90.000672, 45.998852, -87.998534, 46.999431]

west, south, east, north = bounds
west, south, east, north = bounds = west - .05, south - .05, east + .05, north + .05
dem_path = '\\Iron_River_DEM.tif'
output = os.getcwd() + dem_path
elevation.clip(bounds=bounds, output=output, product='SRTM3')
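A small follow-up sketch (not part of the original script) showing how the clipped GeoTIFF could be read back with rasterio, which the snippet already imports:

# hypothetical: open the downloaded DEM and inspect its grid
with rasterio.open(output) as src:
    dem = src.read(1)  # 2-D array of elevations in metres
    print(src.crs, src.res, dem.shape)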
Example #22
def _surge_decay(inten_surge, centroids, dem_product, set_fraction, min_resol, \
                 add_sea_level_rise):
    """ Subtract DEM height and decay factor from the initial surge height and
    compute the corresponding fraction matrix.

    Parameter:
        inten_surge (sparse.csr_matrix): initial surge height in m
        centroids (Centroids): centroids, either raster or points
        dem_product (str): DEM to use: 'SRTM1' (30m) or 'SRTM3' (90m)
        set_fraction (bool, optional): set fraction matrix different to ones.
            Default: True
        min_resol (float, optional): minimum points centroids resolution to
            use when interpolating DEM data. Used in get_resolution method.
            If get_resolution(centroids.lat, centroids.lon) gives a too low
            number, set this parameter to the right centroids resolution.
            Default: 1.0e-8.
        add_sea_level_rise (float): sea level rise in meters to be added to surge

    Returns:
        inten_surge (sparse.csr_matrix), fract_surge (sparse.csr_matrix)
    """
    import elevation

    inland_decay = _calc_inland_decay(centroids)

    # subtract event by event to avoid densifying the whole matrix
    inten_surge = _substract_sparse_surge(inten_surge, centroids.elevation, \
                                          inland_decay, add_sea_level_rise)

    # if set_fraction: fraction is the share of DEM cells on land within each
    # centroids cell. Else fraction is one wherever there is intensity.
    if set_fraction:
        bounds = np.array(centroids.total_bounds)
        if centroids.meta:
            shape = centroids.shape
            ras_trans = centroids.meta['transform']
        else:
            shape = [0, 0]
            shape[0], shape[1], ras_trans = pts_to_raster_meta(bounds, \
                min(get_resolution(centroids.lat, centroids.lon, min_resol)))
        bounds += np.array([-.05, -.05, .05, .05])
        elevation.clip(bounds,
                       output=TMP_ELEVATION_FILE,
                       product=dem_product,
                       max_download_tiles=MAX_DEM_TILES_DOWN)
        fract_surge = np.zeros(shape)
        with rasterio.open(TMP_ELEVATION_FILE, 'r') as src:
            on_land = src.read(1)
            on_land[on_land > 0] = 1  # 1 land
            on_land[on_land <= 0] = 0  # 0 water
            on_land[on_land == src.nodata] = 0  # 0 nodata
            reproject(source=on_land,
                      destination=fract_surge,
                      src_transform=src.transform,
                      src_crs=src.crs,
                      dst_transform=ras_trans,
                      dst_crs=centroids.crs,
                      resampling=Resampling.average,
                      src_nodata=src.nodata,
                      dst_nodata=src.nodata)
        if centroids.meta:
            fract_surge = csr_matrix(fract_surge.flatten())
            fract_surge = csr_matrix(np.ones([inten_surge.shape[0], 1
                                              ])) * fract_surge
        else:
            x_i = ((centroids.lon - ras_trans[2]) / ras_trans[0]).astype(int)
            y_i = ((centroids.lat - ras_trans[5]) / ras_trans[4]).astype(int)
            fract_surge = csr_matrix(fract_surge[y_i, x_i])
    else:
        fract_surge = inten_surge.copy()
        fract_surge.data.fill(1)

    return inten_surge, fract_surge
Example #23
def landsat_dl(name, output, xmin, ymin, xmax, ymax):
    # clip the SRTM1 30 m DEM to the given bounds and save it to the output file
    elevation.clip(bounds=(xmin, ymin, xmax, ymax), output=output)
    # clean up stale temporary files and fix the cache in the event of a server error
    elevation.clean()
    return 'download/%s' % name
Example #24
def elevation_generator(site_lon, site_lat, res):
    bbox = np.array(
        [site_lon - res, site_lat - res, site_lon + res, site_lat + res])
    ########################################################################
    ### Build latitude and longitude grid for GOES-16
    ########################################################################

    t = time.time()

    goes_grid_file = os.getcwd() + '/aux/GOESR_ABI_CONUS_East.nc'
    grid_dataset = Dataset(goes_grid_file)
    lats, lons = grid_dataset.variables[
        'Latitude'][:].data, grid_dataset.variables['Longitude'][:].data

    # city bounds indices based on lat/lon bbox
    nc_indx_corners = np.array([
        np.unravel_index(
            np.argmin(
                np.abs(np.subtract(lons, bbox[0])) +
                np.abs(np.subtract(lats, bbox[1]))), np.shape(lats)),
        np.unravel_index(
            np.argmin(
                np.abs(np.subtract(lons, bbox[0])) +
                np.abs(np.subtract(lats, bbox[3]))), np.shape(lats)),
        np.unravel_index(
            np.argmin(
                np.abs(np.subtract(lons, bbox[2])) +
                np.abs(np.subtract(lats, bbox[1]))), np.shape(lats)),
        np.unravel_index(
            np.argmin(
                np.abs(np.subtract(lons, bbox[2])) +
                np.abs(np.subtract(lats, bbox[3]))), np.shape(lats))
    ])
    # clip indices for lat/lon of city and GOES-16 data
    nc_indx_bounds = [
        np.min([
            nc_indx_corners[0][0], nc_indx_corners[1][0],
            nc_indx_corners[2][0], nc_indx_corners[3][0]
        ]),
        np.max([
            nc_indx_corners[0][0], nc_indx_corners[1][0],
            nc_indx_corners[2][0], nc_indx_corners[3][0]
        ]),
        np.min([
            nc_indx_corners[0][1], nc_indx_corners[1][1],
            nc_indx_corners[2][1], nc_indx_corners[3][1]
        ]),
        np.max([
            nc_indx_corners[0][1], nc_indx_corners[1][1],
            nc_indx_corners[2][1], nc_indx_corners[3][1]
        ])
    ]

    # Actual GOES-16 coordinate grid
    lon_plot = lons[nc_indx_bounds[0]:nc_indx_bounds[1],
                    nc_indx_bounds[2]:nc_indx_bounds[3]]
    lat_plot = lats[nc_indx_bounds[0]:nc_indx_bounds[1],
                    nc_indx_bounds[2]:nc_indx_bounds[3]]

    print('--------------------------------------------------')
    print("Lat/lon data pull and clip elapsed time: %.2f s" %
          (time.time() - t))
    print('--------------------------------------------------')

    ########################################################################
    # Get elevation data
    # From SRTM DEM
    # From the elevation API; SRTM3 = 90 m resolution elevation DEM
    ########################################################################

    t = time.time()

    elevation.clip(bounds=bbox,
                   output=(os.getcwd().replace(' ', '\ ')) +
                   '/elevation_tifs/' + site_name.lower().replace(' ', '_') +
                   '.tif',
                   product='SRTM1')
    elevation.clean()
    dem_raster = rasterio.open('./elevation_tifs/' +
                               site_name.lower().replace(' ', '_') + '.tif')

    src_crs = dem_raster.crs  # get projection info
    utm = pyproj.Proj(src_crs)  # Pass CRS of image from rasterio
    lonlat = pyproj.Proj(init='epsg:4326')
    src_shape = src_height, src_width = dem_raster.shape
    T0 = rasterio.transform.from_bounds(bbox[0], bbox[1], bbox[2], bbox[3],
                                        src_width, src_height)
    x_ll, y_ll = T0 * ((pyproj.transform(lonlat, utm, bbox[0], bbox[1])))
    x_ul, y_ul = T0 * ((pyproj.transform(lonlat, utm, bbox[0], bbox[3])))
    x_lr, y_lr = T0 * ((pyproj.transform(lonlat, utm, bbox[2], bbox[1])))
    x_ur, y_ur = T0 * ((pyproj.transform(lonlat, utm, bbox[2], bbox[3])))
    x_span = [x_ll, x_ul, x_lr, x_ur]
    y_span = [y_ll, y_ul, y_lr, y_ur]
    cols, rows = np.meshgrid(np.arange(src_width), np.arange(src_height))
    eastings, northings = T0 * (cols, rows)
    cols, rows = [], []
    # transformation from raw projection to WGS84 lat/lon values
    xvals, yvals = (eastings, northings)

    elev = dem_raster.read(1)  # actual elevation data from DEM
    lats_elev, lons_elev = (pyproj.transform(utm, lonlat, xvals, yvals)
                            )  # actual lats/lons for elevation data

    elev = elev.ravel()  # ravel the elevation for processing
    lats_elev = lats_elev.ravel()  # ravel for processing
    lons_elev = lons_elev.ravel()  # ravel for processing

    # this is somewhat of a computationally expensive process, but it finds the elevation nearest to each GOES-16 pixel
    goes_elev = [elev[np.argmin(np.abs(np.subtract(ii,lons_elev))+np.abs(np.subtract(jj,\
                                      lats_elev)))] for ii,jj in zip(lon_plot.ravel(),lat_plot.ravel())]
    goes_elev = np.reshape(goes_elev,
                           np.shape(lat_plot))  # reshape to match GOES-16 grid

    os.remove('./elevation_tifs/' + site_name.lower().replace(' ', '_') +
              '.tif')  # remove the .tif file

    csv_fname = site_name + '_' + str(site_lat) + '_' + str(
        site_lon) + '_elevation.csv'
    np.savetxt(os.getcwd() + '/elevation_data/' + csv_fname,
               goes_elev,
               delimiter=",")

    print('--------------------------------------------------')
    print("Elevation data access time: %.2f s" % (time.time() - t))
    print('--------------------------------------------------')
Example #25
def clip(ctx, **kwargs):
    if ctx.parent and ctx.parent.params:
        kwargs.update(ctx.parent.params)
    elevation.clip(**kwargs)
Example #26
def elevation_generator(lats, lons, domain):

    t = time.time()

    # Define custom file name segment with bounding coordinates (NW and SE corners)
    domain_name = str(domain[1]) + 'N_' + str(domain[2]) + 'W_' + str(
        domain[0]) + 'N_' + str(domain[3]) + 'W'

    # Define names to dependent directories
    # (one for rasterio .tifs, the other for corresponding CSV data)
    tif_dir, csv_dir = 'elevation_tifs/', 'elevation_csvs/'
    tif_path = os.path.join(os.path.dirname(__file__), tif_dir)
    csv_path = os.path.join(os.path.dirname(__file__), csv_dir)

    # Create the file name for the output .csv file
    csv_fname = domain_name + '_elevation.csv'

    # Create new elevation .tif and .csv if the file doesn't already exist for this domain
    if not os.path.isfile(os.path.join(csv_path, csv_fname)):

        # If directories don't exist, create them
        if not os.path.isdir(tif_path):
            os.mkdir(tif_path)
        if not os.path.isdir(csv_path):
            os.mkdir(csv_path)

        # Customized domain list to match elevation package convention
        elev_domain = [domain[2], domain[0], domain[3], domain[1]]
        # Retrieve elevation data from SRTM and return a .tif of the spatial domain.
        # If SRTM3 (90m resolution) doesn't work, try SRTM1 (30m resolution)
        try:
            print('\t Using SRTM3...')
            elevation.clip(bounds=elev_domain,
                           output=(tif_path + domain_name + '.tif'),
                           product='SRTM3')
        except:
            print('\t Using SRTM1, will take longer than SRTM3...')
            elevation.clip(bounds=elev_domain,
                           output=(tif_path + domain_name + '.tif'),
                           product='SRTM1')
        elevation.clean()

        # Raster image processing
        dem_raster = rasterio.open(tif_path + domain_name + '.tif')
        src_crs = dem_raster.crs  # Get projection info
        utm = pyproj.Proj(src_crs)  # Pass CRS of image from rasterio
        lonlat = pyproj.Proj(init='epsg:4326')
        src_height, src_width = dem_raster.shape
        T0 = rasterio.transform.from_bounds(domain[2], domain[0], domain[3],
                                            domain[1], src_width, src_height)
        x_ll, y_ll = T0 * (
            (pyproj.transform(lonlat, utm, domain[0], domain[1])))
        x_ul, y_ul = T0 * (
            (pyproj.transform(lonlat, utm, domain[0], domain[3])))
        x_lr, y_lr = T0 * (
            (pyproj.transform(lonlat, utm, domain[2], domain[1])))
        x_ur, y_ur = T0 * (
            (pyproj.transform(lonlat, utm, domain[2], domain[3])))
        cols, rows = np.meshgrid(np.arange(src_width), np.arange(src_height))
        eastings, northings = T0 * (cols, rows)
        cols, rows = [], []
        # Transformation from raw projection to WGS84 lat/lon values
        xvals, yvals = (eastings, northings)

        # Retrieve actual elevation data and corresponding coordinates from DEM
        elev = dem_raster.read(1)
        lats_elev, lons_elev = (pyproj.transform(utm, lonlat, xvals, yvals))

        # Ravel each dataset for processing
        elev = elev.ravel()
        lats_elev = lats_elev.ravel()
        lons_elev = lons_elev.ravel()

        # Find the elevation value closest to each GOES-16 pixel
        goes_elev = [
            elev[np.argmin(
                np.abs(np.subtract(ii, lons_elev)) +
                np.abs(np.subtract(jj, lats_elev)))]
            for ii, jj in zip(lons.ravel(), lats.ravel())
        ]
        # Reshape data to match GOES-16 grid
        goes_elev = np.reshape(goes_elev, np.shape(lats))
        # Remove the .tif file
        os.remove(tif_path + domain_name + '.tif')
        # Save the .csv to the corresponding directory
        np.savetxt(os.path.join(csv_path, csv_fname), goes_elev, delimiter=",")

        return goes_elev

    runtime = time.time() - t
Example #27
def main():
    #######  load user configurable parameters here    #######
    # Check user defined configuration file

    if len(sys.argv) == 1:
        print(
            'ERROR: wind_mapper.py requires one argument [configuration file] (i.e. wind_mapper.py '
            'param_existing_DEM.py)')
        exit(-1)

    # Get name of configuration file/module
    configfile = sys.argv[-1]

    # Load in configuration file as module
    X = importlib.machinery.SourceFileLoader('config', configfile)
    X = X.load_module()

    # Resolution of WindNinja simulations (in m)
    res_wind = X.res_wind

    # path to Wind Ninja executable

    # default path assumes we are running out of pip or we have a symlink @ ./bin/WindNinja_cli
    wn_exe = os.path.join(
        os.path.dirname(
            os.path.abspath(__file__)), 'bin', 'WindNinja_cli')

    if hasattr(X, 'wn_exe'):
        wn_exe = X.wn_exe

    if not os.path.exists(wn_exe):
        print('ERROR: Invalid path for WindNinja_cli. Consider specifying a `wn_exe` config option or confirm it is correct.')
        print(f'Path = {wn_exe}')
        exit(-1)

    environ["WINDNINJA_DATA"] = os.path.join(os.path.dirname(wn_exe), '..', 'share', 'windninja')

    # Parameter for atmospheric stability in Wind Ninja mass conserving (default value)
    alpha = 1

    # Number of wind direction categories (every 360/ncat degrees)
    ncat = 4
    if hasattr(X, 'ncat'):
        ncat = X.ncat

    if ncat < 1:
        print('ERROR ncat must be > 0 ')
        exit(-1)

    use_existing_dem = True

    lat_min = -9999
    lon_min = -9999
    lat_max = -9999
    lon_max = -9999
    if hasattr(X, 'use_existing_dem'):
        use_existing_dem = X.use_existing_dem
    if use_existing_dem:
        dem_filename = X.dem_filename

        lat_min = X.lat_min
        lat_max = X.lat_max
        lon_min = X.lon_min
        lon_max = X.lon_max
    else:
        lat_min = X.lat_min
        lat_max = X.lat_max
        lon_min = X.lon_min
        lon_max = X.lon_max

    if not use_existing_dem:
        if lat_min == -9999 or lon_min == -9999 or lat_max == -9999 or lon_max == -9999:
            print('Coordinates of the bounding box must be specified to download SRTM DEM.')
            exit(-1)

    # Method to compute average wind speed used to derive the transfer function
    wind_average = 'grid'
    targ_res = 1000
    if hasattr(X, 'wind_average'):
        wind_average = X.wind_average
        if wind_average == 'grid':
            targ_res = X.targ_res

    list_options_average = ['mean_tile', 'grid']
    if wind_average not in list_options_average:
        print('wind average must be "mean_tile" or "grid"')

        exit(-1)

    if targ_res < 0:
        print('Target resolution must be > 0')
        exit(-1)

    # output to the specific directory, instead of the root dir of the calling python script
    user_output_dir = os.getcwd() + '/' + configfile[:-3] + '/'  # use the config filename as output path

    if hasattr(X, 'user_output_dir'):
        user_output_dir = X.user_output_dir
        if user_output_dir[-1] != os.path.sep:
            user_output_dir += os.path.sep

    # Delete previous dir (if exists)
    if os.path.isdir(user_output_dir):
        shutil.rmtree(user_output_dir, ignore_errors=True)

    # make new output dir
    os.makedirs(user_output_dir)

    # Setup file containing WN configuration
    nworkers = os.cpu_count() or 1

    # on linux we can ensure that we respect cpu affinity
    if 'sched_getaffinity' in dir(os):
        nworkers = len(os.sched_getaffinity(0))

    # ensure correct formatting on the output
    fic_config = F"""num_threads = {nworkers}  
initialization_method = domainAverageInitialization 
units_mesh_resolution = m 
input_speed = 10.0 
input_speed_units = mps 
input_wind_height = 40.0 
units_input_wind_height = m 
output_wind_height = 40.0 
units_output_wind_height = m 
output_speed_units = mps 
vegetation = grass 
diurnal_winds = false 
write_goog_output = false 
write_shapefile_output = false 
write_ascii_output = true 
write_farsite_atm = false """

    if hasattr(X, 'fic_config_WN'):
        fic_config_WN = X.fic_config_WN

        if not os.path.exists(fic_config_WN):
            print('ERROR: Invalid path for cli_massSolver.cfg given in `fic_config_WN` config options.')
            exit(-1)
    else:
        fic_config_WN = os.path.join(user_output_dir, 'default_cli_massSolver.cfg')
        with open(fic_config_WN, 'w') as fic_file:
            fic_file.write(fic_config)

    os.mkdir(os.path.join(user_output_dir, 'shp'))

    # we need to make sure we pick up the right paths to all the gdal scripts
    gdal_prefix = ''
    try:
        gdal_prefix = subprocess.run(["gdal-config", "--prefix"], stdout=subprocess.PIPE).stdout.decode()
        gdal_prefix = gdal_prefix.replace('\n', '')
        gdal_prefix += '/bin/'
    except:
        raise Exception(""" ERROR: Could not find gdal-config, please ensure it is installed and on $PATH """)

    # Wind direction increment
    delta_wind = 360. / ncat

    # List of variables to transform from asc into tif
    var_transform = ['ang', 'vel']
    if wind_average == 'grid':
        list_tif_2_vrt = ['U', 'V', 'spd_up_' + str(targ_res)]
    elif wind_average == 'mean_tile':
        list_tif_2_vrt = ['U', 'V', 'spd_up_tile']

    # Optimal size for wind ninja
    nres = 600

    # Additional grid points to ensure correct tile overlap
    nadd = 25

    # Define DEM file to use for WN
    fic_download = user_output_dir + 'ref-DEM.tif'

    name_utm = 'ref-DEM-proj'
    fic_lcc = user_output_dir + '/' + name_utm + '.tif'

    LCC_proj = '+proj=lcc +lon_0=-90 +lat_1=33 +lat_2=45'
    LCC_proj = None
    if use_existing_dem:

        # if we are using a user-provided dem, ensure there are no NoData values that border the
        # DEM which will cause issues
        print('Preparing input DEM')
        # mask data values
        print('...',end='')
        exec_str = """%sgdal_calc.py -A %s --outfile %s --NoDataValue 0 --calc="1*(A>0)" """ % (gdal_prefix,
            dem_filename, user_output_dir + 'out.tif')
        subprocess.check_call([exec_str],   shell=True) #stdout=subprocess.PIPE,  stderr=subprocess.PIPE,
        print('25...', end='')

        # convert to shp file
        exec_str = """%sgdal_polygonize.py -8 -b 1 -f "ESRI Shapefile" %s %s/pols """ % (gdal_prefix,
            user_output_dir + 'out.tif', user_output_dir)
        subprocess.check_call([exec_str],  shell=True)
        print('50...', end='')

        # clip original with the shpfile
        exec_str = """%sgdalwarp -of GTiff -cutline %s/pols/out.shp -crop_to_cutline -dstalpha %s %s """ % (gdal_prefix,
            user_output_dir, dem_filename, fic_lcc)
        subprocess.check_call([exec_str], shell=True)
        print('75...', end='')
        shutil.rmtree("%s/pols" % user_output_dir)
        os.remove("%s/out.tif" % user_output_dir)
        print('100', end='')
        print(' - done')



    else:

        # need to ensure that we request a square domain in LCC projection

        # Properties of the bounding box
        delta_lat = lat_max - lat_min
        delta_lon = lon_max - lon_min

        fac = 0.1  # Expansion factor to make sure that the downloaded SRTM tile is large enough

        # This is a larger extent than what we will use so we ensure perfect coverage
        lon_min_expanded = lon_min - delta_lon * fac
        lat_min_expanded = lat_min - delta_lat * fac
        lon_max_expanded = lon_max + delta_lon * fac
        lat_max_expanded = lat_max + delta_lat * fac

        LCC_proj = '+proj=merc +lat_ts=%.30f' % ((lat_min_expanded + lat_max_expanded)/2.0)

        t_4326_to_lcc = Transformer.from_crs("epsg:4326", LCC_proj)
        lon_lcc, lat_lcc = t_4326_to_lcc.transform(
                                                   [lat_min_expanded, lat_min_expanded, lat_max_expanded, lat_max_expanded],
                                                   [lon_min_expanded, lon_max_expanded, lon_min_expanded, lon_max_expanded],
                                                   )
        lon_lcc_square = [ min(lon_lcc), min(lon_lcc), max(lon_lcc), max(lon_lcc)]
        lat_lcc_square = [ min(lat_lcc), max(lat_lcc), min(lat_lcc), max(lat_lcc)]

        t_merc_to_4326 = Transformer.from_crs(LCC_proj, "epsg:4326",)
        new_4326_square_lat, new_4326_square_lon = t_merc_to_4326.transform(lon_lcc_square, lat_lcc_square)

        lat_max_expanded = max(new_4326_square_lat)
        lat_min_expanded = min(new_4326_square_lat)
        lon_max_expanded = max(new_4326_square_lon)
        lon_min_expanded = min(new_4326_square_lon)


        # Download reference SRTM data
        elevation.clip(bounds=(lon_min_expanded, lat_min_expanded, lon_max_expanded, lat_max_expanded), output=fic_download)

        # Extract a rectangular region of interest in utm at 30 m
        # exec_str = '%sgdalwarp %s %s -overwrite -dstnodata -9999 -t_srs "%s" -te_srs "epsg:4326" -te %.30f %.30f %.30f %.30f  -tr %.30f ' \
        #            '%.30f -r bilinear '
        # com_string = exec_str % (gdal_prefix, fic_download, fic_lcc+'.tmp.tif', LCC_proj,
        #                          X.lon_min, X.lat_min, X.lon_max, X.lat_max, 30, 30)
        # subprocess.check_call([com_string], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

        # # Extract a rectangular region of interest in utm at 30 m
        # exec_str = '%sgdalwarp %s %s -overwrite -dstnodata -9999 -t_srs "%s" -te %.30f %.30f %.30f %.30f  -tr %.30f ' \
        #            '%.30f -r bilinear '
        # com_string = exec_str % (gdal_prefix, fic_download, fic_lcc+'.tmp.tif', LCC_proj,
        #                          min(lon_lcc), min(lat_lcc), max(lon_lcc), max(lat_lcc), 30, 30)
        # subprocess.check_call([com_string], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

        exec_str = '%sgdalwarp %s %s -overwrite -dstnodata -9999 -t_srs "%s"  -tr %.30f ' \
                   '%.30f -r bilinear '
        com_string = exec_str % (gdal_prefix, fic_download, fic_lcc+'.tmp.tif', LCC_proj,
                                 30, 30)
        subprocess.check_call([com_string], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

        exec_str = '%sgdal_translate -ot Float32  %s %s' % (gdal_prefix, fic_lcc+'.tmp.tif', fic_lcc)
        subprocess.check_call([exec_str], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

        os.remove(fic_lcc+'.tmp.tif')


    srs_out = osr.SpatialReference()
    srs_out.ImportFromEPSG(4326)

    pts_to_shp([[lat_max, lon_min],
                [lat_max, lon_max],
                [lat_min, lon_max],
                [lat_min, lon_min]],
               os.path.join(user_output_dir,'shp','user_bbox.shp'),
               srs_out.ExportToProj4(),
               )

    ds = gdal.Open(fic_lcc)
    wkt = ds.GetProjection()
    srs = osr.SpatialReference()
    srs.ImportFromWkt(wkt)

    if LCC_proj is None:
        LCC_proj = srs.ExportToProj4()

    band = ds.GetRasterBand(1)
    gt = ds.GetGeoTransform()

    t_4326_to_merc = Transformer.from_crs("epsg:4326", LCC_proj, always_xy=True)
    x_merc, y_merc = t_4326_to_merc.transform(
        [lon_min, lon_max, lon_min, lon_max],
        [lat_min, lat_min, lat_max, lat_max],
    )

    xmin = min(x_merc)
    xmax = max(x_merc)
    ymin = min(y_merc)
    ymax = max(y_merc)


    is_geographic = srs.IsGeographic()
    if is_geographic:
        raise Exception('Input DEM must be projected ')

    pixel_width = gt[1]
    pixel_height = -gt[5]

    lenx = xmax-xmin
    leny = ymax-ymin

    len_wn = res_wind * nres

    # Number of Wind Ninja tiles
    nopt_x = int(lenx // len_wn + 1)
    nopt_y = int(leny // len_wn + 1)

    nx = lenx/pixel_width / nopt_x
    ny = leny/pixel_height / nopt_y

    if nopt_x == 1 and nopt_y == 1:
        # DEM is small enough for WN
        name_tmp = 'tmp_0_0'
        fic_tmp = user_output_dir + name_tmp + ".tif"
        shutil.copy(fic_lcc, fic_tmp)

    else:

        # Split the DEM into smaller DEM for Wind Ninja
        for i in range(0, nopt_x):
            for j in range(0, nopt_y):

                xbeg = xmin + i * nx * pixel_width - nadd * pixel_width
                ybeg = ymin + j * ny * pixel_height - nadd * pixel_height

                delx = nx * pixel_width + 2 * nadd * pixel_width
                dely = ny * pixel_height + 2 * nadd * pixel_height

                if i == 0:
                    xbeg = xmin
                if i == 0 or i == (nopt_x - 1):
                    delx = nx * pixel_width + nadd * pixel_width

                if j == 0:
                    ybeg = ymin
                if j == 0 or j == (nopt_y - 1):
                    dely = ny * pixel_height + nadd * pixel_height

                # get UTM zone
                chunk_x_mid = (xbeg + delx/2.)
                chunk_y_mid = (ybeg + dely/2.)

                # LCC_proj is the working projected CRS (despite the name it may be a custom Mercator);
                # transform the chunk centre back to WGS84 to pick the UTM zone
                t_merc_to_4326 = Transformer.from_crs(LCC_proj, "epsg:4326", always_xy=True)
                lon_mid, lat_mid = t_merc_to_4326.transform(chunk_x_mid, chunk_y_mid)

                nepsg_utm = int(32700 - round((45 + lat_mid) / 90, 0) * 100 + round((183 + lon_mid) / 6, 0))
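                # standard formula for the WGS84/UTM EPSG code: 326xx in the northern
                # hemisphere, 327xx in the southern, where xx is the UTM zone number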

                t_merc_to_utm = Transformer.from_crs(LCC_proj, f"epsg:{nepsg_utm}", always_xy=True)

                utm_x, utm_y = t_merc_to_utm.transform(
                    [xbeg, xbeg + delx, xbeg, xbeg + delx],
                    [ybeg, ybeg       , ybeg + dely, ybeg + dely]
                )

                srs_out = osr.SpatialReference()
                srs_out.ImportFromEPSG(nepsg_utm)


                pts_to_shp([[max(utm_y), min(utm_x)],
                            [max(utm_y), max(utm_x)],
                            [min(utm_y), max(utm_x)],
                            [min(utm_y), min(utm_x)]],
                           os.path.join(user_output_dir,'shp',f'utm_{i}_{j}.shp'),
                           srs_out.ExportToProj4(),
                           )

                name_tmp = 'tmp_' + str(i) + "_" + str(j)
                fic_tmp = user_output_dir + name_tmp + ".tif"

                srs_out = osr.SpatialReference()
                srs_out.ImportFromEPSG(nepsg_utm)
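                # gdalwarp reprojects the downloaded DEM to this chunk's UTM zone at 30 m,
                # clips it to the chunk polygon (-cutline/-crop_to_cutline) and fills the
                # outside with -9999 nodata (WindNinja generally expects a projected DEM)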

                exec_str = '%sgdalwarp -overwrite -te %f %f %f %f -tr 30 30 -r "cubicspline" -et 0 -cutline %s -crop_to_cutline -dstnodata -9999 -t_srs "%s" %s %s'

                com_string = exec_str % (gdal_prefix,
                                         min(utm_x), min(utm_y),  max(utm_x), max(utm_y),
                                         os.path.join(user_output_dir,'shp',f'utm_{i}_{j}.shp'),
                                         srs_out.ExportToProj4(),
                                         fic_download,
                                         fic_tmp+'.tmp.tif')
                subprocess.check_call([com_string], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

                # make absolutely sure there is no missing data: the cutline crop above can sometimes leave a 1-px strip of nodata along the edge
                exec_str = '%sgdal_fillnodata.py %s %s'
                com_string = exec_str % (gdal_prefix, fic_tmp+'.tmp.tif', fic_tmp)
                subprocess.check_call([com_string], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

                os.remove(fic_tmp+'.tmp.tif')
                print(fic_tmp)


    # Build WindNinja winds maps
    x_y_wdir = itertools.product(range(0, nopt_x),
                                 range(0, nopt_y),
                                 np.arange(0, 360., delta_wind))
    x_y_wdir = list(x_y_wdir)
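    # each work item is one (chunk i, chunk j, wind direction) combination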

    for d in x_y_wdir:
        i, j, k = d
        dir_tmp = user_output_dir + 'tmp_dir' + "_" + str(i) + "_" + str(j)
        if not os.path.isdir(dir_tmp):
            os.makedirs(dir_tmp)

    print(f'Running WindNinja on {len(x_y_wdir)} combinations of direction and sub-area. Please be patient...')
    with futures.ProcessPoolExecutor(max_workers=nworkers) as executor:
        res = list(tqdm(executor.map(partial(call_WN_1dir, gdal_prefix, user_output_dir, fic_config_WN,
                                             list_tif_2_vrt, nopt_x, nopt_y, nx, ny,
                                             pixel_height, pixel_width, res_wind, targ_res, var_transform, wind_average,
                                             wn_exe,
                                             xmin, ymin), x_y_wdir), total=len(x_y_wdir)))

    for d in itertools.product(range(0, nopt_x),
                               range(0, nopt_y)):
        i, j = d
        name_tmp = 'tmp_' + str(i) + "_" + str(j)
        fic_tmp = user_output_dir + name_tmp + ".tif"
        os.remove(fic_tmp)


    print('Merging WindNinja outputs...')
    # Loop over wind directions to merge the per-chunk rasters into the reference files used by mesher
    nwind = np.arange(0, 360., delta_wind)
    with tqdm(total=len(nwind)) as pbar:
        for wdir in nwind:
            for var in list_tif_2_vrt:
                # name_vrt = user_output_dir + name_utm + '_' + str(int(wdir)) + '_' + var + '.vrt'
                # cmd = "find " + user_output_dir[0:-1] + " -type f -name '*_" + str(int(wdir)) + "_10_" + str(
                #     res_wind) + "m_" + var + "*.tif' -exec " + gdal_prefix + "gdalbuildvrt " + name_vrt + " {} +"

                name_tif = user_output_dir + name_utm + '_' + str(int(wdir)) + '_' + var
                cmd = "find " + user_output_dir[0:-1] + " -type f -name '*_" + str(int(wdir)) + "_10_" + str(
                    res_wind) + "m_" + var + "*.tif' -exec rio_merge.py " +  name_tif + '.tmp.tif' + " {} +"
                subprocess.check_call([cmd], stdout=subprocess.PIPE,
                                      shell=True)

                srs_out = osr.SpatialReference()
                srs_out.ImportFromEPSG(4326)
                exec_str = '%sgdalwarp -overwrite -te %f %f %f %f -r "cubicspline" -et 0 -cutline %s -crop_to_cutline -dstnodata -9999 -t_srs "%s" %s %s'
                com_string = exec_str % (gdal_prefix,
                                         X.lon_min, X.lat_min, X.lon_max, X.lat_max,
                                         os.path.join(user_output_dir,'shp', 'user_bbox.shp'),
                                         srs_out.ExportToProj4(),
                                         name_tif+'.tmp.tif',
                                         name_tif+'.tif')
                subprocess.check_call([com_string], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
                os.remove(name_tif+'.tmp.tif')

            pbar.update(1)
Example #28
0
def download_DEM(region, myaquifer, units):
    # Download and Set up the DEM for the aquifer
    app_workspace = app.get_app_workspace()
    name = myaquifer['Name']
    directory = os.path.join(app_workspace.path, region + '/DEM')
    if not os.path.exists(directory):
        os.makedirs(directory)
    minorfile = os.path.join(app_workspace.path,
                             region + '/MinorAquifers.json')
    majorfile = os.path.join(app_workspace.path,
                             region + '/MajorAquifers.json')
    regionfile = os.path.join(app_workspace.path, region,
                              region + '_State_Boundary.json')
    aquiferShape = {'type': 'FeatureCollection', 'features': []}
    fieldname = 'Aquifer_Name'
    print("Setting up DEM")
    match = False
    if os.path.exists(minorfile):
        with open(minorfile, 'r') as f:
            minor = json.load(f)
        for i in minor['features']:
            if fieldname in i['properties']:
                if i['properties'][fieldname] == myaquifer['CapsName']:
                    aquiferShape['features'].append(i)
                    match = True
    if os.path.exists(majorfile) and not match:
        with open(majorfile, 'r') as f:
            major = json.load(f)
        for i in major['features']:
            if fieldname in i['properties']:
                if i['properties'][fieldname] == myaquifer['CapsName']:
                    aquiferShape['features'].append(i)
                    match = True
    if os.path.exists(regionfile) and not match:
        with open(regionfile, 'r') as f:
            aquiferShape = json.load(f)
    try:
        lonmin, latmin, lonmax, latmax = bbox(aquiferShape['features'][0])
    except Exception:
        # bbox failed on the first feature: fall back to the feature with the most coordinates
        longest = 0
        answer = 0
        for f in range(len(aquiferShape['features'])):
            if aquiferShape['features'][f]['geometry'] is not None:
                x, y = list(
                    zip(*list(
                        explode(aquiferShape['features'][f]['geometry']
                                ['coordinates']))))
                if len(x) > longest:
                    longest = len(x)
                    answer = f
        lonmin, latmin, lonmax, latmax = bbox(aquiferShape['features'][answer])

    bounds = (lonmin - .1, latmin - .1, lonmax + .1, latmax + .1)
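    # the 0.1-degree padding above makes sure the clipped DEM fully covers the aquifer outline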
    dem_path = name.replace(' ', '_') + '_DEM.tif'
    demfile = os.path.join(app_workspace.path, region + '/DEM.tif')
    if (os.path.exists(demfile)):
        output = demfile
    else:
        output = os.path.join(directory, dem_path)
        elevation.clip(bounds=bounds, output=output, product='SRTM3')
    print("90 m DEM successfully downloaded for", name)

    if units:
        # Read the DEM with rasterio and return its data, CRS and transform so the
        # caller can reproject it (e.g. to a 0.01 degree grid)
        resolution = .01
        with rasterio.open(output) as dem_raster:
            src_crs = dem_raster.crs
            src_shape = src_height, src_width = dem_raster.shape
            src_transform = dem_raster.transform  # from_bounds(lonmin, latmin, lonmax, latmax, src_width, src_height)
            source = dem_raster.read(1)
            dem_json = {
                'source': source,
                'src_crs': src_crs,
                'src_transform': src_transform
            }
        return dem_json
Example #29
0
import os
import elevation

# create out folder
out_path = os.path.join(os.getcwd(), "tmp")
os.makedirs(out_path, exist_ok=True)

# Because we can't request a large area all at once,
# loop through 1-degree tiles along the 49th parallel
for i, xmin in enumerate(range(-123, -113)):
    output = os.path.join(out_path, 'mapzen_{}.tif'.format(i))
    elevation.clip(bounds=(xmin, 48, xmin + 1, 49.1), output=output)

# clean up stale temporary files and fix the cache in the event of a server error
elevation.clean()
Example #30
0
def get_elevation(
        bounds, filename_tmp=None, dirname_cache=None, remove_tmp_file=True,
        product=None, margin='1%', Nx=4096, Ny=4096,
        ):
    """Load SRTM elevation.

        Parameters
        ----------
        bounds : iterable of float
            (deg) lonmin, latmin, lonmax, latmax
        filename_tmp : str, optional
            temporary file
        dirname_cache : str, optional
            directory where SRTM tiles are to be stored
        remove_tmp_file : bool, optional
            (default: True)
        product : str, optional
            SRTM product ('SRTM1' or 'SRTM3'); overrides elevation.DEFAULT_PRODUCT if given
        margin : str, optional
            margin added around the bounds, passed to elevation.clip (default: '1%')
        Nx, Ny : int, optional
            target raster dimensions, passed through to tif.read

        Returns
        -------
        data : 2d-array
            elevation
        meta : dict
            coordinates and such
    """
    # set defaults
    # =====================================================
    if filename_tmp is None:
        filename_tmp = get_filename_tmp()
    if dirname_cache is None:
        dirname_cache = _dirname_cache
    # =====================================================

    # expand user
    # =====================================================
    filename_tmp = os.path.expanduser(filename_tmp)
    dirname_cache = os.path.expanduser(dirname_cache)
    # =====================================================

    # create directories
    # =====================================================
    dirname_tmp = os.path.dirname(filename_tmp)
    for dirname in (dirname_cache, dirname_tmp):
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
    # =====================================================

    # load
    # =====================================================
    # point the elevation package at the cache directory and requested product
    elevation.CACHE_DIR = dirname_cache
    if product is not None:
        # only override the package default when a product was explicitly requested
        elevation.DEFAULT_PRODUCT = product

    # download if necessary and crop section to file
    elevation.clip(bounds, output=filename_tmp, margin=margin)

    # load file
    data, meta = tif.read(filename_tmp, bounds=bounds, Nx=Nx, Ny=Ny)

    # crop singleton first dimension
    while np.shape(data)[0] == 1 and len(np.shape(data)) > 2:
        data = data[0]

    # remove temporary file
    # =====================================================
    if remove_tmp_file and os.path.isfile(filename_tmp):
        os.remove(filename_tmp)
    # =====================================================

    # convert to float and replace nans
    # =====================================================
    # int -> float
    data = 1. * data

    # NaN
    idx_nan = data == meta['nodata']
    data[idx_nan] = np.nan
    # =====================================================

    return data, meta