Example 1
class GrassWrapper(object):
    def __init__(self, opts):
        self.opts = opts

        if self.opts.verbose:
            gscript._debug_level = 5

        self._open_session()
        self.run_command('g.proj', proj4=projection.proj4, flags='c')

    def run_command(self, command, **kwargs):
        kwargs.update(overwrite=self.opts.force, verbose=self.opts.verbose)

        return gscript.run_command(command, **kwargs)

    def list_pairs(self, *args, **kwargs):
        return gscript.list_pairs(*args, **kwargs)

    def _open_session(self):
        self.session = Session()
        self.session.open(gisdb=gisdb,
                          location=location,
                          create_opts='EPSG:4326')

    def __del__(self):
        self.session.close()
Example 2
def __Session_create(tmp_vars):

    from grass.pygrass.modules.shortcuts import general as g

    with Session(
            gisdb=tmp_vars["tmpdir"],
            location=tmp_vars["location_name"],
            create_opts=tmp_vars["create_opts"],
    ):
        # execute some command inside PERMANENT
        g.mapsets(flags="l")
        g.list(type="raster", flags="m")

    # check if PERMANENT has been created
    __check_PERMANENT_in_folder(tmp_vars["location_path"])

    # check files
    permanent = os.path.join(tmp_vars["location_path"], "PERMANENT")
    __check_mandatory_files_in_PERMANENT(permanent)

    # check PROJ_EPSG content
    __check_epsg(permanent, 3035)

    # check creation of a mapset
    with Session(
            gisdb=tmp_vars["tmpdir"],
            location=tmp_vars["location_name"],
            mapset=tmp_vars["mapset_name"],
            create_opts="",
    ):
        # execute some command inside user
        g.mapsets(flags="l")
        g.list(type="raster", flags="m")

    __check_mandatory_files_in_mapset(tmp_vars["mapset_path"])
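The test above only reads from `tmp_vars`; a minimal sketch of what such a fixture dictionary might contain, with values chosen to match the EPSG:3035 check (all paths and names are hypothetical):

import os

# hypothetical fixture values; only the keys used by __Session_create are shown
tmpdir = "/tmp/grassdata"
tmp_vars = {
    "tmpdir": tmpdir,
    "location_name": "epsg3035",
    "location_path": os.path.join(tmpdir, "epsg3035"),
    "mapset_name": "user1",
    "mapset_path": os.path.join(tmpdir, "epsg3035", "user1"),
    "create_opts": "EPSG:3035",
}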
Example 3
    def set_grass_session(self):
        """Set the GRASS session.
        """
        gisdb = self.conf.grass_params['grassdata']
        location = self.conf.grass_params['location']
        mapset = self.conf.grass_params['mapset']
        if location is None:
            msgr.fatal("[grass] section is missing.")

        # Check if the given parameters exist and can be accessed
        error_msg = u"'{}' does not exist or does not have adequate permissions"
        if not os.access(gisdb, os.R_OK):
            msgr.fatal(error_msg.format(gisdb))
        elif not os.access(os.path.join(gisdb, location), os.R_OK):
            msgr.fatal(error_msg.format(location))
        elif not os.access(os.path.join(gisdb, location, mapset), os.W_OK):
            msgr.fatal(error_msg.format(mapset))

        # Start Session
        if self.conf.grass_params['grass_bin']:
            grassbin = self.conf.grass_params['grass_bin']
        else:
            grassbin = None
        self.grass_session = GrassSession(grassbin=grassbin)
        self.grass_session.open(gisdb=gisdb,
                                location=location,
                                mapset=mapset,
                                loadlibs=True)
        return self
Example 4
def setup_grass(crs):
    """Sets up a GRASS project with the name 'GRASS_db_{crs}' in the
    subdirectory '/grass' of the data directory.

    :param crs: EPSG code (e.g. '32629') to set the projection. The main
        workflow (see grass_main()) uses the most common CRS of all files in a
        dataset. [str]

    :returns: None. The new GRASS project can afterwards be started using
        start_grass_session(crs).
    """

    path = Grass.path
    name = f'GRASS_db_{crs}'

    ## General GRASS setup
    ## (GRASSBIN is defined in config.py!)
    os.environ['GISBASE'] = get_grass_gisbase()
    sys.path.append(os.path.join(os.environ['GISBASE'], 'bin'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'lib'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'scripts'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'etc', 'python'))
    os.environ['PROJ_LIB'] = os.path.join(os.environ['GISBASE'], 'share\\proj')

    ## Open a GRASS session and create mapset if it doesn't exist already
    with Session(gisdb=path, location=name,
                 create_opts=f"EPSG:{crs}") as session:
        pass

    return print(f"GRASS project '{name}' was successfully created.")
Example 5
def grass_xy_session(tmpdir_factory):
    """Create a GRASS session in a new XY location and PERMANENT mapset
    """
    tmpdir = str(tmpdir_factory.mktemp("grassdata"))
    print(tmpdir)
    grass_session = GrassSession()
    grass_session.open(
        gisdb=tmpdir,
        location='xy',
        mapset=None,  # PERMANENT
        create_opts='XY',
        loadlibs=True)
    os.environ['GRASS_VERBOSE'] = '1'
    # os.environ['ITZI_VERBOSE'] = '4'
    yield grass_session
    grass_session.close()
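As written, `grass_xy_session` is a generator, so it is presumably registered as a pytest fixture; a minimal sketch of how a test could consume it, assuming the fixture is registered in a conftest.py (the registration line and test name are hypothetical):

import pytest

# assume the generator above is registered as a fixture, e.g. in conftest.py:
# grass_xy_session = pytest.fixture(scope="session")(grass_xy_session)

def test_location_is_xy(grass_xy_session):
    # the fixture yields an open GrassSession, so GRASS scripting works here
    import grass.script as gscript
    assert gscript.gisenv()["LOCATION_NAME"] == "xy"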
Example 6
def generate_viewshed(x_transmitter, y_transmitter, output_dir, filename,
                      tile_path):

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    gis_path = os.path.join(output_dir, 'gisdb')

    output_filename = '{}-viewshed.tif'.format(filename)

    transmitter_coords = str(x_transmitter) + ',' + str(y_transmitter)

    with Session(gisdb=gis_path, location="location",
                 create_opts="EPSG:27700"):

        print(gcore.parse_command("g.gisenv", flags="s"))
        gcore.run_command('r.external',
                          input=tile_path,
                          output=filename,
                          overwrite=True)
        gcore.run_command('r.external.out',
                          directory=output_dir,
                          format="GTiff")
        gcore.run_command('g.region', raster=filename)
        gcore.run_command('r.viewshed',
                          flags='b',
                          input=filename,
                          output=output_filename,
                          coordinates=transmitter_coords,
                          overwrite=True)
        gcore.run_command('r.external.out', flags="r")
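A hedged usage sketch of `generate_viewshed` (paths and transmitter coordinates are hypothetical; EPSG:27700 implies British National Grid easting/northing):

# hypothetical call: viewshed from a transmitter on a single DEM tile
generate_viewshed(
    x_transmitter=530000,
    y_transmitter=180000,
    output_dir="results/viewsheds",
    filename="tile_TQ38",
    tile_path="data/dem/TQ38.tif",
)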
Example 7
def grass_setup():
    """
    this function initializes the GRASS session and creates the mapset with the user-specified variables
    """
    user_data()
    location_name = GrassData.location_name
    crs = GrassData.crs

    grassbin = GRASSBIN_import()
    if grassbin == "grass7bin_win":
        print("You're using Windows, this module most likely will not work properly, please use a linux-based OS!!!")
    os.environ['GRASSBIN'] = grassbin
    gisbase = get_grass_gisbase()
    os.environ['GISBASE'] = gisbase
    sys.path.append(os.path.join(os.environ['GISBASE'], 'bin'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'lib'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'scripts'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'etc', 'python'))

    # set folder to proj_lib:
    os.environ['PROJ_LIB'] = '/usr/share/proj'

    gisdb = Paths.grass_path
    mapset = "PERMANENT"
    ##################################################################################
    # open a GRASS session and create the mapset if it does not yet exist
    with Session(gisdb=gisdb,
                 location=GrassData.location_name,
                 create_opts='EPSG:' + crs):
        pass
    ##################################################################################
    # launch session
    gsetup.init(gisbase, gisdb, location_name, mapset)
    print(f"Current GRASS GIS 7 environment: {gscript.gisenv()}")
Example 8
def viewshed(point, path_input, path_output, tile_name, max_distance, crs):
    """
    Perform a viewshed using GRASS.

    Parameters
    ----------
    point : tuple
        The point being queried.
    path_input : string
        The path to the input raster tile.
    path_output : string
        The directory path for the output folder.
    tile_name : string
        The name allocated to the viewshed tile.
    max_distance : int
        The maximum distance a path can be.
    crs : string
        The coordinate reference system in use.

    Returns
    -------
    None
        The viewshed raster is written out as a GeoTiff via r.external.out.

    """
    with Session(gisdb=path_output, location="location", create_opts=crs):

        # print('parse command')
        # print(gcore.parse_command("g.gisenv", flags="s"))#, set="DEBUG=3"

        # print('r.external')
        # now link a GDAL supported raster file to a binary raster map layer,
        # from any GDAL supported raster map format, with an optional title.
        # The file is not imported but just registered as GRASS raster map.
        gcore.run_command('r.external', input=path_input, output=tile_name, overwrite=True)

        # print('r.external.out')
        #write out as geotiff
        gcore.run_command('r.external.out', directory='viewsheds', format="GTiff")

        # print('r.region')
        #manage the settings of the current geographic region
        gcore.run_command('g.region', raster=tile_name)

        # print('r.viewshed')
        #for each point in the output that is NULL: No LOS
        gcore.run_command('r.viewshed', #flags='e',
                input=tile_name,
                output='{}.tif'.format(tile_name),
                coordinates=[point[0], point[1]],
                observer_elevation=30,
                target_elevation=30,
                memory=5000,
                overwrite=True,
                quiet=True,
                max_distance=max_distance,
                # verbose=True
        )
Example 9
def run_module(
    clcraster: str,
    popdensity: str,
    wwtp_plants: str,
    gisdb: str,
    out_folder: str,
    location: str = "location",
    mapset: str = None,
    overwrite: bool = False,
    dist_min: int = 150,
    dist_max: int = 1000,
):
    """
    Create location with input data in PERMANENT and genereted raster in
    test. The region is setting according to the conductivity raster file
    Args:
        data: dictionary with args to pass to GRASS module
        path_gisdb: path where temporary file are created
        out_folder: folder where output file are saved
    """
    mapset = f"mset_{secrets.token_urlsafe(8)}" if mapset is None else mapset
    # download default data set
    # TODO:
    rasters = dict(clc=clcraster, popdens=popdensity)

    # create location and import default data set
    actions = [(clc2urban, (CLC, URB, [111, 112, 121], overwrite))]
    create_location(gisdb,
                    location,
                    rasters=rasters,
                    overwrite=overwrite,
                    actions=actions)

    # create a new mapset for computation and importing the wwtp points
    with Session(gisdb=os.fspath(gisdb),
                 location=location,
                 mapset=mapset,
                 create_opts="") as sess:
        print(
            f"» Created a new temporary mapset for the computations: {mapset}")
        # import wwtp points
        run_command("v.import",
                    input=str(wwtp_plants),
                    output=WWTP,
                    overwrite=overwrite)
        try:
            tech_potential(
                wwtp_plants=WWTP,
                urban_areas=URB,
                dist_min=dist_min,
                dist_max=dist_max,
                overwrite=overwrite,
            )
        except Exception as exc:
            print(f"Issue in mapset: {sess._kwopen['mapset']}")
            raise exc
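A hedged usage sketch of `run_module` (file names are hypothetical; `CLC`, `URB` and `WWTP` are assumed to be map-name constants defined at module level):

# hypothetical call: compute the technical potential around WWTP points
run_module(
    clcraster="clc_2018.tif",
    popdensity="pop_density.tif",
    wwtp_plants="wwtp_points.gpkg",
    gisdb="/tmp/gisdb",
    out_folder="out",
    overwrite=True,
)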
Example 10
def viewshed(vrt,list_of_dicts,distance,point,
             observer_height,grassdb,burn_viewshed_rst,total_cells_output):
    ## split up - first function
    with rasterio.open(vrt) as src_rst:

        dsm = src_rst.read()
        out_meta = src_rst.meta.copy()
        print(out_meta)
        out_meta.update(dtype=rasterio.int16,driver='GTiff') 
        mask = features.geometry_mask(
                                      [feature["feature"]["geometry"] for feature in
                                       list_of_dicts],
                                       src_rst.shape,
                                       transform=src_rst.transform,
                                       all_touched=True, 
                                       invert=True)
        new_dsm = np.copy(np.squeeze(dsm))  # Not sure why, but the DSM has an extra dimension,
                                            # which I remove with squeeze so it matches the result DSM
    ## split up - second function, maybe
        with rasterio.Env():
            result = features.rasterize(
                ((feature['feature']['geometry'],
                  int(feature['feature']['properties']['hoejde']) * 1000)
                 for feature in list_of_dicts),
                out_shape=src_rst.shape,
                transform=src_rst.transform,
                all_touched=True)
            new_dsm[mask] = result[mask] 

    ## split up - third function
            with Session(gisdb=grassdb, location="test",create_opts=vrt):
                import grass.script.array as garray
                r_viewshed = Module('r.viewshed')
                r_out_gdal = Module('r.out.gdal')
                r_stats = Module('r.stats')
                r_univar = Module('r.univar')
                from_np_raster = garray.array()
                from_np_raster[...] = new_dsm
                from_np_raster.write('ny_rast',overwrite=True)
                print(from_np_raster)
                gcore.run_command('r.viewshed', overwrite=True, memory=2000,
                                  input='ny_rast', output='viewshed',
                                  max_distance=distance, coordinates=point,
                                  observer_elevation=observer_height)
                r_stats(flags='nc',overwrite=True,input='viewshed',output=total_cells_output)
                ## figure out how r_stats can output to something that
                ## Python can read directly
                with open(total_cells_output) as tcls:
                    counts = []
                    for line in tcls:
                        nbr = int(line.split()[-1])
                        counts.append(nbr)
                # summary = r_univar(map='viewshed')
                #r_viewshed(input=from_np_raster, output='viewshed', max_distance=1000, memory=1424, coordinates=(701495,6201503), observer_elevation=500.0)
                r_out_gdal(overwrite=True, input='viewshed', output=burn_viewshed_rst)
    return sum(counts) #visible_cells
Example 11
def main(index, AOI, grass_params):
    PERMANENT = Session()
    PERMANENT.open(gisdb=grass_params[0],
                   location=grass_params[1],
                   mapset=grass_params[2])

    aoi = uniq_name('aoi')
    tiles = uniq_name('tiles')
    intersection = uniq_name('common')

    try:
        v.in_ogr(input=index, output=tiles)
        v.in_ogr(input=AOI, output=aoi)
        v.select(binput=aoi,
                 ainput=tiles,
                 output=intersection,
                 operator='overlap')
        v.db_select(map=intersection, columns='location', flags='c')
    finally:
        g.remove(type='vector', name=tiles, flags='f')
        g.remove(type='vector', name=aoi, flags='f')
        g.remove(type='vector', name=intersection, flags='f')
        PERMANENT.close()
Example 12
def compute_solar_irradiation(inputFile, outputFile, day_of_year, crs='32630'):
    # Define grass working set
    GRASS_GISDB = 'grassdata'
    GRASS_LOCATION = 'GEOPROCESSING'
    GRASS_MAPSET = 'PERMANENT'
    GRASS_ELEVATIONS_FILENAME = 'ELEVATIONS'

    os.environ.update(dict(GRASS_COMPRESS_NULLS='1'))

    # Clean previously processed data
    if os.path.isdir(GRASS_GISDB):
        shutil.rmtree(GRASS_GISDB)
    with Session(gisdb=GRASS_GISDB,
                 location=GRASS_LOCATION,
                 mapset=GRASS_MAPSET,
                 create_opts='EPSG:32630') as ses:

        # Set project projection to match elevation raster projection
        g.proj(epsg=crs, flags='c')

        # Load raster file into working directory
        r.import_(input=inputFile, output=GRASS_ELEVATIONS_FILENAME, flags='o')

        # Set project region to match raster region
        g.region(raster=GRASS_ELEVATIONS_FILENAME, flags='s')
        # Calculate solar irradiation
        gscript.run_command('r.slope.aspect',
                            elevation=GRASS_ELEVATIONS_FILENAME,
                            slope='slope',
                            aspect='aspect')
        gscript.run_command('r.sun',
                            elevation=GRASS_ELEVATIONS_FILENAME,
                            slope='slope',
                            aspect='aspect',
                            beam_rad='beam',
                            step=1,
                            day=day_of_year)

        # Get extraterrestrial irradiation from history metadata
        regex = re.compile(r'\d+\.\d+')
        output = gscript.read_command("r.info", flags="h", map=["beam"])
        splits = str(output).split('\n')
        line = next(filter(lambda line: 'Extraterrestrial' in line, splits))
        extraterrestrial_irradiance = float(regex.search(line)[0])

        # Export generated results into a GeoTiff file
        if os.path.isfile(outputFile):
            os.remove(outputFile)

        r.out_gdal(input='beam', output=outputFile)
        return extraterrestrial_irradiance
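A hedged usage sketch of `compute_solar_irradiation` (file names and day of year are hypothetical; the input raster is assumed to match the default EPSG:32630):

# hypothetical call: beam irradiation for the summer solstice (day 172)
extraterrestrial = compute_solar_irradiation(
    inputFile="elevations_utm30n.tif",
    outputFile="beam_doy172.tif",
    day_of_year=172,
)
print(f"Extraterrestrial irradiance: {extraterrestrial}")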
Example 13
def load_raster_files(list_of_tiles, output_dir, x_transmitter, y_transmitter):

    gis_path = os.path.join(output_dir, 'gisdb')

    transmitter_coords = str(x_transmitter) + ',' + str(y_transmitter)

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with Session(gisdb=gis_path, location="location",
                 create_opts="EPSG:27700"):

        print(gcore.parse_command("g.gisenv", flags="s"))

        for tile in list_of_tiles:
            base, ext = os.path.splitext(os.path.split(tile)[1])
            tile_name = "tile_%s" % base
            gcore.run_command('r.import',
                              input=tile,
                              output=tile_name,
                              overwrite=True)

        rast_list = gcore.read_command('g.list',
                                       type='rast',
                                       pattern="tile_*",
                                       separator="comma").strip()

        gcore.run_command('r.external.out', flags="r")

        gcore.run_command('r.patch',
                          input=rast_list,
                          output="all_tiles",
                          overwrite=True)
        gcore.run_command('r.viewshed',
                          flags='b',
                          input="all_tiles",
                          output="viewshed",
                          coordinates=transmitter_coords,
                          overwrite=True)
        gcore.run_command('r.external.out', flags="r")

    return print('files loaded')
Example 14
def firsttimeGRASS(infiles, adminfile, maskfile):
    """
    Run a maxlikelihood unsupervised classification on the data
    nclasses: number of expected classes
    infiles: list of raster files to import and process
    firstime: if firsttime, it will import all files in GRASS
    """
    from grass_session import Session
    from grass.script import core as gcore
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.pygrass.modules.shortcuts import vector as v
    from grass.pygrass.modules.shortcuts import general as g
    from grass.pygrass.modules.shortcuts import imagery as i
    # create a new location from EPSG code (can also be a GeoTIFF or SHP or ... file)
    with Session(gisdb="/tmp", location="loc", create_opts="EPSG:4326"):
        # First run, needs to import the files and create a mask
        # Import admin boundary
        #v.import_(input=adminfile,output="admin",quiet=True,superquiet=True)
        gcore.parse_command("v.import",
                            input=adminfile,
                            output="admin",
                            quiet=True)
        # Set computational region to admin boundary
        g.region(flags="s", vector="admin", quiet=True)
        # Keep only file name for output
        outmf = maskfile.split("/")[-1]
        # Import Mask file
        r.in_gdal(input=maskfile, output=outmf, quiet=True)
        # Apply Mask
        r.mask(raster=outmf, maskcats="0", quiet=True)
        # Set computational resolution to mask pixel size
        g.region(flags="s", raster=outmf, quiet=True)
        # Import files
        for f in infiles:
            # Keep only file name for output
            outf = f.split("/")[-1]
            # Landsat files not in Geo lat long needs reproj on import
            #r.import_(input=f,output=outf,quiet=True)
            gcore.parse_command("r.import", input=f, output=outf, quiet=True)
            # Create group
            i.group(group="l8", subgroup="l8", input=outf, quiet=True)
Example 15
def create_GRASS_GIS_location():
    grassbin = GRASSBIN_import()
    os.environ['GRASSBIN'] = grassbin

    from grass_session import Session, get_grass_gisbase

    gisbase = get_grass_gisbase()

    os.environ['GISBASE'] = gisbase
    sys.path.append(os.path.join(os.environ['GISBASE'], 'bin'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'lib'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'scripts'))
    sys.path.append(os.path.join(os.environ['GISBASE'], 'etc', 'python'))

    # set folder to proj_lib:
    os.environ['PROJ_LIB'] = '/usr/share/proj'

    # to add GRASS to the path permanently, edit ~/.bashrc (e.g. "vim ~/.bashrc")
    import grass.script as gscript
    import grass.script.setup as gsetup
    ##################################################################################
    # user-defined settings
    ### Linux path <- add path here for each Linux user and do not overwrite!
    gisdb = '/home/user/grassdata'

    ### Windows path <- add path here for each Windows user and do not overwrite!
    # gisdb = 'F:/GEO450_GRASS/test_python'
    location = 'test3'
    mapset = 'PERMANENT'
    ##################################################################################
    # open a GRASS session and create the mapset if it does not yet exist
    with Session(
            gisdb=gisdb,
            location=location,
            # mapset=mapset,
            create_opts='EPSG:32632') as session:
        pass
    ##################################################################################
    # launch session
    gsetup.init(gisbase, gisdb, location, mapset)
Example 16
def create_location(
    gisdb: str,
    location: str,
    epsg: int = 3035,
    overwrite: bool = False,
    rasters: Dict[str, str] = None,
    vectors: Dict[str, str] = None,
    actions: List[Any] = None,
):
    os.makedirs(gisdb, exist_ok=True)

    # initialize / handle empty values
    rasters = rasters if rasters else {}
    vectors = vectors if vectors else {}
    actions = [] if actions is None else actions

    # define location path
    loc = os.path.join(gisdb, location)

    if overwrite and os.path.exists(loc):
        print(f"» Remove old location ({loc})")
        shutil.rmtree(loc)

    if not os.path.exists(loc):
        print(f"» Created a new location ({loc})")
        with Session(
                gisdb=os.fspath(gisdb),
                location=location,
                mapset="PERMANENT",
                create_opts=f"EPSG:{epsg}",
        ):
            print("» Import rasters")
            load_rasters(rasters, overwrite=overwrite)
            print("» Import vectors")
            load_vectors(vectors, overwrite=overwrite)
            print("» Apply actions")
            for faction, fargs in actions:
                print(f"» Apply {faction.__name__}(*{fargs})")
                faction(*fargs)
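Example 9 above calls `create_location` with an `actions` list of `(callable, args)` tuples; a hedged standalone sketch of such a call (paths and the `set_null` helper are hypothetical, and `run_command` is assumed to be grass.script's run_command as used in Example 9):

# hypothetical action: set one category of the imported raster to NULL
def set_null(raster, value):
    run_command("r.null", map=raster, setnull=value)

create_location(
    gisdb="/tmp/gisdb",
    location="eu_laea",
    epsg=3035,
    rasters={"clc": "/data/clc_2018.tif"},
    actions=[(set_null, ("clc", 48))],
)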
Example 17
def get_flooded_assets(asset_map, flood_maps_dict, output_map):
    """Use GRASS to get max flood depth value at each asset
    """
    with Session(gisdb="/tmp", location="loss_model", create_opts=CRS):
        # import maps
        vect_map = 'assets'
        gscript.run_command('v.import', input=asset_map, output=vect_map, overwrite=True)
        for return_period, flood_map in flood_maps_dict.items():
            map_name = "flood_map_{}".format(return_period)
            gscript.run_command('r.external', input=flood_map, output=map_name,
                                overwrite=True)
            # set computational extent
            gscript.run_command('g.region', raster=map_name, vector=vect_map)
            # Set negative depth values to zero
            map_null = map_name + "_fix"
            exp = "{o} = if({i} <= 0, 0, {i})".format(o=map_null, i=map_name)
            gscript.run_command('r.mapcalc', expression=exp, overwrite=True)
            # write raster stats as column in the vector
            gscript.run_command('v.rast.stats', map=vect_map, raster=map_null,
                                column_prefix=return_period, method='maximum',
                                overwrite=True)
        # export new map
        gscript.run_command('v.out.ogr', input=vect_map, format='GPKG',
                            output=output_map, overwrite=True)
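A hedged usage sketch of `get_flooded_assets` (file names and return-period keys are hypothetical; `CRS` is assumed to be an EPSG string defined at module level):

# hypothetical call: annotate assets with the maximum depth of two flood scenarios
flood_maps = {
    "rp020": "floods/depth_rp020.tif",
    "rp100": "floods/depth_rp100.tif",
}
get_flooded_assets("assets.gpkg", flood_maps, "assets_max_depth.gpkg")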
Example 18
def Return_Raster_As_Array(grassdb, grass_location, raster_mn):
    """Transfer an rater in grass database into np array
    Parameters
    ----------
    grassdb         : string
    Full path to a grass database
    grass_location  : string
    location name in that grass database
    raster_mn       : string
    raster name

    Returns:
    -------
    Array            : array
    np array of the raster.

    """
    PERMANENT = Session()
    PERMANENT.open(gisdb=grassdb, location=grass_location, create_opts="")
    Array = copy.deepcopy(garray.array(mapname=raster_mn))
    PERMANENT.close()
    return Array
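A hedged usage sketch of `Return_Raster_As_Array` (paths and the raster name are hypothetical):

# hypothetical call: load a DEM raster from an existing location as a NumPy array
dem_array = Return_Raster_As_Array("/data/grassdb", "watershed_loc", "dem")
print(dem_array.shape, dem_array.min(), dem_array.max())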
Example 19
import os

# import grass_session
from grass_session import Session

# import grass python libraries
from grass.pygrass.modules.shortcuts import general as g


# set some common environmental variables, like:
os.environ.update(dict(GRASS_COMPRESS_NULLS='1',
                       GRASS_COMPRESSOR='ZSTD'))

# create a PERMANENT mapset
# create a Session instance
PERMANENT = Session()
PERMANENT.open(gisdb='/tmp', location='grassdb_test',
               create_opts='EPSG:25832')


# execute some command inside PERMANENT
g.mapsets(flags="l")
g.list(type="raster", flags="m")

# exit from PERMANENT
PERMANENT.close()

# create a new mapset in the same location
user = Session()
user.open(gisdb='/tmp', location='grassdb_test', mapset='user',
          create_opts='')

# exit from the user mapset
user.close()
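The same two steps can also be written with `Session` as a context manager, which closes the mapset automatically; a sketch equivalent to the PERMANENT block above:

# equivalent sketch using the context-manager form of Session
with Session(gisdb='/tmp', location='grassdb_test',
             create_opts='EPSG:25832'):
    g.mapsets(flags="l")
    g.list(type="raster", flags="m")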
Example 20
def add_obs_into_existing_watershed_delineation(
    grassdb,
    grass_location,
    qgis_prefix_path,
    input_geo_names,
    path_obsfile_in,
    obs_attributes=[],
    search_radius=100,
    path_sub_reg_outlets_v="#",
    max_memroy=1024 * 4,
    pourpoints_add_obs="pourpoints_add_obs",
    snapped_obs_points="snapped_obs_points",
):

    fdr_arcgis = input_geo_names["fdr_arcgis"]
    fdr_grass = input_geo_names["fdr_grass"]
    str_r = input_geo_names["str_r"]
    str_v = input_geo_names["str_v"]
    acc = input_geo_names["acc"]
    cat_no_lake = input_geo_names["cat_no_lake"]
    mask = input_geo_names["mask"]
    dem = input_geo_names["dem"]
    pourpoints_with_lakes = input_geo_names["pourpoints_with_lakes"]
    lake_outflow_pourpoints = input_geo_names["lake_outflow_pourpoints"]
    cat_add_lake = input_geo_names["cat_add_lake"]

    # define internal file names
    obsname = Internal_Constant_Names["obs"]

    # preprocess the observation point inputs
    preprocessing_obs_point(
        mask=mask,
        path_obsin_in=path_obsfile_in,
        obs_attributes=obs_attributes,
        grassdb=grassdb,
        grass_location=grass_location,
        qgis_prefix_path=qgis_prefix_path,
        gis_platform="qgis",
        obsname=obsname + "t1",
    )

    import grass.script as grass
    import grass.script.setup as gsetup
    from grass.pygrass.modules import Module
    from grass.pygrass.modules.shortcuts import general as g
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.script import array as garray
    from grass.script import core as gcore
    from grass_session import Session

    os.environ.update(
        dict(GRASS_COMPRESS_NULLS="1",
             GRASS_COMPRESSOR="ZSTD",
             GRASS_VERBOSE="1"))
    PERMANENT = Session()
    PERMANENT.open(gisdb=grassdb, location=grass_location, create_opts="")

    con = sqlite3.connect(
        os.path.join(grassdb, grass_location, "PERMANENT", "sqlite",
                     "sqlite.db"))

    # obtain maximum current cat id

    catids, temp = generate_stats_list_from_grass_raster(
        grass, mode=1, input_a=pourpoints_with_lakes)

    maxcatid = max(catids)
    # snap obs points
    grass_raster_r_stream_snap(
        grass,
        input=obsname + "t1",
        output=obsname + "_snap",
        stream_rast=str_r,
        accumulation=acc,
        radius=search_radius,
        memory=max_memroy,
    )

    # convert snapped observation points to a raster using their category values
    grass_raster_v_to_raster(
        grass,
        input=obsname + "_snap",
        output=snapped_obs_points,
        column="#",
        use="cat",
    )
    grass_raster_r_to_vect(
        grass,
        input=snapped_obs_points,
        output=snapped_obs_points,
        type="point",
        flags="v",
    )
    grass_raster_v_db_join(
        grass,
        map=snapped_obs_points,
        column="cat",
        other_table=obsname + "t1",
        other_column="cat",
    )
    exp = obs_attributes[0] + "n int"
    grass.run_command("v.db.addcolumn",
                      map=snapped_obs_points,
                      columns=obs_attributes[0] + "n int")
    grass.run_command(
        "v.db.update",
        map=snapped_obs_points,
        column=obs_attributes[0] + "n",
        qcol=obs_attributes[0] + " + " + str(int(maxcatid) + 1),
    )

    grass.run_command(
        "v.out.ogr",
        input=snapped_obs_points,
        output=os.path.join(grassdb, snapped_obs_points + ".shp"),
        format="ESRI_Shapefile",
        overwrite=True,
    )

    grass_raster_v_to_raster(
        grass,
        input=snapped_obs_points,
        output=obsname + "1",
        column=obs_attributes[0] + "n",
        use="attr",
    )

    if path_sub_reg_outlets_v != "#":
        # unpack subregion outlet point
        grass_raster_v_unpack(grass,
                              input=path_sub_reg_outlets_v,
                              output="Sub_reg_outlets_pt")
        # convert it to raster
        grass_raster_v_to_raster(
            grass,
            input="Sub_reg_outlets_pt",
            output="Sub_reg_outlets",
            column="reg_subid",
            use="attr",
        )
        # add them into the observation point raster
        exp = "%s = if(isnull(int(Sub_reg_outlets)),%s,Sub_reg_outlets)" % (
            obsname,
            obsname + "1",
        )
        grass_raster_r_mapcalc(
            grass,
            expression=exp,
        )

        grass_raster_setnull(
            grass,
            raster_nm=obsname,
            null_values=[-9999, 0],
            create_new_raster=False,
            new_raster_nm="#",
        )
    else:
        grass.run_command("g.copy",
                          rast=(obsname + "1", obsname),
                          overwrite=True)
    ####

    # remove obs points located in lake catchments
    if lake_outflow_pourpoints != "#":

        ##### obtain lake id and corresponding catchment id
        lake_id, cat_id = generate_stats_list_from_grass_raster(
            grass,
            mode=2,
            input_a=lake_outflow_pourpoints,
            input_b=pourpoints_with_lakes,
        )

        lake_new_cat_ids = np.column_stack((lake_id, cat_id))
        grass.run_command("g.copy",
                          rast=(obsname, obsname + "2"),
                          overwrite=True)
        # remove obs that are located within the lake catchments
        obsid, cat_add_lake_id = generate_stats_list_from_grass_raster(
            grass, mode=2, input_a=obsname, input_b=cat_add_lake)
        lakecat_obs = np.column_stack((cat_add_lake_id, obsid))
        obsinlake_mask = np.isin(lakecat_obs[:, 0], lake_new_cat_ids[:, 1])
        obsid_inlake = lakecat_obs[obsinlake_mask, 1]
        if len(obsid_inlake) > 0:
            grass.run_command("r.null",
                              map=obsname + "2",
                              setnull=obsid_inlake,
                              overwrite=True)

        # combine lake and obs pourpoints
        # combine obsoutlets and outlet from cat no lake
        exp = "'%s' =if(isnull(int(%s)),%s,%s)" % (
            pourpoints_add_obs,
            pourpoints_with_lakes,
            obsname + "2",
            pourpoints_with_lakes,
        )
        grass.run_command("r.mapcalc", expression=exp, overwrite=True)

    else:
        # combine obsoutlets and outlet from cat no lake
        exp = "'%s' =if(isnull(int(%s)),%s,%s)" % (
            pourpoints_add_obs,
            pourpoints_with_lakes,
            obsname,
            pourpoints_with_lakes,
        )
        grass.run_command("r.mapcalc", expression=exp, overwrite=True)

    return
Example 21
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 11 07:41:19 2017

@author: pietro
"""
import os

from grass_session import Session
from grass.script import core as gcore

# export GRASS compression settings as environment variables so GRASS picks them up
os.environ.update(dict(GRASS_COMPRESSOR="ZSTD",
                       GRASS_ZLIB_LEVEL="6",
                       GRASS_COMPRESS_NULLS="1"))

print("creating location")
with Session(gisdb="/tmp", location="location", create_opts="EPSG:4326"):
    print("Inside grass session")
    print(gcore.parse_command("g.gisenv", flags="s"))
print("location created!")

print("creating mapset")
with Session(gisdb="/tmp", location="location", mapset="test", create_opts=""):
    print(gcore.parse_command("g.gisenv", flags="s"))
print("mapset created!")
print("done!")
Example 22
# Import GRASS Python bindings
# https://github.com/zarch/grass-session

from grass_session import Session
import grass.script as gs

with Session(gisdb="/grassdata/",
             location="test",
             mapset="PERMANENT",
             create_opts="EPSG:25832"):
    print("Tests for PROJ, GDAL, PDAL, GRASS")

    # simple test: just scan the LAZ file
    gs.run_command(
        "r.in.pdal",
        input="/tmp/simple.laz",
        output="count_1",
        method="n",
        flags="s",
        resolution=1,
        overwrite=True,
    )
Example 23
import numpy as np
from grass_session import Session

# Read list of scenes and bands from i.sentinel.import output
register_output = '/mnt/falk-ns9693k/data/projects/FALK/reg_test.txt'
register = np.genfromtxt(register_output, delimiter='|', dtype=str)
register[:, 0][0]

# Define mapset
mapset = 'S2_L2A_T32VNR_tcor'
# Define hardcoded map names
dem = 'dem_10m_nosefi_float@S2_L2A_T32VNR'
illu = 'illumination'

with Session(gisdb='/mnt/falk-ns9693k/data/NINA/grass',
             location='utm32n',
             mapset=mapset):
    import grass.script.core as gscript
    gscript.run_command('g.mapsets', operation='add', mapset='S2_L2A_T32VNR')
    for scene in register[:, 0]:
        print(scene)
        if '_SCL_' in scene or '_60m' in scene:
            continue
        gscript.read_command('g.region', raster=scene)
        gscript.run_command('r.mapcalc',
                            overwrite=True,
                            verbose=True,
                            expression='{0}_double={0}/10000.0'.format(scene))
        if '10m' not in scene:
            gscript.read_command('g.region', raster=dem)
            gscript.run_command('r.resamp.interp',
Example 24
def calculate_flood_plain_manning_n(
    grassdb,
    grass_location,
    qgis_prefix_path,
    catinfo,
    input_geo_names,
    path_landuse="#",
    path_landuse_info="#",
):

    mask = input_geo_names["mask"]

    preprocess_raster(
        grassdb=grassdb,
        grass_location=grass_location,
        qgis_prefix_path=qgis_prefix_path,
        mask=mask,
        raster_path=path_landuse,
        raster_name="landuse",
    )

    cat_riv_info = input_geo_names["cat_riv_info"]
    outlet_pt_info = input_geo_names["outlet_pt_info"]

    import grass.script as grass
    import grass.script.setup as gsetup
    from grass.pygrass.modules import Module
    from grass.pygrass.modules.shortcuts import general as g
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.script import array as garray
    from grass.script import core as gcore
    from grass_session import Session

    os.environ.update(
        dict(GRASS_COMPRESS_NULLS="1",
             GRASS_COMPRESSOR="ZSTD",
             GRASS_VERBOSE="1"))
    PERMANENT = Session()
    PERMANENT.open(gisdb=grassdb, location=grass_location, create_opts="")

    con = sqlite3.connect(
        os.path.join(grassdb, grass_location, "PERMANENT", "sqlite",
                     "sqlite.db"))

    # read Manning's n and landuse type table
    landuse_and_n_table = pd.read_csv(path_landuse_info, sep=",")

    write_grass_reclass_rule_from_table(
        landuse_and_n_table.values,
        os.path.join(grassdb, "landuse_manning_rules.csv"))

    # virtually link the landuse dataset (r.external)

    grass_raster_r_external(grass,
                            input=os.path.join(grassdb,
                                               "landuse_proj" + ".tif"),
                            output="landuse_in")
    # clip raster with mask in grass env
    grass_raster_r_clip(grass, input="landuse_in", output="landuse")
    # reclass landuse to manning's coefficient value *1000
    grass_raster_r_reclass(
        grass,
        input="landuse",
        output="landuse_Manning1",
        rules=os.path.join(grassdb, "landuse_manning_rules.csv"),
    )
    # calculate the real Manning's coefficient for each landuse grid
    grass_raster_r_mapcalc(
        grass, expression="landuse_Manning = float(landuse_Manning1)/1000")

    ### add the averaged Manning's coefficient along the river network into the river attribute table
    grass.run_command(
        "v.rast.stats",
        map=cat_riv_info,
        raster="landuse_Manning",
        column_prefix="mn",
        method=["average"],
    )

    grass.run_command("v.what.rast",
                      map=outlet_pt_info,
                      raster="landuse_Manning",
                      column="mn_average")

    # grass.run_command(
    #     "v.rast.stats",
    #     map=cat_ply_info,
    #     raster="landuse_Manning",
    #     column_prefix="mn",
    #     method=["average"],
    # )

    ### read the averaged Manning's n for each river segment
    sqlstat = "SELECT Gridcode,mn_average FROM %s" % (cat_riv_info)
    rivleninfo = pd.read_sql_query(sqlstat, con)
    rivleninfo = rivleninfo.fillna(-9999)

    ### read the averaged Manning's n at each outlet point
    sqlstat = "SELECT SubId,mn_average FROM %s" % (outlet_pt_info)
    outletpoint = pd.read_sql_query(sqlstat, con)
    outletpoint = outletpoint.fillna(-9999)

    for i in range(0, len(rivleninfo)):
        catid = rivleninfo["Gridcode"].values[i]
        catrow = catinfo["SubId"] == catid
        floodn = rivleninfo["mn_average"].values[i]

        if floodn < 0:
            floodn = outletpoint.loc[outletpoint['SubId'] == catid,
                                     "mn_average"].values[0]

        catinfo.loc[catrow, "FloodP_n"] = floodn

    PERMANENT.close()
    return catinfo
Example 25
def generatesubdomainmaskandinfo(
    Out_Sub_Reg_Dem_Folder,
    input_geo_names,
    grassdb,
    grass_location,
    qgis_prefix_path,
    path_bkfwidthdepth,
    bkfwd_attributes,
):
    ###
    dem = input_geo_names["dem"]
    cat_add_lake = input_geo_names["cat_add_lake"]
    ndir_Arcgis = input_geo_names["nfdr_arcgis"]
    acc_grass = input_geo_names["acc"]
    str_r = input_geo_names["str_r"]
    outlet_pt_info = "outlet_pt_info"

    maximum_obs_id = 80000
    tempfolder = os.path.join(
        tempfile.gettempdir(),
        "basinmaker_subreg" + str(np.random.randint(1, 10000 + 1)),
    )
    if not os.path.exists(tempfolder):
        os.makedirs(tempfolder)

    k = -1
    c = -1
    if path_bkfwidthdepth != "#":
        k, c = calculate_bankfull_width_depth_from_polyline(
            grassdb=grassdb,
            grass_location=grass_location,
            qgis_prefix_path=qgis_prefix_path,
            path_bkfwidthdepth=path_bkfwidthdepth,
            bkfwd_attributes=bkfwd_attributes,
            catinfo=[],
            input_geo_names=input_geo_names,
            k_in=-1,
            c_in=-1,
            return_k_c_only=True,
        )

    #### generate subregion outlet points and subregion info table
    QgsApplication.setPrefixPath(qgis_prefix_path, True)
    Qgs = QgsApplication([], False)
    Qgs.initQgis()
    from processing.core.Processing import Processing
    from processing.tools import dataobjects
    from qgis import processing

    feedback = QgsProcessingFeedback()
    Processing.initialize()
    QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
    context = dataobjects.createContext()
    context.setInvalidGeometryCheck(QgsFeatureRequest.GeometryNoCheck)

    import grass.script as grass
    import grass.script.setup as gsetup
    from grass.pygrass.modules import Module
    from grass.pygrass.modules.shortcuts import general as g
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.script import array as garray
    from grass.script import core as gcore
    from grass_session import Session

    os.environ.update(
        dict(GRASS_COMPRESS_NULLS="1", GRASS_COMPRESSOR="ZSTD", GRASS_VERBOSE="-1")
    )
    con = sqlite3.connect(
        os.path.join(grassdb, grass_location, "PERMANENT", "sqlite", "sqlite.db")
    )

    PERMANENT = Session()
    PERMANENT.open(gisdb=grassdb, location=grass_location, create_opts="")
    grass.run_command("r.mask", raster=dem, maskcats="*", overwrite=True)
    grass.run_command("r.null", map=cat_add_lake, setnull=-9999)

    exp = "%s = if(isnull(%s),null(),%s)" % (
        "river_r",
        str_r,
        cat_add_lake,
    )
    grass.run_command("r.mapcalc", expression=exp, overwrite=True)

    routing_temp = generate_routing_info_of_catchments(
        grass,
        con,
        cat=cat_add_lake,
        acc=acc_grass,
        Name="Final",
        str="river_r",
        garray=garray,
    )

    grass.run_command("g.copy", vector=("Final_OL_v", outlet_pt_info), overwrite=True)
    grass.run_command(
        "g.copy", vector=("Final_IL_v_c", "sub_reg_inlet"), overwrite=True
    )
    Paths_Finalcat_ply = []

    sqlstat = (
        "SELECT SubId, DowSubId,ILSubIdmax,ILSubIdmin,MaxAcc_cat,ILpt_ID FROM %s"
        % (outlet_pt_info)
    )
    outletinfo = pd.read_sql_query(sqlstat, con)
    outletinfo = outletinfo.fillna(-1)
    outletinfo = outletinfo.loc[outletinfo["SubId"] > 0]

    sqlstat = "SELECT ILpt_ID,SubId_I FROM %s" % ("Final_IL_v_c")
    inletinfo = pd.read_sql_query(sqlstat, con)
    inletinfo = inletinfo.fillna(-1)

    # update watershed bankfull k and c first
    outletinfo["k"] = k
    outletinfo["c"] = c

    subregin_info = pd.DataFrame(
        np.full(len(outletinfo), -9999), columns=["Sub_Reg_ID"]
    )
    subregin_info["Dow_Sub_Reg_Id"] = -9999
    subregin_info["ProjectNM"] = -9999
    subregin_info["Nun_Grids"] = -9999
    subregin_info["Ply_Name"] = -9999
    subregin_info["Max_ACC"] = -9999
    subregin_info["ILpt_ID"] = -9999

    for i in range(0, len(outletinfo)):

        basinid = int(outletinfo["SubId"].values[i])

        grass.run_command("r.mask", raster=dem, maskcats="*", overwrite=True)
        exp = "%s = if(%s == %s,%s,null())" % (
            "dem_reg_" + str(basinid),
            cat_add_lake,
            str(basinid),
            dem,
        )
        grass.run_command("r.mapcalc", expression=exp, overwrite=True)
        ####define mask
        grass.run_command(
            "r.mask", raster="dem_reg_" + str(basinid), maskcats="*", overwrite=True
        )
        grass.run_command(
            "r.out.gdal",
            input="MASK",
            output=os.path.join(tempfolder, "Mask1.tif"),
            format="GTiff",
            overwrite=True,
        )
        processing.run(
            "gdal:polygonize",
            {
                "INPUT": os.path.join(tempfolder, "Mask1.tif"),
                "BAND": 1,
                "FIELD": "DN",
                "EIGHT_CONNECTEDNESS": False,
                "EXTRA": "",
                "OUTPUT": os.path.join(
                    tempfolder, "HyMask_region_" + str(basinid) + ".shp"
                ),
            },
        )
        processing.run(
            "gdal:dissolve",
            {
                "INPUT": os.path.join(
                    tempfolder, "HyMask_region_" + str(basinid) + ".shp"
                ),
                "FIELD": "DN",
                "OUTPUT": os.path.join(
                    tempfolder, "HyMask_region_f" + str(basinid) + ".shp"
                ),
            },
        )
        processing.run(
            "gdal:dissolve",
            {
                "INPUT": os.path.join(
                    tempfolder, "HyMask_region_" + str(basinid) + ".shp"
                ),
                "FIELD": "DN",
                "OUTPUT": os.path.join(
                    Out_Sub_Reg_Dem_Folder,
                    "HyMask_region_"
                    + str(int(basinid + maximum_obs_id))
                    + "_nobuffer.shp",
                ),
            },
        )

        Paths_Finalcat_ply.append(
            os.path.join(
                Out_Sub_Reg_Dem_Folder,
                "HyMask_region_" + str(int(basinid + maximum_obs_id)) + "_nobuffer.shp",
            )
        )

        processing.run(
            "native:buffer",
            {
                "INPUT": os.path.join(
                    tempfolder, "HyMask_region_f" + str(basinid) + ".shp"
                ),
                "DISTANCE": 0.005,
                "SEGMENTS": 5,
                "END_CAP_STYLE": 0,
                "JOIN_STYLE": 0,
                "MITER_LIMIT": 2,
                "DISSOLVE": True,
                "OUTPUT": os.path.join(
                    Out_Sub_Reg_Dem_Folder,
                    "HyMask_region_" + str(int(basinid + maximum_obs_id)) + ".shp",
                ),
            },
        )

    qgis_vector_merge_vector_layers(
        processing,
        context,
        INPUT_Layer_List=Paths_Finalcat_ply,
        OUTPUT=os.path.join(Out_Sub_Reg_Dem_Folder, "subregion_ply.shp"),
    )

    grass.run_command("r.mask", raster=dem, maskcats="*", overwrite=True)

    problem_subid = []
    for i in range(0, len(outletinfo)):
        basinid = int(outletinfo["SubId"].values[i])
        dowsubreginid = int(outletinfo["DowSubId"].values[i])
        ILpt_ID = outletinfo["ILpt_ID"].values[i]
        subregin_info.loc[i, "ILpt_ID"] = ILpt_ID
        if len(inletinfo[inletinfo["ILpt_ID"] == ILpt_ID]["SubId_I"]) > 0:
            downsubid_inlet = inletinfo[inletinfo["ILpt_ID"] == ILpt_ID][
                "SubId_I"
            ].values[0]
            if dowsubreginid != downsubid_inlet:
                problem_subid.append(basinid)

        catacc = int(outletinfo["MaxAcc_cat"].values[i])

        subregin_info.loc[i, "ProjectNM"] = (
            "sub_reg" + "_" + str(int(basinid + maximum_obs_id))
        )
        subregin_info.loc[i, "Ply_Name"] = (
            "HyMask_region_" + str(int(basinid + maximum_obs_id)) + ".shp"
        )
        subregin_info.loc[i, "Max_ACC"] = catacc

        if basinid == dowsubreginid:
            subregin_info.loc[i, "Dow_Sub_Reg_Id"] = int(-1 + maximum_obs_id)
        else:
            subregin_info.loc[i, "Dow_Sub_Reg_Id"] = int(dowsubreginid + maximum_obs_id)
        subregin_info.loc[i, "Sub_Reg_ID"] = int(basinid + maximum_obs_id)

    subregin_info["k"] = k
    subregin_info["c"] = c

    ### remove subregions that do not contribute to the outlet
    ## find watershed outlet subregion
    #    subregin_info  = subregin_info.loc[subregin_info['Dow_Sub_Reg_Id'] == self.maximum_obs_id-1]
    subregin_info = subregin_info.sort_values(by="Max_ACC", ascending=False)
    outlet_reg_id = subregin_info["Sub_Reg_ID"].values[0]
    routing_info = (
        subregin_info[["Sub_Reg_ID", "Dow_Sub_Reg_Id"]].astype("float").values
    )
    needed_sub_reg_ids = defcat(routing_info, outlet_reg_id)

    mask = subregin_info["Sub_Reg_ID"].isin(needed_sub_reg_ids)
    subregin_info = subregin_info.loc[mask, :]
    #        subregin_info.drop(subregin_info.index[del_row_mask]) ###
    subregin_info.to_csv(
        os.path.join(Out_Sub_Reg_Dem_Folder, "Sub_reg_info.csv"),
        index=None,
        header=True,
    )

    grass.run_command("v.db.addcolumn", map=outlet_pt_info, columns="reg_subid int")
    grass.run_command("v.db.addcolumn", map=outlet_pt_info, columns="reg_dowid int")

    grass.run_command("v.db.addcolumn", map=outlet_pt_info, columns="sub_reg_id int")

    grass.run_command(
        "v.db.update",
        map=outlet_pt_info,
        column="reg_subid",
        qcol="SubId + " + str(maximum_obs_id),
    )

    grass.run_command(
        "v.db.update",
        map=outlet_pt_info,
        column="sub_reg_id",
        qcol="SubId + " + str(maximum_obs_id),
    )

    grass.run_command(
        "v.db.update",
        map=outlet_pt_info,
        column="reg_dowid",
        qcol="DowSubId + " + str(maximum_obs_id),
    )

    grass.run_command("v.db.addcolumn", map="sub_reg_inlet", columns="sub_reg_id int")

    grass.run_command(
        "v.db.update",
        map="sub_reg_inlet",
        column="sub_reg_id",
        qcol="SubId_I + " + str(maximum_obs_id),
    )

    grass.run_command(
        "v.out.ogr",
        input=outlet_pt_info,
        output=os.path.join(Out_Sub_Reg_Dem_Folder, outlet_pt_info + ".shp"),
        format="ESRI_Shapefile",
        overwrite=True,
    )

    grass.run_command(
        "v.out.ogr",
        input="sub_reg_inlet",
        output=os.path.join(Out_Sub_Reg_Dem_Folder, "sub_reg_inlet" + ".shp"),
        format="ESRI_Shapefile",
        overwrite=True,
    )

    grass.run_command(
        "v.pack",
        input=outlet_pt_info,
        output=os.path.join(Out_Sub_Reg_Dem_Folder, "Sub_Reg_Outlet_v" + ".pack"),
        overwrite=True,
    )
    grass.run_command(
        "r.pack",
        input="Final_OL",
        output=os.path.join(Out_Sub_Reg_Dem_Folder, "Sub_Reg_Outlet_r" + ".pack"),
        overwrite=True,
    )

    print("following subregion's inlet needs to be checked ")
    print(problem_subid)

    return
Example 26
#!/usr/bin/env python
# filename: test_session.py

from grass_session import Session
from grass.script import core as gcore
import grass.script as grass
from grass.pygrass.raster import RasterRow
import numpy as np

vrt = '/tmp/dsm/nine_cells3.vrt'
output_tif = '/tmp/dsm/viewshed.tif'

with Session(gisdb="/tmp/grassdb_test",
             location="test",
             create_opts=single_tif):
    print(gcore.parse_command("g.gisenv", flags="s"))

    dsm = RasterRow(vrt)
    dsm.exists()
    dsm.mapset
    dsm.open('r')
    np.array(dsm)

# gcore.run_command('r.in.gdal', input=single_tif, output='tempdsm')
#   gcore.run_command('r.viewshed', input='tempdsm', output='viewshed', max_distance=1000, memory=1424, coordinates=(701495,6201503), observer_elevation=500.0)
#  gcore.run_command('r.out.gdal', input='viewshed', output=output_tif)
Example 27
        transform=src_rst.transform,
        all_touched=True,
        invert=True)
    new_dsm = np.copy(np.squeeze(
        dsm))  # Not sure why, but the DSM has an extra dimension,
    # which I remove with squeeze so it matches the result DSM
    with rasterio.Env():
        result = features.rasterize(
            ((feature['geometry'], int(feature['properties']['hoejde']))
             for feature in shapefile),
            out_shape=src_rst.shape,
            transform=src_rst.transform,
            all_touched=True)
        new_dsm[mask] = result[mask]

        with Session(gisdb=grassdb, location="test", create_opts=vrt):
            import grass.script.array as garray
            r_viewshed = Module('r.viewshed')
            r_out_gdal = Module('r.out.gdal')
            from_np_raster = garray.array()
            from_np_raster[...] = new_dsm
            from_np_raster.write('ny_rast', overwrite=True)
            print(from_np_raster)
            gcore.run_command('r.viewshed',
                              overwrite=True,
                              memory=assigned_mem,
                              input='ny_rast',
                              output='viewshed',
                              max_distance=distance,
                              coordinates=point,
                              observer_elevation=observer_height)
Example 28
def define_project_extent_using_hybasin_ply(
    grassdb,
    grass_location,
    qgis_prefix_path,
    path_dem_in,
    buffer_distance,
    hybasin_ply,
    down_hybasin_id,
    up_hybasin_id,
    mask="MASK",
    dem="dem",
):

    """Define processing extent

    Function used to define the project processing spatial extent (PSE).
    The processing spatial extent is the region in which the Toolbox will work;
    the Toolbox will not process grids or features outside it. Several options
    are available here. The PSE can be defined using the HydroBASINS product
    and a hydrobasin ID: all subbasins draining to that hydrobasin ID will be
    extracted, and the extent of the extracted polygons will be used as the PSE.

    Parameters
    ----------
    grassdb                           : path (required)
        It is a path to project grass database folder
    grass_location                    : string (required)
        It is a string of grass location name
    qgis_prefix_path                  : string (required)
        It is a string of qgis prefix path
    path_dem_in                      : string (required)
        It is the path to input dem
    buffer_distance                  : float (optional)
        It is a float number to increase the extent of the PSE
        obtained from HydroBASINS. It is needed when the input DEM is not from
        HydroSHEDS, since the extent of the watershed will then differ
        from the PSE defined by HydroBASINS.
    hybasin_ply                      : string (optional)
        It is a path to hydrobasin routing product, If it is provided, the
        PSE will be based on the OutHyID and OutHyID2 and
        this HydroBASINS routing product.
    down_hybasin_id                  : int (optional)
        It is a HydroBASINS subbasin ID, which should be the ID of the most
        downstream subbasin in the region of interest.
    up_hybasin_id                    : int (optional)
        It is a HydroBASINS subbasin ID, which should be the ID of the most
        upstream subbasin in the region of interest; normally not needed.
    mask                             : string (optional)
        It is an output mask name, which will be stored in grass_location in both
        vector and raster format
    dem                              : string (optional)
        It is an output dem raster name, which will be stored in grass_location

    Notes
    -------
    Outputs are following files

    MASK                   : raster
        it is a mask raster stored in the grass database, which indicates
        the PSE. The grass database is located at
        os.path.join(grassdb, grass_location)
    dem                   : raster
        it is a dem raster stored in the grass database, which
        has the same extent as MASK. The grass database is located at
        os.path.join(grassdb, grass_location)

    Returns
    -------
       None

    Examples
    -------
    """

    print("mask region:   using hybasin polygon ")

    QgsApplication.setPrefixPath(qgis_prefix_path, True)
    Qgs = QgsApplication([], False)
    Qgs.initQgis()
    from processing.core.Processing import Processing
    from processing.tools import dataobjects
    from qgis import processing

    feedback = QgsProcessingFeedback()
    Processing.initialize()
    QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
    context = dataobjects.createContext()
    context.setInvalidGeometryCheck(QgsFeatureRequest.GeometryNoCheck)

    r_dem_layer = qgis_raster_read_raster(
        processing, path_dem_in
    )  ### load DEM raster as a QGIS raster object to obtain its attributes
    cellSize, SpRef_in = qgis_raster_return_raster_properties(
        processing, r_dem_layer
    )  ### get raster cell size and spatial reference

    hyshdinfo = Dbf_To_Dataframe(hybasin_ply)
    routing_info = hyshdinfo[["HYBAS_ID", "NEXT_DOWN"]].astype("float").values

    # obtain sub id of subbasins between OutHyID and OutHyID2 in the routing
    # network
    HydroBasins = Return_SubIds_Between_Two_Subbasins_In_Rouing_Network(
        routing_info, down_hybasin_id, up_hybasin_id
    )

    # extract subbasins from hydrobasin product
    Selectfeatureattributes(
        processing,
        Input=hybasin_ply,
        Output=os.path.join(grassdb, mask + "_hy.shp"),
        Attri_NM="HYBAS_ID",
        Values=HydroBasins,
    )

    print("Mask Region:   Using buffered hydroBasin product polygons ")

    # dissolve, buffer and reproject the extracted hydrobasin product
    qgis_vector_dissolve(
        processing,
        context,
        INPUT=os.path.join(grassdb, mask + "_hy.shp"),
        FIELD="MAIN_BAS",
        OUTPUT=os.path.join(grassdb, mask + "_1hy.shp"),
    )
    qgis_vector_buffer(
        processing,
        context,
        INPUT=os.path.join(grassdb, mask + "_1hy.shp"),
        Buffer_Distance=buffer_distance,
        OUTPUT=os.path.join(grassdb, mask + "_2hy.shp"),
    )
    qgis_vector_dissolve(
        processing,
        context,
        INPUT=os.path.join(grassdb, mask + "_2hy.shp"),
        FIELD="MAIN_BAS",
        OUTPUT=os.path.join(grassdb, mask + "_3hy.shp"),
    )
    qgis_vector_reproject_layers(
        processing,
        context,
        INPUT=os.path.join(grassdb, mask + "_3hy.shp"),
        TARGET_CRS=SpRef_in,
        OUTPUT=os.path.join(grassdb, mask + ".shp"),
    )

    # clip raster layer with this mask
    qgis_raster_clip_raster_by_mask(
        processing,
        Input=path_dem_in,
        MASK=os.path.join(grassdb, mask + ".shp"),
        TARGET_CRS=SpRef_in,
        Output=os.path.join(grassdb, mask + ".tif"),
    )

    # use the clipped DEM to create a grass working environment
    import grass.script as grass
    import grass.script.setup as gsetup
    from grass.pygrass.modules import Module
    from grass.pygrass.modules.shortcuts import general as g
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.script import array as garray
    from grass.script import core as gcore
    from grass_session import Session

    # open/create a grass location
    os.environ.update(
        dict(GRASS_COMPRESS_NULLS="1", GRASS_COMPRESSOR="ZSTD", GRASS_VERBOSE="1")
    )
    PERMANENT_temp = Session()
    PERMANENT_temp.open(
        gisdb=grassdb,
        location=grass_location + "t1",
        create_opts="EPSG:4326",
    )

    # import clipped dem to target location
    grass_raster_r_in_gdal(
        grass=grass,
        raster_path=os.path.join(grassdb, mask + ".tif"),
        output_nm=dem,
        location=grass_location,
    )
    PERMANENT_temp.close()

    # Define mask and processing region for the grass working environment
    PERMANENT = Session()
    PERMANENT.open(gisdb=grassdb, location=grass_location, create_opts="")

    # define the mask of the current working environment
    grass_raster_r_mask(grass, dem)
    # define the processing extent of the current working environment
    grass_raster_g_region(grass, dem)

    PERMANENT.close()
    Qgs.exit()
Example 29
from grass_session import Session
# import grass python libraries
from grass.pygrass.modules import Module

# simple example for pyGRASS usage: raster processing via modules approach
# Windows path
# gisdb = 'F:/GEO450_GRASS/test_python'
# Linux path
gisdb = '/home/user/grassdata'
location = 'test3'
mapset = 'PERMANENT'

## IMPORT AND INSTALL SENTINELSAT FIRST !!! ##

# open a GRASS session in the location (it is created if it does not yet exist):
with Session(gisdb=gisdb, location=location, create_opts='EPSG:32632'):
    #def ogrimport():
    ## function body to be added here ##

    #### RUN THIS BEFORE RUNNING SENTINELDOWNLOAD!
    # ogrimport = Module("v.in.ogr")
    # ogrimport("/home/user/Desktop/GRASS Jena Workshop/geodata/osm/jena_boundary.gpkg")

    #print(v.info(map='jena_boundary'))

    sentineldownload = Module("i.sentinel.download")
    sentineldownload(
        ### Linux folder ###
        settings="/home/user/Desktop/GRASS Jena Workshop/settings.txt",
        output=
        "/home/user/Desktop/GRASS Jena Workshop/geodata/sentinel/Sentinel_Download",
Example 30
def main():
    with Session(gisdb="/home/ubuntu/grass_DB",
                 location="wgs84",
                 mapset="casella"):
        # run something in PERMANENT mapset:
        print(gcore.parse_command("g.gisenv", flags="s"))