Example #1
def main():
    """
    RichDEM terrain attribute calculation
    """
    # lazy import RICHDEM
    try:
        import richdem as rd
    except ImportError:
        g.message(
            flags="e",
            message=("RichDEM not detected. Install pip3 and " +
                     "then type at the command prompt: " +
                     '"pip3 install richdem".'),
        )
        return

    _input = options["input"]
    _output = options["output"]
    _attribute = options["attribute"]
    _zscale = float(options["zscale"])

    dem = garray.array()
    dem.read(_input, null=np.nan)

    rd_input = rd.rdarray(dem, no_data=np.nan)
    del dem
    rd_output = rd.TerrainAttribute(dem=rd_input,
                                    attrib=_attribute,
                                    zscale=_zscale)

    outarray = garray.array()
    outarray[:] = rd_output[:]
    outarray.write(_output, overwrite=gscript.overwrite())
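
All of the RichDEM examples on this page follow the same round trip: read a GRASS raster into a garray.array, wrap it as an rd.rdarray, run a RichDEM routine, and write the result back. Below is a minimal, self-contained sketch of that pattern; the map names "elev" and "slope_out" are placeholders, and it aborts with gscript.fatal() rather than continuing past a failed import.

import numpy as np
import grass.script as gscript
from grass.script import array as garray

try:
    import richdem as rd
except ImportError:
    gscript.fatal('RichDEM not detected. Install it with "pip3 install richdem".')

# read the current region's cells into a NumPy-backed array, nulls as NaN
dem = garray.array()
dem.read("elev", null=np.nan)

# wrap for RichDEM and compute a terrain attribute
rd_dem = rd.rdarray(dem, no_data=np.nan)
slope = rd.TerrainAttribute(dem=rd_dem, attrib="slope_riserun", zscale=1.0)

# copy the result into a fresh garray and write it back as a raster
out = garray.array()
out[:] = slope[:]
out.write("slope_out", overwrite=gscript.overwrite())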
Example #2
def main():
    """
    RichDEM flow accumulation
    """
    # lazy import RICHDEM
    try:
        import richdem as rd
    except ImportError:
        g.message(
            flags="e",
            message=("RichDEM not detected. Install pip3 and " +
                     "then type at the command prompt: " +
                     '"pip3 install richdem".'),
        )
        return

    _input = options["input"]
    _output = options["output"]
    _method = options["method"]
    _exponent = options["exponent"]
    _weights = options["weights"]

    if (_method == "Holmgren") or (_method == "Freeman"):
        if _exponent == "":
            g.message(
                flags="w",
                message=("Exponent must be defined for " +
                         "Holmgren or Freeman methods. " + "Exiting."),
            )
            return
        else:
            _exponent = float(_exponent)
    else:
        _exponent = None

    if _weights == "":
        rd_weights = None
    else:
        g_weights = garray.array(_weights, null=np.nan)
        rd_weights = rd.rdarray(g_weights, no_data=np.nan)

    dem = garray.array(_input, null=np.nan)

    mask = dem * 0 + 1

    rd_input = rd.rdarray(dem, no_data=np.nan)
    del dem
    rd_output = rd.FlowAccumulation(
        dem=rd_input,
        method=_method,
        exponent=_exponent,
        weights=rd_weights,
        in_place=False,
    )

    rd_output *= mask

    accum = garray.array()
    accum[:] = rd_output[:]
    accum.write(_output, overwrite=gscript.overwrite())
Example #3
def main():
    """
    RichDEM flow accumulation
    """
    # lazy import RICHDEM
    try:
        import richdem as rd
    except ImportError:
        g.message(flags='e',
                  message=('RichDEM not detected. Install pip3 and ' +
                           'then type at the command prompt: ' +
                           '"pip3 install richdem".'))
        return

    options, flags = gscript.parser()
    _input = options['input']
    _output = options['output']
    _method = options['method']
    _exponent = options['exponent']
    _weights = options['weights']

    if (_method == 'Holmgren') or (_method == 'Freeman'):
        if _exponent == '':
            g.message(flags='w',
                      message=('Exponent must be defined for ' +
                               'Holmgren or Freeman methods. ' + 'Exiting.'))
            return
        else:
            _exponent = float(_exponent)
    else:
        _exponent = None

    if _weights == '':
        rd_weights = None
    else:
        g_weights = garray.array()
        g_weights.read(_weights, null=np.nan)
        rd_weights = rd.rdarray(g_weights, no_data=np.nan)

    dem = garray.array()
    dem.read(_input, null=np.nan)

    mask = dem * 0 + 1

    rd_input = rd.rdarray(dem, no_data=np.nan)
    del dem
    rd_output = rd.FlowAccumulation(dem=rd_input,
                                    method=_method,
                                    exponent=_exponent,
                                    weights=rd_weights,
                                    in_place=False)

    rd_output *= mask

    accum = garray.array()
    accum[:] = rd_output[:]
    accum.write(_output, overwrite=gscript.overwrite())
Example #4
    def populate_user_flow_stds(self, rast_quser_name, overwrite):
        '''rast_quser_name: name of user flow raster map
        '''
        arr_qfix = garray.array(dtype=np.float32)
        arr_qvar = garray.array(dtype=np.float32)
        arr_quser = garray.array(dtype=np.float32)

        map_list = []
        var_map_list = []
        # iterate in boundary conditions
        for bc_key, bc_value in self.content.items():
            start_coord = bc_value['start_coor']
            end_coord = bc_value['end_coor']

            if bc_value['type'] == 'QFIX':
                value = bc_value['value'][0][0]
                if not arr_qfix.any():
                    arr_qfix = populate_array(
                                arr_qfix, start_coord, end_coord, value)
                # Add all qfix together to make only one map
                else:
                    arr_qfix += populate_array(arr_qfix, start_coord,
                                                    end_coord, value)

            elif bc_value['type'] == 'QVAR':
                for bc_var_value in bc_value['values']:
                    arr_qvar = populate_array(arr_qvar,
                        start_coord, end_coord, bc_var_value[0])
                    var_map_list.append((arr_qvar,
                                    bc_var_value[1],
                                    bc_value['time_unit']))

        for var_map in var_map_list:
            # include all QFIX and QVAR in one map
            arr_quser[:] = var_map[0] + arr_qfix
            # write GRASS map
            rast_name_var = '{}_{}'.format(
                rast_quser_name, str(int(var_map[1])))
            rast_id_var = tgis.AbstractMapDataset.build_id(
                            rast_name_var, self.mapset)
            arr_quser.write(mapname=rast_id_var, overwrite=overwrite)
            # add temporal information
            rast_var = tgis.RasterDataset(rast_id_var)
            rast_var.set_relative_time(start_time=var_map[1],
                        end_time=None, unit=var_map[2])
            map_list.append(rast_var)

        # Register maps in the space-time dataset
        if map_list:
            stds = tgis.open_old_stds(rast_quser_name, 'strds', dbif=self.dbif)
            tgis.register.register_map_object_list('raster',
                                map_list, output_stds=stds,
                                     delete_empty=True, unit=var_map[2],
                                     dbif=self.dbif)
        return self
Example #5
def main():
    input = options["input"]
    output = options["output"]

    win_size = int(options["size"])
    orientation = [float(o) for o in options["orientation"].split(",")]
    wavelength = [float(f) for f in options["wavelength"].split(",")]
    offset = float(options["offset"])
    aspect = float(options["aspect"])
    threshold = int(options["threshold"])

    if flags["i"]:
        ntype = "imag"
    else:
        ntype = "real"

    if flags["q"]:
        if not threshold:
            grass.fatal(_("A percentile threshold is needed to quantify."))
        q = [2**i for i in range(len(orientation))]
    else:
        q = 0

    filters = {}
    for deg in orientation:
        for freq in wavelength:
            name = f"{win_size}_{deg}_{freq}_{offset}_{aspect}"
            filters[name] = gabor2d(win_size, deg, freq, aspect, offset, ntype)

    inarr = garray.array()
    inarr.read(input)
    if flags["c"]:
        convolved = []
        if type(q) == list:
            for i in range(len(filters.keys())):
                name = list(filters.keys())[i]
                convolved.append(
                    gabor_convolve(inarr, filters[name], threshold, q[i]))
        else:
            for name in filters.keys():
                convolved.append(
                    gabor_convolve(inarr, filters[name], threshold))
        outarr = garray.array()
        out = np.sum(convolved, axis=0)
        outarr[...] = out
        outarr.write(output)
    else:
        for name in filters.keys():
            outarr = garray.array()
            outarr[...] = gabor_convolve(inarr, filters[name], threshold)
            outarr.write(f"{output}_{name.replace('.', '')}")
Example #6
def main():
    """
    RichDEM depression breaching
    """
    # lazy import RICHDEM
    try:
        import richdem as rd
    except ImportError:
        g.message(
            flags="e",
            message=("RichDEM not detected. Install pip3 and " +
                     "then type at the command prompt: " +
                     '"pip3 install richdem".'),
        )
        return

    _input = options["input"]
    _output = options["output"]
    _topology = options["topology"]

    dem = garray.array()
    dem.read(_input, null=np.nan)

    rd_inout = rd.rdarray(dem, no_data=np.nan)
    rd.BreachDepressions(dem=rd_inout, in_place=True, topology=_topology)

    dem[:] = rd_inout[:]
    dem.write(_output, overwrite=gscript.overwrite())
Example #7
def main():
    """Main function, called at execution time"""

    # parse arguments
    input = options['input']
    var = options['var']
    prefix = options['prefix']

    # read NetCDF data
    nc = Dataset(input, 'r')
    data = nc.variables[var][:]
    nc.close()

    # set temporary region
    res = 32463.0
    rows = 277
    cols = 349
    grass.run_command('g.region',
                      n=res*(rows-0.5), s=res*-0.5,
                      e=res*(cols-0.5), w=res*-0.5,
                      rows=rows, cols=cols)

    # for each month
    a = garray.array()
    for (i, timeslice) in enumerate(data):
        mapname = prefix + '%02i' % (i+1)
        grass.message("Importing <%s> ..." % mapname)

        # import data with grass array
        a[:] = flipud(timeslice)
        a.write(mapname=mapname, overwrite=True, null=-32767)
Example #8
def main():
    """
    RichDEM depression filling
    """
    # lazy import RICHDEM
    try:
        import richdem as rd
    except ImportError:
        g.message(flags='e',
                  message=('RichDEM not detected. Install pip3 and ' +
                           'then type at the command prompt: ' +
                           '"pip3 install richdem".'))
        return

    _input = options['input']
    _output = options['output']
    _epsilon = options['epsilon']
    _topology = options['topology']

    if _epsilon == 'true':
        epsilon = True
    else:
        epsilon = False

    dem = garray.array()
    dem.read(_input, null=np.nan)

    rd_inout = rd.rdarray(dem, no_data=np.nan)
    rd.FillDepressions(dem=rd_inout,
                       epsilon=epsilon,
                       in_place=True,
                       topology=_topology)

    dem[:] = rd_inout[:]
    dem.write(_output, overwrite=gscript.overwrite())
Example #9
def img2array(img, npred=8):
    rl = garray.array()
    imagegroup = np.empty((rl.shape[0], rl.shape[1], npred), dtype='float')
    for i, v in enumerate(img[3:]):
        rl.read(v)
        imagegroup[:, :, i] = rl
    return imagegroup
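
A short usage sketch for img2array(): the first three entries of img are skipped and the remaining entries are read as raster maps covering the current region, so npred should equal len(img) - 3. All names below are placeholders.

# hypothetical list: three leading non-raster fields, then four band maps
img = ["id", "x", "y", "band_1", "band_2", "band_3", "band_4"]
stack = img2array(img, npred=len(img) - 3)
print(stack.shape)  # (rows, cols, 4) for the current region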
Example #10
def main():
    """Main function, called at execution time."""

    # parse arguments
    input = options['input']
    output = options['output']
    method = options['method']

    # read map with grass array
    a = garray.array(dtype='f4')
    a.read(input)

    # points with existing data
    points = np.nonzero(a)
    data = a[points]

    # interpolation grid
    rows, cols = a.shape
    gridx, gridy = np.mgrid[0:rows, 0:cols]  # WORKS!

    # interpolate and write map
    a[...] = griddata(points, data, (gridx, gridy), method=method)
    a.write(output)

    return
Example #11
def main():
    """
    RichDEM flat resolution: give a gentle slope
    """
    # lazy import RICHDEM
    try:
        import richdem as rd
    except ImportError:
        g.message(flags='e',
                  message=('RichDEM not detected. Install pip3 and ' +
                           'then type at the command prompt: ' +
                           '"pip3 install richdem".'))
        return

    _input = options['input']
    _output = options['output']

    # Check for overwrite
    _rasters = np.array(list(gscript.parse_command('g.list', type='raster').keys()))
    if (_rasters == _output).any():
        g.message(flags='e', message="output would overwrite " + _output)

    dem = garray.array()
    dem.read(_input, null=np.nan)

    rd_input = rd.rdarray(dem, no_data=np.nan)
    rd_output = rd.ResolveFlats(rd_input)

    dem[:] = rd_output[:]
    dem.write(_output, overwrite=gscript.overwrite())
Example #13
def main():
    """
    RichDEM depression filling
    """
    
    options, flags = gscript.parser()
    _input = options['input']
    _output = options['output']
    _epsilon = options['epsilon']
    _topology = options['topology']
    
    if _epsilon == 'true':
        epsilon = True
    else:
        epsilon = False

    dem = garray.array()
    dem.read(_input, null=np.nan)
    
    rd_inout = rd.rdarray(dem, no_data=np.nan)
    rd.FillDepressions(dem=rd_inout, epsilon=epsilon, in_place=True,
                       topology=_topology)
    
    dem[:] = rd_inout[:]
    dem.write(_output, overwrite=gscript.overwrite())
Example #15
def main():
    """Main function, called at execution time"""

    # parse arguments
    inputfile = options['input']
    outputmap = options['output']
    var = options['var']

    # read NetCDF data
    nc = Dataset(inputfile, 'r')
    lon = nc.variables['longitude'][:]
    lat = nc.variables['latitude'][:]
    z = nc.variables[var][:]
    nc.close()

    # check that coordinates are regular
    dlon = np.diff(lon)
    dlat = np.diff(lat)
    dlon0 = dlon[0]
    dlat0 = dlat[0]
    assert (dlon == dlon0).all()
    assert (dlat == dlat0).all()

    # crop illegal latitudes
    lat = lat[np.abs(lat) < 90 - np.abs(dlat0) / 2]

    # rotate longitudes and sort
    lon -= (lon > 180) * 360
    lon_args = lon.argsort()
    lat_args = lat.argsort()[::-1]

    # crop and rotate data
    lat = lat[lat_args]
    lon = lon[lon_args]
    z = z[:, lat_args, :][:, :, lon_args]

    # set temporary region
    w = lon[-1] + dlon0 / 2
    e = lon[0] - dlon0 / 2
    s = lat[-1] - dlat0 / 2
    n = lat[0] + dlat0 / 2
    rows = len(lat)
    cols = len(lon)
    grass.run_command('g.region', w=w, e=e, s=s, n=n, rows=rows, cols=cols)

    # import time-independent data as such
    a = garray.array()
    if z.shape[0] == 1:
        a[:] = z[0]
        grass.message("Importing <%s> ..." % outputmap)
        a.write(mapname=outputmap, overwrite=True, null=-32767)

    # otherwise import each time slice
    else:
        for (i, data) in enumerate(z):
            mapname = outputmap + '%02i' % (i + 1)
            grass.message("Importing <%s> ..." % mapname)
            a[:] = data
            a.write(mapname=mapname, overwrite=True, null=-32767)
Example #16
def export_to_grass(data, map_name, nodata=-32768):
    # Export the array as a raster map to GRASS-GIS
    output_map = garray.array()
    output_map[:, :] = np.flipud(data)
    output_map.write(
        mapname=map_name,
        overwrite=True
        )
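
A usage sketch for export_to_grass(), assuming grass.script is imported as grass: the array shape has to match the current computational region, so the region is aligned to an existing raster first. Map names are placeholders, and note that the nodata argument is accepted but not used by the function above.

import numpy as np
import grass.script as grass

grass.run_command("g.region", raster="elevation")
region = grass.region()
data = np.random.rand(region["rows"], region["cols"])
export_to_grass(data, "random_surface")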
Example #17
def mahal(v, m, VI):
    """Compute the mahalanobis distance over reference layers"""
    delta = v - m[:, None, None]
    mahdist = np.sum(np.sum(delta[None, :, :, :] *
                     VI[:, :, None, None], axis=1) * delta, axis=0)
    stat_mahal = garray.array()
    stat_mahal[...] = mahdist
    return stat_mahal
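
A sketch of how the inputs to mahal() could be assembled, assuming three reference rasters without NULL cells in the current region (names are placeholders): m is the per-band mean vector and VI the inverse covariance matrix.

import numpy as np
from grass.script import array as garray

bands = ["ref_1", "ref_2", "ref_3"]
stack = []
for name in bands:
    layer = garray.array()
    layer.read(name)
    stack.append(np.array(layer))
v = np.stack(stack)                      # (bands, rows, cols)

flat = v.reshape(v.shape[0], -1)         # (bands, rows*cols)
m = flat.mean(axis=1)                    # per-band means
VI = np.linalg.inv(np.cov(flat))         # inverse covariance, (bands, bands)

distance_map = mahal(v, m, VI)           # garray holding the Mahalanobis surface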
Example #18
def grass_raster_setnull_array(input, output, values, grass):

    raster_array = garray.array(mapname=input)
    if (len(values) > 0):
        mask = np.isin(raster_array, values)
        raster_array[mask] = -9999

    temparray = garray.array()
    temparray[:, :] = raster_array[:, :]
    temparray.write(mapname=output, overwrite=True)
    grass.run_command("r.null", map=output, setnull=[-9999, 0])
    exp = "%s = int(%s)" % (
        output,
        output,
    )
    grass.run_command("r.mapcalc", expression=exp, overwrite=True)
    del temparray
    del raster_array
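
A usage sketch, assuming grass refers to grass.script and that a categorical raster named "landuse" exists (a placeholder): categories 7 and 8 are set to -9999, r.null then turns -9999 and 0 into NULL, and the result is cast to integer.

import grass.script as grass

grass_raster_setnull_array("landuse", "landuse_clean", [7, 8], grass)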
Example #19
def raster_as_1d_array(raster):
    ''' return GRASS raster as numpy array - keep null values '''
    print('----------   raster_as_1d_array   ----------')
    print(raster)
    grass.run_command('g.region', raster=raster, flags='pa')
    raster_array = garray.array()
    raster_array.read(raster, null=np.nan)
    print('----------  raster_as_1d_array OK ----------')
    return raster_array.flatten(order='C')
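
Because NULL cells come back as NaN, NaN-aware NumPy reductions are the natural follow-up; a small sketch with a placeholder raster name:

import numpy as np

values = raster_as_1d_array("elevation")
print(np.nanmin(values), np.nanmean(values), np.nanmax(values))
print("NULL cells:", int(np.isnan(values).sum()))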
Example #20
 def test_setgrasslayer(self):
     # only do this test, if self.rastlayername is set
     if self.rastlayername:
         layer = garray.array()
         # set the layer
         ## the first time it is missing
         self.assertRaises(error.DataError, self.pg.setgrasslayer,
                                 *[self.rastlayername, self.rastlayername])
         ## so we need to write it first
         pass
Example #21
def worker(name, correction):
  starttime = time.time()
  outgrid = garray.array()
  print(name, ': interpolating.')
  outgrid[...] = interpolate(item) + correction
  print("    Writing numpy array to GRASS GIS.")
  outgrid.write(name)
  print('Deleting outgrid object')  # possibly unnecessary b/c in function, but playing safe.
  del outgrid
  print("    ", name, ':', time.time() - starttime, "seconds elapsed.")
Example #22
def viewshed(vrt,list_of_dicts,distance,point,
             observer_height,grassdb,burn_viewshed_rst,total_cells_output):
    ## to be split up - first function
    with rasterio.open(vrt) as src_rst:

        dsm = src_rst.read()
        out_meta = src_rst.meta.copy()
        print(out_meta)
        out_meta.update(dtype=rasterio.int16,driver='GTiff') 
        mask = features.geometry_mask(
                                      [feature["feature"]["geometry"] for feature in
                                       list_of_dicts],
                                       src_rst.shape,
                                       transform=src_rst.transform,
                                       all_touched=True, 
                                       invert=True)
        new_dsm = np.copy(np.squeeze(dsm))  # Not sure why, but the DSM has an extra dimension,
                                            # which I remove with squeeze so it matches the result DSM
    ## to be split up - second function, maybe
        with rasterio.Env():
            result = features.rasterize(
                                        ((feature['feature']['geometry'], int(feature['feature']['properties']['hoejde'])
                                         * 1000)
                                          for feature in list_of_dicts), 
                                          out_shape=src_rst.shape,
                                          transform=src_rst.transform,
                                          all_touched=True)             
            new_dsm[mask] = result[mask] 

    ## to be split up - third function
            with Session(gisdb=grassdb, location="test",create_opts=vrt):
                import grass.script.array as garray
                r_viewshed = Module('r.viewshed')
                r_out_gdal = Module('r.out.gdal')
                r_stats = Module('r.stats')
                r_univar = Module('r.univar')
                from_np_raster = garray.array()
                from_np_raster[...] = new_dsm
                from_np_raster.write('ny_rast',overwrite=True)
                print(from_np_raster)
                gcore.run_command('r.viewshed', overwrite=True, memory=2000,
                                  input='ny_rast', output='viewshed', max_distance=distance,
                                  coordinates=point, observer_elevation=observer_height)
                r_stats(flags='nc',overwrite=True,input='viewshed',output=total_cells_output)
                ## figure out how r_stats can output to something
                ## that Python can read directly
                with open(total_cells_output) as tcls:
                    counts = []
                    for line in tcls:
                        nbr = int(line.split()[-1])
                        counts.append(nbr)
                # summary = r_univar(map='viewshed')
                #r_viewshed(input=from_np_raster, output='viewshed', max_distance=1000, memory=1424, coordinates=(701495,6201503), observer_elevation=500.0)
                r_out_gdal(overwrite=True, input='viewshed', output=burn_viewshed_rst)
    return sum(counts) #visible_cells
Example #23
 def createlayer(self, layername, grassmapname=False, force=False):
     """
     Create a new layer and add it to the layer collection
     @param string name of the layer
     @param string optional name of a GRASS map layer (False if only local)
     @param boolean optional, whether to overwrite an existing layer
     """
     layer = garray.array()
     if grassmapname:
         self.grassmapnames[layername] = grassmapname
     self.setlayer(layername, layer, force)
Example #24
    def irradiance(self,time,day, alb=0.2, lin=3, flat = None, outpath=None):
        """
        DESCRIPTION
            Calculate the Irradiance
        INPUT
            Outpath: True, use the path to save the global irradiance array on a file
        """
 
        try:
            self.rasteraspect
            self.rasterslope
        except AttributeError:
            self.slope_aspect()
 
        if flat:
            rasteraspect = self.rasteraspectflat
            rasterslope = self.rasterslopeflat
        else:
            rasteraspect = self.rasteraspect
            rasterslope = self.rasterslope
             
 
 
        grass.run_command("r.sun", 
                            flags="s",
                            overwrite=True,
                            verbose=False,
                            alb=alb,
                            lin=lin,
                            elevin=self.rasterelev, 
                            aspin=rasteraspect, 
                            slopein=rasterslope,
                            time=time,
                            beam_rad="beam_rad",
                            diff_rad="diff_rad",
                            refl_rad="refl_rad",
                            glob_rad="glob_rad",
                             
                            day = day
                            )
         
        self.rasterbeam = "beam_rad"+ "@" + self.env.get('MAPSET')# complete name
         
         
        self.rasterdiff = "diff_rad"+ "@" + self.env.get('MAPSET')# complete name
        self.rasterrefl = "refl_rad"+ "@" + self.env.get('MAPSET')# complete name
        self.rasterglob = "glob_rad"+ "@" + self.env.get('MAPSET')# complete name
        
        if outpath:
            irrglob = garray.array()
            irrglob.read(self.rasterelev)
            filename = outpath + str(day) + "_" + str(time)
            np.savetxt(filename, irrglob, delimiter=',') 
            print "Saved at :" + outpath
Example #25
def pickup_landscape_variables(habitat_map_name,
                               variables=[],
                               list_variable_maps={},
                               variable_types={},
                               variable_mapsets={},
                               null=0,
                               null_nan=False,
                               exportPNG=True):
    '''
    exportPNG not implemented yet
    '''

    # Check if a list of variables and a list of names of map variables was passed as input
    if len(variables) == 0 or len(list_variable_maps) == 0:
        raise Exception(
            'You need to first load the variables and the list of variable maps.'
        )

    # Initialize dictionary of variable names and maps
    landscape_variable_map_names = {}
    landscape_variable_maps = {}

    # Define region
    grass.run_command('g.region', raster=habitat_map_name)

    # Select variables according to the habitat map name
    for i in variables:

        try:
            landscape_variable_map_names[i] = [
                name for name in list_variable_maps[i]
                if habitat_map_name in name
            ][0]
            var_type = variable_types[i]
            mapset = variable_mapsets[i]
        except (IndexError, KeyError):
            raise Exception('There is no map with a pattern ' +
                            habitat_map_name +
                            ' in the list of maps for variable ' + i +
                            '. Please check it.')

        # Initialize grass.array and read it from GRASS into a numpy array
        landscape_variable_maps[i] = garray.array()  #(dtype = var_type)
        landscape_variable_maps[i].read(landscape_variable_map_names[i] + '@' +
                                        mapset,
                                        null=null)
        print('Loading spatial variables: ' + landscape_variable_map_names[i])

        if null_nan:
            landscape_variable_maps[i] = np.where(
                landscape_variable_maps[i] == null, np.nan,
                landscape_variable_maps[i])

    return landscape_variable_map_names, landscape_variable_maps
Example #26
def worker(name, correction):
    starttime = time.time()
    outgrid = garray.array()
    print(name, ": reading.")
    outgrid.read(name)
    print(name, ": correcting.")
    outgrid[...] += correction
    print("    Writing numpy array to GRASS GIS.")
    outgrid.write(name, overwrite=True)
    print("    Deleting outgrid object")  # possibly unnecessary b/c in function, but playing safe.
    del outgrid
    print("    ", name, ":", time.time() - starttime, "seconds elapsed.")
Example #27
 def saveGRASS(self):
   iceGA = garray.array()
   iceGA[...] = self.H
   iceGA.write(self.OutNameGRASS+'_Hice', overwrite=True)
   iceGA[...] = self.uD
   iceGA.write(self.OutNameGRASS+'_uDeformation', overwrite=True)
   iceGA[...] = self.uS
   iceGA.write(self.OutNameGRASS+'_uSliding', overwrite=True)
   iceGA[...] = self.Zb
   iceGA.write(self.OutNameGRASS+'_zBed', overwrite=True)
   iceGA[...] = self.Zs
   iceGA.write(self.OutNameGRASS+'_zSurface', overwrite=True)
Example #29
def grass_raster_create_raster_empty_raster(garray, raster_nm):
    """grass create a raster with -9999 in current location
    Parameters
    ----------

    Returns:
    -------

    """
    temparray = garray.array()
    temparray[:, :] = -9999
    temparray.write(mapname=raster_nm, overwrite=True)
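
A usage sketch, assuming garray is grass.script.array and grass is grass.script (the map name is a placeholder): the new raster is filled with -9999, which can then be registered as NULL with r.null, as Example #18 does.

import grass.script as grass
from grass.script import array as garray

grass_raster_create_raster_empty_raster(garray, "blank_raster")
grass.run_command("r.null", map="blank_raster", setnull=-9999)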
Example #30
def main():

    rast1 = '/home/vitale232/Google Drive/UNR/UNR-Thesis/Data/GIS-Data/DEM/n39w114/imgn39w114_1.img'
    rast2 = '/home/vitale232/Google Drive/UNR/UNR-Thesis/Data/GIS-Data/DEM/n39w115/imgn39w115_1.img'
    rast3 = '/home/vitale232/Google Drive/UNR/UNR-Thesis/Data/GIS-Data/DEM/n40w114/imgn40w114_1.img'
    rast4 = '/home/vitale232/Google Drive/UNR/UNR-Thesis/Data/GIS-Data/DEM/n40w115/imgn40w115_1.img'


    a = garray.array()
    a.read(rast1)

    print(grass.raster_info(rast1))
Example #31
def flatParam(parameter, aslist=False):
    print('flatParam: ' + parameter)
    param = garray.array()
    param.read(parameter, null=np.nan)
    param = param[~np.isnan(param)]
    if aslist == True:
        parflat = param.tolist()
    else:
        parflat = np.array(param)
    param = None
    gc.collect()
    return parflat
Example #32
 def saveGRASS(self):
   from grass.script import array as garray
   iceGA = garray.array()
   iceGA[...] = self.H
   iceGA.write(self.OutNameGRASS+'_Hice', overwrite=True)
   iceGA[...] = self.uD
   iceGA.write(self.OutNameGRASS+'_uDeformation', overwrite=True)
   iceGA[...] = self.uS
   iceGA.write(self.OutNameGRASS+'_uSliding', overwrite=True)
   iceGA[...] = self.Zb
   iceGA.write(self.OutNameGRASS+'_zBed', overwrite=True)
   iceGA[...] = self.Zs
   iceGA.write(self.OutNameGRASS+'_zSurface', overwrite=True)
Example #33
def main():
    """
    RichDEM terrain attribute calculation
    """
    
    options, flags = gscript.parser()
    _input = options['input']
    _output = options['output']
    _attribute = options['attribute']
    _zscale = float(options['zscale'])
    
    dem = garray.array()
    dem.read(_input, null=np.nan)
    
    rd_input = rd.rdarray(dem, no_data=np.nan)
    del dem
    rd_output = rd.TerrainAttribute(dem=rd_input, attrib=_attribute,
                                    zscale=_zscale)
    
    outarray = garray.array()
    outarray[:] = rd_output[:]
    outarray.write(_output, overwrite=gscript.overwrite())
Example #34
def pickup_one_landscape_garray(select_form='random',
                                previous_landscape='',
                                list_habitat_maps=[],
                                null=0,
                                null_nan=False,
                                exportPNG=True):
    '''
    exportPNG not implemented yet
    '''

    # Check if a list of habitat maps was passed as input
    if len(list_habitat_maps) == 0:
        raise Exception('You need to first load the list of habitat maps.')

    # Select a landscape name from the list of habitat map names:

    # If select_form == random, pick up a random map (the first if no one was chosen before)
    if select_form == 'random':
        if previous_landscape == '':
            habitat_map_name = list_habitat_maps[0]
        else:
            habitat_map_name = random.sample(list_habitat_maps, 1)[0]
    # If select_form == order, pick up the next one in the list (the first if no one was chosen before or if it is the last one)
    elif select_form == 'order':
        if previous_landscape == '' or previous_landscape == list_habitat_maps[
            (len(list_habitat_maps) - 1)]:
            habitat_map_name = list_habitat_maps[0]
        else:
            index = list_habitat_maps.index(previous_landscape)
            habitat_map_name = list_habitat_maps[(index + 1)]
    # If select_form == type, pick up the a fixed landscape typed by the user (always the same)
    elif select_form == 'same':
        habitat_map_name = previous_landscape
    else:
        raise Exception(
            'You must select a landscape according to random, order, or type rule.'
        )

    # Now, load it using grass.script.array

    # Define region
    grass.run_command('g.region', raster=habitat_map_name)

    # Load map
    landscape_map = garray.array(dtype=np.int8)
    landscape_map.read(habitat_map_name, null=null)

    if null_nan:
        landscape_map = np.where(landscape_map == null, np.nan, landscape_map)

    return habitat_map_name, landscape_map
Example #35
def GrassToOssim(mapname):
    registry = ossimImageHandlerRegistry.instance()
    # read map
    array = garray.array()
    array.read(mapname)
 
    ##print grass.raster_info(mapname)['datatype']

    memSource = ossimMemoryImageSource()
    stype = PYOSSIM_FLOAT32
    imdata = ossimImageData(memSource,stype,1)
    memSource.initialize()
    WriteArrayToImageData(imdata,array,0)
    outfile = mapname +".jpg"
    WriteImageDataToFile(imdata,outfile)
Example #36
 def rastack(self, layers):
     if all(General().grasslayercheck(layer) for layer in layers):
         dim=len(layers)
         rl = garray.array()
         imagegroup = np.empty((rl.shape[0], rl.shape[1], dim), dtype='float')
         for i, v in enumerate(layers):
             rl.read(v)
             imagegroup[:, :, i] = rl
         return imagegroup
     else:
         matchinglayers = [layer for layer in layers if General().grasslayercheck(layer)]
         # set(layers) & set(matchinglayers)
         missinglayers = len(layers) - len(matchinglayers)
          print('Not all the layers were found, Num layers missing: %s \n Matching layers: %s' % (missinglayers,
                                                                                                   matchinglayers))
Example #37
 def setgrasslayer(self, layername, grassmapname, force=False):
     """
     Put an existing map from GRASS to the layer collection
     @param string name of the layer
     @param string name of an existing GRASS map layer
     @param boolean optional, whether to overwrite values if key exists
     """
     # fill the new grass array with the contents from the map (must exist)
     if grassmapname in grass.list_strings("rast"):
         layer = garray.array(grassmapname)
         self.grassmapnames[layername] = grassmapname
         self.setlayer(layername, layer, force)
     else:
         raise error.DataError(Grassland.ME,
                               "Grass Map was missing: " + grassmapname)
Example #38
 def rastprep(self, raster_grid_name, resolution=90, figsize=(6,8)):#, colormap=cm.GMT_haxby, alpha=1):
   # handle the flipud and resolution (per above function)
   # also use any set transparency
   # Send input to class-wide variables and set the resolution
   self.raster_grid_name = raster_grid_name
   self.resolution = resolution
   self.figsize = figsize
   self.set_resolution()
   # Then get the grid from GRASS
   self.rast_grid = garray.array()
   self.rast_grid.read(raster_grid_name)
   self.rast_grid = np.flipud(self.rast_grid)
   self.buffer_rast_grid() # put nan's around it and extend n, s, w, e, lats, lons, nlats, nlons, to prevent streaking
    # And transform it into the coordinate system
   rast_grid_transformed = self.m.transform_scalar(self.rast_grid, self.lons, self.lats,self.nlons,self.nlats)
   return rast_grid_transformed
Example #39
 def test_writelayer(self):
     if self.rastlayername:
         # create an empty test map
         layer = garray.array()
         self.pg.createlayer(self.rastlayername, self.rastlayername)
         # write it once
         self.pg.writelayer(self.rastlayername)
         # now remove it from the internal grasslayer list
         self.pg.grassmapnames.pop(self.rastlayername)
         self.assertRaises(error.DataError, self.pg.writelayer,
                             self.rastlayername)
         # try again, this time being explicit, but still fail
         self.assertRaises(error.DataError, self.pg.writelayer,
                             *[self.rastlayername, self.rastlayername])
         # force write it again..
         self.pg.writelayer(self.rastlayername, self.rastlayername, True)
Example #40
def main():
    """
    RichDEM depression breaching
    """

    options, flags = gscript.parser()
    _input = options['input']
    _output = options['output']
    _topology = options['topology']

    dem = garray.array()
    dem.read(_input, null=np.nan)

    rd_inout = rd.rdarray(dem, no_data=np.nan)
    rd.BreachDepressions(dem=rd_inout, in_place=True, topology=_topology)

    dem[:] = rd_inout[:]
    dem.write(_output, overwrite=gscript.overwrite())
Example #41
def pickup_one_landscape_garray(select_form = 'random', previous_landscape = '', list_habitat_maps = [], 
                                null = 0, null_nan = False, exportPNG = True):
    '''
    exportPNG not implemented yet
    '''

    # Check if a list of habitat maps was passed as input
    if len(list_habitat_maps) == 0:
        raise Exception('You need to first load the list of habitat maps.')

    # Select a landscape name from the list of habitat map names:

    # If select_form == random, pick up a random map (the first if no one was chosen before)
    if select_form == 'random':
        if previous_landscape == '':
            habitat_map_name = list_habitat_maps[0]
        else:
            habitat_map_name = random.sample(list_habitat_maps, 1)[0]
    # If select_form == order, pick up the next one in the list (the first if no one was chosen before or if it is the last one)
    elif select_form == 'order':              
        if previous_landscape == '' or previous_landscape == list_habitat_maps[(len(list_habitat_maps)-1)]:
            habitat_map_name = list_habitat_maps[0]
        else:
            index = list_habitat_maps.index(previous_landscape)
            habitat_map_name = list_habitat_maps[(index+1)]
    # If select_form == type, pick up the a fixed landscape typed by the user (always the same)
    elif select_form == 'same':
        habitat_map_name = previous_landscape
    else:
        raise Exception('You must select a landscape according to random, order, or type rule.')

    # Now, load it using grass.script.array

    # Define region
    grass.run_command('g.region', raster = habitat_map_name)

    # Load map
    landscape_map = garray.array(dtype = np.int8)
    landscape_map.read(habitat_map_name, null = null)
    
    if null_nan:
        landscape_map = np.where(landscape_map == null, np.nan, landscape_map)    

    return habitat_map_name, landscape_map
Example #42
def OssimToGrass(filename):
   
    registry = ossimImageHandlerRegistry.instance()
    ossimfile = ossimFilename(filename) 
    handler = registry.open(ossimfile)    
    if not handler.isOpen():
        print "Could not open first image at <" + imgname.c_str() +  ">. Aborting..."
        sys.exit(0)

    ossimdata = ossimImageSourceAsArray(handler)
    base=os.path.basename(filename)
    grass_file = os.path.splitext(base)[0]

    for band in range(ossimdata.shape[0]):

        gdata = garray.array()
        gdata[...] = ossimdata[band][:gdata.shape[0],:gdata.shape[1]]
        grass_band = grass_file + "." + str(band+1)
        gdata.write(grass_band)
Example #43
def make_map(name, outname, res, outres=None, sdiv=30):
  grass.run_command('g.region', n=90, s=-90, w=-180, e=180, res=res)
  a = garray.array() # var of interest
  theta = garray.array() # colat
  phi = garray.array()   # e-lon
  a.read(name, null=np.nan)
  
  grass.mapcalc("lats = y()", overwrite=True)
  grass.mapcalc("lons = x()", overwrite=True)
  theta.read('lats')
  theta = 90 - theta # colat
  phi.read('lons')
  phi += 180 # e-lon

  theta *= np.pi/180.
  phi *= np.pi/180.

  theta1 = theta.ravel()
  phi1   = phi.ravel()
  a1     = a.ravel()

  theta1 = theta1[np.isnan(a1) == False]
  phi1 = phi1[np.isnan(a1) == False]
  a1 = a1[np.isnan(a1) == False]

  if outres:
    grass.run_command('g.region', n=90, s=-90, w=-180, e=180, res=outres)
    thetaout = garray.array() # colat
    phiout = garray.array()   # e-lon
    grass.mapcalc("outlats = y()", overwrite=True)
    grass.mapcalc("outlons = x()", overwrite=True)
    # Sloppy, but don't need these anymore
    thetaout.read('outlats')
    thetaout = 90 - thetaout # colat
    phiout.read('outlons')
    phiout += 180 # e-lon
    thetaout *= np.pi/180.
    phiout *= np.pi/180.
  else:
    thetaout=theta
    phiout=phi

  # If a1 too small, can produce bad results
  lut = interp.SmoothSphereBivariateSpline( theta1, phi1, a1, s=int(np.floor(len(theta1)/sdiv)), eps=1E-10)
  data_global = lut(thetaout[:,0], phiout[0,:])
  outarray = garray.array()
  outarray[...] = data_global
  outarray.write(outname, overwrite=True)
  grass.run_command('g.region', n=90, s=-90, w=-180, e=180, res=res)
Example #44
    def get_np_array(self):
        """Return this raster map as memmap numpy style array to access the raster
           values in numpy style without loading the whole map in the RAM.

           In case this raster map exists in the grass spatial database,
           the map will be exported using r.out.bin to a temporary location
           and assigned to the memmap object that is returned by this function.

           In case the raster map does not exist, an empty temporary
           binary file will be created and assigned to the memmap object.

           You need to call the write function to write the memmap
           array back into grass.
        """

        a = garray.array()

        if self.map_exists():
            a.read(self.get_map_id())

        return a
Example #46
def Return_Raster_As_Array(grassdb, grass_location, raster_mn):
    """Transfer an rater in grass database into np array
    Parameters
    ----------
    grassdb         : string
    Full path to a grass database
    grass_location  : string
    location name in that grass database
    raster_mn       : string
    raster name

    Returns:
    -------
    Array            : array
    np array of the raster.

    """
    PERMANENT = Session()
    PERMANENT.open(gisdb=grassdb, location=grass_location, create_opts="")
    Array = copy.deepcopy(garray.array(mapname=raster_mn))
    PERMANENT.close()
    return Array
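
A usage sketch with placeholder database, location, and raster names, assuming the Session class and grass.script.array used inside the function are importable in the calling environment:

arr = Return_Raster_As_Array("/data/grassdb", "nc_spm_08", "elevation")
print(arr.shape, arr.dtype)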
Example #47
def read_map(mapname, scalefactor=1.0):
    """Return numpy array from a GRASS raster map."""

    # show which map is processed if verbose
    grass.verbose(mapname)

    # parse smoothing option
    smooth = options['smooth']

    # smooth map with r.neighbors
    if smooth:
        smoothmap = 'r.out.pism_' + str(os.getpid()) + '_tmp'
        grass.run_command('r.neighbors', flags='c',
                          input=mapname, output=smoothmap,
                          size=options['smooth'], quiet=True)
        mapname = smoothmap

    # read map into array
    a = garray.array()
    a.read(mapname)
    if smooth:
        grass.run_command('g.remove', rast=smoothmap, quiet=True)
    return transpose(flipud(a[:]))*scalefactor
Example #48
def pickup_landscape_variables(habitat_map_name, variables = [], list_variable_maps = {}, 
                               variable_types = {}, variable_mapsets = {}, null = 0, null_nan = False, exportPNG = True):
    '''
    exportPNG not implemented yet
    '''

    # Check if a list of variables and a list of names of map variables was passed as input
    if len(variables) == 0 or len(list_variable_maps) == 0:
        raise Exception('You need to first load the variables and the list of variable maps.')

    # Initialize dictionary of variable names and maps
    landscape_variable_map_names = {}
    landscape_variable_maps = {}

    # Define region
    grass.run_command('g.region', raster = habitat_map_name)    

    # Select variables according to the habitat map name
    for i in variables:

        try:
            landscape_variable_map_names[i] = [name for name in list_variable_maps[i] if habitat_map_name in name][0]
            var_type = variable_types[i]
            mapset = variable_mapsets[i]
        except (IndexError, KeyError):
            raise Exception('There is no map with a pattern '+habitat_map_name+' in the list of maps for variable '+i+'. Please check it.')

        # Initialize grass.array and read it from GRASS into a numpy array
        landscape_variable_maps[i] = garray.array()#(dtype = var_type)
        landscape_variable_maps[i].read(landscape_variable_map_names[i]+'@'+mapset, null = null)
        print('Loading spatial variables: ' + landscape_variable_map_names[i])
        
        if null_nan:
            landscape_variable_maps[i] = np.where(landscape_variable_maps[i] == null, np.nan, landscape_variable_maps[i])

    return landscape_variable_map_names, landscape_variable_maps
    jobs = []
    for topomap in topomaps_subset:
        correction = float(SLcorr_ts[topomaps == topomap])
        print(topomap, correction)
        p = multiprocessing.Process(target=worker, args=(topomap, correction))
        jobs.append(p)
        p.start()
    if p:
        while p.is_alive():
            pass
    n_file_start += n_simultaneous_jobs


# Check which ones were messed up
# g.region -p n=12:00:30n s=12n w=58:10:30w e=58:10w
a = garray.array()
z = []
for topomap in topomaps:
    print(topomap)
    a.read(topomap)
    z.append(float(a))

plt.plot(ages_numeric, z)
plt.show()


a = garray.array()
z = []
for age in ages:
    print(age)
    a.read("topo_" + age)
from matplotlib.colors import Normalize

# run in global database 30as

imshow = True

m = Basemap(width=7000000,height=6500000,
            resolution='l',projection='laea',\
            lat_ts=52,lat_0=52,lon_0=-100.)

grass.run_command('g.region', rast='wb_000000')
nx = grass.region()['cols']
ny = grass.region()['rows']

# Now the actual data
WB_000000 = garray.array()
WB_000000.read("wb_000000", null=np.nan)

WB = garray.array()
WB.read("wb_021000", null=np.nan)

WBdiff = np.flipud(WB - WB_000000) * 1000 # mm/yr


# Colorbar is bipolar:
# http://stackoverflow.com/questions/7404116/defining-the-midpoint-of-a-colormap-in-matplotlib

class myNorm(Normalize):    
  def __init__(self,linthresh,vmin=None,vmax=None,clip=False):
    Normalize.__init__(self,vmin,vmax,clip)
    self.linthresh=linthresh
  topogrid = np.zeros((8400,15600), dtype='float32')
  print "    N-S resizing for GEBCO."
  for i in range(8400):
    topogrid[i,:] = np.average(topogrid_tmp[i:i+2,:], axis=0)
  return topogrid


# Time step and file lists
files = np.array(sorted(glob.glob("topo*.txt")))
ages_numeric = np.loadtxt('ages')

# Start out by gridding at time = 0 (for corrections to modern topography)
#topo_spharm_now = interpolate(files[ages_numeric == 0])
print "Loading modern topography derived from GEBCO_08 (30 arcsecond) and"
print "interpolated etopo1 (60 arcsecond) bedrock surface data"
toponow_temp = garray.array()
toponow_temp.read('toponow')
toponow = toponow_temp.astype('int')
del toponow_temp
print "Generating array to correct spherical harmonic bumpiness"
correction = toponow - interpolate(files[ages_numeric == 0][0])

def worker(name, correction):
  starttime = time.time()
  outgrid = garray.array()
  print(name, ': interpolating.')
  outgrid[...] = interpolate(item) + correction
  print("    Writing numpy array to GRASS GIS.")
  outgrid.write(name)
  print('Deleting outgrid object')  # possibly unnecessary b/c in function, but playing safe.
  del outgrid
Example #52
    def _new_map_horizontal_slope(self, north=None, east=None):
        """
        DESCRIPTION
            Write a new map with an horizontal slope and aspect
        TODO
            compare the results with the non horizontal
        """
        # get the value of the position to be change
#         self.get_values()
 
        try:
            self.rasterslope
            self.rasteraspect
        except AttributeError:
            self.slope_aspect()
 
 
        roundvalue = 4
        value = np.float64(0.00000000000000000001) # can not set 0 ?????
         
        elev_point = np.array(float(self.get_values(self.rasterelev, north=north, east=east)))
        slope_point = np.array(float(self.get_values(self.rasterslope, north=north, east=east)))
        aspect_point = np.array(float(self.get_values(self.rasteraspect, north=north, east=east)))
 
        elev_point =np.round(elev_point,roundvalue)
        slope_point =np.round(slope_point,roundvalue)
        aspect_point =np.round(aspect_point,roundvalue)
 
        elev = garray.array()
        slope = garray.array()
        aspect = garray.array()
 
#         print self.namerasterelev
#         print self.namerasterslope
#         print self.namerasteraspect
#         
        elev.read(self.namerasterelev)
        slope.read(self.namerasterslope)
        aspect.read(self.namerasteraspect)
         
        arrayelev = np.round(elev,roundvalue)
        arrayslope = np.round(slope,roundvalue)
        arrayaspect = np.round(aspect,roundvalue)
 
#         print np.round(elev,roundvalue)
#         print elev_point
#          
#         print np.round(slope,roundvalue)
#         print slope_point
#          
#         print np.round(aspect,roundvalue)
#         print aspect_point
#  
#         print zip(*np.where(arrayelev == elev_point))
#         print zip(*np.where(arrayslope == slope_point))
#         print zip(*np.where(arrayaspect == aspect_point))
 
 
        i_elev = zip(*np.where(arrayelev == elev_point))
        i_slope = zip(*np.where(arrayslope == slope_point))
        i_aspect = zip(*np.where(arrayaspect == aspect_point))
 
        newset = set(i_elev).intersection(i_slope)
        newset = newset.intersection(i_aspect)
 
        index = list(newset)[0]
        index = [index[0], index[1]]
        print(index)
 
        slope[index[0], index[1]] = value
        aspect[index[0], index[1]] = value
 
#         
#         print "="*120
#         print "New valor set"
 
 
        outslope = self.namerasterslope+"_flat"
        outaspect = self.namerasteraspect+"_flat"
         
        self.namerasterslopeflat = outslope
        self.namerasteraspectflat = outaspect
         
        self.rasterslopeflat = self.namerasterslopeflat + "@" + self.env.get('MAPSET')# complete name
        self.rasteraspectflat = self.namerasteraspectflat + "@" + self.env.get('MAPSET')# complete name
 
        slope.write(outslope, overwrite = True)
        aspect.write(outaspect, overwrite = True)
#         
#         print self.get_values(self.rasterslopeflat, north=north, east=east)
#         
         
        print "WRITE THE NEW SLOPE AND ASPECT HORIZONTAL"
Example #53
# Generate output names with ages
ages = []
for item in ages_numeric:
  strage = '%06.2f' %item
  strage = strage[:3] + '_' + strage[4:] + 'k' # Replace decimal with underscore
  ages.append(strage)
ages = np.array(ages)

# Start out by gridding at time = 0 (for corrections to modern topography)
#topo_spharm_now = interpolate(files[ages_numeric == 0])
print "Loading modern topography derived from GEBCO_08 (30 arcsecond) and"
print "interpolated etopo1 bedrock surface data"
toponow = loadmat('../ICE3G_custom_uniform_grid/gebco_plus_etopo/gebco_and_etopo.mat')['map_data'] # This is the one local idiosyncracy I think I have
print "Generating array to correct spherical harmonic bumpiness"
correction = toponow - interpolate(files[ages_numeric == 0][0])

outgrid = garray.array()
for item in files:
  starttime = time.time()
  name = 'topo_' + ages[files == item][0]
  print "***", item, ":", name, "***"
  outgrid[...] = interpolate(item) + correction
  print "    Writing numpy array to GRASS GIS."
  outgrid.write(name)
  print "    ", time.time() - starttime, "seconds elapsed."





Example #54
def main():
    """main function, called at execution time"""

    # parse arguments
    temp_maps = options['temp'].split(',')
    prec_maps = options['prec'].split(',')
    stdv_maps = options['stdv'].split(',')

    # check that we have compatible number of input maps
    ntemp = len(temp_maps)
    nprec = len(prec_maps)
    nstdv = len(stdv_maps)
    if nprec not in (1, ntemp):
        grass.fatal('Got %i prec maps, expected 1 (constant) or %i (as temp)'
                    % (nprec, ntemp))
    if nstdv not in (1, ntemp):
        grass.fatal('Got %i stdv maps, expected 1 (constant) or %i (as temp)'
                    % (nstdv, ntemp))

    # exit if no output is requested
    out_vars = ['pdd', 'accu', 'snow_melt', 'ice_melt', 'melt', 'runoff', 'smb']
    out_maps = {var: options[var] for var in out_vars if options[var] != ''}
    if len(out_maps) == 0:
        grass.fatal('No output required. Please inform at least one of ' +
                    ', '.join(out_vars) + '.')

    # read temperature maps
    grass.info('reading temperature maps...')
    temp = [garray.array() for m in temp_maps]
    for i, m in enumerate(temp_maps):
        temp[i].read(m)
        grass.percent(i, ntemp, 1)
    temp = np.asarray(temp)

    # read precipitation maps
    grass.info('reading precipitation maps...')
    prec = [garray.array() for m in temp_maps]
    for i, m in enumerate(prec_maps):
        prec[i].read(m)
        grass.percent(i, nprec, 1)
    prec = np.asarray(prec)

    # read standard deviation maps
    if stdv_maps != ['']:
        grass.info('reading standard deviation maps...')
        stdv = [garray.array() for m in stdv_maps]
        for i, m in enumerate(stdv_maps):
            stdv[i].read(m)
            grass.percent(i, nstdv, 1)
        stdv = np.asarray(stdv)
    else:
        stdv = 0.0

    # initialize PDD model
    pdd = PDDModel()
    for param in ('pdd_factor_snow', 'pdd_factor_ice',
                  'refreeze_snow', 'refreeze_ice', 'temp_snow', 'temp_rain',
                  'interpolate_n'):
        if options[param]:
            setattr(pdd, param, float(options[param]))
    for param in ('interpolate_rule',):
        if options[param]:
            setattr(pdd, param, str(options[param]))

    # run PDD model
    grass.info('running PDD model...')
    smb = pdd(temp, prec, stdv)

    # write output maps
    grass.info('writing output maps...')
    for varname in ['pdd', 'accu', 'snow_melt', 'ice_melt', 'melt',
                    'runoff', 'smb']:
        if options[varname]:
            a = garray.array()
            a[:] = smb[varname]
            a.write(mapname=options[varname])
#############################
# STARTS TO USE MEMORY HERE #
#############################

###########################
# TOPOGRAPHY / BATHYMETRY #
###########################

# These maps run from 0 to 360
grass.run_command('g.region', n=90, s=-90, w=0, e=360, res='0:0:30')

# Start out by gridding at time = 0 (for corrections to modern topography)
print "Loading modern topography derived from GEBCO_08 (30 arcsecond) and"
print "  interpolated etopo1 (60 arcsecond) bedrock surface data"
toponow_temp = garray.array()
toponow_temp.read('toponow')
toponow = toponow_temp.astype('int')
del toponow_temp
print "Generating array to correct spherical harmonic bumpiness"
try:
  # Write "correction" to location in future versions?
  # Yes -- starting with G12
  correction = toponow - interpolate(files[ages_numeric == 0][0])
except Exception:
  print("Projecting to present -- hard-coded for G12")
  t500 = interpolate(files[0])
  #t600 = interpolate(files[1])
  #diff = t500 - t600
  diff = (toponow - t500)/5.
  #del t600
clscaled = np.linspace(0, 1, len(znew))
cdr = []
cdg = []
cdb = []
for i in range(len(znew)):
  cdr.append([clscaled[i], rnew[i]/255., rnew[i]/255.])
  cdg.append([clscaled[i], gnew[i]/255., gnew[i]/255.])
  cdb.append([clscaled[i], bnew[i]/255., bnew[i]/255.])
cdict = {'red': cdr, 'green': cdg, 'blue': cdb}
cm_etopo2 = LinearSegmentedColormap('etopo2',cdict,4096)
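
# For reference, each cdict entry above is a list of [x, y0, y1] triples in the
# matplotlib LinearSegmentedColormap convention: x is the normalized position, y0 and
# y1 the channel values approached from below and above. A minimal two-colour
# (black-to-white) sketch of the same construction:
from matplotlib.colors import LinearSegmentedColormap
_cdict_demo = {'red':   [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]],
               'green': [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]],
               'blue':  [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]]}
cm_demo = LinearSegmentedColormap('demo_gray', _cdict_demo, N=256)
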

def midpoints(invar):
  return (invar[1:] + invar[:-1]) / 2

grass.run_command('g.region', res='.1')
toponow = garray.array()
toponow.read('toponow')

ages = get_time_steps()

for age in ages:

  grass.run_command('g.region', res='.1')
  reg = grass.region()
  s = reg['s']
  n = reg['n']
  w = reg['w']
  e = reg['e']
  nlats = reg['rows']
  nlons = reg['cols']
  # If a1 too small, can produce bad results
  lut = interp.SmoothSphereBivariateSpline( theta1, phi1, a1, s=int(np.floor(len(theta1)/sdiv)), eps=1E-10)
  data_global = lut(thetaout[:,0], phiout[0,:])
  outarray = garray.array()
  outarray[...] = data_global
  outarray.write(outname, overwrite=True)
  grass.run_command('g.region', n=90, s=-90, w=-180, e=180, res=res)
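
# A self-contained sketch (synthetic data) of the spherical spline step used above:
# scipy's SmoothSphereBivariateSpline expects colatitude theta in [0, pi] and
# longitude phi in [0, 2*pi], and the fitted spline is then evaluated on a regular
# output grid. The smoothing factor s is assumed to be tuned as in the loop above.
def _sphere_spline_sketch():
    import numpy as np
    from scipy import interpolate as interp
    rng = np.random.default_rng(0)
    theta = rng.uniform(0.1, np.pi - 0.1, 200)      # colatitude of scattered samples
    phi = rng.uniform(0.0, 2 * np.pi, 200)          # longitude of scattered samples
    values = np.cos(theta) + 0.1 * rng.standard_normal(200)
    lut = interp.SmoothSphereBivariateSpline(theta, phi, values, s=200.0, eps=1E-10)
    theta_out = np.linspace(0.1, np.pi - 0.1, 90)
    phi_out = np.linspace(0.0, 2 * np.pi, 180)
    return lut(theta_out, phi_out)                  # gridded values, shape (90, 180)
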

# Get ICE-6G files
# First, navigate to ICE-6G folder.
# Then...
files = sorted(glob.glob('*.nc'))
from Scientific.IO.NetCDF import NetCDFFile

grass.run_command('g.region', n=90, s=-90, w=0, e=360, res=1)
outarray = garray.array()
for fname in files:
  ncfile = NetCDFFile(fname)
  H_ice = ncfile.variables['stgit'][:][::-1]
  outarray[...] = H_ice
  age = re.findall(r'\d+', fname)[-1]
  outname = 'ice_raw_'+age
  print(outname)
  outarray.write(outname, overwrite=True)
    
lats = ncfile.variables['lat'][:]
lons = ncfile.variables['lon'][:]
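
# The Scientific.IO.NetCDF module used above is a legacy (Python 2 era) package. A
# minimal sketch of the same read with the maintained netCDF4 package, assuming the
# ICE-6G variable names used above:
def read_ice6g_thickness(fname):
    import netCDF4
    with netCDF4.Dataset(fname) as nc:
        H_ice = nc.variables['stgit'][:][::-1]   # ice thickness, flipped as above
        lats = nc.variables['lat'][:]
        lons = nc.variables['lon'][:]
    return H_ice, lats, lons
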

for fname in files:
  age = re.findall(r'\d+', fname)[-1]
  name = 'ice_raw_'+age
 print(scanName)
 print("***")
 print("")
 # DEM processing
 dem = np.fromfile(DATpath, dtype=np.float32)
 dem = dem.reshape(length_y, length_x)
 dem[dem == -9999] = np.nan
 dem /= 1000. # MM TO M
 # THIS SHOULD BE EXTERNALLY SET: MAX HEIGHT
 #dem[dem > 480] = np.nan # Trim off the tops of the input devices
 demFull = np.flipud(dem)
 dem = dem[margin_bottom:margin_top, margin_left:margin_right]
 dem = np.flipud(dem)
 # DEM import into GRASS GIS
 #try:
 DEMarray = garray.array()
 DEMarray[...] = dem
 DEMarray.write('tmp', overwrite=True)
 # Compute map of null areas
 r.mapcalc(scanNameNULL+' = isnull(tmp)', overwrite=True)
 # DEM null filling
 try:
   r.fillnulls(input='tmp', output=scanNameDEM, method='bilinear', overwrite=False)
 except:
   pass
 r.colors(map=scanNameDEM, color='wave')
 # Shaded relief map
 try:
   r.relief(input=scanNameDEM, output=scanNameShaded, overwrite=False)
 except:
   pass
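
# A minimal sketch (hypothetical margins) of the cropping and flipping done above: the
# raw scan grid is apparently stored bottom-up (hence the np.flipud calls), so the
# margins are trimmed and the grid is flipped to north-up before the GRASS import.
def crop_and_flip(dem, margin_bottom, margin_top, margin_left, margin_right):
    import numpy as np
    cropped = dem[margin_bottom:margin_top, margin_left:margin_right]
    return np.flipud(cropped)   # garray/GRASS expects row 0 at the northern edge
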
Beispiel #59
0
def main():
    dem = options['demraster']
    # hard-coded test raster; this overrides the 'demraster' option above
    dem = 'dem_tinitaly_rocchetta'
    #SDR = options['output']
    weightmap = options['weightmap']


    #region setting
    gregion=grass.region()
    cell_s=gregion['nsres']
    cell_s=float(cell_s)



    # r.slope.aspect
    # elevation = dem_tinitaly_rocchetta @ SDR
    # slope = slope
    rasterTemp = []
    vectTemp = []
    grass.run_command('r.slope.aspect', elevation=dem, slope="slope1", overwrite=True)
    rasterTemp.append('slope1')

    grass.run_command('r.watershed', flags='s', elevation=dem, accumulation='accD8', drainage='drainD8', overwrite=True)
    rasterTemp.append('accD8')
    rasterTemp.append('drainD8')

    grass.run_command('r.slope.aspect', elevation=dem, slope='slope', format='percent', overwrite=True)
    rasterTemp.append('slope')

    # tif_fdir8 coincides with drainD8

    # read drainage direction map
    tif_fdir8_ar = garray.array()
    tif_fdir8_ar.read('drainD8')
    # convert to float (otherwise overflow in later operations)
    tif_fdir8_ar = tif_fdir8_ar.astype(float)
    # r.watershed: negative numbers indicate cells that may receive surface runoff from outside the current geographic region
    tif_fdir8_ar[(tif_fdir8_ar <= 0)] = 0
    ndv = numpy.min(tif_fdir8_ar)
    tif_fdir8_ar[tif_fdir8_ar == ndv] = numpy.nan

    # create constant array to transform into a raster
    const_ar = tif_fdir8_ar * 0 + cell_s

    ### zero matrix bigger than F_dir8, to avoid border indexing problems
    # surround tif_fdir8_ar with a one-cell-wide border of zeros
    Fd8 = numpy.zeros(shape=((tif_fdir8_ar.shape[0]) + 1, (tif_fdir8_ar.shape[1]) + 1), dtype=numpy.float32)
    # populate the matrix
    Fd8[1:Fd8.shape[0], 1:Fd8.shape[1]] = Fd8[1:Fd8.shape[0], 1:Fd8.shape[1]] + tif_fdir8_ar
    # adding bottom row and right y axis with zeros
    Fdir8 = numpy.zeros(shape=((Fd8.shape[0]) + 1, (Fd8.shape[1]) + 1), dtype=numpy.float32)
    Fdir8[:Fdir8.shape[0] - 1, :Fdir8.shape[1] - 1] = Fd8
    ##------------
    # read weight map and slope
    tif_wgt_ar = garray.array()
    # TODO: the weight map should be taken from the 'weightmap' input option
    tif_wgt_ar.read('weight')
    tif_slope = garray.array()
    tif_slope.read('slope')

    tif_slope=tif_slope/100. #converting percentage from r.slope.aspect to value in range 0 - 1

    # impose upper and lower limits on slope; NoData cells are set to -1
    tif_slope[(tif_slope >= 0) & (tif_slope < 0.005)] = 0.005
    tif_slope[(tif_slope > 1)] = 1
    tif_slope[(tif_slope < 0)] = -1

    # impose a value bigger than zero in the weight map
    tif_wgt_ar[tif_wgt_ar==0]=1e-10

    Ws_1 = 1 / (tif_wgt_ar * tif_slope)
    # convert to float (otherwise overflow in later operations)
    Ws_1 = Ws_1.astype(float)
    # r.watershed: negative numbers indicate cells that may receive surface runoff from outside the current geographic region
    # tif_fdir8_ar[(tif_fdir8_ar <= 0)] = 0
    ndv = numpy.min(Ws_1)
    Ws_1[Ws_1 == ndv] = numpy.NaN
    #
    # zero matrix bigger than weight, to avoid border indexing problems, and have same indexing as Fdir8
    Wg = numpy.zeros(shape=((tif_wgt_ar.shape[0]) + 1, (tif_wgt_ar.shape[1]) + 1), dtype=numpy.float32)
    # TODO: replace this variable with Ws_1, i.e. the denominator of Ddn
    Wg[1:Wg.shape[0], 1:Wg.shape[1]] = Wg[1:Wg.shape[0],
                                       1:Wg.shape[1]] + Ws_1  # the weight used to weight the flow length
    # adding bottom row and right y axis with zeros
    Wgt = numpy.zeros(shape=((Wg.shape[0]) + 1, (Wg.shape[1]) + 1), dtype=numpy.float32)
    Wgt[:Wgt.shape[0] - 1, :Wgt.shape[1] - 1] = Wg
    #
    start = time.time()  # for computational time
    # Create a matrix as large as Wgt (and all the other padded matrices) to store the weighted flow length values
    W_Fl = numpy.zeros(shape=((Wgt.shape[0]), (Wgt.shape[1])), dtype=numpy.float32)
    W_Fl = W_Fl - 1  # to give -1 to NoData after the while loop calculation
    #
    # Search algorithm for the weighted flow length
    ND = numpy.where(numpy.isnan(Fdir8))  # coordinates of all the NoData values; start from them and move upstream to compute flow length
    #
    Y = ND[0]  # rows, NoData indexes
    X = ND[1]  # columns, NoData indexes (be careful not to swap them)
    #
    # initializing lists for outlet and moving cell coordinates, in function of their position
    YC1 = []
    YC2 = []
    YC3 = []
    YC4 = []
    YC5 = []
    YC6 = []
    YC7 = []
    YC8 = []
    XC1 = []
    XC2 = []
    XC3 = []
    XC4 = []
    XC5 = []
    XC6 = []
    XC7 = []
    XC8 = []
    #
    #   Flow Directions r.watershed
    #   4   3   2
    #   5   -   1
    #   6   7   8
    #
    #   Draining in Direction Matrix
    #   8   7   6
    #   1   -   5
    #   2   3   4
    #
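    # Illustrative reading of the lookups below (directions as sketched above): for a
    # NoData cell at (Y, X), its western neighbour (Y, X - 1) drains into it when that
    # neighbour's Fdir8 value is 1 (flow towards the east); the south-western neighbour
    # (Y + 1, X - 1) drains into it when its value is 2 (flow towards the north-east),
    # and so on for the remaining directions.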
    i1 = Fdir8[Y, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D1 = numpy.where(i1 == 1)  # l
    YC1.extend(Y[D1])  # coordinates satisfying the conditions
    XC1.extend(X[D1])
    W_Fl[YC1, XC1] = 0  # initialize flow length at cells draining to NoData
    #
    i2 = Fdir8[Y + 1, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D2 = numpy.where(i2 == 2)  # lrad2
    YC2.extend(Y[D2])  # coordinates satisfying the conditions
    XC2.extend(X[D2])
    W_Fl[YC2, XC2] = 0  # initialize flow length at cells draining to NoData
    #
    i3 = Fdir8[Y + 1, X]  # Searching for NoData with cells draining into them, 8 directions
    D3 = numpy.where(i3 == 3)  # l
    YC3.extend(Y[D3])  # coordinates satisfying the conditions
    XC3.extend(X[D3])
    W_Fl[YC3, XC3] = 0  # initialize flow length at cells draining to NoData
    #
    i4 = Fdir8[Y + 1, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D4 = numpy.where(i4 == 4)  # lrad2
    YC4.extend(Y[D4])  # coordinates satisfying the conditions
    XC4.extend(X[D4])
    W_Fl[YC4, XC4] = 0  # initialize flow length at cells draining to NoData
    #
    i5 = Fdir8[Y, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D5 = numpy.where(i5 == 5)  # l
    YC5.extend(Y[D5])  # coordinates satisfying the conditions
    XC5.extend(X[D5])
    W_Fl[YC5, XC5] = 0  # initialize flow length at cells draining to NoData
    #
    i6 = Fdir8[Y - 1, X + 1]  # Searching for NoData with cells draining into them, 8 directions
    D6 = numpy.where(i6 == 6)  # lrad2
    YC6.extend(Y[D6])  # coordinates satisfying the conditions
    XC6.extend(X[D6])
    W_Fl[YC6, XC6] = 0  # initialize flow length at cells draining to NoData
    #
    i7 = Fdir8[Y - 1, X]  # Searching for NoData with cells draining into them, 8 directions
    D7 = numpy.where(i7 == 7)  # l
    YC7.extend(Y[D7])  # coordinates satisfying the conditions
    XC7.extend(X[D7])
    W_Fl[YC7, XC7] = 0  # initialize flow length at cells draining to NoData
    #
    i8 = Fdir8[Y - 1, X - 1]  # Searching for NoData with cells draining into them, 8 directions
    D8 = numpy.where(i8 == 8)  # lrad2
    YC8.extend(Y[D8])  # coordinates satisfying the conditions
    XC8.extend(X[D8])
    W_Fl[YC8, XC8] = 0  # initialize flow length at cells draining to NoData
    #
    #start = time.time()  # to be removed later; only for checking
    count = 1  # the "0" pass was already done in the previous step
    while len(YC1) > 0 or len(YC2) > 0 or len(YC3) > 0 or len(YC4) > 0 or len(YC5) > 0 or len(YC6) > 0 or len(YC7) > 0 or len(YC8) > 0:
        # Converting into array to be able to do operations
        YYC1=numpy.asarray(YC1);XXC1=numpy.asarray(XC1)
        YYC2=numpy.asarray(YC2);XXC2=numpy.asarray(XC2)
        YYC3=numpy.asarray(YC3);XXC3=numpy.asarray(XC3)
        YYC4=numpy.asarray(YC4);XXC4=numpy.asarray(XC4)
        YYC5=numpy.asarray(YC5);XXC5=numpy.asarray(XC5)
        YYC6=numpy.asarray(YC6);XXC6=numpy.asarray(XC6)
        YYC7=numpy.asarray(YC7);XXC7=numpy.asarray(XC7)
        YYC8=numpy.asarray(YC8);XXC8=numpy.asarray(XC8)
        #
        # Now we can operate on the arrays and move towards the receiving cell.
        # Weighted flow length: each step adds the travelled distance (l, or l*sqrt(2) on diagonals)
        # multiplied by the mean of the weights of the two travelled cells:
        #   W_Fl[next] = W_Fl[prev] + cell_s * [sqrt(2)] * (Wgt[prev] + Wgt[next]) / 2
        # Directions are chosen according to Fdir8, stepping from the outlets/NoData towards the ridges;
        # with variable substitution we move one step further and add the previous cell value to the newly calculated one.
        #
        YYC1 = (YYC1);XXC1 = (XXC1 - 1)  # l
        YYC2 = (YYC2 + 1);XXC2 = (XXC2 - 1)  # lrad2
        YYC3 = (YYC3 + 1);XXC3 = (XXC3)  # l
        YYC4 = (YYC4 + 1);XXC4 = (XXC4 + 1)  # lrad2
        YYC5 = (YYC5);XXC5 = (XXC5 + 1)  # l
        YYC6 = (YYC6 - 1);XXC6 = (XXC6 + 1)  # lrad2
        YYC7 = (YYC7 - 1);XXC7 = (XXC7)  # l
        YYC8 = (YYC8 - 1);XXC8 = (XXC8 - 1)  # lrad2
        #
        if count == 1:  # first pass is zero (as in TauDEM); check each of the 8 directions for cells draining into NoData
            if len(YYC1) > 0:
                W_Fl[YYC1, XXC1] = 0
            else:
                pass
            if len(YYC2) > 0:
                W_Fl[YYC2, XXC2] = 0
            else:
                pass
            if len(YYC3) > 0:
                W_Fl[YYC3, XXC3] = 0
            else:
                pass
            if len(YYC4) > 0:
                W_Fl[YYC4, XXC4] = 0
            else:
                pass
            if len(YYC5) > 0:
                W_Fl[YYC5, XXC5] = 0
            else:
                pass
            if len(YYC6) > 0:
                W_Fl[YYC6, XXC6] = 0
            else:
                pass
            if len(YYC7) > 0:
                W_Fl[YYC7, XXC7] = 0
            else:
                pass
            if len(YYC8) > 0:
                W_Fl[YYC8, XXC8] = 0
            else:
                pass
        else:
            W_Fl[YYC1, XXC1] = W_Fl[YC1, XC1] + (cell_s * ((Wgt[YC1, XC1] + Wgt[YYC1, XXC1]) / 2))
            W_Fl[YYC2, XXC2] = W_Fl[YC2, XC2] + (cell_s * math.sqrt(2) * ((Wgt[YC2, XC2] + Wgt[YYC2, XXC2]) / 2))
            W_Fl[YYC3, XXC3] = W_Fl[YC3, XC3] + (cell_s * ((Wgt[YC3, XC3] + Wgt[YYC3, XXC3]) / 2))
            W_Fl[YYC4, XXC4] = W_Fl[YC4, XC4] + (cell_s * math.sqrt(2) * ((Wgt[YC4, XC4] + Wgt[YYC4, XXC4]) / 2))
            W_Fl[YYC5, XXC5] = W_Fl[YC5, XC5] + (cell_s * ((Wgt[YC5, XC5] + Wgt[YYC5, XXC5]) / 2))
            W_Fl[YYC6, XXC6] = W_Fl[YC6, XC6] + (cell_s * math.sqrt(2) * ((Wgt[YC6, XC6] + Wgt[YYC6, XXC6]) / 2))
            W_Fl[YYC7, XXC7] = W_Fl[YC7, XC7] + (cell_s * ((Wgt[YC7, XC7] + Wgt[YYC7, XXC7]) / 2))
            W_Fl[YYC8, XXC8] = W_Fl[YC8, XC8] + (cell_s * math.sqrt(2) * ((Wgt[YC8, XC8] + Wgt[YYC8, XXC8]) / 2))
            #
        #
        # Collect all X and Y of this step and move on upstream (towards the ridges)
        YY = [];XX = []
        YY.extend(YYC1);XX.extend(XXC1)
        YY.extend(YYC2);XX.extend(XXC2)
        YY.extend(YYC3);XX.extend(XXC3)
        YY.extend(YYC4);XX.extend(XXC4)
        YY.extend(YYC5);XX.extend(XXC5)
        YY.extend(YYC6);XX.extend(XXC6)
        YY.extend(YYC7);XX.extend(XXC7)
        YY.extend(YYC8);XX.extend(XXC8)
        #
        YY = numpy.asarray(YY)
        XX = numpy.asarray(XX)
        #
        i1 = Fdir8[YY, XX - 1]  # Searching for cells draining into them, 8 directions
        D1 = numpy.where(i1 == 1)  # l
        YC1 = YY[D1]  # coordinates satisfying the conditions (the travelled length is added to the previous value on the next pass)
        XC1 = XX[D1]
        #
        i2 = Fdir8[YY + 1, XX - 1]  # Searching for cells draining into them, 8 directions
        D2 = numpy.where(i2 == 2)  # lrad2
        YC2 = YY[D2]  # coordinates satisfying the conditions
        XC2 = XX[D2]
        #
        i3 = Fdir8[YY + 1, XX]  # Searching for cells draining into them, 8 directions
        D3 = numpy.where(i3 == 3)  # l
        YC3 = YY[D3]  # coordinates satisfying the conditions
        XC3 = XX[D3]
        #
        i4 = Fdir8[YY + 1, XX + 1]  # Searching for cells draining into them, 8 directions
        D4 = numpy.where(i4 == 4)  # lrad2
        YC4 = YY[D4]  # coordinates satisfying the conditions
        XC4 = XX[D4]
        #
        i5 = Fdir8[YY, XX + 1]  # Searching for cells draining into them, 8 directions
        D5 = numpy.where(i5 == 5)  # l
        YC5 = YY[D5]  # coordinates satisfying the conditions
        XC5 = XX[D5]
        #
        i6 = Fdir8[YY - 1, XX + 1]  # Searching for cells draining into them, 8 directions
        D6 = numpy.where(i6 == 6)  # lrad2
        YC6 = YY[D6]  # coordinates satisfying the conditions
        XC6 = XX[D6]
        #
        i7 = Fdir8[YY - 1, XX]  # Searching for cells draining into them, 8 directions
        D7 = numpy.where(i7 == 7)  # l
        YC7 = YY[D7]  # coordinates satisfying the conditions
        XC7 = XX[D7]
        #
        i8 = Fdir8[YY - 1, XX - 1]  # Searching for cells draining into them, 8 directions
        D8 = numpy.where(i8 == 8)  # lrad2
        YC8 = YY[D8]  # coordinates satisfying the conditions
        XC8 = XX[D8]
        count = count + 1
    #
    elapsed = (time.time() - start)  # computational time
    print(time.strftime("%d/%m/%Y %H:%M:%S    "), "Process concluded successfully\n",
          "%.2f" % elapsed, 'seconds for Weighted-Flow Length calculation with', int(count), 'iterations')

    W_fl = W_Fl[1:W_Fl.shape[0] - 1, 1:W_Fl.shape[1] - 1]  # strip the zero border to restore the original matrix dimensions
    del W_Fl
    # set zero values to 1 to avoid division by zero
    D_down_ar = garray.array()
    W_fl[W_fl == 0] = 1
    D_down_ar[...] = W_fl
    del W_fl
    D_down_ar.write('w_flow_length',null=numpy.nan,overwrite=True)

    """
    --------------------------------------
    WORKING ON D_UP COMPONENT
    --------------------------------------
    """

    grass.run_command('r.watershed', elevation=dem, accumulation='accMDF', convergence=5, memory=300)
    rasterTemp.append('accMDF')

    tif_dtmsca = garray.array()
    tif_dtmsca.read('accMDF')

    tif_dtmsca = abs(tif_dtmsca)*cell_s

    acc_final_ar = tif_dtmsca / const_ar

    grass.run_command('r.watershed', elevation=dem, flow='weight', accumulation='accW', convergence=5, memory=300)
    rasterTemp.append('accW')

    acc_W_ar = garray.array()
    acc_W_ar.read('accW')


    grass.run_command('r.watershed', elevation=dem, flow='slope', accumulation='accS', convergence=5, memory=300)
    rasterTemp.append('accS')

    acc_S_ar = garray.array()
    acc_S_ar.read('accS')

    # Computing C_mean as (accW + weight) / acc_final
    C_mean_ar = (acc_W_ar + tif_wgt_ar) / acc_final_ar
    del (acc_W_ar)  # free memory
    #
    # Computing S_mean as (accS + slope) / acc_final
    S_mean_ar = (acc_S_ar + tif_slope) / acc_final_ar
    del (acc_S_ar, tif_fdir8_ar)  # free memory
    #
    # Computing D_up as "%cmean.tif%" * "%smean.tif%" * SquareRoot("%ACCfinal.tif%" * "%resolution.tif%" * "%resolution.tif%")
    cell_area = (const_ar) ** 2  # change of variables, to be sure
    D_up_ar = C_mean_ar * S_mean_ar * numpy.sqrt(acc_final_ar * cell_area)  # to transform from unit values to square units
    #
    # Computing Connectivity index
    ic_ar = numpy.log10(D_up_ar / D_down_ar)

    SDRmax = 0.8; IC0 = 0.5; k = 1
    SDRmap = SDRmax / (1 + numpy.exp((IC0 - ic_ar) / k))
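
# A minimal standalone sketch of the two closing formulas above (index of connectivity
# and sediment delivery ratio), assuming the usual grouping (IC0 - IC) / k in the
# logistic term:
def sdr_from_components(D_up, D_down, SDRmax=0.8, IC0=0.5, k=1.0):
    import numpy as np
    ic = np.log10(D_up / D_down)                      # index of connectivity
    return SDRmax / (1.0 + np.exp((IC0 - ic) / k))    # sediment delivery ratio
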
Beispiel #60
0
def main(image, gisenv, gisrc):
    os.environ['GISRC'] = gisrc
    path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'], gisenv['MAPSET'], 'group', image)
    path_to_points = os.path.join(path, 'POINTS')

    # setup target environment and switch to it
    path_to_TARGET = os.path.join(path, 'TARGET')
    with open(path_to_TARGET, 'r') as f:
        target = f.readlines()
        target_location = target[0]
        target_mapset = target[1]
    target_gisrc, tenv = getEnvironment(gisenv['GISDBASE'], target_location, target_mapset)

    im = io.imread('{}.jpg'.format(image))
    dst = []
    src = []
    with open(path_to_points) as f:
        for line in f.readlines():
            if line.startswith('#'):
                continue
            dstx, dsty, srcx, srcy, ok = line.split()
            if int(ok):
                dst.append((float(dstx), float(dsty)))
                src.append((float(srcx), float(srcy)))
    dst = np.array(dst)
    src = np.array(src)
    # src2 = copy.copy(src)
    dst[:, 1] = im.shape[0] - dst[:, 1]

    centerx, centery = np.min(src[:, 0]), np.min(src[:, 1])
    src[:, 0] -= centerx
    src[:, 1] -= centery
    revers = 400
    src[:, 1] = revers - src[:, 1]

    tform3 = tf.ProjectiveTransform()
    tform3.estimate(src, dst)
    warped = tf.warp(im, tform3)

    os.environ['GISRC'] = target_gisrc
    gscript.run_command('g.region', w=centerx, e=centerx + im.shape[1],
                        n=centery + revers, s=centery - im.shape[0] + revers, res=1)

    name = 'rectified_{}'.format(image)
    for num, color in zip([0, 1, 2], 'rgb'):
        rectified = garray.array()
        for y in range(rectified.shape[0]):
            for x in range(rectified.shape[1]):
                rectified[y, x] = round(255 * warped[y, x, num])
        rectified.write(mapname=name + '_' + color, overwrite=True)
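        # Note: the per-pixel loops above fill the garray one cell at a time; an
        # equivalent vectorized assignment (untested sketch) would be
        #   rectified[...] = np.round(255 * warped[:, :, num])
        # executed once per colour band inside the loop over num/color.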
    gscript.run_command('r.colors', map=[name + '_r', name + '_g', name + '_b'], color='grey')
    gscript.run_command('r.composite', red=name + '_r', green=name + '_g', blue=name + '_b',
                        output=name, overwrite=True)
    gscript.run_command('g.remove', type='raster', pattern=name + "_*", flags='f')

    os.environ['GISRC'] = gisrc
    # indicatrix
    print(indicatrix(raster=image, size=5))

    # rectify points
    H = inv(tform3.params)
    # remove the 'camera_' prefix (str.strip would drop any of those characters from both ends)
    name = image[len('camera_'):] if image.startswith('camera_') else image
    os.environ['GISRC'] = gisrc
    vectors = gscript.read_command('g.list', type='vector', pattern="*{}*".format(name),
                                   exclude='*indicatrix').strip().splitlines()
    for vector in vectors:
        os.environ['GISRC'] = gisrc
        points = gscript.read_command('v.out.ascii', input=vector, columns='*').strip()
        new = []
        for record in points.splitlines():
            point = record.split('|')
            xx, yy = float(point[0]), float(point[1])
            yy = im.shape[0] - yy
            Z = xx * H[2, 0] + yy * H[2, 1] + H[2, 2]
            X = (xx * H[0, 0] + yy * H[0, 1] + H[0, 2]) / Z
            Y = (xx * H[1, 0] + yy * H[1, 1] + H[1, 2]) / Z
            X += centerx
            Y = revers - Y + centery
            # Y =  -Y + centery
            new.append([point[3], X, Y, point[2]])
            new[-1].extend(point[4:])
            new[-1] = '|'.join([str(each) for each in new[-1]])
        os.environ['GISRC'] = target_gisrc

        gscript.write_command('v.in.ascii', input='-', flags='z',
                              output='points_{}'.format(vector), overwrite=True, stdin='\n'.join(new),
                              columns="cat integer,x double precision,y double precision,height double precision,"
                                      "date varchar(50),time varchar(50),hour integer,"
                                      "minutes integer,url varchar(500),url2 varchar(500)",
                              x=2, y=3, z=4, cat=1)

    os.environ['GISRC'] = gisrc
    vectors = gscript.read_command('g.list', type='vector', pattern="*{}*indicatrix".format(name)).strip().splitlines()

    for vector in vectors:
        os.environ['GISRC'] = gisrc
        lines = gscript.read_command('v.out.ascii', input=vector, format='standard').strip()
        new = []
        for record in lines.splitlines():
            first = record.strip().split()[0].strip()
            try:
                float(first)
            except ValueError:
                new.append(record)
                continue
            if first == '1':
                new.append(record)
                continue
            xx, yy = record.strip().split()
            xx, yy = float(xx), float(yy)
            yy = im.shape[0] - yy
            Z = xx * H[2, 0] + yy * H[2, 1] + H[2, 2]
            X = (xx * H[0, 0] + yy * H[0, 1] + H[0, 2]) / Z
            Y = (xx * H[1, 0] + yy * H[1, 1] + H[1, 2]) / Z
            X += centerx
            Y = revers - Y + centery
            # Y =  -Y + centery
            new.append('{} {}'.format(X, Y))
        os.environ['GISRC'] = target_gisrc
        gscript.write_command('v.in.ascii', input='-', output=vector, format='standard', overwrite=True,
                              stdin='\n'.join(new))
        gscript.run_command('v.generalize', overwrite=True, input=vector, type='line',
                            output=vector + 'tmp', method='snakes', threshold=10)
        gscript.run_command('g.rename', vector=[vector + 'tmp', vector], overwrite=True)

    gscript.try_remove(target_gisrc)
    return
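
# A minimal sketch (hypothetical points) of the manual homography application used in
# the two rectification loops above: a 3x3 matrix H maps homogeneous pixel coordinates
# (x, y, 1) to (X*Z, Y*Z, Z), and dividing by Z gives the rectified coordinates.
def apply_homography(H, points):
    import numpy as np
    pts = np.asarray(points, dtype=float)      # shape (n, 2)
    ones = np.ones((pts.shape[0], 1))
    homog = np.hstack([pts, ones]) @ H.T       # shape (n, 3): rows are (X*Z, Y*Z, Z)
    return homog[:, :2] / homog[:, 2:3]        # divide by Z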