Code example #1
def _generate_harmonics(time_names, freq, prefix):
    """Generate (constant) rasters of harmonics
        t0, t1, t2, ..., t_{N-1})
        where t0 = 0, t_{N-1} = 2*pi

    freq -- frequencies for sin() and cos() rasters
    prefix -- prefix for the raster names

    return list of names for the result rasters
    """
    names = dict()
    for i in time_names:
        harm = dict()
        t = get_time(len(time_names), i)
        for f in freq:
            sin_output = prefix + 'sin' + _time_to_name(i) + _freq_to_name(f)
            grass.mapcalc("${out} = sin(${f} * ${t})", out=sin_output, t=t, f=f,
                          quiet=True, overwrite=grass.overwrite())

            cos_output = prefix + 'cos' + _time_to_name(i) + _freq_to_name(f)
            grass.mapcalc("${out} = cos(${f} * ${t})", out=cos_output, t=t, f=f,
                          quiet=True, overwrite=grass.overwrite())
            harm[f] = dict(sin=sin_output, cos=cos_output)

        names[i] = harm

    return names
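The helpers get_time, _time_to_name and _freq_to_name are referenced but not shown. A minimal sketch of what they could look like, assuming N time steps evenly spaced over [0, 2*pi] as the docstring of example #27 below states (the name formats are our guesses, not taken from the original module):

import math

def get_time(N, i, deg=False):
    # i-th of N samples evenly spaced over [0, 2*pi]; degrees if deg=True
    t = 2.0 * math.pi * i / (N - 1)
    return math.degrees(t) if deg else t

def _time_to_name(i):
    return "_t%02d" % i                     # e.g. "_t03"

def _freq_to_name(f):
    return "_f" + str(f).replace(".", "_")  # e.g. "_f0_5"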
Code example #2
File: r.friction.py Project: dongjwOU/GRASS
def main():
    # User inputs
    dem = options['input']          # Elevation map
    out = options['friction']       # Output friction surface
    slope = options['slope']        # Optional output slope
    formula = options['formula']    # Formula

    # Error if the input elevation map is not found
    if not grass.find_file(dem)['name']:
        grass.message(_("Input raster <%s> not found") % dem)
        sys.exit()

    # If output raster exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(out)['name']:
            grass.message(_("Output raster map <%s> already exists") % out)
            sys.exit()

    # Check if a slope output name is specified; give an error message if it
    # would be overwritten while the overwrite option isn't selected
    if not slope:
        slope = "tmp_slope_%d_" % os.getpid()
    else:
        if not grass.overwrite():
            if grass.find_file(slope)['name']:
                grass.message(_("Output raster map <%s> already exists") % slope)
                sys.exit()

    # Generate slope (in percent)
    grass.run_command('r.slope.aspect', elevation=dem, slope=slope, format='percent')

    # Choose formula
    if formula == 'Hiker':
        # So-called "Hiker's formula"
        # Tobler W (1993) Three Presentations on Geographical Analysis and Modeling. Technical Report 93-1. California
        # Gorenflo LJ and Gale N (1990) Mapping Regional Settlement in Information Space. Journal of Anthropological Archaeology (9): 240 - 274
        expression = "$outmap = 1.0 / (( 6.0 * exp(-3.5 * abs(( $slope / 100) + 0.05 ))) * 1000)"
    elif formula == 'Minetti':
        # This formula is presented by I. Herzog, who based it on the physiological data of Minetti.
        # Herzog, I. In press. "Theory and Practice of Cost Functions." In Fusion  of  Cultures.  Proceedings  of  the
        # XXXVIII  Conference  on  Computer  Applications  and Quantitative Methods in Archaeology, CAA 2010.
        expression = "$outmap = 1337.8 * ($slope / 100)^6 + 278.19 * ($slope / 100)^5 - 517.39 * ($slope / 100)^4 - 78.199 * ($slope / 100)^3 + 93.419 * ($slope / 100)^2 + 19.825 * ($slope / 100) + 1.64"
    else:
        grass.message("No valid formula chosen")
        sys.exit()

    # Create friction surface
    try:
        grass.mapcalc(expression, outmap = out, slope = slope)
        grass.message("Friction raster <" + out + "> complete")
    except Exception:
        grass.run_command('g.remove', rast = slope)
        grass.message("Unable to finish operation. Temporary slope raster deleted.")
        sys.exit()

    # Delete temporary slope map if necessary
    if not options['slope']:
        grass.run_command('g.remove', rast = slope)

    grass.message("All done")
Code example #3
File: t.rast.series.py Project: rashadkm/grass_cmake
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        with open(filename, 'w') as map_file:
            for row in rows:
                map_file.write("%s\n" % row["id"])

        flag = ""
        if len(rows) > 1000:
            grass.warning(_("Processing over 1000 maps: activating -z flag of r.series which slows down processing"))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series", flags=flag, file=filename,
                              output=output, overwrite=grass.overwrite(),
                              method=method)
        except CalledModuleError:
            grass.fatal(_("%s failed. Check above error messages.") % 'r.series')

        if not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()
            map.set_temporal_extent(sp.get_temporal_extent())

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
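The temporary file handed to r.series via file= simply lists one map name per line; using a file instead of input= avoids overly long command lines for big time series, and the -z flag added above for runs of more than 1000 maps keeps r.series from holding all input maps open at once (hence the slowdown warning). For illustration, with invented map names the file contents would look like:

temp_2010_01@climate
temp_2010_02@climate
temp_2010_03@climate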
Code example #4
File: r.in.wcs.py Project: martinzbinden/ch.eros
    def _initializeParameters(self, options, flags):
        '''
        Initialize all given and needed parameters. Get region information and
        calculate boundingbox according to it

        '''
        self._debug("_initializeParameters", "started")

        self._env = os.environ.copy()
        self._env['GRASS_MESSAGE_FORMAT'] = 'gui'
        g.gisenv(set="GRASS_MESSAGE_FORMAT=gui")

        for key in ['url', 'coverage','output','location']:
            self.params[key] = options[key].strip()

        if not self.params['output']:
            self.params['output'] = self.params['coverage']
            if not gscript.overwrite():
                result = gscript.find_file(name=self.params['output'], element='cell')
                if result['file']:
                    gscript.fatal("Raster map <%s> already exists. Choose another output name or use the --o flag." % self.params['output'])

        for key in ['password', 'username', 'version','region']:
            self.params[key] = options[key]

        # check if authentication information is complete
        if (self.params['password'] and self.params['username'] == '') or \
           (self.params['password'] == '' and self.params['username']):
            gscript.fatal(_("Please provide both %s and %s parameters or none of them.") % ('password', 'username'))


        # configure region extent (specified name or current region)
        self.params['region'] = self._getRegionParams(options['region'])
        self.params['boundingbox'] = self._computeBbox(self.params['region'])
        self._debug("_initializeParameters", "finished")
Code example #5
File: extract.py Project: rashadkm/grass_cmake
from grass.exceptions import CalledModuleError
import grass.script as gscript


def run_mapcalc3d(expr):
    """Helper function to run r3.mapcalc in parallel"""
    try:
        gscript.run_command("r3.mapcalc", expression=expr,
                            overwrite=gscript.overwrite(), quiet=True)
    except CalledModuleError:
        exit(1)
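Helpers like this one are typically handed to a process pool by the calling module; a minimal sketch of such a caller (the expressions are made up for illustration):

from multiprocessing import Pool

# Hypothetical r3.mapcalc expressions; the real module builds them
# from the maps registered in the space time dataset
expressions = ["new_a = old_a + 1", "new_b = old_b + 1"]

pool = Pool(processes=2)
pool.map(run_mapcalc3d, expressions)
pool.close()
pool.join()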
Code example #6
def main():

    # process command options
    input = options['input']
    if not gs.find_file(input)['file']:
        gs.fatal(_("Raster map <%s> not found") % input)

    output = options['output']
    if gs.find_file(output)['file'] and not gs.overwrite():
        gs.fatal(_("Output map <%s> already exists") % output)

    # set aside region for internal use
    gs.use_temp_region()

    # subset input if desired
    region = options.get('region')
    if region:
        if not gs.find_file(region)['file']:
            gs.fatal(_("Raster map <%s> not found") % region)
        gs.message("Setting region to %s" % region, flag='i')
        gs.run_command('g.region', rast=region, align=input)
    else:
        gs.message("Using existing GRASS region", flag='i')
    gs.debug('='*50)
    gs.debug('\n'.join(gs.parse_command('g.region', flags='p').keys()))
    gs.debug('='*50)

    calculate_noise(input, output)

    # restore original region
    gs.del_temp_region()

    return None
Code example #7
File: t.rename.py Project: rashadkm/grass_cmake
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    #Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    if input.find("@") >= 0:
        old_id = input
    else:
        old_id = input + "@" + mapset

    if output.find("@") >= 0:
        new_id = output
    else:
        new_id = output + "@" + mapset
        
    # Do not overwrite yourself
    if new_id == old_id:
        return
        

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.dataset_factory(type, old_id)

    if new_id.split("@")[1] != mapset:
        grass.fatal(_("Space time %s dataset <%s> can not be renamed. "
                      "Mapset of the new identifier differs from the current "
                      "mapset.") % (stds.get_new_map_instance(None).get_type(), 
                                    old_id))
        
    if not stds.is_in_db(dbif=dbif):
        dbif.close()
        grass.fatal(_("Space time %s dataset <%s> not found") % (
            stds.get_new_map_instance(None).get_type(), old_id))

    # Check if the new id is in the database
    new_stds = tgis.dataset_factory(type, new_id)

    if new_stds.is_in_db(dbif=dbif) and not grass.overwrite():
        dbif.close()
        grass.fatal(_("Unable to rename Space time %s dataset <%s>. Name <%s> "
                      "is in use, please use the overwrite flag.") % (
            stds.get_new_map_instance(None).get_type(), old_id, new_id))
    
    # Remove an already existing space time dataset
    if new_stds.is_in_db(dbif=dbif):
        new_stds.delete(dbif=dbif)
        
    stds.select(dbif=dbif)
    stds.rename(ident=new_id, dbif=dbif)
    stds.update_command_string(dbif=dbif)
Code example #8
def main():
    """
    Compute cell areas
    """
    
    options, flags = grass.parser()
    output = options['output']
    units = options['units']

    # Query the projection units only after the parser has run
    projinfo = grass.parse_command('g.proj', flags='g')
    
    # First check if output exists
    if len(grass.parse_command('g.list', type='rast', 
                               pattern=options['output'])):
        if not grass.overwrite():
            grass.fatal("Raster map '" + options['output'] + 
                        "' already exists. Use '--o' to overwrite.")

    # Then compute
    if projinfo['units'] == 'meters':
        if units == 'm2':
            grass.mapcalc(output+' = nsres() * ewres()')
        elif units == 'km2':
            grass.mapcalc(output+' = nsres() * ewres() / 10.^6')
    elif projinfo['units'] == 'degrees':
        if units == 'm2':
            grass.mapcalc(output+' = ( 111195. * nsres() ) * \
                          ( ewres() * '+str(np.pi/180.)+' * 6371000. * cos(y()) )')
        elif units == 'km2':
            grass.mapcalc(output+' = ( 111.195 * nsres() ) * \
                          ( ewres() * '+str(np.pi/180.)+' * 6371. * cos(y()) )')
    else:
        print 'Units: ' + projinfo['units'] + ' not currently supported'
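As a sanity check on the constants in the degree branch: 111195 is the length of one degree of latitude in metres on the sphere of radius 6371 km that the expressions assume, and cos(y()) scales the east-west extent by latitude:

import math

R = 6371000.              # mean Earth radius in metres, as used above
print R * math.pi / 180.  # 111194.92..., rounded to the 111195. constant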
Code example #9
File: r.blend.py Project: caomw/grass
def main():
    first = options['first']
    second = options['second']
    output = options['output_prefix']
    percent = options['percent']

    mapset = grass.gisenv()['MAPSET']

    if not grass.overwrite():
        for ch in ['r', 'g', 'b']:
            map = '%s.%s' % (output, ch)
            if grass.find_file(map, element='cell', mapset=mapset)['file']:
                grass.fatal(_("Raster map <%s> already exists.") % map)

    percent = float(percent)
    perc_inv = 100.0 - percent

    frac1 = percent / 100.0
    frac2 = perc_inv / 100.0

    grass.message(_("Calculating the three component maps..."))

    template = string.Template("$$output.$ch = if(isnull($$first), $ch#$$second, if(isnull($$second), $ch#$$first, $$frac1 * $ch#$$first + $$frac2 * $ch#$$second))")
    cmd = [template.substitute(ch=ch) for ch in ['r', 'g', 'b']]
    cmd = ';'.join(cmd)

    grass.mapcalc(cmd,
                  output=output,
                  first=first, second=second,
                  frac1=frac1, frac2=frac2)

    for ch in ['r', 'g', 'b']:
        map = "%s.%s" % (output, ch)
        grass.run_command('r.colors', map=map, color='grey255')
        grass.run_command('r.support', map=map, history="",
                          title="Color blend of %s and %s" % (first, second),
                          description="generated by r.blend")
        grass.run_command('r.support', map=map,
                          history="r.blend %s channel." % ch)
        grass.run_command('r.support', map=map,
                          history="  %d%% of %s, %d%% of %s" % (percent, first, perc_inv, second))
        grass.run_command('r.support', map=map, history="")
        grass.run_command('r.support', map=map, history=os.environ['CMDLINE'])

    if flags['c']:
        grass.run_command('r.composite', r='%s.r' % output,
                          g='%s.g' % output, b='%s.b' % output, output=output)

        grass.run_command('r.support', map=output, history="",
                          title="Color blend of %s and %s" % (first, second),
                          description="generated by r.blend")
        grass.run_command('r.support', map=output,
                          history="  %d%% of %s, %d%% of %s" % (percent, first, perc_inv, second))
        grass.run_command('r.support', map=output, history="")
        grass.run_command('r.support', map=output, history=os.environ['CMDLINE'])
    else:
        grass.message(_("Done. Use the following command to visualize the result:"))
        grass.message(_("d.rgb r=%s.r g=%s.g b=%s.b") % (output, output, output))
Code example #10
File: extract.py Project: rashadkm/grass_cmake
def run_vector_extraction(input, output, layer, type, where):
    """Helper function to run r.mapcalc in parallel"""
    try:
        gscript.run_command("v.extract", input=input, output=output,
                            layer=layer, type=type, where=where,
                            overwrite=gscript.overwrite(), quiet=True)
    except CalledModuleError:
        exit(1)
Code example #11
File: stds_import.py Project: rashadkm/grass_cmake
def _import_raster_maps_from_gdal(maplist, overr, exp, location, link, format_,
                                  set_current_region=False):
    impflags = ""
    if overr:
        impflags += "o"
    if exp or location:
        impflags += "e"
    for row in maplist:
        name = row["name"]
        # Compute the import flags per map so the "o" flag added for AAIGrid
        # (which carries no projection information) does not accumulate
        # across the rows of maplist
        rowflags = impflags
        if format_ == "GTiff":
            filename = row["filename"] + ".tif"
        elif format_ == "AAIGrid":
            filename = row["filename"] + ".asc"
            if not overr:
                rowflags += "o"

        try:
            if link:
                gscript.run_command("r.external", input=filename,
                                    output=name,
                                    flags=rowflags,
                                    overwrite=gscript.overwrite())
            else:
                gscript.run_command("r.in.gdal", input=filename,
                                    output=name,
                                    flags=rowflags,
                                    overwrite=gscript.overwrite())

        except CalledModuleError:
            gscript.fatal(_("Unable to import/link raster map <%s> from file"
                            " %s.") % (name, filename))

        # Set the color rules if present
        filename = row["filename"] + ".color"
        if os.path.isfile(filename):
            try:
                gscript.run_command("r.colors", map=name,
                                    rules=filename,
                                    overwrite=gscript.overwrite())
            except CalledModuleError:
                gscript.fatal(_("Unable to set the color rules for "
                                "raster map <%s>.") % name)

    # Set the computational region from the last map imported
    if set_current_region is True:
        gscript.run_command("g.region", raster=name)
Code example #12
File: r.mask.py Project: AsherBond/MondocosmOS
def main():
    input = options['input']
    maskcats = options['maskcats']
    remove = flags['r']
    invert = flags['i']

    if not remove and not input:
        grass.fatal(_("Required parameter <input> not set"))

    # check if input file exists
    if not grass.find_file(input)['file'] and not remove:
        grass.fatal(_("<%s> does not exist.") % input)

    if 'MASKCATS' not in grass.gisenv() and not remove:
        ## beware: next check is made with != , not with 'is', otherwise:
        #>>> grass.raster_info("basin_50K")['datatype'] is "CELL"
        #False
        # even if:
        #>>> "CELL" is "CELL"
        #True
        if grass.raster_info(input)['datatype'] != "CELL":
            grass.fatal(_("Raster map %s must be integer for maskcats parameter") % input)

    mapset = grass.gisenv()['MAPSET']
    exists = bool(grass.find_file('MASK', element='cell', mapset=mapset)['file'])

    if remove:
        if exists:
            grass.run_command('g.remove', rast='MASK')
            grass.message(_("Raster MASK removed"))
        else:
            grass.fatal(_("No existing MASK to remove"))
    else:
        if exists:
            if not grass.overwrite():
                grass.fatal(_("MASK already found in current mapset. Delete first or overwrite."))
            else:
                grass.warning(_("MASK already exists and will be overwritten"))

        p = grass.feed_command('r.reclass', input=input, output='MASK', overwrite=True, rules='-')
        p.stdin.write("%s = 1" % maskcats)
        p.stdin.close()
        p.wait()

        if invert:
            global tmp
            tmp = "r_mask_%d" % os.getpid()
            grass.run_command('g.rename', rast=('MASK', tmp), quiet=True)
            grass.mapcalc("MASK=if(isnull($tmp),1,null())", tmp=tmp)
            grass.run_command('g.remove', rast=tmp, quiet=True)
            grass.message(_("Inverted MASK created."))
        else:
            grass.message(_("MASK created."))

        grass.message(_("All subsequent raster operations will be limited to MASK area. ") +
                      "Removing or renaming raster file named MASK will " +
                      "restore raster operations to normal")
Code example #13
File: t.create.py Project: AsherBond/MondocosmOS
def main():

    # Get the options
    name = options["dataset"]
    type = options["type"]
    temporaltype = options["temporaltype"]
    title = options["title"]
    descr = options["description"]
    semantic = options["semantictype"]
    gran = options["granularity"]

    # Make sure the temporal database exists
    tgis.create_temporal_database()

    #Get the current mapset to create the id of the space time dataset

    mapset = grass.gisenv()["MAPSET"]
    id = name + "@" + mapset

    if type == "strds":
        sp = tgis.space_time_raster_dataset(id)
    if type == "str3ds":
        sp = tgis.space_time_raster3d_dataset(id)
    if type == "stvds":
        sp = tgis.space_time_vector_dataset(id)

    dbif = tgis.sql_database_interface()
    dbif.connect()

    if sp.is_in_db(dbif) and not grass.overwrite():
        grass.fatal("Space time " + sp.get_new_map_instance(None).get_type() + " dataset <" + name + "> is already in the database. Use the overwrite flag.")

    if sp.is_in_db(dbif) and grass.overwrite():
        grass.info("Overwrite space time " + sp.get_new_map_instance(None).get_type() + " dataset <" + name + "> and unregister all maps.")
        sp.delete(dbif)
        sp = sp.get_new_instance(id)

    grass.info("Create space time " + sp.get_new_map_instance(None).get_type() + " dataset.")

    sp.set_initial_values(granularity=gran, temporal_type=temporaltype, semantic_type=semantic, title=title, description=descr)
    sp.insert(dbif)

    dbif.close()
Code example #14
File: t.rast.series.py Project: caomw/grass
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        with open(filename, 'w') as map_file:
            for row in rows:
                map_file.write("%s\n" % row["id"])

        flag = "z"
        if nulls:
            flag += "n"

        ret = grass.run_command("r.series", flags=flag, file=filename,
                                output=output, overwrite=grass.overwrite(),
                                method=method)

        if ret == 0 and not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()
            map.set_temporal_extent(sp.get_temporal_extent())

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
Code example #15
File: r.sun.hourlyPMR.py Project: NMTHydro/GADGET
def registerToTemporal(basename, suffixes, mapset, start_time,
                       time_step, title, desc):
    maps = ','.join([basename + suf + '@' + mapset for suf in suffixes])
    tgis.open_new_stds(basename, type='strds',
                       temporaltype='absolute',
                       title=title, descr=desc,
                       semantic='mean', dbif=None,
                       overwrite=grass.overwrite())
    tgis.register_maps_in_space_time_dataset(
        type='raster', name=basename, maps=maps, start=start_time,
        end=None, increment=time_step, dbif=None, interval=False)
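A hypothetical call, assuming tgis.init() has already been run (the basename, mapset, start time and step are invented for illustration):

suffixes = ['_%02d' % h for h in range(24)]  # maps irrad_00 ... irrad_23
registerToTemporal('irrad', suffixes, 'user1',
                   '2017-06-01 00:00:00', '1 hours',
                   title='Hourly irradiation',
                   desc='output of r.sun.hourlyPMR')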
Code example #16
File: r.terrainanalysis.py Project: haav/GRASS
def main():
    # Input data
    inraster = options["input"]
    outraster = options["output"]
    nsize = options["nsize"]
    statistic = options["statistic"]
    circular = "c" if flags["c"] else ""

    # Get process id
    pid = os.getpid()

    # Check overwrite settings
    # If output raster file exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(outraster)["name"]:
            grass.message(_("Output raster map <%s> already exists") % outraster)
            sys.exit()

    # Choose the statistic
    if statistic == "Deviation from Mean":

        # First, get neighbourhood mean rasters
        tmp_avg = "tmp_avg_%d" % pid  # Create a temporary filename
        tmp_rlayers.append(tmp_avg)
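        # grass.start_command() returns a subprocess.Popen handle without
        # blocking, which lets this r.neighbors run and the stddev run below
        # execute in parallel (tmp_rlayers is assumed to be a module-level
        # list used for cleanup elsewhere in the script)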
        proc_avg = grass.start_command(
            "r.neighbors", overwrite=True, flags=circular, input=inraster, output=tmp_avg, size=nsize
        )

        # Get neighbourhood standard deviation rasters
        tmp_stddev = "tmp_stddev_%d" % pid  # Create a temporary filename
        tmp_rlayers.append(tmp_stddev)
        proc_stddev = grass.start_command(
            "r.neighbors",
            overwrite=True,
            flags=circular,
            method="stddev",
            input=inraster,
            output=tmp_stddev,
            size=nsize,
        )

        # Wait for the processes to finish
        proc_avg.wait()
        proc_stddev.wait()

        # Calculate Deviation from Mean
        grass.mapcalc(
            "$outmap = ($inraster - $avgraster) / $stddevraster",
            outmap=outraster,
            inraster=inraster,
            avgraster=tmp_avg,
            stddevraster=tmp_stddev,
        )
Code example #17
def main(options, flags):
    samples = options['samples']
    res_pref = options['result_prefix']
    if not os.path.isfile(samples):
        sys.stderr.write("File '%s' doesn't exist.\n" % (samples, ))
        sys.exit(1)

    headers, outputs, inputs = get_sample_names(samples)

    model = DataModel(headers, outputs, inputs, res_pref)
    model.ols(overwrite=grass.overwrite())
    sys.exit(0)
Code example #18
import grass.script as grass
from grass.pygrass.errors import OpenError


def open_rasters(raster_list, write=False):
    for r in raster_list:
        try:
            if write:
                if r.exist():
                    r.open("w", "DCELL", overwrite=grass.overwrite())
                else:
                    r.open("w", "DCELL")
            else:
                r.open()
        except OpenError:
            grass.fatal("Can't open raster %s" % (r.name,))
Code example #19
File: r.in.lisflood.py Project: lrntct/r.in.lisflood
def main():
    # start messenger
    msgr = Messenger()

    # Use temporary GRASS region
    grass.use_temp_region()

    # reads CLI options
    rast_n_file_name = options['friction']
    rast_dem_name = options['dem']
    rast_start_file_name = options['start_file']
    rast_bc_name = options['bc']
    rast_bcval_name = options['bcval']
    rast_user_name = options['user_inflow']

    # Load *.par file
    par = Par(msgr, options['par_file'])
    par.read()

    # Write DEM file
    par.write_dem(rast_dem_name, grass.overwrite())
    # set computational region to match DEM
    par.set_region_from_map(rast_dem_name)
    # Write friction
    par.write_n_map(rast_n_file_name, grass.overwrite())
    # Write start file
    par.write_start_h(rast_start_file_name, grass.overwrite())

    # boundary conditions
    bc = BoundaryConditions(msgr, sim_time=par.sim_time,
                            region=par.region)
    if par.bci_file:
        bci_full_path = os.path.join(par.directory, par.bci_file)
        bc.read_bci(bci_full_path)
    if par.bdy_file:
        bdy_full_path = os.path.join(par.directory, par.bdy_file)
        bc.read_bdy(bdy_full_path)
    # create STDS
    bc.create_stds(stds_name=rast_user_name, overwrite=grass.overwrite())
    bc.create_stds(rast_bcval_name, overwrite=grass.overwrite())
    # Write maps en register them in STDS
    bc.populate_user_flow_stds(rast_quser_name=rast_user_name,
                               overwrite=grass.overwrite())
    bc.populate_bc_stds(rast_bcval_name, grass.overwrite())
    # write Boundary condition type map
    bc.write_bctype(rast_bc_name, grass.overwrite())

    # Restore original region
    grass.del_temp_region()
    return 0
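Every grass.overwrite() call above reads the same switch: the --o flag of a GRASS script sets the GRASS_OVERWRITE environment variable, which is what the function checks. The behaviour can therefore be forced from a calling process:

import os
import grass.script as grass

os.environ['GRASS_OVERWRITE'] = '1'  # equivalent to passing --o
print(grass.overwrite())             # True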
Code example #20
File: t.rast.in.gpm.py Project: lrntct/t.rast.in.gpm
def register_maps_in_strds(strds_name, raster_dts_lst):
    '''Register given maps
    '''
    # create strds
    strds_id = format_id(strds_name)
    strds_title = "data from GPM"
    strds_desc = ""
    strds = tgis.open_new_stds(strds_id, 'strds', 'absolute',
        strds_title, strds_desc, "mean", overwrite=grass.overwrite())

    # Register the maps
    tgis.register.register_map_object_list('raster', raster_dts_lst,
            strds, delete_empty=True, unit=None)
Code example #21
def inverse_transform(settings_name, coef_prefix, result_prefix='res.'):
    reader = csv.reader(open(settings_name), delimiter=',')
    header = reader.next()

    data_names = [coef_prefix + name for name in header[1:]]

    for row in reader:
        s = "%s%s = " % (result_prefix, row[0])
        sums = []
        for i in range(len(data_names)):
            sums.append("%s*%s" % (data_names[i], row[i+1]))
        s += ' + '.join(sums)

        grass.mapcalc(s, overwrite=grass.overwrite(), quiet=True)
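For illustration, given a hypothetical settings file whose header row is name,h1,h2 and a data row jan,0.3,0.7, with coef_prefix='coef.' and the default result_prefix the expression handed to grass.mapcalc would be:

res.jan = coef.h1*0.3 + coef.h2*0.7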
Code example #22
File: t.create.py Project: caomw/grass
def main():

    # Get the options
    name = options["output"]
    type = options["type"]
    temporaltype = options["temporaltype"]
    title = options["title"]
    descr = options["description"]
    semantic = options["semantictype"]

    tgis.init()

    tgis.open_new_stds(name, type, temporaltype, title, descr,
                       semantic, None, grass.overwrite())
Code example #23
File: stds_import.py Project: caomw/grass
def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
    impflags = "o"
    if exp or location:
        impflags += "e"
    for row in maplist:
        name = row["name"]
        filename = row["filename"] + ".xml"

        ret = gscript.run_command("v.in.ogr", dsn=filename,
                                  output=name, flags=impflags,
                                  overwrite=gscript.overwrite())

        if ret != 0:
            gscript.fatal(_("Unable to import vector map <%s> from file "
                            "%s.") % (name, filename))
Code example #24
File: tr.series.py Project: AsherBond/MondocosmOS
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    sort = options["sort"]
    where = options["where"]

    # Make sure the temporal database exists
    tgis.create_temporal_database()

    if input.find("@") >= 0:
        id = input
    else:
        mapset = grass.gisenv()["MAPSET"]
        id = input + "@" + mapset

    sp = tgis.space_time_raster_dataset(id)

    if not sp.is_in_db():
        grass.fatal(_("Dataset <%s> not found in temporal database") % (id))

    sp.select()

    rows = sp.get_registered_maps(None, where, sort)

    if rows:
        inputs = ",".join([row["id"] for row in rows])

        print inputs

        grass.run_command("r.series", input=inputs, output=output,
                          overwrite=grass.overwrite(), method=method)
Code example #25
File: stds_import.py Project: caomw/grass
def _import_raster_maps(maplist, set_current_region=False):
    # We need to disable the projection check because of its
    # simple implementation
    impflags = "o"
    for row in maplist:
        name = row["name"]
        filename = row["filename"] + ".pack"
        ret = gscript.run_command("r.unpack", input=filename,
                                  output=name, flags=impflags,
                                  overwrite=gscript.overwrite(),
                                  verbose=True)

        if ret != 0:
            gscript.fatal(_("Unable to unpack raster map <%s> from file "
                            "%s.") % (name, filename))

    # Set the computational region from the last map imported
    if set_current_region is True:
        gscript.run_command("g.region", rast=name)
Code example #26
def main():

    # process command options
    input = options['input']
    if not gs.find_file(input)['file']:
        gs.fatal(_("Raster map <%s> not found") % input)

    smooth = options['output']
    if gs.find_file(smooth)['file'] and not gs.overwrite():
        gs.fatal(_("Output map <%s> already exists") % smooth)

    sd = options['sd']
    try:
        sd = float(sd)
    except ValueError:
        if not gs.find_file(sd)['file']:
            gs.fatal(_("Raster map <%s> not found") % sd)

    alpha = float(options['alpha'])

    # set aside region for internal use
    gs.use_temp_region()

    # subset input if desired
    region = options.get('region')
    if region:
        if not gs.find_file(region)['file']:
            gs.fatal(_("Raster map <%s> not found") % region)
        gs.message("Setting region to %s" % region, flag='i')
        gs.run_command('g.region', rast=region, align=input)
    else:
        gs.message("Using existing GRASS region", flag='i')
    gs.debug('='*50)
    gs.debug('\n'.join(gs.parse_command('g.region', flags='p').keys()))
    gs.debug('='*50)

    multiscalesmooth(input, smooth, sd, alpha)

    # restore original region
    gs.del_temp_region()

    return None
Code example #27
def _generate_time(N, prefix):
    """Generate (constant) rasters of time line
        t0, t1, t2, ..., t_{N-1})
        where t0 = 0, t_{N-1} = 2*pi

    N -- count of result rasters
    prefix -- prefix for the raster names

    return list of names for the result rasters
    """
    assert N > 1
    names = dict()
    for i in range(N):
        output = prefix + _time_to_name(i)
        t = get_time(N, i, deg=True)
        grass.mapcalc("${out} = ${t}", out=output, t=t,
                      quiet=True, overwrite=grass.overwrite())
        names[i] = output

    return names
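grass.mapcalc() fills the ${out} and ${t} placeholders from its keyword arguments (string.Template semantics) before calling r.mapcalc. For N=5, i=2 and prefix='time', and assuming the helper sketch given under example #1, the call above would amount to:

r.mapcalc "time_t02 = 180.0"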
Code example #28
File: stds_import.py Project: caomw/grass
def _import_vector_maps(maplist):
    # We need to disable the projection check because of its
    # simple implementation
    impflags = "o"
    for row in maplist:
        # Separate the name from the layer
        name = row["name"].split(":")[0]
        # Import only unique maps
        if name in imported_maps:
            continue
        filename = row["filename"] + ".pack"
        ret = gscript.run_command("v.unpack", input=filename,
                                  output=name, flags=impflags,
                                  overwrite=gscript.overwrite(),
                                  verbose=True)

        if ret != 0:
            gscript.fatal(_("Unable to unpack vector map <%s> from file "
                            "%s.") % (name, filename))

        imported_maps[name] = name
Code example #29
File: t.rast.accdetect.py Project: caomw/grass
def create_strds_register_maps(in_strds, out_strds, out_maps, register_null,
                    empty_maps, dbif):

    out_id = out_strds.get_id()

    if out_strds.is_in_db(dbif):
        if grass.overwrite():
            out_strds.delete(dbif)
            out_strds = in_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = in_strds.get_initial_values()
    out_strds.set_initial_values(temporal_type, semantic_type, title,
                                    description)
    out_strds.insert(dbif)

    # Register the maps in the database
    count = 0
    for map in out_maps.values():
        count += 1
        if count % 10 == 0:
            grass.percent(count, len(out_maps), 1)
        # Read the raster map data
        map.load()
        # In case of an empty map continue, do not register empty maps
        if not register_null:
            if map.metadata.get_min() is None and \
                map.metadata.get_max() is None:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        out_strds.register_map(map, dbif)

    out_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)
Code example #30
def main():
    #lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    vector_output = options["vector_output"]
    strds = options["strds"]
    where = options["where"]
    columns = options["columns"]

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")

    if len(strds_names) != len(column_names):
        grass.fatal(
            _("The number of columns must be equal to the number of space time raster datasets"
              ))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = grass.gisenv()["MAPSET"]

    out_sp = tgis.check_new_stds(output, "stvds", dbif, overwrite)

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)

    # Single space time raster dataset
    if len(strds_names) == 1:
        rows = first_strds.get_registered_maps(
            columns="name,mapset,start_time,end_time",
            order="start_time",
            dbif=dbif)

        if not rows:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty") % out_sp.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(_("Temporal type of space time raster datasets must be equal\n"
                              "<%(a)s> of type %(type_a)s do not match <%(b)s> of type %(type_b)s"%\
                              {"a":first_strds.get_id(),
                               "type_a":first_strds.get_temporal_type(),
                               "b":dataset.get_id(),
                               "type_b":dataset.get_temporal_type()}))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds", "strds", strds_names, strds_names[0], False, None,
            "equal", False, False)

        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:

                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list)
                samples.append(s)

    num_samples = len(samples)

    # Get the layer and database connections of the input vector
    vector_db = grass.vector.vector_db(input)

    # We copy the vector table and create the new layers
    if vector_db:
        # Use the first layer to copy the categories from
        layers = "1,"
    else:
        layers = ""
    first = True
    for layer in range(1, num_samples + 1):
        # Skip existing layer
        if vector_db and layer in vector_db and \
           vector_db[layer]["layer"] == layer:
            continue
        if first:
            layers += "%i" % (layer)
            first = False
        else:
            layers += ",%i" % (layer)

    vectmap = vector_output

    # We create a new vector map using the categories of the original map
    try:
        grass.run_command("v.category",
                          input=input,
                          layer=layers,
                          output=vectmap,
                          option="transfer",
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(
            _("Unable to create new layers for vector map <%s>") % (vectmap))

    title = _("Observaion of space time raster dataset(s) <%s>") % (strds)
    description = _("Observation of space time raster dataset(s) <%s>"
                    " with vector map <%s>") % (strds, input)

    # Create the output space time vector dataset
    out_sp = tgis.open_new_stds(output, "stvds",
                                first_strds.get_temporal_type(),
                                title, description,
                                first_strds.get_semantic_type(), dbif,
                                overwrite)

    dummy = out_sp.get_new_map_instance(None)

    # Sample the space time raster dataset with the vector
    # map at specific layer with v.what.rast
    count = 1
    for sample in samples:
        raster_names = sample.raster_names

        if len(raster_names) != len(column_names):
            grass.fatal(
                _("The number of raster maps in a granule must "
                  "be equal to the number of column names"))

        # Create the columns creation string
        columns_string = ""
        for name, column in zip(raster_names, column_names):
            # The column is by default double precision
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"

            tmp_string = "%s %s," % (column, coltype)
            columns_string += tmp_string

        # Remove the trailing comma
        columns_string = columns_string[:-1]

        # Try to add a column
        if vector_db and count in vector_db and vector_db[count]["table"]:
            try:
                grass.run_command("v.db.addcolumn",
                                  map=vectmap,
                                  layer=count,
                                  column=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add column %s to vector map <%s> "
                      "with layer %i") % (columns_string, vectmap, count))
        else:
            # Try to add a new table
            grass.message("Add table to layer %i" % (count))
            try:
                grass.run_command("v.db.addtable",
                                  map=vectmap,
                                  layer=count,
                                  columns=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add table to vector map "
                      "<%s> with layer %i") % (vectmap, count))

        # Call v.what.rast for each raster map
        for name, column in zip(raster_names, column_names):
            try:
                grass.run_command("v.what.rast",
                                  map=vectmap,
                                  layer=count,
                                  raster=name,
                                  column=column,
                                  where=where)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to run v.what.rast for vector map <%s> "
                            "with layer %i and raster map <%s>") % \
                            (vectmap, count, str(raster_names)))

        vect = out_sp.get_new_map_instance(
            dummy.build_id(vectmap, mapset, str(count)))
        vect.load()

        start = sample.start
        end = sample.end

        if out_sp.is_time_absolute():
            vect.set_absolute_time(start, end)
        else:
            vect.set_relative_time(start, end,
                                   first_strds.get_relative_time_unit())

        if vect.is_in_db(dbif):
            vect.update_all(dbif)
        else:
            vect.insert(dbif)

        out_sp.register_map(vect, dbif)
        count += 1

    out_sp.update_from_registered_maps(dbif)
    dbif.close()
Code example #31
def main():
    """
    Builds river segments for input to the USGS hydrologic models
    PRMS and GSFLOW.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()

    # I/O
    streams = options["input"]
    segments = options["output"]

    # Hydraulic geometry
    ICALC = int(options["icalc"])

    # ICALC=0: Constant depth
    WIDTH1 = options["width1"]
    WIDTH2 = options["width2"]

    # ICALC=1,2: Manning (in channel and overbank): below

    # ICALC=3: Power-law relationships (following Leopold and others)
    # The at-a-station default exponents are from Rhodes (1977)
    CDPTH = str(float(options["cdpth"]) / 35.3146667)  # cfs to m^3/s
    FDPTH = options["fdpth"]
    AWDTH = str(float(options["awdth"]) / 35.3146667)  # cfs to m^3/s
    BWDTH = options["bwdth"]

    ##################################################
    # CHECKING DEPENDENCIES WITH OPTIONAL PARAMETERS #
    ##################################################

    if ICALC == 3:
        if CDPTH and FDPTH and AWDTH and BWDTH:
            pass
        else:
            gscript.fatal("Missing CDPTH, FDPTH, AWDTH, and/or BWDTH. \
                         These are required when ICALC = 3.")

    ###########
    # RUNNING #
    ###########

    # New Columns for Segments
    segment_columns = []
    # Self ID
    segment_columns.append("id integer")  # segment number
    segment_columns.append("ISEG integer")  # segment number
    segment_columns.append("NSEG integer")  # segment number
    # for GSFLOW
    segment_columns.append(
        "ICALC integer"
    )  # 1 for channel, 2 for channel+fp, 3 for power function
    segment_columns.append(
        "OUTSEG integer")  # downstream segment -- tostream, renumbered
    segment_columns.append("ROUGHCH double precision")  # overbank roughness
    segment_columns.append("ROUGHBK double precision")  # in-channel roughness
    segment_columns.append("WIDTH1 double precision")  # overbank roughness
    segment_columns.append("WIDTH2 double precision")  # in-channel roughness
    segment_columns.append("CDPTH double precision")  # depth coeff
    segment_columns.append("FDPTH double precision")  # depth exp
    segment_columns.append("AWDTH double precision")  # width coeff
    segment_columns.append("BWDTH double precision")  # width exp
    segment_columns.append(
        "floodplain_width double precision"
    )  # floodplain width (8-pt approx channel + flat fp)
    # The below will be all 0
    segment_columns.append(
        "IUPSEG varchar")  # upstream segment ID number, for diversions
    segment_columns.append("FLOW varchar")
    segment_columns.append("RUNOFF varchar")
    segment_columns.append("ETSW varchar")
    segment_columns.append("PPTSW varchar")

    segment_columns = ",".join(segment_columns)

    # CONSIDER THE EFFECT OF OVERWRITING COLUMNS -- WARN FOR THIS
    # IF MAP EXISTS ALREADY?

    # Create a map to work with
    g.copy(vector=(streams, segments), overwrite=gscript.overwrite())
    # and add its columns
    v.db_addcolumn(map=segments, columns=segment_columns)

    # Produce the data table entries
    ##################################
    colNames = np.array(gscript.vector_db_select(segments, layer=1)["columns"])
    colValues = np.array(
        gscript.vector_db_select(segments, layer=1)["values"].values())
    number_of_segments = colValues.shape[0]
    cats = colValues[:, colNames == "cat"].astype(int).squeeze()

    nseg = np.arange(1, len(cats) + 1)
    nseg_cats = []
    for i in range(len(cats)):
        nseg_cats.append((nseg[i], cats[i]))

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open("rw")
    cur = segmentsTopo.table.conn.cursor()

    # id = cat (and so do ISEG and NSEG)
    cur.executemany("update " + segments + " set id=? where cat=?", nseg_cats)
    cur.executemany("update " + segments + " set ISEG=? where cat=?",
                    nseg_cats)
    cur.executemany("update " + segments + " set NSEG=? where cat=?",
                    nseg_cats)

    # outseg = tostream: default is 0 if "tostream" is off-map
    cur.execute("update " + segments + " set OUTSEG=0")
    cur.executemany("update " + segments + " set OUTSEG=? where tostream=?",
                    nseg_cats)

    # Hydraulic geometry selection
    cur.execute("update " + segments + " set ICALC=" + str(ICALC))
    segmentsTopo.table.conn.commit()
    segmentsTopo.close()
    if ICALC == 0:
        gscript.message("")
        gscript.message("ICALC=0 (constant) not supported")
        gscript.message("Continuing nonetheless.")
        gscript.message("")
    if ICALC == 1:
        if options["width_points"] is not "":
            # Can add machinery here for separate upstream and downstream widths
            # But really should not vary all that much
            # v.to_db(map=segments, option='start', columns='xr1,yr1')
            # v.to_db(map=segments, option='end', columns='xr2,yr2')
            gscript.run_command(
                "v.distance",
                from_=segments,
                to=options["width_points"],
                upload="to_attr",
                to_column=options["width_points_col"],
                column="WIDTH1",
            )
            v.db_update(map=segments, column="WIDTH2", query_column="WIDTH1")
        else:
            segmentsTopo = VectorTopo(segments)
            segmentsTopo.open("rw")
            cur = segmentsTopo.table.conn.cursor()
            cur.execute("update " + segments + " set WIDTH1=" + str(WIDTH1))
            cur.execute("update " + segments + " set WIDTH2=" + str(WIDTH2))
            segmentsTopo.table.conn.commit()
            segmentsTopo.close()
    if ICALC == 2:
        # REMOVE THIS MESSAGE ONCE THIS IS INCLUDED IN INPUT-FILE BUILDER
        gscript.message("")
        gscript.message("ICALC=2 (8-point channel + floodplain) not supported")
        gscript.message("Continuing nonetheless.")
        gscript.message("")
        if options["fp_width_pts"] is not "":
            gscript.run_command(
                "v.distance",
                from_=segments,
                to=options["fp_width_pts"],
                upload="to_attr",
                to_column=options["fp_width_pts_col"],
                column="floodplain_width",
            )
        else:
            segmentsTopo = VectorTopo(segments)
            segmentsTopo.open("rw")
            cur = segmentsTopo.table.conn.cursor()
            cur.execute("update " + segments + " set floodplain_width=" +
                        str(options["fp_width_value"]))
            segmentsTopo.table.conn.commit()
            segmentsTopo.close()
    if ICALC == 3:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        cur.execute("update " + segments + " set CDPTH=" + str(CDPTH))
        cur.execute("update " + segments + " set FDPTH=" + str(FDPTH))
        cur.execute("update " + segments + " set AWDTH=" + str(AWDTH))
        cur.execute("update " + segments + " set BWDTH=" + str(BWDTH))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()

    # values that are 0
    gscript.message("")
    gscript.message("NOTICE: not currently used:")
    gscript.message("IUPSEG, FLOW, RUNOFF, ETSW, and PPTSW.")
    gscript.message("All set to 0.")
    gscript.message("")

    segmentsTopo = VectorTopo(segments)
    segmentsTopo.open("rw")
    cur = segmentsTopo.table.conn.cursor()
    cur.execute("update " + segments + " set IUPSEG=" + str(0))
    cur.execute("update " + segments + " set FLOW=" + str(0))
    cur.execute("update " + segments + " set RUNOFF=" + str(0))
    cur.execute("update " + segments + " set ETSW=" + str(0))
    cur.execute("update " + segments + " set PPTSW=" + str(0))
    segmentsTopo.table.conn.commit()
    segmentsTopo.close()

    # Roughness
    # ICALC=1,2: Manning (in channel)
    if (options["roughch_raster"] is not "") and (options["roughch_points"]
                                                  is not ""):
        gscript.fatal(
            "Choose either a raster or vector or a value as Manning's n input."
        )
    if options["roughch_raster"] is not "":
        ROUGHCH = options["roughch_raster"]
        v.rast_stats(
            raster=ROUGHCH,
            method="average",
            column_prefix="tmp",
            map=segments,
            flags="c",
        )
        # v.db_renamecolumn(map=segments, column='tmp_average,ROUGHCH', quiet=True)
        v.db_update(map=segments,
                    column="ROUGHCH",
                    query_column="tmp_average",
                    quiet=True)
        v.db_dropcolumn(map=segments, columns="tmp_average", quiet=True)
    elif options["roughch_points"] is not "":
        ROUGHCH = options["roughch_points"]
        gscript.run_command(
            "v.distance",
            from_=segments,
            to=ROUGHCH,
            upload="to_attr",
            to_column=options["roughch_pt_col"],
            column="ROUGHCH",
        )
    else:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        ROUGHCH = options["roughch_value"]
        cur.execute("update " + segments + " set ROUGHCH=" + str(ROUGHCH))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()

    # ICALC=2: Manning (overbank)
    if (options["roughbk_raster"] is not "") and (options["roughbk_points"]
                                                  is not ""):
        gscript.fatal(
            "Choose either a raster or vector or a value as Manning's n input."
        )
    if options["roughbk_raster"] is not "":
        ROUGHBK = options["roughbk_raster"]
        v.rast_stats(
            raster=ROUGHBK,
            method="average",
            column_prefix="tmp",
            map=segments,
            flags="c",
        )
        v.db_renamecolumn(map=segments,
                          column="tmp_average,ROUGHBK",
                          quiet=True)
    elif options["roughbk_points"] is not "":
        ROUGHBK = options["roughbk_points"]
        gscript.run_command(
            "v.distance",
            from_=segments,
            to=ROUGHBK,
            upload="to_attr",
            to_column=options["roughbk_pt_col"],
            column="ROUGHBK",
        )
    else:
        segmentsTopo = VectorTopo(segments)
        segmentsTopo.open("rw")
        cur = segmentsTopo.table.conn.cursor()
        ROUGHBK = options["roughbk_value"]
        cur.execute("update " + segments + " set ROUGHBK=" + str(ROUGHBK))
        segmentsTopo.table.conn.commit()
        segmentsTopo.close()
Code example #32
def dataset_mapcalculator(inputs, output, type, expression, base, method,
                          nprocs=1, register_null=False, spatial=False):
    """Perform map-calculations of maps from different space time
       raster/raster3d datasets, using a specific sampling method
       to select temporal related maps.

       A mapcalc expression must be provided to process the temporal
       selected maps. Temporal operators are available in addition to
       the r.mapcalc operators:

       Supported operators for relative and absolute time are:

       - td() - the time delta of the current interval in days
                and fractions of days or the unit in case of relative time
       - start_time() - The start time of the interval from the begin of
                        the time series in days and fractions of days or the
                        unit in case of relative time
       - end_time() - The end time of the current interval from the begin of
                      the time series in days and fractions of days or the
                      unit in case of relative time

       Supported operators for absolute time:

       - start_doy() - Day of year (doy) from the start time [1 - 366]
       - start_dow() - Day of week (dow) from the start time [1 - 7],
                       the start of the week is monday == 1
       - start_year() - The year of the start time [0 - 9999]
       - start_month() - The month of the start time [1 - 12]
       - start_week() - Week of year of the start time [1 - 54]
       - start_day() - Day of month from the start time [1 - 31]
       - start_hour() - The hour of the start time [0 - 23]
       - start_minute() - The minute of the start time [0 - 59]
       - start_second() - The second of the start time [0 - 59]

       - end_doy() - Day of year (doy) from the end time [1 - 366]
       - end_dow() - Day of week (dow) from the end time [1 - 7],
                     the start of the week is monday == 1
       - end_year() - The year of the end time [0 - 9999]
       - end_month() - The month of the end time [1 - 12]
       - end_week() - Week of year of the end time [1 - 54]
       - end_day() - Day of month from the end time [1 - 31]
       - end_hour() - The hour of the end time [0 - 23]
       - end_minute() - The minute of the end time [0 - 59]
       - end_second() - The second of the end time [0 - 59]

       :param inputs: The names of the input space time raster/raster3d datasets
       :param output: The name of the extracted new space time raster(3d) dataset
       :param type: The type of the dataset: "raster" or "raster3d"
       :param expression: The r(3).mapcalc expression
       :param base: The base name of the newly created maps in case a
              mapcalc expression is provided
       :param method: The method to be used for temporal sampling
       :param nprocs: The number of parallel processes to be used for
              mapcalc processing
       :param register_null: Set this to True to register empty maps
       :param spatial: Check spatial overlap
    """

    # We need a database interface for fast computation
    dbif = SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = get_current_mapset()
    msgr = get_tgis_message_interface()

    input_name_list = inputs.split(",")

    first_input = open_old_stds(input_name_list[0], type, dbif)

    # All additional inputs in reverse sorted order to avoid
    # wrong name substitution
    input_name_list = input_name_list[1:]
    input_name_list.sort()
    input_name_list.reverse()
    input_list = []

    for input in input_name_list:
        sp = open_old_stds(input, type, dbif)
        input_list.append(copy.copy(sp))

    new_sp = check_new_stds(output, type, dbif, gscript.overwrite())

    # Sample all inputs by the first input and create a sample matrix
    if spatial:
        msgr.message(_("Starting spatio-temporal sampling..."))
    else:
        msgr.message(_("Starting temporal sampling..."))
    map_matrix = []
    id_list = []
    sample_map_list = []
    # First entry is the first dataset id
    id_list.append(first_input.get_name())

    if len(input_list) > 0:
        has_samples = False
        for dataset in input_list:
            list = dataset.sample_by_dataset(stds=first_input,
                                             method=method, spatial=spatial,
                                             dbif=dbif)

            # In case samples are not found
            if not list:
                dbif.close()
                msgr.message(_("No samples found for map calculation"))
                return 0

            # The first entries are the samples
            map_name_list = []
            if not has_samples:
                for entry in list:
                    granule = entry["granule"]
                    # Do not consider gaps
                    if granule.get_id() is None:
                        continue
                    sample_map_list.append(granule)
                    map_name_list.append(granule.get_name())
                # Attach the map names
                map_matrix.append(copy.copy(map_name_list))
                has_samples = True

            map_name_list = []
            for entry in list:
                maplist = entry["samples"]
                granule = entry["granule"]

                # Do not consider gaps in the sampler
                if granule.get_id() is None:
                    continue

                if len(maplist) > 1:
                    msgr.warning(_("Found more than a single map in a sample "
                                   "granule. Only the first map is used for "
                                   "computation. Use t.rast.aggregate.ds to "
                                   "create synchronous raster datasets."))

                # Store all maps! This includes non-existent maps,
                # identified by id == None
                map_name_list.append(maplist[0].get_name())

            # Attach the map names
            map_matrix.append(copy.copy(map_name_list))

            id_list.append(dataset.get_name())
    else:
        list = first_input.get_registered_maps_as_objects(dbif=dbif)

        if list is None:
            dbif.close()
            msgr.message(_("No maps registered in input dataset"))
            return 0

        map_name_list = []
        for map in list:
            map_name_list.append(map.get_name())
            sample_map_list.append(map)

        # Attach the map names
        map_matrix.append(copy.copy(map_name_list))

    # Needed for map registration
    map_list = []

    if len(map_matrix) > 0:

        msgr.message(_("Starting mapcalc computation..."))

        count = 0
        # Get the number of samples
        num = len(map_matrix[0])

        # Parallel processing
        proc_list = []
        proc_count = 0

        # For all samples
        for i in range(num):

            count += 1
            if count % 10 == 0:
                msgr.percent(count, num, 1)

            # Create the r.mapcalc statement for the current time step
            map_name = "{base}_{suffix}".format(base=base,
                                                suffix=gscript.get_num_suffix(count, num))
            # Remove spaces and new lines
            expr = expression.replace(" ", "")

            # Check that all maps are in the sample
            valid_maps = True
            # Replace all dataset names with their map names of the
            # current time step
            for j in range(len(map_matrix)):
                if map_matrix[j][i] is None:
                    valid_maps = False
                    break
                # Substitute the dataset name with the map name
                expr = expr.replace(id_list[j], map_matrix[j][i])

            # Proceed with the next sample
            if not valid_maps:
                continue

            # Create the new map id and check if the map is already
            # in the database
            map_id = map_name + "@" + mapset

            new_map = first_input.get_new_map_instance(map_id)

            # Check if new map is in the temporal database
            if new_map.is_in_db(dbif):
                if gscript.overwrite():
                    # Remove the existing temporal database entry
                    new_map.delete(dbif)
                    new_map = first_input.get_new_map_instance(map_id)
                else:
                    msgr.error(_("Map <%s> is already in temporal database, "
                                 "use overwrite flag to overwrite"))
                    continue

            # Set the time stamp
            if sample_map_list[i].is_time_absolute():
                start, end = sample_map_list[i].get_absolute_time()
                new_map.set_absolute_time(start, end)
            else:
                start, end, unit = sample_map_list[i].get_relative_time()
                new_map.set_relative_time(start, end, unit)

            # Parse the temporal expressions
            expr = _operator_parser(expr, sample_map_list[0],
                                    sample_map_list[i])
            # Add the output map name
            expr = "%s=%s" % (map_name, expr)

            map_list.append(new_map)

            msgr.verbose(_("Apply mapcalc expression: \"%s\"") % expr)

            # Start the parallel r.mapcalc computation
            if type == "raster":
                proc_list.append(Process(target=_run_mapcalc2d, args=(expr,)))
            else:
                proc_list.append(Process(target=_run_mapcalc3d, args=(expr,)))
            proc_list[proc_count].start()
            proc_count += 1
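            # Added note: processes are joined in batches -- whenever nprocs
            # workers have been started or the last sample has been
            # dispatched -- so at most nprocs r.mapcalc jobs run at once.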

            if proc_count == nprocs or proc_count == num or count == num:
                proc_count = 0
                exitcodes = 0
                for proc in proc_list:
                    proc.join()
                    exitcodes += proc.exitcode

                if exitcodes != 0:
                    dbif.close()
                    msgr.fatal(_("Error while mapcalc computation"))

                # Empty process list
                proc_list = []

        # Register the new maps in the output space time dataset
        msgr.message(_("Starting map registration in temporal database..."))

        temporal_type, semantic_type, title, description = first_input.get_initial_values()

        new_sp = open_new_stds(output, type, temporal_type, title, description,
                               semantic_type, dbif, gscript.overwrite())
        count = 0

        # collect empty maps to remove them
        empty_maps = []

        # Insert maps in the temporal database and in the new space time
        # dataset
        for new_map in map_list:

            count += 1
            if count % 10 == 0:
                msgr.percent(count, num, 1)

            # Read the map data
            new_map.load()

            # In case of a null map continue, do not register null maps
            if new_map.metadata.get_min() is None and \
               new_map.metadata.get_max() is None:
                if not register_null:
                    empty_maps.append(new_map)
                    continue

            # Insert map in temporal database
            new_map.insert(dbif)

            new_sp.register_map(new_map, dbif)

        # Update the spatio-temporal extent and the metadata table entries
        new_sp.update_from_registered_maps(dbif)

        msgr.percent(1, 1, 1)

        # Remove empty maps
        if len(empty_maps) > 0:
            names = ""
            count = 0
            for map in empty_maps:
                if count == 0:
                    names += "%s" % (map.get_name())
                else:
                    names += ",%s" % (map.get_name())
                count += 1
            if type == "raster":
                gscript.run_command("g.remove", flags='f', type='raster',
                                    name=names, quiet=True)
            elif type == "raster3d":
                gscript.run_command("g.remove", flags='f', type='raster_3d',
                                    name=names, quiet=True)

    dbif.close()
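# Minimal usage sketch (added, hedged): all names are hypothetical, and the
# input datasets must already exist in the current mapset's temporal database.
#
#   dataset_mapcalculator(inputs="temp,prec", output="temp_plus_prec",
#                         type="raster", expression="temp + prec",
#                         base="tp", method="equal", nprocs=2)
#
# This samples "prec" against "temp" with the "equal" topology and writes
# maps named tp_<zero-padded index>, registered in the new output dataset.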
Code example #33
def main():
    """
    Adds GSFLOW parameters to a set of HRU sub-basins
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()
    basins = options["input"]
    HRU = options["output"]
    slope = options["slope"]
    aspect = options["aspect"]
    elevation = options["elevation"]
    land_cover = options["cov_type"]
    soil = options["soil_type"]

    ################################
    # CREATE HRUs FROM SUB-BASINS  #
    ################################

    g.copy(vector=(basins, HRU), overwrite=gscript.overwrite())

    ############################################
    # ATTRIBUTE COLUMNS (IN ORDER FROM MANUAL) #
    ############################################

    # HRU
    hru_columns = []
    # Self ID
    hru_columns.append("id integer")  # nhru
    # Basic Physical Attributes (Geometry)
    hru_columns.append("hru_area double precision")  # acres (!!!!)
    hru_columns.append(
        "hru_area_m2 double precision")  # [not for GSFLOW: for me!]
    hru_columns.append("hru_aspect double precision")  # Mean aspect [degrees]
    hru_columns.append("hru_elev double precision")  # Mean elevation
    hru_columns.append("hru_lat double precision")  # Latitude of centroid
    hru_columns.append("hru_lon double precision")  # Longitude of centroid
    # unnecessary but why not?
    hru_columns.append("hru_slope double precision")  # Mean slope [percent]
    # Basic Physical Attributes (Other)
    # hru_columns.append('hru_type integer') # 0=inactive; 1=land; 2=lake; 3=swale; almost all will be 1
    # hru_columns.append('elev_units integer') # 0=feet; 1=meters. 0=default. I think I will set this to 1 by default.
    # Measured input
    hru_columns.append(
        "outlet_sta integer")  # Index of streamflow station at basin outlet:
    # station number if it has one, 0 if not
    # Note that the below specify projected coordinates and not lat/lon; they really seem
    # to work for any projected coordinates, with _x, _y, in meters, and _xlong,
    # _ylat, in feet (i.e. they are just northing and easting). The meters and feet
    # are not just simple conversions, but actually are required for different
    # modules in the code, and are hence redundant but intentional.
    hru_columns.append("hru_x double precision")  # Easting [m]
    hru_columns.append("hru_xlong double precision")  # Easting [feet]
    hru_columns.append("hru_y double precision")  # Northing [m]
    hru_columns.append("hru_ylat double precision")  # Northing [feet]
    # Streamflow and lake routing
    hru_columns.append(
        "K_coef double precision"
    )  # Travel time of flood wave to next downstream segment;
    # this is the Muskingum storage coefficient
    # 1.0 for reservoirs, diversions, and segments flowing
    # out of the basin
    hru_columns.append(
        "x_coef double precision")  # Amount of attenuation of flow wave;
    # this is the Muskingum routing weighting factor
    # range: 0.0--0.5; default 0.2
    # 0 for all segments flowing out of the basin
    hru_columns.append("hru_segment integer"
                       )  # ID of stream segment to which flow will be routed
    # this is for non-cascade routing (flow goes directly
    # from HRU to stream segment)
    hru_columns.append("obsin_segment integer"
                       )  # Index of measured streamflow station that replaces
    # inflow to a segment
    hru_columns.append(
        "cov_type integer"
    )  # 0=bare soil;1=grasses; 2=shrubs; 3=trees; 4=coniferous
    hru_columns.append("soil_type integer")  # 1=sand; 2=loam; 3=clay

    # Create strings
    hru_columns = ",".join(hru_columns)

    # Add columns to tables
    v.db_addcolumn(map=HRU, columns=hru_columns, quiet=True)

    ###########################
    # UPDATE DATABASE ENTRIES #
    ###########################

    colNames = np.array(gscript.vector_db_select(HRU, layer=1)["columns"])
    colValues = np.array(
        list(gscript.vector_db_select(HRU, layer=1)["values"].values()))
    number_of_hrus = colValues.shape[0]
    cats = colValues[:, colNames == "cat"].astype(int).squeeze()
    rnums = colValues[:, colNames == "rnum"].astype(int).squeeze()

    nhru = np.arange(1, number_of_hrus + 1)
    nhrut = []
    for i in range(len(nhru)):
        nhrut.append((nhru[i], cats[i]))
    # Access the HRUs
    hru = VectorTopo(HRU)
    # Open the map with topology:
    hru.open("rw")
    # Create a cursor
    cur = hru.table.conn.cursor()
    # Use it to loop across the table
    cur.executemany("update " + HRU + " set id=? where cat=?", nhrut)
    # Commit changes to the table
    hru.table.conn.commit()
    # Close the table
    hru.close()
    """
    # Do the same for basins <-------------- DO THIS OR SIMPLY HAVE HRUs OVERLAIN WITH GRID CELLS? IN THIS CASE, RMV AREA ADDITION TO GRAVRES
    v.db_addcolumn(map=basins, columns='id int', quiet=True)
    basins = VectorTopo(basins)
    basins.open('rw')
    cur = basins.table.conn.cursor()
    cur.executemany("update basins set id=? where cat=?", nhrut)
    basins.table.conn.commit()
    basins.close()
    """

    # if you want to append to table
    # cur.executemany("update HRU(id) values(?)", nhrut) # "insert into" will add rows

    # hru_columns.append('hru_area double precision')
    # Acres b/c USGS
    v.to_db(map=HRU,
            option="area",
            columns="hru_area",
            units="acres",
            quiet=True)
    v.to_db(map=HRU,
            option="area",
            columns="hru_area_m2",
            units="meters",
            quiet=True)
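    # Added note: with option="area", v.to.db interprets units="meters" as
    # square meters and units="acres" as acres, so the two calls above fill
    # hru_area_m2 and hru_area (GSFLOW expects acres) respectively.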

    # GET MEAN VALUES FOR THESE NEXT ONES, ACROSS THE BASIN

    # SLOPE (and aspect)
    #####################
    v.rast_stats(
        map=HRU,
        raster=slope,
        method="average",
        column_prefix="tmp",
        flags="c",
        quiet=True,
    )
    v.db_update(map=HRU,
                column="hru_slope",
                query_column="tmp_average",
                quiet=True)

    # ASPECT
    #########
    v.db_dropcolumn(map=HRU, columns="tmp_average", quiet=True)
    # Dealing with conversion from degrees (no good average) to something I can
    # average -- x- and y-vectors
    # Geographic coordinates, so sin=x, cos=y.... not that it matters so long
    # as I am consistent in how I return to degrees
    r.mapcalc("aspect_x = sin(" + aspect + ")",
              overwrite=gscript.overwrite(),
              quiet=True)
    r.mapcalc("aspect_y = cos(" + aspect + ")",
              overwrite=gscript.overwrite(),
              quiet=True)
    # grass.run_command('v.db.addcolumn', map=HRU, columns='aspect_x_sum double precision, aspect_y_sum double precision, ncells_in_hru integer')
    v.rast_stats(
        map=HRU,
        raster="aspect_x",
        method="sum",
        column_prefix="aspect_x",
        flags="c",
        quiet=True,
    )
    v.rast_stats(
        map=HRU,
        raster="aspect_y",
        method="sum",
        column_prefix="aspect_y",
        flags="c",
        quiet=True,
    )
    hru = VectorTopo(HRU)
    hru.open("rw")
    cur = hru.table.conn.cursor()
    cur.execute("SELECT cat,aspect_x_sum,aspect_y_sum FROM %s" % hru.name)
    _arr = np.array(cur.fetchall()).astype(float)
    _cat = _arr[:, 0]
    _aspect_x_sum = _arr[:, 1]
    _aspect_y_sum = _arr[:, 2]
    aspect_angle = np.arctan2(_aspect_y_sum, _aspect_x_sum) * 180.0 / np.pi
    aspect_angle[aspect_angle < 0] += 360  # all positive
    aspect_angle_cat = np.vstack((aspect_angle, _cat)).transpose()
    cur.executemany("update " + HRU + " set hru_aspect=? where cat=?",
                    aspect_angle_cat)
    hru.table.conn.commit()
    hru.close()
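    # Worked check (added, hedged; pure numpy, values hypothetical): vector
    # averaging avoids the 180-degree artifact of a plain mean. For aspects
    # of 10 and 350 degrees:
    #   a = np.radians([10.0, 350.0])
    #   np.degrees(np.arctan2(np.sin(a).sum(), np.cos(a).sum()))  # -> ~0.0
    # The code above swaps the arguments (cos as y, sin as x), which maps an
    # angle t to 90 - t, i.e. between the trigonometric (CCW from east) and
    # compass (CW from north) conventions; the sign fix then keeps results
    # in [0, 360).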

    # ELEVATION
    ############
    v.rast_stats(
        map=HRU,
        raster=elevation,
        method="average",
        column_prefix="tmp",
        flags="c",
        quiet=True,
    )
    v.db_update(map=HRU,
                column="hru_elev",
                query_column="tmp_average",
                quiet=True)
    v.db_dropcolumn(map=HRU, columns="tmp_average", quiet=True)

    # CENTROIDS
    ############

    # get x,y of centroid -- but there are areas that are not in the database
    # table yet do have centroids, and it is hard to find a good way to get
    # rid of them! They have duplicate category values!
    # Perhaps these are little dangles on the edges of the vectorization where
    # the raster value was the same but pinched out into one to a few cells?
    # From looking at the map, there are lots of extra centroids on area
    # boundaries, and removing small areas (though the threshold is hard to
    # guess) gets rid of these.

    hru = VectorTopo(HRU)
    hru.open("rw")
    hru_cats = []
    hru_coords = []
    for hru_i in hru:
        if isinstance(hru_i, vector.geometry.Centroid):
            hru_cats.append(hru_i.cat)
            hru_coords.append(hru_i.coords())
    hru_cats = np.array(hru_cats)
    hru_coords = np.array(hru_coords)
    hru.rewind()

    hru_area_ids = []
    for coor in hru_coords:
        _area = hru.find_by_point.area(Point(coor[0], coor[1]))
        hru_area_ids.append(_area)
    hru_area_ids = np.array(hru_area_ids)
    hru.rewind()

    hru_areas = []
    for _area_id in hru_area_ids:
        hru_areas.append(_area_id.area())
    hru_areas = np.array(hru_areas)
    hru.rewind()

    allcats = sorted(list(set(list(hru_cats))))

    # Now create weighted mean
    hru_centroid_locations = []
    for cat in allcats:
        hrus_with_cat = hru_cats[hru_cats == cat]
        if len(hrus_with_cat) == 1:
            hru_centroid_locations.append(
                (hru_coords[hru_cats == cat]).squeeze())
        else:
            _centroids = hru_coords[hru_cats == cat]
            # print _centroids
            _areas = hru_areas[hru_cats == cat]
            # print _areas
            _x = np.average(_centroids[:, 0], weights=_areas)
            _y = np.average(_centroids[:, 1], weights=_areas)
            # print _x, _y
            hru_centroid_locations.append(np.array([_x, _y]))

    # Now upload weighted mean to database table
    # allcats and hru_centroid_locations are co-indexed
    index__cats = create_iterator(HRU)
    cur = hru.table.conn.cursor()
    for i in range(len(allcats)):
        # meters
        cur.execute("update " + HRU + " set hru_x=" +
                    str(hru_centroid_locations[i][0]) + " where cat=" +
                    str(allcats[i]))
        cur.execute("update " + HRU + " set hru_y=" +
                    str(hru_centroid_locations[i][1]) + " where cat=" +
                    str(allcats[i]))
        # feet
        cur.execute("update " + HRU + " set hru_xlong=" +
                    str(hru_centroid_locations[i][0] * 3.28084) +
                    " where cat=" + str(allcats[i]))
        cur.execute("update " + HRU + " set hru_ylat=" +
                    str(hru_centroid_locations[i][1] * 3.28084) +
                    " where cat=" + str(allcats[i]))
        # (un)Project to lat/lon
        _centroid_ll = gscript.parse_command("m.proj",
                                             coordinates=list(
                                                 hru_centroid_locations[i]),
                                             flags="od").keys()[0]
        _lon, _lat, _z = _centroid_ll.split("|")
        cur.execute("update " + HRU + " set hru_lon=" + _lon + " where cat=" +
                    str(allcats[i]))
        cur.execute("update " + HRU + " set hru_lat=" + _lat + " where cat=" +
                    str(allcats[i]))

    # feet -- not working.
    # Probably an issue with index__cats -- maybe fix later, if needed
    # But currently not a major speed issue
    """
    cur.executemany("update "+HRU+" set hru_xlong=?*3.28084 where hru_x=?",
                    index__cats)
    cur.executemany("update "+HRU+" set hru_ylat=?*3.28084 where hru_y=?",
                    index__cats)
    """

    cur.close()
    hru.table.conn.commit()
    hru.close()

    # ID NUMBER
    ############
    # cur.executemany("update "+HRU+" set hru_segment=? where id=?",
    #                index__cats)
    # Segment number = HRU ID number
    v.db_update(map=HRU, column="hru_segment", query_column="id", quiet=True)

    # LAND USE/COVER
    ############
    try:
        land_cover = int(land_cover)
    except ValueError:
        pass
    if isinstance(land_cover, int):
        if land_cover <= 3:
            v.db_update(map=HRU,
                        column="cov_type",
                        value=land_cover,
                        quiet=True)
        else:
            sys.exit(
                "WARNING: INVALID LAND COVER TYPE. CHECK INTEGER VALUES.\n"
                "EXITING TO ALLOW USER TO CHANGE BEFORE RUNNING GSFLOW")
    else:
        # NEED TO UPDATE THIS TO MODAL VALUE!!!!
        gscript.message(
            "Warning: values taken from HRU centroids. Code should be updated to"
        )
        gscript.message("acquire modal values")
        v.what_rast(map=HRU,
                    type="centroid",
                    raster=land_cover,
                    column="cov_type",
                    quiet=True)
        # v.rast_stats(map=HRU, raster=land_cover, method='average', column_prefix='tmp', flags='c', quiet=True)
        # v.db_update(map=HRU, column='cov_type', query_column='tmp_average', quiet=True)
        # v.db_dropcolumn(map=HRU, columns='tmp_average', quiet=True)

    # SOIL
    ############
    try:
        soil = int(soil)
    except ValueError:
        pass
    if isinstance(soil, int):
        if (soil > 0) and (soil <= 3):
            v.db_update(map=HRU, column="soil_type", value=soil, quiet=True)
        else:
            sys.exit("WARNING: INVALID SOIL TYPE. CHECK INTEGER VALUES.\n"
                     "EXITING TO ALLOW USER TO CHANGE BEFORE RUNNING GSFLOW")
    else:
        # NEED TO UPDATE THIS TO MODAL VALUE!!!!
        gscript.message(
            "Warning: values taken from HRU centroids. Code should be updated to"
        )
        gscript.message("acquire modal values")
        v.what_rast(map=HRU,
                    type="centroid",
                    raster=soil,
                    column="soil_type",
                    quiet=True)
Code example #34
def main():
    """
    Builds river reaches for input to the USGS hydrologic model, GSFLOW.
    These reaches link the PRMS stream segments to the MODFLOW grid cells.
    """

    ##################
    # OPTION PARSING #
    ##################

    options, flags = gscript.parser()
    segments = options['segment_input']
    grid = options['grid_input']
    reaches = options['output']
    elevation = options['elevation']
    Smin = options['s_min']
    h_stream = options['h_stream']
    x1 = options['upstream_easting_column_seg']
    y1 = options['upstream_northing_column_seg']
    x2 = options['downstream_easting_column_seg']
    y2 = options['downstream_northing_column_seg']
    tostream = options['tostream_cat_column_seg']
    # Hydraulic paramters
    STRTHICK = options['strthick']
    STRHC1 = options['strhc1']
    THTS = options['thts']
    THTI = options['thti']
    EPS = options['eps']
    UHC = options['uhc']
    # Build reach maps by overlaying segments on grid
    if len(gscript.find_file(segments, element='vector')['name']) > 0:
        v.extract(input=segments, output='GSFLOW_TEMP__', type='line', quiet=True, overwrite=True)
        v.overlay(ainput='GSFLOW_TEMP__', atype='line', binput=grid, output=reaches, operator='and', overwrite=gscript.overwrite(), quiet=True)
        g.remove(type='vector', name='GSFLOW_TEMP__', quiet=True, flags='f')
    else:
        gscript.fatal('No vector file "'+segments+'" found.')

    # Start editing database table
    reachesTopo = VectorTopo(reaches)
    reachesTopo.open('rw')

    # Rename a,b columns
    reachesTopo.table.columns.rename('a_'+x1, 'x1')
    reachesTopo.table.columns.rename('a_'+x2, 'x2')
    reachesTopo.table.columns.rename('a_'+y1, 'y1')
    reachesTopo.table.columns.rename('a_'+y2, 'y2')
    reachesTopo.table.columns.rename('a_NSEG', 'NSEG')
    reachesTopo.table.columns.rename('a_ISEG', 'ISEG')
    reachesTopo.table.columns.rename('a_stream_type', 'stream_type')
    reachesTopo.table.columns.rename('a_type_code', 'type_code')
    reachesTopo.table.columns.rename('a_cat', 'rnum_cat')
    reachesTopo.table.columns.rename('a_'+tostream, 'tostream')
    reachesTopo.table.columns.rename('a_id', 'segment_id')
    reachesTopo.table.columns.rename('a_OUTSEG', 'OUTSEG')
    reachesTopo.table.columns.rename('b_row', 'row')
    reachesTopo.table.columns.rename('b_col', 'col')
    reachesTopo.table.columns.rename('b_id', 'cell_id')

    # Drop unnecessary columns
    cols = reachesTopo.table.columns.names()
    for col in cols:
        if (col[:2] == 'a_') or (col[:2] == 'b_'):
            reachesTopo.table.columns.drop(col)

    # Add new columns to 'reaches'
    reachesTopo.table.columns.add('KRCH', 'integer')
    reachesTopo.table.columns.add('IRCH', 'integer')
    reachesTopo.table.columns.add('JRCH', 'integer')
    reachesTopo.table.columns.add('IREACH', 'integer')
    reachesTopo.table.columns.add('RCHLEN', 'integer')
    reachesTopo.table.columns.add('STRTOP', 'double precision')
    reachesTopo.table.columns.add('SLOPE', 'double precision')
    reachesTopo.table.columns.add('STRTHICK', 'double precision')
    reachesTopo.table.columns.add('STRHC1', 'double precision')
    reachesTopo.table.columns.add('THTS', 'double precision')
    reachesTopo.table.columns.add('THTI', 'double precision')
    reachesTopo.table.columns.add('EPS', 'double precision')
    reachesTopo.table.columns.add('UHC', 'double precision')
    reachesTopo.table.columns.add('xr1', 'double precision')
    reachesTopo.table.columns.add('xr2', 'double precision')
    reachesTopo.table.columns.add('yr1', 'double precision')
    reachesTopo.table.columns.add('yr2', 'double precision')

    # Commit columns before editing (necessary?)
    reachesTopo.table.conn.commit()
    reachesTopo.close()

    # Update some columns that can be done now
    reachesTopo.open('rw')
    colNames = np.array(gscript.vector_db_select(reaches, layer=1)['columns'])
    colValues = np.array(list(gscript.vector_db_select(reaches, layer=1)['values'].values()))
    cats = colValues[:,colNames == 'cat'].astype(int).squeeze()
    nseg = np.arange(1, len(cats)+1)
    nseg_cats = []
    for i in range(len(cats)):
        nseg_cats.append( (nseg[i], cats[i]) )
    cur = reachesTopo.table.conn.cursor()
    # Hydrogeologic properties
    cur.execute("update "+reaches+" set STRTHICK="+str(STRTHICK))
    cur.execute("update "+reaches+" set STRHC1="+str(STRHC1))
    cur.execute("update "+reaches+" set THTS="+str(THTS))
    cur.execute("update "+reaches+" set THTI="+str(THTI))
    cur.execute("update "+reaches+" set EPS="+str(EPS))
    cur.execute("update "+reaches+" set UHC="+str(UHC))
    # Grid properties
    cur.execute("update "+reaches+" set KRCH=1") # Top layer: unchangable
    cur.executemany("update "+reaches+" set IRCH=? where row=?", nseg_cats)
    cur.executemany("update "+reaches+" set JRCH=? where col=?", nseg_cats)
    reachesTopo.table.conn.commit()
    reachesTopo.close()
    v.to_db(map=reaches, columns='RCHLEN', option='length', quiet=True)


    # Still to go after these:
    # STRTOP (added with slope)
    # IREACH (whole next section dedicated to this)
    # SLOPE (need z_start and z_end)

    # Now, the light stuff is over: time to build the reach order
    v.to_db(map=reaches, option='start', columns='xr1,yr1')
    v.to_db(map=reaches, option='end', columns='xr2,yr2')

    # Now just sort by category, find which stream has the same xr1 and yr1 as
    # x1 and y1 (or a_x1, a_y1) and then find where its endpoint matches another 
    # starting point and move down the line.
    # v.db.select reaches col=cat,a_id,xr1,xr2 where="a_x1 = xr1"

    # First, get the starting coordinates of each stream segment
    # and a set of river ID's (ordered from 1...N)
    colNames = np.array(gscript.vector_db_select(segments, layer=1)['columns'])
    colValues = np.array(list(gscript.vector_db_select(segments, layer=1)['values'].values()))
    number_of_segments = colValues.shape[0]
    segment_x1s = colValues[:,colNames == 'x1'].astype(float).squeeze()
    segment_y1s = colValues[:,colNames == 'y1'].astype(float).squeeze()
    segment_ids = colValues[:,colNames == 'id'].astype(float).squeeze()

    # Then move back to the reaches map to produce the ordering
    colNames = np.array(gscript.vector_db_select(reaches, layer=1)['columns'])
    colValues = np.array(list(gscript.vector_db_select(reaches, layer=1)['values'].values()))
    reach_cats = colValues[:,colNames == 'cat'].astype(int).squeeze()
    reach_x1s = colValues[:,colNames == 'xr1'].astype(float).squeeze()
    reach_y1s = colValues[:,colNames == 'yr1'].astype(float).squeeze()
    reach_x2s = colValues[:,colNames == 'xr2'].astype(float).squeeze()
    reach_y2s = colValues[:,colNames == 'yr2'].astype(float).squeeze()
    segment_ids__reach = colValues[:,colNames == 'segment_id'].astype(float).squeeze()

    for segment_id in segment_ids:
        reach_order_cats = []
        downstream_directed = []
        ssel = segment_ids == segment_id
        rsel = segment_ids__reach == segment_id # selector
        # Find first segment: x1y1 first here, but not necessarily later
        downstream_directed.append(1)
        _x_match = reach_x1s[rsel] == segment_x1s[ssel]
        _y_match = reach_y1s[rsel] == segment_y1s[ssel]
        _i_match = _x_match * _y_match
        x1y1 = True # false if x2y2
        # Find cat
        _cat = int(reach_cats[rsel][_x_match * _y_match])
        reach_order_cats.append(_cat)
        # Get end of reach = start of next one
        reach_x_end = float(reach_x2s[reach_cats == _cat])
        reach_y_end = float(reach_y2s[reach_cats == _cat])
        while _i_match.any():
            _x_match = reach_x1s[rsel] == reach_x_end
            _y_match = reach_y1s[rsel] == reach_y_end
            _i_match = _x_match * _y_match
            if _i_match.any():
                _cat = int(reach_cats[rsel][_x_match * _y_match])
                reach_x_end = float(reach_x2s[reach_cats == _cat])
                reach_y_end = float(reach_y2s[reach_cats == _cat])
                reach_order_cats.append(_cat)
        print(len(reach_order_cats), len(reach_cats[rsel]))
          
        # Reach order to database table
        reach_number__reach_order_cats = []
        for i in range(len(reach_order_cats)):
            reach_number__reach_order_cats.append( (i+1, reach_order_cats[i]) )
        reachesTopo = VectorTopo(reaches)
        reachesTopo.open('rw')
        cur = reachesTopo.table.conn.cursor()
        cur.executemany("update "+reaches+" set IREACH=? where cat=?", 
                        reach_number__reach_order_cats)
        reachesTopo.table.conn.commit()
        reachesTopo.close()
      

    # TOP AND BOTTOM ARE OUT OF ORDER: SOME SEGS ARE BACKWARDS. UGH!!!!
    # NEED TO GET THEM IN ORDER TO GET THE Z VALUES AT START AND END

    # Compute slope and starting elevations from the elevations at the start
    # and end of the reaches and the length of each reach
    gscript.message('Obtaining elevation values from raster: may take time.')
    v.db_addcolumn(map=reaches, columns='zr1 double precision, zr2 double precision')
    zr1 = []
    zr2 = []
    for i in range(len(reach_cats)):
        _x = reach_x1s[i]
        _y = reach_y1s[i]
        _z = float(list(gscript.parse_command('r.what', map=elevation, coordinates=str(_x)+','+str(_y)).keys())[0].split('|')[-1])
        zr1.append(_z)
        _x = reach_x2s[i]
        _y = reach_y2s[i]
        _z = float(list(gscript.parse_command('r.what', map=elevation, coordinates=str(_x)+','+str(_y)).keys())[0].split('|')[-1])
        zr2.append(_z)

    zr1_cats = []
    zr2_cats = []
    for i in range(len(reach_cats)):
        zr1_cats.append( (zr1[i], reach_cats[i]) )
        zr2_cats.append( (zr2[i], reach_cats[i]) )

    reachesTopo = VectorTopo(reaches)
    reachesTopo.open('rw')
    cur = reachesTopo.table.conn.cursor()
    cur.executemany("update "+reaches+" set zr1=? where cat=?", zr1_cats)
    cur.executemany("update "+reaches+" set zr2=? where cat=?", zr2_cats)
    reachesTopo.table.conn.commit()
    reachesTopo.close()

    # Use these to create slope -- backwards possible on DEM!
    v.db_update(map=reaches, column='SLOPE', value='(zr1 - zr2)/RCHLEN')
    v.db_update(map=reaches, column='SLOPE', value=Smin, where='SLOPE <= '+str(Smin))

    # srtm_local_filled_grid = srtm_local_filled @ 200m (i.e. current grid)
    #  resolution
    # r.to.vect in=srtm_local_filled_grid out=srtm_local_filled_grid col=z type=area --o#
    # NOT SURE IF IT IS BEST TO USE MEAN ELEVATION OR TOP ELEVATION!!!!!!!!!!!!!!!!!!!!!!!
    v.db_addcolumn(map=reaches, columns='z_topo_mean double precision')
    v.what_rast(map=reaches, raster=elevation, column='z_topo_mean')#, query_column='z')
    v.db_update(map=reaches, column='STRTOP', value='z_topo_mean -'+str(h_stream), quiet=True)
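    # Standalone sketch (added, hedged) of the endpoint-matching walk used to
    # order reaches above, on toy arrays (all names and values hypothetical):
    #   x1 = np.array([0., 1., 2.]); y1 = np.zeros(3)   # reach start points
    #   x2 = np.array([1., 2., 3.]); y2 = np.zeros(3)   # reach end points
    #   cats = np.array([7, 3, 5])
    #   order = []; xe, ye = x1[0], y1[0]
    #   match = np.ones(3, dtype=bool)
    #   while match.any():
    #       match = (x1 == xe) & (y1 == ye)
    #       if match.any():
    #           i = np.where(match)[0][0]
    #           order.append(int(cats[i])); xe, ye = x2[i], y2[i]
    #   # order == [7, 3, 5]: categories sorted from upstream to downstream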
Code example #35
def main():
    repeat = int(options.pop("repeat"))
    nprocs = int(options.pop("nprocs"))
    subregions = options["subregions"]
    tosplit = flags["d"]
    # filter unused optional params
    for key in list(options.keys()):
        if options[key] == "":
            options.pop(key)
    if tosplit and "output_series" in options:
        gscript.fatal(
            _("Parallelization on subregion level is not supported together with <output_series> option"
              ))

    if (not gscript.overwrite() and gscript.list_grouped(
            "raster",
            pattern=options["output"] + "_run1")[gscript.gisenv()["MAPSET"]]):
        gscript.fatal(
            _("Raster map <{r}> already exists."
              " To overwrite, use the --overwrite flag").format(
                  r=options["output"] + "_run1"))
    global TMP_RASTERS
    cats = []
    if tosplit:
        gscript.message(_("Splitting subregions"))
        cats = (gscript.read_command("r.stats", flags="n",
                                     input=subregions).strip().splitlines())
        if len(cats) < 2:
            gscript.fatal(
                _("Not enough subregions to split computation. Do not use -d flag."
                  ))
        mapcalcs = []
        for cat in cats:
            new = PREFIX + cat
            TMP_RASTERS.append(new)
            mapcalcs.append("{new} = if({sub} == {cat}, {sub}, null())".format(
                sub=subregions, cat=cat, new=new))
        pool = Pool(nprocs)
        p = pool.map_async(split_subregions, mapcalcs)
        try:
            p.wait()
        except (KeyboardInterrupt, CalledModuleError):
            return

    options_list = []
    for i in range(repeat):
        if cats:
            for cat in cats:
                op = options.copy()
                op["random_seed"] = i + 1
                if "output_series" in op:
                    op["output_series"] += "_run" + str(i + 1) + "_" + cat
                    TMP_RASTERS.append(op["output_series"])
                op["output"] += "_run" + str(i + 1) + "_" + cat
                op["subregions"] = PREFIX + cat
                options_list.append((repeat, i + 1, cat, op))
                TMP_RASTERS.append(op["output"])
        else:
            op = options.copy()
            op["random_seed"] = i + 1
            if "output_series" in op:
                op["output_series"] += "_run" + str(i + 1)
            op["output"] += "_run" + str(i + 1)
            options_list.append((repeat, i + 1, None, op))

    pool = Pool(nprocs)
    p = pool.map_async(futures_process, options_list)
    try:
        p.wait()
    except (KeyboardInterrupt, CalledModuleError):
        return

    if cats:
        gscript.message(_("Patching subregions"))
        for i in range(repeat):
            patch_input = [
                options["output"] + "_run" + str(i + 1) + "_" + cat
                for cat in cats
            ]
            gscript.run_command(
                "r.patch",
                input=patch_input,
                output=options["output"] + "_run" + str(i + 1),
            )

    return 0
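# Naming sketch (added): with output="urban", repeat=2 and the -d flag
# splitting into subregion categories "1" and "2", the parallel runs write
# urban_run1_1, urban_run1_2, urban_run2_1, urban_run2_2, which r.patch then
# combines into urban_run1 and urban_run2 ("urban" is a hypothetical name).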
Code example #36
def main():
    options, flags = grass.parser()

    # required
    elevation_input = options['elevation']
    aspect_input = options['aspect']
    slope_input = options['slope']

    # optional
    latitude = options['lat']
    longitude = options['long']
    linke_input = options['linke']
    linke_value = options['linke_value']
    albedo_input = options['albedo']
    albedo_value = options['albedo_value']
    horizon_basename = options['horizon_basename']
    horizon_step = options['horizon_step']

    # outputs
    beam_rad = options['beam_rad']
    diff_rad = options['diff_rad']
    refl_rad = options['refl_rad']
    glob_rad = options['glob_rad']
    beam_rad_basename = beam_rad_basename_user = options['beam_rad_basename']
    diff_rad_basename = diff_rad_basename_user = options['diff_rad_basename']
    refl_rad_basename = refl_rad_basename_user = options['refl_rad_basename']
    glob_rad_basename = glob_rad_basename_user = options['glob_rad_basename']

    # missing output?
    if not any([
            beam_rad, diff_rad, refl_rad, glob_rad, beam_rad_basename,
            diff_rad_basename, refl_rad_basename, glob_rad_basename
    ]):
        grass.fatal(_("No output specified."))

    start_day = int(options['start_day'])
    end_day = int(options['end_day'])
    day_step = int(options['day_step'])

    if day_step > 1 and (beam_rad or diff_rad or refl_rad or glob_rad):
        grass.fatal(
            _("Day step higher then 1 would produce"
              " meaningless cumulative maps."))

    # check: start < end
    if start_day > end_day:
        grass.fatal(_("Start day is after end day."))
    if day_step >= end_day - start_day:
        grass.fatal(_("Day step is too big."))

    step = float(options['step'])

    nprocs = int(options['nprocs'])

    if beam_rad and not beam_rad_basename:
        beam_rad_basename = create_tmp_map_name('beam_rad')
        MREMOVE.append(beam_rad_basename)
    if diff_rad and not diff_rad_basename:
        diff_rad_basename = create_tmp_map_name('diff_rad')
        MREMOVE.append(diff_rad_basename)
    if refl_rad and not refl_rad_basename:
        refl_rad_basename = create_tmp_map_name('refl_rad')
        MREMOVE.append(refl_rad_basename)
    if glob_rad and not glob_rad_basename:
        glob_rad_basename = create_tmp_map_name('glob_rad')
        MREMOVE.append(glob_rad_basename)

    # check for existing identical map names
    if not grass.overwrite():
        check_daily_map_names(beam_rad_basename,
                              grass.gisenv()['MAPSET'], start_day, end_day,
                              day_step)
        check_daily_map_names(diff_rad_basename,
                              grass.gisenv()['MAPSET'], start_day, end_day,
                              day_step)
        check_daily_map_names(refl_rad_basename,
                              grass.gisenv()['MAPSET'], start_day, end_day,
                              day_step)
        check_daily_map_names(glob_rad_basename,
                              grass.gisenv()['MAPSET'], start_day, end_day,
                              day_step)

    # check for slope/aspect
    if not aspect_input or not slope_input:
        params = {}
        if not aspect_input:
            aspect_input = create_tmp_map_name('aspect')
            params.update({'aspect': aspect_input})
            REMOVE.append(aspect_input)
        if not slope_input:
            slope_input = create_tmp_map_name('slope')
            params.update({'slope': slope_input})
            REMOVE.append(slope_input)

        grass.info(_("Running r.slope.aspect..."))
        grass.run_command('r.slope.aspect',
                          elevation=elevation_input,
                          quiet=True,
                          **params)

    if beam_rad:
        grass.mapcalc('{beam} = 0'.format(beam=beam_rad), quiet=True)
    if diff_rad:
        grass.mapcalc('{diff} = 0'.format(diff=diff_rad), quiet=True)
    if refl_rad:
        grass.mapcalc('{refl} = 0'.format(refl=refl_rad), quiet=True)
    if glob_rad:
        grass.mapcalc('{glob} = 0'.format(glob=glob_rad), quiet=True)

    rsun_flags = ''
    if flags['m']:
        rsun_flags += 'm'
    if flags['p']:
        rsun_flags += 'p'

    grass.info(_("Running r.sun in a loop..."))
    count = 0
    # Parallel processing
    proc_list = []
    proc_count = 0
    suffixes_all = []
    days = range(start_day, end_day + 1, day_step)
    num_days = len(days)
    core.percent(0, num_days, 1)
    for day in days:
        count += 1
        core.percent(count, num_days, 10)

        suffix = '_' + format_order(day)
        proc_list.append(
            Process(target=run_r_sun,
                    args=(elevation_input, aspect_input, slope_input, latitude,
                          longitude, linke_input, linke_value, albedo_input,
                          albedo_value, horizon_basename, horizon_step, day,
                          step, beam_rad_basename, diff_rad_basename,
                          refl_rad_basename, glob_rad_basename, suffix,
                          rsun_flags)))

        proc_list[proc_count].start()
        proc_count += 1
        suffixes_all.append(suffix)

        if proc_count == nprocs or proc_count == num_days or count == num_days:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode

            if exitcodes != 0:
                core.fatal(_("Error while r.sun computation"))

            # Empty process list
            proc_list = []

    if beam_rad:
        sum_maps(beam_rad, beam_rad_basename, suffixes_all)
    if diff_rad:
        sum_maps(diff_rad, diff_rad_basename, suffixes_all)
    if refl_rad:
        sum_maps(refl_rad, refl_rad_basename, suffixes_all)
    if glob_rad:
        sum_maps(glob_rad, glob_rad_basename, suffixes_all)

    # FIXME: how does percent really work?
    # core.percent(1, 1, 1)

    # set color table
    if beam_rad:
        set_color_table([beam_rad])
    if diff_rad:
        set_color_table([diff_rad])
    if refl_rad:
        set_color_table([refl_rad])
    if glob_rad:
        set_color_table([glob_rad])

    if not any([
            beam_rad_basename_user, diff_rad_basename_user,
            refl_rad_basename_user, glob_rad_basename_user
    ]):
        return 0

    # add timestamps and register to spatio-temporal raster data set
    temporal = flags['t']
    if temporal:
        core.info(_("Registering created maps into temporal dataset..."))
        import grass.temporal as tgis

        def registerToTemporal(basename, suffixes, mapset, start_day, day_step,
                               title, desc):
            """
            Register daily output maps in spatio-temporal raster data set
            """
            maps = ','.join(
                [basename + suf + '@' + mapset for suf in suffixes])
            tgis.open_new_stds(basename,
                               type='strds',
                               temporaltype='relative',
                               title=title,
                               descr=desc,
                               semantic='sum',
                               dbif=None,
                               overwrite=grass.overwrite())

            tgis.register_maps_in_space_time_dataset(type='rast',
                                                     name=basename,
                                                     maps=maps,
                                                     start=start_day,
                                                     end=None,
                                                     unit='days',
                                                     increment=day_step,
                                                     dbif=None,
                                                     interval=False)

        # Make sure the temporal database exists
        tgis.init()

        mapset = grass.gisenv()['MAPSET']
        if beam_rad_basename_user:
            registerToTemporal(
                beam_rad_basename,
                suffixes_all,
                mapset,
                start_day,
                day_step,
                title="Beam irradiation",
                desc="Output beam irradiation raster maps [Wh.m-2.day-1]")
        if diff_rad_basename_user:
            registerToTemporal(
                diff_rad_basename,
                suffixes_all,
                mapset,
                start_day,
                day_step,
                title="Diffuse irradiation",
                desc="Output diffuse irradiation raster maps [Wh.m-2.day-1]")
        if refl_rad_basename_user:
            registerToTemporal(
                refl_rad_basename,
                suffixes_all,
                mapset,
                start_day,
                day_step,
                title="Reflected irradiation",
                desc="Output reflected irradiation raster maps [Wh.m-2.day-1]")
        if glob_rad_basename_user:
            registerToTemporal(
                glob_rad_basename,
                suffixes_all,
                mapset,
                start_day,
                day_step,
                title="Total irradiation",
                desc="Output total irradiation raster maps [Wh.m-2.day-1]")

    # just add timestamps, don't register
    else:
        for i, day in enumerate(days):
            if beam_rad_basename_user:
                set_time_stamp(beam_rad_basename + suffixes_all[i], day=day)
            if diff_rad_basename_user:
                set_time_stamp(diff_rad_basename + suffixes_all[i], day=day)
            if refl_rad_basename_user:
                set_time_stamp(refl_rad_basename + suffixes_all[i], day=day)
            if glob_rad_basename_user:
                set_time_stamp(glob_rad_basename + suffixes_all[i], day=day)

    # set color table for daily maps
    if beam_rad_basename_user:
        maps = [beam_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps)
    if diff_rad_basename_user:
        maps = [diff_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps)
    if refl_rad_basename_user:
        maps = [refl_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps)
    if glob_rad_basename_user:
        maps = [glob_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps)
Code example #37
def main():
    # Get the options
    input = options["input"]
    output = options["output"]
    strds = options["strds"]
    tempwhere = options["t_where"]
    where = options["where"]
    methods = options["method"]
    percentile = options["percentile"]

    overwrite = grass.overwrite()

    quiet = True

    if grass.verbosity() > 2:
        quiet = False

    if where == "" or where == " " or where == "\n":
        where = None

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)
    # Single space time raster dataset
    if len(strds_names) == 1:
        granu = first_strds.get_granularity()
        rows = first_strds.get_registered_maps(
            "name,mapset,start_time,end_time", tempwhere, "start_time", dbif)
        if not rows:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty") %
                first_strds.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps, first_strds.get_name(), granu)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(
                    _(
                        "Temporal type of space time raster "
                        "datasets must be equal\n<%(a)s> of type "
                        "%(type_a)s do not match <%(b)s> of type "
                        "%(type_b)s" % {
                            "a": first_strds.get_id(),
                            "type_a": first_strds.get_temporal_type(),
                            "b": dataset.get_id(),
                            "type_b": dataset.get_temporal_type(),
                        }))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds",
            "strds",
            strds_names,
            strds_names[0],
            False,
            None,
            "equal",
            False,
            False,
        )
        # TODO check granularity for multiple STRDS
        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:

                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list, name)
                samples.append(s)
    # Get the layer and database connections of the input vector
    if where:
        try:
            grass.run_command("v.extract",
                              input=input,
                              where=where,
                              output=output)
        except CalledModuleError:
            dbif.close()
            grass.fatal(
                _("Unable to run v.extract for vector map"
                  " <%s> and where <%s>") % (input, where))
    else:
        gcopy(input, output, "vector")

    msgr = Messenger()
    perc_curr = 0
    perc_tot = len(samples)
    pymap = Vector(output)
    try:
        pymap.open("r")
    except Exception:
        dbif.close()
        grass.fatal(_("Unable to open vector map <%s>" % output))
    pymap.close()

    for sample in samples:
        raster_names = sample.raster_names
        # Call v.rast.stats for each raster map
        for name in raster_names:
            day = sample.printDay()
            column_name = "%s_%s" % (sample.strds_name, day)
            try:
                grass.run_command(
                    "v.rast.stats",
                    map=output,
                    raster=name,
                    column=column_name,
                    method=methods,
                    percentile=percentile,
                    quiet=quiet,
                    overwrite=overwrite,
                )
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to run v.rast.stats for vector map"
                      " <%s> and raster map <%s>") % (output, name))

        msgr.percent(perc_curr, perc_tot, 1)
        perc_curr += 1

    dbif.close()
Code example #38
def main():
    """
    Builds a grid for the MODFLOW component of the USGS hydrologic model,
    GSFLOW.
    """

    options, flags = gscript.parser()
    basin = options['basin']
    pp = options['pour_point']
    raster_input = options['raster_input']
    dx = options['dx']
    dy = options['dy']
    grid = options['output']
    mask = options['mask_output']
    bc_cell = options['bc_cell']
    # basin='basins_tmp_onebasin'; pp='pp_tmp'; raster_input='DEM'; raster_output='DEM_coarse'; dx=dy='500'; grid='grid_tmp'; mask='mask_tmp'
    """
    # Fatal if raster input and output are not both set
    _lena0 = (len(raster_input) == 0)
    _lenb0 = (len(raster_output) == 0)
    if _lena0 + _lenb0 == 1:
        grass.fatal("You must set both raster input and output, or neither.")
    """

    # Create grid -- overlaps DEM, one cell of padding
    gscript.use_temp_region()
    reg = gscript.region()
    reg_grid_edges_sn = np.linspace(reg['s'], reg['n'], reg['rows'])
    reg_grid_edges_we = np.linspace(reg['w'], reg['e'], reg['cols'])
    g.region(vector=basin, ewres=dx, nsres=dy)
    regnew = gscript.region()
    # Use a grid ratio -- don't match exactly the desired MODFLOW resolution
    grid_ratio_ns = np.round(regnew['nsres'] / reg['nsres'])
    grid_ratio_ew = np.round(regnew['ewres'] / reg['ewres'])
    # Get S, W, and then move the unit number of grid cells over to get N and E
    # and include 3 cells of padding around the whole watershed
    _s_dist = np.abs(reg_grid_edges_sn - (regnew['s'] - 3. * regnew['nsres']))
    _s_idx = np.where(_s_dist == np.min(_s_dist))[0][0]
    _s = float(reg_grid_edges_sn[_s_idx])
    _n_grid = np.arange(_s, reg['n'] + 3 * grid_ratio_ns * reg['nsres'],
                        grid_ratio_ns * reg['nsres'])
    _n_dist = np.abs(_n_grid - (regnew['n'] + 3. * regnew['nsres']))
    _n_idx = np.where(_n_dist == np.min(_n_dist))[0][0]
    _n = float(_n_grid[_n_idx])
    _w_dist = np.abs(reg_grid_edges_we - (regnew['w'] - 3. * regnew['ewres']))
    _w_idx = np.where(_w_dist == np.min(_w_dist))[0][0]
    _w = float(reg_grid_edges_we[_w_idx])
    _e_grid = np.arange(_w, reg['e'] + 3 * grid_ratio_ew * reg['ewres'],
                        grid_ratio_ew * reg['ewres'])
    _e_dist = np.abs(_e_grid - (regnew['e'] + 3. * regnew['ewres']))
    _e_idx = np.where(_e_dist == np.min(_e_dist))[0][0]
    _e = float(_e_grid[_e_idx])
    # Finally make the region
    g.region(w=str(_w),
             e=str(_e),
             s=str(_s),
             n=str(_n),
             nsres=str(grid_ratio_ns * reg['nsres']),
             ewres=str(grid_ratio_ew * reg['ewres']))
    # And then make the grid
    v.mkgrid(map=grid, overwrite=gscript.overwrite())
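    # Numeric sketch (added, hedged numbers): if the fine region has
    # nsres=10 m and the basin-aligned region comes back with nsres=498 m,
    # then grid_ratio_ns = round(498 / 10) = 50, so each MODFLOW cell spans
    # exactly 50 fine cells (500 m), and every coarse edge computed above is
    # snapped onto a fine-grid edge with ~3 coarse cells of padding per side.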

    # Cell numbers (row, column, continuous ID)
    v.db_addcolumn(map=grid, columns='id int', quiet=True)
    colNames = np.array(gscript.vector_db_select(grid, layer=1)['columns'])
    colValues = np.array(
        list(gscript.vector_db_select(grid, layer=1)['values'].values()))
    cats = colValues[:, colNames == 'cat'].astype(int).squeeze()
    rows = colValues[:, colNames == 'row'].astype(int).squeeze()
    cols = colValues[:, colNames == 'col'].astype(int).squeeze()
    nrows = np.max(rows)
    ncols = np.max(cols)
    cats = np.ravel([cats])
    _id = np.ravel([ncols * (rows - 1) + cols])
    _id_cat = []
    for i in range(len(_id)):
        _id_cat.append((_id[i], cats[i]))
    gridTopo = VectorTopo(grid)
    gridTopo.open('rw')
    cur = gridTopo.table.conn.cursor()
    cur.executemany("update " + grid + " set id=? where cat=?", _id_cat)
    gridTopo.table.conn.commit()
    gridTopo.close()

    # Cell area
    v.db_addcolumn(map=grid, columns='area_m2 double precision', quiet=True)
    v.to_db(map=grid,
            option='area',
            units='meters',
            columns='area_m2',
            quiet=True)

    # Basin mask
    if len(mask) > 0:
        # Fine resolution region:
        g.region(n=reg['n'],
                 s=reg['s'],
                 w=reg['w'],
                 e=reg['e'],
                 nsres=reg['nsres'],
                 ewres=reg['ewres'])
        # Rasterize basin
        v.to_rast(input=basin,
                  output=mask,
                  use='val',
                  value=1,
                  overwrite=gscript.overwrite(),
                  quiet=True)
        # Coarse resolution region:
        g.region(w=str(_w),
                 e=str(_e),
                 s=str(_s),
                 n=str(_n),
                 nsres=str(grid_ratio_ns * reg['nsres']),
                 ewres=str(grid_ratio_ew * reg['ewres']))
        r.resamp_stats(input=mask,
                       output=mask,
                       method='sum',
                       overwrite=True,
                       quiet=True)
        r.mapcalc(mask + ' = ' + mask + ' > 0', overwrite=True, quiet=True)
    """
    # Resampled raster
    if len(raster_output) > 0:
        r.resamp_stats(input=raster_input, output=raster_output, method='average', overwrite=gscript.overwrite(), quiet=True)
    """

    # Pour point
    if len(pp) > 0:
        v.db_addcolumn(map=pp,
                       columns=('row integer', 'col integer'),
                       quiet=True)
        v.build(map=pp, quiet=True)
        v.what_vect(map=pp,
                    query_map=grid,
                    column='row',
                    query_column='row',
                    quiet=True)
        v.what_vect(map=pp,
                    query_map=grid,
                    column='col',
                    query_column='col',
                    quiet=True)

    # Next point downstream of the pour point
    if len(bc_cell) > 0:
        ########## NEED TO USE TRUE TEMPORARY FILE ##########
        # May not work with dx != dy!
        v.to_rast(input=pp, output='tmp', use='val', value=1, overwrite=True)
        r.buffer(input='tmp',
                 output='tmp',
                 distances=float(dx) * 1.5,
                 overwrite=True)
        r.mapcalc('tmp = (tmp == 2) * ' + raster_input, overwrite=True)
        r.drain(input=raster_input,
                start_points=pp,
                output='tmp2',
                overwrite=True)
        r.mapcalc('tmp = tmp2 * tmp', overwrite=True)
        r.null(map='tmp', setnull=0)
        r.to_vect(input='tmp',
                  output=bc_cell,
                  type='point',
                  column='z',
                  overwrite=gscript.overwrite(),
                  quiet=True)
        v.db_addcolumn(map=bc_cell,
                       columns=('row integer', 'col integer'),
                       quiet=True)
        v.build(map=bc_cell, quiet=True)
        v.what_vect(map=bc_cell, query_map=grid, column='row',
                    query_column='row', quiet=True)
        v.what_vect(map=bc_cell, query_map=grid, column='col',
                    query_column='col', quiet=True)

    g.region(n=reg['n'],
             s=reg['s'],
             w=reg['w'],
             e=reg['e'],
             nsres=reg['nsres'],
             ewres=reg['ewres'])
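
A note on the cell numbering used above: MODFLOW orders cells row by row, so the continuous ID follows from the 1-based row and column as id = ncols * (row - 1) + col. A minimal numpy sketch of the same mapping, with made-up values independent of GRASS:

import numpy as np

# 1-based row/column indices of a 2x3 grid, as read from the grid table
rows = np.array([1, 1, 1, 2, 2, 2])
cols = np.array([1, 2, 3, 1, 2, 3])
ncols = cols.max()

# row-major, 1-based continuous cell ID, matching the expression above
cell_id = ncols * (rows - 1) + cols
print(cell_id)  # [1 2 3 4 5 6]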
コード例 #39
0
def main():

    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    global tile, tmpdir, in_temp, currdir, tmpregionname

    in_temp = False

    url = options["url"]
    username = options["username"]
    password = options["password"]
    local = options["local"]
    output = options["output"]
    memory = options["memory"]
    fillnulls = flags["n"]
    srtmv3 = flags["2"] == 0
    one = flags["1"]
    dozerotile = flags["z"]
    reproj_res = options["resolution"]

    overwrite = grass.overwrite()

    res = "00:00:03"
    if srtmv3:
        fillnulls = 0
        if one:
            res = "00:00:01"
    else:
        one = None

    if len(local) == 0:
        if len(url) == 0:
            if srtmv3:
                if one:
                    url = "https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/"
                else:
                    url = "https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11/"
            else:
                url = "http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/"

    if len(local) == 0:
        local = None

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags="j")
    kv = grass.parse_key_val(s)

    if fillnulls == 1 and int(memory) <= 0:
        grass.warning(
            _("Amount of memory to use for interpolation must be positive, setting to 300 MB"
              ))
        memory = "300"

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    currdir = os.getcwd()
    pid = os.getpid()

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True
    if local is None:
        local = tmpdir

    # save region
    tmpregionname = "r_in_srtm_tmp_region"
    grass.run_command("g.region", save=tmpregionname, overwrite=overwrite)

    # get extents
    if kv["+proj"] == "longlat":
        reg = grass.region()
    else:
        if not options["resolution"]:
            grass.fatal(
                _("The <resolution> must be set if the projection is not 'longlat'."
                  ))
        reg2 = grass.parse_command("g.region", flags="uplg")
        north = [float(reg2["ne_lat"]), float(reg2["nw_lat"])]
        south = [float(reg2["se_lat"]), float(reg2["sw_lat"])]
        east = [float(reg2["ne_long"]), float(reg2["se_long"])]
        west = [float(reg2["nw_long"]), float(reg2["sw_long"])]
        reg = {}
        if np.mean(north) > np.mean(south):
            reg["n"] = max(north)
            reg["s"] = min(south)
        else:
            reg["n"] = min(north)
            reg["s"] = max(south)
        if np.mean(west) > np.mean(east):
            reg["w"] = max(west)
            reg["e"] = min(east)
        else:
            reg["w"] = min(west)
            reg["e"] = max(east)
        # get actual location, mapset, ...
        grassenv = grass.gisenv()
        tgtloc = grassenv["LOCATION_NAME"]
        tgtmapset = grassenv["MAPSET"]
        GISDBASE = grassenv["GISDBASE"]
        TGTGISRC = os.environ["GISRC"]

    if kv["+proj"] != "longlat":
        SRCGISRC, TMPLOC = createTMPlocation()
    if options["region"] is None or options["region"] == "":
        north = reg["n"]
        south = reg["s"]
        east = reg["e"]
        west = reg["w"]
    else:
        west, south, east, north = options["region"].split(",")
        west = float(west)
        south = float(south)
        east = float(east)
        north = float(north)

    # adjust extents to cover SRTM tiles: 1 degree bounds
    tmpint = int(north)
    if tmpint < north:
        north = tmpint + 1
    else:
        north = tmpint

    tmpint = int(south)
    if tmpint > south:
        south = tmpint - 1
    else:
        south = tmpint

    tmpint = int(east)
    if tmpint < east:
        east = tmpint + 1
    else:
        east = tmpint

    tmpint = int(west)
    if tmpint > west:
        west = tmpint - 1
    else:
        west = tmpint
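    # Note: int() truncates toward zero, so the four blocks above act as
    # ceil() for north/east and floor() for south/west, e.g.
    #   north =  2.5 -> int() gives 2, 2 < 2.5, so north becomes 3 (ceil)
    #   west  = -2.5 -> int() gives -2, -2 > -2.5, so west becomes -3 (floor)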

    if north == south:
        north += 1
    if east == west:
        east += 1

    rows = abs(north - south)
    cols = abs(east - west)
    ntiles = rows * cols
    grass.message(_("Importing %d SRTM tiles...") % ntiles, flag="i")
    counter = 1

    srtmtiles = ""
    valid_tiles = 0
    for ndeg in range(south, north):
        for edeg in range(west, east):
            grass.percent(counter, ntiles, 1)
            counter += 1
            if ndeg < 0:
                tile = "S"
            else:
                tile = "N"
            tile = tile + "%02d" % abs(ndeg)
            if edeg < 0:
                tile = tile + "W"
            else:
                tile = tile + "E"
            tile = tile + "%03d" % abs(edeg)
            grass.debug("Tile: %s" % tile, debug=1)

            if local != tmpdir:
                gotit = import_local_tile(tile, local, pid, srtmv3, one)
            else:
                gotit = download_tile(tile, url, pid, srtmv3, one, username,
                                      password)
                if gotit == 1:
                    gotit = import_local_tile(tile, tmpdir, pid, srtmv3, one)
            if gotit == 1:
                grass.verbose(_("Tile %s successfully imported") % tile)
                valid_tiles += 1
            elif dozerotile:
                # create tile with zeros
                if one:
                    # north
                    if ndeg < -1:
                        tmpn = "%02d:59:59.5S" % (abs(ndeg) - 2)
                    else:
                        tmpn = "%02d:00:00.5N" % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = "%02d:00:00.5S" % abs(ndeg)
                    else:
                        tmps = "%02d:59:59.5N" % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = "%03d:59:59.5W" % (abs(edeg) - 2)
                    else:
                        tmpe = "%03d:00:00.5E" % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = "%03d:00:00.5W" % abs(edeg)
                    else:
                        tmpw = "%03d:59:59.5E" % (edeg - 1)
                else:
                    # north
                    if ndeg < -1:
                        tmpn = "%02d:59:58.5S" % (abs(ndeg) - 2)
                    else:
                        tmpn = "%02d:00:01.5N" % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = "%02d:00:01.5S" % abs(ndeg)
                    else:
                        tmps = "%02d:59:58.5N" % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = "%03d:59:58.5W" % (abs(edeg) - 2)
                    else:
                        tmpe = "%03d:00:01.5E" % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = "%03d:00:01.5W" % abs(edeg)
                    else:
                        tmpw = "%03d:59:58.5E" % (edeg - 1)

                grass.run_command("g.region",
                                  n=tmpn,
                                  s=tmps,
                                  e=tmpe,
                                  w=tmpw,
                                  res=res)
                grass.run_command(
                    "r.mapcalc",
                    expression="%s = 0" %
                    (tile + ".r.in.srtm.tmp." + str(pid)),
                    quiet=True,
                )
                grass.run_command("g.region", region=tmpregionname)

    # g.list with sep = comma does not work ???
    pattern = "*.r.in.srtm.tmp.%d" % pid
    srtmtiles = grass.read_command("g.list",
                                   type="raster",
                                   pattern=pattern,
                                   sep="newline",
                                   quiet=True)

    srtmtiles = srtmtiles.splitlines()
    srtmtiles = ",".join(srtmtiles)
    grass.debug("'List of Tiles: %s" % srtmtiles, debug=1)

    if valid_tiles == 0:
        grass.run_command("g.remove",
                          type="raster",
                          name=str(srtmtiles),
                          flags="f",
                          quiet=True)
        grass.warning(_("No tiles imported"))
        if local != tmpdir:
            grass.fatal(
                _("Please check if local folder <%s> is correct.") % local)
        else:
            grass.fatal(
                _("Please check internet connection, credentials, and if url <%s> is correct."
                  ) % url)

    grass.run_command("g.region", raster=str(srtmtiles))

    grass.message(_("Patching tiles..."))
    if fillnulls == 0:
        if valid_tiles > 1:
            if kv["+proj"] != "longlat":
                grass.run_command("r.buildvrt", input=srtmtiles, output=output)
            else:
                grass.run_command("r.patch", input=srtmtiles, output=output)
        else:
            grass.run_command("g.rename",
                              raster="%s,%s" % (srtmtiles, output),
                              quiet=True)
    else:
        ncells = grass.region()["cells"]
        # int() handles arbitrarily large values in Python 3 (long() is gone)
        if int(ncells) > 1000000000:
            grass.message(
                _("%s cells to interpolate, this will take some time") %
                str(ncells),
                flag="i",
            )
        if kv["+proj"] != "longlat":
            grass.run_command("r.buildvrt",
                              input=srtmtiles,
                              output=output + ".holes")
        else:
            grass.run_command("r.patch",
                              input=srtmtiles,
                              output=output + ".holes")
        mapstats = grass.parse_command("r.univar",
                                       map=output + ".holes",
                                       flags="g",
                                       quiet=True)
        if mapstats["null_cells"] == "0":
            grass.run_command("g.rename",
                              raster="%s,%s" % (output + ".holes", output),
                              quiet=True)
        else:
            grass.run_command(
                "r.resamp.bspline",
                input=output + ".holes",
                output=output + ".interp",
                se="0.0025",
                sn="0.0025",
                method="linear",
                memory=memory,
                flags="n",
            )
            grass.run_command(
                "r.patch",
                input="%s,%s" % (output + ".holes", output + ".interp"),
                output=output + ".float",
                flags="z",
            )
            grass.run_command("r.mapcalc",
                              expression="%s = round(%s)" %
                              (output, output + ".float"))
            grass.run_command(
                "g.remove",
                type="raster",
                name="%s,%s,%s" %
                (output + ".holes", output + ".interp", output + ".float"),
                flags="f",
                quiet=True,
            )

    # switch to target location
    if kv["+proj"] != "longlat":
        os.environ["GISRC"] = str(TGTGISRC)
        # r.proj
        grass.message(_("Reprojecting <%s>...") % output)
        kwargs = {
            "location": TMPLOC,
            "mapset": "PERMANENT",
            "input": output,
            "memory": memory,
            "resolution": reproj_res,
        }
        if options["method"]:
            kwargs["method"] = options["method"]
        try:
            grass.run_command("r.proj", **kwargs)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % output)
    else:
        if fillnulls != 0:
            grass.run_command("g.remove",
                              type="raster",
                              pattern=pattern,
                              flags="f",
                              quiet=True)

    # nice color table
    grass.run_command("r.colors", map=output, color="srtm", quiet=True)

    # write metadata:
    tmphist = grass.tempfile()
    f = open(tmphist, "w+")
    f.write(os.environ["CMDLINE"])
    f.close()
    if srtmv3:
        source1 = "SRTM V3"
    else:
        source1 = "SRTM V2.1"
    grass.run_command(
        "r.support",
        map=output,
        loadhistory=tmphist,
        description="generated by r.in.srtm.region",
        source1=source1,
        source2=(local if local != tmpdir else url),
    )
    grass.try_remove(tmphist)

    grass.message(_("Done: generated map <%s>") % output)
コード例 #40
0
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    inputs = options["inputs"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    #Get the current mapset to create the id of the space time dataset
    mapset = grass.encode(grass.gisenv()["MAPSET"])

    inputs_split = inputs.split(",")
    input_ids = []

    for input in inputs_split:
        if input.find("@") >= 0:
            input_ids.append(input)
        else:
            input_ids.append(input + "@" + mapset)

    # Set the output name correctly
    if output.find("@") >= 0:
        out_mapset = output.split("@")[1]
        if out_mapset != mapset:
            grass.fatal(
                _("Output space time dataset <%s> must be located in this mapset"
                  ) % (output))
        output_id = output
    else:
        output_id = output + "@" + mapset

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds_list = []
    first = None

    for id in input_ids:
        stds = tgis.open_old_stds(id, type, dbif)
        if first is None:
            first = stds

        if first.get_temporal_type() != stds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Space time datasets to merge must have the same temporal type"
                  ))

        stds_list.append(stds)

    # Do nothing if nothing to merge
    if first is None:
        dbif.close()
        return

    # Check if the new id is in the database
    output_stds = tgis.dataset_factory(type, output_id)
    output_exists = output_stds.is_in_db(dbif=dbif)

    if output_exists and not grass.overwrite():
        dbif.close()
        grass.fatal(_("Unable to merge maps into space time %s dataset <%s>; "
                      "please use the overwrite flag.") %
                    (stds.get_new_map_instance(None).get_type(), output_id))

    if not output_exists:
        output_stds = tgis.open_new_stds(output,
                                         type,
                                         first.get_temporal_type(),
                                         "Merged space time dataset",
                                         "Merged space time dataset",
                                         "mean",
                                         dbif=dbif,
                                         overwrite=False)
    else:
        output_stds.select(dbif=dbif)

    registered_output_maps = {}
    # Maps that are already registered in an existing dataset
    # are not registered again
    if output_exists:
        rows = output_stds.get_registered_maps(columns="id", dbif=dbif)
        if rows:
            for row in rows:
                registered_output_maps[row["id"]] = row["id"]

    for stds in stds_list:
        # Avoid merging of already registered maps
        if stds.get_id() != output_stds.get_id():
            maps = stds.get_registered_maps_as_objects(dbif=dbif)

            if maps:
                for map in maps:
                    # Jump over already registered maps
                    if map.get_id() in registered_output_maps:
                        continue

                    map.select(dbif=dbif)
                    output_stds.register_map(map=map, dbif=dbif)
                    # Update the registered map list
                    registered_output_maps[map.get_id()] = map.get_id()

    output_stds.update_from_registered_maps(dbif=dbif)

    if output_exists:
        output_stds.update_command_string(dbif=dbif)
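
The options read at the top (inputs, output, type) identify this as t.merge. A minimal usage sketch from a script; the dataset names are hypothetical and must exist in the current mapset:

import grass.script as grass

# merge two space time raster datasets into one
grass.run_command("t.merge",
                  inputs="lst_2010,lst_2011",
                  output="lst_merged",
                  type="strds")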
コード例 #41
0
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    sampler = options["sample"]
    where = options["where"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    sampling = options["sampling"]
    offset = options["offset"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]
    type = options["type"]

    topo_list = sampling.split(",")

    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "strds", dbif)
    sampler_sp = tgis.open_old_stds(sampler, type, dbif)

    if sampler_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        gcore.fatal(
            _("Input and aggregation dataset must have "
              "the same temporal type"))

    # Check if intervals are present
    if sampler_sp.temporal_extent.get_map_time() != "interval":
        dbif.close()
        gcore.fatal(
            _("All registered maps of the aggregation dataset "
              "must have time intervals"))

    # We will create the strds later, but need to check here
    tgis.check_new_stds(output, "strds", dbif, gcore.overwrite())

    map_list = sp.get_registered_maps_as_objects(where=where,
                                                 order="start_time",
                                                 dbif=dbif)

    if not map_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)

    granularity_list = sampler_sp.get_registered_maps_as_objects(
        where=where, order="start_time", dbif=dbif)

    if not granularity_list:
        dbif.close()
        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)

    gran = sampler_sp.get_granularity()

    output_list = tgis.aggregate_by_topology(granularity_list=granularity_list,
                                             granularity=gran,
                                             map_list=map_list,
                                             topo_list=topo_list,
                                             basename=base,
                                             time_suffix=time_suffix,
                                             offset=offset,
                                             method=method,
                                             nprocs=nprocs,
                                             spatial=None,
                                             overwrite=gcore.overwrite())

    if output_list:
        temporal_type, semantic_type, title, description = \
            sp.get_initial_values()
        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
                                          title, description, semantic_type,
                                          dbif, gcore.overwrite())
        tgis.register_map_object_list("rast", output_list,
                                      output_strds, register_null,
                                      sp.get_relative_time_unit(), dbif)

        # Update the raster metadata table entries with aggregation type
        output_strds.set_aggregation_type(method)
        output_strds.metadata.update(dbif)

    dbif.close()
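
This option set (input, sample, basename, method, sampling, ...) matches t.rast.aggregate.ds, which aggregates one space time raster dataset over the intervals of another. A hedged usage sketch with hypothetical dataset names:

import grass.script as grass

# aggregate daily temperatures over the intervals of a monthly dataset
grass.run_command("t.rast.aggregate.ds",
                  input="temperature_daily",
                  sample="precipitation_monthly",
                  output="temperature_monthly",
                  basename="temp_monthly",
                  method="average",
                  sampling="contains")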
コード例 #42
0
ファイル: v.to.lines.py プロジェクト: bigrusterwall/grass
def main():
    # Get the options
    input = options["input"]
    input_name = input.split('@')[0]
    output = options["output"]
    method = options["method"]
    min_cat = None
    max_cat = None
    point = None
    overwrite = grass.overwrite()

    quiet = True

    if grass.verbosity() > 2:
        quiet = False

    in_info = grass.vector_info(input)
    # check for wild mixture of vector types
    if in_info['points'] > 0 and in_info['boundaries'] > 0:
        grass.fatal(
            _("The input vector map contains both polygons and points,"
              " cannot handle mixed types"))

    pid = os.getpid()
    # process points via triangulation, then exit
    if in_info['points'] > 0:
        point = True
        layer = 1  # hardcoded for now
        out_temp = '{inp}_point_tmp_{pid}'.format(inp=input_name, pid=pid)
        if method == 'delaunay':
            grass.message(
                _("Processing point data (%d points found)...") %
                in_info['points'])
            grass.run_command('v.delaunay',
                              input=input,
                              layer=layer,
                              output=out_temp,
                              quiet=quiet)

        grass.run_command('v.db.addtable', map=out_temp, quiet=True)
        input = out_temp
        in_info = grass.vector_info(input)

    # process areas
    if in_info['areas'] == 0 and in_info['boundaries'] == 0:
        grass.fatal(_("The input vector map does not contain polygons"))

    out_type = '{inp}_type_{pid}'.format(inp=input_name, pid=pid)
    input_tmp = '{inp}_tmp_{pid}'.format(inp=input_name, pid=pid)
    remove_names = "%s,%s" % (out_type, input_tmp)
    grass.message(
        _("Processing area data (%d areas found)...") % in_info['areas'])

    try:
        grass.run_command('v.category',
                          layer="2",
                          type='boundary',
                          option='add',
                          input=input,
                          out=input_tmp,
                          quiet=quiet)
    except CalledModuleError:
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=input_tmp,
                          quiet=quiet)
        grass.fatal(_("Error creating layer 2"))
    try:
        grass.run_command('v.db.addtable',
                          map=input_tmp,
                          layer="2",
                          quiet=quiet)
    except CalledModuleError:
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=input_tmp,
                          quiet=quiet)
        grass.fatal(_("Error creating new table for layer 2"))
    try:
        grass.run_command('v.to.db',
                          map=input_tmp,
                          option="sides",
                          columns="left,right",
                          layer="2",
                          quiet=quiet)
    except CalledModuleError:
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=input_tmp,
                          quiet=quiet)
        grass.fatal(_("Error populating new table for layer 2"))
    try:
        grass.run_command('v.type',
                          input=input_tmp,
                          output=out_type,
                          from_type='boundary',
                          to_type='line',
                          quiet=quiet,
                          layer="2")
    except CalledModuleError:
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=remove_names,
                          quiet=quiet)
        grass.fatal(_("Error converting polygon to line"))
    report = grass.read_command('v.category',
                                flags='g',
                                input=out_type,
                                option='report',
                                quiet=quiet)
    report = decode(report).split('\n')
    for r in report:
        if r.find('centroid') != -1:
            min_cat = r.split()[-2]
            max_cat = r.split()[-1]
            break
    if min_cat and max_cat:
        try:
            grass.run_command('v.edit',
                              map=out_type,
                              tool='delete',
                              type='centroid',
                              layer=2,
                              quiet=quiet,
                              cats='{mi}-{ma}'.format(mi=min_cat, ma=max_cat))
        except CalledModuleError:
            grass.run_command('g.remove',
                              flags='f',
                              type='vector',
                              name=remove_names,
                              quiet=quiet)
            grass.fatal(_("Error removing centroids"))

    try:
        try:
            # TODO: fix magic numbers for layer here and there
            grass.run_command('v.db.droptable',
                              map=out_type,
                              layer=1,
                              flags='f',
                              quiet=True)
        except CalledModuleError:
            grass.run_command('g.remove',
                              flags='f',
                              type='vector',
                              name=remove_names,
                              quiet=quiet)
            grass.fatal(_("Error removing table from layer 1"))
    # TODO: this except block seems never to be reached, so it looks wrong
    except:
        grass.warning(_("No table for layer %d" % 1))
    try:
        grass.run_command('v.category',
                          input=out_type,
                          option='transfer',
                          output=output,
                          layer="2,1",
                          quiet=quiet,
                          overwrite=overwrite)
    except CalledModuleError:
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=remove_names,
                          quiet=quiet)
        grass.fatal(_("Error adding categories"))
    grass.run_command('g.remove',
                      flags='f',
                      type='vector',
                      name=remove_names,
                      quiet=quiet)
    if point:
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=out_temp,
                          quiet=quiet)
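
The centroid min/max parsing above depends on the layout of v.category option=report -g, which prints one line per layer and type with the minimum and maximum category in the last two fields. A small sketch of the same parsing over a hypothetical report:

# hypothetical output of: v.category input=... option=report -g
report = """1 line 1 573
2 centroid 1 128"""

min_cat = max_cat = None
for line in report.splitlines():
    if 'centroid' in line:
        min_cat = line.split()[-2]
        max_cat = line.split()[-1]
        break
print(min_cat, max_cat)  # 1 128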
コード例 #43
0
def main():
    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None
    global content
    content = None
    global raster
    raster = options['raster']
    global decimals
    decimals = int(options['decimals'])
    global zone_map
    zone_map = options['zone_map']

    csvfile = options['csvfile'] if options['csvfile'] else []
    separator = gscript.separator(options['separator'])
    prefix = options['prefix'] if options['prefix'] else []
    classes_list = options['classes_list'].split(
        ',') if options['classes_list'] else []
    vectormap = options['vectormap'] if options['vectormap'] else []

    prop = 'proportion' in options['statistics'].split(',')
    mode = 'mode' in options['statistics'].split(',')

    # Check if input layer is CELL
    if gscript.parse_command('r.info', flags='g',
                             map=raster)['datatype'] != 'CELL':
        gscript.fatal(
            _("The type of the input map 'raster' is not CELL. Please use a raster with integer values"
              ))
    if gscript.parse_command('r.info', flags='g',
                             map=zone_map)['datatype'] != 'CELL':
        gscript.fatal(
            _("The type of the input map 'zone_map' is not CELL. Please use a raster with integer values"
              ))

    # Check that 'decimals' is positive and within a credible range
    if decimals <= 0:
        gscript.fatal(_("The number of decimals should be positive"))
    if decimals > 100:
        gscript.fatal(_("The number of decimals should not be more than 100"))

    # Adjust region to input map if flag is active
    if flags['r']:
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=zone_map)

    # R.STATS
    tmpfile = gscript.tempfile()
    try:
        if flags['n']:
            gscript.run_command(
                'r.stats',
                overwrite=True,
                flags='c',
                input='%s,%s' % (zone_map, raster),
                output=tmpfile,
                separator=separator)  # Consider null values in R.STATS
        else:
            gscript.run_command(
                'r.stats',
                overwrite=True,
                flags='cn',
                input='%s,%s' % (zone_map, raster),
                output=tmpfile,
                separator=separator)  # Do not consider null values in R.STATS
        gscript.message(_("r.stats command finished..."))
    except:
        gscript.fatal(_("The execution of r.stats failed"))

    # COMPUTE STATISTICS
    # Open csv file and create a csv reader
    rstatsfile = open(tmpfile, 'r')
    reader = csv.reader(rstatsfile, delimiter=separator)
    # Total pixels per category per zone
    totals_dict = {}
    for row in reader:
        # Enter only if the current zone ID is not yet in the dictionary
        if row[0] not in totals_dict:
            totals_dict[row[0]] = {}  # New nested dictionary for this zone ID
        totals_dict[row[0]][row[1]] = int(row[2])
    # Delete key '*' in 'totals_dict' that could appear if there are null values in the zone raster
    if '*' in totals_dict:
        del totals_dict['*']
    # Close file
    rstatsfile.close()
    # Mode
    if mode:
        modalclass_dict = {}
        for ID in totals_dict:
            # Trick found here: https://stackoverflow.com/a/268285/8013239
            # .items() replaces the Python 2-only .iteritems()
            mode = max(totals_dict[ID].items(),
                       key=operator.itemgetter(1))[0]
            if mode == '*':  # If the mode is NULL values
                modalclass_dict[ID] = 'NULL'
            else:
                modalclass_dict[ID] = mode
    # Classes proportions
    if prop:
        # Get list of categories to output
        if classes_list:  # If a list of classes was provided by the user
            # Cast to strings to match the keys read from the r.stats output
            class_dict = {str(a): '' for a in classes_list}
        else:
            class_dict = {}
        # Proportion of each category per zone
        proportion_dict = {}
        for ID in totals_dict:
            proportion_dict[ID] = {}
            for cl in totals_dict[ID]:
                if flags['p']:
                    proportion_dict[ID][cl] = round(
                        float(totals_dict[ID][cl]) /
                        sum(totals_dict[ID].values()) * 100, decimals)
                else:
                    proportion_dict[ID][cl] = round(
                        float(totals_dict[ID][cl]) /
                        sum(totals_dict[ID].values()), decimals)
                if cl == '*':
                    class_dict['NULL'] = ''
                else:
                    class_dict[cl] = ''
        # Fill class not met in the raster with zero
        for ID in proportion_dict:
            for cl in class_dict:
                if cl not in proportion_dict[ID].keys():
                    proportion_dict[ID][cl] = '{:.{}f}'.format(0, decimals)
        # Get list of class sorted by value (arithmetic)
        if 'NULL' in class_dict.keys():
            class_list = [int(k) for k in class_dict.keys() if k != 'NULL']
            class_list.sort()
            class_list.append('NULL')
        else:
            class_list = [int(k) for k in class_dict.keys()]
            class_list.sort()
    gscript.verbose(_("Statistics computed..."))

    # OUTPUT CONTENT
    # Header
    header = [
        'cat',
    ]
    if mode:
        if prefix:
            header.append('%s_mode' % prefix)
        else:
            header.append('mode')
    if prop:
        if prefix:
            [header.append('%s_prop_%s' % (prefix, cl)) for cl in class_list]
        else:
            [header.append('prop_%s' % cl) for cl in class_list]
    # Values
    value_dict = {}
    for ID in totals_dict:
        value_dict[ID] = []
        if mode:
            value_dict[ID].append(modalclass_dict[ID])
        if prop:
            for cl in class_list:
                value_dict[ID].append(proportion_dict[ID]['%s' % cl])

    # WRITE OUTPUT
    if csvfile:
        outfile = open(csvfile, 'w')
        writer = csv.writer(outfile, delimiter=separator)
        writer.writerow(header)
        csvcontent_dict = copy.deepcopy(value_dict)
        for ID in csvcontent_dict:
            csvcontent_dict[ID].insert(0, ID)  # prepend the zone ID
        writer.writerows(csvcontent_dict.values())
        outfile.close()
    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = 'rzonalclasses_tmp_vect_%d' % os.getpid()
        gscript.run_command('r.to.vect',
                            input_=zone_map,
                            output=temporary_vect,
                            type_='area',
                            flags='vt',
                            overwrite=True,
                            quiet=True)
        insert_sql = gscript.tempfile()
        fsql = open(insert_sql, 'w')
        fsql.write('BEGIN TRANSACTION;\n')
        if gscript.db_table_exist(temporary_vect):
            if gscript.overwrite():
                fsql.write('DROP TABLE %s;' % temporary_vect)
            else:
                gscript.fatal(
                    _("Table %s already exists. Use --o to overwrite") %
                    temporary_vect)
        create_statement = 'CREATE TABLE ' + temporary_vect + ' (cat int PRIMARY KEY);\n'
        fsql.write(create_statement)
        for col in header[1:]:
            if col.split('_')[-1] == 'mode':  # Mode column should be integer
                addcol_statement = 'ALTER TABLE %s ADD COLUMN %s integer;\n' % (
                    temporary_vect, col)
            else:  # Proportions column should be double precision
                addcol_statement = 'ALTER TABLE %s ADD COLUMN %s double precision;\n' % (
                    temporary_vect, col)
            fsql.write(addcol_statement)
        for key in value_dict:
            insert_statement = 'INSERT INTO %s VALUES (%s, %s);\n' % (
                temporary_vect, key, ','.join(
                    [str(x) for x in value_dict[key]]))
            fsql.write(insert_statement)
        fsql.write('END TRANSACTION;')
        fsql.close()
        gscript.run_command('db.execute', input=insert_sql, quiet=True)
        gscript.run_command('v.db.connect',
                            map_=temporary_vect,
                            table=temporary_vect,
                            quiet=True)
        gscript.run_command('g.copy',
                            vector='%s,%s' % (temporary_vect, vectormap),
                            quiet=True)
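
The modal class computed above is simply the dictionary key with the largest pixel count per zone. A self-contained sketch of the same computation, with made-up zone and class IDs:

import operator

# pixel counts per class, per zone, as built from the r.stats output
totals = {'1': {'10': 40, '20': 60},
          '2': {'10': 5, '*': 15}}  # '*' stands for NULL cells

modal = {}
for zone, counts in totals.items():
    cl = max(counts.items(), key=operator.itemgetter(1))[0]
    modal[zone] = 'NULL' if cl == '*' else cl
print(modal)  # {'1': '20', '2': 'NULL'}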
コード例 #44
0
def main():
    options, flags = gscript.parser()

    keep = flags['k']
    input = options['input']
    output = options['output']
    category = int(options['category'])

    nsize = int(options['nsize'])
    maxiter = int(options['maxiter'])
    animationfile = options['animationfile']
    quality = int(options['quality'])

    overwrite_flag = ''
    if gscript.overwrite():
        overwrite_flag = 't'

    # keep intermediate maps if the 'k' flag is set
    keepintmaps = keep

    # to generate the animation file, intermediate files must be kept
    # they will be removed at the end of the process if the 'k' flag is not set
    if animationfile:
        keepintmaps = True

    # check if input file exists
    if not gscript.find_file(input)['file']:
        gscript.fatal(_("Raster map <%s> not found") % input)

    # strip mapset name
    in_name_strip = options['input'].split('@')
    in_name = in_name_strip[0]
    out_name_strip = options['output'].split('@')
    out_name = out_name_strip[0]

    tmp = str(os.getpid())

    # maps to bootstrap the loop
    # create a map containing only the category to replace and NULL
    categorymap = '{}_bin_{}'.format(in_name, tmp)
    gscript.verbose(_("Category map: <%s>") % categorymap)
    gscript.run_command(
        'r.mapcalc',
        expression="{outmap}=if({inmap}=={cat}, 1, null())".format(
            outmap=categorymap, inmap=input, cat=category),
        quiet=True,
        overwrite='t')
    # create a copy of the input map to be used as a selection map in r.neighbors,
    # it will be replaced by the map with category replacement in the loop
    stepmap_old = '{}_step_000'.format(in_name)
    gscript.run_command('g.copy',
                        raster='{inmap},{outmap}'.format(inmap=input,
                                                         outmap=stepmap_old),
                        quiet=True,
                        overwrite='t')

    gscript.verbose(_("Category to remove: %d") % category)
    gscript.verbose(_("Maxiter: %d") % maxiter)
    gscript.verbose(_("Quality for animation: %d") % quality)

    pixel_num = 1
    iteration = 1

    # iterate until no pixel of the category to be replaced is left
    # or the maximum number of iterations is reached
    while (pixel_num > 0) and (iteration <= maxiter):
        stepmap = '{}_step_{:03d}'.format(in_name, iteration)
        gscript.verbose(_("Step map: <%s>") % stepmap)

        # substitute pixels of the category to remove with the mode of the surrounding pixels
        gscript.run_command('r.neighbors',
                            input=stepmap_old,
                            selection=categorymap,
                            size=nsize,
                            output=stepmap,
                            method='mode',
                            overwrite='true',
                            quiet=True)

        # remove intermediate map unless the k flag is set
        if keepintmaps is False:
            gscript.run_command('g.remove',
                                type='raster',
                                name=stepmap_old,
                                flags='f',
                                quiet=True)

        # the r.neighbors output map is the input map for the next step
        stepmap_old = stepmap

        # create the new map containing only the category to replace and NULL
        gscript.run_command(
            'r.mapcalc',
            expression="{outmap}=if({inmap}=={cat},1,null())".format(
                outmap=categorymap, inmap=stepmap, cat=category),
            quiet=True,
            overwrite='t')

        # evaluate the number of remaining pixels of the category to replace
        pixel_stat = gscript.parse_command(
            'r.stats',
            input='{inmap}'.format(inmap=stepmap),
            flags='c',
            sep='=',
            quiet=True)
        # parse the output; if the category is no longer present, the
        # KeyError sets pixel_num to 0 and ends the loop
        try:
            pixel_num = float(pixel_stat['{}'.format(category)])
        except KeyError:
            pixel_num = 0

        gscript.verbose(
            _("Iteration: %d  Remaining pixels: %d") % (iteration, pixel_num))

        iteration = iteration + 1

    # the last value stopped the loop
    iteration = iteration - 1

    # if the loop ended before reaching pixel_num=0
    if pixel_num > 0:
        gscript.warning(
            _("The process stopped after %d iterations with %d pixels of category %d left"
              ) % (iteration, pixel_num, category))

    # copy the output of the last iteration to the output map
    gscript.run_command('g.copy',
                        raster='{inmap},{outmap}'.format(inmap=stepmap,
                                                         outmap=out_name),
                        overwrite='{}'.format(overwrite_flag),
                        quiet=True)

    # remove the last intermediate map unless the k flag is set
    if keepintmaps is False:
        gscript.run_command('g.remove',
                            type='raster',
                            name=stepmap_old,
                            flags='f',
                            quiet=True)

    gscript.run_command('g.remove',
                        type='raster',
                        name=categorymap,
                        flags='f',
                        quiet=True)

    # optionally create an mpeg animation of the replacement sequence
    if animationfile:
        gscript.message(_("Generating mpeg file %s...") % animationfile)
        gscript.run_command('r.out.mpeg',
                            view1='{}_step_[0-9][0-9][0-9]'.format(in_name),
                            output='{}'.format(animationfile),
                            quality='{}'.format(quality),
                            overwrite='{}'.format(overwrite_flag))

    # remove intermediate maps if they have been kept for generating the animation
    # but the 'k' flag is not set
    if animationfile and not flags['k']:
        gscript.message(
            _("Removing intermediate files after generating %s...") %
            animationfile)
        newiter = 0
        while newiter <= iteration:
            stepmap = '{}_step_{:03d}'.format(in_name, newiter)
            gscript.verbose(_("Removing step map: <%s>") % stepmap)
            gscript.run_command('g.remove',
                                type='raster',
                                name=stepmap,
                                flags='f',
                                quiet=True)
            newiter = newiter + 1
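
The option set above (input, output, category, nsize, maxiter, animationfile, quality) matches the r.fill.category addon. A hedged usage sketch; the map names and category value are hypothetical:

import grass.script as gscript

# replace category 21 with the mode of the surrounding cells, using a
# 3x3 neighborhood and at most 100 iterations
gscript.run_command("r.fill.category",
                    input="landcover",
                    output="landcover_filled",
                    category=21,
                    nsize=3,
                    maxiter=100)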
コード例 #45
0
ファイル: r.in.nasadem.py プロジェクト: lucadelu/grass-addons
def main():

    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    global tile, tmpdir, in_temp, currdir, tmpregionname

    in_temp = False

    nasadem_version = options["version"]
    nasadem_layer = options["layer"]
    url = options["url"]
    username = options["username"]
    password = options["password"]
    local = options["local"]
    output = options["output"]
    dozerotile = flags["z"]
    reproj_res = options["resolution"]

    overwrite = grass.overwrite()

    tile = None
    tmpdir = None
    currdir = None
    tmpregionname = None
    # NASADEM tiles are distributed at 1 arc-second resolution;
    # res is used below when generating zero tiles
    res = "00:00:01"

    if len(local) == 0:
        local = None
        if len(username) == 0 or len(password) == 0:
            grass.fatal(_("NASADEM download requires username and password."))

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags="j")
    kv = grass.parse_key_val(s)

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    currdir = os.getcwd()
    pid = os.getpid()

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True

    # save region
    tmpregionname = "r_in_nasadem_region_" + str(pid)
    grass.run_command("g.region", save=tmpregionname, overwrite=overwrite)

    # get extents
    if kv["+proj"] == "longlat":
        reg = grass.region()
        if options["region"] is None or options["region"] == "":
            north = reg["n"]
            south = reg["s"]
            east = reg["e"]
            west = reg["w"]
        else:
            west, south, east, north = options["region"].split(",")
            west = float(west)
            south = float(south)
            east = float(east)
            north = float(north)

    else:
        if not options["resolution"]:
            grass.fatal(
                _("The <resolution> must be set if the projection is not 'longlat'.")
            )
        if options["region"] is None or options["region"] == "":
            reg2 = grass.parse_command("g.region", flags="uplg")
            north_vals = [float(reg2["ne_lat"]), float(reg2["nw_lat"])]
            south_vals = [float(reg2["se_lat"]), float(reg2["sw_lat"])]
            east_vals = [float(reg2["ne_long"]), float(reg2["se_long"])]
            west_vals = [float(reg2["nw_long"]), float(reg2["sw_long"])]
            reg = {}
            if np.mean(north_vals) > np.mean(south_vals):
                north = max(north_vals)
                south = min(south_vals)
            else:
                north = min(north_vals)
                south = max(south_vals)
            if np.mean(west_vals) > np.mean(east_vals):
                west = max(west_vals)
                east = min(east_vals)
            else:
                west = min(west_vals)
                east = max(east_vals)
            # get actual location, mapset, ...
            grassenv = grass.gisenv()
            tgtloc = grassenv["LOCATION_NAME"]
            tgtmapset = grassenv["MAPSET"]
            GISDBASE = grassenv["GISDBASE"]
            TGTGISRC = os.environ["GISRC"]
        else:
            grass.fatal(
                _(
                    "The option <resolution> is only supported in the projection 'longlat'"
                )
            )

    # adjust extents to cover SRTM tiles: 1 degree bounds
    tmpint = int(north)
    if tmpint < north:
        north = tmpint + 1
    else:
        north = tmpint

    tmpint = int(south)
    if tmpint > south:
        south = tmpint - 1
    else:
        south = tmpint

    tmpint = int(east)
    if tmpint < east:
        east = tmpint + 1
    else:
        east = tmpint

    tmpint = int(west)
    if tmpint > west:
        west = tmpint - 1
    else:
        west = tmpint

    if north == south:
        north += 1
    if east == west:
        east += 1

    # switch to longlat location
    if kv["+proj"] != "longlat":
        SRCGISRC, TMPLOC = createTMPlocation()

    rows = abs(north - south)
    cols = abs(east - west)
    ntiles = rows * cols
    grass.message(_("Importing %d NASADEM tiles...") % ntiles, flag="i")
    counter = 1

    srtmtiles = ""
    valid_tiles = 0
    for ndeg in range(south, north):
        for edeg in range(west, east):
            grass.percent(counter, ntiles, 1)
            counter += 1
            if ndeg < 0:
                tile = "s"
            else:
                tile = "n"
            tile = tile + "%02d" % abs(ndeg)
            if edeg < 0:
                tile = tile + "w"
            else:
                tile = tile + "e"
            tile = tile + "%03d" % abs(edeg)
            grass.debug("Tile: %s" % tile, debug=1)

            if local is None:
                download_tile(tile, url, pid, nasadem_version, username, password)
                gotit = import_local_tile(tile, tmpdir, pid, nasadem_layer)
            else:
                gotit = import_local_tile(tile, local, pid, nasadem_layer)
            if gotit == 1:
                grass.verbose(_("Tile %s successfully imported") % tile)
                valid_tiles += 1
            elif dozerotile:
                # create tile with zeros
                # north
                if ndeg < -1:
                    tmpn = "%02d:59:59.5S" % (abs(ndeg) - 2)
                else:
                    tmpn = "%02d:00:00.5N" % (ndeg + 1)
                # south
                if ndeg < 1:
                    tmps = "%02d:00:00.5S" % abs(ndeg)
                else:
                    tmps = "%02d:59:59.5N" % (ndeg - 1)
                # east
                if edeg < -1:
                    tmpe = "%03d:59:59.5W" % (abs(edeg) - 2)
                else:
                    tmpe = "%03d:00:00.5E" % (edeg + 1)
                # west
                if edeg < 1:
                    tmpw = "%03d:00:00.5W" % abs(edeg)
                else:
                    tmpw = "%03d:59:59.5E" % (edeg - 1)

                grass.run_command("g.region", n=tmpn, s=tmps, e=tmpe, w=tmpw, res=res)
                grass.run_command(
                    "r.mapcalc",
                    expression="%s = 0" % (tile + ".r.in.nasadem.tmp." + str(pid)),
                    quiet=True,
                )
                grass.run_command("g.region", region=tmpregionname)

    # g.list with sep = comma does not work ???
    pattern = "*.r.in.nasadem.tmp.%d" % pid
    demtiles = grass.read_command(
        "g.list", type="raster", pattern=pattern, sep="newline", quiet=True
    )

    demtiles = demtiles.splitlines()
    demtiles = ",".join(demtiles)
    grass.debug("'List of Tiles: %s" % demtiles, debug=1)

    if valid_tiles == 0:
        grass.run_command(
            "g.remove", type="raster", name=str(demtiles), flags="f", quiet=True
        )
        grass.warning(_("No tiles imported"))
        if local is not None:
            grass.fatal(_("Please check if local folder <%s> is correct.") % local)
        else:
            grass.fatal(
                _(
                    "Please check internet connection, credentials, and if url <%s> is correct."
                )
                % url
            )

    grass.run_command("g.region", raster=str(demtiles))

    if valid_tiles > 1:
        grass.message(_("Patching tiles..."))
        if kv["+proj"] != "longlat":
            grass.run_command("r.buildvrt", input=demtiles, output=output)
        else:
            grass.run_command("r.patch", input=demtiles, output=output)
            grass.run_command(
                "g.remove", type="raster", name=str(demtiles), flags="f", quiet=True
            )
    else:
        grass.run_command("g.rename", raster="%s,%s" % (demtiles, output), quiet=True)

    # switch to target location and reproject NASADEM
    if kv["+proj"] != "longlat":
        os.environ["GISRC"] = str(TGTGISRC)
        # r.proj
        grass.message(_("Reprojecting <%s>...") % output)
        kwargs = {
            "location": TMPLOC,
            "mapset": "PERMANENT",
            "input": output,
            "resolution": reproj_res,
        }
        if options["memory"]:
            kwargs["memory"] = options["memory"]
        if options["method"]:
            kwargs["method"] = options["method"]
        try:
            grass.run_command("r.proj", **kwargs)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % output)

    # nice color table
    grass.run_command("r.colors", map=output, color="srtm", quiet=True)

    # write metadata:
    tmphist = grass.tempfile()
    f = open(tmphist, "w+")
    # hide username and password
    cmdline = os.environ["CMDLINE"]
    if username is not None and len(username) > 0:
        cmdline = cmdline.replace("=" + username, "=xxx")
    if password is not None and len(password) > 0:
        cmdline = cmdline.replace("=" + password, "=xxx")

    f.write(cmdline)
    f.close()
    source1 = nasadem_version
    grass.run_command(
        "r.support",
        map=output,
        loadhistory=tmphist,
        description="generated by r.in.nasadem",
        source1=source1,
        source2=(local if local else url),
    )
    grass.try_remove(tmphist)

    grass.message(_("Done: generated map <%s>") % output)
コード例 #46
0
def main():
    raster = options['raster']
    maskcats = options['maskcats']
    vector = options['vector']
    layer = options['layer']
    cats = options['cats']
    where = options['where']
    remove = flags['r']
    invert = flags['i']

    if not remove and not raster and not vector:
        grass.fatal(_("Either parameter <raster> or parameter <vector> is required"))

    mapset = grass.gisenv()['MAPSET']
    exists = bool(grass.find_file('MASK', element='cell', mapset=mapset)['file'])

    if remove:
        # -> remove
        if exists:
            if sys.platform == 'win32':
                grass.run_command('g.remove', flags='if', quiet=True,
                                  type='raster', name='MASK')
            else:
                grass.run_command('g.remove', flags='f', quiet=True,
                                  type='raster', name='MASK')
            grass.message(_("Raster MASK removed"))
        else:
            grass.fatal(_("No existing MASK to remove"))
    else:
        # -> create
        if exists:
            if not grass.overwrite():
                grass.fatal(_("MASK already found in current mapset. Delete first or overwrite."))
            else:
                grass.warning(_("MASK already exists and will be overwritten"))
                grass.run_command('g.remove', flags='f', quiet=True,
                                  type='raster', name='MASK')

        if raster:
            # check if input raster exists
            if not grass.find_file(raster)['file']:
                grass.fatal(_("Raster map <%s> not found") % raster)

            if maskcats != '*' and not remove:
                if grass.raster_info(raster)['datatype'] != "CELL":
                    grass.fatal(_("The raster map <%s> must be integer (CELL type) "
                                  " in order to use the 'maskcats' parameter") % raster)

            p = grass.feed_command(
                'r.reclass',
                input=raster,
                output='MASK',
                overwrite=True,
                rules='-')
            p.stdin.write("%s = 1" % maskcats)
            p.stdin.close()
            p.wait()
        elif vector:
            vector_name = grass.find_file(vector, 'vector')['fullname']
            if not vector_name:
                grass.fatal(_("Vector map <%s> not found") % vector)

            # parser bug?
            if len(cats) == 0:
                cats = None
            if len(where) == 0:
                where = None

            if grass.vector_info_topo(vector_name)['areas'] < 1:
                grass.warning(_("No area found in vector map <%s>. "
                                "Creating a convex hull for MASK.") % vector_name)
                global tmp_hull
                tmp_hull = "tmp_hull_%d" % os.getpid()
                to_rast_input = tmp_hull
                # force 'flat' convex hull for 3D vector maps
                try:
                    grass.run_command('v.hull', flags='f', quiet=True,
                                      input=vector_name, output=tmp_hull,
                                      layer=layer, cats=cats, where=where)
                except CalledModuleError:
                    grass.fatal(
                        _("Unable to create a convex hull for vector map <%s>") %
                        vector_name)
            else:
                to_rast_input = vector_name

            env = os.environ.copy()
            if grass.verbosity() > 1:
                env['GRASS_VERBOSE'] = '1'
            grass.run_command('v.to.rast', input=to_rast_input, layer=layer,
                              output='MASK', use='val', val='1',
                              type='area', cats=cats, where=where, env=env)

        if invert:
            global tmp
            tmp = "r_mask_%d" % os.getpid()
            grass.run_command('g.rename', raster=('MASK', tmp), quiet=True)
            grass.message(_("Creating inverted raster MASK..."))
            grass.mapcalc("MASK = if(isnull($tmp), 1, null())", tmp=tmp)
            grass.verbose(_("Inverted raster MASK created"))
        else:
            grass.verbose(_("Raster MASK created"))

        grass.message(_("All subsequent raster operations will be limited to "
                        "the MASK area. Removing or renaming raster map named "
                        "'MASK' will restore raster operations to normal."))
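
The r.reclass invocation above feeds its rules through stdin via feed_command/stdin.write/wait. The same effect can be had with grass.script's write_command, which pipes a string to the module and waits for it to finish. A minimal sketch, assuming an integer raster named 'landuse' and maskcats of '1 2 3':

import grass.script as grass

# Categories 1-3 become 1, everything else NULL, producing the MASK raster.
grass.write_command('r.reclass', input='landuse', output='MASK',
                    rules='-', stdin='1 2 3 = 1', overwrite=True)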
Code example #47
def main():
    options, flags = grass.parser()

    elevation_input = options['elevation']
    aspect_input = options['aspect']
    slope_input = options['slope']
    linke = options['linke']
    linke_value = options['linke_value']
    albedo = options['albedo']
    albedo_value = options['albedo_value']

    beam_rad_basename = options['beam_rad_basename']
    diff_rad_basename = options['diff_rad_basename']
    refl_rad_basename = options['refl_rad_basename']
    glob_rad_basename = options['glob_rad_basename']
    incidout_basename = options['incidout_basename']

    if not any([beam_rad_basename, diff_rad_basename,
                refl_rad_basename, glob_rad_basename,
                incidout_basename]):
        grass.fatal(_("No output specified."))

    start_time = float(options['start_time'])
    end_time = float(options['end_time'])
    time_step = float(options['time_step'])
    nprocs = int(options['nprocs'])
    day = int(options['day'])
    temporal = flags['t']
    binary = flags['b']
    binaryTmpName = 'binary'
    year = int(options['year'])

    if not is_grass_7() and temporal:
        grass.warning(_("Flag t has effect only in GRASS 7"))

    # check: start < end
    if start_time > end_time:
        grass.fatal(_("Start time is after end time."))
    if time_step >= end_time - start_time:
        grass.fatal(_("Time step is too big."))

    # here we check all the days
    if not grass.overwrite():
        check_time_map_names(beam_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(diff_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(refl_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(glob_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)

    # check for slope/aspect
    if not aspect_input or not slope_input:
        params = {}
        if not aspect_input:
            aspect_input = create_tmp_map_name('aspect')
            params.update({'aspect': aspect_input})
            TMP.append(aspect_input)
        if not slope_input:
            slope_input = create_tmp_map_name('slope')
            params.update({'slope': slope_input})
            TMP.append(slope_input)

        grass.info(_("Running r.slope.aspect..."))
        grass.run_command('r.slope.aspect', elevation=elevation_input,
                          quiet=True, **params)

    grass.info(_("Running r.sun in a loop..."))
    count = 0
    # Parallel processing
    proc_list = []
    proc_count = 0
    suffixes = []
    suffixes_all = []
    times = list(frange(start_time, end_time, time_step))
    num_times = len(times)
    core.percent(0, num_times, 1)
    for time in times:
        count += 1
        core.percent(count, num_times, 10)

        suffix = '_' + format_time(time)
        proc_list.append(Process(target=run_r_sun,
                                 args=(elevation_input, aspect_input,
                                       slope_input, day, time,
                                       linke, linke_value,
                                       albedo, albedo_value,
                                       beam_rad_basename,
                                       diff_rad_basename,
                                       refl_rad_basename,
                                       glob_rad_basename,
                                       incidout_basename,
                                       suffix,
                                       binary, binaryTmpName)))

        proc_list[proc_count].start()
        proc_count += 1
        suffixes.append(suffix)
        suffixes_all.append(suffix)

        if proc_count == nprocs or proc_count == num_times or count == num_times:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode

            if exitcodes != 0:
                core.fatal(_("Error while r.sun computation"))

            # Empty process list
            proc_list = []
            suffixes = []
    # FIXME: how does percent really work?
    # core.percent(1, 1, 1)

    # add timestamps either via temporal framework in 7 or r.timestamp in 6.x
    if is_grass_7() and temporal:
        core.info(_("Registering created maps into temporal dataset..."))
        import grass.temporal as tgis

        def registerToTemporal(basename, suffixes, mapset, start_time,
                               time_step, title, desc):
            maps = ','.join([basename + suf + '@' + mapset for suf in suffixes])
            tgis.open_new_stds(basename, type='strds',
                               temporaltype='absolute',
                               title=title, descr=desc,
                               semantic='mean', dbif=None,
                               overwrite=grass.overwrite())
            tgis.register_maps_in_space_time_dataset(
                type='raster', name=basename, maps=maps, start=start_time,
                end=None, increment=time_step, dbif=None, interval=False)
        # Make sure the temporal database exists
        tgis.init()

        mapset = grass.gisenv()['MAPSET']
        absolute_time = datetime.datetime(year, 1, 1) + \
                        datetime.timedelta(days=day - 1) + \
                        datetime.timedelta(hours=start_time)
        start = absolute_time.strftime("%Y-%m-%d %H:%M:%S")
        step = datetime.timedelta(hours=time_step)
        step = "%d seconds" % step.seconds

        if beam_rad_basename:
            registerToTemporal(beam_rad_basename, suffixes_all, mapset, start,
                               step, title="Beam irradiance",
                               desc="Output beam irradiance raster maps [Wh.m-2]")
        if diff_rad_basename:
            registerToTemporal(diff_rad_basename, suffixes_all, mapset, start,
                               step, title="Diffuse irradiance",
                               desc="Output diffuse irradiance raster maps [Wh.m-2]")
        if refl_rad_basename:
            registerToTemporal(refl_rad_basename, suffixes_all, mapset, start,
                               step, title="Reflected irradiance",
                               desc="Output reflected irradiance raster maps [Wh.m-2]")
        if glob_rad_basename:
            registerToTemporal(glob_rad_basename, suffixes_all, mapset, start,
                               step, title="Total irradiance",
                               desc="Output total irradiance raster maps [Wh.m-2]")
        if incidout_basename:
            registerToTemporal(incidout_basename, suffixes_all, mapset, start,
                               step, title="Incidence angle",
                               desc="Output incidence angle raster maps")

    else:
        absolute_time = datetime.datetime(year, 1, 1) + \
                        datetime.timedelta(days=day - 1)
        for i, time in enumerate(times):
            grass_time = format_grass_time(absolute_time + datetime.timedelta(hours=time))
            if beam_rad_basename:
                set_time_stamp(beam_rad_basename + suffixes_all[i],
                               time=grass_time)
            if diff_rad_basename:
                set_time_stamp(diff_rad_basename + suffixes_all[i],
                               time=grass_time)
            if refl_rad_basename:
                set_time_stamp(refl_rad_basename + suffixes_all[i],
                               time=grass_time)
            if glob_rad_basename:
                set_time_stamp(glob_rad_basename + suffixes_all[i],
                               time=grass_time)
            if incidout_basename:
                set_time_stamp(incidout_basename + suffixes_all[i],
                               time=grass_time)

    if beam_rad_basename:
        maps = [beam_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if diff_rad_basename:
        maps = [diff_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if refl_rad_basename:
        maps = [refl_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if glob_rad_basename:
        maps = [glob_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if incidout_basename:
        maps = [incidout_basename + suf for suf in suffixes_all]
        set_color_table(maps)
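
The loop above starts one multiprocessing.Process per time step and joins them in batches of nprocs before launching more. The batching pattern in isolation (a generic sketch, not the module's code):

from multiprocessing import Process

def run_in_batches(func, arg_list, nprocs):
    # Start up to nprocs workers, join the whole batch, then continue;
    # mirrors the proc_list/proc_count bookkeeping used above.
    batch = []
    for args in arg_list:
        proc = Process(target=func, args=args)
        proc.start()
        batch.append(proc)
        if len(batch) == nprocs:
            for proc in batch:
                proc.join()
            batch = []
    for proc in batch:  # join any remainder
        proc.join()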
Code example #48
def main():
    """
    Links each river segment to the next downstream segment in a tributary
    network by referencing its category (cat) number in a new column. "0"
    means that the river exits the map.
    """
    import matplotlib  # required by windows

    matplotlib.use("wxAGG")  # required by windows
    from matplotlib import pyplot as plt

    options, flags = gscript.parser()

    # Parsing
    window = float(options["window"])
    accum_mult = float(options["accum_mult"])
    if options["units"] == "m2":
        accum_label = "Drainage area [m$^2$]"
    elif options["units"] == "km2":
        accum_label = "Drainage area [km$^2$]"
    elif options["units"] == "cumecs":
        accum_label = "Water discharge [m$^3$ s$^{-1}$]"
    elif options["units"] == "cfs":
        accum_label = "Water discharge [cfs]"
    else:
        accum_label = "Flow accumulation [$-$]"
    plots = options["plots"].split(",")

    # Attributes of streams
    colNames = np.array(vector_db_select(options["streams"])["columns"])
    colValues = np.array(
        list(vector_db_select(options["streams"])["values"].values()))
    tostream = colValues[:, colNames == "tostream"].astype(int).squeeze()
    cats = colValues[:,
                     colNames == "cat"].astype(int).squeeze()  # = "fromstream"

    # We can loop over this list to get the shape of the full river network.
    selected_cats = []
    segment = int(options["cat"])
    selected_cats.append(segment)
    x = []
    z = []
    if options["direction"] == "downstream":
        # Get network
        gscript.message("Network")
        while selected_cats[-1] != 0:
            selected_cats.append(int(tostream[cats == selected_cats[-1]]))
        x.append(selected_cats[-1])
        selected_cats = selected_cats[:-1]  # remove 0 at end

        # Extract x points in network
        data = vector.VectorTopo(
            options["streams"])  # Create a VectorTopo object
        data.open("r")  # Open this object for reading

        coords = []
        _i = 0
        for i in range(len(data)):
            feature = data.read(i + 1)  # read each feature only once
            if isinstance(feature, vector.geometry.Line) \
                    and feature.cat in selected_cats:
                coords.append(feature.to_array())
                gscript.core.percent(_i, len(selected_cats),
                                     100.0 / len(selected_cats))
                _i += 1
        gscript.core.percent(1, 1, 1)
        coords = np.vstack(np.array(coords))

        _dx = np.diff(coords[:, 0])
        _dy = np.diff(coords[:, 1])
        x_downstream_0 = np.hstack((0, np.cumsum((_dx**2 + _dy**2)**0.5)))
        x_downstream = x_downstream_0.copy()

    elif options["direction"] == "upstream":
        # terminalCATS = list(options['cat'])
        # while terminalCATS:
        #
        print("Upstream direction not yet active!")
        return
        """
        # Add new lists for each successive upstream river
        river_is_upstream =
        while
        full_river_cats
        """

    # Network extraction
    if options["outstream"] is not "":
        selected_cats_str = list(np.array(selected_cats).astype(str))
        selected_cats_csv = ",".join(selected_cats_str)
        v.extract(
            input=options["streams"],
            output=options["outstream"],
            cats=selected_cats_csv,
            overwrite=gscript.overwrite(),
        )

    # Analysis
    gscript.message("Elevation")
    if options["elevation"]:
        _include_z = True
        DEM = RasterRow(options["elevation"])
        DEM.open("r")
        z = []
        _i = 0
        _lasti = 0
        for row in coords:
            z.append(DEM.get_value(Point(row[0], row[1])))
            if float(_i) / len(coords) > float(_lasti) / len(coords):
                gscript.core.percent(_i, len(coords), np.floor(_i - _lasti))
            _lasti = _i
            _i += 1
        DEM.close()
        z = np.array(z)
        if options["window"] is not "":
            x_downstream, z = moving_average(x_downstream_0, z, window)
        gscript.core.percent(1, 1, 1)
    else:
        _include_z = False
    gscript.message("Slope")
    if options["slope"]:
        _include_S = True
        slope = RasterRow(options["slope"])
        slope.open("r")
        S = []
        _i = 0
        _lasti = 0
        for row in coords:
            S.append(slope.get_value(Point(row[0], row[1])))
            if float(_i) / len(coords) > float(_lasti) / len(coords):
                gscript.core.percent(_i, len(coords), np.floor(_i - _lasti))
            _lasti = _i
            _i += 1
        slope.close()
        S = np.array(S)
        S_0 = S.copy()
        if options["window"] is not "":
            x_downstream, S = moving_average(x_downstream_0, S, window)
        gscript.core.percent(1, 1, 1)
    else:
        _include_S = False
    gscript.message("Accumulation")
    if options["accumulation"]:
        _include_A = True
        accumulation = RasterRow(options["accumulation"])
        accumulation.open("r")
        A = []
        _i = 0
        _lasti = 0
        for row in coords:
            A.append(
                accumulation.get_value(Point(row[0], row[1])) * accum_mult)
            if float(_i) / len(coords) > float(_lasti) / len(coords):
                gscript.core.percent(_i, len(coords), np.floor(_i - _lasti))
            _lasti = _i
            _i += 1
        accumulation.close()
        A = np.array(A)
        A_0 = A.copy()
        if options["window"] is not "":
            x_downstream, A = moving_average(x_downstream_0, A, window)
        gscript.core.percent(1, 1, 1)
    else:
        _include_A = False

    # Plotting
    if "LongProfile" in plots:
        plt.figure()
        plt.plot(x_downstream / 1000.0, z, "k-", linewidth=2)
        plt.xlabel("Distance downstream [km]", fontsize=16)
        plt.ylabel("Elevation [m]", fontsize=20)
        plt.tight_layout()
    if "SlopeAccum" in plots:
        plt.figure()
        plt.loglog(A, S, "ko", linewidth=2)
        plt.xlabel(accum_label, fontsize=20)
        plt.ylabel("Slope [$-$]", fontsize=20)
        plt.tight_layout()
    if "SlopeDistance" in plots:
        plt.figure()
        plt.plot(x_downstream / 1000.0, S, "k-", linewidth=2)
        plt.xlabel("Distance downstream [km]", fontsize=16)
        plt.ylabel("Slope [$-$]", fontsize=20)
        plt.tight_layout()
    if "AccumDistance" in plots:
        plt.figure()
        plt.plot(x_downstream / 1000.0, A, "k-", linewidth=2)
        plt.xlabel("Distance downstream [km]", fontsize=16)
        plt.ylabel(accum_label, fontsize=20)
        plt.tight_layout()
    plt.show()

    # Saving data
    if options["outfile_original"] is not "":
        header = ["x_downstream", "E", "N"]
        outfile = np.hstack((np.expand_dims(x_downstream_0, axis=1), coords))
        if _include_S:
            header.append("slope")
            outfile = np.hstack((outfile, np.expand_dims(S_0, axis=1)))
        if _include_A:
            if (options["units"] == "m2") or (options["units"] == "km2"):
                header.append("drainage_area_" + options["units"])
            elif (options["units"] == "cumecs") or (options["units"] == "cfs"):
                header.append("water_discharge_" + options["units"])
            else:
                header.append("flow_accumulation_arbitrary_units")
            outfile = np.hstack((outfile, np.expand_dims(A_0, axis=1)))
        header = np.array(header)
        outfile = np.vstack((header, outfile))
        np.savetxt(options["outfile_original"], outfile, "%s")
    if options["outfile_smoothed"] is not "":
        header = ["x_downstream", "E", "N"]
        # E, N on smoothed grid
        x_downstream, E = moving_average(x_downstream_0, coords[:, 0], window)
        x_downstream, N = moving_average(x_downstream_0, coords[:, 1], window)
        # Back to output
        outfile = np.hstack((
            np.expand_dims(x_downstream, axis=1),
            np.expand_dims(E, axis=1),
            np.expand_dims(N, axis=1),
        ))
        if _include_S:
            header.append("slope")
            outfile = np.hstack((outfile, np.expand_dims(S, axis=1)))
        if _include_A:
            if (options["units"] == "m2") or (options["units"] == "km2"):
                header.append("drainage_area_" + options["units"])
            elif (options["units"] == "cumecs") or (options["units"] == "cfs"):
                header.append("water_discharge_" + options["units"])
            else:
                header.append("flow_accumulation_arbitrary_units")
            outfile = np.hstack((outfile, np.expand_dims(A, axis=1)))
        header = np.array(header)
        outfile = np.vstack((header, outfile))
        np.savetxt(options["outfile_smoothed"], outfile, "%s")
Code example #49
def main():
    """
    Gridded flexural isostatic solutions
    """

    options, flags = grass.parser()
    # if just interface description is requested, it will not get to this point
    # so gflex will not be needed

    # GFLEX
    # try to import gflex only after we know that
    # we will actually do the computation
    try:
        import gflex
    except ImportError:
        print("")
        print("MODULE IMPORT ERROR.")
        print("In order to run r.flexure or g.flexure, you must download and install")
        print("gFlex. The most recent development version is available from")
        print("https://github.com/awickert/gFlex.")
        print("Installation instructions are available on the page.")
        grass.fatal("Software dependency must be installed.")

    # This code is for 2D flexural isostasy
    flex = gflex.F2D()
    # And show that it is coming from GRASS GIS
    flex.grass = True

    # Flags
    latlon_override = flags["l"]

    # Inputs
    # Solution selection
    flex.Method = options["method"]
    if flex.Method == "FD":
        flex.Solver = options["solver"]
        if flex.Solver:
            flex.ConvergenceTolerance = options["tolerance"]
        # Always use the van Wees and Cloetingh (1994) solution type.
        # It is the best.
        flex.PlateSolutionType = "vWC1994"
    # Parameters that are often changed for the solution
    qs = options["input"]
    flex.qs = garray.array(qs)
    # Elastic thickness
    try:
        flex.Te = float(options["te"])
    except ValueError:
        # Not a number, so treat it as a raster map name
        flex.Te = garray.array(
            options["te"]
        )  # FlexureTe is the one that is used by Flexure
        flex.Te = np.array(flex.Te)
    if options["te_units"] == "km":
        flex.Te *= 1000
    elif options["te_units"] == "m":
        pass
    # No "else"; shouldn't happen
    flex.rho_fill = float(options["rho_fill"])
    # Parameters that often stay at their default values
    flex.g = float(options["g"])
    flex.E = float(
        options["ym"]
    )  # Can't just use "E" because reserved for "east", I think
    flex.nu = float(options["nu"])
    flex.rho_m = float(options["rho_m"])
    # Solver type and iteration tolerance
    flex.Solver = options["solver"]
    flex.ConvergenceTolerance = float(options["tolerance"])
    # Boundary conditions
    flex.BC_N = options["northbc"]
    flex.BC_S = options["southbc"]
    flex.BC_W = options["westbc"]
    flex.BC_E = options["eastbc"]

    # Set verbosity
    if grass.verbosity() >= 2:
        flex.Verbose = True
    if grass.verbosity() >= 3:
        flex.Debug = True
    elif grass.verbosity() == 0:
        flex.Quiet = True

    # First check if output exists
    if len(grass.parse_command("g.list", type="rast", pattern=options["output"])):
        if not grass.overwrite():
            grass.fatal(
                "Raster map '"
                + options["output"]
                + "' already exists. Use '--o' to overwrite."
            )

    # Get grid spacing from GRASS
    # Check if lat/lon and proceed as directed
    if grass.region_env()[6] == "3":
        if latlon_override:
            if flex.Verbose:
                print("Latitude/longitude grid.")
                print("Based on r_Earth = 6371 km")
                print("Setting y-resolution [m] to 111,195 * [degrees]")
            flex.dy = grass.region()["nsres"] * 111195.0
            NSmid = (grass.region()["n"] + grass.region()["s"]) / 2.0
            dx_at_mid_latitude = (
                (3.14159 / 180.0) * 6371000.0 * np.cos(np.deg2rad(NSmid))
            )
            if flex.Verbose:
                print(
                    "Setting x-resolution [m] to "
                    + "%.2f" % dx_at_mid_latitude
                    + " * [degrees]"
                )
            flex.dx = grass.region()["ewres"] * dx_at_mid_latitude
        else:
            grass.fatal("Need the '-l' flag to enable lat/lon solution approximation.")
    # Otherwise straightforward
    else:
        flex.dx = grass.region()["ewres"]
        flex.dy = grass.region()["nsres"]

    # CALCULATE!
    flex.initialize()
    flex.run()
    flex.finalize()

    # Write to GRASS
    # Create a new garray buffer and write to it
    outbuffer = garray.array()  # Instantiate output buffer
    outbuffer[...] = flex.w
    outbuffer.write(
        options["output"], overwrite=grass.overwrite()
    )  # Write it with the desired name
    # And create a nice colormap!
    grass.run_command(
        "r.colors", map=options["output"], color="differences", quiet=True
    )
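
The 111,195 m/degree figure used in the lat/lon approximation is simply one degree of arc on a 6371 km sphere; a quick check:

import numpy as np

# One degree of latitude on a 6371 km sphere, matching the constant above.
m_per_degree = 2 * np.pi * 6371000.0 / 360.0     # ~111194.9 m
# East-west spacing shrinks with the cosine of latitude, e.g. at 45 N:
dx_45 = m_per_degree * np.cos(np.deg2rad(45.0))  # ~78626 m per degree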
Code example #50
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    elevation = options["elevation"]
    expdir = options["directory"]
    where = options["where"]
    null = options["null"]
    use_pdata = flags["p"]
    coorcorr = flags["c"]
    use_granularity = flags["g"]

    # Make sure the temporal database exists
    tgis.init()

    if not os.path.exists(expdir):
        grass.fatal(_("Export directory <%s> not found.") % expdir)

    os.chdir(expdir)

    sp = tgis.open_old_stds(input, "strds")

    if use_granularity:
        # Attention: A list of lists of maps will be returned
        maps = sp.get_registered_maps_as_objects_by_granularity()
        # Create a NULL map in case of granularity support
        null_map = "temporary_null_map_%i" % os.getpid()
        grass.mapcalc("%s = null()" % (null_map))
    else:
        maps = sp.get_registered_maps_as_objects(where, "start_time", None)

    # To have scalar values with the same name, we need to copy the
    # raster maps using a single name
    map_name = "%s_%i" % (sp.base.get_name(), os.getpid())

    count = 0
    if maps is not None:
        for map in maps:
            if use_granularity:
                # One list of maps per granule; empty granules yield None
                id = map[0].get_map_id() if map and len(map) > 0 else None
            else:
                id = map.get_map_id()
            # None ids will be replaced by the NULL map
            if id is None:
                id = null_map

            grass.run_command("g.copy",
                              raster="%s,%s" % (id, map_name),
                              overwrite=True)
            out_name = "%6.6i_%s.vtk" % (count, sp.base.get_name())

            mflags = ""
            if use_pdata:
                mflags += "p"
            if coorcorr:
                mflags += "c"

            # Export the raster map with r.out.vtk
            try:
                if elevation:
                    grass.run_command(
                        "r.out.vtk",
                        flags=mflags,
                        null=null,
                        input=map_name,
                        elevation=elevation,
                        output=out_name,
                        overwrite=grass.overwrite(),
                    )
                else:
                    grass.run_command(
                        "r.out.vtk",
                        flags=mflags,
                        null=null,
                        input=map_name,
                        output=out_name,
                        overwrite=grass.overwrite(),
                    )
            except CalledModuleError:
                grass.fatal(_("Unable to export raster map <%s>" % map_name))

            count += 1

    if use_granularity:
        grass.run_command("g.remove", flags="f", type="raster", name=null_map)
    grass.run_command("g.remove", flags="f", type="raster", name=map_name)
Code example #51
File: v.what.strds.py  Project: bigrusterwall/grass
def main():
    # lazy imports
    import grass.temporal as tgis
    from grass.pygrass.utils import copy as gcopy
    from grass.pygrass.messages import Messenger
    from grass.pygrass.vector import Vector

    # Get the options
    input = options["input"]
    output = options["output"]
    strds = options["strds"]
    where = options["where"]
    tempwhere = options["t_where"]

    if output and flags['u']:
        grass.fatal(_("Cannot combine 'output' option and 'u' flag"))
    elif not output and not flags['u']:
        grass.fatal(_("'output' option or 'u' flag must be given"))
    elif not output and flags['u']:
        grass.warning(
            _("Attribute table of vector {name} will be updated...").format(
                name=input))

    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    quiet = True

    if grass.verbosity() > 2:
        quiet = False

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)
    # Single space time raster dataset
    if len(strds_names) == 1:
        granu = first_strds.get_granularity()
        rows = first_strds.get_registered_maps(
            "name,mapset,start_time,end_time", tempwhere, "start_time", dbif)

        if not rows:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty") %
                first_strds.get_id())
        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps, first_strds.get_name(), granu)
            samples.append(s)
    else:
        # Multiple space time raster datasets
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(
                    _(
                        "Temporal type of space time raster "
                        "datasets must be equal\n<%(a)s> of type "
                        "%(type_a)s do not match <%(b)s> of type "
                        "%(type_b)s" % {
                            "a": first_strds.get_id(),
                            "type_a": first_strds.get_temporal_type(),
                            "b": dataset.get_id(),
                            "type_b": dataset.get_temporal_type()
                        }))

        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds", "strds", strds_names, strds_names[0], False, None,
            "equal", False, False)
        #TODO check granularity for multiple STRDS
        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:

                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list, name)
                samples.append(s)

    # Get the layer and database connections of the input vector
    if output:
        gcopy(input, output, 'vector')
    else:
        output = input

    msgr = Messenger()
    perc_curr = 0
    perc_tot = len(samples)
    pymap = Vector(output)
    try:
        pymap.open('r')
    except:
        dbif.close()
        grass.fatal(_("Unable to create vector map <%s>" % output))

    if len(pymap.dblinks) == 0:
        try:
            pymap.close()
            grass.run_command("v.db.addtable", map=output)
        except CalledModuleError:
            dbif.close()
            grass.fatal(
                _("Unable to add table to vector map <%s>") % output)
    if pymap.is_open():
        pymap.close()

    for sample in samples:
        raster_names = sample.raster_names
        # Call v.what.rast for each raster map

        for name in raster_names:
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"
            day = sample.printDay()
            column_name = "%s_%s" % (sample.strds_name, day)
            column_string = "%s %s" % (column_name, coltype)
            column_string = column_string.replace('.', '_')  # str.replace returns a new string
            try:
                grass.run_command("v.db.addcolumn",
                                  map=output,
                                  column=column_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add column %s to vector map "
                      "<%s> ") % (column_string, output))
            try:
                grass.run_command("v.what.rast",
                                  map=output,
                                  raster=name,
                                  column=column_name,
                                  where=where,
                                  quiet=quiet)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to run v.what.rast for vector map"
                      " <%s> and raster map <%s>") %
                    (output, name))

        msgr.percent(perc_curr, perc_tot, 1)
        perc_curr += 1

    dbif.close()
Code example #52
File: i.segment.stats.py  Project: zarch/grass-addons
def main():

    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None

    segment_map = options['map']
    csvfile = options['csvfile'] if options['csvfile'] else []
    vectormap = options['vectormap'] if options['vectormap'] else []
    global rasters
    rasters = options['rasters'].split(',') if options['rasters'] else []
    area_measures = options['area_measures'].split(',') if (
        options['area_measures'] and not flags['s']) else []
    if area_measures:
        if not gscript.find_program('r.object.geometry', '--help'):
            message = _(
                "You need to install the addon r.object.geometry to be able")
            message += _(" to calculate area measures.\n")
            message += _(
                " You can install the addon with 'g.extension r.object.geometry'"
            )
            gscript.fatal(message)
    neighborhood = True if flags['n'] else False
    if neighborhood:
        if not gscript.find_program('r.neighborhoodmatrix', '--help'):
            message = _(
                "You need to install the addon r.neighborhoodmatrix to be able"
            )
            message += _(" to calculate neighborhood statistics.\n")
            message += _(
                " You can install the addon with 'g.extension r.neighborhoodmatrix'"
            )
            gscript.fatal(message)

    raster_statistics = options['raster_statistics'].split(
        ',') if options['raster_statistics'] else []
    separator = gscript.separator(options['separator'])
    processes = int(options['processes'])

    output_header = ['cat']
    output_dict = collections.defaultdict(list)

    raster_stat_dict = {
        'zone': 0,
        'min': 4,
        'third_quart': 16,
        'max': 5,
        'sum': 12,
        'null_cells': 3,
        'median': 15,
        'label': 1,
        'first_quart': 14,
        'range': 6,
        'mean_of_abs': 8,
        'stddev': 9,
        'non_null_cells': 2,
        'coeff_var': 11,
        'variance': 10,
        'sum_abs': 13,
        'perc_90': 17,
        'mean': 7
    }

    geometry_stat_dict = {
        'cat': 0,
        'area': 1,
        'perimeter': 2,
        'compact_square': 3,
        'compact_circle': 4,
        'fd': 5,
        'xcoords': 6,
        'ycoords': 7
    }

    if flags['r']:
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=segment_map)

    stats_temp_file = gscript.tempfile()
    if area_measures:
        gscript.message(_("Calculating geometry statistics..."))
        output_header += area_measures
        stat_indices = [geometry_stat_dict[x] for x in area_measures]
        gscript.run_command('r.object.geometry',
                            input_=segment_map,
                            output=stats_temp_file,
                            overwrite=True,
                            quiet=True)

        firstline = True
        with open(stats_temp_file, 'r') as fin:
            for line in fin:
                if firstline:
                    firstline = False
                    continue
                values = line.rstrip().split('|')
                output_dict[values[0]] = [values[x] for x in stat_indices]

    if rasters:
        if not flags['c']:
            gscript.message(_("Checking usability of raster maps..."))
            for raster in rasters[:]:  # iterate over a copy while removing
                if not gscript.find_file(raster, element='cell')['name']:
                    gscript.message(_("Cannot find raster '%s'") % raster)
                    gscript.message(_("Removing this raster from list."))
                    rasters.remove(raster)
                    continue
                raster_info = gscript.parse_command('r.univar',
                                                    flags='g',
                                                    map_=raster,
                                                    quiet=True)
                if len(raster_info) == 0 or int(raster_info['null_cells']) > 0:
                    message = 'Raster %s contains null values.\n' % raster
                    message += 'This can lead to errors in the calculations.\n'
                    message += 'Check region settings and raster extent.\n'
                    message += 'Possibly fill null values of raster.\n'
                    message += 'Removing this raster from list.'
                    gscript.warning(message)
                    while raster in rasters:
                        rasters.remove(raster)
                    continue

        if len(rasters) > 0:
            gscript.message(_("Calculating statistics for raster maps..."))
            if len(rasters) < processes:
                processes = len(rasters)
                gscript.message(
                    _("Only one process per raster. Reduced number of processes to %i."
                      % processes))

            stat_indices = [raster_stat_dict[x] for x in raster_statistics]
            pool = Pool(processes)
            func = partial(worker, segment_map, stats_temp_file)
            pool.map(func, rasters)
            pool.close()
            pool.join()

            for raster in rasters:
                rastername = raster.split('@')[0]
                rastername = rastername.replace('.', '_')
                temp_file = stats_temp_file + '.' + rastername
                output_header += [
                    rastername + "_" + x for x in raster_statistics
                ]
                firstline = True
                with open(temp_file, 'r') as fin:
                    for line in fin:
                        if firstline:
                            firstline = False
                            continue
                        values = line.rstrip().split('|')
                        output_dict[values[0]] = output_dict[values[0]] + [
                            values[x] for x in stat_indices
                        ]

    # Calculating neighborhood statistics if requested
    if neighborhood:

        gscript.message(_("Calculating neighborhood statistics..."))

        # Add neighbordhood statistics to headers
        original_nb_values = len(output_header) - 1
        new_headers = ['neighbors_count']
        for i in range(1, len(output_header)):
            new_headers.append('%s_nbrmean' % output_header[i])
            new_headers.append('%s_nbrstddev' % output_header[i])

        output_header += new_headers

        # Get sorted neighborhood matrix
        nbr_matrix = sorted([
            x.split('|')
            for x in gscript.read_command('r.neighborhoodmatrix',
                                          input_=segment_map,
                                          flags='d',
                                          quiet=True).splitlines()
        ])

        # Calculate mean and stddev of neighbor values for each variable in the
        # output_dict
        for key, group in groupby(nbr_matrix, lambda x: x[0]):
            d = {}
            for i in range(original_nb_values):
                d[i] = (0, 0, 0)
            nbrlist = [str(x[1]) for x in group]
            if len(nbrlist) > 1:
                for nbr in nbrlist:
                    for i in range(original_nb_values):
                        d[i] = update(d[i], float(output_dict[nbr][i]))
                output_dict[key] = output_dict[key] + [str(len(nbrlist))]
                output_dict[key] = output_dict[key] + [
                    str(i) for sub in [finalize(x) for x in d.values()]
                    for i in sub
                ]
            else:
                newvalues = ['1']
                nbr = nbrlist[0]
                for i in range(original_nb_values):
                    newvalues.append(output_dict[nbr][i])
                    newvalues.append('0')
                output_dict[key] = output_dict[key] + newvalues

    message = _("Some values could not be calculated for the objects below. ")
    message += _("These objects are thus not included in the results. ")
    message += _("HINT: Check some of the raster maps for null values ")
    message += _("and possibly fill these values with r.fillnulls.")
    error_objects = []

    if csvfile:
        with open(csvfile, 'w') as f:  # text mode; we write str, not bytes
            f.write(separator.join(output_header) + "\n")
            for key in output_dict:
                if len(output_dict[key]) + 1 == len(output_header):
                    f.write(key + separator +
                            separator.join(output_dict[key]) + "\n")
                else:
                    error_objects.append(key)

    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = 'segmstat_tmp_vect_%d' % os.getpid()
        gscript.run_command('r.to.vect',
                            input_=segment_map,
                            output=temporary_vect,
                            type_='area',
                            flags='vt',
                            overwrite=True,
                            quiet=True)

        insert_sql = gscript.tempfile()
        fsql = open(insert_sql, 'w')
        fsql.write('BEGIN TRANSACTION;\n')
        if gscript.db_table_exist(temporary_vect):
            if gscript.overwrite():
                fsql.write('DROP TABLE %s;' % temporary_vect)
            else:
                gscript.fatal(
                    _("Table %s already exists. Use --o to overwrite" %
                      temporary_vect))
        create_statement = 'CREATE TABLE ' + temporary_vect + ' (cat int PRIMARY KEY);\n'
        fsql.write(create_statement)
        for header in output_header[1:]:
            addcol_statement = 'ALTER TABLE %s ADD COLUMN %s double precision;\n' % (
                temporary_vect, header)
            fsql.write(addcol_statement)
        for key in output_dict:
            if len(output_dict[key]) + 1 == len(output_header):
                sql = "INSERT INTO %s VALUES (%s, %s);\n" % (
                    temporary_vect, key, ",".join(output_dict[key]))
                sql = sql.replace('inf', 'NULL')
                sql = sql.replace('nan', 'NULL')
                fsql.write(sql)
            else:
                if not csvfile:
                    error_objects.append(key)

        fsql.write('END TRANSACTION;')
        fsql.close()

        gscript.run_command('db.execute', input=insert_sql, quiet=True)
        gscript.run_command('v.db.connect',
                            map_=temporary_vect,
                            table=temporary_vect,
                            quiet=True)
        gscript.run_command('g.copy',
                            vector="%s,%s" % (temporary_vect, vectormap),
                            quiet=True)

    if error_objects:
        object_string = ', '.join(error_objects[:100])
        message += _(
            "\n\nObjects with errors (only first 100 are shown):\n%s" %
            object_string)
        gscript.message(message)
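
The update and finalize helpers used for the neighborhood statistics are not shown here. The (0, 0, 0) initial triple suggests Welford's online algorithm over a (count, mean, M2) aggregate; a sketch under that assumption (population standard deviation assumed):

import math

def update(existing, new_value):
    # Welford's online update of a (count, mean, M2) aggregate.
    count, mean, M2 = existing
    count += 1
    delta = new_value - mean
    mean += delta / count
    M2 += delta * (new_value - mean)
    return (count, mean, M2)

def finalize(existing):
    # Return (mean, stddev); one pair per *_nbrmean/*_nbrstddev column.
    count, mean, M2 = existing
    if count < 1:
        return (float('nan'), float('nan'))
    return (mean, math.sqrt(M2 / count))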
Code example #53
def reclass(inf, outf, lim, clump, diag, les):
    infile = inf
    outfile = outf
    lesser = les
    limit = lim
    clumped = clump
    diagonal = diag

    s = grass.read_command("g.region", flags='p')
    s = decode(s)
    kv = grass.parse_key_val(s, sep=':')
    proj = kv['projection'].strip().split()[0]
    if proj == '0':
        grass.fatal(_("xy-locations are not supported; "
                      "projected data with grids in meters is required"))

    if not grass.find_file(infile)['name']:
        grass.fatal(_("Raster map <%s> not found") % infile)

    if clumped and diagonal:
        grass.fatal(_("flags c and d are mutually exclusive"))

    if clumped:
        clumpfile = infile
    else:
        clumpfile = "%s.clump.%s" % (infile.split('@')[0], outfile)
        TMPRAST.append(clumpfile)

        if not grass.overwrite():
            if grass.find_file(clumpfile)['name']:
                grass.fatal(_("Temporary raster map <%s> exists") % clumpfile)
        if diagonal:
            grass.message(
                _("Generating a clumped raster file including "
                  "diagonal neighbors..."))
            grass.run_command('r.clump',
                              flags='d',
                              input=infile,
                              output=clumpfile)
        else:
            grass.message(_("Generating a clumped raster file ..."))
            grass.run_command('r.clump', input=infile, output=clumpfile)

    if lesser:
        grass.message(
            _("Generating a reclass map with area size less than "
              "or equal to %f hectares...") % limit)
    else:
        grass.message(
            _("Generating a reclass map with area size greater "
              "than or equal to %f hectares...") % limit)

    recfile = outfile + '.recl'
    TMPRAST.append(recfile)

    sflags = 'aln'
    if grass.raster_info(infile)['datatype'] in ('FCELL', 'DCELL'):
        sflags += 'i'
    p1 = grass.pipe_command('r.stats',
                            flags=sflags,
                            input=(clumpfile, infile),
                            sep=';')
    p2 = grass.feed_command('r.reclass',
                            input=clumpfile,
                            output=recfile,
                            rules='-')
    rules = ''
    for line in p1.stdout:
        f = decode(line).rstrip(os.linesep).split(';')
        if len(f) < 5:
            continue
        hectares = float(f[4]) * 0.0001
        if lesser:
            test = hectares <= limit
        else:
            test = hectares >= limit
        if test:
            rules += "%s = %s %s\n" % (f[0], f[2], f[3])
    if rules:
        p2.stdin.write(encode(rules))
    p1.wait()
    p2.stdin.close()
    p2.wait()
    if p2.returncode != 0:
        if lesser:
            grass.fatal(
                _("No areas of size less than or equal to %f "
                  "hectares found.") % limit)
        else:
            grass.fatal(
                _("No areas of size greater than or equal to %f "
                  "hectares found.") % limit)
    grass.mapcalc("$outfile = $recfile", outfile=outfile, recfile=recfile)
Code example #54
def main():
    try:
        import sklearn
        import joblib

        if sklearn.__version__ < "0.20":
            gs.fatal(
                "Package python3-scikit-learn 0.20 or newer is required")

    except ImportError:
        gs.fatal("Package python3-scikit-learn 0.20 or newer is not installed")

    # parser options
    group = options["group"]
    output = options["output"]
    model_load = options["load_model"]
    probability = flags["p"]
    prob_only = flags["z"]
    chunksize = int(options["chunksize"])

    # remove @ from output in case overwriting result
    if "@" in output:
        output = output.split("@")[0]

    # check probabilities=True if prob_only=True
    if prob_only is True and probability is False:
        gs.fatal("Need to set probabilities=True if prob_only=True")

    # reload fitted model and training data
    estimator, y, class_labels = joblib.load(model_load)

    # define RasterStack
    stack = RasterStack(group=group)

    # perform raster prediction
    region = Region()
    row_incr = math.ceil(chunksize / region.cols)

    # do not read by increments if increment > n_rows
    if row_incr >= region.rows:
        row_incr = None

    # prediction
    if prob_only is False:
        gs.message("Predicting classification/regression raster...")
        stack.predict(
            estimator=estimator,
            output=output,
            height=row_incr,
            overwrite=gs.overwrite(),
        )

    if probability is True:
        gs.message("Predicting class probabilities...")
        stack.predict_proba(
            estimator=estimator,
            output=output,
            class_labels=np.unique(y),
            overwrite=gs.overwrite(),
            height=row_incr,
        )

    # assign categories for classification map
    if class_labels and prob_only is False:
        rules = []

        for val, lab in class_labels.items():
            rules.append(",".join([str(val), str(lab)]))

        rules = "\n".join(rules)
        rules_file = string_to_rules(rules)
        r.category(map=output, rules=rules_file, separator="comma")
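
The string_to_rules helper is not shown; a minimal sketch consistent with how it is used here (write the rules text to a GRASS tempfile and return the path):

import grass.script as gs

def string_to_rules(string):
    # Write a category-rules string to a temporary file so it can be
    # passed to modules expecting a rules= file argument.
    rules_file = gs.tempfile()
    with open(rules_file, "w") as f:
        f.write(string)
    return rules_file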
Code example #55
def main(options, flags):

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]

    register_null = flags["n"]
    t_flag = flags["t"]
    

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)
                                               
    # Set up the flags passed to r.contour; a separate name avoids
    # shadowing the 'flags' argument of main()
    mod_flags = ""
    if t_flag is True:
        mod_flags += "t"

    # Configure the r.contour module
    contour_module = pymod.Module("r.contour", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, flags=mod_flags,
                                   overwrite=overwrite,
                                   quiet=True)

    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution, except if attribute tables should
    # be created. Then force single process use
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parellel r.contour processes was "\
                              "reduced to 1 because of the table attribute "\
                              "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.contour on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title,
                                descr, stype, dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector', name=names, 
                            quiet=True)

    dbif.close()
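
The template-and-copy pattern above (configure a Module once with run_=False/finish_=False, deepcopy it per map, queue the copies) in isolation; a minimal sketch with hypothetical map names:

import copy
from grass.pygrass.modules import Module, ParallelModuleQueue

queue = ParallelModuleQueue(nprocs=4)
template = Module("r.contour", input="dummy", output="dummy",
                  step=10.0, run_=False, finish_=False,
                  overwrite=True, quiet=True)
for name in ("elev_1", "elev_2"):           # hypothetical map names
    mod = copy.deepcopy(template)
    mod(input=name, output=name + "_contours")
    queue.put(mod)
queue.wait()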
Code example #56
def main():
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options["method"]  # sharpening algorithm
    ms1_orig = options["blue"]  # blue channel
    ms2_orig = options["green"]  # green channel
    ms3_orig = options["red"]  # red channel
    pan_orig = options["pan"]  # high res pan channel
    out = options["output"]  # prefix for output RGB maps
    bits = options["bitdepth"]  # bit depth of image channels
    bladjust = flags["l"]  # adjust blue channel
    sproc = flags["s"]  # serial processing
    rescale = flags["r"]  # rescale to spread pixel values to entire 0-255 range

    # Checking bit depth
    bits = float(bits)
    if bits < 2 or bits > 30:
        grass.warning(_("Bit depth is outside acceptable range"))
        return

    outb = grass.core.find_file("%s_blue" % out)
    outg = grass.core.find_file("%s_green" % out)
    outr = grass.core.find_file("%s_red" % out)

    if (
        outb["name"] != "" or outg["name"] != "" or outr["name"] != ""
    ) and not grass.overwrite():
        grass.warning(
            _(
                "Maps with selected output prefix names already exist."
                " Delete them or use overwrite flag"
            )
        )
        return

    pid = str(os.getpid())

    # convert input image channels to 8 bit for processing
    ms1 = "tmp%s_ms1" % pid
    ms2 = "tmp%s_ms2" % pid
    ms3 = "tmp%s_ms3" % pid
    pan = "tmp%s_pan" % pid

    if not rescale:
        if bits == 8:
            grass.message(_("Using 8bit image channels"))
            if sproc:
                # serial processing
                grass.run_command(
                    "g.copy",
                    raster="%s,%s" % (ms1_orig, ms1),
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "g.copy",
                    raster="%s,%s" % (ms2_orig, ms2),
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "g.copy",
                    raster="%s,%s" % (ms3_orig, ms3),
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "g.copy",
                    raster="%s,%s" % (pan_orig, pan),
                    quiet=True,
                    overwrite=True,
                )
            else:
                # parallel processing
                pb = grass.start_command(
                    "g.copy",
                    raster="%s,%s" % (ms1_orig, ms1),
                    quiet=True,
                    overwrite=True,
                )
                pg = grass.start_command(
                    "g.copy",
                    raster="%s,%s" % (ms2_orig, ms2),
                    quiet=True,
                    overwrite=True,
                )
                pr = grass.start_command(
                    "g.copy",
                    raster="%s,%s" % (ms3_orig, ms3),
                    quiet=True,
                    overwrite=True,
                )
                pp = grass.start_command(
                    "g.copy",
                    raster="%s,%s" % (pan_orig, pan),
                    quiet=True,
                    overwrite=True,
                )

                pb.wait()
                pg.wait()
                pr.wait()
                pp.wait()

        else:
            grass.message(_("Converting image chanels to 8bit for processing"))
            maxval = pow(2, bits) - 1
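            # e.g. bits = 16 gives maxval = 65535; r.rescale maps the full
            # theoretical range 0..maxval linearly onto 0..255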
            if sproc:
                # serial processing
                grass.run_command(
                    "r.rescale",
                    input=ms1_orig,
                    from_="0,%f" % maxval,
                    output=ms1,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "r.rescale",
                    input=ms2_orig,
                    from_="0,%f" % maxval,
                    output=ms2,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "r.rescale",
                    input=ms3_orig,
                    from_="0,%f" % maxval,
                    output=ms3,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "r.rescale",
                    input=pan_orig,
                    from_="0,%f" % maxval,
                    output=pan,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )

            else:
                # parallel processing
                pb = grass.start_command(
                    "r.rescale",
                    input=ms1_orig,
                    from_="0,%f" % maxval,
                    output=ms1,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                pg = grass.start_command(
                    "r.rescale",
                    input=ms2_orig,
                    from_="0,%f" % maxval,
                    output=ms2,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                pr = grass.start_command(
                    "r.rescale",
                    input=ms3_orig,
                    from_="0,%f" % maxval,
                    output=ms3,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                pp = grass.start_command(
                    "r.rescale",
                    input=pan_orig,
                    from_="0,%f" % maxval,
                    output=pan,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )

                pb.wait()
                pg.wait()
                pr.wait()
                pp.wait()

    else:
        grass.message(_("Rescaling image chanels to 8bit for processing"))

        min_ms1 = int(grass.raster_info(ms1_orig)["min"])
        max_ms1 = int(grass.raster_info(ms1_orig)["max"])
        min_ms2 = int(grass.raster_info(ms2_orig)["min"])
        max_ms2 = int(grass.raster_info(ms2_orig)["max"])
        min_ms3 = int(grass.raster_info(ms3_orig)["min"])
        max_ms3 = int(grass.raster_info(ms3_orig)["max"])
        min_pan = int(grass.raster_info(pan_orig)["min"])
        max_pan = int(grass.raster_info(pan_orig)["max"])

        maxval = pow(2, bits) - 1
        if sproc:
            # serial processing
            grass.run_command(
                "r.rescale",
                input=ms1_orig,
                from_="%f,%f" % (min_ms1, max_ms1),
                output=ms1,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            grass.run_command(
                "r.rescale",
                input=ms2_orig,
                from_="%f,%f" % (min_ms2, max_ms2),
                output=ms2,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            grass.run_command(
                "r.rescale",
                input=ms3_orig,
                from_="%f,%f" % (min_ms3, max_ms3),
                output=ms3,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            grass.run_command(
                "r.rescale",
                input=pan_orig,
                from_="%f,%f" % (min_pan, max_pan),
                output=pan,
                to="0,255",
                quiet=True,
                overwrite=True,
            )

        else:
            # parallel processing
            pb = grass.start_command(
                "r.rescale",
                input=ms1_orig,
                from_="%f,%f" % (min_ms1, max_ms1),
                output=ms1,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            pg = grass.start_command(
                "r.rescale",
                input=ms2_orig,
                from_="%f,%f" % (min_ms2, max_ms2),
                output=ms2,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            pr = grass.start_command(
                "r.rescale",
                input=ms3_orig,
                from_="%f,%f" % (min_ms3, max_ms3),
                output=ms3,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            pp = grass.start_command(
                "r.rescale",
                input=pan_orig,
                from_="%f,%f" % (min_pan, max_pan),
                output=pan,
                to="0,255",
                quiet=True,
                overwrite=True,
            )

            pb.wait()
            pg.wait()
            pr.wait()
            pp.wait()

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv["nsres"]
    ewres = kv["ewres"]
    panres = (nsres + ewres) / 2
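    # the mean of the ns and ew resolutions is used as the target resolution,
    # which assumes the pan band has (roughly) square cells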

    # clone current region
    grass.use_temp_region()
    grass.run_command("g.region", res=panres, align=pan)

    # Select sharpening method
    grass.message(_("Performing pan sharpening with hi res pan image: %f" % panres))
    if sharpen == "brovey":
        brovey(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "ihs":
        ihs(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "pca":
        pca(pan, ms1, ms2, ms3, out, pid, sproc)
    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(_("Assigning grey equalized color tables to output images..."))

    # equalized grey scales give best contrast
    grass.message(_("setting pan-sharpened channels to equalized grey scale"))
    for ch in ["red", "green", "blue"]:
        grass.run_command(
            "r.colors", quiet=True, map="%s_%s" % (out, ch), flags="e", color="grey"
        )

    # Landsat is too blue-ish because the panchromatic band is less sensitive
    # to blue light, so the output blue channel can be modified
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        blue_colors = ["0 0 0 0\n5% 0 0 0\n67% 255 255 255\n100% 255 255 255"]
        # these previous colors are way too blue for landsat
        # blue_colors = ['0 0 0 0\n10% 0 0 0\n20% 200 200 200\n40% 230 230 230\n67% 255 255 255\n100% 255 255 255']
        bc = grass.feed_command("r.colors", quiet=True, map="%s_blue" % out, rules="-")
        bc.stdin.write(grass.encode("\n".join(blue_colors)))
        bc.stdin.close()

    # output notice
    grass.verbose(_("The following pan-sharpened output maps have been generated:"))
    for ch in ["red", "green", "blue"]:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(_("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(
        _("If desired, combine channels into a single RGB map with 'r.composite'.")
    )
    grass.verbose(_("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ["red", "green", "blue"]:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three outputs
    # grass.run_command('i.group', group=out,
    #                  input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.message(_("cleaning up temp files"))
    try:
        grass.run_command(
            "g.remove", flags="f", type="raster", pattern="tmp%s*" % pid, quiet=True
        )
    except CalledModuleError:
        pass  # cleanup is best-effort; leftover temporary maps are harmless
Code example #57
0
File: i.in.spotvgt.py  Project: sebastic/grass
def main():
    global vrtfile, tmpfile

    infile = options['input']
    rast = options['output']
    also = flags['a']

    # check for gdalinfo (just to check if installation is complete)
    if not gscript.find_program('gdalinfo', '--help'):
        gscript.fatal(_("'gdalinfo' not found, install GDAL tools first "
                        "(http://www.gdal.org)"))

    pid = str(os.getpid())
    tmpfile = gscript.tempfile()

    # let's go

    spotdir = os.path.dirname(infile)
    spotname = gscript.basename(infile, 'hdf')

    if rast:
        name = rast
    else:
        name = spotname

    if not gscript.overwrite() and gscript.find_file(name)['file']:
        gscript.fatal(_("<%s> already exists. Aborting.") % name)

    # still a ZIP file?  (is this portable?? see the r.in.srtm script for
    # ideas)
    if infile.lower().endswith('.zip'):
        gscript.fatal(_("Please extract %s before import.") % infile)

    try:
        p = gscript.Popen(['file', '-ib', infile], stdout=gscript.PIPE)
        s = gscript.decode(p.communicate()[0])
        if s == "application/x-zip":
            gscript.fatal(_("Please extract %s before import.") % infile)
    except OSError:
        # the 'file' utility may be unavailable; skip this extra check
        pass

    # create VRT header for NDVI

    projfile = os.path.join(spotdir, "0001_LOG.TXT")
    vrtfile = tmpfile + '.vrt'

    # first process the NDVI:
    gscript.try_remove(vrtfile)
    create_VRT_file(projfile, vrtfile, infile)

    # let's import the NDVI map...
    gscript.message(_("Importing SPOT VGT NDVI map..."))
    try:
        gscript.run_command('r.in.gdal', input=vrtfile, output=name)
    except CalledModuleError:
        gscript.fatal(_("An error occurred. Stop."))

    gscript.message(_("Imported SPOT VEGETATION NDVI map <%s>.") % name)

    #################
    # http://www.vgt.vito.be/faq/FAQS/faq19.html
    # What is the relation between the digital number and the real NDVI ?
    # Real NDVI =coefficient a * Digital Number + coefficient b
    #           = a * DN +b
    #
    # Coefficient a = 0.004
    # Coefficient b = -0.1
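    #
    # Worked example: a digital number of 200 corresponds to
    # 0.004 * 200 - 0.1 = 0.7 NDVI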

    # clone current region
    # switch to a temporary region
    gscript.use_temp_region()

    gscript.run_command('g.region', raster=name, quiet=True)

    gscript.message(_("Remapping digital numbers to NDVI..."))
    tmpname = "%s_%s" % (name, pid)
    gscript.mapcalc("$tmpname = 0.004 * $name - 0.1", tmpname=tmpname, name=name)
    gscript.run_command('g.remove', type='raster', name=name, quiet=True,
                        flags='f')
    gscript.run_command('g.rename', raster=(tmpname, name), quiet=True)

    # write cmd history:
    gscript.raster_history(name)

    # apply color table:
    gscript.run_command('r.colors', map=name, color='ndvi', quiet=True)

    ##########################
    # second, optionally process the SM quality map:

    # SM Status Map
    # http://nieuw.vgt.vito.be/faq/FAQS/faq22.html
    # Data about
    # Bit NR 7: Radiometric quality for B0 coded as 0 if bad and 1 if good
    # Bit NR 6: Radiometric quality for B2 coded as 0 if bad and 1 if good
    # Bit NR 5: Radiometric quality for B3 coded as 0 if bad and 1 if good
    # Bit NR 4: Radiometric quality for MIR coded as 0 if bad and 1 if good
    # Bit NR 3: land code 1 or water code 0
    # Bit NR 2: ice/snow code 1 , code 0 if there is no ice/snow
    # Bit NR 1:   0       0       1          1
    # Bit NR 0:   0       1       0          1
    #             clear   shadow  uncertain  cloud
    #
    # Note:
    # pos 7     6    5    4    3    2   1   0 (bit position)
    #   128    64   32   16    8    4   2   1 (values for 8 bit)
    #
    #
    # Bit 4-7 should be 1: their sum is 240
    # Bit 3   land code, should be 1, sum up to 248 along with higher bits
    # Bit 2   ice/snow code
    # Bit 0-1 should be 0
    #
    # A good map threshold: >= 248
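    #
    # Worked example: 248 = 128+64+32+16+8, i.e. bits 3-7 set and bits 0-2
    # clear: good radiometry on all bands, land, no ice/snow, clear sky.
    # 252 = 248 + 4 additionally has the ice/snow bit set.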

    if also:
        gscript.message(_("Importing SPOT VGT NDVI quality map..."))
        gscript.try_remove(vrtfile)
        qname = spotname.replace('NDV', 'SM')
        qfile = os.path.join(spotdir, qname)
        create_VRT_file(projfile, vrtfile, qfile)

        # let's import the SM quality map...
        smfile = name + '.sm'
        try:
            gscript.run_command('r.in.gdal', input=vrtfile, output=smfile)
        except CalledModuleError:
            gscript.fatal(_("An error occurred. Stop."))

        # some of the possible values:
        rules = [r + '\n' for r in ['8 50 50 50',
                                    '11 70 70 70',
                                    '12 90 90 90',
                                    '60 grey',
                                    '155 blue',
                                    '232 violet',
                                    '235 red',
                                    '236 brown',
                                    '248 orange',
                                    '251 yellow',
                                    '252 green']]
        gscript.write_command('r.colors', map=smfile, rules='-', stdin=rules)

        gscript.message(_("Imported SPOT VEGETATION SM quality map <%s>.") %
                        smfile)
        gscript.message(_("Note: A snow map can be extracted by category "
                          "252 (d.rast %s cat=252)") % smfile)
        gscript.message("")
        gscript.message(_("Filtering NDVI map by Status Map quality layer..."))

        filtfile = "%s_filt" % name
        gscript.mapcalc("$filtfile = if($smfile % 4 == 3 || "
                        "($smfile / 16) % 16 == 0, null(), $name)",
                        filtfile=filtfile, smfile=smfile, name=name)
        gscript.run_command('r.colors', map=filtfile, color='ndvi', quiet=True)
        gscript.message(_("Filtered SPOT VEGETATION NDVI map <%s>.") %
                        filtfile)

        # write cmd history:
        gscript.raster_history(smfile)
        gscript.raster_history(filtfile)

    gscript.message(_("Done."))
Code example #58
0
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    quantile = options["quantile"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Check if number of methods and output maps matches
    if 'quantile' in method:
        len_method = len(method.split(',')) - 1
    else:
        len_method = len(method.split(','))

    if (len(list(filter(None, quantile.split(',')))) + len_method) != len(
            output.split(',')):
        grass.fatal(
            _('The number of requested methods does not match the '
              'number of output maps.'))
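    # Example: method='average,quantile' with quantile='0.9' yields
    # len_method = 1 plus one quantile, so exactly two output names
    # must be given.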

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        with open(filename, 'w') as map_list_file:
            for row in rows:
                map_list_file.write("%s\n" % row["id"])

        flag = ""
        if len(rows) > 1000:
            grass.warning(
                _("Processing over 1000 maps: activating -z flag of "
                  "r.series which slows down processing"))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series",
                              flags=flag,
                              file=filename,
                              output=output,
                              overwrite=grass.overwrite(),
                              method=method,
                              quantile=quantile)
        except CalledModuleError:
            grass.fatal(
                _("%s failed. Check above error messages.") % 'r.series')

        if not add_time:

            # We need to set the temporal extent from the subset of selected maps
            maps = sp.get_registered_maps_as_objects(where=where,
                                                     order=order,
                                                     dbif=None)
            first_map = maps[0]
            last_map = maps[-1]
            start_a, end_a = first_map.get_temporal_extent_as_tuple()
            start_b, end_b = last_map.get_temporal_extent_as_tuple()

            if end_b is None:
                end_b = start_b

            if first_map.is_time_absolute():
                extent = tgis.AbsoluteTemporalExtent(start_time=start_a,
                                                     end_time=end_b)
            else:
                extent = tgis.RelativeTemporalExtent(
                    start_time=start_a,
                    end_time=end_b,
                    unit=first_map.get_relative_time_unit())

            for out_map in output.split(','):

                # Create the time range for the output map
                if out_map.find("@") >= 0:
                    id = out_map
                else:
                    mapset = grass.gisenv()["MAPSET"]
                    id = out_map + "@" + mapset

                map = sp.get_new_map_instance(id)
                map.load()

                map.set_temporal_extent(extent=extent)

                # Register the map in the temporal database
                if map.is_in_db():
                    map.update_all()
                else:
                    map.insert()
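The aggregation logic above closely resembles GRASS's t.rast.series module; assuming that module name, an equivalent call might look as follows (the dataset and output names are hypothetical):

import grass.script as grass

# Aggregate a WHERE-selected subset of a space time raster dataset
# into a single output map (names are hypothetical).
grass.run_command('t.rast.series',
                  input='temperature@climate',
                  method='average',
                  where="start_time >= '2010-01-01'",
                  output='temperature_mean_2010')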
Code example #59
0
File: i.pansharpen.py  Project: rashadkm/grass_cmake
def main():
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options['method']  # sharpening algorithm
    ms1 = options['blue']  # blue channel
    ms2 = options['green']  # green channel
    ms3 = options['red']  # red channel
    pan = options['pan']  # high res pan channel
    out = options['output']  # prefix for output RGB maps
    bladjust = flags['l']  # adjust blue channel
    sproc = flags['s']  # serial processing

    outb = grass.core.find_file('%s_blue' % out)
    outg = grass.core.find_file('%s_green' % out)
    outr = grass.core.find_file('%s_red' % out)

    if (outb['name'] != '' or outg['name'] != '' or outr['name'] != '') and not grass.overwrite():
        grass.warning(_('Maps with selected output prefix names already exist.'
                        ' Delete them or use overwrite flag'))
        return

    pid = str(os.getpid())

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv['nsres']
    ewres = kv['ewres']
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()

    grass.run_command('g.region', res=panres, align=pan)

    grass.message(_("Performing pan sharpening with hi res pan image: %f" % panres))

    if sharpen == "brovey":
        grass.verbose(_("Using Brovey algorithm"))

        # pan/intensity histogram matching using linear regression
        outname = 'tmp%s_pan1' % pid
        panmatch1 = matchhist(pan, ms1, outname)

        outname = 'tmp%s_pan2' % pid
        panmatch2 = matchhist(pan, ms2, outname)

        outname = 'tmp%s_pan3' % pid
        panmatch3 = matchhist(pan, ms3, outname)

        outr = '%s_red' % out
        outg = '%s_green' % out
        outb = '%s_blue' % out

        # calculate brovey transformation
        grass.message(_("Calculating Brovey transformation..."))

        if sproc:
            # serial processing
            e = '''eval(k = "$ms1" + "$ms2" + "$ms3")
                "$outr" = 1.0 * "$ms3" * "$panmatch3" / k
                "$outg" = 1.0 * "$ms2" * "$panmatch2" / k
                "$outb" = 1.0 * "$ms1" * "$panmatch1" / k'''
            grass.mapcalc(e, outr=outr, outg=outg, outb=outb,
                          panmatch1=panmatch1, panmatch2=panmatch2,
                          panmatch3=panmatch3, ms1=ms1, ms2=ms2, ms3=ms3,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms1, panmatch1, ms1, ms2, ms3),
                                     overwrite=True)
            pg = grass.mapcalc_start('%s_green = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms2, panmatch2, ms1, ms2, ms3),
                                     overwrite=True)
            pr = grass.mapcalc_start('%s_red = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms3, panmatch3, ms1, ms2, ms3),
                                     overwrite=True)

            pb.wait()
            pg.wait()
            pr.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name='%s,%s,%s' % (panmatch1, panmatch2, panmatch3))

    elif sharpen == "ihs":
        grass.verbose(_("Using IHS<->RGB algorithm"))
        # transform RGB channels into IHS color space
        grass.message(_("Transforming to IHS color space..."))
        grass.run_command('i.rgb.his', overwrite=True,
                          red=ms3,
                          green=ms2,
                          blue=ms1,
                          hue="tmp%s_hue" % pid,
                          intensity="tmp%s_int" % pid,
                          saturation="tmp%s_sat" % pid)

        # pan/intensity histogram matching using linear regression
        target = "tmp%s_int" % pid
        outname = "tmp%s_pan_int" % pid
        panmatch = matchhist(pan, target, outname)

        # substitute pan for intensity channel and transform back to RGB color space
        grass.message(_("Transforming back to RGB color space and sharpening..."))
        grass.run_command('i.his.rgb', overwrite=True,
                          hue="tmp%s_hue" % pid,
                          intensity="%s" % panmatch,
                          saturation="tmp%s_sat" % pid,
                          red="%s_red" % out,
                          green="%s_green" % out,
                          blue="%s_blue" % out)

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name=panmatch)

    elif sharpen == "pca":
        grass.verbose(_("Using PCA/inverse PCA algorithm"))
        grass.message(_("Creating PCA images and calculating eigenvectors..."))

        # initial PCA with RGB channels
        pca_out = grass.read_command('i.pca', quiet=True, rescale='0,0',
                                     input='%s,%s,%s' % (ms1, ms2, ms3),
                                     output='tmp%s.pca' % pid)
        if len(pca_out) < 1:
            grass.fatal(_("Input has no data. Check region settings."))

        b1evect = []
        b2evect = []
        b3evect = []
        for line in pca_out.replace('(', ',').replace(')', ',').splitlines():
            b1evect.append(float(line.split(',')[1]))
            b2evect.append(float(line.split(',')[2]))
            b3evect.append(float(line.split(',')[3]))

        # inverse PCA with hi res pan channel substituted for principal component 1
        pca1 = 'tmp%s.pca.1' % pid
        pca2 = 'tmp%s.pca.2' % pid
        pca3 = 'tmp%s.pca.3' % pid
        b1evect1 = b1evect[0]
        b1evect2 = b1evect[1]
        b1evect3 = b1evect[2]
        b2evect1 = b2evect[0]
        b2evect2 = b2evect[1]
        b2evect3 = b2evect[2]
        b3evect1 = b3evect[0]
        b3evect2 = b3evect[1]
        b3evect3 = b3evect[2]

        outname = 'tmp%s_pan' % pid
        panmatch = matchhist(pan, ms1, outname)

        grass.message(_("Performing inverse PCA ..."))

        stats1 = grass.parse_command("r.univar", map=ms1, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats2 = grass.parse_command("r.univar", map=ms2, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats3 = grass.parse_command("r.univar", map=ms3, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))

        b1mean = float(stats1['mean'])
        b2mean = float(stats2['mean'])
        b3mean = float(stats3['mean'])
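        # Inverse PCA: each output band is a weighted sum of the principal
        # components (weights from the eigenvectors) plus the band mean,
        # with PC1 replaced by the histogram-matched pan band to inject
        # the high-resolution detail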

        if sproc:
            # serial processing
            outr = '%s_red' % out
            outg = '%s_green' % out
            outb = '%s_blue' % out

            cmd1 = "$outb = (1.0 * $panmatch * $b1evect1) + ($pca2 * $b2evect1) + ($pca3 * $b3evect1) + $b1mean"
            cmd2 = "$outg = (1.0 * $panmatch * $b1evect2) + ($pca2 * $b2evect2) + ($pca3 * $b3evect2) + $b2mean"
            cmd3 = "$outr = (1.0 * $panmatch * $b1evect3) + ($pca2 * $b2evect3) + ($pca3 * $b3evect3) + $b3mean"

            cmd = '\n'.join([cmd1, cmd2, cmd3])

            grass.mapcalc(cmd, outb=outb, outg=outg, outr=outr,
                          panmatch=panmatch, pca2=pca2, pca3=pca3,
                          b1evect1=b1evect1, b2evect1=b2evect1, b3evect1=b3evect1,
                          b1evect2=b1evect2, b2evect2=b2evect2, b3evect2=b3evect2,
                          b1evect3=b1evect3, b2evect3=b2evect3, b3evect3=b3evect3,
                          b1mean=b1mean, b2mean=b2mean, b3mean=b3mean,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect1, pca2,
                                        b2evect1, pca3, b3evect1, b1mean),
                                     overwrite=True)

            pg = grass.mapcalc_start('%s_green = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect2, pca2,
                                        b2evect2, pca3, b3evect2, b2mean),
                                     overwrite=True)

            pr = grass.mapcalc_start('%s_red = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect3, pca2,
                                        b2evect3, pca3, b3evect3, b3mean),
                                     overwrite=True)

            pr.wait()
            pg.wait()
            pb.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type="raster",
                          pattern='tmp%s*,%s' % (pid, panmatch))

    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(_("Assigning grey equalized color tables to output images..."))
    # equalized grey scales give best contrast
    for ch in ['red', 'green', 'blue']:
        grass.run_command('r.colors', quiet=True, map="%s_%s" % (out, ch),
                          flags="e", color='grey')

    # Landsat is too blue-ish because the panchromatic band is less sensitive
    # to blue light, so the output blue channel can be modified
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        rules = grass.tempfile()
        colors = open(rules, 'w')
        colors.write('5 0 0 0\n20 200 200 200\n40 230 230 230\n67 255 255 255 \n')
        colors.close()

        grass.run_command('r.colors', map="%s_blue" % out, rules=rules)
        os.remove(rules)

    # output notice
    grass.verbose(_("The following pan-sharpened output maps have been generated:"))
    for ch in ['red', 'green', 'blue']:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(_("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(_("If desired, combine channels into a single RGB map with 'r.composite'."))
    grass.verbose(_("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ['red', 'green', 'blue']:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three outputs
    grass.run_command('i.group', group=out,
                      input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.run_command('g.remove', flags="f", type="raster",
                      pattern="tmp%s*" % pid, quiet=True)
Code example #60
0
def main():

    # Get the options
    input = options["input"]
    strds = options["strds"]
    where = options["where"]
    column = options["column"]
    method = options["method"]
    tempwhere = options["t_where"]
    sampling = options["sampling"]

    if where == "" or where == " " or where == "\n":
        where = None

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(input, "stvds", dbif)
    strds_sp = tgis.open_old_stds(strds, "strds", dbif)

    if strds_sp.get_temporal_type() != sp.get_temporal_type():
        dbif.close()
        grass.fatal(
            _("Input and aggregation dataset must "
              "have the same temporal type"))

    # Check if intervals are present in the sample dataset
    if sp.get_temporal_type() == "absolute":
        map_time = sp.absolute_time.get_map_time()
    else:
        map_time = sp.relative_time.get_map_time()

    if map_time != "interval":
        dbif.close()
        grass.fatal(
            _("All registered maps of the space time vector "
              "dataset must have time intervals"))

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", dbif)

    if not rows:
        dbif.close()
        grass.fatal(_("Space time vector dataset <%s> is empty") % sp.get_id())

    # Sample the raster dataset with the vector dataset and run v.what.rast
    for row in rows:
        start = row["start_time"]
        end = row["end_time"]
        vectmap = row["name"] + "@" + row["mapset"]
        layer = row["layer"]

        raster_maps = tgis.collect_map_names(strds_sp, dbif, start, end,
                                             sampling)

        aggregated_map_name = None

        if raster_maps:
            # Aggregation
            if method != "disabled" and len(raster_maps) > 1:
                # Generate the temporary map name
                aggreagated_map_name = "aggreagated_map_name_" + \
                    str(os.getpid())
                new_map = tgis.aggregate_raster_maps(raster_maps,
                                                     aggreagated_map_name,
                                                     start, end, 0, method,
                                                     False, dbif)
                aggreagated_map_name = aggreagated_map_name + "_0"
                if new_map is None:
                    continue
                # We overwrite the raster_maps list
                raster_maps = (new_map.get_id(), )

            for rastermap in raster_maps:

                if column:
                    col_name = column
                else:
                    # Create a new column with the SQL compliant
                    # name of the sampled raster map
                    col_name = rastermap.split("@")[0].replace(".", "_")

                coltype = "DOUBLE PRECISION"
                # Get raster type
                rasterinfo = raster.raster_info(rastermap)
                if rasterinfo["datatype"] == "CELL":
                    coltype = "INT"

                try:
                    if layer:
                        grass.run_command("v.db.addcolumn",
                                          map=vectmap,
                                          layer=layer,
                                          column="%s %s" % (col_name, coltype),
                                          overwrite=grass.overwrite())
                    else:
                        grass.run_command("v.db.addcolumn",
                                          map=vectmap,
                                          column="%s %s" % (col_name, coltype),
                                          overwrite=grass.overwrite())
                except CalledModuleError:
                    dbif.close()
                    grass.fatal(
                        _("Unable to add column %s to vector map <%s>") %
                        (col_name, vectmap))

                # Call v.what.rast
                try:
                    if layer:
                        grass.run_command("v.what.rast",
                                          map=vectmap,
                                          layer=layer,
                                          raster=rastermap,
                                          column=col_name,
                                          where=where)
                    else:
                        grass.run_command("v.what.rast",
                                          map=vectmap,
                                          raster=rastermap,
                                          column=col_name,
                                          where=where)
                except CalledModuleError:
                    dbif.close()
                    grass.fatal(
                        _("Unable to run v.what.rast for vector map "
                          "<%s> and raster map <%s>") % (vectmap, rastermap))

                if aggregated_map_name:
                    try:
                        grass.run_command("g.remove",
                                          flags='f',
                                          type='raster',
                                          name=aggregated_map_name)
                    except CalledModuleError:
                        dbif.close()
                        grass.fatal(
                            _("Unable to remove raster map <%s>") %
                            (aggregated_map_name))

                # Use the first map in case a column name was provided
                if column:
                    break

    dbif.close()
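Finally, a hedged sketch of how the sampling routine above might be driven, assuming it is exposed like GRASS's t.vect.what.strds (all dataset and column names are hypothetical):

import grass.script as grass

# Sample a space time raster dataset at the features of a space time
# vector dataset and write the values into an attribute column.
grass.run_command('t.vect.what.strds',
                  input='field_points@survey',
                  strds='ndvi@modis',
                  column='ndvi_avg',
                  method='average')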