def tmp_map_name(name):
    """
    Return a temporary map name, for example:

    tmp_avg_lse = tmp + '.avg_lse'
    """
    temporary_file = grass.tempfile()
    tmp = "tmp." + grass.basename(temporary_file)  # use its basename
    return tmp + '.' + str(name)
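
A minimal usage sketch for the helper above (assuming grass.script is imported as grass and that a raster named elevation exists; both names are illustrative): the generated name labels an intermediate map, which is removed once it is no longer needed.

import grass.script as grass

# hypothetical usage of tmp_map_name(); 'elevation' is an assumed input raster
tmp_avg_lse = tmp_map_name('avg_lse')  # e.g. tmp.file_abc123.avg_lse
grass.mapcalc("{out} = elevation * 0.5".format(out=tmp_avg_lse))
grass.run_command('g.remove', flags='f', type='raster',
                  name=tmp_avg_lse, quiet=True)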
Example No. 2
def tmp_map_name(name):
    """
    Return a temporary map name, for example:

    tmp_avg_lse = tmp + '.avg_lse'
    """
    temporary_file = grass.tempfile()
    tmp = "tmp." + grass.basename(temporary_file)  # use its basename
    return tmp + '.' + str(name)
Example No. 3
def temporary_filename(filename=None):
    """Returns a temporary filename using grass.script.tempfile() and
    grass.script.basename()

    Parameters
    ----------
    filename :
        Name for a file

    Returns
    -------
    temporary_filename :
        A temporary file name

    Examples
    --------
    >>> temporary_filename('potential')
    tmp.SomeTemporaryString.potential
    """
    temporary_absolute_filename = grass.tempfile()
    temporary_filename = "tmp." + grass.basename(temporary_absolute_filename)
    if filename:
        temporary_filename = temporary_filename + "." + str(filename)
    return temporary_filename
Example No. 4
def main():
    input = options['input']
    db_table = options['db_table']
    output = options['output']
    key = options['key']

    mapset = grass.gisenv()['MAPSET']

    if db_table:
        input = db_table

    if not output:
        tmpname = input.replace('.', '_')
        output = grass.basename(tmpname)

    # check if table exists
    try:
        nuldev = open(os.devnull, 'w+')
        s = grass.read_command('db.tables', flags='p', quiet=True, stderr=nuldev)
        nuldev.close()
    except CalledModuleError:
        # check connection parameters, set if uninitialized
        grass.read_command('db.connect', flags='c')
        s = grass.read_command('db.tables', flags='p', quiet=True)

    for l in decode(s).splitlines():
        if l == output:
            if grass.overwrite():
                grass.warning(_("Table <%s> already exists and will be "
                                "overwritten") % output)
                grass.write_command('db.execute', input='-',
                                    stdin="DROP TABLE %s" % output)
                break
            else:
                grass.fatal(_("Table <%s> already exists") % output)

    # treat DB as real vector map...
    layer = db_table if db_table else None

    vopts = {}
    if options['encoding']:
        vopts['encoding'] = options['encoding']

    try:
        grass.run_command('v.in.ogr', flags='o', input=input, output=output,
                          layer=layer, quiet=True, **vopts)
    except CalledModuleError:
        if db_table:
            grass.fatal(
                _("Input table <%s> not found or not readable") %
                input)
        else:
            grass.fatal(_("Input DSN <%s> not found or not readable") % input)

    # rename ID col if requested from cat to new name
    if key:
        grass.write_command('db.execute', quiet=True, input='-',
                            stdin="ALTER TABLE %s ADD COLUMN %s integer" %
                                  (output, key))
        grass.write_command('db.execute', quiet=True, input='-',
                            stdin="UPDATE %s SET %s=cat" % (output, key))

    # ... and immediately drop the empty geometry
    vectfile = grass.find_file(output, element='vector', mapset=mapset)['file']
    if not vectfile:
        grass.fatal(_("Something went wrong. Should not happen"))
    else:
        # remove the vector part
        grass.run_command('v.db.connect', quiet=True, map=output, layer='1',
                          flags='d')
        grass.run_command('g.remove', flags='f', quiet=True, type='vector',
                          name=output)

    # get rid of superfluous auto-added cat column (and cat_ if present)
    nuldev = open(os.devnull, 'w+')
    grass.run_command('db.dropcolumn', quiet=True, flags='f', table=output,
                      column='cat', stdout=nuldev, stderr=nuldev)
    nuldev.close()

    records = grass.db_describe(output)['nrows']
    grass.message(_("Imported table <%s> with %d rows") % (output, records))
Example No. 5
def main():
    dsn = options['dsn']
    db_table = options['db_table']
    output = options['output']
    key = options['key']

    mapset = grass.gisenv()['MAPSET']

    if db_table:
        input = db_table
    else:
        input = dsn

    if not output:
        tmpname = input.replace('.', '_')
        output = grass.basename(tmpname)

    if not grass.overwrite():
        s = grass.read_command('db.tables', flags='p')
        for l in s.splitlines():
            if l == output:
                grass.fatal(_("Table <%s> already exists") % output)
    else:
        grass.write_command('db.execute',
                            input='-',
                            stdin="DROP TABLE %s" % output)

    # treat DB as real vector map...
    if db_table:
        layer = db_table
    else:
        layer = None

    if grass.run_command(
            'v.in.ogr', flags='o', dsn=dsn, output=output, layer=layer,
            quiet=True) != 0:
        if db_table:
            grass.fatal(
                _("Input table <%s> not found or not readable") % input)
        else:
            grass.fatal(_("Input DSN <%s> not found or not readable") % input)

    # rename ID col if requested from cat to new name
    if key:
        grass.write_command('db.execute',
                            quiet=True,
                            input='-',
                            stdin="ALTER TABLE %s ADD COLUMN %s integer" %
                            (output, key))
        grass.write_command('db.execute',
                            quiet=True,
                            input='-',
                            stdin="UPDATE %s SET %s=cat" % (output, key))

    # ... and immediately drop the empty geometry
    vectfile = grass.find_file(output, element='vector', mapset=mapset)['file']
    if not vectfile:
        grass.fatal(_("Something went wrong. Should not happen"))
    else:
        # remove the vector part
        grass.run_command('v.db.connect',
                          quiet=True,
                          map=output,
                          layer='1',
                          flags='d')
        grass.run_command('g.remove',
                          flags='f',
                          quiet=True,
                          type='vect',
                          pattern=output)

    # get rid of superfluous auto-added cat column (and cat_ if present)
    nuldev = open(os.devnull, 'w+')
    grass.run_command('db.dropcolumn',
                      quiet=True,
                      flags='f',
                      table=output,
                      column='cat',
                      stdout=nuldev,
                      stderr=nuldev)
    nuldev.close()

    records = grass.db_describe(output)['nrows']
    grass.message(_("Imported table <%s> with %d rows") % (output, records))
Example No. 6
def main():
    global tmp, tmpname, rastertmp1, rastertmp2, rastertmp3
    rastertmp1 = False
    rastertmp2 = False
    rastertmp3 = False

    #### setup temporary files
    tmp = grass.tempfile()
    # we need a random name
    tmpname = grass.basename(tmp)

    vector = options["vector"]
    layer = options["layer"]
    column = options["column"]
    weight = options["weight"]
    output = options["output"]

    # vector exists?
    result = grass.find_file(vector, element="vector")
    if len(result["name"]) == 0:
        grass.fatal(_("Input vector <%s> not found") % vector)

    # raster exists?
    result = grass.find_file(weight, element="cell")
    if len(result["name"]) == 0:
        grass.fatal(_("Input weight raster <%s> not found") % weight)

    # column exists ?
    if column not in grass.vector_columns(vector, layer).keys():
        grass.fatal(
            _("Column does not exist for vector <%s>, layer %s") %
            (vector, layer))

    # is column numeric?
    coltype = grass.vector_columns(vector, layer)[column]["type"]

    if coltype not in ("INTEGER", "DOUBLE PRECISION"):
        grass.fatal(_("Column must be numeric"))

    # rasterize with cats (will be base layer)
    # strip off mapset for tmp output
    vector_basename = vector.split("@")[0]
    rastertmp1 = "%s_%s_1" % (vector_basename, tmpname)
    try:
        grass.run_command("v.to.rast",
                          input=vector,
                          output=rastertmp1,
                          use="cat",
                          quiet=True)
    except CalledModuleError:
        grass.fatal(_("An error occurred while converting vector to raster"))

    # rasterize with column
    rastertmp2 = "%s_%s_2" % (vector_basename, tmpname)
    try:
        grass.run_command(
            "v.to.rast",
            input=vector,
            output=rastertmp2,
            use="attr",
            layer=layer,
            attrcolumn=column,
            quiet=True,
        )
    except CalledModuleError:
        grass.fatal(_("An error occurred while converting vector to raster"))

    # zonal statistics
    rastertmp3 = "%s_%s_3" % (vector_basename, tmpname)
    try:
        grass.run_command(
            "r.stats.zonal",
            base=rastertmp1,
            cover=weight,
            method="sum",
            output=rastertmp3,
            quiet=True,
        )
    except CalledModuleError:
        grass.fatal(_("An error occurred while calculating zonal statistics"))

    # weighted interpolation
    exp = "$output = if($sumweight == 0, if(isnull($area_val), null(), 0), double($area_val) * $weight / $sumweight)"

    grass.mapcalc(exp,
                  output=output,
                  sumweight=rastertmp3,
                  area_val=rastertmp2,
                  weight=weight)

    sys.exit(0)
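
To illustrate the mapcalc expression above with made-up numbers: a cell whose polygon attribute ($area_val) is 100, whose weight raster value is 2, and whose zonal weight sum ($sumweight) is 10 receives 100 * 2 / 10 = 20; cells in zones whose weight sum is zero get 0, and cells outside any polygon stay NULL.

# quick sanity check of the weighting arithmetic (hypothetical values)
area_val, weight, sumweight = 100.0, 2.0, 10.0
print(area_val * weight / sumweight)  # -> 20.0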
Example No. 7
def main():
    dsn = options['dsn']
    db_table = options['db_table']
    output = options['output']
    key = options['key']

    mapset = grass.gisenv()['MAPSET']

    if db_table:
	input = db_table
    else:
	input = dsn

    if not output:
	tmpname = input.replace('.', '_')
	output = grass.basename(tmpname)

    if not grass.overwrite():
	s = grass.read_command('db.tables', flags = 'p')
	for l in s.splitlines():
	    if l == output:
		grass.fatal(_("Table <%s> already exists") % output)
    else:
	grass.write_command('db.execute', input = '-', stdin = "DROP TABLE %s" % output)

    # treat DB as real vector map...
    if db_table:
	layer = db_table
    else:
	layer = None

    if grass.run_command('v.in.ogr', flags = 'o', dsn = dsn, output = output,
			 layer = layer, quiet = True) != 0:
	if db_table:
	    grass.fatal(_("Input table <%s> not found or not readable") % input)
	else:
	    grass.fatal(_("Input DSN <%s> not found or not readable") % input)

    nuldev = open(os.devnull, 'w')

    # rename ID col if requested from cat to new name
    if key:
	grass.run_command('db.execute', quiet = True, flags = 'f',
                          input = '-', 
			  stdin = "ALTER TABLE %s ADD COLUMN %s integer" % (output, key) )
	grass.run_command('db.execute', quiet = True, flags = 'f',
                          input = '-', 
			  stdin = "UPDATE %s SET %s=cat" % (output, key) )

    # ... and immediately drop the empty geometry
    vectfile = grass.find_file(output, element = 'vector', mapset = mapset)['file']
    if not vectfile:
	grass.fatal(_("Something went wrong. Should not happen"))
    else:
	# remove the vector part
	grass.run_command('v.db.connect', quiet = True, map = output, layer = '1', flags = 'd')
	grass.run_command('g.remove', quiet = True, vect = output)

    # get rid of superfluous auto-added cat column (and cat_ if present)
    grass.run_command('db.dropcolumn', quiet = True, flags = 'f', table = output,
		      column = 'cat', stdout = nuldev, stderr = nuldev)

    records = grass.db_describe(output)['nrows']
    grass.message(_("Imported table <%s> with %d rows") % (output, records))
Example No. 8
def main():
    global tmp, sqltmp, tmpname, nuldev, vector, rastertmp
    rastertmp = False
    # setup temporary files
    tmp = grass.tempfile()
    sqltmp = tmp + ".sql"
    # we need a random name
    tmpname = grass.basename(tmp)

    nuldev = open(os.devnull, 'w')

    rasters = options['raster'].split(',')
    colprefixes = options['column_prefix'].split(',')
    vector = options['map']
    layer = options['layer']
    percentile = options['percentile']
    basecols = options['method'].split(',')

    ### setup enviro vars ###
    env = grass.gisenv()
    mapset = env['MAPSET']

    vs = vector.split('@')
    if len(vs) > 1:
        vect_mapset = vs[1]
    else:
        vect_mapset = mapset

    # does map exist in CURRENT mapset?
    if vect_mapset != mapset or not grass.find_file(vector, 'vector',
                                                    mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    # check if DBF driver used, in this case cut to 10 chars col names:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # colprefix for every raster map?
    if len(colprefixes) != len(rasters):
        grass.fatal(
            _("Number of raster maps ({0}) different from "
              "number of column prefixes ({1})").format(
                  len(rasters), len(colprefixes)))

    vector = vs[0]

    rastertmp = "%s_%s" % (vector, tmpname)

    for raster in rasters:
        # check the input raster map
        if not grass.find_file(raster, 'cell')['file']:
            grass.fatal(_("Raster map <%s> not found") % raster)

    # save current settings:
    grass.use_temp_region()

    # Temporarily aligning region resolution to $RASTER resolution
    # keep boundary settings
    grass.run_command('g.region', align=rasters[0])

    # prepare base raster for zonal statistics
    try:
        nlines = grass.vector_info_topo(vector)['lines']
        # Create densified lines rather than thin lines
        if flags['d'] and nlines > 0:
            grass.run_command('v.to.rast',
                              input=vector,
                              layer=layer,
                              output=rastertmp,
                              use='cat',
                              flags='d',
                              quiet=True)
        else:
            grass.run_command('v.to.rast',
                              input=vector,
                              layer=layer,
                              output=rastertmp,
                              use='cat',
                              quiet=True)
    except CalledModuleError:
        grass.fatal(_("An error occurred while converting vector to raster"))

    # dump cats to file to avoid "too many argument" problem:
    p = grass.pipe_command('r.category', map=rastertmp, sep=';', quiet=True)
    cats = []

    for line in p.stdout:
        line = decode(line)
        cats.append(line.rstrip('\r\n').split(';')[0])
    p.wait()

    number = len(cats)
    if number < 1:
        grass.fatal(_("No categories found in raster map"))

    # Check if all categories got converted
    # Report categories from vector map
    vect_cats = grass.read_command('v.category',
                                   input=vector,
                                   option='report',
                                   flags='g').rstrip('\n').split('\n')

    # get number of all categories in selected layer
    for vcl in vect_cats:
        if vcl.split(' ')[0] == layer and vcl.split(' ')[1] == 'all':
            vect_cats_n = int(vcl.split(' ')[2])

    if vect_cats_n != number:
        grass.warning(
            _("Not all vector categories converted to raster. "
              "Converted {0} of {1}.").format(number, vect_cats_n))

    # check if DBF driver used, in this case cut to 10 chars col names:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # Find out which table is linked to the vector map on the given layer
    if not fi['table']:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))

    # replaced by user choice
    #basecols = ['n', 'min', 'max', 'range', 'mean', 'stddev', 'variance', 'cf_var', 'sum']

    for i in range(len(rasters)):
        raster = rasters[i]
        colprefix = colprefixes[i]
        # we need at least three chars to distinguish [mea]n from [med]ian
        # so colprefix can't be longer than 6 chars with DBF driver
        if dbfdriver:
            colprefix = colprefix[:6]
            variables_dbf = {}

        # by default perccol variable is used only for "variables" variable
        perccol = "percentile"
        perc = None
        for b in basecols:
            if b.startswith('p'):
                perc = b
        if perc:
            # namespace is limited in DBF but the % value is important
            if dbfdriver:
                perccol = "per" + percentile
            else:
                perccol = "percentile_" + percentile
            percindex = basecols.index(perc)
            basecols[percindex] = perccol

        # dictionary with name of methods and position in "r.univar -gt"  output
        variables = {
            'number': 2,
            'null_cells': 2,
            'minimum': 4,
            'maximum': 5,
            'range': 6,
            'average': 7,
            'stddev': 9,
            'variance': 10,
            'coeff_var': 11,
            'sum': 12,
            'first_quartile': 14,
            'median': 15,
            'third_quartile': 16,
            perccol: 17
        }
        # this list is used to set the 'e' flag for r.univar
        extracols = ['first_quartile', 'median', 'third_quartile', perccol]
        addcols = []
        colnames = []
        extstat = ""
        for i in basecols:
            # this checks the complete name of the input that should be truncated
            for k in variables.keys():
                if i in k:
                    i = k
                    break
            if i in extracols:
                extstat = 'e'
            # check if column already present
            currcolumn = ("%s_%s" % (colprefix, i))
            if dbfdriver:
                currcolumn = currcolumn[:10]
                variables_dbf[currcolumn.replace("%s_" % colprefix, '')] = i

            colnames.append(currcolumn)
            if currcolumn in grass.vector_columns(vector, layer).keys():
                if not flags['c']:
                    grass.fatal(
                        (_("Cannot create column <%s> (already present). ") %
                         currcolumn) +
                        _("Use -c flag to update values in this column."))
            else:
                if i == "n":
                    coltype = "INTEGER"
                else:
                    coltype = "DOUBLE PRECISION"
                addcols.append(currcolumn + ' ' + coltype)

        if addcols:
            grass.verbose(_("Adding columns '%s'") % addcols)
            try:
                grass.run_command('v.db.addcolumn',
                                  map=vector,
                                  columns=addcols,
                                  layer=layer)
            except CalledModuleError:
                grass.fatal(_("Adding columns failed. Exiting."))

        # calculate statistics:
        grass.message(_("Processing input data (%d categories)...") % number)

        # get rid of any earlier attempts
        grass.try_remove(sqltmp)

        f = open(sqltmp, 'w')

        # do the stats
        p = grass.pipe_command('r.univar',
                               flags='t' + extstat,
                               map=raster,
                               zones=rastertmp,
                               percentile=percentile,
                               sep=';')

        first_line = 1

        f.write("{0}\n".format(grass.db_begin_transaction(fi['driver'])))
        for line in p.stdout:
            if first_line:
                first_line = 0
                continue

            vars = decode(line).rstrip('\r\n').split(';')

            f.write("UPDATE %s SET" % fi['table'])
            first_var = 1
            for colname in colnames:
                variable = colname.replace("%s_" % colprefix, '', 1)
                if dbfdriver:
                    variable = variables_dbf[variable]
                i = variables[variable]
                value = vars[i]
                # convert nan, +nan, -nan, inf, +inf, -inf, Infinity, +Infinity,
                # -Infinity to NULL
                if value.lower().endswith('nan') or 'inf' in value.lower():
                    value = 'NULL'
                if not first_var:
                    f.write(" , ")
                else:
                    first_var = 0
                f.write(" %s=%s" % (colname, value))

            f.write(" WHERE %s=%s;\n" % (fi['key'], vars[0]))
        f.write("{0}\n".format(grass.db_commit_transaction(fi['driver'])))
        p.wait()
        f.close()

        grass.message(_("Updating the database ..."))
        exitcode = 0
        try:
            grass.run_command('db.execute',
                              input=sqltmp,
                              database=fi['database'],
                              driver=fi['driver'])
            grass.verbose(
                (_("Statistics calculated from raster map <{raster}>"
                   " and uploaded to attribute table"
                   " of vector map <{vector}>.").format(raster=raster,
                                                        vector=vector)))
        except CalledModuleError:
            grass.warning(
                _("Failed to upload statistics to attribute table of vector map <%s>."
                  ) % vector)
            exitcode = 1

    sys.exit(exitcode)
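
For reference, each zone row written to the temporary SQL file has this general shape (table, column and category names below are made up for illustration):

# illustrative only: the kind of UPDATE statement main() writes to sqltmp
table, key, cat = 'mymap', 'cat', 42                 # assumed table/key/category
row = {'elev_average': 123.4, 'elev_stddev': 5.6}    # assumed statistics columns
sets = " , ".join("%s=%s" % (col, val) for col, val in row.items())
print("UPDATE %s SET %s WHERE %s=%s;" % (table, sets, key, cat))
# -> UPDATE mymap SET elev_average=123.4 , elev_stddev=5.6 WHERE cat=42;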
Example No. 9
def main():
    """1st, get input, output, options and flags"""

    global acq_time, esd

    spectral_bands = options['band'].split(',')
    outputsuffix = options['outputsuffix']
    utc = options['utc']
    doy = options['doy']
    sea = options['sea']

    radiance = flags['r']
    keep_region = flags['k']

#    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
#    imglst = [spectral_bands]
#    images = {}
#    for img in imglst:  # Retrieving Image Info
#        images[img] = Info(img, mapset)
#        images[img].read()

    # -----------------------------------------------------------------------
    # Temporary Region and Files
    # -----------------------------------------------------------------------

    if not keep_region:
        grass.use_temp_region()  # to safely modify the region
    tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
    tmp = "tmp." + grass.basename(tmpfile)  # use its basename

    # -----------------------------------------------------------------------
    # Global Metadata: Earth-Sun distance, Sun Zenith Angle
    # -----------------------------------------------------------------------

    # Earth-Sun distance
    if doy:
        esd = jd_to_esd(int(doy))

    elif utc:
        acq_utc = AcquisitionTime(utc)  # will hold esd (earth-sun distance)
        acq_dat = datetime(acq_utc.year, acq_utc.month, acq_utc.day)
        esd = acq_utc.esd

    else:
        grass.fatal(_("Either the UTC string or "
                      "the Day-of-Year (doy) is required!"))

    sza = 90 - float(sea)  # Sun Zenith Angle based on Sun Elevation Angle

    # -----------------------------------------------------------------------
    # Loop processing over all bands
    # -----------------------------------------------------------------------
    for band in spectral_bands:

        global tmp_rad

        g.message("|* Processing the %s spectral band" % band, flags='i')

        if not keep_region:
            g.message("\n|! Matching region to %s" % band)  # set region
            run('g.region', rast=band)   # ## FixMe

        # -------------------------------------------------------------------
        # Converting to Spectral Radiance
        # -------------------------------------------------------------------

        msg = "\n|> Converting to Spectral Radiance: " \
#            "L(λ) = 10^4 x DN(λ) / CalCoef(λ) x Bandwidth(λ)"  # Unicode? ##
        g.message(msg)

        # -------------------------------------------------------------------
        # Band dependent metadata for Spectral Radiance
        # -------------------------------------------------------------------

        # Why is this necessary?  Any function to remove the mapsets name?
        if '@' in band:
            band_key = (band.split('@')[0])
        else:
            band_key = band

        # get coefficients
        if acq_dat < cc_update:
            g.message("\n|! Using Pre-2001 Calibration Coefficient values",
                      flags='i')
            cc = float(CC[band_key][0])
        else:
            cc = float(CC[band_key][1])

        # get bandwidth
        bw = float(CC[band_key][2])

        # inform
        msg = "   [Calibration Coefficient=%d, Bandwidth=%.1f]" \
            % (cc, bw)
        g.message(msg)

        # convert
        tmp_rad = "%s.Radiance" % tmp  # Temporary Map
        rad = "%s = 10^4 * %s / %f * %f" \
            % (tmp_rad, band, cc, bw)
        grass.mapcalc(rad, overwrite=True)

        # string for metadata
        history_rad = rad
        history_rad += "Calibration Coefficient=%d; Effective Bandwidth=%.1f" \
            % (cc, bw)
        title_rad = "%s band (Spectral Radiance)" % band
        units_rad = "W / m2 / μm / ster"
        description_rad = "At-sensor %s band spectral Radiance (W/m2/μm/sr)" \
            % band

        if not radiance:

            # ---------------------------------------------------------------
            # Converting to Top-of-Atmosphere Reflectance
            # ---------------------------------------------------------------

            global tmp_toar

            msg = "\n|> Converting to Top-of-Atmosphere Reflectance" \
                  # "ρ(p) = π x L(λ) x d^2 / ESUN(λ) x cos(θ(S))"  # Unicode?
            g.message(msg)

            # ---------------------------------------------------------------
            # Band dependent metadata for Spectral Radiance
            # ---------------------------------------------------------------

            # get esun
            esun = CC[band_key][3]

            # inform
            msg = "   [Earth-Sun distane=%f, Mean solar exoatmospheric " \
                "irradiance=%.1f]" % (esd, esun)
            g.message(msg)

            # convert
            tmp_toar = "%s.Reflectance" % tmp  # Spectral Reflectance
            toar = "%s = %f * %s * %f^2 / %f * cos(%f)" \
                % (tmp_toar, math.pi, tmp_rad, esd, esun, sza)
            grass.mapcalc(toar, overwrite=True)

            # strings for output's metadata
            history_toar = toar
            history_toar += "ESD=%f; BAND_Esun=%f; SZA=%f" % (esd, esun, sza)
            title_toar = "%s band (Top of Atmosphere Reflectance)" % band
            units_toar = "Unitless planetary reflectance"
            description_toar = "Top of Atmosphere `echo ${BAND}` band spectral"
            " Reflectance (unitless)"

        if not radiance:

            # history entry
            run("r.support", map=tmp_toar, title=title_toar,
                units=units_toar, description=description_toar,
                source1=source1_toar, source2=source2_toar,
                history=history_toar)

            # add suffix to basename & rename end product
            toar_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_toar, toar_name))

        elif tmp_rad:

            # history entry
            run("r.support", map=tmp_rad,
                title=title_rad, units=units_rad, description=description_rad,
                source1=source1_rad, source2=source2_rad, history=history_rad)

            # add suffix to basename & rename end product
            rad_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_rad, rad_name))

    # visualising-related information
    if not keep_region:
        grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Region's resolution restored!")
    g.message("\n>>> Hint: rebalancing colors "
              "(i.colors.enhance) may improve appearance of RGB composites!",
              flags='i')
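
A small numeric check of the radiance expression built above, using the same left-to-right operator order as the r.mapcalc string (all numbers are made up, not real calibration values):

# hypothetical values: DN = 200, calibration coefficient cc = 161, bandwidth bw = 0.071
dn, cc, bw = 200.0, 161.0, 0.071
radiance = 10 ** 4 * dn / cc * bw   # r.mapcalc's '^' becomes '**' in Python
print(round(radiance, 2))           # -> 881.99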
Example No. 10
def main():
    global tmp, sqltmp, tmpname, nuldev, vector, mask_found, rastertmp
    mask_found = False
    rastertmp = False
    #### setup temporary files
    tmp = grass.tempfile()
    sqltmp = tmp + ".sql"
    # we need a random name
    tmpname = grass.basename(tmp)

    nuldev = open(os.devnull, 'w')

    raster = options['raster']
    colprefix = options['column_prefix']
    vector = options['map']
    layer = options['layer']
    percentile = options['percentile']
    basecols = options['method'].split(',')

    ### setup enviro vars ###
    env = grass.gisenv()
    mapset = env['MAPSET']

    vs = vector.split('@')
    if len(vs) > 1:
        vect_mapset = vs[1]
    else:
        vect_mapset = mapset

    # does map exist in CURRENT mapset?
    if vect_mapset != mapset or not grass.find_file(vector, 'vector',
                                                    mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    vector = vs[0]

    rastertmp = "%s_%s" % (vector, tmpname)

    # check the input raster map
    if not grass.find_file(raster, 'cell')['file']:
        grass.fatal(_("Raster map <%s> not found") % raster)

    # check presence of raster MASK, put it aside
    mask_found = bool(grass.find_file('MASK', 'cell')['file'])
    if mask_found:
        grass.message(_("Raster MASK found, temporarily disabled"))
        grass.run_command('g.rename',
                          rast=('MASK', tmpname + "_origmask"),
                          quiet=True)

    # save current settings:
    grass.use_temp_region()

    # Temporarily aligning region resolution to $RASTER resolution
    # keep boundary settings
    grass.run_command('g.region', align=raster)

    # prepare raster MASK
    if grass.run_command(
            'v.to.rast', input=vector, output=rastertmp, use='cat',
            quiet=True) != 0:
        grass.fatal(_("An error occurred while converting vector to raster"))

    # dump cats to file to avoid "too many argument" problem:
    p = grass.pipe_command('r.category', map=rastertmp, sep=';', quiet=True)
    cats = []

    for line in p.stdout:
        cats.append(line.rstrip('\r\n').split(';')[0])
    p.wait()

    number = len(cats)
    if number < 1:
        grass.fatal(_("No categories found in raster map"))

    # check if DBF driver used, in this case cut to 10 chars col names:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # Find out which table is linked to the vector map on the given layer
    if not fi['table']:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))

    # replaced by user choice
    #basecols = ['n', 'min', 'max', 'range', 'mean', 'stddev', 'variance', 'cf_var', 'sum']

    # we need at least three chars to distinguish [mea]n from [med]ian
    # so colprefix can't be longer than 6 chars with DBF driver
    if dbfdriver:
        colprefix = colprefix[:6]
        variables_dbf = {}

    # by default perccol variable is used only for "variables" variable
    perccol = "percentile"
    perc = None
    for b in basecols:
        if b.startswith('p'):
            perc = b
    if perc:
        # namespace is limited in DBF but the % value is important
        if dbfdriver:
            perccol = "per" + percentile
        else:
            perccol = "percentile_" + percentile
        percindex = basecols.index(perc)
        basecols[percindex] = perccol

    # dictionary with name of methods and position in "r.univar -gt"  output
    variables = {
        'number': 2,
        'minimum': 4,
        'maximum': 5,
        'range': 6,
        'average': 7,
        'stddev': 9,
        'variance': 10,
        'coeff_var': 11,
        'sum': 12,
        'first_quartile': 14,
        'median': 15,
        'third_quartile': 16,
        perccol: 17
    }
    # this list is used to set the 'e' flag for r.univar
    extracols = ['first_quartile', 'median', 'third_quartile', perccol]
    addcols = []
    colnames = []
    extstat = ""
    for i in basecols:
        # this checks the complete name of the input that should be truncated
        for k in variables.keys():
            if i in k:
                i = k
                break
        if i in extracols:
            extstat = 'e'
        # check if column already present
        currcolumn = ("%s_%s" % (colprefix, i))
        if dbfdriver:
            currcolumn = currcolumn[:10]
            variables_dbf[currcolumn.replace("%s_" % colprefix, '')] = i

        colnames.append(currcolumn)
        if currcolumn in grass.vector_columns(vector, layer).keys():
            if not flags['c']:
                grass.fatal(
                    (_("Cannot create column <%s> (already present). ") %
                     currcolumn) +
                    _("Use -c flag to update values in this column."))
        else:
            if i == "n":
                coltype = "INTEGER"
            else:
                coltype = "DOUBLE PRECISION"
            addcols.append(currcolumn + ' ' + coltype)

    if addcols:
        grass.verbose(_("Adding columns '%s'") % addcols)
        if grass.run_command(
                'v.db.addcolumn', map=vector, columns=addcols,
                layer=layer) != 0:
            grass.fatal(_("Adding columns failed. Exiting."))

    # calculate statistics:
    grass.message(_("Processing data (%d categories)...") % number)

    # get rid of any earlier attempts
    grass.try_remove(sqltmp)

    f = open(sqltmp, 'w')

    # do the stats
    p = grass.pipe_command('r.univar',
                           flags='t' + 'g' + extstat,
                           map=raster,
                           zones=rastertmp,
                           percentile=percentile,
                           sep=';')

    first_line = 1

    if not dbfdriver:
        f.write("BEGIN TRANSACTION\n")
    for line in p.stdout:
        if first_line:
            first_line = 0
            continue

        vars = line.rstrip('\r\n').split(';')

        f.write("UPDATE %s SET" % fi['table'])
        first_var = 1
        for colname in colnames:
            variable = colname.replace("%s_" % colprefix, '')
            if dbfdriver:
                variable = variables_dbf[variable]
            i = variables[variable]
            value = vars[i]
            # convert nan, +nan, -nan to NULL
            if value.lower().endswith('nan'):
                value = 'NULL'
            if not first_var:
                f.write(" , ")
            else:
                first_var = 0
            f.write(" %s=%s" % (colname, value))

        f.write(" WHERE %s=%s;\n" % (fi['key'], vars[0]))
    if not dbfdriver:
        f.write("COMMIT\n")
    p.wait()
    f.close()

    grass.message(_("Updating the database ..."))
    exitcode = grass.run_command('db.execute',
                                 input=sqltmp,
                                 database=fi['database'],
                                 driver=fi['driver'])

    grass.run_command('g.remove',
                      flags='f',
                      type='rast',
                      pattern='MASK',
                      quiet=True,
                      stderr=nuldev)

    if exitcode == 0:
        grass.verbose(
            (_("Statistics calculated from raster map <%s>") % raster) +
            (_(" and uploaded to attribute table of vector map <%s>.") %
             vector))
    else:
        grass.warning(
            _("Failed to upload statistics to attribute table of vector map <%s>."
              ) % vector)

    sys.exit(exitcode)
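
The variables dictionary above maps each statistic name to its column index in the semicolon-separated rows produced by r.univar -t; a small sketch of that lookup on a fabricated row (the exact column layout may differ between GRASS versions):

# fabricated r.univar -t style row (separator ';'); values are not real output
line = "1;;250;3;90.5;155.2;64.7;120.1;120.1;12.3;151.3;10.2;30025.0;30025.0"
vars = line.rstrip('\r\n').split(';')
variables = {'number': 2, 'minimum': 4, 'maximum': 5, 'average': 7, 'sum': 12}
for name, idx in variables.items():
    print(name, '=', vars[idx])   # e.g. average = 120.1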
Example No. 11
def main():
    """ """
    sensor = options['sensor']

    mapsets = options['mapsets']
    prefix = options['input_prefix']
    suffix = options['output_suffix']

    metafile = grass.basename(options['metafile'])

    # 6S parameter names shortened following i.atcorr's manual
    atm = int(options['atmospheric_model'])  # Atmospheric model [index]
    aer = int(options['aerosols_model'])  # Aerosols model [index]

    vis = options['visibility_range']  # Visibility [km]
    aod = options['aerosol_optical_depth']  # Aerosol Optical Depth at 550nm

    xps = options['altitude']  # Mean Target Altitude [negative km]
    if not xps:
        msg = "Note, this value will be overwritten if a DEM raster has been "\
              "defined as an input!"
        g.message(msg)

    elevation_map = options['elevation']
    visibility_map = options['visibility']

    radiance = flags['r']
    if radiance:
        radiance_flag = 'r'
    else:
        radiance_flag = ''

    # If the scene to be processed was imported via the (custom) python
    # Landsat import script, then, Mapset name == Scene identifier

    mapset = grass.gisenv()['MAPSET']
    if mapset == 'PERMANENT':
        grass.fatal(_('Please change to another mapset than the PERMANENT'))

#    elif 'L' not in mapset:
#        msg = "Assuming the Landsat scene(s) ha-s/ve been imported using the "\
#              "custom python import script, the Mapset's name *should* begin "\
#              "with the letter L!"
#        grass.fatal(_(msg))

    else:
        result = grass.find_file(element='cell_misc',
                                 name=metafile,
                                 mapset='.')
        if not result['file']:
            grass.fatal("The metadata file <%s> is not in GRASS' data base!"
                        % metafile)
        else:
            metafile = result['file']

    #
    # Acquisition's metadata
    #

    msg = "Acquisition metadata for 6S code (line 2 in Parameters file)\n"

    # Month, day
    date = grass.parse_command('i.landsat.toar', flags='p',
                               input='dummy', output='dummy',
                               metfile=metafile, lsatmet='date')
    mon = int(date['date'][5:7])  # Month of acquisition
    day = int(date['date'][8:10])  # Day of acquisition

    # GMT in decimal hours
    gmt = grass.read_command('i.landsat.toar', flags='p',
                             input='dummy', output='dummy',
                             metfile=metafile, lsatmet='time')
    gmt = float(gmt.rstrip('\n'))

    # Scene's center coordinates
    cll = grass.parse_command('g.region', flags='clg')
    lon = float(cll['center_long'])  # Center Longitude [decimal degrees]
    lat = float(cll['center_lat'])  # Center Latitude [decimal degrees]

    msg += str(mon) + ' ' + str(day) + ' ' + str(gmt) + ' ' + \
        str(lon) + ' ' + str(lat)
    g.message(msg)
   
    # 
    # AOD
    #
    if aod:
        aod = float(options['aerosol_optical_depth'])

    else:
        # sane defaults
        if 4 < mon < 10:
            aod = float(0.222)  # summer
        else:
            aod = float(0.111)  # winter

    #
    # Mapsets are Scenes. Read'em all!
    #

    if mapsets == 'all':
        scenes = grass.mapsets()

    elif mapsets == 'current':
        scenes = [mapset]

    else:
        scenes = mapsets.split(',')

    if 'PERMANENT' in scenes:
        scenes.remove('PERMANENT')

    # access only to specific mapsets!
    msg = "\n|* Performing atmospheric correction for scenes:  %s" % scenes
    g.message(msg)

    for scene in scenes:

        # ensure access only to *current* mapset
        run('g.mapsets', mapset='.', operation='set')

        # scene's basename as in GRASS' db
        basename = grass.read_command('g.mapset', flags='p')
        msg = "   | Processing scene:  %s" % basename
        g.message(msg)

        # loop over Landsat bands in question
        for band in sensors[sensor].keys():

            inputband = prefix + str(band)
            msg = '\n>>> Processing band: {band}'.format(band=inputband)
            g.message(msg)


            # Generate 6S parameterization file
            p6s = Parameters(geo=geo[sensor],
                             mon=mon, day=day, gmt=gmt, lon=lon, lat=lat,
                             atm=atm,
                             aer=aer,
                             vis=vis,
                             aod=aod,
                             xps=xps, xpp=xpp,
                             bnd=sensors[sensor][band])
            
            #
            # Temporary files
            #
            tmpfile = grass.tempfile()
            tmp = "tmp." + grass.basename(tmpfile)  # use its basename

            tmp_p6s = grass.tempfile()  # 6S Parameters ASCII file
            tmp_atm_cor = "%s_cor_out" % tmp  # Atmospherically Corrected Img

            p6s.export_ascii(tmp_p6s)

            # Process band-wise atmospheric correction with 6s
            msg = "6S parameters:\n\n"
            msg += p6s.parameters
            g.message(msg)

            # inform about input's range?
            input_range = grass.parse_command('r.info', flags='r', map=inputband)
            input_range['min'] = float(input_range['min'])
            input_range['max'] = float(input_range['max'])
            msg = "Input range: %.2f ~ %.2f" % (input_range['min'], input_range['max'])
            g.message(msg)

            #
            # Applying 6S Atmospheric Correction algorithm
            #
            run_i_atcorr(radiance_flag,
                         inputband,
                         input_range,
                         elevation_map,
                         visibility_map,
                         tmp_p6s,
                         tmp_atm_cor,
                         (0,1))
        
            # inform about output's range?
            output_range = grass.parse_command('r.info', flags='r', map=tmp_atm_cor)
            output_range['min'] = float(output_range['min'])
            output_range['max'] = float(output_range['max'])
            msg = "Output range: %.2f ~ %.2f" \
                % (output_range['min'], output_range['max'])
            g.message(msg)

            # add suffix to basename & rename end product
            atm_cor_nam = ("%s%s.%s" % (prefix, suffix, band))
            run('g.rename', rast=(tmp_atm_cor, atm_cor_nam))
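
The acquisition metadata line assembled above (month, day, GMT in decimal hours, centre longitude, centre latitude) looks like this for made-up values:

# made-up acquisition values, only to show the assembled string
mon, day, gmt, lon, lat = 7, 15, 9.25, 23.7, 38.0
print(str(mon) + ' ' + str(day) + ' ' + str(gmt) + ' ' +
      str(lon) + ' ' + str(lat))
# -> 7 15 9.25 23.7 38.0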
Example No. 12
def main():

    pan = options['pan']
    msxlst = options['msx'].split(',')
    outputsuffix = options['suffix']
    custom_ratio = options['ratio']
    center = options['center']
    center2 = options['center2']
    modulation = options['modulation']
    modulation2 = options['modulation2']

    if options['trim']:
        trimming_factor = float(options['trim'])
    else:
        trimming_factor = False

    histogram_match = flags['l']
    second_pass = flags['2']
    color_match = flags['c']

#    # Check & warn user about "ns == ew" resolution of current region ======
#    region = grass.region()
#    nsr = region['nsres']
#    ewr = region['ewres']
#
#    if nsr != ewr:
#        msg = ('>>> Region's North:South ({ns}) and East:West ({ew}) '
#               'resolutions do not match!')
#        msg = msg.format(ns=nsr, ew=ewr)
#        g.message(msg, flags='w')

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    panres = images[pan].nsres  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    run('g.region', res=panres)  # Respect extent, change resolution
    g.message("|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        g.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        g.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            g.message('Using custom ratio, overriding standard method!',
                      flags='w')

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            g.message("   > Retrieving image resolutions")

            msxres = images[msx].nsres

            # check
            if panres == msxres:
                msg = ("The Panchromatic's image resolution ({pr}) "
                       "equals to the Multi-Spectral's one ({mr}). "
                       "Something is probably not right! "
                       "Please check your input images.")
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
            msg_ratio = ('   >> Resolution ratio '
                         'low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}')
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            g.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            g.message("   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                      "   >>> If you insist, force it via the <ratio> option!",
                      flags='i')
            second_pass = bool(0)

        #
        # 2. High Pass Filtering
        #

        g.message('\n|2 High Pass Filtering the Panchromatic Image')

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = 'tmp.' + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = '{tmp}_pan_hpf'.format(tmp=tmp)  # HPF image
        tmp_msx_blnr = '{tmp}_msx_blnr'.format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = '{tmp}_msx_hpf'.format(tmp=tmp)  # Fused image
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run('r.mfilter', input=pan, filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title='High Pass Filtered Panchromatic image',
            overwrite=True)

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            tmp_pan_hpf_2 = '{tmp}_pan_hpf_2'.format(tmp=tmp)  # 2nd Pass HPF image
            tmp_hpf_matrix_2 = grass.tempfile()  # 2nd Pass ASCII filter
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run('r.mfilter',
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title='2-High-Pass Filtered Panchromatic Image',
                overwrite=True)

        #
        # 3. Upsampling low resolution image
        #

        g.message("\n|3 Upsampling (bilinearly) low resolution image")

        run('r.resamp.interp',
            method='bilinear', input=msx, output=tmp_msx_blnr, overwrite=True)

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        g.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " \
            "Modulating Factor"
        g.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        g.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx, sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        g.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        g.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        g.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = '{hpf} = {msx} + {pan} * {wgt}'
        fusion = fusion.format(hpf=tmp_msx_hpf, msx=tmp_msx_blnr,
                               pan=tmp_pan_hpf, wgt=weighting)
        grass.mapcalc(fusion)

        # command history
        hst = 'Weighting applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}'
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            g.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            g.message("   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = '   >> 2nd Pass Modulating Factor: {m:.2f}'
            g.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            g.message("\n|5+ Adding small-kernel-based weighted 2nd HPFi "
                      "back to fused image")

            add_back = '{final} = {msx_hpf} + {pan_hpf} * {wgt}'
            add_back = add_back.format(final=tmp_msx_hpf, msx_hpf=tmp_msx_hpf,
                                       pan_hpf=tmp_pan_hpf_2, wgt=weighting_2)
            grass.mapcalc(add_back)

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        if color_match:
            g.message("\n|* Matching output to input color table")
            run('r.colors', map=tmp_msx_hpf, raster=msx)

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Sectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            g.message("\n|+ Matching histogram of Pansharpened image "
                      "to %s" % (msx), flags='v')

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            # expression for mapcalc
            lhm = '{out} = ({hpf} - {hpfavg}) / {hpfsd} * {msxsd} + {msxavg}'
            lhm = lhm.format(out=tmp_msx_hpf, hpf=tmp_msx_hpf,
                             hpfavg=msx_hpf_avg, hpfsd=msx_hpf_sd,
                             msxsd=msx_sd, msxavg=msx_avg)

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = '\n|* Trimming output image border pixels by '
            msg += '{factor} times the low resolution\n'.format(factor=tf)
            nsew = '   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}'
            nsew = nsew.format(n=region.n, s=region.s, e=region.e, w=region.w)
            msg += nsew

            g.message(msg)

            # re-set borders
            region.n -= tf * images[msx].nsres
            region.s += tf * images[msx].nsres
            region.e -= tf * images[msx].ewres
            region.w += tf * images[msx].ewres

            # communicate and act
            msg = '   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}'
            msg = msg.format(n=region.n, s=region.s, e=region.e, w=region.w)
            g.message(msg)

            # modify only the extent
            run('g.region',
                n=region.n, s=region.s, e=region.e, w=region.w)
            trim = "{out} = {input}".format(out=tmp_msx_hpf, input=tmp_msx_hpf)
            grass.mapcalc(trim)

        #
        # End of Algorithm

        # history entry
        run("r.support", map=tmp_msx_hpf, history="\n".join(cmd_history))

        # add suffix to basename & rename end product
        msx_name = "{base}.{suffix}"
        msx_name = msx_name.format(base=msx.split('@')[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # visualising-related information
    grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Original Region restored")
    g.message("\n>>> Hint, rebalancing colors (via i.colors.enhance) "
              "may improve appearance of RGB composites!",
              flags='i')
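
A quick numeric illustration of the weighting rule used in step 4 above, Weighting = StdDev(MSx) / StdDev(HPFi) * Modulating Factor, with made-up statistics:

# hypothetical values, not statistics of real imagery
msx_sd, hpf_sd, modulator = 15.0, 60.0, 0.3
weighting = msx_sd / hpf_sd * modulator
print(weighting)  # -> 0.075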
Example No. 13
def main():
    global tmp_hpf_matrix

    pan = options['pan']
    msxlst = options['msx'].split(',')
    outputprefix = options['outputprefix']
    custom_ratio = options['ratio']
    center = options['center']
    center2 = options['center2']
    modulation = options['modulation']
    modulation2 = options['modulation2']
    histogram_match = flags['l']
    second_pass = flags['2']

    # Check & warn user about "ns == ew" resolution of current region ======
    region = grass.region()
    nsr = region['nsres']
    ewr = region['ewres']

    if nsr != ewr:
        g.message(">>> Region's North:South (%s) and East:West (%s)"
                  "resolutions do not match!" % (nsr, ewr), flags='w')
    # ======================================================================

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    panres = images[pan].nsres  # Panchromatic resolution

    run('g.region', res=panres)  # Respect extent, change resolution
    g.message("|  Region's resolution set to %f" % panres)


    for msx in msxlst:  # Loop over Multi-Spectral images |||||||||||||||||||

        global tmp

        # Inform
        g.message("\nProcessing image: %s" % msx)

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = ''

        # -------------------------------------------------------------------
        # 1. Compute Ratio
        # -------------------------------------------------------------------

        g.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            global ratio
            ratio = float(custom_ratio)
            g.message('Using custom ratio, overriding standard method!',
                      flags='w')

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            g.message("   > Retrieving image resolutions")

            msxres = images[msx].nsres
            ratio = msxres / panres
            msg_ratio = '   >> Low (%.3f) to high resolution (%.3f) ratio: %.1f'\
                % (msxres, panres, ratio)
            g.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            g.message("   >>> Ratio < 5.5 -- WON'T perform 2nd pass! Use <ratio> option to override.",
                      flags='i')
            second_pass = False

        # -------------------------------------------------------------------
        # 2. High Pass Filtering
        # -------------------------------------------------------------------

        g.message('\n|2 High Pass Filtering the Panchromatic Image')

        # ========================================== end of Temporary files #
        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = "tmp." + grass.basename(tmpfile)  # use its basenam
        tmp_pan_hpf = "%s_pan_hpf" % tmp  # HPF image
        tmp_msx_blnr = "%s_msx_blnr" % tmp  # Upsampled MSx
        tmp_msx_hpf = "%s_msx_hpf" % tmp  # Fused image

        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        if second_pass and ratio > 5.5:  # 2nd Pass?
            tmp_pan_hpf_2 = "%s_pan_hpf_2" % tmp  # 2nd Pass HPF image
            tmp_hpf_matrix_2 = grass.tempfile()  # 2nd Pass ASCII filter

        # Temporary files ===================================================

        # Construct Filter
        hpf = High_Pass_Filter(ratio, center, modulation, False, None)
        hpf_ascii(center, hpf, tmp_hpf_matrix, 1)

        # Construct 2nd Filter
        if second_pass and ratio > 5.5:
            hpf_2 = High_Pass_Filter(ratio, center2, None, True, modulation2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, 2)

        # Filtering
        run('r.mfilter', input=pan, filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title="High Pass Filtered Panchromatic image",
            overwrite=True)

        # 2nd Filtering
        if second_pass and ratio > 5.5:
            run('r.mfilter', input=pan, filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title="2-High-Pass Filtered Panchromatic Image",
                overwrite=True)

        # -------------------------------------------------------------------
        # 3. Upsampling low resolution image
        # -------------------------------------------------------------------

        g.message("\n|3 Upsampling (bilinearly) low resolution image")

        # resample -- named "linear" in G7
        run('r.resamp.interp',
            method='bilinear', input=msx, output=tmp_msx_blnr, overwrite=True)

        # -------------------------------------------------------------------
        # 4. Weighting the High Pass Filtered image(s)
        # -------------------------------------------------------------------

        g.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * "
        "Modulating Factor"
        g.message(msg_w)
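        # A short numeric sketch of the formula above, with hypothetical values
        # (assuming hpf_weight() implements it directly):
        #   StdDev(MSx) = 25.0, StdDev(HPFi) = 10.0, Modulating Factor = 0.3
        #   Weighting   = 25.0 / 10.0 * 0.3 = 0.75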

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        g.message("   >> StdDev of <%s>: %.3f" % (msx, msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        g.message("   >> StdDev of HPFi: %.3f" % hpf_sd)

        # Modulating factor
        g.message("   >> Modulating Factor: %.2f" % modulator)

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)

        # -------------------------------------------------------------------
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        # -------------------------------------------------------------------

        g.message("\n|5 Adding weighted HPFi to upsampled image")

        fusion = "%s = %s + %s * %f" \
            % (tmp_msx_hpf, tmp_msx_blnr, tmp_pan_hpf, weighting)
        grass.mapcalc(fusion)
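        # For illustration only, with the hypothetical weight 0.75, the generated
        # expression would read something like:
        #   tmp.<random>_msx_hpf = tmp.<random>_msx_blnr + tmp.<random>_pan_hpf * 0.750000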

        # history ***********************************************************
        cmd_history += "Weigthing applied: %.3f / %.3f * %.3f | " \
            % (msx_sd, hpf_sd, modulator)

        if second_pass and ratio > 5.5:
            # ---------------------------------------------------------------
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            # ---------------------------------------------------------------
            g.message("\n|4+ 2nd Pass Weighting the HPFi")

            # Compute 2nd Pass Weighting
            # Formula? Don't inform again...

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            g.message("   >> StdDev of 2nd HPFi: %.3f" % hpf_2_sd)

            # Modulating factor #2
            g.message("   >> 2nd Pass Modulating Factor: %.2f" % modulator_2)

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            # ---------------------------------------------------------------
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            # ---------------------------------------------------------------

            g.message("\n|5+ Adding small-kernel-based weighted 2nd HPFi "
                      "back to fused image")

            add_back = "%s = %s + %s * %f" \
                % (tmp_msx_hpf, tmp_msx_hpf, tmp_pan_hpf_2, weighting_2)
            grass.mapcalc(add_back)

            # 2nd Pass history entry ****************************************
            cmd_history += "2nd Pass Weighting: %s / %s * %s | " \
                % (msx_sd, hpf_2_sd, modulator_2)

        # -------------------------------------------------------------------
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Sectral image(s)
        # -------------------------------------------------------------------
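        # A worked sketch of the linear matching below, with hypothetical stats:
        #   fused mean = 120, fused stddev = 30, MSx mean = 100, MSx stddev = 25
        #   a cell value of 150 becomes (150 - 120) / 30 * 25 + 100 = 125,
        #   i.e. the output is rescaled to the MSx mean and standard deviation.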

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            g.message("\n|  Matching histogram of Pansharpened image"
                      "to %s" % (msx), flags='v')

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            # expression for mapcalc
            lhm = "%s = (%s - %f) / %f * %f + %f" \
                % (tmp_msx_hpf,
                   tmp_msx_hpf, msx_hpf_avg,
                   msx_hpf_sd, msx_sd, msx_avg)

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)
            
            # update history string *****************************************
            cmd_history += "Linear Histogram Matching: %s |" % lhm

        # histogram matching - history entry ********************************
        run("r.support", map=tmp_msx_hpf, history=cmd_history)

        # Rename end product
        run("g.rename", rast=(tmp_msx_hpf, "%s_%s" % (msx, outputprefix)))

    # visualising output
    g.message("\n>>> Rebalance colors "
              "(e.g. via i.colors.enhance) before working on RGB composites!",
              flags='i')
Ejemplo n.º 14
def main():
    input = options['input']
    db_table = options['db_table']
    output = options['output']
    key = options['key']

    mapset = grass.gisenv()['MAPSET']

    if db_table:
        input = db_table

    if not output:
        tmpname = input.replace('.', '_')
        output = grass.basename(tmpname)

    # check if table exists
    try:
        nuldev = open(os.devnull, 'w+')
        s = grass.read_command('db.tables', flags='p', quiet=True, stderr=nuldev)
        nuldev.close()
    except CalledModuleError:
        # check connection parameters, set if uninitialized
        grass.read_command('db.connect', flags='c')
        s = grass.read_command('db.tables', flags='p', quiet=True)

    for l in s.splitlines():
        if l == output:
            if grass.overwrite():
                grass.warning(_("Table <%s> already exists and will be "
                                "overwritten") % output)
                grass.write_command('db.execute', input='-',
                                    stdin="DROP TABLE %s" % output)
                break
            else:
                grass.fatal(_("Table <%s> already exists") % output)

    # treat DB as real vector map...
    layer = db_table if db_table else None

    vopts = {}
    if options['encoding']:
        vopts['encoding'] = options['encoding']

    try:
        grass.run_command('v.in.ogr', flags='o', input=input, output=output,
                          layer=layer, quiet=True, **vopts)
    except CalledModuleError:
        if db_table:
            grass.fatal(
                _("Input table <%s> not found or not readable") %
                input)
        else:
            grass.fatal(_("Input DSN <%s> not found or not readable") % input)

    # rename ID col if requested from cat to new name
    if key:
        grass.write_command('db.execute', quiet=True, input='-',
                            stdin="ALTER TABLE %s ADD COLUMN %s integer" %
                                  (output, key))
        grass.write_command('db.execute', quiet=True, input='-',
                            stdin="UPDATE %s SET %s=cat" % (output, key))

    # ... and immediately drop the empty geometry
    vectfile = grass.find_file(output, element='vector', mapset=mapset)['file']
    if not vectfile:
        grass.fatal(_("Something went wrong. Should not happen"))
    else:
        # remove the vector part
        grass.run_command('v.db.connect', quiet=True, map=output, layer='1',
                          flags='d')
        grass.run_command('g.remove', flags='f', quiet=True, type='vector',
                          name=output)

    # get rid of superfluous auto-added cat column (and cat_ if present)
    nuldev = open(os.devnull, 'w+')
    grass.run_command('db.dropcolumn', quiet=True, flags='f', table=output,
                      column='cat', stdout=nuldev, stderr=nuldev)
    nuldev.close()

    records = grass.db_describe(output)['nrows']
    grass.message(_("Imported table <%s> with %d rows") % (output, records))
Ejemplo n.º 15
def main():

    global acq_time, esd

    """1st, get input, output, options and flags"""

    spectral_bands = options['band'].split(',')
    outputsuffix = options['outputsuffix']
    utc = options['utc']
    doy = options['doy']
    sea = options['sea']

    radiance = flags['r']
    if radiance and outputsuffix == 'toar':
        outputsuffix = 'rad'
        g.message("Output-suffix set to %s" % outputsuffix)

    keep_region = flags['k']
    info = flags['i']

    # -----------------------------------------------------------------------
    # Equations
    # -----------------------------------------------------------------------

    if info:
        # conversion to Radiance based on (1)
        msg = "|i Spectral Radiance = K * DN / Effective Bandwidth | " \
              "Reflectance = ( Pi * Radiance * ESD^2 ) / BAND_Esun * cos(SZA)"
        g.message(msg)
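        # A worked sketch of the first conversion, with hypothetical numbers:
        #   K = 0.01, DN = 500, Effective Bandwidth = 0.05
        #   Spectral Radiance = 0.01 * 500 / 0.05 = 100 (W/m^2/sr/μm)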

    # -----------------------------------------------------------------------
    # List images and their properties
    # -----------------------------------------------------------------------

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?

#    imglst = [pan]
#    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in spectral_bands:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    # -----------------------------------------------------------------------
    # Temporary Region and Files
    # -----------------------------------------------------------------------

    if not keep_region:
        grass.use_temp_region()  # to safely modify the region
    tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
    tmp = "tmp." + grass.basename(tmpfile)  # use its basename

    # -----------------------------------------------------------------------
    # Global Metadata: Earth-Sun distance, Sun Zenith Angle
    # -----------------------------------------------------------------------

    # Earth-Sun distance
    if doy:
        g.message("|! Using Day of Year to calculate Earth-Sun distance.")
        esd = jd_to_esd(int(doy))

    elif (not doy) and utc:
        acq_utc = AcquisitionTime(utc)  # will hold esd (earth-sun distance)
        esd = acq_utc.esd

    else:
        grass.fatal(_("Either the UTC string or "
                      "the Day-of-Year (doy) are required!"))

    sza = 90 - float(sea)  # Sun Zenith Angle based on Sun Elevation Angle
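    # e.g. for a hypothetical Sun Elevation Angle of 60 degrees: SZA = 90 - 60 = 30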

    # -----------------------------------------------------------------------
    # Loop processing over all bands
    # -----------------------------------------------------------------------
    for band in spectral_bands:

        global tmp_rad

        # -------------------------------------------------------------------
        # Match bands region if... ?
        # -------------------------------------------------------------------

        if not keep_region:
            run('g.region', rast=band)   # ## FixMe?
            msg = "\n|! Region matching the %s spectral band" % band
            g.message(msg)

        elif keep_region:
            msg = "|! Operating on current region"
            g.message(msg)

        # -------------------------------------------------------------------
        # Band dependent metadata for Spectral Radiance
        # -------------------------------------------------------------------

        g.message("\n|* Processing the %s band" % band, flags='i')

        # Why is this necessary?  Any function to remove the mapset's name?
        if '@' in band:
            band = (band.split('@')[0])

        # get absolute calibration factor
        acf = float(CF_BW_ESUN[band][2])
        acf_msg = "K=" + str(acf)

        # effective bandwidth
        bw = float(CF_BW_ESUN[band][0])

        # -------------------------------------------------------------------
        # Converting to Spectral Radiance
        # -------------------------------------------------------------------

        msg = "\n|> Converting to Spectral Radiance " \
              "| Conversion Factor %s, Bandwidth=%.3f" % (acf_msg, bw)
        g.message(msg)

        # convert
        tmp_rad = "%s.Radiance" % tmp  # Temporary Map
        rad = "%s = %f * %s / %f" \
            % (tmp_rad, acf, band, bw)  # Attention: 32-bit calculations requ.
        grass.mapcalc(rad, overwrite=True)

        # strings for metadata
        history_rad = rad
        history_rad += "Conversion Factor=%f; Effective Bandwidth=%.3f" \
            % (acf, bw)
        title_rad = ""
        description_rad = "Top-of-Atmosphere %s band spectral Radiance " \
                          "[W/m^2/sr/μm]" % band
        units_rad = "W / sq.m. / μm / ster"

        if not radiance:

            # ---------------------------------------------------------------
            # Converting to Top-of-Atmosphere Reflectance
            # ---------------------------------------------------------------

            global tmp_toar

            msg = "\n|> Converting to Top-of-Atmosphere Reflectance"
            g.message(msg)

            esun = float(CF_BW_ESUN[band][1])
            msg = "   %s band mean solar exoatmospheric irradiance=%.2f" \
                % (band, esun)
            g.message(msg)

            # convert
            tmp_toar = "%s.Reflectance" % tmp  # Spectral Reflectance
            toar = "%s = %f * %s * %f^2 / %f * cos(%f)" \
                % (tmp_toar, math.pi, tmp_rad, esd, esun, sza)
            grass.mapcalc(toar, overwrite=True)

            # report range? Using a flag and skip actual conversion?
            # todo?

            # strings for metadata
            title_toar = "%s band (Top of Atmosphere Reflectance)" % band
            description_toar = "Top of Atmosphere %s band spectral Reflectance" \
                % band
            units_toar = "Unitless planetary reflectance"
            history_toar = "K=%f; Bandwidth=%.1f; ESD=%f; Esun=%.2f; SZA=%.1f" \
                % (acf, bw, esd, esun, sza)

        if tmp_toar:

            # history entry
            run("r.support", map=tmp_toar, title=title_toar,
                units=units_toar, description=description_toar,
                source1=source1_toar, source2=source2_toar,
                history=history_toar)

            # add suffix to basename & rename end product
            toar_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_toar, toar_name))

        elif tmp_rad:

            # history entry
            run("r.support", map=tmp_rad,
                title=title_rad, units=units_rad, description=description_rad,
                source1=source1_rad, source2=source2_rad, history=history_rad)

            # add suffix to basename & rename end product
            rad_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_rad, rad_name))

    # visualising-related information
    if not keep_region:
        grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Region's resolution restored!")
    g.message("\n>>> Hint: rebalancing colors "
              "(i.colors.enhance) may improve appearance of RGB composites!",
              flags='i')
Ejemplo n.º 16
def main():
    global vrtfile, tmpfile

    infile  = options['input']
    rast = options['output']
    also = flags['a']

    #### check for gdalinfo (just to check if installation is complete)
    if not grass.find_program('gdalinfo', '--help'):
	grass.fatal(_("'gdalinfo' not found, install GDAL tools first (http://www.gdal.org)"))

    pid = str(os.getpid())
    tmpfile = grass.tempfile()

    ################### let's go

    spotdir = os.path.dirname(infile)
    spotname = grass.basename(infile, 'hdf')

    if rast:
	name = rast
    else:
	name = spotname

    if not grass.overwrite() and grass.find_file(name)['file']:
	grass.fatal(_("<%s> already exists. Aborting.") % name)

    # still a ZIP file?  (is this portable?? see the r.in.srtm script for ideas)
    if infile.lower().endswith('.zip'):
	grass.fatal(_("Please extract %s before import.") % infile)

    try:
	p = grass.Popen(['file', '-ib', infile], stdout = grass.PIPE)
	s = p.communicate()[0]
	if s == "application/x-zip":
	    grass.fatal(_("Please extract %s before import.") % infile)
    except:
	pass

    ### create VRT header for NDVI

    projfile = os.path.join(spotdir, "0001_LOG.TXT")
    vrtfile = tmpfile + '.vrt'

    # first process the NDVI:
    grass.try_remove(vrtfile)
    create_VRT_file(projfile, vrtfile, infile)

    ## let's import the NDVI map...
    grass.message(_("Importing SPOT VGT NDVI map..."))
    try:
        grass.run_command('r.in.gdal', input=vrtfile, output=name)
    except CalledModuleError:
        grass.fatal(_("An error occurred. Stop."))

    grass.message(_("Imported SPOT VEGETATION NDVI map <%s>.") % name)

    #################
    ## http://www.vgt.vito.be/faq/FAQS/faq19.html
    # What is the relation between the digital number and the real NDVI ?
    # Real NDVI =coefficient a * Digital Number + coefficient b
    #           = a * DN +b
    #
    # Coefficient a = 0.004
    # Coefficient b = -0.1
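    #
    # A worked example with a hypothetical digital number:
    #   DN = 200  ->  NDVI = 0.004 * 200 - 0.1 = 0.7
    # (this is exactly what the r.mapcalc expression below applies per cell)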

    # clone current region
    # switch to a temporary region
    grass.use_temp_region()

    grass.run_command('g.region', raster = name, quiet = True)

    grass.message(_("Remapping digital numbers to NDVI..."))
    tmpname = "%s_%s" % (name, pid)
    grass.mapcalc("$tmpname = 0.004 * $name - 0.1", tmpname = tmpname, name = name)
    grass.run_command('g.remove', type = 'raster', name = name, quiet = True, flags = 'f')
    grass.run_command('g.rename', raster = (tmpname, name), quiet = True)

    # write cmd history:
    grass.raster_history(name)

    #apply color table:
    grass.run_command('r.colors', map = name, color = 'ndvi', quiet = True)

    ##########################
    # second, optionally process the SM quality map:
    
    #SM Status Map
    # http://nieuw.vgt.vito.be/faq/FAQS/faq22.html
    #Data about
    # Bit NR 7: Radiometric quality for B0 coded as 0 if bad and 1 if good
    # Bit NR 6: Radiometric quality for B2 coded as 0 if bad and 1 if good
    # Bit NR 5: Radiometric quality for B3 coded as 0 if bad and 1 if good
    # Bit NR 4: Radiometric quality for MIR coded as 0 if bad and 1 if good
    # Bit NR 3: land code 1 or water code 0
    # Bit NR 2: ice/snow code 1 , code 0 if there is no ice/snow
    # Bit NR 1:  0      0       1          1
    # Bit NR 0:  0      1       0          1
    #            clear  shadow  uncertain  cloud
    #
    #Note:
    # pos 7     6    5    4    3    2   1   0 (bit position)
    #   128    64   32   16    8    4   2   1 (values for 8 bit)
    #
    #
    # Bit 4-7 should be 1: their sum is 240
    # Bit 3   land code, should be 1, sum up to 248 along with higher bits
    # Bit 2   ice/snow code
    # Bit 0-1 should be 0
    #
    # A good map threshold: >= 248
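    #
    # Worked example for the threshold, using the hypothetical value 252:
    #   252 = 11111100 binary
    #   bits 4-7 set (value 240): all four radiometric-quality flags are good
    #   bit 3 set   (value 8)   : land
    #   bits 0-1 clear          : clear sky
    #   252 >= 248, so the cell passes; a value such as 155 (10011011) does not.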

    if also:
	grass.message(_("Importing SPOT VGT NDVI quality map..."))
	grass.try_remove(vrtfile)
	qname = spotname.replace('NDV','SM')
	qfile = os.path.join(spotdir, qname)
	create_VRT_file(projfile, vrtfile, qfile)

	## let's import the SM quality map...
	smfile = name + '.sm'
        try:
            grass.run_command('r.in.gdal', input=vrtfile, output=smfile)
        except CalledModuleError:
            grass.fatal(_("An error occurred. Stop."))

	# some of the possible values:
	rules = [r + '\n' for r in [
	    '8 50 50 50',
	    '11 70 70 70',
	    '12 90 90 90',
	    '60 grey',
	    '155 blue',
	    '232 violet',
	    '235 red',
	    '236 brown',
	    '248 orange',
	    '251 yellow',
	    '252 green'
	    ]]
	grass.write_command('r.colors', map = smfile, rules = '-', stdin = rules)

	grass.message(_("Imported SPOT VEGETATION SM quality map <%s>.") % smfile)
	grass.message(_("Note: A snow map can be extracted by category 252 (d.rast %s cat=252)") % smfile)
	grass.message("")
	grass.message(_("Filtering NDVI map by Status Map quality layer..."))

	filtfile = "%s_filt" % name
	grass.mapcalc("$filtfile = if($smfile % 4 == 3 || ($smfile / 16) % 16 == 0, null(), $name)",
		      filtfile = filtfile, smfile = smfile, name = name)
	grass.run_command('r.colors', map = filtfile, color = 'ndvi', quiet = True)
	grass.message(_("Filtered SPOT VEGETATION NDVI map <%s>.") % filtfile)

	# write cmd history:
	grass.raster_history(smfile)
	grass.raster_history(filtfile)

    grass.message(_("Done."))
Ejemplo n.º 17
def main():
    input = options["input"]
    gdal_config = options["gdal_config"]
    gdal_doo = options["gdal_doo"]
    db_table = options["db_table"]
    output = options["output"]
    key = options["key"]

    mapset = grass.gisenv()["MAPSET"]

    if db_table:
        input = db_table

    if not output:
        tmpname = input.replace(".", "_")
        output = grass.basename(tmpname)

    # check if table exists
    try:
        nuldev = open(os.devnull, "w+")
        s = grass.read_command("db.tables", flags="p", quiet=True, stderr=nuldev)
        nuldev.close()
    except CalledModuleError:
        # check connection parameters, set if uninitialized
        grass.read_command("db.connect", flags="c")
        s = grass.read_command("db.tables", flags="p", quiet=True)

    for l in decode(s).splitlines():
        if l == output:
            if grass.overwrite():
                grass.warning(
                    _("Table <%s> already exists and will be " "overwritten") % output
                )
                grass.write_command(
                    "db.execute", input="-", stdin="DROP TABLE %s" % output
                )
                break
            else:
                grass.fatal(_("Table <%s> already exists") % output)

    # treat DB as real vector map...
    layer = db_table if db_table else None

    vopts = {}
    if options["encoding"]:
        vopts["encoding"] = options["encoding"]

    try:
        grass.run_command(
            "v.in.ogr",
            flags="o",
            input=input,
            gdal_config=gdal_config,
            gdal_doo=gdal_doo,
            output=output,
            layer=layer,
            quiet=True,
            **vopts,
        )
    except CalledModuleError:
        if db_table:
            grass.fatal(_("Input table <%s> not found or not readable") % input)
        else:
            grass.fatal(_("Input DSN <%s> not found or not readable") % input)

    # rename ID col if requested from cat to new name
    if key:
        grass.write_command(
            "db.execute",
            quiet=True,
            input="-",
            stdin="ALTER TABLE %s ADD COLUMN %s integer" % (output, key),
        )
        grass.write_command(
            "db.execute",
            quiet=True,
            input="-",
            stdin="UPDATE %s SET %s=cat" % (output, key),
        )

    # ... and immediately drop the empty geometry
    vectfile = grass.find_file(output, element="vector", mapset=mapset)["file"]
    if not vectfile:
        grass.fatal(_("Something went wrong. Should not happen"))
    else:
        # remove the vector part
        grass.run_command("v.db.connect", quiet=True, map=output, layer="1", flags="d")
        grass.run_command("g.remove", flags="f", quiet=True, type="vector", name=output)

    # get rid of superfluous auto-added cat column (and cat_ if present)
    nuldev = open(os.devnull, "w+")
    grass.run_command(
        "db.dropcolumn",
        quiet=True,
        flags="f",
        table=output,
        column="cat",
        stdout=nuldev,
        stderr=nuldev,
    )
    nuldev.close()

    records = grass.db_describe(output)["nrows"]
    grass.message(_("Imported table <%s> with %d rows") % (output, records))
Ejemplo n.º 18
def main():

    global acq_time, esd
    """1st, get input, output, options and flags"""

    spectral_bands = options['band'].split(',')
    outputsuffix = options['outputsuffix']
    utc = options['utc']
    doy = options['doy']
    sea = options['sea']

    radiance = flags['r']
    keep_region = flags['k']

    #    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
    #    imglst = [spectral_bands]
    #    images = {}
    #    for img in imglst:  # Retrieving Image Info
    #        images[img] = Info(img, mapset)
    #        images[img].read()

    # -----------------------------------------------------------------------
    # Temporary Region and Files
    # -----------------------------------------------------------------------

    if not keep_region:
        grass.use_temp_region()  # to safely modify the region
    tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
    tmp = "tmp." + grass.basename(tmpfile)  # use its basename

    # -----------------------------------------------------------------------
    # Global Metadata: Earth-Sun distance, Sun Zenith Angle
    # -----------------------------------------------------------------------

    # Earth-Sun distance
    if doy:
        esd = jd_to_esd(int(doy))

    elif utc:
        acq_utc = AcquisitionTime(utc)  # will hold esd (earth-sun distance)
        acq_dat = datetime(acq_utc.year, acq_utc.month, acq_utc.day)
        esd = acq_utc.esd

    else:
        grass.fatal(
            _("Either the UTC string or "
              "the Day-of-Year (doy) are required!"))

    sza = 90 - float(sea)  # Sun Zenith Angle based on Sun Elevation Angle

    # -----------------------------------------------------------------------
    # Loop processing over all bands
    # -----------------------------------------------------------------------
    for band in spectral_bands:

        global tmp_rad

        g.message("|* Processing the %s spectral band" % band, flags='i')

        if not keep_region:
            g.message("\n|! Matching region to %s" % band)  # set region
            run('g.region', rast=band)  # ## FixMe

        # -------------------------------------------------------------------
        # Converting to Spectral Radiance
        # -------------------------------------------------------------------

        msg = "\n|> Converting to Spectral Radiance: " \
#            "L(λ) = 10^4 x DN(λ) / CalCoef(λ) x Bandwidth(λ)"  # Unicode? ##

        g.message(msg)

        # -------------------------------------------------------------------
        # Band dependent metadata for Spectral Radiance
        # -------------------------------------------------------------------

        # Why is this necessary?  Any function to remove the mapset's name?
        if '@' in band:
            band_key = (band.split('@')[0])
        else:
            band_key = band

        # get coefficients
        if acq_dat < cc_update:
            g.message("\n|! Using Pre-2001 Calibration Coefficient values",
                      flags='i')
            cc = float(CC[band_key][0])
        else:
            cc = float(CC[band_key][1])

        # get bandwidth
        bw = float(CC[band_key][2])

        # inform
        msg = "   [Calibration Coefficient=%d, Bandwidth=%.1f]" \
            % (cc, bw)
        g.message(msg)

        # convert
        tmp_rad = "%s.Radiance" % tmp  # Temporary Map
        rad = "%s = 10^4 * %s / %f * %f" \
            % (tmp_rad, band, cc, bw)
        grass.mapcalc(rad, overwrite=True)
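        # A numeric sketch with hypothetical values: DN = 300, CalCoef = 1500,
        # Bandwidth = 0.08  ->  L = 10^4 * 300 / 1500 * 0.08 = 160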

        # string for metadata
        history_rad = rad
        history_rad += "Calibration Coefficient=%d; Effective Bandwidth=%.1f" \
            % (cc, bw)
        title_rad = "%s band (Spectral Radiance)" % band
        units_rad = "W / m2 / μm / ster"
        description_rad = "At-sensor %s band spectral Radiance (W/m2/μm/sr)" \
            % band

        if not radiance:

            # ---------------------------------------------------------------
            # Converting to Top-of-Atmosphere Reflectance
            # ---------------------------------------------------------------

            global tmp_toar

            msg = "\n|> Converting to Top-of-Atmosphere Reflectance" \
                  # "ρ(p) = π x L(λ) x d^2 / ESUN(λ) x cos(θ(S))"  # Unicode?

            g.message(msg)

            # ---------------------------------------------------------------
            # Band dependent metadata for Spectral Radiance
            # ---------------------------------------------------------------

            # get esun
            esun = CC[band_key][3]

            # inform
            msg = "   [Earth-Sun distane=%f, Mean solar exoatmospheric " \
                "irradiance=%.1f]" % (esd, esun)
            g.message(msg)

            # convert
            tmp_toar = "%s.Reflectance" % tmp  # Spectral Reflectance
            toar = "%s = %f * %s * %f^2 / %f * cos(%f)" \
                % (tmp_toar, math.pi, tmp_rad, esd, esun, sza)
            grass.mapcalc(toar, overwrite=True)

            # strings for output's metadata
            history_toar = toar
            history_toar += "ESD=%f; BAND_Esun=%f; SZA=%f" % (esd, esun, sza)
            title_toar = "%s band (Top of Atmosphere Reflectance)" % band
            units_toar = "Unitless planetary reflectance"
            description_toar = "Top of Atmosphere `echo ${BAND}` band spectral"
            " Reflectance (unitless)"

        if tmp_toar:

            # history entry
            run("r.support",
                map=tmp_toar,
                title=title_toar,
                units=units_toar,
                description=description_toar,
                source1=source1_toar,
                source2=source2_toar,
                history=history_toar)

            # add suffix to basename & rename end product
            toar_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_toar, toar_name))

        elif tmp_rad:

            # history entry
            run("r.support",
                map=tmp_rad,
                title=title_rad,
                units=units_rad,
                description=description_rad,
                source1=source1_rad,
                source2=source2_rad,
                history=history_rad)

            # add suffix to basename & rename end product
            rad_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_rad, rad_name))

    # visualising-related information
    if not keep_region:
        grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Region's resolution restored!")
    g.message(
        "\n>>> Hint: rebalancing colors "
        "(i.colors.enhance) may improve appearance of RGB composites!",
        flags='i')
Ejemplo n.º 19
def main():
    global tmp, sqltmp, tmpname, nuldev, vector, mask_found, rastertmp
    mask_found = False
    rastertmp = False
    #### setup temporary files
    tmp = grass.tempfile()
    sqltmp = tmp + ".sql"
    # we need a random name
    tmpname = grass.basename(tmp)

    nuldev = open(os.devnull, 'w')

    raster = options['raster']
    colprefix = options['column_prefix']
    vector = options['vector']
    layer = options['layer']
    percentile = options['percentile']

    ### setup enviro vars ###
    env = grass.gisenv()
    mapset = env['MAPSET']

    vs = vector.split('@')
    if len(vs) > 1:
	vect_mapset = vs[1]
    else:
	vect_mapset = mapset

    # does map exist in CURRENT mapset?
    if vect_mapset != mapset or not grass.find_file(vector, 'vector', mapset)['file']:
	grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    vector = vs[0]

    rastertmp = "%s_%s" % (vector, tmpname)

    # check the input raster map
    if not grass.find_file(raster, 'cell')['file']:
	grass.fatal(_("Raster map <%s> not found") % raster)

    # check presence of raster MASK, put it aside
    mask_found = bool(grass.find_file('MASK', 'cell')['file'])
    if mask_found:
	grass.message(_("Raster MASK found, temporarily disabled"))
	grass.run_command('g.rename', rast = ('MASK', tmpname + "_origmask"), quiet = True)

    # save current settings:
    grass.use_temp_region()

    # Temporarily aligning region resolution to $RASTER resolution
    # keep boundary settings
    grass.run_command('g.region', align = raster)

    # prepare raster MASK
    if grass.run_command('v.to.rast', input = vector, output = rastertmp,
			 use = 'cat', quiet = True) != 0:
	grass.fatal(_("An error occurred while converting vector to raster"))

    # dump cats to file to avoid "too many argument" problem:
    p = grass.pipe_command('r.category', map = rastertmp, fs = ';', quiet = True)
    cats = []
    for line in p.stdout:
	cats.append(line.rstrip('\r\n').split(';')[0])
    p.wait()

    number = len(cats)
    if number < 1:
	grass.fatal(_("No categories found in raster map"))

    # check if DBF driver used, in this case cut to 10 chars col names:
    try:
        fi = grass.vector_db(map = vector)[int(layer)]
    except KeyError:
	grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # Find out which table is linked to the vector map on the given layer
    if not fi['table']:
	grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))

    basecols = ['n', 'min', 'max', 'range', 'mean', 'stddev', 'variance', 'cf_var', 'sum']

    # we need at least three chars to distinguish [mea]n from [med]ian
    # so colprefix can't be longer than 6 chars with DBF driver
    if dbfdriver:
	colprefix = colprefix[:6]

    # do extended stats?
    if flags['e']:
	# namespace is limited in DBF but the % value is important
	if dbfdriver:
	    perccol = "per" + percentile
	else:
	    perccol = "percentile_" + percentile
	extracols = ['first_quartile', 'median', 'third_quartile'] + [perccol]
    else:
	extracols = []

    addcols = []
    for i in basecols + extracols:
	# check if column already present
	currcolumn = ("%s_%s" % (colprefix, i))
	if dbfdriver:
	    currcolumn = currcolumn[:10]

	if currcolumn in grass.vector_columns(vector, layer).keys():
	    if not flags['c']:
		grass.fatal((_("Cannot create column <%s> (already present). ") % currcolumn) +
			    _("Use -c flag to update values in this column."))
	else:
	    if i == "n":
		coltype = "INTEGER"
	    else:
		coltype = "DOUBLE PRECISION"
	    addcols.append(currcolumn + ' ' + coltype)

    if addcols:
	grass.verbose(_("Adding columns '%s'") % addcols)
	if grass.run_command('v.db.addcolumn', map = vector, columns = addcols) != 0:
	    grass.fatal(_("Adding columns failed. Exiting."))

    # calculate statistics:
    grass.message(_("Processing data (%d categories)...") % number)

    # get rid of any earlier attempts
    grass.try_remove(sqltmp)

    colnames = []
    for var in basecols + extracols:
	colname = '%s_%s' % (colprefix, var)
	if dbfdriver:
	    colname = colname[:10]
	colnames.append(colname)

    ntabcols = len(colnames)

    # do extended stats?
    if flags['e']:
	extstat = 'e'
    else:
	extstat = ""
	
    f = open(sqltmp, 'w')

    # do the stats
    p = grass.pipe_command('r.univar', flags = 't' + 'g' + extstat, map = raster, 
                      zones = rastertmp, percentile = percentile, fs = ';')

    first_line = 1
    for line in p.stdout:
	if first_line:
	    first_line = 0
	    continue

	vars = line.rstrip('\r\n').split(';')

	f.write("UPDATE %s SET" % fi['table'])
	i = 2
	first_var = 1
	for colname in colnames:
	    value = vars[i]
	    # convert nan, +nan, -nan to NULL
	    if value.lower().endswith('nan'):
		value = 'NULL'
	    if not first_var:
		f.write(" , ")
	    else:
		first_var = 0
	    f.write(" %s=%s" % (colname, value))
	    i += 1
	    # skip n_null_cells, mean_of_abs, sum_of_abs
	    if i == 3 or i == 8 or i == 13:
		i += 1

	f.write(" WHERE %s=%s;\n" % (fi['key'], vars[0]))

    p.wait()
    f.close()
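    # Each line written to the SQL file follows this pattern (hypothetical
    # table, column prefix and values shown for illustration):
    #   UPDATE mytable SET elev_n=42 , elev_min=101.5 , ... WHERE cat=3;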

    grass.message(_("Updating the database ..."))
    exitcode = grass.run_command('db.execute', input = sqltmp,
				 database = fi['database'], driver = fi['driver'])

    grass.run_command('g.remove', rast = 'MASK', quiet = True, stderr = nuldev)

    if exitcode == 0:
	grass.message((_("Statistics calculated from raster map <%s>") % raster) +
		      (_(" and uploaded to attribute table of vector map <%s>.") % vector))
    else:
	grass.warning(_("Failed to upload statistics to attribute table of vector map <%s>.") % vector)
    
    
    sys.exit(exitcode)
Ejemplo n.º 20
def main():
    """
    Main program: get names for input, output suffix, options and flags
    """
    input_list = options['image'].split(',')
    outputsuffix = options['suffix']

    # Select model based on author
    author_year = options['model']
    if 'elvidge' in author_year:
        version = author_year[7:]
        author_year = 'elvidge'
    else:
        version = None
    Model = MODELS[author_year]
    # ----------------------------

    # flags
    citation = flags['c']
    info = flags['i']
    extend_region = flags['x']
    timestamps = not flags['t']
    zero = flags['z']
    null = flags['n']  # either zero or null, not both --- FixMe! ###
    evaluation = flags['e']
    shell = flags['g']

    global temporary_maps
    temporary_maps = []


    msg = ("|i Inter-satellite calibration of DMSP-OLS Nighttime Stable "
        "Lights")
    g.message(msg)
    del(msg)

    '''Temporary Region and Files'''

    if extend_region:
        grass.use_temp_region()  # to safely modify the region

    tmpfile = grass.basename(grass.tempfile())
    tmp = "tmp." + tmpfile

    '''Loop over list of input images'''

    for image in input_list:

        satellite = image[0:3]
        year = image[3:7]

        '''If requested, match region to input image'''

        if extend_region:
            run('g.region', rast=image)   # ## FixMe?
            msg = "\n|! Matching region extent to map {name}"
            msg = msg.format(name=image)
            g.message(msg)
            del(msg)

        elif not extend_region:
            grass.warning(_('Operating on current region'))

        '''Retrieve coefficients'''

        msg = "\n|> Calibrating average visible Digital Number values "
        g.message(msg)
        del(msg)

        # if "version" == True use Elvidge, else use Liu2012 or Wu2013
        args = (satellite, year, version) if version else (satellite, year)
        model_parameters = retrieve_model_parameters(Model, *args)

#        # print model's generic equation?
#        if info:
#            print this
#            print that

        # split parameters in usable variables
        citation_string, coefficients, r2, mapcalc_formula = model_parameters
        msg = '|>>> Regression coefficients: ' + str(coefficients)
        msg += '\n' + '|>>> ' + r2
        g.message(msg)
        del(msg)

        # Temporary Map
        tmp_cdn = "{prefix}.Calibrated".format(prefix=tmp)
        temporary_maps.append(tmp_cdn)

        '''Formula for mapcalc'''

        equation = "{out} = {inputs}"
        calibration_formula = equation.format(out=tmp_cdn, inputs=mapcalc_formula)

        # alternatives
        if zero:
            zcf = "{out} = if(Input == 0, 0, {formula})"
            calibration_formula = zcf.format(out=tmp_cdn, formula=mapcalc_formula)
            msg = "\n|i Excluding zero cells from the analysis"
            g.message(msg)
            del(msg)

        elif null:
            ncf = "{out} = if(Input == 0, null(), {formula})"
            calibration_formula = ncf.format(out=tmp_cdn, formula=mapcalc_formula)
            msg = "\n|i Setting zero cells to NULL"
            g.message(msg)
            del(msg)

        # Compress even more? -----------------------------------------------
#        if zero or null:
#            zero = 0 if zero else ('null()')
#            equation = "{out} = if(Input == 0, {zn}, {formula})"
#            calibration_formula = equation.format(out=tmp_cdn, zero, formula=mapcalc_formula)
        # ----------------------------------------------- Compress even more?

        # replace the "dummy" string...
        calibration_formula = calibration_formula.replace("Input", image)
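        # For illustration only -- with a hypothetical linear model
        # (c0 = 0.1, c1 = 1.2) and an input image named "F121995", the string
        # built above might read:
        #   tmp.<random>.Calibrated = 0.1 + 1.2 * F121995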

        '''Calibrate'''

        if info:
            msg = "\n|i Mapcalc formula: {formula}"
            g.message(msg.format(formula=mapcalc_formula))
            del(msg)

        grass.mapcalc(calibration_formula, overwrite=True)

        '''Transfer timestamps, if any'''

        if timestamps:

            try:
                datetime = grass.read_command("r.timestamp", map=image)
                run("r.timestamp", map=tmp_cdn, date=datetime)

                msg = "\n|i Timestamping: {stamp}".format(stamp=datetime)
                g.message(msg)

            except CalledModuleError:
                grass.fatal(_('\n|* Timestamp is missing! '
                'Please add one to the input map if further times series '
                'analysis is important. '
                'If you don\'t need it, you may use the -t flag.'))

        else:
            grass.warning(_('As requested, timestamp transferring not attempted.'))

        # -------------------------------------------------------------------------
        # add timestamps and register to spatio-temporal raster data set
        # -------------------------------------------------------------------------

        # ToDo -- borrowed from r.sun.daily
        # - change flag for "don't timestamp", see above
        # - use '-t' for temporal, makes more sense
        # - adapt following

            # temporal = flags['t']
            # if temporal:
            #     core.info(_("Registering created maps into temporal dataset..."))
            #     import grass.temporal as tgis

            #     def registerToTemporal(basename, suffixes, mapset, start_day, day_step,
            #                            title, desc):
            #         """
            #         Register daily output maps in spatio-temporal raster data set
            #         """
            #         maps = ','.join([basename + suf + '@' + mapset for suf in suffixes])
            #         tgis.open_new_stds(basename, type='strds', temporaltype='relative',
            #                            title=title, descr=desc, semantic='sum',
            #                            dbif=None, overwrite=grass.overwrite())

            #         tgis.register_maps_in_space_time_dataset(type='rast',
            #                                                  name=basename, maps=maps,
            #                                                  start=start_day, end=None,
            #                                                  unit='days',
            #                                                  increment=day_step,
            #                                                  dbif=None, interval=False)

        '''Normalised Difference Index (NDI), if requested'''

        ndi = float()
        if evaluation:

            # total light indices for input, tmp_cdn images
            tli_image = total_light_index(image)
            tli_tmp_cdn = total_light_index(tmp_cdn)

            # build
            ndi = normalised_difference_index(tli_image, tli_tmp_cdn)

            # report if -g
            if shell:
                msg = 'ndi={index}'.format(index=round(ndi,3))
                g.message(msg)
                del(msg)

            # else, report
            else:
                msg = '\n|i Normalised Difference Index for {dn}: {index}'
                msg = msg.format(dn=image, index=round(ndi,3))
                g.message(msg)
                del(msg)

        '''Strings for metadata'''

        history_calibration = 'Regression model: '
        history_calibration += mapcalc_formula
        history_calibration += '\n\n'
        if ndi:
            history_calibration += 'NDI: {ndi}'.format(ndi=round(ndi,10))
        title_calibration = 'Calibrated DMSP-OLS Stable Lights'
        description_calibration = ('Inter-satellite calibrated average '
                                   'Digital Number values')
        units_calibration = 'Digital Numbers (Calibrated)'

        source1_calibration = citation_string
        source2_calibration = ''

        # history entry
        run("r.support",
             map=tmp_cdn,
             title=title_calibration,
             units=units_calibration,
             description=description_calibration,
             source1=source1_calibration,
             source2=source2_calibration,
             history=history_calibration)

        '''Add suffix to basename & rename end product'''

        name = "{prefix}.{suffix}"
        name = name.format(prefix=image.split('@')[0], suffix=outputsuffix)
        calibrated_name = name
        run("g.rename", rast=(tmp_cdn, calibrated_name))
        temporary_maps.remove(tmp_cdn)

        '''Restore previous computational region'''

        if extend_region:
            grass.del_temp_region()
            g.message("\n|! Original Region restored")


        '''Things left to do...'''

        if citation:
            msg = "\n|i Citation: {string}".format(string=citation_string)
            g.message(msg)
            del(msg)
Ejemplo n.º 21
def main():

    global acq_time, esd
    """1st, get input, output, options and flags"""

    spectral_bands = options['band'].split(',')
    outputsuffix = options['outputsuffix']
    utc = options['utc']
    doy = options['doy']
    sea = options['sea']

    radiance = flags['r']
    if radiance and outputsuffix == 'toar':
        outputsuffix = 'rad'
        g.message("Output-suffix set to %s" % outputsuffix)

    keep_region = flags['k']
    info = flags['i']

    # -----------------------------------------------------------------------
    # Equations
    # -----------------------------------------------------------------------

    if info:
        # conversion to Radiance based on (1)
        msg = "|i Spectral Radiance = K * DN / Effective Bandwidth | " \
              "Reflectance = ( Pi * Radiance * ESD^2 ) / BAND_Esun * cos(SZA)"
        g.message(msg)

    # -----------------------------------------------------------------------
    # List images and their properties
    # -----------------------------------------------------------------------

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?

    #    imglst = [pan]
    #    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in spectral_bands:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    # -----------------------------------------------------------------------
    # Temporary Region and Files
    # -----------------------------------------------------------------------

    if not keep_region:
        grass.use_temp_region()  # to safely modify the region
    tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
    tmp = "tmp." + grass.basename(tmpfile)  # use its basename

    # -----------------------------------------------------------------------
    # Global Metadata: Earth-Sun distance, Sun Zenith Angle
    # -----------------------------------------------------------------------

    # Earth-Sun distance
    if doy:
        g.message("|! Using Day of Year to calculate Earth-Sun distance.")
        esd = jd_to_esd(int(doy))

    elif (not doy) and utc:
        acq_utc = AcquisitionTime(utc)  # will hold esd (earth-sun distance)
        esd = acq_utc.esd

    else:
        grass.fatal(
            _("Either the UTC string or "
              "the Day-of-Year (doy) are required!"))

    sza = 90 - float(sea)  # Sun Zenith Angle based on Sun Elevation Angle

    # -----------------------------------------------------------------------
    # Loop processing over all bands
    # -----------------------------------------------------------------------
    for band in spectral_bands:

        global tmp_rad

        # -------------------------------------------------------------------
        # Match bands region if... ?
        # -------------------------------------------------------------------

        if not keep_region:
            run('g.region', rast=band)  # ## FixMe?
            msg = "\n|! Region matching the %s spectral band" % band
            g.message(msg)

        elif keep_region:
            msg = "|! Operating on current region"
            g.message(msg)

        # -------------------------------------------------------------------
        # Band dependent metadata for Spectral Radiance
        # -------------------------------------------------------------------

        g.message("\n|* Processing the %s band" % band, flags='i')

        # Why is this necessary?  Any function to remove the mapset's name?
        if '@' in band:
            band = (band.split('@')[0])

        # get absolute calibration factor
        acf = float(CF_BW_ESUN[band][2])
        acf_msg = "K=" + str(acf)

        # effective bandwidth
        bw = float(CF_BW_ESUN[band][0])

        # -------------------------------------------------------------------
        # Converting to Spectral Radiance
        # -------------------------------------------------------------------

        msg = "\n|> Converting to Spectral Radiance " \
              "| Conversion Factor %s, Bandwidth=%.3f" % (acf_msg, bw)
        g.message(msg)

        # convert
        tmp_rad = "%s.Radiance" % tmp  # Temporary Map
        rad = "%s = %f * %s / %f" \
            % (tmp_rad, acf, band, bw)  # Attention: 32-bit calculations requ.
        grass.mapcalc(rad, overwrite=True)

        # strings for metadata
        history_rad = rad
        history_rad += "Conversion Factor=%f; Effective Bandwidth=%.3f" \
            % (acf, bw)
        title_rad = ""
        description_rad = "Top-of-Atmosphere %s band spectral Radiance " \
                          "[W/m^2/sr/μm]" % band
        units_rad = "W / sq.m. / μm / ster"

        if not radiance:

            # ---------------------------------------------------------------
            # Converting to Top-of-Atmosphere Reflectance
            # ---------------------------------------------------------------

            global tmp_toar

            msg = "\n|> Converting to Top-of-Atmosphere Reflectance"
            g.message(msg)

            esun = float(CF_BW_ESUN[band][1])
            msg = "   %s band mean solar exoatmospheric irradiance=%.2f" \
                % (band, esun)
            g.message(msg)

            # convert
            tmp_toar = "%s.Reflectance" % tmp  # Spectral Reflectance
            toar = "%s = %f * %s * %f^2 / %f * cos(%f)" \
                % (tmp_toar, math.pi, tmp_rad, esd, esun, sza)
            grass.mapcalc(toar, overwrite=True)

            # report range? Using a flag and skip actual conversion?
            # todo?

            # strings for metadata
            title_toar = "%s band (Top of Atmosphere Reflectance)" % band
            description_toar = "Top of Atmosphere %s band spectral Reflectance" \
                % band
            units_toar = "Unitless planetary reflectance"
            history_toar = "K=%f; Bandwidth=%.1f; ESD=%f; Esun=%.2f; SZA=%.1f" \
                % (acf, bw, esd, esun, sza)

        if tmp_toar:

            # history entry
            run("r.support",
                map=tmp_toar,
                title=title_toar,
                units=units_toar,
                description=description_toar,
                source1=source1_toar,
                source2=source2_toar,
                history=history_toar)

            # add suffix to basename & rename end product
            toar_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_toar, toar_name))

        elif tmp_rad:

            # history entry
            run("r.support",
                map=tmp_rad,
                title=title_rad,
                units=units_rad,
                description=description_rad,
                source1=source1_rad,
                source2=source2_rad,
                history=history_rad)

            # add suffix to basename & rename end product
            rad_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_rad, rad_name))

    # visualising-related information
    if not keep_region:
        grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Region's resolution restored!")
    g.message(
        "\n>>> Hint: rebalancing colors "
        "(i.colors.enhance) may improve appearance of RGB composites!",
        flags='i')
Ejemplo n.º 22
def main():
    global tmp, sqltmp, tmpname, nuldev, vector, rastertmp
    rastertmp = False
    # setup temporary files
    tmp = grass.tempfile()
    sqltmp = tmp + ".sql"
    # we need a random name
    tmpname = grass.basename(tmp)

    nuldev = open(os.devnull, 'w')

    rasters = options['raster'].split(',')
    colprefixes = options['column_prefix'].split(',')
    vector = options['map']
    layer = options['layer']
    vtypes = options['type']
    where = options['where']
    percentile = options['percentile']
    basecols = options['method'].split(',')

    ### setup enviro vars ###
    env = grass.gisenv()
    mapset = env['MAPSET']

    vs = vector.split('@')
    if len(vs) > 1:
        vect_mapset = vs[1]
    else:
        vect_mapset = mapset

    # does map exist in CURRENT mapset?
    if vect_mapset != mapset or not grass.find_file(vector, 'vector',
                                                    mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    # check if DBF driver used, in this case cut to 10 chars col names:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # colprefix for every raster map?
    if len(colprefixes) != len(rasters):
        grass.fatal(
            _("Number of raster maps ({0}) different from \
                      number of column prefixes ({1})".format(
                len(rasters), len(colprefixes))))

    vector = vs[0]

    rastertmp = "%s_%s" % (vector, tmpname)

    for raster in rasters:
        # check the input raster map
        if not grass.find_file(raster, 'cell')['file']:
            grass.fatal(_("Raster map <%s> not found") % raster)

    # save current settings:
    grass.use_temp_region()

    # Temporarily aligning region resolution to $RASTER resolution
    # keep boundary settings
    grass.run_command('g.region', align=rasters[0])

    # check if DBF driver used, in this case cut to 10 chars col names:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. '
              'Run v.db.connect or v.db.addtable first.'))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # Find out which table is linked to the vector map on the given layer
    if not fi['table']:
        grass.fatal(
            _('There is no table connected to this map. '
              'Run v.db.connect or v.db.addtable first.'))

    # prepare base raster for zonal statistics
    prepare_base_raster(vector, layer, rastertmp, vtypes, where)

    # get number of raster categories to be processed
    number = get_nr_of_categories(vector, layer, rasters, rastertmp,
                                  percentile, colprefixes, basecols, dbfdriver,
                                  flags['c'])

    # calculate statistics:
    grass.message(_("Processing input data (%d categories)...") % number)

    for i in range(len(rasters)):
        raster = rasters[i]

        colprefix, variables_dbf, variables, colnames, extstat = set_up_columns(
            vector, layer, percentile, colprefixes[i], basecols, dbfdriver,
            flags['c'])

        # get rid of any earlier attempts
        grass.try_remove(sqltmp)

        # do the stats
        perform_stats(raster, percentile, fi, dbfdriver, colprefix,
                      variables_dbf, variables, colnames, extstat)

        grass.message(_("Updating the database ..."))
        exitcode = 0
        try:
            grass.run_command('db.execute',
                              input=sqltmp,
                              database=fi['database'],
                              driver=fi['driver'])
            grass.verbose(
                (_("Statistics calculated from raster map <{raster}>"
                   " and uploaded to attribute table"
                   " of vector map <{vector}>.").format(raster=raster,
                                                        vector=vector)))
        except CalledModuleError:
            grass.warning(
                _("Failed to upload statistics to attribute table of vector map <%s>."
                  ) % vector)
            exitcode = 1

    sys.exit(exitcode)
Ejemplo n.º 23
0
def main():
    global tmp, sqltmp, tmpname, nuldev, vector, rastertmp
    rastertmp = False
    #### setup temporary files
    tmp = grass.tempfile()
    sqltmp = tmp + ".sql"
    # we need a random name
    tmpname = grass.basename(tmp)

    nuldev = open(os.devnull, 'w')

    raster = options['raster']
    colprefix = options['column_prefix']
    vector = options['map']
    layer = options['layer']
    percentile = options['percentile']
    basecols = options['method'].split(',')

    ### setup enviro vars ###
    env = grass.gisenv()
    mapset = env['MAPSET']

    vs = vector.split('@')
    if len(vs) > 1:
        vect_mapset = vs[1]
    else:
        vect_mapset = mapset

    # does map exist in CURRENT mapset?
    if vect_mapset != mapset or not grass.find_file(vector, 'vector', mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    vector = vs[0]

    rastertmp = "%s_%s" % (vector, tmpname)

    # check the input raster map
    if not grass.find_file(raster, 'cell')['file']:
        grass.fatal(_("Raster map <%s> not found") % raster)

    # save current settings:
    grass.use_temp_region()

    # Temporarily aligning region resolution to $RASTER resolution
    # keep boundary settings
    grass.run_command('g.region', align=raster)

    grass.message(_("Preprocessing input data..."))
    try:
        grass.run_command('v.to.rast', input=vector, layer=layer, output=rastertmp,
                          use='cat', quiet=True)
    except CalledModuleError:
        grass.fatal(_("An error occurred while converting vector to raster"))

    # dump cats to a file to avoid the "too many arguments" problem:
    p = grass.pipe_command('r.category', map=rastertmp, sep=';', quiet=True)
    cats = []

    for line in p.stdout:
        cats.append(line.rstrip('\r\n').split(';')[0])
    p.wait()

    number = len(cats)
    if number < 1:
        grass.fatal(_("No categories found in raster map"))

    # check if DBF driver used, in this case cut to 10 chars col names:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # Find out which table is linked to the vector map on the given layer
    if not fi['table']:
        grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))

    # replaced by user choice
    #basecols = ['n', 'min', 'max', 'range', 'mean', 'stddev', 'variance', 'cf_var', 'sum']

    # we need at least three chars to distinguish [mea]n from [med]ian
    # so colprefix can't be longer than 6 chars with DBF driver
    if dbfdriver:
        colprefix = colprefix[:6]
        variables_dbf = {}

    # by default perccol variable is used only for "variables" variable
    perccol = "percentile"
    perc = None
    for b in basecols:
        if b.startswith('p'):
            perc = b
    if perc:
        # namespace is limited in DBF but the % value is important
        if dbfdriver:
            perccol = "per" + percentile
        else:
            perccol = "percentile_" + percentile
        percindex = basecols.index(perc)
        basecols[percindex] = perccol

    # dictionary with name of methods and position in "r.univar -gt"  output
    variables = {'number': 2, 'minimum': 4, 'maximum': 5, 'range': 6,
                 'average': 7, 'stddev': 9, 'variance': 10, 'coeff_var': 11,
                 'sum': 12, 'first_quartile': 14, 'median': 15,
                 'third_quartile': 16, perccol: 17}
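    # The positions above are assumed to match the 0-based field order of the
    # semicolon-separated `r.univar -t` table output (plus the -e extended
    # statistics), i.e. roughly:
    #   zone;label;non_null_cells;null_cells;min;max;range;mean;mean_of_abs;
    #   stddev;variance;coeff_var;sum;sum_abs;first_quartile;median;
    #   third_quartile;percentile_XX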
    # this list is used to set the 'e' flag for r.univar
    extracols = ['first_quartile', 'median', 'third_quartile', perccol]
    addcols = []
    colnames = []
    extstat = ""
    for i in basecols:
        # expand a possibly truncated method name to its full key in 'variables'
        for k in variables.keys():
            if i in k:
                i = k
                break
        if i in extracols:
            extstat = 'e'
        # check if column already present
        currcolumn = ("%s_%s" % (colprefix, i))
        if dbfdriver:
            currcolumn = currcolumn[:10]
            variables_dbf[currcolumn.replace("%s_" % colprefix, '')] = i

        colnames.append(currcolumn)
        if currcolumn in grass.vector_columns(vector, layer).keys():
            if not flags['c']:
                grass.fatal((_("Cannot create column <%s> (already present). ") % currcolumn) +
                             _("Use -c flag to update values in this column."))
        else:
            if i == "n":
                coltype = "INTEGER"
            else:
                coltype = "DOUBLE PRECISION"
            addcols.append(currcolumn + ' ' + coltype)

    if addcols:
        grass.verbose(_("Adding columns '%s'") % addcols)
        try:
            grass.run_command('v.db.addcolumn', map=vector, columns=addcols,
                              layer=layer)
        except CalledModuleError:
            grass.fatal(_("Adding columns failed. Exiting."))

    # calculate statistics:
    grass.message(_("Processing input data (%d categories)...") % number)

    # get rid of any earlier attempts
    grass.try_remove(sqltmp)

    f = open(sqltmp, 'w')

    # do the stats
    p = grass.pipe_command('r.univar', flags='t' + extstat, map=raster,
                           zones=rastertmp, percentile=percentile, sep=';')

    first_line = 1

    f.write("{}\n".format(grass.db_begin_transaction(fi['driver'])))
    for line in p.stdout:
        if first_line:
            first_line = 0
            continue

        vars = line.rstrip('\r\n').split(';')

        f.write("UPDATE %s SET" % fi['table'])
        first_var = 1
        for colname in colnames:
            variable = colname.replace("%s_" % colprefix, '', 1)
            if dbfdriver:
                variable = variables_dbf[variable]
            i = variables[variable]
            value = vars[i]
            # convert nan, +nan, -nan, inf, +inf, -inf, Infinity, +Infinity,
            # -Infinity to NULL
            if value.lower().endswith('nan') or 'inf' in value.lower():
                value = 'NULL'
            if not first_var:
                f.write(" , ")
            else:
                first_var = 0
            f.write(" %s=%s" % (colname, value))

        f.write(" WHERE %s=%s;\n" % (fi['key'], vars[0]))
    f.write("{}\n".format(grass.db_commit_transaction(fi['driver'])))
    p.wait()
    f.close()
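    # At this point sqltmp is assumed to hold one UPDATE statement per zonal
    # category, wrapped in a transaction; illustrative content:
    #   UPDATE mytable SET  mycol_average=12.34 , mycol_stddev=5.67 WHERE cat=1;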

    grass.message(_("Updating the database ..."))
    exitcode = 0
    try:
        grass.run_command('db.execute', input=sqltmp,
                          database=fi['database'], driver=fi['driver'])
        grass.verbose((_("Statistics calculated from raster map <{raster}>"
                         " and uploaded to attribute table"
                         " of vector map <{vector}>."
                         ).format(raster=raster, vector=vector)))
    except CalledModuleError:
        grass.warning(_("Failed to upload statistics to attribute table of vector map <%s>.") % vector)
        exitcode = 1

    sys.exit(exitcode)
Ejemplo n.º 24
0
def main():
    global vrtfile, tmpfile

    infile = options['input']
    rast = options['output']
    also = flags['a']

    #### check for gdalinfo (just to check if installation is complete)
    if not grass.find_program('gdalinfo', '--help'):
        grass.fatal(
            _("'gdalinfo' not found, install GDAL tools first (http://www.gdal.org)"
              ))

    pid = str(os.getpid())
    tmpfile = grass.tempfile()

    ################### let's go

    spotdir = os.path.dirname(infile)
    spotname = grass.basename(infile, 'hdf')

    if rast:
        name = rast
    else:
        name = spotname

    if not grass.overwrite() and grass.find_file(name)['file']:
        grass.fatal(_("<%s> already exists. Aborting.") % name)

    # still a ZIP file?  (is this portable?? see the r.in.srtm script for ideas)
    if infile.lower().endswith('.zip'):
        grass.fatal(_("Please extract %s before import.") % infile)

    try:
        p = grass.Popen(['file', '-ib', infile], stdout=grass.PIPE)
        s = p.communicate()[0]
        if s == "application/x-zip":
            grass.fatal(_("Please extract %s before import.") % infile)
    except:
        pass

    ### create VRT header for NDVI

    projfile = os.path.join(spotdir, "0001_LOG.TXT")
    vrtfile = tmpfile + '.vrt'

    # first process the NDVI:
    grass.try_remove(vrtfile)
    create_VRT_file(projfile, vrtfile, infile)

    ## let's import the NDVI map...
    grass.message(_("Importing SPOT VGT NDVI map..."))
    if grass.run_command('r.in.gdal', input=vrtfile, output=name) != 0:
        grass.fatal(_("An error occurred. Stop."))

    grass.message(_("Imported SPOT VEGETATION NDVI map <%s>.") % name)

    #################
    ## http://www.vgt.vito.be/faq/FAQS/faq19.html
    # What is the relation between the digital number and the real NDVI ?
    # Real NDVI =coefficient a * Digital Number + coefficient b
    #           = a * DN +b
    #
    # Coefficient a = 0.004
    # Coefficient b = -0.1
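    #
    # e.g. (plain arithmetic on the coefficients above, assuming 8-bit DNs):
    #   DN  25  ->  0.004 *  25 - 0.1 = 0.0
    #   DN 250  ->  0.004 * 250 - 0.1 = 0.9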

    # clone current region
    # switch to a temporary region
    grass.use_temp_region()

    grass.run_command('g.region', rast=name, quiet=True)

    grass.message(_("Remapping digital numbers to NDVI..."))
    tmpname = "%s_%s" % (name, pid)
    grass.mapcalc("$tmpname = 0.004 * $name - 0.1", tmpname=tmpname, name=name)
    grass.run_command('g.remove',
                      flags='f',
                      type='rast',
                      pattern=name,
                      quiet=True)
    grass.run_command('g.rename', rast=(tmpname, name), quiet=True)

    # write cmd history:
    grass.raster_history(name)

    #apply color table:
    grass.run_command('r.colors', map=name, color='ndvi', quiet=True)

    ##########################
    # second, optionally process the SM quality map:

    #SM Status Map
    # http://nieuw.vgt.vito.be/faq/FAQS/faq22.html
    #Data about
    # Bit NR 7: Radiometric quality for B0 coded as 0 if bad and 1 if good
    # Bit NR 6: Radiometric quality for B2 coded as 0 if bad and 1 if good
    # Bit NR 5: Radiometric quality for B3 coded as 0 if bad and 1 if good
    # Bit NR 4: Radiometric quality for MIR coded as 0 if bad and 1 if good
    # Bit NR 3: land code 1 or water code 0
    # Bit NR 2: ice/snow code 1 , code 0 if there is no ice/snow
    # Bit NR 1:	0	0	1		1
    # Bit NR 0:	0	1	0		1
    # 		clear	shadow	uncertain	cloud
    #
    #Note:
    # pos 7     6    5    4    3    2   1   0 (bit position)
    #   128    64   32   16    8    4   2   1 (values for 8 bit)
    #
    #
    # Bit 4-7 should be 1: their sum is 240
    # Bit 3   land code, should be 1, sum up to 248 along with higher bits
    # Bit 2   ice/snow code
    # Bit 0-1 should be 0
    #
    # A good map threshold: >= 248
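    #
    # Worked example with the bit layout above (illustrative):
    #   248 = 0b11111000 -> good radiometry (bits 4-7), land (bit 3),
    #                       no ice/snow, clear sky  -> passes the threshold
    #   252 = 0b11111100 -> as above but with ice/snow set (bit 2), hence
    #                       the snow-extraction hint for category 252 below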

    if also:
        grass.message(_("Importing SPOT VGT NDVI quality map..."))
        grass.try_remove(vrtfile)
        qname = spotname.replace('NDV', 'SM')
        qfile = os.path.join(spotdir, qname)
        create_VRT_file(projfile, vrtfile, qfile)

        ## let's import the SM quality map...
        smfile = name + '.sm'
        if grass.run_command('r.in.gdal', input=vrtfile, output=smfile) != 0:
            grass.fatal(_("An error occurred. Stop."))

        # some of the possible values:
        rules = [
            r + '\n' for r in [
                '8 50 50 50', '11 70 70 70', '12 90 90 90', '60 grey',
                '155 blue', '232 violet', '235 red', '236 brown', '248 orange',
                '251 yellow', '252 green'
            ]
        ]
        grass.write_command('r.colors', map=smfile, rules='-', stdin=rules)

        grass.message(
            _("Imported SPOT VEGETATION SM quality map <%s>.") % smfile)
        grass.message(
            _("Note: A snow map can be extracted by category 252 (d.rast %s cat=252)"
              ) % smfile)
        grass.message("")
        grass.message(_("Filtering NDVI map by Status Map quality layer..."))

        filtfile = "%s_filt" % name
        grass.mapcalc(
            "$filtfile = if($smfile % 4 == 3 || ($smfile / 16) % 16 == 0, null(), $name)",
            filtfile=filtfile,
            smfile=smfile,
            name=name)
        grass.run_command('r.colors', map=filtfile, color='ndvi', quiet=True)
        grass.message(_("Filtered SPOT VEGETATION NDVI map <%s>.") % filtfile)

        # write cmd history:
        grass.raster_history(smfile)
        grass.raster_history(filtfile)

    grass.message(_("Done."))
def random_layer_name(prefix='tmp'):
    """Return a name for a temporary layer in GRASS GIS."""
    tmp = gscript.tempfile()
    return prefix + '_' + gscript.basename(tmp).replace('.', '_')
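# Usage sketch (illustrative; the numeric part comes from gscript.tempfile()):
#   random_layer_name('buffer')  ->  e.g. 'buffer_12345_0'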
Ejemplo n.º 26
0
def main():

    pan = options['pan']
    msxlst = options['msx'].split(',')
    outputsuffix = options['suffix']
    custom_ratio = options['ratio']
    center = options['center']
    center2 = options['center2']
    modulation = options['modulation']
    modulation2 = options['modulation2']

    if options['trim']:
        trimming_factor = float(options['trim'])
    else:
        trimming_factor = False

    histogram_match = flags['l']
    second_pass = flags['2']
    color_match = flags['c']

    #    # Check & warn user about "ns == ew" resolution of current region ======
    #    region = grass.region()
    #    nsr = region['nsres']
    #    ewr = region['ewres']
    #
    #    if nsr != ewr:
    #        msg = ('>>> Region's North:South ({ns}) and East:West ({ew}) '
    #               'resolutions do not match!')
    #        msg = msg.format(ns=nsr, ew=ewr)
    #        grass.message(msg, flag='w')

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    # pygrass.raster.abstract.Info can not cope with
    # Info(name@mapset, mapset)
    # -> fully qualified names and input images from other mapsets are
    # not supported
    # -> use r.info via raster_info

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        # images[img] = Info(img, mapset)
        # images[img].read()
        try:
            images[img] = grass.raster_info(img)
        except:
            grass.fatal(_("msx input not found"))

    panres = images[pan]['nsres']  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    if flags['a']:
        run('g.region', align=pan)  # Respect extent, change resolution
    else:
        run('g.region', res=panres)  # Respect extent, change resolution
        grass.message(
            "|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        grass.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        grass.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            grass.warning('Using custom ratio, overriding standard method!')

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            grass.message("   > Retrieving image resolutions")

            msxres = images[msx]['nsres']

            # check
            if panres == msxres:
                msg = ("The Panchromatic's image resolution ({pr}) "
                       "equals to the Multi-Spectral's one ({mr}). "
                       "Something is probably not right! "
                       "Please check your input images.")
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
            msg_ratio = ('   >> Resolution ratio '
                         'low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}')
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            grass.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            grass.message(
                "   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                "   >>> If you insist, force it via the <ratio> option!",
                flag='i')
            second_pass = False

        #
        # 2. High Pass Filtering
        #

        grass.message('\n|2 High Pass Filtering the Panchromatic Image')

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = 'tmp.' + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = '{tmp}_pan_hpf'.format(tmp=tmp)  # HPF image
        tmp_msx_blnr = '{tmp}_msx_blnr'.format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = '{tmp}_msx_hpf'.format(tmp=tmp)  # Fused image
        tmp_msx_mapcalc = tmp_msx_hpf + '_mapcalc'
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run('r.mfilter',
            input=pan,
            filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title='High Pass Filtered Panchromatic image',
            overwrite=True)

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            # 2nd Pass HPF image
            tmp_pan_hpf_2 = '{tmp}_pan_hpf_2'.format(tmp=tmp)
            # 2nd Pass ASCII filter
            tmp_hpf_matrix_2 = grass.tempfile()
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run('r.mfilter',
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title='2-High-Pass Filtered Panchromatic Image',
                overwrite=True)

        #
        # 3. Upsampling low resolution image
        #

        grass.message("\n|3 Upsampling (bilinearly) low resolution image")

        run('r.resamp.interp',
            method='bilinear',
            input=msx,
            output=tmp_msx_blnr,
            overwrite=True)

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        grass.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " \
            "Modulating Factor"
        grass.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        grass.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx,
                                                               sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        grass.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        grass.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)
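        # hpf_weight() is defined elsewhere in this module; judging from the
        # message above, it presumably computes: msx_sd / hpf_sd * modulator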

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        grass.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = '{hpf} = {msx} + {pan} * {wgt}'
        fusion = fusion.format(hpf=tmp_msx_hpf,
                               msx=tmp_msx_blnr,
                               pan=tmp_pan_hpf,
                               wgt=weighting)
        grass.mapcalc(fusion)

        # command history
        hst = 'Weighting applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}'
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            grass.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            grass.message(
                "   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = '   >> 2nd Pass Modulating Factor: {m:.2f}'
            grass.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            grass.message("\n|5+ Adding small-kernel-based weighted "
                          "2nd HPFi back to fused image")

            add_back = '{final} = {msx_hpf} + {pan_hpf} * {wgt}'
            # r.mapcalc: do not use input as output
            add_back = add_back.format(final=tmp_msx_mapcalc,
                                       msx_hpf=tmp_msx_hpf,
                                       pan_hpf=tmp_pan_hpf_2,
                                       wgt=weighting_2)
            grass.mapcalc(add_back)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(
                hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Spectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            # technically, this is not histogram matching but
            # normalizing to the input's mean + stddev
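            # i.e. (see the expression below):
            #   out = (hpf - mean(hpf)) / stddev(hpf) * stddev(msx) + mean(msx)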
            grass.message("\n|+ Matching histogram of Pansharpened image "
                          "to %s" % (msx))

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            msx_info = images[msx]
            outfn = 'round'
            if msx_info['datatype'] == 'FCELL':
                outfn = 'float'
            elif msx_info['datatype'] == 'DCELL':
                outfn = 'double'

            # expression for mapcalc
            lhm = "{out} = {outfn}(double({hpf} - {hpfavg}) / {hpfsd} * " \
                          "{msxsd} + {msxavg})"
            # r.mapcalc: do not use input as output
            lhm = lhm.format(out=tmp_msx_mapcalc,
                             outfn=outfn,
                             hpf=tmp_msx_hpf,
                             hpfavg=msx_hpf_avg,
                             hpfsd=msx_hpf_sd,
                             msxsd=msx_sd,
                             msxavg=msx_avg)

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # snap outliers to input range
            snapout = "{out} = {outfn}(if({hpf} < {oldmin}, {oldmin}, " \
                              "if({hpf} > {oldmax}, {oldmax}, {hpf})))"
            snapout = snapout.format(out=tmp_msx_mapcalc,
                                     outfn=outfn,
                                     hpf=tmp_msx_hpf,
                                     oldmin=msx_info['min'],
                                     oldmax=msx_info['max'])

            grass.mapcalc(snapout, quiet=True, overwrite=True)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)
        else:
            # scale result to input using quantiles
            grass.message("\n|+ Quantile scaling of Pansharpened image "
                          "to %s" % (msx))

            msx_info = images[msx]
            outfn = 'round'
            if msx_info['datatype'] == 'FCELL':
                outfn = 'float'
            elif msx_info['datatype'] == 'DCELL':
                outfn = 'double'

            # quantile scaling
            percentiles = "10,50,90"
            allq = grass.read_command('r.quantile',
                                      input=msx,
                                      percentiles=percentiles,
                                      quiet=True)
            allq = allq.splitlines()
            msx_plo = float(allq[0].split(':')[2])
            msx_med = float(allq[1].split(':')[2])
            msx_phi = float(allq[2].split(':')[2])

            allq = grass.read_command('r.quantile',
                                      input=tmp_msx_hpf,
                                      percentiles=percentiles,
                                      quiet=True)
            allq = allq.splitlines()
            hpf_plo = float(allq[0].split(':')[2])
            hpf_med = float(allq[1].split(':')[2])
            hpf_phi = float(allq[2].split(':')[2])

            # scale factors
            sfplo = (msx_med - msx_plo) / (hpf_med - hpf_plo)
            sfphi = (msx_phi - msx_med) / (hpf_phi - hpf_med)
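            # piecewise linear rescaling: values below the HPFi median are
            # scaled by sfplo, values above by sfphi, both anchored at the
            # MSx median (see the expression below)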

            scale = "{out} = {outfn}(double({hpf} - {hpf_med}) * " \
                            "if({hpf} < {hpf_med}, {sfplo}, " \
                            "{sfphi}) + {msx_med})"
            scale = scale.format(out=tmp_msx_mapcalc,
                                 outfn=outfn,
                                 hpf=tmp_msx_hpf,
                                 hpf_med=hpf_med,
                                 sfplo=sfplo,
                                 sfphi=sfphi,
                                 msx_med=msx_med)
            grass.mapcalc(scale, quiet=True)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # snap outliers to input range
            snapout = "{out} = {outfn}(if({hpf} < {oldmin}, {oldmin}, " \
                              "if({hpf} > {oldmax}, {oldmax}, {hpf})))"
            snapout = snapout.format(out=tmp_msx_mapcalc,
                                     outfn=outfn,
                                     hpf=tmp_msx_hpf,
                                     oldmin=msx_info['min'],
                                     oldmax=msx_info['max'])

            grass.mapcalc(snapout, quiet=True, overwrite=True)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # update history string
            cmd_history.append("Linear Scaling: %s" % scale)

        if color_match:
            grass.message("\n|* Matching output to input color table")
            run('r.colors', map=tmp_msx_hpf, raster=msx)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = '\n|* Trimming output image border pixels by '
            msg += '{factor} times the low resolution\n'.format(factor=tf)
            nsew = '   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}'
            nsew = nsew.format(n=region['n'],
                               s=region['s'],
                               e=region['e'],
                               w=region['w'])
            msg += nsew

            grass.message(msg)

            # re-set borders
            region.n -= tf * images[msx]['nsres']
            region.s += tf * images[msx]['nsres']
            region.e -= tf * images[msx]['ewres']
            region.w += tf * images[msx]['ewres']

            # communicate and act
            msg = '   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}'
            msg = msg.format(n=region['n'],
                             s=region['s'],
                             e=region['e'],
                             w=region['w'])
            grass.message(msg)

            # modify only the extent
            run('g.region',
                n=region['n'],
                s=region['s'],
                e=region['e'],
                w=region['w'])
            # r.mapcalc: do not use input as output
            trim = "{out} = {input}".format(out=tmp_msx_mapcalc,
                                            input=tmp_msx_hpf)
            grass.mapcalc(trim)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

        #
        # End of Algorithm

        # history entry
        run("r.support", map=tmp_msx_hpf, history="\n".join(cmd_history))

        # add suffix to basename & rename end product
        msx_name = "{base}{suffix}"
        msx_name = msx_name.format(base=msx.split('@')[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # visualising-related information
    grass.del_temp_region()  # restoring previous region settings
    grass.message("\n|! Original Region restored")
    grass.message(
        "\n>>> Hint, rebalancing colors (via i.colors.enhance) "
        "may improve appearance of RGB composites!",
        flag='i')
def main():
    """ """
    sensor = options['sensor']

    mapsets = options['mapsets']
    prefix = options['input_prefix']
    suffix = options['output_suffix']

    metafile = grass.basename(options['metafile'])
    atm = int(options['atmospheric_model'])  # Atmospheric model [index]
    aer = int(options['aerosols_model'])  # Aerosols model [index]

    vis = options['visual_range']  # Visibility [km]
    aod = options['aod']  # Aerosol Optical Depth at 550nm

    xps = options['altitude']  # Mean Target Altitude [negative km]
    if not xps:
        msg = "Note, this value will be overwritten if a DEM raster has been "\
              "defined as an input!"
        g.message(msg)

    elevation = options['elevation']
    vis_map = options['visibility_map']

    radiance = flags['r']
    if radiance:
        global rad_flg
        rad_flg = 'r'
    else:
        rad_flg = ''

    # If the scene to be processed was imported via the (custom) python
    # Landsat import script, then, Mapset name == Scene identifier

    mapset = grass.gisenv()['MAPSET']
    if mapset == 'PERMANENT':
        grass.fatal(_('Please change to a Mapset other than PERMANENT'))

#    elif 'L' not in mapset:
#        msg = "Assuming the Landsat scene(s) ha-s/ve been imported using the "\
#              "custom python import script, the Mapset's name *should* begin "\
#              "with the letter L!"
#        grass.fatal(_(msg))

    else:
        result = grass.find_file(element='cell_misc',
                                 name=metafile,
                                 mapset='.')
        if not result['file']:
            grass.fatal("The metadata file <%s> is not in GRASS' data base!"
                        % metafile)
        else:
            metafile = result['file']

    # -----------------------------------------------------------------------
    # Acquisition's metadata
    # -----------------------------------------------------------------------

    msg = "Acquisition metadata for 6S code (line 2 in Parameters file)\n"

    # Month, day
    date = grass.parse_command('i.landsat.toar', flags='p',
                               input='dummy', output='dummy',
                               metfile=metafile, lsatmet='date')
    mon = int(date['date'][5:7])  # Month of acquisition
    day = int(date['date'][8:10])  # Day of acquisition

    # GMT in decimal hours
    gmt = grass.read_command('i.landsat.toar', flags='p',
                             input='dummy', output='dummy',
                             metfile=metafile, lsatmet='time')
    gmt = float(gmt.rstrip('\n'))

    # Scene's center coordinates
    cll = grass.parse_command('g.region', flags='clg')
    lon = float(cll['center_long'])  # Center Longitude [decimal degrees]
    lat = float(cll['center_lat'])  # Center Latitude [decimal degrees]

    msg += str(mon) + ' ' + str(day) + ' ' + str(gmt) + ' ' + \
        str(lon) + ' ' + str(lat)
    g.message(msg)
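    # Illustrative "line 2" content (month, day, decimal GMT hours,
    # centre longitude, centre latitude), e.g.:  7 24 9.85 23.47 38.25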

    # -----------------------------------------------------------------------
    # Mapsets are Scenes. Read'em all!
    # -----------------------------------------------------------------------

    if mapsets == 'all':
        scenes = grass.mapsets()

    elif mapsets == 'current':
        scenes = [mapset]

    else:
        scenes = mapsets.split(',')

    if 'PERMANENT' in scenes:
        scenes.remove('PERMANENT')

    # access only to specific mapsets!
    msg = "\n|* Performing atmospheric correction for scenes:  %s" % scenes
    g.message(msg)

    for scene in scenes:

        # ensure access only to *current* mapset
        run('g.mapsets', mapset='.', operation='set')

        # scene's basename as in GRASS' db
        basename = grass.read_command('g.mapset', flags='p')
        msg = "   | Processing scene:  %s" % basename
        g.message(msg)

        # loop over Landsat bands in question
        for band in sensors[sensor].keys():

            inputband = prefix + str(band)
            msg = "\n>>> Processing band:  %s" % inputband
            g.message(msg)

            # sane aod defaults?
            if not aod:
                if 4 < mon < 10:
                    aod = 0.222  # summer
                else:
                    aod = 0.111  # winter
            else:
                aod = float(options['aod'])

            # Generate 6S parameterization file
            p6s = Parameters(geo=geo[sensor],
                             mon=mon, day=day, gmt=gmt, lon=lon, lat=lat,
                             atm=atm,
                             aer=aer,
                             vis=vis,
                             aod=aod,
                             xps=xps, xpp=xpp,
                             bnd=sensors[sensor][band])

            # ========================================== Temporary files ====
            tmpfile = grass.tempfile()  # replace with os.getpid?
            tmp = "tmp." + grass.basename(tmpfile)  # use its basename

            tmp_p6s = grass.tempfile()  # 6S Parameters ASCII file
            tmp_atm_cor = "%s_cor_out" % tmp  # Atmospherically Corrected Img
            # Temporary files ===============================================

            p6s.export_ascii(tmp_p6s)

            # Process band-wise atmospheric correction with 6s
            msg = "6S parameters:\n\n"
            msg += p6s.parameters
            g.message(msg)

            # inform about input's range?
            inp_rng = grass.parse_command('r.info', flags='r', map=inputband)
            inp_rng['min'] = float(inp_rng['min'])
            inp_rng['max'] = float(inp_rng['max'])
            msg = "Input range: %.2f ~ %.2f" % (inp_rng['min'], inp_rng['max'])
            g.message(msg)

            # ---------------------------------------------------------------
            # Applying 6S Atmospheric Correction algorithm
            # ---------------------------------------------------------------

            if vis_map:
                run('i.atcorr',
                    flags=rad_flg,
                    input=inputband,
                    range=(inp_rng['min'], inp_rng['max']),
                    parameters=tmp_p6s,
                    visibility=vis_map,
                    output=tmp_atm_cor,
                    rescale=(0, 1))

            elif elevation:
                """Using an elevation map.
                Attention: does the elevation cover the area of the images?"""
                run('i.atcorr',
                    flags=rad_flg,
                    input=inputband,
                    range=(inp_rng['min'], inp_rng['max']),
                    parameters=tmp_p6s,
                    elevation=elevation,
                    output=tmp_atm_cor,
                    rescale=(0, 1))

            else:
                """Output is reflectance ranging in [0,1]"""
                run('i.atcorr',
                    flags=rad_flg,
                    input=inputband,
                    range=(inp_rng['min'], inp_rng['max']),
                    parameters=tmp_p6s,
                    output=tmp_atm_cor,
                    rescale=(0, 1))

            # inform about output's range?
            out_rng = grass.parse_command('r.info', flags='r', map=tmp_atm_cor)
            out_rng['min'] = float(out_rng['min'])
            out_rng['max'] = float(out_rng['max'])
            msg = "Output range: %.2f ~ %.2f" \
                % (out_rng['min'], out_rng['max'])
            g.message(msg)

            # add suffix to basename & rename end product
            atm_cor_nam = ("%s%s.%s" % (prefix, suffix, band))
            run('g.rename', rast=(tmp_atm_cor, atm_cor_nam))