Example No. 1
def convert_map(output, field):
    """Convert imported raster map unit and format."""

    # prepare for unit conversion
    if flags['c'] and field in ['tmin', 'tmax', 'tmean']:
        grass.message("Converting <%s> to degree Celcius..." % output)
        a = 0.1
        b = 0
    elif flags['k'] and field in ['tmin', 'tmax', 'tmean']:
        grass.message("Converting <%s> to Kelvin..." % output)
        a = 0.1
        b = 273.15
    elif flags['y'] and field == 'prec':
        grass.message("Converting <%s> to meter per year..." % output)
        a = 0.012
        b = 0
    elif flags['f']:
        grass.message("Converting <%s> to floating-point..." % output)
        a = 1
        b = 0
    else:
        a = None
        b = None

    # convert unit and format
    if a is not None:
        grass.use_temp_region()
        grass.run_command('g.region', rast=output)
        grass.mapcalc('$output = float($output * $a + $b)', a=a, b=b,
                      output=output, overwrite=True)
        grass.del_temp_region()
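
A minimal call-site sketch for the function above, assuming the module-level `flags` dict and `grass` import that the snippet relies on (map name and flag values are hypothetical):

# Hypothetical usage: convert an imported 'tmin' map to Kelvin
flags = {'c': False, 'k': True, 'y': False, 'f': False}  # assumed parser output
convert_map(output='tmin_01', field='tmin')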
Example No. 2
def main():

    # process command options
    input = options['input']
    if not gs.find_file(input)['file']:
        gs.fatal(_("Raster map <%s> not found") % input)

    output = options['output']
    if gs.find_file(output)['file'] and not gs.overwrite():
        gs.fatal(_("Output map <%s> already exists") % output)

    # set aside region for internal use
    gs.use_temp_region()

    # subset input if desired
    region = options.get('region')
    if region:
        if not gs.find_file(region)['file']:
            gs.fatal(_("Raster map <%s> not found") % region)
        gs.message("Setting region to %s" % region, flag='i')
        gs.run_command('g.region', rast=region, align=input)
    else:
        gs.message("Using existing GRASS region", flag='i')
    gs.debug('='*50)
    gs.debug('\n'.join(gs.parse_command('g.region', flags='p').keys()))
    gs.debug('='*50)

    calculate_noise(input, output)

    # restore original region
    gs.del_temp_region()

    return None
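
The `use_temp_region()`/`del_temp_region()` pairing seen here recurs throughout these examples; a minimal standalone sketch of the pattern, assuming an active GRASS session and a raster named `elevation` (both assumptions):

import grass.script as gs

gs.use_temp_region()                                 # region changes stay local to this script
try:
    gs.run_command('g.region', raster='elevation')   # safe to modify the region now
    gs.run_command('r.univar', map='elevation')      # commands run in the temporary region
finally:
    gs.del_temp_region()                             # caller's region restored even on error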
Example No. 3
def convert_map(output, variable):
    """Convert imported raster map unit and format."""

    # prepare for unit conversion
    if flags['c'] and variable in ['tmin', 'tmax', 'tmean']:
        grass.message("Converting {} to degree Celcius...".format(output))
        a = 0.1
        b = 0
    elif flags['k'] and variable in ['tmin', 'tmax', 'tmean']:
        grass.message("Converting {} to Kelvin...".format(output))
        a = 0.1
        b = 273.15
    elif flags['y'] and variable == 'prec':
        grass.message("Converting {} to meter per year...".format(output))
        a = 0.012
        b = 0
    elif flags['f']:
        grass.message("Converting {} to floating-point...".format(output))
        a = 1
        b = 0
    else:
        a = None
        b = None

    # convert unit and format
    if a or b:
        grass.use_temp_region()
        grass.run_command('g.region', rast=output)
        grass.mapcalc('$output=float($output*$a+$b)', a=a, b=b, output=output,
                      overwrite=True)
        grass.del_temp_region()
Example No. 4
# imports assumed by this snippet (not shown in the excerpt)
import sys
import copy
import grass.script as gscript
from grass.pygrass.modules import Module, ParallelModuleQueue

def main():

    #create a list for parallel mapcalc modules and a mapcalc module to act as template 
    mapcalc_list = []
    mapcalc = Module("r.mapcalc", overwrite=True, run_=False)
    
    #get number of tiles in each row and col from arguments
    tile_rows = int(sys.argv[1])
    tile_cols = int(sys.argv[2])

    #create queue for parallel processes
    queue = ParallelModuleQueue(nprocs=int(sys.argv[3]))
    
    #Use temporary region that will be reset after execution of this script
    gscript.use_temp_region()
    
    #Input raster (can be grass raster dataset or externally linked dataset such as tiff vrt etc.)
    input="input_raster"
    
    #Read raster boundaries and resolution into numeric variables
    info = gscript.raster_info(input)
    no = float(info['north'])
    so = float(info['south'])
    we = float(info['west'])
    ea = float(info['east'])
    ro = int(info['rows'])
    co = int(info['cols'])
    ewr = int(info['ewres'])
    nsr = int(info['nsres'])

    #Start mapcalc module for each tile
    k = 0
    for i in range(tile_rows):
        for j in range(tile_cols):
            #Set processing region to specific part of the raster (column+row)
            gscript.run_command('g.region',
                    n=so+(i+1)*ro//tile_rows*nsr,
                    s=so+i*ro//tile_rows*nsr,
                    e=we+(1+j)*co//tile_cols*ewr,
                    w=we+j*co//tile_cols*ewr,
                    rows=ro//tile_rows,
                    cols=co//tile_cols,
                    nsres=nsr,
                    ewres=ewr)
            #Copy the mapcalc template, give it a mapcalc expression and put it
            #into the parallel queue, where it runs when a process becomes available.
            new_mapcalc = copy.deepcopy(mapcalc)
            mapcalc_list.append(new_mapcalc)
            m = new_mapcalc(expression="test_pygrass_%i = %s * (%i+1)"%(k,input, k))
            queue.put(m)
            k+=1
    #wait for all mapcalc modules to have finished execution
    queue.wait()

    #print mapcalc returncodes to check that everything went as expected
    for mapcalc in mapcalc_list:
        print(mapcalc.popen.returncode)

    #delete temporary region to restore the region that was in use at the start of the script
    gscript.del_temp_region()    
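
A quick plain-Python check of the tile-boundary arithmetic used in the g.region call above; all numbers are made up for illustration:

# Illustrative values: 1000x1000 raster at 10 m resolution, 2x2 tiles
so = we = 0
ro = co = 1000
nsr = ewr = 10
tile_rows = tile_cols = 2
i = j = 1                                   # indices of the last tile
n = so + (i + 1) * ro // tile_rows * nsr    # reaches the raster top
s = so + i * ro // tile_rows * nsr
e = we + (1 + j) * co // tile_cols * ewr
w = we + j * co // tile_cols * ewr
print(n, s, e, w)                           # -> 10000 5000 10000 5000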
Example No. 5
def main():
    # start messenger
    msgr = Messenger()

    # Use temporary GRASS region
    grass.use_temp_region()

    # reads CLI options
    rast_n_file_name = options['friction']
    rast_dem_name = options['dem']
    rast_start_file_name = options['start_file']
    rast_bc_name = options['bc']
    rast_bcval_name = options['bcval']
    rast_user_name = options['user_inflow']

    # Load *.par file
    par = Par(msgr, options['par_file'])
    par.read()

    # Write DEM file
    par.write_dem(rast_dem_name, grass.overwrite())
    # set computational region to match DEM
    par.set_region_from_map(rast_dem_name)
    # Write friction
    par.write_n_map(rast_n_file_name, grass.overwrite())
    # Write start file
    par.write_start_h(rast_start_file_name, grass.overwrite())

    # boundary conditions
    bc = BoundaryConditions(msgr, sim_time=par.sim_time,
                            region=par.region)
    if par.bci_file:
        bci_full_path = os.path.join(par.directory, par.bci_file)
        bc.read_bci(bci_full_path)
    if par.bdy_file:
        bdy_full_path = os.path.join(par.directory, par.bdy_file)
        bc.read_bdy(bdy_full_path)
    # create STDS
    bc.create_stds(stds_name=rast_user_name, overwrite=grass.overwrite())
    bc.create_stds(rast_bcval_name, overwrite=grass.overwrite())
    # Write maps and register them in STDS
    bc.populate_user_flow_stds(rast_quser_name=rast_user_name,
                                overwrite=grass.overwrite())
    bc.populate_bc_stds(rast_bcval_name, grass.overwrite())
    # write Boundary condition type map
    bc.write_bctype(rast_bc_name, grass.overwrite())

    # Restore original region
    grass.del_temp_region()
    return 0
Example No. 6
def main():
    # temporary region
    gscript.use_temp_region()

    # set parameters
    overwrite = True
    tension = 25
    smooth = 5
    npmin = 300
    dmin = 0.5
    resolution = 10000

    # set region
    region = "dem@PERMANENT"

    # list scanned DEMs for experiment 1
    dems = gscript.list_grouped('rast', pattern='*dem_1')['data']

    # iterate through scanned DEMs
    for dem in dems:

        # check alignment
        gscript.run_command('r.region', map=dem, raster=region)

        # reinterpolate DEM from random points using regularized spline with tension
        gscript.run_command('g.region', raster=region, res=3)
        gscript.run_command('r.random', input=dem, npoints=resolution, vector=dem.replace("dem","points"), flags='bd', overwrite=overwrite)
        gscript.run_command('v.surf.rst', input=dem.replace("dem","points"), elevation=dem,  tension=tension, smooth=smooth, npmin=npmin, dmin=dmin, overwrite=overwrite)
        gscript.run_command('r.colors', map=dem, color="elevation")
        gscript.run_command('g.remove', type='vector', pattern='*points*', flags='f')

    # list scanned DEMs for experiment 2
    dems = gscript.list_grouped('rast', pattern='*dem_2')['reinterpolation']

    # iterate through scanned DEMs
    for dem in dems:

        # check alignment
        gscript.run_command('r.region', map=dem, raster=region)

        # reinterpolate DEM from random points using regularized spline with tension
        gscript.run_command('g.region', raster=region, res=3)
        gscript.run_command('r.random', input=dem, npoints=resolution, vector=dem.replace("dem","points"), flags='bd', overwrite=overwrite)
        gscript.run_command('v.surf.rst', input=dem.replace("dem","points"), elevation=dem,  tension=tension, smooth=smooth, npmin=npmin, dmin=dmin, overwrite=overwrite)
        gscript.run_command('r.colors', map=dem, color="elevation")
        gscript.run_command('g.remove', type='vector', pattern='*points*', flags='f')
Example No. 7
def patch_tiles(mt, out, vari, bc=None, mnth=None):
    """Set region to tiles, and run r.patch"""

    bc = (str(bc) if bc else '')
    mnth = (str(mnth) if mnth else '')
    grass.message("Patching the tiles for {}{}{} to {}"
                  .format(vari, bc, mnth, out))
    if len(mt) > 1:
        grass.use_temp_region()
        grass.run_command("g.region", raster=mt)
        grass.run_command("r.patch", input=mt, output=out)
        grass.run_command("g.remove", type="raster", name=mt, flags="f",
                          quiet=True)
        grass.del_temp_region()
    else:
        # simply rename if there is only one tile
        grass.run_command("g.rename", raster=[mt, out])
Example No. 8
def run_benchmark(resolution_list, runs, testdict, profile):
    regions = []
    for resolution in resolution_list:
        core.use_temp_region()
        core.run_command('g.region', e=50, w=-50, n=50, s=-50, res=resolution, flags='p')

        # Adjust the computational region for this process
        region = libgis.Cell_head()
        libraster.Rast_get_window(ctypes.byref(region))
        region.e = 50
        region.w = -50
        region.n = 50
        region.s = -50
        region.ew_res = resolution
        region.ns_res = resolution

        libgis.G_adjust_Cell_head(ctypes.byref(region), 0, 0)

        libraster.Rast_set_window(ctypes.byref(region))
        libgis.G_set_window(ctypes.byref(region))

        # Create two raster maps with random numbers
        core.mapcalc("test_a = rand(0, 100)", quite=True, overwrite=True)
        core.mapcalc("test_b = rand(0.0, 1.0)", quite=True, overwrite=True)
        result = collections.OrderedDict()
        result['res'] = resolution
        result['cols'] = region.cols
        result['rows'] = region.rows
        result['cells'] = region.rows * region.cols
        result['results'] = copy.deepcopy(testdict)
        for execmode, operation in result['results'].items():
            print(execmode)
            for oper, operdict in operation.items():
                operdict['time'], operdict['times'] = mytimer(operdict['func'],runs)
                if profile:
                    filename = '{}_{}_{}'.format(execmode, oper, profile)
                    cProfile.runctx(operdict['func'].__name__ + '()',
                                    globals(), locals(), filename = filename)
                print(('    {0}: {1: 40.6f}s'.format(oper, operdict['time'])))
                del(operdict['func'])

        regions.append(result)
        core.del_temp_region()

    return regions
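
`mytimer` is referenced but not shown in this excerpt; judging from the call site it returns a summary time plus the per-run times. A minimal stand-in under that assumption:

import time

def mytimer(func, runs=1):
    # Assumed contract: call `func` `runs` times, return (mean_seconds, per_run_seconds)
    times = []
    for _ in range(runs):
        start = time.time()
        func()
        times.append(time.time() - start)
    return sum(times) / runs, times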
Example No. 9
def main():
    global tmp_img, tmp_grad_abs, tmp_grad_rel

    os.environ['GRASS_OVERWRITE'] = '1'

    color_dir = os.path.join(os.environ['GISBASE'], "etc", "colors")
    output_dir = os.path.join(os.environ['GISBASE'], "docs", "html")

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    pid = os.getpid()
    tmp_grad_abs = "tmp_grad_abs_%d" % pid
    tmp_grad_rel = "tmp_grad_rel_%d" % pid
    tmp_img = grass.tempfile() + ".ppm"

    os.environ['GRASS_RENDER_WIDTH'] = '%d' % width
    os.environ['GRASS_RENDER_HEIGHT'] = '%d' % height
    os.environ['GRASS_RENDER_FRAME'] = '%f,%f,%f,%f' % (0,height,0,width)
    os.environ['GRASS_RENDER_FILE'] = tmp_img
    os.environ['GRASS_RENDER_TRUECOLOR'] = 'TRUE'
    os.environ['GRASS_RENDER_FILE_READ'] = 'FALSE'
    os.environ['GRASS_RENDER_FILE_MAPPED'] = 'FALSE'
    os.environ['GRASS_RENDER_TRANSPARENT'] = 'FALSE'
    os.environ['GRASS_RENDER_BACKGROUNDCOLOR'] = 'ffffff'
    os.environ['GRASS_RENDER_IMMEDIATE'] = 'cairo'

    for var in ['GRASS_RENDER_LINE_WIDTH', 'GRASS_RENDER_ANTIALIAS']:
        if var in os.environ:
            del os.environ[var]

    grass.use_temp_region()
    grass.run_command('g.region', rows = 100, cols = 100)

    grass.mapcalc("$grad = row()/1.0", grad = tmp_grad_rel, quiet = True)
    
    for table in os.listdir(color_dir):
        path = os.path.join(color_dir, table)
        grad = make_gradient(path)
        make_image(output_dir, table, grad)
    
    grass.mapcalc("$grad = row()", grad = tmp_grad_abs, quiet = True)
    for table in ['grey.eq', 'grey.log', 'random']:
        make_image(output_dir, table, tmp_grad_abs, True)
Example No. 10
def main():

    # process command options
    input = options['input']
    if not gs.find_file(input)['file']:
        gs.fatal(_("Raster map <%s> not found") % input)

    smooth = options['output']
    if gs.find_file(smooth)['file'] and not gs.overwrite():
        gs.fatal(_("Output map <%s> already exists") % smooth)

    sd = options['sd']
    try:
        sd = float(sd)
    except ValueError:
        if not gs.find_file(sd)['file']:
            gs.fatal(_("Raster map <%s> not found") % sd)

    alpha = float(options['alpha'])

    # set aside region for internal use
    gs.use_temp_region()

    # subset input if desired
    region = options.get('region')
    if region:
        if not gs.find_file(region)['file']:
            gs.fatal(_("Raster map <%s> not found") % region)
        gs.message("Setting region to %s" % region, flag='i')
        gs.run_command('g.region', rast=region, align=input)
    else:
        gs.message("Using existing GRASS region", flag='i')
    gs.debug('='*50)
    gs.debug('\n'.join(gs.parse_command('g.region', flags='p').keys()))
    gs.debug('='*50)

    multiscalesmooth(input, smooth, sd, alpha)

    # restore original region
    gs.del_temp_region()

    return None
Example No. 11
def CalculateTOAR(rastItems, output, radiance):
    raster = rastItems[0]
    absCalFactor = rastItems[1]
    theta = rastItems[2]
    earthSunDist = rastItems[3]
    eSun = rastItems[4]
    effectiveBandwidth = rastItems[5]

    grass.use_temp_region()
    grass.run_command('g.region', rast=raster)

    if radiance:
        calc = "$output = ($absCal * $input_rast) / $efb"
        grass.message("Calculating Top of Atmosphere Radiance for {}".format(raster))
        grass.mapcalc(calc, output=output, absCal=absCalFactor, input_rast=raster,
                      efb=effectiveBandwidth)

    else:
        calc = "$output = ((($absCal * $input_rast) / $efb) * $esd^2 * $pi) / ($eSun * cos($theta))"
        grass.message("Calculating Top of Atmosphere Reflectance for %s" % raster)
        grass.mapcalc(calc, output=output, absCal=absCalFactor, input_rast=raster,
                      esd=earthSunDist, pi=math.pi, eSun=eSun, theta=theta,
                      efb=effectiveBandwidth)
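
The reflectance expression above is the standard TOA formula rho = (pi * L * d^2) / (Esun * cos(theta)); a plain-Python check with illustrative numbers (note r.mapcalc's cos() expects degrees, while math.cos expects radians):

import math

L = 85.0                     # spectral radiance, W/m^2/sr/um (made up)
d = 1.0142                   # Earth-Sun distance in AU (made up)
esun = 1924.59               # exoatmospheric solar irradiance (made up)
theta_deg = 35.2             # solar zenith angle in degrees (made up)
rho = (math.pi * L * d ** 2) / (esun * math.cos(math.radians(theta_deg)))
print(round(rho, 4))         # unitless planetary reflectance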
Example No. 12
    def initialize(self):
        grass.use_temp_region()

        run('g.region', raster=self.inmap)

        reg = grass.region()
        for k, f in wind_keys.values():
            self.total[k] = f(reg[k])

        if self.cols > self.total['cols']:
            self.cols = self.total['cols']
        if self.rows > self.total['rows']:
            self.rows = self.total['rows']

        tempbase = grass.tempfile()
        grass.try_remove(tempbase)

        self.tempfile = tempbase + '.ppm'
        self.tempmap = 'tmp.d.rast.edit'

        atexit.register(self.cleanup)

        run('g.copy', raster=(self.inmap, self.outmap), overwrite=True)
        run('r.colors', map=self.outmap, rast=self.inmap)
Example No. 13
def main():
    global insert_sql, temporary_vect, stats_temp_file, content
    global raster, decimals, zone_map
    insert_sql = None
    temporary_vect = None
    stats_temp_file = None
    content = None
    raster = options['raster']
    decimals = int(options['decimals'])
    zone_map = options['zone_map']

    csvfile = options['csvfile'] if options['csvfile'] else []
    separator = gscript.separator(options['separator'])
    prefix = options['prefix'] if options['prefix'] else []
    classes_list = options['classes_list'].split(',') if options['classes_list'] else []
    vectormap = options['vectormap'] if options['vectormap'] else []
    statistics = options['statistics'].split(',')
    prop = 'proportion' in statistics
    mode = 'mode' in statistics

    if flags['c']:  # Check only if flag activated - can be a bottleneck on very large rasters
        # Check if input layers are CELL
        if gscript.parse_command('r.info', flags='g',
                                 map=raster)['datatype'] != 'CELL':
            gscript.fatal(_("The type of the input map 'raster' is not CELL. "
                            "Please use a raster with integer values"))
        if gscript.parse_command('r.info', flags='g',
                                 map=zone_map)['datatype'] != 'CELL':
            gscript.fatal(_("The type of the input map 'zone_map' is not CELL. "
                            "Please use a raster with integer values"))

    # Check that 'decimals' is positive and within a sensible range
    if decimals <= 0:
        gscript.fatal(_("The number of decimals should be positive"))
    if decimals > 100:
        gscript.fatal(_("The number of decimals should not be more than 100"))

    # Adjust region to the zone map if flag active
    if flags['r']:
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=zone_map)

    # R.STATS
    tmpfile = gscript.tempfile()
    try:
        if flags['n']:
            gscript.run_command(
                'r.stats',
                overwrite=True,
                flags='c',
                input='%s,%s' % (zone_map, raster),
                output=tmpfile,
                separator=separator)  # Consider null values in R.STATS
        else:
            gscript.run_command(
                'r.stats',
                overwrite=True,
                flags='cn',
                input='%s,%s' % (zone_map, raster),
                output=tmpfile,
                separator=separator)  # Do not consider null values in R.STATS
        gscript.message(_("r.stats command finished..."))
    except Exception:
        gscript.fatal(_("The execution of r.stats failed"))

    # COMPUTE STATISTICS
    # Open csv file and create a csv reader
    rstatsfile = open(tmpfile, 'r')
    reader = csv.reader(rstatsfile, delimiter=separator)
    # Total pixels per category per zone
    totals_dict = {}
    for row in reader:
        if row[0] not in totals_dict:  # Current zone ID not yet in the dictionary
            totals_dict[row[0]] = {}  # Declare a new nested dictionary for the current zone ID
        # Store the pixel count per class; filtering on 'classes_list'
        # (flag -l) is applied later when proportions are computed
        totals_dict[row[0]][row[1]] = int(row[2])
    # Delete key '*' in 'totals_dict' that can appear if there are null values in the zone raster
    if '*' in totals_dict:
        del totals_dict['*']
    # Close file
    rstatsfile.close()
    # Get list of ID
    id_list = [ID for ID in totals_dict]
    # Mode
    if mode:
        modalclass_dict = {}
        for ID in id_list:
            # The trick was found here: https://stackoverflow.com/a/268285/8013239
            modal_class = max(iter(totals_dict[ID].items()),
                              key=operator.itemgetter(1))[0]
            if modal_class == '*':  # If the mode is NULL values
                modalclass_dict[ID] = 'NULL'
            else:
                modalclass_dict[ID] = modal_class
    # Class proportions
    if prop:
        # Get list of categories to output
        if classes_list:  # If list of classes provided by user
            # Cast to string to ensure a consistent key format
            class_dict = {str(int(a)): '' for a in classes_list}
        else:
            class_dict = {}
        # Proportion of each category per zone
        proportion_dict = {}
        for ID in id_list:
            proportion_dict[ID] = {}
            for cl in totals_dict[ID]:
                if flags['l'] and cl not in classes_list:  # with flag -l, output will contain only classes from 'classes_list'
                    continue
                if flags['p']:
                    prop_value = float(totals_dict[ID][cl]) / sum(
                        totals_dict[ID].values()) * 100
                else:
                    prop_value = float(totals_dict[ID][cl]) / sum(
                        totals_dict[ID].values())
                proportion_dict[ID][cl] = '{:.{}f}'.format(
                    prop_value, decimals)
                if cl == '*':
                    class_dict['NULL'] = ''
                else:
                    class_dict[cl] = ''
        # Fill class not met in the raster with zero
        for ID in proportion_dict:
            for cl in class_dict:
                if cl not in proportion_dict[ID].keys():
                    proportion_dict[ID][cl] = '{:.{}f}'.format(0, decimals)
        # Get list of class sorted by value (arithmetic ordering)
        if 'NULL' in class_dict.keys():
            class_list = [int(k) for k in class_dict.keys() if k != 'NULL']
            class_list.sort()
            class_list.append('NULL')
        else:
            class_list = [int(k) for k in class_dict.keys()]
            class_list.sort()
    gscript.verbose(_("Statistics computed..."))
    # Set 'totals_dict' to None to free memory
    totals_dict = None
    # OUTPUT CONTENT
    # Header
    header = [
        'cat',
    ]
    if mode:
        if prefix:
            header.append('%s_mode' % prefix)
        else:
            header.append('mode')
    if prop:
        if prefix:
            for cl in class_list:
                header.append('%s_prop_%s' % (prefix, cl))
        else:
            for cl in class_list:
                header.append('prop_%s' % cl)
    # Values
    value_dict = {}
    for ID in id_list:
        value_dict[ID] = []
        value_dict[ID].append(ID)
        if mode:
            value_dict[ID].append(modalclass_dict[ID])
        if prop:
            for cl in class_list:
                value_dict[ID].append(proportion_dict[ID]['%s' % cl])
    # WRITE OUTPUT
    if csvfile:
        with open(csvfile, 'w', newline='') as outfile:
            writer = csv.writer(outfile, delimiter=separator)
            writer.writerow(header)
            writer.writerows(value_dict.values())
    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = 'rzonalclasses_tmp_vect_%d' % os.getpid()
        gscript.run_command('r.to.vect',
                            input_=zone_map,
                            output=temporary_vect,
                            type_='area',
                            flags='vt',
                            overwrite=True,
                            quiet=True)
        insert_sql = gscript.tempfile()
        with open(insert_sql, 'w', newline='') as fsql:
            fsql.write('BEGIN TRANSACTION;\n')
            if gscript.db_table_exist(temporary_vect):
                if gscript.overwrite():
                    fsql.write('DROP TABLE %s;' % temporary_vect)
                else:
                    gscript.fatal(
                        _("Table %s already exists. Use --o to overwrite") %
                        temporary_vect)
            create_statement = 'CREATE TABLE %s (cat int PRIMARY KEY);\n' % temporary_vect
            fsql.write(create_statement)
            for col in header[1:]:
                if col.split('_')[-1] == 'mode':  # Mode column should be integer
                    addcol_statement = 'ALTER TABLE %s ADD COLUMN %s integer;\n' \
                        % (temporary_vect, col)
                else:  # Proportion columns should be double precision
                    addcol_statement = 'ALTER TABLE %s ADD COLUMN %s double precision;\n' \
                        % (temporary_vect, col)
                fsql.write(addcol_statement)
            for key in value_dict:
                insert_statement = 'INSERT INTO %s VALUES (%s);\n' % (
                    temporary_vect, ','.join(value_dict[key]))
                fsql.write(insert_statement)
            fsql.write('END TRANSACTION;')
        gscript.run_command('db.execute', input=insert_sql, quiet=True)
        gscript.run_command('v.db.connect',
                            map_=temporary_vect,
                            table=temporary_vect,
                            quiet=True)
        gscript.run_command('g.copy',
                            vector='%s,%s' % (temporary_vect, vectormap),
                            quiet=True)
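
Detached from GRASS, the per-zone proportion step above reduces to normalizing category counts; a minimal sketch with made-up counts:

totals = {'1': {'10': 30, '20': 70}, '2': {'10': 5, '20': 15}}  # zone -> class -> pixels
decimals = 2
proportions = {
    zone: {cl: '{:.{}f}'.format(count / sum(counts.values()), decimals)
           for cl, count in counts.items()}
    for zone, counts in totals.items()
}
print(proportions['1']['20'])   # -> 0.70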
Example No. 14
def main():

    global acq_time, esd

    """1st, get input, output, options and flags"""

    spectral_bands = options['band'].split(',')
    outputsuffix = options['outputsuffix']
    utc = options['utc']
    doy = options['doy']
    sea = options['sea']

    radiance = flags['r']
    if radiance and outputsuffix == 'toar':
        outputsuffix = 'rad'
        g.message("Output-suffix set to %s" % outputsuffix)

    keep_region = flags['k']
    info = flags['i']

    # -----------------------------------------------------------------------
    # Equations
    # -----------------------------------------------------------------------

    if info:
        # conversion to Radiance based on (1)
        msg = "|i Spectral Radiance = K * DN / Effective Bandwidth | " \
              "Reflectance = ( Pi * Radiance * ESD^2 ) / BAND_Esun * cos(SZA)"
        g.message(msg)

    # -----------------------------------------------------------------------
    # List images and their properties
    # -----------------------------------------------------------------------

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?

#    imglst = [pan]
#    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in spectral_bands:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    # -----------------------------------------------------------------------
    # Temporary Region and Files
    # -----------------------------------------------------------------------

    if not keep_region:
        grass.use_temp_region()  # to safely modify the region
    tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
    tmp = "tmp." + grass.basename(tmpfile)  # use its basename

    # -----------------------------------------------------------------------
    # Global Metadata: Earth-Sun distance, Sun Zenith Angle
    # -----------------------------------------------------------------------

    # Earth-Sun distance
    if doy:
        g.message("|! Using Day of Year to calculate Earth-Sun distance.")
        esd = jd_to_esd(int(doy))

    elif (not doy) and utc:
        acq_utc = AcquisitionTime(utc)  # will hold esd (earth-sun distance)
        esd = acq_utc.esd

    else:
        grass.fatal(_("Either the UTC string or "
                      "the Day-of-Year (doy) are required!"))

    sza = 90 - float(sea)  # Sun Zenith Angle based on Sun Elevation Angle

    # -----------------------------------------------------------------------
    # Loop processing over all bands
    # -----------------------------------------------------------------------
    for band in spectral_bands:

        global tmp_rad

        # -------------------------------------------------------------------
        # Match region to the current band (unless -k keeps the region)
        # -------------------------------------------------------------------

        if not keep_region:
            run('g.region', rast=band)   # ## FixMe?
            msg = "\n|! Region matching the %s spectral band" % band
            g.message(msg)

        elif keep_region:
            msg = "|! Operating on current region"
            g.message(msg)

        # -------------------------------------------------------------------
        # Band dependent metadata for Spectral Radiance
        # -------------------------------------------------------------------

        g.message("\n|* Processing the %s band" % band, flags='i')

        # Why is this necessary?  Any function to remove the mapset's name?
        if '@' in band:
            band = (band.split('@')[0])

        # get absolute calibration factor
        acf = float(CF_BW_ESUN[band][2])
        acf_msg = "K=" + str(acf)

        # effective bandwidth
        bw = float(CF_BW_ESUN[band][0])

        # -------------------------------------------------------------------
        # Converting to Spectral Radiance
        # -------------------------------------------------------------------

        msg = "\n|> Converting to Spectral Radiance " \
              "| Conversion Factor %s, Bandwidth=%.3f" % (acf_msg, bw)
        g.message(msg)

        # convert
        tmp_rad = "%s.Radiance" % tmp  # Temporary Map
        rad = "%s = %f * %s / %f" \
            % (tmp_rad, acf, band, bw)  # Attention: 32-bit calculations required
        grass.mapcalc(rad, overwrite=True)

        # strings for metadata
        history_rad = rad
        history_rad += "Conversion Factor=%f; Effective Bandwidth=%.3f" \
            % (acf, bw)
        title_rad = ""
        description_rad = "Top-of-Atmosphere %s band spectral Radiance " \
                          "[W/m^2/sr/μm]" % band
        units_rad = "W / sq.m. / μm / ster"

        if not radiance:

            # ---------------------------------------------------------------
            # Converting to Top-of-Atmosphere Reflectance
            # ---------------------------------------------------------------

            global tmp_toar

            msg = "\n|> Converting to Top-of-Atmosphere Reflectance"
            g.message(msg)

            esun = float(CF_BW_ESUN[band][1])
            msg = "   %s band mean solar exoatmospheric irradiance=%.2f" \
                % (band, esun)
            g.message(msg)

            # convert
            tmp_toar = "%s.Reflectance" % tmp  # Spectral Reflectance
            toar = "%s = %f * %s * %f^2 / %f * cos(%f)" \
                % (tmp_toar, math.pi, tmp_rad, esd, esun, sza)
            grass.mapcalc(toar, overwrite=True)

            # report range? Using a flag and skip actual conversion?
            # todo?

            # strings for metadata
            title_toar = "%s band (Top of Atmosphere Reflectance)" % band
            description_toar = "Top of Atmosphere %s band spectral Reflectance" \
                % band
            units_toar = "Unitless planetary reflectance"
            history_toar = "K=%f; Bandwidth=%.1f; ESD=%f; Esun=%.2f; SZA=%.1f" \
                % (acf, bw, esd, esun, sza)

        if not radiance:

            # history entry
            run("r.support", map=tmp_toar, title=title_toar,
                units=units_toar, description=description_toar,
                source1=source1_toar, source2=source2_toar,
                history=history_toar)

            # add suffix to basename & rename end product
            toar_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_toar, toar_name))

        else:

            # history entry
            run("r.support", map=tmp_rad,
                title=title_rad, units=units_rad, description=description_rad,
                source1=source1_rad, source2=source2_rad, history=history_rad)

            # add suffix to basename & rename end product
            rad_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_rad, rad_name))

    # visualising-related information
    if not keep_region:
        grass.del_temp_region()  # restoring previous region settings
        g.message("\n|! Region's resolution restored!")
    g.message("\n>>> Hint: rebalancing colors "
              "(i.colors.enhance) may improve appearance of RGB composites!",
              flags='i')
Example No. 15
def main(parameter_file):
    """
    It performs the following actions:
    1. Gets the parameters, required for simulation, from parameter.yaml file. 
    2. calls DEM_creator() --> for generating DEM grid
    3. Erosion modelling 
    4. Flow modelling
    5. Landcover class allocation using decision tree
    6. Geometric feature development
    7. road mapping
    """
    time1 = time.time()
    #*****************parameter handling *************************************
    # Get the parameters from parameter.yaml file
    yaml_file  = open(parameter_file, 'r')
    stream     = yaml.safe_load(yaml_file)
    resolution = stream['resolution']
    H          = stream['H']
    H_wt       =  stream['H_wt']
    seed       = stream['seed']
    sigma      = stream['sigma']
    elev_range = stream['elev_range']
    max_level  = stream['max_level']
    DEMcreator_option = stream['DEMcreator_option']
    output_dir = stream['output_dir']
    river_drop = stream['river_drop']
    Erosion_permission = stream['Erosion_permission']
    decision_tree = stream['decision_tree']
    counter    = stream['counter']
    elev_filename      = stream['training_elev_filename']
    landcover_filename = stream['training_landcover_filename']
    river_filename     = stream['training_river_filename']
    no_of_veg_class    = stream['no_of_veg_class']
    min_area     = stream['min_area']
    max_area     = stream['max_area']
    aspect_ratio = stream['aspect_ratio']
    agri_area_limit = stream['agri_area_limit']
    yaml_file.close() 

    #**************************print statistics***********************************
    print ("Running simulation with follwing parameters")
    print ("H: %s" % H)
    print ("H_wt: %s" % H_wt)
    print ("seed: %s" % seed)
    print ("sigma: %f" % sigma) 
    print ("elev_range: %s" % elev_range)
    print ("max_level: %s" % max_level)
    print ("DEMcreator_option: %s" % DEMcreator_option)
    print ("output_dir: %s" % output_dir)
    print ("River drop: %d" % river_drop)
    print ("counter: %d" % counter)
    print ("no of vegetation class %d" % no_of_veg_class)
    print ("min area: %f" % min_area)
    print ("max area: %f" % max_area)
    print ("aspect ratio: %f" % aspect_ratio)
    print ("agricultural area limit: %f" % agri_area_limit)
    gradient = 0   #fixed for now TODO incorporate gradient in next version
    #*****************************DEM generation************************************
    # Generate DEM using FM2D/SS algorithm by calling DEM_creator(args...) function
    DEM_Result = DEM_generator.DEM_creator(H, H_wt, seed, elev_range,sigma,gradient,max_level, DEMcreator_option)
    pathname = os.path.dirname(sys.argv[0])
    fullpath = os.path.abspath(pathname)
    filename = fullpath + "/" + output_dir
    if not os.path.exists(filename):
        os.makedirs(filename)          # create output directory if it doesn't exist 
    DEM_arr = DEM_Result[0]
    DEM_Result = 0 #free space
    #****************************region adjustment***********************************
    # We create a temporary region that is only valid in this python session
    g.use_temp_region()
    rows = DEM_arr.shape[0]
    cols = DEM_arr.shape[1]
    n = 4928050 #some arbitrary value
    s = n - resolution*rows
    e = 609000  #some arbitrary value
    w = e - resolution*cols
    g.run_command('g.region', flags = 'ap', n = n ,s = s, e = e, w = w,res = resolution, rows = rows ,cols = cols)   
    #*************************Flow accumulation with Erosion modelling****************************
    filename = fullpath + "/ascii_files"
    if not os.path.exists(filename):
        os.makedirs(filename)
    if not Erosion_permission:
        counter = 0
        DEM_arr_to_ascii(DEM_arr,resolution)
        g.run_command('r.in.ascii', overwrite = True, flags='i', input = fullpath +'/'+'ascii_files' +'/DEM.asc', output='test_DEM')
        #Flow computation for massive grids (float version) 
        g.run_command('r.terraflow', overwrite = True, elevation = 'test_DEM@user1', filled = 'flooded_DEM',\
          direction = 'DEM_flow_direction',swatershed = 'DEM_sink_watershed', accumulation = 'DEM_flow_accum', tci = 'DEM_tci')
        g.run_command('r.out.ascii',flags='h',input='DEM_flow_accum@user1',output=fullpath +'/ascii_files'+ '/DEM_flow_accum',null='0')
        f = open(fullpath +'/ascii_files'+ '/DEM_flow_accum', 'r')
        Flow_accum_arr = numpy.loadtxt(f)
        f.close()
    for iteration in range(0,counter):
        DEM_arr_to_ascii(DEM_arr,resolution)
        #Input the DEM ascii file into grass
        g.run_command('r.in.ascii', overwrite = True, flags='i', input = fullpath +'/'+'ascii_files' +'/DEM.asc', output='test_DEM')
        #Flow computation for massive grids (float version) 
        g.run_command('r.terraflow', overwrite = True, elevation = 'test_DEM@user1', filled = 'flooded_DEM',\
          direction = 'DEM_flow_direction',swatershed = 'DEM_sink_watershed', accumulation = 'DEM_flow_accum', tci = 'DEM_tci')
        g.run_command('r.out.ascii',flags='h',input='DEM_flow_accum@user1',output=fullpath +'/ascii_files'+ '/DEM_flow_accum',null='0')
        f = open(fullpath +'/ascii_files'+ '/DEM_flow_accum', 'r')
        Flow_accum_arr = numpy.loadtxt(f)
        f.close()
        #call erosion modelling function
        DEM_arr = Erosion(Flow_accum_arr, DEM_arr, river_drop)
    output=fullpath +'/'+output_dir+ '/DEM.asc'
    arr_to_ascii(DEM_arr,output)
    output=fullpath +'/'+output_dir+ '/flow_accum.asc'
    arr_to_ascii(Flow_accum_arr,output)
    #****************************landcover allocation using decision tree********************************
    # Get slope and Aspect using grass functions
    g.run_command('r.slope.aspect',overwrite=True,elevation='test_DEM@user1',slope='DEM_Slope',aspect='DEM_Aspect')
    g.run_command('r.out.ascii',flags='h',input='DEM_Slope@user1',output=fullpath + '/ascii_files'+'/DEM_Slope',null='0')
    f = open('ascii_files/DEM_Slope', 'r')
    DEM_Slope_arr = numpy.loadtxt(f)
    f.close()
    g.run_command('r.out.ascii',flags='h',input='DEM_Aspect@user1',output=fullpath +'/ascii_files'+'/DEM_Aspect',null='0')
    f = open('ascii_files/DEM_Aspect', 'r')
    DEM_Aspect_arr = numpy.loadtxt(f)
    f.close()
    Distance_arr = dist.CityBlock(Flow_accum_arr,flag = 0)
    # Normalize the elevation values to use decision tree
    minimum_elev = numpy.min(DEM_arr)
    factor = numpy.max(DEM_arr) - minimum_elev
    Elev_arr = (DEM_arr[:,:] - minimum_elev)*100/factor
    # Create various list to hold test data
    Elevation = []
    Slope = []
    RiverDistance = []
    Aspect = []
    # Append the data into respective list
    x_len = DEM_arr.shape[0]
    y_len = DEM_arr.shape[1]
    for i in range(0,x_len):
        for j in range(0,y_len):
            Elevation.append(int(Elev_arr[i][j]))
            Slope.append(int(DEM_Slope_arr[i][j]))
            RiverDistance.append(int(Distance_arr[i][j]))
            Aspect.append(int(DEM_Aspect_arr[i][j]))
    Elev_arr = 0 #free space
    DEM_slope_arr = 0 #free space
    DEM_Aspect_arr = 0 #free space
    Distance_arr = 0 #free space
    # Create dictionary to apply R's predict command on it 
    Test_data = {'Elevation':Elevation ,'Slope':Slope ,'RiverDistance':RiverDistance,'Aspect':Aspect}
    #free spaces
    Elevation = []
    Slope = []
    RiverDistance = []
    Aspect = []
    # create decision tree from training data
    fit = DecisionTree(no_of_veg_class,elev_filename, landcover_filename, river_filename,decision_tree)
    g.run_command('g.region', flags = 'ap', n = n ,s = s, e = e, w = w,res = resolution, rows = rows ,cols = cols)
    # Allocate vegetation array for holding predicted landcover values
    Veg_arr = numpy.zeros(DEM_arr.shape, dtype = "uint8")
    rpy.r.library("rpart")
    rpy.set_default_mode(rpy.BASIC_CONVERSION)
    # values contain probability values of the predicted landcover classes
    values = rpy.r.predict(fit,newdata=Test_data,method="class")
    Test_data = 0 #free space
    x_len = Veg_arr.shape[0]
    y_len = Veg_arr.shape[1]
    for i in range(0,x_len):
        for j in range(0,y_len):
            # Get the class having max probability for each test data point
            a = ndimage.maximum_position(values[i*y_len + j])
            Veg_arr[i,j] = (a[0]) # Assign them some value to facilitate visualization
    values = 0 #free space
    filename=fullpath +'/'+output_dir+ "/landcover.asc"
    arr_to_ascii(Veg_arr,filename)
    # Allocate and initialize Suitability map
    Suitability = numpy.zeros( DEM_arr.shape, dtype = "uint8")
    for i in range(0,DEM_arr.shape[0]):
        for j in range(0,DEM_arr.shape[1]):
            #TODO can use mask here, needs to be generalised
            if Veg_arr[i][j] == 0: # Ignore
                Suitability[i][j] = 0 
            elif Veg_arr[i][j] == 25: # Deciduous woodland
                Suitability[i][j] = 60 
            elif Veg_arr[i][j] == 50: # Coniferous woodland
                Suitability[i][j] = 55 
            elif Veg_arr[i][j] == 75: # Agriculture including pasture
                Suitability[i][j] = 98 
            elif Veg_arr[i][j] == 100: # Semi-natural grassland
                Suitability[i][j] = 90 
            elif Veg_arr[i][j] == 125: # Bog and swamp
                Suitability[i][j] = 50
            elif Veg_arr[i][j] == 150: # Heath
                Suitability[i][j] = 75 
            elif Veg_arr[i][j] == 175: # Montane habitat
                Suitability[i][j] = 20 
            elif Veg_arr[i][j] == 200: # Rock and quarry
                Suitability[i][j] = 30 
            elif Veg_arr[i][j] == 225: # Urban
                Suitability[i][j] = 80
    Display_fields = Geometry.GeometricFeature(Suitability, min_area,max_area ,aspect_ratio ,agri_area_limit)
    f = open('fields_arr', 'w')
    numpy.save(f,Display_fields)
    f.close()
    pylab.imsave(output_dir+"/fields.png",Display_fields)
    time2 = time.time()
    print "time taken", time2-time1
    shutil.rmtree(fullpath+'/ascii_files')
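
The stream[...] reads above imply a parameter file with the following keys; a sketch with placeholder values (none of these numbers come from the original):

import yaml

params = yaml.safe_load("""
resolution: 30
H: [0.7, 0.8]
H_wt: [0.5, 0.5]
seed: 42
sigma: 1.0
elev_range: [0, 500]
max_level: 9
DEMcreator_option: fm2d
output_dir: output
river_drop: 5
Erosion_permission: true
decision_tree: true
counter: 3
training_elev_filename: elev.txt
training_landcover_filename: landcover.txt
training_river_filename: river.txt
no_of_veg_class: 10
min_area: 1.5
max_area: 4.0
aspect_ratio: 1.6
agri_area_limit: 0.3
""")
print(params['resolution'])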
Example No. 16
def main():
    import matplotlib  # required by windows

    matplotlib.use("wxAGG")  # required by windows
    import matplotlib.pyplot as plt

    input = options["input"]
    output = None
    if options["csv_output"]:
        output = options["csv_output"]
    plot_output = None
    if options["plot_output"]:
        plot_output = options["plot_output"]
    min_cells = False
    if options["min_cells"]:
        min_cells = int(options["min_cells"])
    target_res = None
    if options["max_size"]:
        target_res = float(options["max_size"])
    step = float(options["step"])

    global temp_resamp_map, temp_variance_map
    temp_resamp_map = "temp_resamp_map_%d" % os.getpid()
    temp_variance_map = "temp_variance_map_%d" % os.getpid()
    resolutions = []
    variances = []

    region = gscript.parse_command("g.region", flags="g")
    cells = int(region["cells"])
    res = (float(region["nsres"]) + float(region["ewres"])) / 2
    north = float(region["n"])
    south = float(region["s"])
    west = float(region["w"])
    east = float(region["e"])

    if res % 1 == 0 and step % 1 == 0:
        template_string = "%d,%f\n"
    else:
        template_string = "%f,%f\n"

    if min_cells:
        target_res_cells = int(
            sqrt(((east - west) * (north - south)) / min_cells))
        if target_res > target_res_cells:
            target_res = target_res_cells
            gscript.message(
                _("Max resolution leads to fewer cells than defined by 'min_cells' (%d)."
                  % min_cells))
            gscript.message(_("Max resolution reduced to %d" % target_res))

    nb_iterations = (target_res - res) / step
    if nb_iterations < 3:
        message = _("Less than 3 iterations. Cannot determine maxima.\n")
        message += _("Please increase max_size or reduce min_cells.")
        gscript.fatal(_(message))

    gscript.use_temp_region()

    gscript.message(_("Calculating variance at different resolutions"))
    while res <= target_res:
        gscript.percent(res, target_res, step)
        gscript.run_command(
            "r.resamp.stats",
            input=input,
            output=temp_resamp_map,
            method="average",
            quiet=True,
            overwrite=True,
        )
        gscript.run_command(
            "r.neighbors",
            input=temp_resamp_map,
            method="variance",
            output=temp_variance_map,
            quiet=True,
            overwrite=True,
        )
        varianceinfo = gscript.parse_command("r.univar",
                                             map_=temp_variance_map,
                                             flags="g",
                                             quiet=True)
        resolutions.append(res)
        variances.append(float(varianceinfo["mean"]))
        res += step
        region = gscript.parse_command("g.region",
                                       res=res,
                                       n=north,
                                       s=south,
                                       w=west,
                                       e=east,
                                       flags="ag")
        cells = int(region["cells"])

    indices, differences = FindMaxima(variances)
    max_resolutions = [resolutions[x] for x in indices]
    gscript.message(_("resolution,min_diff"))
    for i in range(len(max_resolutions)):
        print("%g,%g" % (max_resolutions[i], differences[i]))

    if output:
        header = "resolution,variance\n"
        of = open(output, "w")
        of.write(header)
        for i in range(len(resolutions)):
            output_string = template_string % (resolutions[i], variances[i])
            of.write(output_string)
        of.close()

    if plot_output:
        plt.plot(resolutions, variances)
        plt.xlabel("Resolution")
        plt.ylabel("Variance")
        plt.grid(True)
        if plot_output == "-":
            plt.show()
        else:
            plt.savefig(plot_output)
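
`FindMaxima` is not shown in this excerpt; judging from the call site it returns indices of local maxima plus a difference value per maximum. A minimal stand-in under that assumption:

def FindMaxima(values):
    # Assumed contract: indices of strict local maxima in `values`, plus the
    # smaller rise of each maximum relative to its two neighbors
    indices, differences = [], []
    for i in range(1, len(values) - 1):
        if values[i] > values[i - 1] and values[i] > values[i + 1]:
            indices.append(i)
            differences.append(min(values[i] - values[i - 1],
                                   values[i] - values[i + 1]))
    return indices, differences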
Example No. 17
    def erosion_deposition(self):
        """a small-scale, process-based landscape evolution model using simulated net erosion and deposition to carve a DEM"""

        # assign variables
        slope = 'slope'
        aspect = 'aspect'
        dx = 'dx'
        dy = 'dy'
        rain = 'rain'
        depth = 'depth'
        dc = 'dc'
        tc = 'tc'
        tau = 'tau'
        rho = 'rho'
        erdep = 'erdep'
        flux = 'flux'
        erosion_deposition = 'erosion_deposition'
        evolving_dem = 'evolving_dem'

        # parse time
        year=int(self.start[:4])
        month=int(self.start[5:7])
        day=int(self.start[8:10])
        hours=int(self.start[11:13])
        minutes=int(self.start[14:16])
        seconds=int(self.start[17:19])
        time = datetime.datetime(year,month,day,hours,minutes,seconds)
        
        # advance time
        time = time + datetime.timedelta(minutes = self.rain_interval)
        time = time.isoformat(" ")

        # timestamp
        evolved_dem='dem_'+time.replace(" ","_").replace("-","_").replace(":","_")

        # set temporary region
        gscript.use_temp_region()

        # compute slope, aspect, and partial derivatives
        gscript.run_command('r.slope.aspect', elevation=self.dem, slope=slope, aspect=aspect, dx=dx, dy=dy, overwrite=True)

#        # compute the slope and aspect
#        gscript.run_command('r.param.scale', input=self.dem, output=slope, size=search_size, method="slope", overwrite=True)
#        gscript.run_command('r.param.scale', input=self.dem, output=aspect, size=search_size, method="aspect", overwrite=True)

#        # compute the partial derivatives from the slope and aspect
#        # dz/dx = tan(slope)*cos(aspect)
#        gscript.run_command('r.mapcalc', expression="{dx} = tan({slope}* 0.01745)*cos((({aspect}*(-1))+450)*0.01745)".format(aspect=aspect, slope=slope, dx=dx), overwrite=True)
#        # dz/dy = tan(slope)*sin(aspect)
#        gscript.run_command('r.mapcalc', expression="{dy} = tan({slope}* 0.01745)*sin((({aspect}*(-1))+450)*0.01745)".format(aspect=aspect, slope=slope, dy=dy), overwrite=True)

        # crop temporary region to trim edge effects of moving window computations
        info=gscript.parse_command('g.region', flags='g')
        n=float(info.n)-float(info.nsres)
        s=float(info.s)+float(info.nsres)
        e=float(info.e)-float(info.ewres)
        w=float(info.w)+float(info.ewres)
        gscript.run_command('g.region', n=n, s=s, e=e, w=w)

        # hydrology parameters
        gscript.run_command('r.mapcalc', expression="{rain} = {rain_intensity}*{runoff}".format(rain=rain, rain_intensity=self.rain_intensity,runoff=self.runoff), overwrite=True)

        # hydrologic simulation
        gscript.run_command('r.sim.water', elevation=self.dem, dx=dx, dy=dy, rain=rain, man_value=self.mannings, depth=depth, niterations=self.rain_interval, nwalkers=self.walkers, overwrite=True)

        # erosion parameters
        gscript.run_command('r.mapcalc', expression="{dc} = {detachment}".format(dc=dc, detachment=self.detachment), overwrite=True)
        gscript.run_command('r.mapcalc', expression="{tc} = {transport}".format(tc=tc, transport=self.transport), overwrite=True)
        gscript.run_command('r.mapcalc', expression="{tau} = {shearstress}".format(tau=tau, shearstress=self.shearstress), overwrite=True)
        gscript.run_command('r.mapcalc', expression="{rho} = {density}*1000".format(rho=rho, density=self.density), overwrite=True) # convert g/cm^3 to kg/m^3

        # erosion-deposition simulation
        gscript.run_command('r.sim.sediment', elevation=self.dem, water_depth=depth, dx=dx, dy=dy, detachment_coeff=dc, transport_coeff=tc, shear_stress=tau, man_value=self.mannings, erosion_deposition=erdep, sediment_flux=flux, niterations=self.rain_interval, nwalkers=self.walkers, overwrite=True)

        # filter outliers
        gscript.run_command('r.mapcalc', expression="{erosion_deposition} = if({erdep}<{erdepmin},{erdepmin},if({erdep}>{erdepmax},{erdepmax},{erdep}))".format(erosion_deposition=erosion_deposition, erdep=erdep, erdepmin=self.erdepmin, erdepmax=self.erdepmax), overwrite=True)
        gscript.run_command('r.colors', map=erosion_deposition, raster=erdep)

        # evolve landscape
        """change in elevation (m) = change in time (s) * net erosion-deposition (kg/m^2s) / sediment mass density (kg/m^3)"""
        gscript.run_command('r.mapcalc', expression="{evolving_dem} = {dem}-({rain_interval}*60*{erosion_deposition}/{rho})".format(evolving_dem=evolving_dem, dem=self.dem, rain_interval=self.rain_interval, erosion_deposition=erosion_deposition, rho=rho), overwrite=True)

        # reset region
        n=float(info.n)
        s=float(info.s)
        e=float(info.e)
        w=float(info.w)
        gscript.run_command('g.region', n=n, s=s, e=e, w=w)

        # rebuild edges
        gscript.run_command('r.mapcalc', expression="{evolved_dem} = if(isnull({evolving_dem}),{dem},{evolving_dem})".format(evolved_dem=evolved_dem, evolving_dem=evolving_dem, dem=self.dem), overwrite=True)
        gscript.run_command('r.colors', map=evolved_dem, flags='e', color='elevation')

        # remove temporary maps
        gscript.run_command('g.remove', type='raster', name=['rain', 'evolving_dem', 'dc', 'tc', 'tau', 'rho', 'dx', 'dy'], flags='f')

        return evolved_dem, time
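
A quick check of the elevation-change formula quoted in the method above (dz = dt * erdep / rho), with illustrative values:

rain_interval = 10       # minutes (made up)
erdep = -0.001           # net erosion-deposition, kg/(m^2*s) (made up)
rho = 1.4 * 1000         # sediment mass density, kg/m^3 (1.4 g/cm^3)
dz = rain_interval * 60 * erdep / rho
print(dz)                # about -0.00043 m for this interval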
Example No. 18
def main():

    global acq_time, esd
    """1st, get input, output, options and flags"""

    spectral_bands = options['band'].split(',')
    outputsuffix = options['outputsuffix']
    utc = options['utc']
    doy = options['doy']
    sea = options['sea']

    radiance = flags['r']
    keep_region = flags['k']

#    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
#    imglst = [spectral_bands]
#    images = {}
#    for img in imglst:  # Retrieving Image Info
#        images[img] = Info(img, mapset)
#        images[img].read()

    # -----------------------------------------------------------------------
    # Temporary Region and Files
    # -----------------------------------------------------------------------

    if not keep_region:
        grass.use_temp_region()  # to safely modify the region
    tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
    tmp = "tmp." + grass.basename(tmpfile)  # use its basename

    # -----------------------------------------------------------------------
    # Global Metadata: Earth-Sun distance, Sun Zenith Angle
    # -----------------------------------------------------------------------

    # Earth-Sun distance
    if doy:
        esd = jd_to_esd(int(doy))

    elif utc:
        acq_utc = AcquisitionTime(utc)  # will hold esd (earth-sun distance)
        acq_dat = datetime(acq_utc.year, acq_utc.month, acq_utc.day)
        esd = acq_utc.esd

    else:
        grass.fatal(_("Either the UTC string or "
                      "the Day-of-Year (doy) are required!"))

    sza = 90 - float(sea)  # Sun Zenith Angle based on Sun Elevation Angle

    # -----------------------------------------------------------------------
    # Loop processing over all bands
    # -----------------------------------------------------------------------
    for band in spectral_bands:

        global tmp_rad

        g.message("|* Processing the %s spectral band" % band, flags='i')

        if not keep_region:
            g.message("\n|! Matching region to %s" % band)  # set region
            run('g.region', rast=band)   # ## FixMe

        # -------------------------------------------------------------------
        # Converting to Spectral Radiance
        # -------------------------------------------------------------------

        msg = "\n|> Converting to Spectral Radiance: " \
#            "L(λ) = 10^4 x DN(λ) / CalCoef(λ) x Bandwidth(λ)"  # Unicode? ##
        g.message(msg)

        # -------------------------------------------------------------------
        # Band dependent metadata for Spectral Radiance
        # -------------------------------------------------------------------

        # Why is this necessary?  Any function to remove the mapset's name?
        if '@' in band:
            band_key = (band.split('@')[0])
        else:
            band_key = band

        # get coefficients; the pre-2001 values apply only when an
        # acquisition date is known (i.e. the UTC string was given)
        if utc and acq_dat < cc_update:
            g.message("\n|! Using Pre-2001 Calibration Coefficient values",
                      flags='i')
            cc = float(CC[band_key][0])
        else:
            cc = float(CC[band_key][1])

        # get bandwidth
        bw = float(CC[band_key][2])

        # inform
        msg = "   [Calibration Coefficient=%d, Bandwidth=%.1f]" \
            % (cc, bw)
        g.message(msg)

        # convert
        tmp_rad = "%s.Radiance" % tmp  # Temporary Map
        rad = "%s = 10^4 * %s / %f * %f" \
            % (tmp_rad, band, cc, bw)
        grass.mapcalc(rad, overwrite=True)

        # string for metadata
        history_rad = rad
        history_rad += "Calibration Coefficient=%d; Effective Bandwidth=%.1f" \
            % (cc, bw)
        title_rad = "%s band (Spectral Radiance)" % band
        units_rad = "W / m2 / μm / ster"
        description_rad = "At-sensor %s band spectral Radiance (W/m2/μm/sr)" \
            % band

        if not radiance:

            # ---------------------------------------------------------------
            # Converting to Top-of-Atmosphere Reflectance
            # ---------------------------------------------------------------

            global tmp_toar

            msg = "\n|> Converting to Top-of-Atmosphere Reflectance" \
                  # "ρ(p) = π x L(λ) x d^2 / ESUN(λ) x cos(θ(S))"  # Unicode?
            g.message(msg)

            # ---------------------------------------------------------------
            # Band dependent metadata for Spectral Radiance
            # ---------------------------------------------------------------

            # get esun
            esun = float(CC[band_key][3])

            # inform
            msg = "   [Earth-Sun distane=%f, Mean solar exoatmospheric " \
                "irradiance=%.1f]" % (esd, esun)
            g.message(msg)

            # convert (ESUN and cos(SZA) both belong to the denominator)
            tmp_toar = "%s.Reflectance" % tmp  # Spectral Reflectance
            toar = "%s = %f * %s * %f^2 / (%f * cos(%f))" \
                % (tmp_toar, math.pi, tmp_rad, esd, esun, sza)
            grass.mapcalc(toar, overwrite=True)

            # strings for output's metadata
            history_toar = toar
            history_toar += "; ESD=%f; BAND_Esun=%f; SZA=%f" % (esd, esun, sza)
            title_toar = "%s band (Top of Atmosphere Reflectance)" % band
            units_toar = "Unitless planetary reflectance"
            description_toar = "Top of Atmosphere `echo ${BAND}` band spectral"
            " Reflectance (unitless)"

        if not radiance:  # rename the reflectance end product

            # history entry
            run("r.support", map=tmp_toar, title=title_toar,
                units=units_toar, description=description_toar,
                source1=source1_toar, source2=source2_toar,
                history=history_toar)

            # add suffix to basename & rename end product
            toar_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_toar, toar_name))

        else:  # radiance-only run: rename the radiance end product

            # history entry
            run("r.support", map=tmp_rad,
                title=title_rad, units=units_rad, description=description_rad,
                source1=source1_rad, source2=source2_rad, history=history_rad)

            # add suffix to basename & rename end product
            rad_name = ("%s.%s" % (band.split('@')[0], outputsuffix))
            run("g.rename", rast=(tmp_rad, rad_name))

    # visualising-related information
    if not keep_region:
        grass.del_temp_region()  # restore previous region settings
        g.message("\n|! Original region restored")
    g.message("\n>>> Hint: rebalancing colors "
              "(i.colors.enhance) may improve appearance of RGB composites!",
              flags='i')
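A condensed, hedged sketch of the DN-to-reflectance chain used above; the map
names, the band constants and the jd_to_esd() approximation are illustrative
assumptions, not the addon's exact code:

import math
import grass.script as grass

def jd_to_esd(doy):
    # a common approximation of the Earth-Sun distance (AU) for a day of
    # year; the addon's own jd_to_esd() may use a different formula
    return 1 - 0.01672 * math.cos(math.radians(0.9856 * (doy - 4)))

esd = jd_to_esd(166)                   # hypothetical acquisition day
sza = 90 - 63.2                        # hypothetical sun elevation angle
cc, bw, esun = 0.3583, 0.071, 1924.59  # hypothetical band constants

# DN -> at-sensor spectral radiance -> top-of-atmosphere reflectance
grass.mapcalc("rad = 10^4 * dn_band / %f * %f" % (cc, bw), overwrite=True)
grass.mapcalc("toar = %f * rad * %f^2 / (%f * cos(%f))"
              % (math.pi, esd, esun, sza), overwrite=True)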
Exemplo n.º 19
0
def main():
    global temp_ng, temp_ncin, temp_ncout

    # we discard stderrs when not debugging
    # ideally stderrs should be printed when an exception was raised
    # this would be done easily with StringIO
    # but it doesn't work with subprocess
    if not grass.debug_level():
        nuldev = open(os.devnull, 'w')
    else:
        nuldev = sys.stderr

    # Initialise temporary vector map names
    temp_ng = "v_lidar_mcc_tmp_ng_" + str(os.getpid())
    temp_ncin = "v_lidar_mcc_tmp_ncin_" + str(os.getpid())
    temp_ncout = "v_lidar_mcc_tmp_ncout_" + str(os.getpid())

    input = options['input']
    g_output = options['ground']
    ng_output = options['nonground']

    # does map exist?
    if not grass.find_file(input, element='vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % input)

    # Count points in input map
    n_input = grass.vector_info(input)['points']

    # does map contain points ?
    if not (n_input > 0):
        grass.fatal(_("Vector map <%s> does not contain points") % input)

    flag_n = flags['n']

    ### Scale domain (l)
    # Evans & Hudak 2007 used scale domains 1 to 3
    l = 1
    l_stop = int(options['nl'])
    if (l_stop < 1):
        grass.fatal("The minimum number of scale domains is 1.")

    ### Curvature tolerance threshold (t)
    # Evans & Hudak 2007 used a t-value of 0.3
    t = float(options['t'])
    ### Increase of curvature tolerance threshold for each scale domain
    ti = t / 3.0

    ### Convergence threshold (j)
    # Evans & Hudak 2007 used a convergence threshold of 0.3
    j = float(options['j'])
    if (j <= 0):
        grass.fatal("The convergence threshold has to be > 0.")

    ### Tension parameter (f)
    # Evans & Hudak 2007 used a tension parameter 1.5
    f = float(options['f'])
    if (f <= 0):
        grass.fatal("The tension parameter has to be > 0.")

    ### Spline steps parameter (s)
    # Evans & Hudak 2007 used the 12 nearest neighbors
    # (used spline steps $res * 5 before)
    s = int(options['s'])
    if (s <= 0):
        grass.fatal("The spline step parameter has to be > 0.")

    ### Read desired resolution from region
    # Evans & Hudak 2007 used a desired resolution (delta) of 1.5
    gregion = grass.region()
    x_res_fin = gregion['ewres']
    y_res_fin = gregion['nsres']

    # Define resolution steps in iteration
    n_res_steps = (l_stop + 1) // 2

    # Pass name of input map to v.outlier
    nc_points = input

    # controls first creation of the output map before patching
    ng_output_exists = False
    # append and do not build topology
    vpatch_flags = 'ab'

    # 7.x requires topology to see z coordinate
    # 7.1 v.patch has flags to use z even without topology
    # see #2433 on Trac and r66822 in Subversion
    build_before_patch = True
    unused, gver_minor, unused = grass.version()['version'].split('.')
    if int(gver_minor) >= 1:
        build_before_patch = False
        # do not expect topology and expect z
        vpatch_flags += 'nz'

    # Loop through scale domains
    while (l <= l_stop):
        i = 1
        convergence = 100
        if l < n_res_steps:
            xres = x_res_fin / (n_res_steps - (l - 1))
            yres = y_res_fin / (n_res_steps - (l - 1))
        elif l == n_res_steps:
            xres = x_res_fin
            yres = y_res_fin
        else:
            xres = x_res_fin * ((l + 1) - n_res_steps)
            yres = y_res_fin * ((l + 1) - n_res_steps)

        grass.use_temp_region()
        grass.run_command("g.region",
                          s=gregion['s'],
                          w=gregion['w'],
                          nsres=yres,
                          ewres=xres,
                          flags="a")
        xs_s = xres * s
        ys_s = yres * s
        grass.message("Processing scale domain " + str(l) + "...")
        # Repeat application of v.outlier until convergence level is reached
        while (convergence > j):
            grass.verbose("Number of input points in iteration " + str(i) +
                          ": " + str(n_input))
            # Run v.outlier, filtering positive outliers unless the -n flag
            # requests negative ones
            outlier_filter = 'negative' if flag_n else 'positive'
            grass.run_command('v.outlier',
                              input=nc_points,
                              output=temp_ncout,
                              outlier=temp_ng,
                              ew_step=xs_s,
                              ns_step=ys_s,
                              lambda_=f,
                              threshold=t,
                              filter=outlier_filter,
                              overwrite=True,
                              quiet=True,
                              stderr=nuldev)

            # Get information about results for calculating convergence level
            ng = grass.vector_info(temp_ng)['points']
            nc = n_input - ng
            n_input = nc
            grass.run_command('g.remove',
                              flags='f',
                              type='vector',
                              name=temp_ncin,
                              quiet=True,
                              stderr=nuldev)
            grass.run_command("g.rename",
                              vector=temp_ncout + "," + temp_ncin,
                              quiet=True,
                              stderr=nuldev)
            nc_points = temp_ncin
            # Give information on process status
            grass.verbose("Unclassified points after iteration " + str(i) +
                          ": " + str(nc))
            grass.verbose("Points classified as non ground after iteration " +
                          str(i) + ": " + str(ng))
            # Set convergence level
            if (nc > 0):
                convergence = float(ng) / float(nc)
                if build_before_patch:
                    grass.run_command('v.build', map=temp_ng, stderr=nuldev)
                # Patch non-ground points to non-ground output map
                if ng_output_exists:
                    grass.run_command('v.patch',
                                      input=temp_ng,
                                      output=ng_output,
                                      flags=vpatch_flags,
                                      overwrite=True,
                                      quiet=True,
                                      stderr=nuldev)
                else:
                    grass.run_command('g.copy',
                                      vector=(temp_ng, ng_output),
                                      stderr=nuldev)
                    ng_output_exists = True
            else:
                convergence = 0
            # Give information on convergence level
            grass.verbose("Convergence level after run " + str(i) +
                          " in scale domain " + str(l) + ": " +
                          str(round(convergence, 3)))
            # Increase iterator
            i = i + 1
        # Adjust curvature tolerance and reset scale domain
        t = t + ti
        l = l + 1
        # Delete temporary region
        grass.del_temp_region()

    # Rename the temporary map of points which have not been classified as
    # non-ground to the output vector map containing ground points
    grass.run_command("g.rename",
                      vector=nc_points + "," + g_output,
                      quiet=True,
                      stderr=nuldev)
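A small standalone sketch of the multiscale resolution schedule implemented
by the loop above: resolutions start finer than the target, reach it in the
middle scale domain, and grow coarser afterwards (pure Python, hypothetical
values):

def resolution_schedule(res, l_stop):
    # per-scale-domain resolutions, mirroring the xres/yres logic above
    n_res_steps = (l_stop + 1) // 2
    schedule = []
    for l in range(1, l_stop + 1):
        if l < n_res_steps:
            schedule.append(res / (n_res_steps - (l - 1)))
        elif l == n_res_steps:
            schedule.append(res)
        else:
            schedule.append(res * ((l + 1) - n_res_steps))
    return schedule

print(resolution_schedule(1.5, 3))  # -> [0.75, 1.5, 3.0]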
Exemplo n.º 20
0
def set_temp_region(region_id):
    """Set a temporary GRASS region
    """
    gscript.use_temp_region()
    gscript.run_command("g.region", region=region_id)
Exemplo n.º 21
0
def isocalc(isoraw):

    global isos_extract
    global isos_extract_rast
    global isos_grow_cat
    global isos_grow_cat_int
    global isos_grow_distance
    global isos_grow_distance_recode
    global isos_poly_all

    isos_extract = "isos_extract_%d" % os.getpid()
    isos_extract_rast = "isos_extract_rast_%d" % os.getpid()
    isos_grow_cat = "isos_grow_cat_%d" % os.getpid()
    isos_grow_cat_int = "isos_grow_cat_int_%d" % os.getpid()
    isos_grow_distance = "isos_grow_distance_%d" % os.getpid()
    isos_grow_distance_recode = "isos_grow_distance_recode_%d" % os.getpid()
    isos_poly_all = "isos_poly_all_%d" % os.getpid()

    grass.use_temp_region()

    grass.run_command(
        "v.extract",
        input_=isoraw,
        cats=output_cats[0:-1],
        output=isos_extract,
        overwrite=True,
    )
    grass.run_command("g.region", vect=isos_extract, flags="a")
    grass.run_command(
        "v.to.rast",
        input_=isos_extract,
        use="cat",
        output=isos_extract_rast,
        overwrite=True,
    )
    grass.run_command(
        "r.grow.distance",
        input=isos_extract_rast,
        value=isos_grow_cat,
        distance=isos_grow_distance,
        flags="m",
        overwrite=True,
    )
    grass.mapcalc(isos_grow_cat_int + " = int(" + isos_grow_cat + ")")
    if max_distance:
        recode_str = "0:%f:1\n%f:*:0" % (max_distance, max_distance)
        grass.write_command(
            "r.recode",
            input_=isos_grow_distance,
            output=isos_grow_distance_recode,
            rules="-",
            stdin=recode_str,
            overwrite=True,
        )
        grass.run_command("r.mask",
                          raster=isos_grow_distance_recode,
                          maskcats=1,
                          overwrite=True)
    grass.run_command(
        "r.to.vect",
        input_=isos_grow_cat_int,
        output=isos_poly_all,
        type_="area",
        flags="sv",
        overwrite=True,
    )
    grass.run_command("v.extract",
                      input_=isos_poly_all,
                      output=isos_final,
                      cats=output_cats[0:-1])
    if max_distance:
        grass.run_command("r.mask", flags="r")
def main():
    """
    Main program: get names for input, output suffix, options and flags
    """
    input_list = options['image'].split(',')
    outputsuffix = options['suffix']

    # Select model based on author
    author_year = options['model']
    if 'elvidge' in author_year:
        version = author_year[7:]
        author_year = 'elvidge'
    else:
        version = None
    Model = MODELS[author_year]
    # ----------------------------

    # flags
    citation = flags['c']
    info = flags['i']
    extend_region = flags['x']
    timestamps = not flags['t']
    zero = flags['z']
    null = flags['n']  ### either zero or null, not both --- FixMe! ###
    evaluation = flags['e']
    shell = flags['g']

    global temporary_maps
    temporary_maps = []


    msg = ("|i Inter-satellite calibration of DMSP-OLS Nighttime Stable "
        "Lights")
    g.message(msg)
    del(msg)

    '''Temporary Region and Files'''

    if extend_region:
        grass.use_temp_region()  # to safely modify the region

    tmpfile = grass.basename(grass.tempfile())
    tmp = "tmp." + tmpfile

    '''Loop over list of input images'''

    for image in input_list:

        satellite = image[0:3]
        year = image[3:7]

        '''If requested, match region to input image'''

        if extend_region:
            run('g.region', rast=image)   # ## FixMe?
            msg = "\n|! Matching region extent to map {name}"
            msg = msg.format(name=image)
            g.message(msg)
            del(msg)

        else:
            grass.warning(_('Operating on current region'))

        '''Retrieve coefficients'''

        msg = "\n|> Calibrating average visible Digital Number values "
        g.message(msg)
        del(msg)

        # if "version" == True use Elvidge, else use Liu2012 or Wu2013
        args = (satellite, year, version) if version else (satellite, year)
        model_parameters = retrieve_model_parameters(Model, *args)

#        # print model's generic equation?
#        if info:
#            print this
#            print that

        # split parameters in usable variables
        citation_string, coefficients, r2, mapcalc_formula = model_parameters
        msg = '|>>> Regression coefficients: ' + str(coefficients)
        msg += '\n' + '|>>> ' + r2
        g.message(msg)
        del(msg)

        # Temporary Map
        tmp_cdn = "{prefix}.Calibrated".format(prefix=tmp)
        temporary_maps.append(tmp_cdn)

        '''Formula for mapcalc'''

        equation = "{out} = {inputs}"
        calibration_formula = equation.format(out=tmp_cdn, inputs=mapcalc_formula)

        # alternatives
        if zero:
            zcf = "{out} = if(Input == 0, 0, {formula})"
            calibration_formula = zcf.format(out=tmp_cdn, formula=mapcalc_formula)
            msg = "\n|i Excluding zero cells from the analysis"
            g.message(msg)
            del(msg)

        elif null:
            ncf = "{out} = if(Input == 0, null(), {formula})"
            calibration_formula = ncf.format(out=tmp_cdn, formula=mapcalc_formula)
            msg = "\n|i Setting zero cells to NULL"
            g.message(msg)
            del(msg)

        # Compress even more? -----------------------------------------------
#        if zero or null:
#            zero = 0 if zero else ('null()')
#            equation = "{out} = if(Input == 0, {zn}, {formula})"
#            calibration_formula = equation.format(out=tmp_cdn, zero, formula=mapcalc_formula)
        # ----------------------------------------------- Compress even more? 

        # replace the "dummy" string...
        calibration_formula = calibration_formula.replace("Input", image)

        '''Calibrate'''

        if info:
            msg = "\n|i Mapcalc formula: {formula}"
            g.message(msg.format(formula=mapcalc_formula))
            del(msg)

        grass.mapcalc(calibration_formula, overwrite=True)

        '''Transfer timestamps, if any'''

        if timestamps:

            try:
                datetime = grass.read_command("r.timestamp", map=image)
                run("r.timestamp", map=tmp_cdn, date=datetime)

                msg = "\n|i Timestamping: {stamp}".format(stamp=datetime)
                g.message(msg)

            except CalledModuleError:
                grass.fatal(_('\n|* Timestamp is missing! '
                              'Please add one to the input map if further time '
                              'series analysis is important. '
                              'If you don\'t need it, you may use the -t flag.'))

        else:
            grass.warning(_('As requested, timestamp transferring not attempted.'))

        # -------------------------------------------------------------------------
        # add timestamps and register to spatio-temporal raster data set
        # -------------------------------------------------------------------------

        # ToDo -- borrowed from r.sun.daily
        # - change flag for "don't timestamp", see above
        # - use '-t' for temporal, makes more sense
        # - adapt following

                # temporal = flags['t']
                # if temporal:
                #     core.info(_("Registering created maps into temporal dataset..."))
                #     import grass.temporal as tgis

                #     def registerToTemporal(basename, suffixes, mapset, start_day, day_step,
                #                            title, desc):
                #         """
                #         Register daily output maps in spatio-temporal raster data set
                #         """
                #         maps = ','.join([basename + suf + '@' + mapset for suf in suffixes])
                #         tgis.open_new_stds(basename, type='strds', temporaltype='relative',
                #                            title=title, descr=desc, semantic='sum',
                #                            dbif=None, overwrite=grass.overwrite())

                #         tgis.register_maps_in_space_time_dataset(type='rast',
                #                                                  name=basename, maps=maps,
                #                                                  start=start_day, end=None,
                #                                                  unit='days',
                #                                                  increment=day_step,
                #                                                  dbif=None, interval=False)

        '''Normalised Difference Index (NDI), if requested'''

        ndi = None
        if evaluation:

            # total light indices for input, tmp_cdn images
            tli_image = total_light_index(image)
            tli_tmp_cdn = total_light_index(tmp_cdn)

            # build
            ndi = normalised_difference_index(tli_image, tli_tmp_cdn)

            # report if -g
            if shell:
                msg = 'ndi={index}'.format(index=round(ndi,3))
                g.message(msg)
                del(msg)

            # else, report
            else:
                msg = '\n|i Normalised Difference Index for {dn}: {index}'
                msg = msg.format(dn=image, index=round(ndi,3))
                g.message(msg)
                del(msg)

        '''Strings for metadata'''

        history_calibration = 'Regression model: '
        history_calibration += mapcalc_formula
        history_calibration += '\n\n'
        if ndi is not None:
            history_calibration += 'NDI: {ndi}'.format(ndi=round(ndi,10))
        title_calibration = 'Calibrated DMSP-OLS Stable Lights'
        description_calibration = ('Inter-satellite calibrated average '
                                   'Digital Number values')
        units_calibration = 'Digital Numbers (Calibrated)'

        source1_calibration = citation_string
        source2_calibration = ''

        # history entry
        run("r.support",
             map=tmp_cdn,
             title=title_calibration,
             units=units_calibration,
             description=description_calibration,
             source1=source1_calibration,
             source2=source2_calibration,
             history=history_calibration)

        '''Add suffix to basename & rename end product'''

        name = "{prefix}.{suffix}"
        name = name.format(prefix=image.split('@')[0], suffix=outputsuffix)
        calibrated_name = name
        run("g.rename", rast=(tmp_cdn, calibrated_name))
        temporary_maps.remove(tmp_cdn)

        '''Restore previous computational region'''

        if extend_region:
            grass.del_temp_region()
            g.message("\n|! Original Region restored")


        '''Things left to do...'''

        if citation:
            msg = "\n|i Citation: {string}".format(string=citation_string)
            g.message(msg)
            del(msg)
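A minimal sketch of the evaluation helpers called above (assumed
implementations, not necessarily the addon's own): the Total Light Index is
taken as the sum of all cell values, and the NDI as the normalised absolute
difference of two such sums.

def total_light_index(raster):
    # sum of all cell values, read from r.univar's shell-style output
    univar = grass.parse_command('r.univar', map=raster, flags='g')
    return float(univar['sum'])

def normalised_difference_index(tli_a, tli_b):
    # 0 means identical total light, values near 1 mean large differences
    return abs(tli_a - tli_b) / (tli_a + tli_b)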
Exemplo n.º 23
0
def main():
    # split input images
    all_images = options["input"]
    images = all_images.split(",")
    # number of images
    n_images = len(images)
    # database path
    dbopt = options["database"]
    # output suffix
    suffix = options["suffix"]
    # output mosaic map
    mosaic = options["output"]
    output_names = []
    # name for average table
    table_ave = "t%s_average" % suffix
    # increment of one the maximum value for a correct use of range function
    max_value = int(options["max"]) + 1
    # if the db path is the default one
    if dbopt.startswith("$GISDBASE/$LOCATION_NAME/$MAPSET"):
        dbopt_split = dbopt.split("/")[-1]
        env = grass.gisenv()
        path = os.path.join(env["GISDBASE"], env["LOCATION_NAME"], env["MAPSET"])
        dbpath = os.path.join(path, dbopt_split)
    else:
        if os.access(os.path.dirname(dbopt), os.W_OK):
            path = os.path.dirname(dbopt)
            dbpath = dbopt
        else:
            grass.fatal(
                _(
                    "Folder to write database files does not"
                    + " exist or is not writeable"
                )
            )
    # connect to the db
    db = sqlite3.connect(dbpath)
    curs = db.cursor()
    grass.message(_("Calculating Cumulative Distribution Functions ..."))

    # number of pixels per value, summarized for all images
    numPixelValue = [0] * max_value

    # cumulative histogram for each value and each image
    cumulHistoValue = [0] * max_value

    # set up temp region only once
    grass.use_temp_region()

    # for each image
    for i in images:
        iname = i.split("@")[0]
        # drop table if exist
        query_drop = 'DROP TABLE if exists "t%s"' % iname
        curs.execute(query_drop)
        # create table
        query_create = 'CREATE TABLE "t%s" (grey_value integer,pixel_frequency ' % iname
        query_create += "integer, cumulative_histogram integer, cdf real)"
        curs.execute(query_create)
        index_create = 'CREATE UNIQUE INDEX "t%s_grey_value" ON "t%s" (grey_value) ' % (
            iname,
            iname,
        )
        curs.execute(index_create)
        # set the region on the raster
        grass.run_command("g.region", raster=i)
        # calculate statistics
        stats_out = grass.pipe_command("r.stats", flags="cin", input=i, separator=":")
        stats = stats_out.communicate()[0].decode("utf-8").split("\n")[:-1]
        stats_dict = dict(s.split(":", 1) for s in stats)
        cdf = 0
        curs.execute("BEGIN")
        # for each number in the range
        for n in range(0, max_value):
            # try to insert the values otherwise insert 0

            try:
                val = int(stats_dict[str(n)])
                cdf += val
                numPixelValue[n] += val
                insert = 'INSERT INTO "t%s" VALUES (%i, %i, %i, 0.000000)' % (
                    iname,
                    n,
                    val,
                    cdf,
                )
                curs.execute(insert)
            except KeyError:
                insert = 'INSERT INTO "t%s" VALUES (%i, 0, %i, 0.000000)' % (
                    iname,
                    n,
                    cdf,
                )
                curs.execute(insert)
            # save cumulative_histogram for the second loop
            cumulHistoValue[n] = cdf
        curs.execute("COMMIT")
        db.commit()
        # number of pixel is the cdf value
        numPixel = cdf
        # for each number in the range
        # cdf is updated using the number of non-null pixels for the current image
        curs.execute("BEGIN")
        for n in range(0, max_value):
            # select value for cumulative_histogram for the range number
            """
            select_ch = "SELECT cumulative_histogram FROM \"t%s\" WHERE " % iname
            select_ch += "(grey_value=%i)" % n
            result = curs.execute(select_ch)
            val = result.fetchone()[0]
            """
            val = cumulHistoValue[n]
            # update cdf with new value
            if val != 0 and numPixel != 0:
                update_cdf = round(float(val) / float(numPixel), 6)
                update_cdf = 'UPDATE "t%s" SET cdf=%s WHERE (grey_value=%i)' % (
                    iname,
                    update_cdf,
                    n,
                )
                curs.execute(update_cdf)

        curs.execute("COMMIT")
        db.commit()
    db.commit()
    pixelTot = 0

    # get total number of pixels divided by number of images
    # for each number in the range
    for n in range(0, max_value):
        """
        numPixel = 0
        # for each image
        for i in images:
            iname = i.split('@')[0]
            pixel_freq = "SELECT pixel_frequency FROM \"t%s\" WHERE (grey_value=%i)" % (
                                                                iname, n)
            result = curs.execute(pixel_freq)
            val = result.fetchone()[0]
            numPixel += val
        """
        # divide the per-value pixel count by the number of images
        div = int(numPixelValue[n] / n_images)
        pixelTot += div

    # drop average table
    query_drop = "DROP TABLE if exists %s" % table_ave
    curs.execute(query_drop)
    # create average table
    query_create = "CREATE TABLE %s (grey_value integer,average " % table_ave
    query_create += "integer, cumulative_histogram integer, cdf real)"
    curs.execute(query_create)
    index_create = 'CREATE UNIQUE INDEX "%s_grey_value" ON "%s" (grey_value) ' % (
        table_ave,
        table_ave,
    )
    curs.execute(index_create)
    cHist = 0
    # for each number in the range
    curs.execute("BEGIN")
    for n in range(0, max_value):
        tot = 0
        """
        # for each image
        for i in images:
            iname = i.split('@')[0]
            # select pixel frequency
            pixel_freq = "SELECT pixel_frequency FROM \"t%s\" WHERE (grey_value=%i)" % (
                                                            iname, n)
            result = curs.execute(pixel_freq)
            val = result.fetchone()[0]
            tot += val
        """
        tot = numPixelValue[n]
        # calculate new value of pixel_frequency
        average = tot / n_images
        cHist = cHist + int(average)
        # insert new values into average table
        if cHist != 0 and pixelTot != 0:
            cdf = float(cHist) / float(pixelTot)
            insert = "INSERT INTO %s VALUES (%i, %i, %i, %s)" % (
                table_ave,
                n,
                int(average),
                cHist,
                cdf,
            )
            curs.execute(insert)
    curs.execute("COMMIT")
    db.commit()

    # for each image
    grass.message(_("Reclassifying bands based on average histogram..."))
    for i in images:
        iname = i.split("@")[0]
        grass.run_command("g.region", raster=i)
        # write average rules file
        outfile = open(grass.tempfile(), "w")
        new_grey = 0
        for n in range(0, max_value):
            select_newgrey = 'SELECT b.grey_value FROM "t%s" as a, ' % iname
            select_newgrey += (
                "%s as b WHERE a.grey_value=%i " % (table_ave, n)
                + "ORDER BY abs(a.cdf-b.cdf) LIMIT 1"
            )
            # write line with old and new value
            try:
                result_new = curs.execute(select_newgrey)
                new_grey = result_new.fetchone()[0]
                out_line = "%d = %d\n" % (n, new_grey)
                outfile.write(out_line)
            except TypeError:
                out_line = "%d = %d\n" % (n, new_grey)
                outfile.write(out_line)

        outfile.close()
        outname = "%s.%s" % (iname, suffix)
        # check if a output map already exists
        result = grass.core.find_file(outname, element="cell")
        if result["fullname"] and grass.overwrite():
            grass.run_command("g.remove", flags="f", type="raster", name=outname)
            grass.run_command("r.reclass", input=i, out=outname, rules=outfile.name)
        elif result["fullname"] and not grass.overwrite():
            grass.warning(
                _("Raster map %s already exists and will not be overwritten" % i)
            )
        else:
            grass.run_command("r.reclass", input=i, out=outname, rules=outfile.name)
        output_names.append(outname)
        # remove the rules file
        grass.try_remove(outfile.name)
        # write cmd history:
        grass.raster_history(outname)
    db.commit()
    db.close()
    if mosaic:
        grass.message(_("Processing mosaic <%s>..." % mosaic))
        grass.run_command("g.region", raster=all_images)
        grass.run_command("r.patch", input=output_names, output=mosaic)
Exemplo n.º 24
0
def main():
    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None
    global content
    content = None
    global raster
    raster = options["raster"]
    global decimals
    decimals = int(options["decimals"])
    global zone_map
    zone_map = options["zone_map"]

    csvfile = options["csvfile"] if options["csvfile"] else []
    separator = gscript.separator(options["separator"])
    prefix = options["prefix"] if options["prefix"] else []
    classes_list = options["classes_list"].split(
        ",") if options["classes_list"] else []
    vectormap = options["vectormap"] if options["vectormap"] else []
    prop = False if "proportion" not in options["statistics"].split(
        ",") else True
    mode = False if "mode" not in options["statistics"].split(",") else True

    if flags["c"]:  # check only if flag active; can be a bottleneck on very large rasters
        # Check if input layers are CELL
        if gscript.parse_command("r.info", flags="g",
                                 map=raster)["datatype"] != "CELL":
            gscript.fatal(
                _("The type of the input map 'raster' is not CELL. "
                  "Please use a raster with integer values"))
        if (gscript.parse_command("r.info", flags="g",
                                  map=zone_map)["datatype"] != "CELL"):
            gscript.fatal(
                _("The type of the input map 'zone_map' is not CELL. "
                  "Please use a raster with integer values"))

    # Check that 'decimals' is positive and reasonably small
    if decimals <= 0:
        gscript.fatal(_("The number of decimals should be positive"))
    if decimals > 100:
        gscript.fatal(_("The number of decimals should not be more than 100"))

    # Adjust region to the zone map if the -r flag is active
    if flags["r"]:
        gscript.use_temp_region()
        gscript.run_command("g.region", raster=zone_map)

    # R.STATS
    tmpfile = gscript.tempfile()
    try:
        if flags["n"]:
            gscript.run_command(
                "r.stats",
                overwrite=True,
                flags="c",
                input="%s,%s" % (zone_map, raster),
                output=tmpfile,
                separator=separator,
            )  # Consider null values in R.STATS
        else:
            gscript.run_command(
                "r.stats",
                overwrite=True,
                flags="cn",
                input="%s,%s" % (zone_map, raster),
                output=tmpfile,
                separator=separator,
            )  # Do not consider null values in R.STATS
        gscript.message(_("r.stats command finished..."))
    except:
        gscript.fatal(_("The execution of r.stats failed"))

    # COMPUTE STATISTICS
    # Open csv file and create a csv reader
    rstatsfile = open(tmpfile, "r")
    reader = csv.reader(rstatsfile, delimiter=separator)
    # Total pixels per category per zone
    totals_dict = {}
    for row in reader:
        # declare a new nested dictionary the first time a zone ID is seen
        if row[0] not in totals_dict:
            totals_dict[row[0]] = {}
        # store pixel count per class; with flag -l, classes outside
        # 'classes_list' are filtered out in the proportions loop below
        totals_dict[row[0]][row[1]] = int(row[2])
    # Delete key '*' in 'totals_dict' that could appear if there are null values in the zone raster
    if "*" in totals_dict:
        del totals_dict["*"]
    # Close file
    rstatsfile.close()
    # Get list of ID
    id_list = [ID for ID in totals_dict]
    # Mode
    if mode:
        modalclass_dict = {}
        for ID in id_list:
            # trick found here: https://stackoverflow.com/a/268285/8013239
            modal_class = max(iter(totals_dict[ID].items()),
                              key=operator.itemgetter(1))[0]
            if modal_class == "*":  # the mode is the NULL value
                modalclass_dict[ID] = "NULL"
            else:
                modalclass_dict[ID] = modal_class
    # Class proportions
    if prop:
        # Get list of categories to output
        if classes_list:  # If list of classes provided by user
            class_dict = {str(int(a)): ""
                          for a in classes_list
                          }  # To be sure it's string format
        else:
            class_dict = {}
        # Proportion of each category per zone
        proportion_dict = {}
        for ID in id_list:
            proportion_dict[ID] = {}
            for cl in totals_dict[ID]:
                if (
                        flags["l"] and cl not in classes_list
                ):  # with flag -l, output will contain only classes from 'classes_list'
                    continue
                if flags["p"]:
                    prop_value = (float(totals_dict[ID][cl]) /
                                  sum(totals_dict[ID].values()) * 100)
                else:
                    prop_value = float(totals_dict[ID][cl]) / sum(
                        totals_dict[ID].values())
                proportion_dict[ID][cl] = "{:.{}f}".format(
                    prop_value, decimals)
                if cl == "*":
                    class_dict["NULL"] = ""
                else:
                    class_dict[cl] = ""
        # Fill class not met in the raster with zero
        for ID in proportion_dict:
            for cl in class_dict:
                if cl not in proportion_dict[ID].keys():
                    proportion_dict[ID][cl] = "{:.{}f}".format(0, decimals)
        # Get list of class sorted by value (arithmetic ordering)
        if "NULL" in class_dict.keys():
            class_list = sorted(
                [int(k) for k in class_dict.keys() if k != "NULL"])
            class_list.append("NULL")
        else:
            class_list = sorted([int(k) for k in class_dict.keys()])
    gscript.verbose(_("Statistics computed..."))
    # Set 'totals_dict' to None to try RAM release
    totals_dict = None
    # OUTPUT CONTENT
    # Header
    header = [
        "cat",
    ]
    if mode:
        if prefix:
            header.append("%s_mode" % prefix)
        else:
            header.append("mode")
    if prop:
        if prefix:
            for cl in class_list:
                header.append("%s_prop_%s" % (prefix, cl))
        else:
            for cl in class_list:
                header.append("prop_%s" % cl)
    # Values
    value_dict = {}
    for ID in id_list:
        value_dict[ID] = []
        value_dict[ID].append(ID)
        if mode:
            value_dict[ID].append(modalclass_dict[ID])
        if prop:
            for cl in class_list:
                value_dict[ID].append(proportion_dict[ID]["%s" % cl])
    # WRITE OUTPUT
    if csvfile:
        with open(csvfile, "w", newline="") as outfile:
            writer = csv.writer(outfile, delimiter=separator)
            writer.writerow(header)
            writer.writerows(value_dict.values())
    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = "rzonalclasses_tmp_vect_%d" % os.getpid()
        gscript.run_command(
            "r.to.vect",
            input_=zone_map,
            output=temporary_vect,
            type_="area",
            flags="vt",
            overwrite=True,
            quiet=True,
        )
        insert_sql = gscript.tempfile()
        with open(insert_sql, "w", newline="") as fsql:
            fsql.write("BEGIN TRANSACTION;\n")
            if gscript.db_table_exist(temporary_vect):
                if gscript.overwrite():
                    fsql.write("DROP TABLE %s;" % temporary_vect)
                else:
                    gscript.fatal(
                        _("Table %s already exists. Use --o to overwrite") %
                        temporary_vect)
            create_statement = ("CREATE TABLE %s (cat int PRIMARY KEY);\n" %
                                temporary_vect)
            fsql.write(create_statement)
            for col in header[1:]:
                if col.split("_")[-1] == "mode":  # mode column should be integer
                    addcol_statement = "ALTER TABLE %s ADD COLUMN %s integer;\n" % (
                        temporary_vect,
                        col,
                    )
                else:  # Proportions column should be double precision
                    addcol_statement = (
                        "ALTER TABLE %s ADD COLUMN %s double precision;\n" %
                        (temporary_vect, col))
                fsql.write(addcol_statement)
            for key in value_dict:
                insert_statement = "INSERT INTO %s VALUES (%s);\n" % (
                    temporary_vect,
                    ",".join(value_dict[key]),
                )
                fsql.write(insert_statement)
            fsql.write("END TRANSACTION;")
        gscript.run_command("db.execute", input=insert_sql, quiet=True)
        gscript.run_command("v.db.connect",
                            map_=temporary_vect,
                            table=temporary_vect,
                            quiet=True)
        gscript.run_command("g.copy",
                            vector="%s,%s" % (temporary_vect, vectormap),
                            quiet=True)
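A tiny pure-Python sketch of the zonal statistics computed above, using a
hypothetical totals dictionary in the same {zone: {class: count}} shape:

totals = {"1": {"10": 60, "20": 40}}   # hypothetical counts for zone 1
counts = totals["1"]
modal_class = max(counts.items(), key=lambda kv: kv[1])[0]
proportions = {cl: float(n) / sum(counts.values()) for cl, n in counts.items()}
print(modal_class)   # -> '10'
print(proportions)   # -> {'10': 0.6, '20': 0.4}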
Exemplo n.º 25
0
def main():
    global usermask, mapset, tmp_rmaps, tmp_vmaps

    input = options['input']
    output = options['output']
    tension = options['tension']
    smooth = options['smooth']
    method = options['method']
    edge = int(options['edge'])
    segmax = int(options['segmax'])
    npmin = int(options['npmin'])
    lambda_ = float(options['lambda'])
    memory = options['memory']
    quiet = True  # FIXME
    mapset = grass.gisenv()['MAPSET']
    unique = str(os.getpid())  # Shouldn't we use temp name?
    prefix = 'r_fillnulls_%s_' % unique
    failed_list = list()  # a list of failed holes. Caused by issues with v.surf.rst. Connected with #1813

    # check if input file exists
    if not grass.find_file(input)['file']:
        grass.fatal(_("Raster map <%s> not found") % input)

    # save original region
    reg_org = grass.region()

    # check if a MASK is already present
    # and remove it to not interfere with NULL lookup part
    # as we don't fill MASKed parts!
    if grass.find_file('MASK', mapset=mapset)['file']:
        usermask = "usermask_mask." + unique
        grass.message(_("A user raster mask (MASK) is present. Saving it..."))
        grass.run_command('g.rename', quiet=quiet, raster=('MASK', usermask))

    # check if method is rst to use v.surf.rst
    if method == 'rst':
        # idea: filter all NULLS and grow that area(s) by 3 pixel, then
        # interpolate from these surrounding 3 pixel edge
        filling = prefix + 'filled'

        grass.use_temp_region()
        grass.run_command('g.region', align=input, quiet=quiet)
        region = grass.region()
        ns_res = region['nsres']
        ew_res = region['ewres']

        grass.message(_("Using RST interpolation..."))
        grass.message(_("Locating and isolating NULL areas..."))

        # creating binary (0/1) map
        if usermask:
            grass.message(_("Skipping masked raster parts"))
            grass.mapcalc("$tmp1 = if(isnull(\"$input\") && !($mask == 0 || isnull($mask)),1,null())",
                          tmp1=prefix + 'nulls', input=input, mask=usermask)
        else:
            grass.mapcalc("$tmp1 = if(isnull(\"$input\"),1,null())",
                          tmp1=prefix + 'nulls', input=input)
        tmp_rmaps.append(prefix + 'nulls')

        # restoring user's mask, if present
        # to ignore MASKed original values
        if usermask:
            grass.message(_("Restoring user mask (MASK)..."))
            try:
                grass.run_command('g.rename', quiet=quiet, raster=(usermask, 'MASK'))
            except CalledModuleError:
                grass.warning(_("Failed to restore user MASK!"))
            usermask = None

        # grow identified holes by X pixels
        grass.message(_("Growing NULL areas"))
        tmp_rmaps.append(prefix + 'grown')
        try:
            grass.run_command('r.grow', input=prefix + 'nulls',
                              radius=edge + 0.01, old=1, new=1,
                              out=prefix + 'grown', quiet=quiet)
        except CalledModuleError:
            grass.fatal(_("abandoned. Removing temporary map, restoring "
                          "user mask if needed:"))

        # assign unique IDs to each hole or hole system (holes closer than edge distance)
        grass.message(_("Assigning IDs to NULL areas"))
        tmp_rmaps.append(prefix + 'clumped')
        try:
            grass.run_command(
                'r.clump',
                input=prefix +
                'grown',
                output=prefix +
                'clumped',
                quiet=quiet)
        except CalledModuleError:
            grass.fatal(_("abandoned. Removing temporary map, restoring "
                          "user mask if needed:"))

        # get a list of unique hole cat's
        grass.mapcalc("$out = if(isnull($inp), null(), $clumped)",
                      out=prefix + 'holes', inp=prefix + 'nulls', clumped=prefix + 'clumped')
        tmp_rmaps.append(prefix + 'holes')

        # use new IDs to identify holes
        try:
            grass.run_command('r.to.vect', flags='v',
                              input=prefix + 'holes', output=prefix + 'holes',
                              type='area', quiet=quiet)
        except:
            grass.fatal(_("abandoned. Removing temporary maps, restoring "
                          "user mask if needed:"))
        tmp_vmaps.append(prefix + 'holes')

        # get a list of unique hole cat's
        cats_file_name = grass.tempfile(False)
        grass.run_command(
            'v.db.select',
            flags='c',
            map=prefix + 'holes',
            columns='cat',
            file=cats_file_name,
            quiet=quiet)
        cat_list = list()
        cats_file = open(cats_file_name)
        for line in cats_file:
            cat_list.append(line.rstrip('\n'))
        cats_file.close()
        os.remove(cats_file_name)

        if len(cat_list) < 1:
            grass.fatal(_("Input map has no holes. Check region settings."))

        # GTC Hole is NULL area in a raster map
        grass.message(_("Processing %d map holes") % len(cat_list))
        first = True
        hole_n = 1
        for cat in cat_list:
            holename = prefix + 'hole_' + cat
            # GTC Hole is a NULL area in a raster map
            grass.message(_("Filling hole %s of %s") % (hole_n, len(cat_list)))
            hole_n = hole_n + 1
            # cut out only CAT hole for processing
            try:
                grass.run_command('v.extract', input=prefix + 'holes',
                                  output=holename + '_pol',
                                  cats=cat, quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            tmp_vmaps.append(holename + '_pol')

            # zoom to specific hole with a buffer of two cells around the hole to
            # remove rest of data
            try:
                grass.run_command('g.region',
                                  vector=holename + '_pol', align=input,
                                  w='w-%f' % (edge * 2 * ew_res),
                                  e='e+%f' % (edge * 2 * ew_res),
                                  n='n+%f' % (edge * 2 * ns_res),
                                  s='s-%f' % (edge * 2 * ns_res),
                                  quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))

            # remove temporary map to not overfill disk
            try:
                grass.run_command('g.remove', flags='fb', type='vector',
                                  name=holename + '_pol', quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            tmp_vmaps.remove(holename + '_pol')

            # copy only data around hole
            grass.mapcalc("$out = if($inp == $catn, $inp, null())",
                          out=holename, inp=prefix + 'holes', catn=cat)
            tmp_rmaps.append(holename)

            # If here loop is split into two, next part of loop can be run in parallel
            # (except final result patching)
            # Downside - on large maps such approach causes large disk usage

            # grow hole border to get its edge area
            tmp_rmaps.append(holename + '_grown')
            try:
                grass.run_command('r.grow', input=holename, radius=edge + 0.01,
                                  old=-1, out=holename + '_grown', quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary map, restoring "
                              "user mask if needed:"))

            # no idea why r.grow old=-1 doesn't replace existing values with NULL
            grass.mapcalc("$out = if($inp == -1, null(), \"$dem\")",
                          out=holename + '_edges', inp=holename + '_grown', dem=input)
            tmp_rmaps.append(holename + '_edges')

            # convert to points for interpolation
            tmp_vmaps.append(holename)
            try:
                grass.run_command('r.to.vect',
                                  input=holename + '_edges', output=holename,
                                  type='point', flags='z', quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))

            # count number of points to control segmax parameter for interpolation:
            pointsnumber = grass.vector_info_topo(map=holename)['points']
            grass.verbose(_("Interpolating %d points") % pointsnumber)

            if pointsnumber < 2:
                grass.verbose(_("No points to interpolate"))
                failed_list.append(holename)
                continue

            # Avoid v.surf.rst warnings
            if pointsnumber < segmax:
                use_npmin = pointsnumber
                use_segmax = pointsnumber * 2
            else:
                use_npmin = npmin
                use_segmax = segmax

            # launch v.surf.rst
            tmp_rmaps.append(holename + '_dem')
            try:
                grass.run_command('v.surf.rst', quiet=quiet,
                                  input=holename, elev=holename + '_dem',
                                  tension=tension, smooth=smooth,
                                  segmax=use_segmax, npmin=use_npmin)
            except CalledModuleError:
                # GTC Hole is NULL area in a raster map
                grass.fatal(_("Failed to fill hole %s") % cat)

            # v.surf.rst sometimes fails with exit code 0
            # related bug #1813
            if not grass.find_file(holename + '_dem')['file']:
                try:
                    tmp_rmaps.remove(holename)
                    tmp_rmaps.remove(holename + '_grown')
                    tmp_rmaps.remove(holename + '_edges')
                    tmp_rmaps.remove(holename + '_dem')
                    tmp_vmaps.remove(holename)
                except:
                    pass
                grass.warning(
                    _("Filling has failed silently. Leaving temporary maps "
                      "with prefix <%s> for debugging.") %
                    holename)
                failed_list.append(holename)
                continue

            # append hole result to interpolated version later used to patch into original DEM
            if first:
                tmp_rmaps.append(filling)
                grass.run_command('g.region', align=input, raster=holename + '_dem', quiet=quiet)
                grass.mapcalc("$out = if(isnull($inp), null(), $dem)",
                              out=filling, inp=holename, dem=holename + '_dem')
                first = False
            else:
                tmp_rmaps.append(filling + '_tmp')
                grass.run_command(
                    'g.region', align=input, raster=(
                        filling, holename + '_dem'), quiet=quiet)
                grass.mapcalc(
                    "$out = if(isnull($inp), if(isnull($fill), null(), $fill), $dem)",
                    out=filling + '_tmp',
                    inp=holename,
                    dem=holename + '_dem',
                    fill=filling)
                try:
                    grass.run_command('g.rename',
                                      raster=(filling + '_tmp', filling),
                                      overwrite=True, quiet=quiet)
                except CalledModuleError:
                    grass.fatal(
                        _("abandoned. Removing temporary maps, restoring user "
                          "mask if needed:"))
                # this map has been removed. No need for later cleanup.
                tmp_rmaps.remove(filling + '_tmp')

            # remove temporary maps to not overfill disk
            try:
                tmp_rmaps.remove(holename)
                tmp_rmaps.remove(holename + '_grown')
                tmp_rmaps.remove(holename + '_edges')
                tmp_rmaps.remove(holename + '_dem')
            except:
                pass
            try:
                grass.run_command('g.remove', quiet=quiet,
                                  flags='fb', type='raster',
                                  name=(holename,
                                        holename + '_grown',
                                        holename + '_edges',
                                        holename + '_dem'))
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            try:
                tmp_vmaps.remove(holename)
            except:
                pass
            try:
                grass.run_command('g.remove', quiet=quiet, flags='fb',
                                  type='vector', name=holename)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring user mask if needed:"))

    # check if method is different from rst to use r.resamp.bspline
    if method != 'rst':
        grass.message(_("Using %s bspline interpolation") % method)

        # clone current region
        grass.use_temp_region()
        grass.run_command('g.region', align=input)

        reg = grass.region()
        # launch r.resamp.bspline
        tmp_rmaps.append(prefix + 'filled')
        # If there are no NULL cells, the r.resamp.bspline call
        # will end with an error although for our needs it's fine
        # Only problem - this state must be read from stderr
        new_env = dict(os.environ)
        new_env['LC_ALL'] = 'C'
        bspline_kwargs = dict(
            input=input,
            output=prefix + 'filled',
            method=method,
            ew_step=3 * reg['ewres'],
            ns_step=3 * reg['nsres'],
            lambda_=lambda_,
            memory=memory,
            flags='n',
            stderr=subprocess.PIPE,
            env=new_env)
        if usermask:
            bspline_kwargs['mask'] = usermask
        try:
            p = grass.core.start_command('r.resamp.bspline', **bspline_kwargs)
            stdout, stderr = p.communicate()
            if "No NULL cells found" in stderr:
                grass.run_command('g.copy', raster='%s,%sfilled' % (input, prefix), overwrite=True)
                p.returncode = 0
                grass.warning(_("Input map <%s> has no holes. Copying to output without modification.") % (input,))
        except CalledModuleError:
            grass.fatal(_("Failure during bspline interpolation. Error message: %s") % stderr)

    # restoring user's mask, if present:
    if usermask:
        grass.message(_("Restoring user mask (MASK)..."))
        try:
            grass.run_command('g.rename', quiet=quiet, raster=(usermask, 'MASK'))
        except CalledModuleError:
            grass.warning(_("Failed to restore user MASK!"))
        usermask = None

    # set region to original extents, align to input
    grass.run_command('g.region', n=reg_org['n'], s=reg_org['s'],
                      e=reg_org['e'], w=reg_org['w'], align=input)

    # patch orig and fill map
    grass.message(_("Patching fill data into NULL areas..."))
    # we can use --o here as g.parser already checks on startup
    grass.run_command('r.patch', input=(input, prefix + 'filled'),
                      output=output, overwrite=True)

    # restore the real region
    grass.del_temp_region()

    grass.message(_("Filled raster map is: %s") % output)

    # write cmd history:
    grass.raster_history(output)

    if len(failed_list) > 0:
        grass.warning(
            _("The following holes were not filled. Temporary maps are left "
              "in place to allow examination of the unfilled holes"))
        outlist = failed_list[0]
        for hole in failed_list[1:]:
            outlist += ', ' + hole
        grass.message(outlist)

    grass.message(_("Done."))
Exemplo n.º 26
0
    if len(sys.argv) == 6:
        exclude = sys.argv[5]
    else:
        exclude = None
else:
    sys.exit(
        "Usage: script.py memory elevation pattern [exclude]\nExample: %s dem '^ContArea_SW_[0-9]+_vect$'"
        % sys.argv[0])

contAreas = gs.read_command("g.list",
                            type="vector",
                            pattern=pattern,
                            exclude=exclude,
                            flags="e").splitlines()

gs.use_temp_region()

gs.run_command("g.region", raster=elevation)

if memory == "all":
    memory = None
    flags = None
else:
    flags = "m"

print("Processing {} raster maps...".format(len(contAreas)))

for area in contAreas:
    gs.run_command("g.region", vector=area, align=elevation)

    try:
Exemplo n.º 27
0
def main(options, flags):

    gisbase = os.getenv('GISBASE')
    if not gisbase:
        gs.fatal(_('$GISBASE not defined'))
        return 0

    # Reference / sample area or points
    ref_rast = options['ref_rast']
    ref_vect = options['ref_vect']
    if ref_rast:
        reftype = gs.raster_info(ref_rast)
        if reftype['datatype'] != "CELL":
            gs.fatal(_("The ref_rast map must have type CELL (integer)"))
        if ((reftype['min'] != 0 and reftype['min'] != 1) or
           reftype['max'] != 1):
            gs.fatal(_("The ref_rast map must be a binary raster,"
                       " i.e. it should contain only values 0 and 1 or 1 only"
                       " (now the minimum is {} and maximum is {})".
                       format(reftype['min'], reftype['max'])))

    # old environmental layers & variable names
    REF = options['env']
    REF = REF.split(',')
    raster_exists(REF)
    ipn = [z.split('@')[0] for z in REF]
    ipn = [x.lower() for x in ipn]

    # new environmental variables
    PROJ = options['env_proj']
    if not PROJ:
        RP = False
        PROJ = REF
    else:
        RP = True
        PROJ = PROJ.split(',')
        raster_exists(PROJ)
        if len(PROJ) != len(REF) and len(PROJ) != 0:
            gs.fatal(_("The number of reference and predictor variables"
                       " should be the same. You provided {} reference and {}"
                       " projection variables".format(len(REF), len(PROJ))))

    # output layers
    opl = options['output']
    opc = opl + '_MES'
    ipi = [opl + '_' + i for i in ipn]

    # flags
    flm = flags['m']
    flk = flags['k']
    fln = flags['n']
    fli = flags['i']
    flc = flags['c']

    # digits / precision
    digits = int(options['digits'])
    digits2 = pow(10, digits)
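    # e.g. digits=3 gives digits2=1000: raster values are scaled by 1000
    # and truncated to int below, so the recode tables operate on integers
    # while keeping three decimals of precision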

    # get current region settings, to compare to new ones later
    region_1 = gs.parse_command("g.region", flags="g")

    # Text for history in metadata
    opt2 = dict((k, v) for k, v in options.items() if v)
    hist = ' '.join("{!s}={!r}".format(k, v) for (k, v) in opt2.items())
    hist = "r.mess {}".format(hist)
    unused, tmphist = tempfile.mkstemp()
    with open(tmphist, "w") as text_file:
        text_file.write(hist)

    # Create reference layer if not defined
    if not ref_rast and not ref_vect:
        ref_rast = tmpname("tmp0")
        gs.mapcalc("$i = if(isnull($r),null(),1)", i=ref_rast, r=REF[0],
                   quiet=True)
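        # i.e. when no reference is given, every non-null cell of the
        # first environmental layer counts as reference area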

    # Create the recode table - Reference distribution is raster
    citiam = gs.find_file(name='MASK', element='cell',
                          mapset=gs.gisenv()['MAPSET'])
    if citiam['fullname']:
        rname = tmpname('tmp3')
        gs.mapcalc('$rname = MASK', rname=rname, quiet=True)

    if ref_rast:
        vtl = ref_rast

        # Create temporary layer based on reference layer
        tmpf0 = tmpname('tmp2')
        gs.mapcalc("$tmpf0 = int($vtl * 1)", vtl=vtl, tmpf0=tmpf0, quiet=True)
        gs.run_command("r.null", map=tmpf0, setnull=0, quiet=True)
        if citiam['fullname']:
            gs.run_command("r.mask", flags="r", quiet=True)
        for i in range(len(REF)):

            # Create mask based on combined MASK/reference layer
            gs.run_command("r.mask", raster=tmpf0, quiet=True)

            # Calculate the frequency distribution
            tmpf1 = tmpname('tmp4')
            gs.mapcalc("$tmpf1 = int($dignum * $inplay)", tmpf1=tmpf1,
                       inplay=REF[i], dignum=digits2, quiet=True)
            p = gs.pipe_command('r.stats', quiet=True, flags='cn',
                                input=tmpf1, sort='asc', sep=';')
            stval = {}
            for line in p.stdout:
                [val, count] = line.strip(os.linesep).split(';')
                stval[float(val)] = float(count)
            p.wait()
            sstval = sorted(stval.items(), key=operator.itemgetter(0))
            sstval = np.matrix(sstval)
            a = np.cumsum(np.array(sstval), axis=0)
            b = np.sum(np.array(sstval), axis=0)
            c = a[:, 1] / b[1] * 100

            # Remove tmp mask and set region to env_proj if needed
            gs.run_command("r.mask", quiet=True, flags="r")
            if RP:
                gs.use_temp_region()
                gs.run_command("g.region", quiet=True, raster=PROJ[0])

            # get new region settings, to compare to original ones later
            region_2 = gs.parse_command("g.region", flags="g")

            # Get min and max values for recode table (based on full map)
            tmpf2 = tmpname('tmp5')
            gs.mapcalc("$tmpf2 = int($dignum * $inplay)",
                       tmpf2=tmpf2, inplay=PROJ[i], dignum=digits2,
                       quiet=True)
            d = gs.parse_command("r.univar", flags="g", map=tmpf2, quiet=True)

            # Create recode rules
            Dmin = int(d['min'])
            Dmax = int(d['max'])
            envmin = np.min(np.array(sstval), axis=0)[0]
            envmax = np.max(np.array(sstval), axis=0)[0]

            if Dmin < envmin:
                e1 = Dmin - 1
            else:
                e1 = envmin - 1
            if Dmax > envmax:
                e2 = Dmax + 1
            else:
                e2 = envmax + 1

            a1 = np.hstack([(e1), np.array(sstval.T[0])[0, :]])
            a2 = np.hstack([np.array(sstval.T[0])[0, :] - 1, (e2)])
            b1 = np.hstack([(0), c])
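            # Each recode rule below reads "low:high:percent": scaled values
            # in [low, high] map to the cumulative frequency (%) of the
            # reference distribution, so values below the reference minimum
            # become 0 and values at or above its maximum become 100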

            fd2, tmprule = tempfile.mkstemp(suffix=ipn[i])
            with open(tmprule, "w") as text_file:
                for k in np.arange(0, len(b1.T)):
                    text_file.write("%s:%s:%s\n" % (str(int(a1[k])),
                                    str(int(a2[k])),
                                    str(b1[k])))

            # Create the recode layer and calculate the IES
            compute_ies(tmprule, ipi[i], tmpf2, envmin, envmax)
            gs.run_command("r.support", map=ipi[i],
                           title="IES {}".format(REF[i]),
                           units="0-100 (relative score",
                           description="Environmental similarity {}"
                           .format(REF[i]), loadhistory=tmphist)

            # Clean up
            os.close(fd2)
            os.remove(tmprule)

            # Change region back to original
            gs.del_temp_region()

    # Create the recode table - Reference distribution is vector
    else:
        vtl = ref_vect

        # Copy point layer and add columns for variables
        tmpf0 = tmpname('tmp7')
        gs.run_command("v.extract", quiet=True, flags="t", input=vtl,
                       type="point", output=tmpf0)
        gs.run_command("v.db.addtable", quiet=True, map=tmpf0)

        # TODO: see if there is a more efficient way to handle the mask
        if citiam['fullname']:
            gs.run_command("r.mask", quiet=True, flags="r")

        # Upload raster values and get value in python as frequency table
        sql1 = "SELECT cat FROM {}".format(str(tmpf0))
        cn = len(np.hstack(db.db_select(sql=sql1)))
        for m in range(len(REF)):

            # Set mask back (this means that points outside the mask will
            # be ignored in the computation of the frequency distribution
            # of the reference variable env(m))
            if citiam['fullname']:
                gs.run_command("g.copy", raster=[rname, 'MASK'], quiet=True)

            # Compute frequency distribution of variable(m)
            mid = str(m)
            laytype = gs.raster_info(REF[m])['datatype']
            if laytype == 'CELL':
                columns = "envvar_{} integer".format(str(mid))
            else:
                columns = "envvar_%s double precision" % mid
            gs.run_command("v.db.addcolumn", map=tmpf0, columns=columns,
                           quiet=True)
            sql2 = "UPDATE {} SET envvar_{} = NULL".format(str(tmpf0), str(mid))
            gs.run_command("db.execute", sql=sql2, quiet=True)
            coln = "envvar_%s" % mid
            gs.run_command("v.what.rast", quiet=True, map=tmpf0,
                           layer=1, raster=REF[m], column=coln)
            sql3 = ("SELECT {0}, count({0}) from {1} WHERE {0} IS NOT NULL "
                    "GROUP BY {0} ORDER BY {0}").format(coln, tmpf0)
            volval = np.vstack(db.db_select(sql=sql3))
            volval = volval.astype(float, copy=False)
            a = np.cumsum(volval[:, 1], axis=0)
            b = np.sum(volval[:, 1], axis=0)
            c = a / b * 100

            # Check for point without values
            if b < cn:
                gs.info(_("Please note that there were {} points without "
                          "value. This is probably because they are outside "
                          "the computational region or mask".format((cn - b))))

            # Set region to env_proj layers (if different from env) and remove
            # mask (if set above)
            if citiam['fullname']:
                gs.run_command("r.mask", quiet=True, flags="r")
            if RP:
                gs.use_temp_region()
                gs.run_command("g.region", quiet=True, raster=PROJ[0])
            region_2 = gs.parse_command("g.region", flags="g")

            # Multiply env_proj layer with dignum
            tmpf2 = tmpname('tmp8')
            gs.mapcalc("$tmpf2 = int($dignum * $inplay)", tmpf2=tmpf2,
                       inplay=PROJ[m], dignum=digits2, quiet=True)

            # Calculate min and max values of sample points and raster layer
            envmin = int(min(volval[:, 0]) * digits2)
            envmax = int(max(volval[:, 0]) * digits2)
            Drange = gs.read_command("r.info", flags="r", map=tmpf2)
            Drange = str.splitlines(Drange)
            Drange = np.hstack([i.split('=') for i in Drange])
            Dmin = int(Drange[1])
            Dmax = int(Drange[3])

            if Dmin < envmin:
                e1 = Dmin - 1
            else:
                e1 = envmin - 1
            if Dmax > envmax:
                e2 = Dmax + 1
            else:
                e2 = envmax + 1

            a0 = volval[:, 0] * digits2
            a0 = a0.astype(int, copy=False)
            a1 = np.hstack([(e1), a0])
            a2 = np.hstack([a0 - 1, (e2)])
            b1 = np.hstack([(0), c])

            fd3, tmprule = tempfile.mkstemp(suffix=ipn[m])
            with open(tmprule, "w") as text_file:
                for k in np.arange(0, len(b1)):
                    rtmp = "{}:{}:{}\n".format(str(int(a1[k])),
                                               str(int(a2[k])), str(b1[k]))
                    text_file.write(rtmp)

            # Create the recode layer and calculate the IES
            compute_ies(tmprule, ipi[m], tmpf2, envmin, envmax)
            gs.run_command("r.support", map=ipi[m],
                           title="IES {}".format(REF[m]),
                           units="0-100 (relative score",
                           description="Environmental similarity {}"
                           .format(REF[m]), loadhistory=tmphist)

            # Clean up
            os.close(fd3)
            os.remove(tmprule)

            # Change region back to original
            gs.del_temp_region()

    # Calculate MESS statistics
    # Set region to env_proj layers (if different from env)
    # Note: this changes the region, to ensure the newly created layers
    # are actually visible to the user. This goes against normal practice.
    # There will be a warning.
    if RP:
        gs.run_command("g.region", quiet=True, raster=PROJ[0])

    # MES
    gs.run_command("r.series", quiet=True, output=opc, input=ipi,
                   method="minimum")
    gs.write_command("r.colors", map=opc, rules='-',
                     stdin=COLORS_MES, quiet=True)

    # Write layer metadata
    gs.run_command("r.support", map=opc,
                   title="Areas with novel conditions",
                   units="0-100 (relative score",
                   description="The multivariate environmental similarity"
                   "(MES)", loadhistory=tmphist)

    # Area with negative MES
    if fln:
        mod1 = "{}_novel".format(opl)
        gs.mapcalc("$mod1 = int(if( $opc < 0, 1, 0))",
                   mod1=mod1, opc=opc, quiet=True)

        # Write category labels
        gs.write_command("r.category", map=mod1, rules='-', stdin=RECL_MESNEG,
                         quiet=True)

        # Write layer metadata
        gs.run_command("r.support", map=mod1,
                       title="Areas with novel conditions",
                       units="-",
                       source1="Based on {}".format(opc),
                       description="1 = novel conditions, 0 = within range",
                       loadhistory=tmphist)

    # Most dissimilar variable (MoD)
    if flm:
        tmpf4 = tmpname('tmp9')
        mod2 = "{}_MoD".format(opl)
        gs.run_command("r.series", quiet=True, output=tmpf4,
                       input=ipi, method="min_raster")
        gs.mapcalc("$mod2 = int($tmpf4)", mod2=mod2, tmpf4=tmpf4, quiet=True)

        fd4, tmpcat = tempfile.mkstemp()
        with open(tmpcat, "w") as text_file:
            for cats in range(len(ipi)):
                text_file.write("{}:{}\n".format(str(cats), REF[cats]))
        gs.run_command("r.category", quiet=True, map=mod2, rules=tmpcat,
                       separator=":")
        os.close(fd4)
        os.remove(tmpcat)

        # Write layer metadata
        gs.run_command("r.support", map=mod2,
                       title="Most dissimilar variable (MoD)",
                       units="-",
                       source1="Based on {}".format(opc),
                       description="Name of most dissimilar variable",
                       loadhistory=tmphist)

    # sum(IES), where IES < 0
    if flk:
        mod3 = "{}_SumNeg".format(opl)
        c0 = -0.01/digits2
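        # c0 sits just below zero at the working precision, so the range
        # (-inf, c0) makes r.series sum only strictly negative IES values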
        gs.run_command("r.series", quiet=True, input=ipi, method="sum",
                       range=('-inf', c0), output=mod3)
        gs.write_command("r.colors", map=mod3, rules='-',
                         stdin=COLORS_MES, quiet=True)

        # Write layer metadata
        gs.run_command("r.support", map=mod3,
                       title="Sum of negative IES values",
                       units="-",
                       source1="Based on {}".format(opc),
                       description="Sum of negative IES values",
                       loadhistory=tmphist)

    # Number of layers with negative values
    if flc:
        tmpf5 = tmpname('tmp10')
        mod4 = "{}_CountNeg".format(opl)
        MinMes = gs.read_command("r.info", quiet=True, flags="r", map=opc)
        MinMes = str.splitlines(MinMes)
        MinMes = float(np.hstack([i.split('=') for i in MinMes])[1])
        c0 = -0.0001/digits2
        gs.run_command("r.series", quiet=True, input=ipi, output=tmpf5,
                       method="count", range=(MinMes, c0))
        gs.mapcalc("$mod4 = int($tmpf5)", mod4=mod4, tmpf5=tmpf5, quiet=True)

        # Write layer metadata
        gs.run_command("r.support", map=mod4,
                       title="Number of layers with negative values",
                       units="-",
                       source1="Based on {}".format(opc),
                       description="Number of layers with negative values",
                       loadhistory=tmphist)

    # Remove IES layers
    if fli:
        gs.run_command("g.remove", quiet=True, flags="f", type="raster",
                       name=ipi)
    # Clean up tmp file
    os.remove(tmphist)

    gs.message(_("Finished ...\n"))
    if region_1 != region_2:
        gs.message(_("\nPlease note that the region has been changes to match"
                     " the set of projection (env_proj) variables.\n"))
Exemplo n.º 28
0
def main():
    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE, TMP_REG_NAME

    GDALdatasource = options['input']
    output = options['output']
    method = options['resample']
    memory = options['memory']
    bands = options['band']
    tgtres = options['resolution']
    title = options["title"]
    if flags['e'] and not output:
        output = 'rimport_tmp'  # will be removed with the entire tmp location
    if options['resolution_value']:
        if tgtres != 'value':
            grass.fatal(_("To set custom resolution value, select 'value' in resolution option"))
        tgtres_value = float(options['resolution_value'])
        if tgtres_value <= 0:
            grass.fatal(_("Resolution value can't be smaller than 0"))
    elif tgtres == 'value':
        grass.fatal(
            _("Please provide the resolution for the imported dataset or change to 'estimated' resolution"))

    # try r.in.gdal directly first
    additional_flags = 'l' if flags['l'] else ''
    if flags['o']:
        additional_flags += 'o'
    region_flag = ''
    if options['extent'] == 'region':
        region_flag += 'r'
    if flags['o'] or grass.run_command('r.in.gdal', input=GDALdatasource, flags='j',
                                       errors='status', quiet=True) == 0:
        parameters = dict(input=GDALdatasource, output=output,
                          memory=memory, flags='ak' + additional_flags + region_flag)
        if bands:
            parameters['band'] = bands
        try:
            grass.run_command('r.in.gdal', **parameters)
            grass.verbose(
                _("Input <%s> successfully imported without reprojection") %
                GDALdatasource)
            return 0
        except CalledModuleError as e:
            grass.fatal(_("Unable to import GDAL dataset <%s>") % GDALdatasource)

    grassenv = grass.gisenv()
    tgtloc = grassenv['LOCATION_NAME']

    # make sure target is not xy
    if grass.parse_command('g.proj', flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for current location <%s>") %
            tgtloc)

    tgtmapset = grassenv['MAPSET']
    GISDBASE = grassenv['GISDBASE']
    TGTGISRC = os.environ['GISRC']
    SRCGISRC = grass.tempfile()

    TMPLOC = 'temp_import_location_' + str(os.getpid())
    TMP_REG_NAME = 'vreg_tmp_' + str(os.getpid())

    f = open(SRCGISRC, 'w')
    f.write('MAPSET: PERMANENT\n')
    f.write('GISDBASE: %s\n' % GISDBASE)
    f.write('LOCATION_NAME: %s\n' % TMPLOC)
    f.write('GUI: text\n')
    f.close()

    tgtsrs = grass.read_command('g.proj', flags='j', quiet=True)

    # create temp location from input without import
    grass.verbose(_("Creating temporary location for <%s>...") % GDALdatasource)
    parameters = dict(input=GDALdatasource, output=output,
                      memory=memory, flags='c', title=title,
                      location=TMPLOC, quiet=True)
    if bands:
        parameters['band'] = bands
    try:
        grass.run_command('r.in.gdal', **parameters)
    except CalledModuleError:
        grass.fatal(_("Unable to read GDAL dataset <%s>") % GDALdatasource)

    # prepare to set region in temp location
    if 'r' in region_flag:
        tgtregion = TMP_REG_NAME
        grass.run_command('v.in.region', output=tgtregion, flags='d')

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    # print projection at verbose level
    grass.verbose(grass.read_command('g.proj', flags='p').rstrip(os.linesep))

    # make sure input is not xy
    if grass.parse_command('g.proj', flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(_("Coordinate reference system not available for input <%s>") % GDALdatasource)

    # import into temp location
    grass.verbose(_("Importing <%s> to temporary location...") % GDALdatasource)
    parameters = dict(input=GDALdatasource, output=output,
                      memory=memory, flags='ak' + additional_flags)
    if bands:
        parameters['band'] = bands
    if 'r' in region_flag:
        grass.run_command('v.proj', location=tgtloc, mapset=tgtmapset,
                          input=tgtregion, output=tgtregion)
        grass.run_command('g.region', vector=tgtregion)
        parameters['flags'] = parameters['flags'] + region_flag
    try:
        grass.run_command('r.in.gdal', **parameters)
    except CalledModuleError:
        grass.fatal(_("Unable to import GDAL dataset <%s>") % GDALdatasource)

    outfiles = grass.list_grouped('raster')['PERMANENT']

    # is output a group?
    group = False
    path = os.path.join(GISDBASE, TMPLOC, 'group', output)
    if os.path.exists(path):
        group = True
        path = os.path.join(GISDBASE, TMPLOC, 'group', output, 'POINTS')
        if os.path.exists(path):
            grass.fatal(_("Input contains GCPs, rectification is required"))

    if 'r' in region_flag:
        grass.run_command('g.remove', type="vector", flags="f",
                          name=tgtregion)

    # switch to target location
    os.environ['GISRC'] = str(TGTGISRC)

    if 'r' in region_flag:
        grass.run_command('g.remove', type="vector", flags="f",
                          name=tgtregion)

    region = grass.region()

    rflags = None
    if flags['n']:
        rflags = 'n'

    vreg = TMP_REG_NAME

    for outfile in outfiles:

        n = region['n']
        s = region['s']
        e = region['e']
        w = region['w']

        grass.use_temp_region()

        if options['extent'] == 'input':
            # r.proj -g
            try:
                tgtextents = grass.read_command('r.proj', location=TMPLOC,
                                                mapset='PERMANENT',
                                                input=outfile, flags='g',
                                                memory=memory, quiet=True)
            except CalledModuleError:
                grass.fatal(_("Unable to get reprojected map extent"))
            try:
                srcregion = grass.parse_key_val(tgtextents, val_type=float, vsep=' ')
                n = srcregion['n']
                s = srcregion['s']
                e = srcregion['e']
                w = srcregion['w']
            except ValueError:  # import into latlong, expect 53:39:06.894826N
                srcregion = grass.parse_key_val(tgtextents, vsep=' ')
                n = grass.float_or_dms(srcregion['n'][:-1]) * \
                    (-1 if srcregion['n'][-1] == 'S' else 1)
                s = grass.float_or_dms(srcregion['s'][:-1]) * \
                    (-1 if srcregion['s'][-1] == 'S' else 1)
                e = grass.float_or_dms(srcregion['e'][:-1]) * \
                    (-1 if srcregion['e'][-1] == 'W' else 1)
                w = grass.float_or_dms(srcregion['w'][:-1]) * \
                    (-1 if srcregion['w'][-1] == 'W' else 1)

            grass.run_command('g.region', n=n, s=s, e=e, w=w)

        # v.in.region in tgt
        grass.run_command('v.in.region', output=vreg, quiet=True)

        grass.del_temp_region()

        # reproject to src
        # switch to temp location
        os.environ['GISRC'] = str(SRCGISRC)
        try:
            grass.run_command('v.proj', input=vreg, output=vreg,
                              location=tgtloc, mapset=tgtmapset, quiet=True)
            # test if v.proj created a valid area
            if grass.vector_info_topo(vreg)['areas'] != 1:
                grass.fatal(_("Please check the 'extent' parameter"))
        except CalledModuleError:
            grass.fatal(_("Unable to reproject to source location"))

        # set region from region vector
        grass.run_command('g.region', raster=outfile)
        grass.run_command('g.region', vector=vreg)
        # align to first band
        grass.run_command('g.region', align=outfile)
        # get number of cells
        cells = grass.region()['cells']

        estres = math.sqrt((n - s) * (e - w) / cells)
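        # estimate square cells that preserve the cell count:
        # res = sqrt(area / cells)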
        # remove from source location for multi bands import
        grass.run_command('g.remove', type='vector', name=vreg,
                          flags='f', quiet=True)

        os.environ['GISRC'] = str(TGTGISRC)
        grass.run_command('g.remove', type='vector', name=vreg,
                          flags='f', quiet=True)

        grass.message(
            _("Estimated target resolution for input band <{out}>: {res}").format(
                out=outfile, res=estres))
        if flags['e']:
            continue

        if options['extent'] == 'input' or tgtres == 'value':
            grass.use_temp_region()

        if options['extent'] == 'input':
            grass.run_command('g.region', n=n, s=s, e=e, w=w)

        res = None
        if tgtres == 'estimated':
            res = estres
        elif tgtres == 'value':
            res = tgtres_value
            grass.message(
                _("Using given resolution for input band <{out}>: {res}").format(
                    out=outfile, res=res))
            # align to requested resolution
            grass.run_command('g.region', res=res, flags='a')
        else:
            curr_reg = grass.region()
            grass.message(_("Using current region resolution for input band "
                            "<{out}>: nsres={ns}, ewres={ew}").format(out=outfile, ns=curr_reg['nsres'],
                                                                      ew=curr_reg['ewres']))

        # r.proj
        grass.message(_("Reprojecting <%s>...") % outfile)
        try:
            grass.run_command('r.proj', location=TMPLOC,
                              mapset='PERMANENT', input=outfile,
                              method=method, resolution=res,
                              memory=memory, flags=rflags, quiet=True)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % outfile)

        if grass.raster_info(outfile)['min'] is None:
            grass.fatal(_("The reprojected raster <%s> is empty") % outfile)

        if options['extent'] == 'input' or tgtres == 'value':
            grass.del_temp_region()

    if flags['e']:
        return 0

    if group:
        grass.run_command('i.group', group=output, input=','.join(outfiles))

    # TODO: write metadata with r.support

    return 0
Exemplo n.º 29
0
def main():
    global vrtfile, tmpfile

    infile  = options['input']
    rast = options['output']
    also = flags['a']

    #### check for gdalinfo (just to check if installation is complete)
    if not grass.find_program('gdalinfo', '--help'):
        grass.fatal(_("'gdalinfo' not found, install GDAL tools first (http://www.gdal.org)"))

    pid = str(os.getpid())
    tmpfile = grass.tempfile()

    ################### let's go

    spotdir = os.path.dirname(infile)
    spotname = grass.basename(infile, 'hdf')

    if rast:
        name = rast
    else:
        name = spotname

    if not grass.overwrite() and grass.find_file(name)['file']:
        grass.fatal(_("<%s> already exists. Aborting.") % name)

    # still a ZIP file?  (is this portable?? see the r.in.srtm script for ideas)
    if infile.lower().endswith('.zip'):
        grass.fatal(_("Please extract %s before import.") % infile)

    try:
        p = grass.Popen(['file', '-ib', infile], stdout=grass.PIPE)
        s = p.communicate()[0]
        if s == "application/x-zip":
            grass.fatal(_("Please extract %s before import.") % infile)
    except:
        pass

    ### create VRT header for NDVI

    projfile = os.path.join(spotdir, "0001_LOG.TXT")
    vrtfile = tmpfile + '.vrt'

    # first process the NDVI:
    grass.try_remove(vrtfile)
    create_VRT_file(projfile, vrtfile, infile)

    ## let's import the NDVI map...
    grass.message(_("Importing SPOT VGT NDVI map..."))
    try:
        grass.run_command('r.in.gdal', input=vrtfile, output=name)
    except CalledModuleError:
        grass.fatal(_("An error occurred. Stop."))

    grass.message(_("Imported SPOT VEGETATION NDVI map <%s>.") % name)

    #################
    ## http://www.vgt.vito.be/faq/FAQS/faq19.html
    # What is the relation between the digital number and the real NDVI ?
    # Real NDVI =coefficient a * Digital Number + coefficient b
    #           = a * DN +b
    #
    # Coefficient a = 0.004
    # Coefficient b = -0.1
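    # e.g. a digital number of 150 maps to 0.004 * 150 - 0.1 = 0.5 NDVI;
    # the full DN range 0..255 thus covers NDVI -0.1..0.92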

    # clone current region
    # switch to a temporary region
    grass.use_temp_region()

    grass.run_command('g.region', raster=name, quiet=True)

    grass.message(_("Remapping digital numbers to NDVI..."))
    tmpname = "%s_%s" % (name, pid)
    grass.mapcalc("$tmpname = 0.004 * $name - 0.1", tmpname=tmpname, name=name)
    grass.run_command('g.remove', type='raster', name=name, quiet=True, flags='f')
    grass.run_command('g.rename', raster=(tmpname, name), quiet=True)

    # write cmd history:
    grass.raster_history(name)

    # apply color table:
    grass.run_command('r.colors', map=name, color='ndvi', quiet=True)

    ##########################
    # second, optionally process the SM quality map:
    
    #SM Status Map
    # http://nieuw.vgt.vito.be/faq/FAQS/faq22.html
    #Data about
    # Bit NR 7: Radiometric quality for B0 coded as 0 if bad and 1 if good
    # Bit NR 6: Radiometric quality for B2 coded as 0 if bad and 1 if good
    # Bit NR 5: Radiometric quality for B3 coded as 0 if bad and 1 if good
    # Bit NR 4: Radiometric quality for MIR coded as 0 if bad and 1 if good
    # Bit NR 3: land code 1 or water code 0
    # Bit NR 2: ice/snow code 1 , code 0 if there is no ice/snow
    # Bit NR 1:	0	0	1		1
    # Bit NR 0:	0	1	0		1
    # 		clear	shadow	uncertain	cloud
    #
    #Note:
    # pos 7     6    5    4    3    2   1   0 (bit position)
    #   128    64   32   16    8    4   2   1 (values for 8 bit)
    #
    #
    # Bit 4-7 should be 1: their sum is 240
    # Bit 3   land code, should be 1, sum up to 248 along with higher bits
    # Bit 2   ice/snow code
    # Bit 0-1 should be 0
    #
    # A good map threshold: >= 248
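    # Worked example: 248 = 11111000 binary, i.e. bits 4-7 (radiometric
    # quality) and bit 3 (land) set, bits 0-2 clear -- a clear land pixel.
    # 252 = 11111100 additionally sets bit 2 (ice/snow), which is why
    # category 252 below isolates snow pixels.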

    if also:
        grass.message(_("Importing SPOT VGT NDVI quality map..."))
        grass.try_remove(vrtfile)
        qname = spotname.replace('NDV', 'SM')
        qfile = os.path.join(spotdir, qname)
        create_VRT_file(projfile, vrtfile, qfile)

        ## let's import the SM quality map...
        smfile = name + '.sm'
        try:
            grass.run_command('r.in.gdal', input=vrtfile, output=smfile)
        except CalledModuleError:
            grass.fatal(_("An error occurred. Stop."))

        # some of the possible values:
        rules = [r + '\n' for r in [
            '8 50 50 50',
            '11 70 70 70',
            '12 90 90 90',
            '60 grey',
            '155 blue',
            '232 violet',
            '235 red',
            '236 brown',
            '248 orange',
            '251 yellow',
            '252 green'
            ]]
        grass.write_command('r.colors', map=smfile, rules='-', stdin=''.join(rules))

        grass.message(_("Imported SPOT VEGETATION SM quality map <%s>.") % smfile)
        grass.message(_("Note: A snow map can be extracted by category 252 (d.rast %s cat=252)") % smfile)
        grass.message("")
        grass.message(_("Filtering NDVI map by Status Map quality layer..."))

        filtfile = "%s_filt" % name
        grass.mapcalc("$filtfile = if($smfile % 4 == 3 || ($smfile / 16) % 16 == 0, null(), $name)",
                      filtfile=filtfile, smfile=smfile, name=name)
        grass.run_command('r.colors', map=filtfile, color='ndvi', quiet=True)
        grass.message(_("Filtered SPOT VEGETATION NDVI map <%s>.") % filtfile)

        # write cmd history:
        grass.raster_history(smfile)
        grass.raster_history(filtfile)

    grass.message(_("Done."))
Exemplo n.º 30
0
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]

    # Make sure the temporal database exists
    tgis.init()

    mapset = grass.gisenv()["MAPSET"]

    sp = tgis.open_old_stds(input, "strds")

    grass.use_temp_region()

    maps = sp.get_registered_maps_as_objects_by_granularity()
    num_maps = len(maps)
    # get datatype of the first map
    if maps:
        maps[0][0].select()
        datatype = maps[0][0].metadata.get_datatype()
    else:
        datatype = None

    # Get the granularity and set bottom, top and top-bottom resolution
    granularity = sp.get_granularity()

    # This is the reference time to scale the z coordinate
    reftime = datetime(1900, 1, 1)

    # We set top and bottom according to the start time in relation
    # to the date 1900-01-01 00:00:00.
    # In case of days, hours, minutes and seconds, a double number
    # is used to represent days and fractions of a day.

    # Space time voxel cubes with monthly or yearly granularity cannot be
    # mixed with other temporal units.

    # Compatible temporal units are: days, hours, minutes and seconds.
    # Incompatible are years and months.
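    # e.g. a granularity of "6 hours" gives tbres = 6 / 24.0 = 0.25, with
    # bottom = the number of days between 1900-01-01 and the series start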
    start, end = sp.get_temporal_extent_as_tuple()

    if sp.is_time_absolute():
        unit = granularity.split(" ")[1]
        granularity = float(granularity.split(" ")[0])

        print("Gran from stds %0.15f" % (granularity))

        if unit == "years" or unit == "year":
            bottom = float(start.year - 1900)
            top = float(granularity * num_maps)
        elif unit == "months" or unit == "month":
            bottom = float((start.year - 1900) * 12 + start.month)
            top = float(granularity * num_maps)
        else:
            bottom = float(tgis.time_delta_to_relative_time(start - reftime))
            days = 0.0
            hours = 0.0
            minutes = 0.0
            seconds = 0.0
            if unit == "days" or unit == "day":
                days = float(granularity)
            if unit == "hours" or unit == "hour":
                hours = float(granularity)
            if unit == "minutes" or unit == "minute":
                minutes = float(granularity)
            if unit == "seconds" or unit == "second":
                seconds = float(granularity)

            granularity = float(days + hours / 24.0 + minutes / 1440.0 +
                                seconds / 86400.0)
    else:
        unit = sp.get_relative_time_unit()
        bottom = start

    top = float(bottom + granularity * float(num_maps))
    try:
        grass.run_command("g.region", t=top, b=bottom, tbres=granularity)
    except CalledModuleError:
        grass.fatal(_("Unable to set 3D region"))

    # Create a NULL map to fill the gaps
    null_map = "temporary_null_map_%i" % os.getpid()
    if datatype == "DCELL":
        grass.mapcalc("%s = double(null())" % (null_map))
    elif datatype == "FCELL":
        grass.mapcalc("%s = float(null())" % (null_map))
    else:
        grass.mapcalc("%s = null()" % (null_map))

    if maps:
        count = 0
        map_names = ""
        for map in maps:
            # Use the first map
            id = map[0].get_id()
            # None ids will be replaced by NULL maps
            if id is None:
                id = null_map

            if count == 0:
                map_names = id
            else:
                map_names += ",%s" % id

            count += 1

        try:
            grass.run_command(
                "r.to.rast3",
                input=map_names,
                output=output,
                overwrite=grass.overwrite(),
            )
        except CalledModuleError:
            grass.fatal(_("Unable to create 3D raster map <%s>" % output))

    grass.run_command("g.remove", flags="f", type="raster", name=null_map)

    title = _("Space time voxel cube")
    descr = _("This space time voxel cube was created with t.rast.to.rast3")

    # Set the unit
    try:
        grass.run_command(
            "r3.support",
            map=output,
            vunit=unit,
            title=title,
            description=descr,
            overwrite=grass.overwrite(),
        )
    except CalledModuleError:
        grass.warning(_("%s failed to set units.") % "r3.support")

    # Register the space time voxel cube in the temporal GIS
    if output.find("@") >= 0:
        id = output
    else:
        id = output + "@" + mapset

    start, end = sp.get_temporal_extent_as_tuple()
    r3ds = tgis.Raster3DDataset(id)

    if r3ds.is_in_db():
        r3ds.select()
        r3ds.delete()
        r3ds = tgis.Raster3DDataset(id)

    r3ds.load()

    if sp.is_time_absolute():
        r3ds.set_absolute_time(start, end)
    else:
        r3ds.set_relative_time(start, end, sp.get_relative_time_unit())

    r3ds.insert()
Exemplo n.º 31
0
# -*- coding: utf-8 -*-

"""
@brief: UAV point cloud interpolation and analysis
This program is free software under the GNU General Public License
(>=v2). Read the file COPYING that comes with GRASS for details.
@author: Brendan Harmon ([email protected])
"""

import os
import grass.script as gscript

# temporary region
gscript.use_temp_region()

# set graphics driver
driver = "cairo"

# set odm directory
odm_directory = os.path.normpath("C:/Users/Brendan/odm/data/reconstruction-with-image-size-2400-results")

# file
file = "pointcloud_georef.las"
fullpath_filename = os.path.join(odm_directory, file)

# set rendering directory
render = os.path.normpath("C:/Users/Brendan/Documents/grassdata/rendering/odm_analytics")

# set region
gscript.run_command("g.region", res=0.3)
Exemplo n.º 32
0
def main():
    inputraster = options['input']
    number_lines = int(options['number_lines'])
    edge_detection_algorithm = options['edge_detection']
    no_edge_friction = int(options['no_edge_friction'])
    lane_border_multiplier = int(options['lane_border_multiplier'])
    min_tile_size = None
    if options['min_tile_size']:
        min_tile_size = float(options['min_tile_size'])
    existing_cutlines = None
    if options['existing_cutlines']:
        existing_cutlines = options['existing_cutlines'].split(',')
    tiles = options['output']
    memory = int(options['memory'])
    tiled = False

    if options['tile_width']:
        tiled = True
        gscript.message(_("Using tiles processing for edge detection"))
        width = int(options['tile_width'])
        height = int(options['tile_height'])
        overlap = int(options['overlap'])

    processes = int(options['processes'])

    global temp_maps
    temp_maps = []
    r = 'raster'
    v = 'vector'

    if existing_cutlines:
        existingcutlinesmap = 'temp_icutlines_existingcutlinesmap_%i' % os.getpid()
        if len(existing_cutlines) > 1:
            gscript.run_command('v.patch',
                                input_=existing_cutlines,
                                output=existingcutlinesmap,
                                quiet=True,
                                overwrite=True)
            existing_cutlines = existingcutlinesmap

        gscript.run_command('v.to.rast',
                            input_=existing_cutlines,
                            output=existingcutlinesmap,
                            use='val',
                            type_='line,boundary',
                            overwrite=True,
                            quiet=True)

        temp_maps.append([existingcutlinesmap, r])

    temp_edge_map = "temp_icutlines_edgemap_%d" % os.getpid()
    temp_maps.append([temp_edge_map, r])

    gscript.message(_("Creating edge map"))
    if edge_detection_algorithm == 'zc':
        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'width_': int(options['zc_width']),
            'threshold': float(options['zc_threshold']),
            'quiet': True
        }

        if tiled:
            grd = GridModule('i.zc',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.zc', **kwargs)

    elif edge_detection_algorithm == 'canny':
        if not gscript.find_program('i.edge', '--help'):
            message = _("You need to install the addon i.edge to use ")
            message += _("the Canny edge detector.\n")
            message += _(
                " You can install the addon with 'g.extension i.edge'")
            gscript.fatal(message)

        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'low_threshold': float(options['canny_low_threshold']),
            'high_threshold': float(options['canny_high_threshold']),
            'sigma': float(options['canny_sigma']),
            'quiet': True
        }

        if tiled:
            grd = GridModule('i.edge',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.edge', **kwargs)

    else:
        gscript.fatal(
            "Only zero-crossing and Canny available as edge detection algorithms."
        )

    region = gscript.region()
    gscript.message(_("Finding cutlines in both directions"))

    nsrange = float(region.n - region.s - region.nsres)
    ewrange = float(region.e - region.w - region.ewres)

    if nsrange > ewrange:
        hnumber_lines = number_lines
        vnumber_lines = int(number_lines * (nsrange / ewrange))
    else:
        vnumber_lines = number_lines
        hnumber_lines = int(number_lines * (ewrange / nsrange))

    # Create the lines in horizontal direction
    nsstep = float(region.n - region.s - region.nsres) / hnumber_lines
    hpointsy = [((region.n - i * nsstep) - region.nsres / 2.0)
                for i in range(0, hnumber_lines + 1)]
    hlanepointsy = [y - nsstep / 2.0 for y in hpointsy]
    hstartpoints = listzip([region.w + 0.2 * region.ewres] * len(hpointsy),
                           hpointsy)
    hstoppoints = listzip([region.e - 0.2 * region.ewres] * len(hpointsy),
                          hpointsy)
    hlanestartpoints = listzip([region.w + 0.2 * region.ewres] *
                               len(hlanepointsy), hlanepointsy)
    hlanestoppoints = listzip([region.e - 0.2 * region.ewres] *
                              len(hlanepointsy), hlanepointsy)
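    # The "lane" lines run halfway between the horizontal cutline rows and
    # receive a prohibitive cost below, confining each cutline to its own
    # horizontal corridor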

    hlanemap = 'temp_icutlines_hlanemap_%i' % os.getpid()
    temp_maps.append([hlanemap, v])
    temp_maps.append([hlanemap, r])

    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(hlanemap)
    new.open('w')
    for line in listzip(hlanestartpoints, hlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']

    gscript.run_command('v.to.rast',
                        input_=hlanemap,
                        output=hlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)

    hbasemap = 'temp_icutlines_hbasemap_%i' % os.getpid()
    temp_maps.append([hbasemap, r])

    # Building the cost maps using the following logic
    # - Any pixel not on an edge, nor on an existing cutline gets a
    # no_edge_friction cost, or no_edge_friction_cost x 10  if there are
    # existing cutlines
    # - Any pixel on an edge gets a cost of 1 if there are no existing cutlines,
    # and a cost of no_edge_friction if there are
    # - A lane line gets a very high cost (lane_border_multiplier x cost of no
    # edge pixel - the latter depending on the existence of cutlines).
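    # For example, with no_edge_friction=5, lane_border_multiplier=10 and
    # no existing cutlines, the generated expression has the form:
    #   hbasemap = if(isnull(hlanemap), if(edge_map == 0, 5, 1), 50)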

    mapcalc_expression = "%s = " % hbasemap
    mapcalc_expression += "if(isnull(%s), " % hlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (existingcutlinesmap,
                                                           no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    hcumcost = 'temp_icutlines_hcumcost_%i' % os.getpid()
    temp_maps.append([hcumcost, r])
    hdir = 'temp_icutlines_hdir_%i' % os.getpid()
    temp_maps.append([hdir, r])

    # Create the lines in vertical direction
    ewstep = float(region.e - region.w - region.ewres) / vnumber_lines
    vpointsx = [((region.e - i * ewstep) - region.ewres / 2.0)
                for i in range(0, vnumber_lines + 1)]
    vlanepointsx = [x + ewstep / 2.0 for x in vpointsx]
    vstartpoints = listzip(vpointsx,
                           [region.n - 0.2 * region.nsres] * len(vpointsx))
    vstoppoints = listzip(vpointsx,
                          [region.s + 0.2 * region.nsres] * len(vpointsx))
    vlanestartpoints = listzip(vlanepointsx, [region.n - 0.2 * region.nsres] *
                               len(vlanepointsx))
    vlanestoppoints = listzip(vlanepointsx, [region.s + 0.2 * region.nsres] *
                              len(vlanepointsx))

    vlanemap = 'temp_icutlines_vlanemap_%i' % os.getpid()
    temp_maps.append([vlanemap, v])
    temp_maps.append([vlanemap, r])

    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(vlanemap)
    new.open('w')
    for line in listzip(vlanestartpoints, vlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']

    gscript.run_command('v.to.rast',
                        input_=vlanemap,
                        output=vlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)

    vbasemap = 'temp_icutlines_vbasemap_%i' % os.getpid()
    temp_maps.append([vbasemap, r])
    mapcalc_expression = "%s = " % vbasemap
    mapcalc_expression += "if(isnull(%s), " % vlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (existingcutlinesmap,
                                                           no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    vcumcost = 'temp_icutlines_vcumcost_%i' % os.getpid()
    temp_maps.append([vcumcost, r])
    vdir = 'temp_icutlines_vdir_%i' % os.getpid()
    temp_maps.append([vdir, r])

    if processes > 1:
        pmemory = memory / 2.0
        rcv = gscript.start_command('r.cost',
                                    input_=vbasemap,
                                    startcoordinates=vstartpoints,
                                    stopcoordinates=vstoppoints,
                                    output=vcumcost,
                                    outdir=vdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)

        rch = gscript.start_command('r.cost',
                                    input_=hbasemap,
                                    startcoordinates=hstartpoints,
                                    stopcoordinates=hstoppoints,
                                    output=hcumcost,
                                    outdir=hdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)
        rcv.wait()
        rch.wait()

    else:
        gscript.run_command('r.cost',
                            input_=vbasemap,
                            startcoordinates=vstartpoints,
                            stopcoordinates=vstoppoints,
                            output=vcumcost,
                            outdir=vdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)

        gscript.run_command('r.cost',
                            input_=hbasemap,
                            startcoordinates=hstartpoints,
                            stopcoordinates=hstoppoints,
                            output=hcumcost,
                            outdir=hdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)

    hlines = 'temp_icutlines_hlines_%i' % os.getpid()
    temp_maps.append([hlines, r])
    vlines = 'temp_icutlines_vlines_%i' % os.getpid()
    temp_maps.append([vlines, r])

    if processes > 1:
        rdh = gscript.start_command('r.drain',
                                    input_=hcumcost,
                                    direction=hdir,
                                    startcoordinates=hstoppoints,
                                    output=hlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)

        rdv = gscript.start_command('r.drain',
                                    input_=vcumcost,
                                    direction=vdir,
                                    startcoordinates=vstoppoints,
                                    output=vlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)

        rdh.wait()
        rdv.wait()

    else:
        gscript.run_command('r.drain',
                            input_=hcumcost,
                            direction=hdir,
                            startcoordinates=hstoppoints,
                            output=hlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)

        gscript.run_command('r.drain',
                            input_=vcumcost,
                            direction=vdir,
                            startcoordinates=vstoppoints,
                            output=vlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)

    # Combine horizontal and vertical lines
    temp_raster_tile_borders = 'temp_icutlines_raster_tile_borders_%i' % os.getpid()
    temp_maps.append([temp_raster_tile_borders, r])
    gscript.run_command('r.patch',
                        input_=[hlines, vlines],
                        output=temp_raster_tile_borders,
                        quiet=True,
                        overwrite=True)

    gscript.message(_("Creating vector polygons"))

    # Create vector polygons

    # First we need to shrink the region a bit to make sure that all vector
    # points / lines fall within the raster
    gscript.use_temp_region()
    gscript.run_command('g.region',
                        s=region.s + region.nsres,
                        e=region.e - region.ewres,
                        quiet=True)

    region_map = 'temp_icutlines_region_map_%i' % os.getpid()
    temp_maps.append([region_map, v])
    temp_maps.append([region_map, r])
    gscript.run_command('v.in.region',
                        output=region_map,
                        type_='line',
                        quiet=True,
                        overwrite=True)

    gscript.del_temp_region()

    gscript.run_command('v.to.rast',
                        input_=region_map,
                        output=region_map,
                        use='val',
                        type_='line',
                        quiet=True,
                        overwrite=True)

    temp_raster_polygons = 'temp_icutlines_raster_polygons_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons, r])
    gscript.run_command('r.patch',
                        input_=[temp_raster_tile_borders, region_map],
                        output=temp_raster_polygons,
                        quiet=True,
                        overwrite=True)

    temp_raster_polygons_thin = 'temp_icutlines_raster_polygons_thin_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons_thin, r])
    gscript.run_command('r.thin',
                        input_=temp_raster_polygons,
                        output=temp_raster_polygons_thin,
                        quiet=True,
                        overwrite=True)

    # Create a series of temporary map names as we have to go
    # through several steps until we reach the final map.
    temp_vector_polygons1 = 'temp_icutlines_vector_polygons1_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons1, v])
    temp_vector_polygons2 = 'temp_icutlines_vector_polygons2_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons2, v])
    temp_vector_polygons3 = 'temp_icutlines_vector_polygons3_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons3, v])
    temp_vector_polygons4 = 'temp_icutlines_vector_polygons4_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons4, v])

    gscript.run_command('r.to.vect',
                        input_=temp_raster_polygons_thin,
                        output=temp_vector_polygons1,
                        type_='line',
                        flags='t',
                        quiet=True,
                        overwrite=True)

    # Erase all category values from the lines
    gscript.run_command('v.category',
                        input_=temp_vector_polygons1,
                        op='del',
                        cat='-1',
                        output=temp_vector_polygons2,
                        quiet=True,
                        overwrite=True)

    # Transform lines to boundaries
    gscript.run_command('v.type',
                        input_=temp_vector_polygons2,
                        from_type='line',
                        to_type='boundary',
                        output=temp_vector_polygons3,
                        quiet=True,
                        overwrite=True)

    # Add centroids
    gscript.run_command('v.centroids',
                        input_=temp_vector_polygons3,
                        output=temp_vector_polygons4,
                        quiet=True,
                        overwrite=True)

    # If a threshold is given erase polygons that are too small
    if min_tile_size:
        gscript.run_command('v.clean',
                            input_=temp_vector_polygons4,
                            tool='rmarea',
                            threshold=min_tile_size,
                            output=tiles,
                            quiet=True,
                            overwrite=True)
    else:
        gscript.run_command('g.copy',
                            vect=[temp_vector_polygons4, tiles],
                            quiet=True,
                            overwrite=True)

    gscript.vector_history(tiles)
Exemplo n.º 33
0
def main():
    """Do the main processing
    """

    # Parse input options:
    patch_map = options['input']
    patches = patch_map.split('@')[0]
    patches_mapset = patch_map.split('@')[1] if len(patch_map.split('@')) > 1 else None
    pop_proxy = options['pop_proxy']
    layer = options['layer']
    costs = options['costs']
    cutoff = float(options['cutoff'])
    border_dist = int(options['border_dist'])
    conefor_dir = options['conefor_dir']
    memory = int(options['memory'])

    # Parse output options:
    prefix = options['prefix']
    edge_map = '{}_edges'.format(prefix)
    vertex_map = '{}_vertices'.format(prefix)
    shortest_paths = '{}_shortest_paths'.format(prefix)

    # Parse flags:
    p_flag = flags['p']
    t_flag = flags['t']
    r_flag = flags['r']

    dist_flags = 'kn' if flags['k'] else 'n'

    lin_cat = 1
    zero_dist = None

    folder = grass.tempdir()
    if not os.path.exists(folder):
        os.makedirs(folder)

    # Setup counter for progress message
    counter = 0

    # Check if location is lat/lon (only in lat/lon geodesic distance
    # measuring is supported)
    if grass.locn_is_latlong():
        grass.verbose("Location is lat/lon: Geodesic distance measure is used")

    # Check if prefix is legal GRASS name
    if not grass.legal_name(prefix):
        grass.fatal('{} is not a legal name for GRASS maps.'.format(prefix))

    if prefix[0].isdigit():
        grass.fatal('Table names starting with a digit are not SQL '
                    'compliant (prefix: {}).'.format(prefix))

    # Check that the output maps do not already exist or may be overwritten
    for output in [edge_map, vertex_map, shortest_paths]:
        if grass.db.db_table_exist(output) and not grass.overwrite():
            grass.fatal('Vector map <{}> already exists'.format(output))

    # Check if input has required attributes
    in_db_connection = grass.vector.vector_db(patch_map)
    if not int(layer) in in_db_connection.keys():
        grass.fatal('No attribute table connected to vector map {} at '
                    'layer {}.'.format(patches, layer))

    # Check if cat column exists
    pcols = grass.vector.vector_columns(patch_map, layer=layer)

    if 'cat' not in pcols.keys():
        grass.fatal('Cannot find the required column cat in vector map '
                    '{}.'.format(patches))

    # Check if pop_proxy column exists
    if pop_proxy not in pcols.keys():
        grass.fatal('Cannot find column {} in vector map '
                    '{}'.format(pop_proxy, patches))

    # Check if pop_proxy column is of numeric type
    if not pcols[pop_proxy]['type'] in ['INTEGER', 'REAL',
                                        'DOUBLE PRECISION']:
        grass.fatal('Column {} is of type {}. Only numeric types '
                    '(integer or double precision) '
                    'allowed!'.format(pop_proxy, pcols[pop_proxy]['type']))

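    # Note: np.fromstring(..., sep='\n') is deprecated in recent NumPy
    # releases; np.fromiter() over the split command output is a drop-in
    # alternative.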
    # Check that pop_proxy column does not contain values <= 0 or NULL
    pop_vals = np.fromstring(grass.read_command('v.db.select',
                                                flags='c',
                                                map=patches,
                                                columns=pop_proxy,
                                                nv=-9999
                                                ).rstrip('\n'),
                             dtype=float, sep='\n')

    if np.min(pop_vals) <= 0:
        grass.fatal('Column {} contains values <= 0 or NULL. Neither '
                    'values <= 0 nor NULL are allowed!'.format(pop_proxy))

    ##############################################
    # Use pygrass region instead of grass.parse_command !?!
    start_reg = grass.parse_command('g.region', flags='ugp')

    max_n = start_reg['n']
    min_s = start_reg['s']
    max_e = start_reg['e']
    min_w = start_reg['w']
    # cost_nsres = reg['nsres']
    # cost_ewres = reg['ewres']

    # Rasterize patches
    # http://www.gdal.org/gdal_tutorial.html
    # http://geoinformaticstutorial.blogspot.no/2012/11/convert-
    # shapefile-to-raster-with-gdal.html
    if t_flag:
        # Rasterize patches with "all-touched" mode using GDAL
        # Reading the region settings again is not needed here; max_n,
        # min_s, max_e, min_w, nsres and ewres from above could be reused
        prast = os.path.join(folder, 'patches_rast.tif')

        # Check if GDAL-GRASS plugin is installed
        if ogr.GetDriverByName('GRASS'):
            # With GDAL-GRASS plugin:
            # locate the file for the patch vector map
            pfile = grass.parse_command('g.findfile', element='vector',
                                        file=patches,
                                        mapset=patches_mapset)['file']
            pfile = os.path.join(pfile, 'head')

        else:
            # Without GDAL-GRASS-plugin
            grass.warning("Cannot find GDAL-GRASS plugin. Consider \
                          installing it in order to save time for \
                          all-touched rasterisation")
            pfile = os.path.join(folder, 'patches_vect.gpkg')
            # Export patch vector map to temp-file in a GDAL-readable
            # format (GeoPackage)
            grass.run_command('v.out.ogr', flags='m', quiet=True,
                              input=patch_map, type='area',
                              layer=layer, output=pfile,
                              lco='GEOMETRY_NAME=geom')

        # Rasterize vector map with all-touched option
        os.system('gdal_rasterize -l {} -at -tr {} {} \
                  -te {} {} {} {} -ot UInt32 -a cat \
                  {} {} -q'.format(patches, start_reg['ewres'],
                                   start_reg['nsres'],
                                   start_reg['w'],
                                   start_reg['s'],
                                   start_reg['e'],
                                   start_reg['n'],
                                   pfile,
                                   prast))

        if not ogr.GetDriverByName('GRASS'):
            # Remove vector temp-file
            os.remove(os.path.join(folder, 'patches_vect.gpkg'))

        # Import rasterized patches
        grass.run_command('r.external', flags='o',
                          quiet=True,
                          input=prast,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    else:
        # Simple rasterisation (areas only;
        # GRASS >= 7.6 also supports 'centroid')
        version = grass.version()['version'].split('.')
        if (int(version[0]), int(version[1])) >= (7, 6):
            conv_types = ['area', 'centroid']
        else:
            conv_types = ['area']
        grass.run_command('v.to.rast', quiet=True,
                          input=patches, use='cat',
                          type=conv_types,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    # Extract boundaries from patch raster map
    grass.run_command('r.mapcalc', expression='{p}_patches_boundary=if(\
    {p}_patches_pol,\
    if((\
    (isnull({p}_patches_pol[-1,0])||| \
    {p}_patches_pol[-1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,1])||| \
    {p}_patches_pol[0,1]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[1,0])||| \
    {p}_patches_pol[1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,-1])||| \
    {p}_patches_pol[0,-1]!={p}_patches_pol)), \
    {p}_patches_pol,null()), null())'.format(p=TMP_PREFIX), quiet=True)
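    # The [row,col] neighborhood operators address the four direct
    # neighbours of each cell, and '|||' is mapcalc's null-tolerant OR:
    # a patch cell keeps its category if at least one neighbour is NULL
    # or belongs to a different patch (i.e. the cell lies on the patch
    # boundary); interior cells become NULL.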

    rasterized_cats = grass.read_command('r.category', separator='newline',
                                         map='{p}_patches_boundary'.format(p=TMP_PREFIX)
                                         ).replace('\t','').strip('\n')
    rasterized_cats = list(map(int, set([x for x in rasterized_cats.split('\n')  if x != ''])))

    # Init output vector maps if they are requested by the user
    network = VectorTopo(edge_map)
    network_columns = [(u'cat', 'INTEGER PRIMARY KEY'),
                       (u'from_p', 'INTEGER'),
                       (u'to_p', 'INTEGER'),
                       (u'min_dist', 'DOUBLE PRECISION'),
                       (u'dist', 'DOUBLE PRECISION'),
                       (u'max_dist', 'DOUBLE PRECISION')]
    network.open('w',
                 tab_name=edge_map,
                 tab_cols=network_columns)

    vertex = VectorTopo(vertex_map)
    vertex_columns = [(u'cat', 'INTEGER PRIMARY KEY'),
                      (pop_proxy, 'DOUBLE PRECISION'),]
    vertex.open('w',
                tab_name=vertex_map,
                tab_cols=vertex_columns)

    if p_flag:
        # Init cost paths file for start-patch
        grass.run_command('v.edit', quiet=True, map=shortest_paths,
                          tool='create')
        grass.run_command('v.db.addtable', quiet=True,
                          map=shortest_paths,
                          columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision")

    start_region_bbox = Bbox(north=float(max_n), south=float(min_s),
                             east=float(max_e), west=float(min_w))
    vpatches = VectorTopo(patches, mapset=patches_mapset)
    vpatches.open('r', layer=int(layer))

    # Loop through patches
    vpatch_ids = np.array(vpatches.features_to_wkb_list(feature_type="centroid",
                                                        bbox=start_region_bbox),
                          dtype=[('vid', 'uint32'),
                                 ('cat', 'uint32'),
                                 ('geom', '|S10')])
    cats = set(vpatch_ids['cat'])
    n_cats = len(cats)
    if n_cats < len(vpatch_ids['cat']):
        grass.verbose('At least one MultiPolygon found in patch map. '
                      'Using average coordinates of the centroids for '
                      'visual representation of the patch.')

    for cat in cats:
        if cat not in rasterized_cats:
            grass.warning('Patch {} has not been rasterized and will '
                          'therefore not be treated as part of the '
                          'network. Consider using the t-flag or changing '
                          'the resolution.'.format(cat))
            continue
        grass.verbose('Calculating connectivity-distances for patch '
                      'number {}'.format(cat))

        # Filter
        from_vpatch = vpatch_ids[vpatch_ids['cat'] == cat]

        # Get patch centroid coordinates
        if from_vpatch['vid'].size == 1:
            from_centroid = Centroid(v_id=int(from_vpatch['vid'][0]),
                                     c_mapinfo=vpatches.c_mapinfo)
            # Skip patches without a valid centroid
            if not from_centroid:
                continue
            from_x = from_centroid.x
            from_y = from_centroid.y
        else:
            # MultiPolygon: average the coordinates of all centroids
            xcoords = []
            ycoords = []
            for f_p in from_vpatch['vid']:
                from_centroid = Centroid(v_id=int(f_p),
                                         c_mapinfo=vpatches.c_mapinfo)
                # Skip features without a valid centroid
                if not from_centroid:
                    continue
                xcoords.append(from_centroid.x)
                ycoords.append(from_centroid.y)
            from_x = np.average(xcoords)
            from_y = np.average(ycoords)

        # Get BoundingBox
        from_bbox = grass.parse_command('v.db.select', map=patch_map,
                                        flags='r',
                                        where='cat={}'.format(cat))

        attr_filter = vpatches.table.filters.select(pop_proxy)
        attr_filter = attr_filter.where("cat={}".format(cat))
        proxy_val = vpatches.table.execute().fetchone()

        # Prepare start patch
        start_patch = '{}_patch_{}'.format(TMP_PREFIX, cat)
        reclass_rule = grass.encode('{} = 1\n* = NULL'.format(cat))
        recl = grass.feed_command('r.reclass', quiet=True,
                                  input='{}_patches_boundary'.format(TMP_PREFIX),
                                  output=start_patch,
                                  rules='-')
        recl.stdin.write(reclass_rule)
        recl.stdin.close()
        recl.wait()
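
        # grass.feed_command starts r.reclass with rules='-' so the rules
        # can be streamed through stdin; wait() blocks until the module
        # has finished and the reclassified map exists.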

        # Check if patch was rasterised (patches smaller than the raster
        # resolution and close to larger patches may not be rasterised)
        #start_check = grass.parse_command('r.info', flags='r', map=start_patch)
        #start_check = grass.parse_command('r.univar', flags='g', map=start_patch)
        #print(start_check)
        """if start_check['min'] != '1':
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.'.format(cat))

            grass.run_command('g.remove', flags='f', vector=start_patch,
                              raster=start_patch, quiet=True)
            grass.del_temp_region()
            continue"""

        # Prepare stop patches
        ############################################
        reg = grass.parse_command('g.region', flags='ug', quiet=True,
                                  raster=start_patch,
                                  n=float(from_bbox['n']) + float(cutoff),
                                  s=float(from_bbox['s']) - float(cutoff),
                                  e=float(from_bbox['e']) + float(cutoff),
                                  w=float(from_bbox['w']) - float(cutoff),
                                  align='{}_patches_pol'.format(TMP_PREFIX))

        north = reg['n'] if max_n > reg['n'] else max_n
        south = reg['s'] if min_s < reg['s'] else min_s
        east = reg['e'] if max_e < reg['e'] else max_e
        west = reg['w'] if min_w > reg['w'] else min_w

        # Set region to patch search radius
        grass.use_temp_region()
        grass.run_command('g.region', quiet=True,
                          n=north, s=south, e=east, w=west,
                          align='{}_patches_pol'.format(TMP_PREFIX))

        # Create buffer around start-patch as a mask
        # for cost distance analysis
        grass.run_command('r.buffer', quiet=True,
                          input=start_patch,
                          output='MASK', distances=cutoff)
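        # Naming the r.buffer output 'MASK' activates it as a raster mask,
        # so the following mapcalc only operates within the buffered
        # search radius; r.mask -r below removes it again.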
        grass.run_command('r.mapcalc', quiet=True,
                          expression='{pf}_patch_{p}_neighbours_contur=\
                                     if({pf}_patches_boundary=={p},\
                                     null(),\
                                     {pf}_patches_boundary)'.format(pf=TMP_PREFIX, p=cat))
        grass.run_command('r.mask', flags='r', quiet=True)

        # Calculate cost distance
        cost_distance_map = '{}_patch_{}_cost_dist'.format(prefix, cat)
        grass.run_command('r.cost', flags=dist_flags, quiet=True,
                          overwrite=True, input=costs,
                          output=cost_distance_map,
                          start_rast=start_patch, memory=memory)

        #grass.run_command('g.region', flags='up')
        # grass.raster.raster_history(cost_distance_map)
        cdhist = History(cost_distance_map)
        cdhist.clear()
        cdhist.creator = os.environ['USER']
        cdhist.write()
        # History object cannot modify description
        grass.run_command('r.support',
                          map=cost_distance_map,
                          description='Generated by r.connectivity.distance',
                          history=os.environ['CMDLINE'])


        # Export distance at boundaries
        maps = '{0}_patch_{1}_neighbours_contur,{2}_patch_{1}_cost_dist'
        maps = maps.format(TMP_PREFIX, cat, prefix)

        connections = grass.encode(grass.read_command(
            'r.stats', flags='1ng', quiet=True,
            input=maps, separator=';').rstrip('\n'))
        if connections:
            con_array = np.genfromtxt(BytesIO(connections), delimiter=';',
                                      dtype=None,
                                      names=['x', 'y', 'cat', 'dist'])
        else:
            grass.warning('No connections for patch {}'.format(cat))

            # Write centroid to vertex map
            vertex.write(Point(from_x, from_y),
                         cat=int(cat),
                         attrs=proxy_val)
            vertex.table.conn.commit()

            # Remove temporary map data
            grass.run_command('g.remove', quiet=True, flags='f',
                              type=['raster', 'vector'],
                              pattern="{}*{}*".format(TMP_PREFIX, cat))
            grass.del_temp_region()
            continue

        # Find closest points on neighbouring patches
        to_cats = set(np.atleast_1d(con_array['cat']))
        to_coords = []
        for to_cat in to_cats:
            connection = con_array[con_array['cat'] == to_cat]
            connection.sort(order=['dist'])
            pixel = border_dist if len(connection) > border_dist else len(connection) - 1
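            # border_dist selects the n-th closest border cell (after
            # sorting by distance) as the representative connection point;
            # if the border has fewer cells, fall back to the farthest one.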
            # closest_points_x = connection['x'][pixel]
            # closest_points_y = connection['y'][pixel]
            closest_points_to_cat = to_cat
            closest_points_min_dist = connection['dist'][0]
            closest_points_dist = connection['dist'][pixel]
            closest_points_max_dist = connection['dist'][-1]
            to_patch_ids = vpatch_ids[vpatch_ids['cat'] == int(to_cat)]['vid']

            if len(to_patch_ids) == 1:
                to_centroid = Centroid(v_id=int(to_patch_ids[0]),
                                       c_mapinfo=vpatches.c_mapinfo)
                to_x = to_centroid.x
                to_y = to_centroid.y
            elif len(to_patch_ids) > 1:
                # MultiPolygon: average the coordinates of all centroids
                xcoords = []
                ycoords = []
                for t_p in to_patch_ids:
                    to_centroid = Centroid(v_id=int(t_p),
                                           c_mapinfo=vpatches.c_mapinfo)
                    # Skip features without a valid centroid
                    if not to_centroid:
                        continue
                    xcoords.append(to_centroid.x)
                    ycoords.append(to_centroid.y)
                to_x = np.average(xcoords)
                to_y = np.average(ycoords)

            to_coords.append('{},{},{},{},{},{}'.format(connection['x'][0],
                                                        connection['y'][0],
                                                        to_cat,
                                                        closest_points_min_dist,
                                                        closest_points_dist,
                                                        closest_points_max_dist))

            # Save edges to network dataset
            if closest_points_dist <= 0:
                zero_dist = 1

            # Write data to network
            network.write(Line([(from_x, from_y),
                                (to_x, to_y)]),
                          cat=lin_cat,
                          attrs=(cat,
                                 int(closest_points_to_cat),
                                 closest_points_min_dist,
                                 closest_points_dist,
                                 closest_points_max_dist,))
            network.table.conn.commit()

            lin_cat = lin_cat + 1

        # Save closest points and shortest paths through cost raster as
        # vector map (r.drain limited to 1024 points) if requested
        if p_flag:
            grass.verbose('Extracting shortest paths for patch '
                          'number {}...'.format(cat))

            points_n = len(to_cats)

            tiles = int(points_n / 1024.0)
            rest = points_n % 1024
            if not rest == 0:
                tiles = tiles + 1

            tile_n = 0
            while tile_n < tiles:
                tile_n = tile_n + 1
                # Import closest points for start-patch in chunks of up to 1024
                sp = grass.feed_command('v.in.ascii', flags='nr',
                                  overwrite=True, quiet=True,
                                  input='-', stderr=subprocess.PIPE,
                                  output="{}_{}_cp".format(TMP_PREFIX,
                                                           cat),
                                  separator=",",
                                  columns="x double precision,\
                                           y double precision,\
                                           to_p integer,\
                                           dist_min double precision,\
                                           dist double precision,\
                                           dist_max double precision")
                sp.stdin.write(grass.encode("\n".join(to_coords)))
                sp.stdin.close()
                sp.wait()

                # Extract shortest paths for start-patch in chunks of
                # 1024 points
                cost_paths = "{}_{}_cost_paths".format(TMP_PREFIX, cat)
                start_points = "{}_{}_cp".format(TMP_PREFIX, cat)
                grass.run_command('r.drain', overwrite=True, quiet=True,
                                  input=cost_distance_map,
                                  output=cost_paths,
                                  drain=cost_paths,
                                  start_points=start_points)

                grass.run_command('v.db.addtable',
                                  map=cost_paths,
                                  quiet=True,
                                  columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision")
                grass.run_command('v.db.update', map=cost_paths,
                                  column='from_p', value=cat,
                                  quiet=True)
                grass.run_command('v.distance', quiet=True,
                                  from_=cost_paths,
                                  to=start_points,
                                  upload='to_attr',
                                  column='to_p',
                                  to_column='to_p')
                grass.run_command('v.db.join', quiet=True,
                                  map=cost_paths,
                                  column='to_p', other_column='to_p',
                                  other_table=start_points,
                                  subset_columns='dist_min,dist,dist_max')

                #grass.run_command('v.info', flags='c',
                #                  map=cost_paths)
                grass.run_command('v.patch', flags='ae', overwrite=True,
                                  quiet=True,
                                  input=cost_paths,
                                  output=shortest_paths)

                # Remove temporary map data
                grass.run_command('g.remove', quiet=True, flags='f',
                                  type=['raster', 'vector'],
                                  pattern="{}*{}*".format(TMP_PREFIX,
                                                          cat))

        # Remove temporary map data for patch
        if r_flag:
            grass.run_command('g.remove', flags='f', type='raster',
                              name=cost_distance_map,
                              quiet=True)

        vertex.write(Point(from_x, from_y),
                     cat=int(cat),
                     attrs=proxy_val)

        vertex.table.conn.commit()

        # Print progress message
        grass.percent(i=int((float(counter) / n_cats) * 100),
                      n=100,
                      s=3)

        # Update counter for progress message
        counter = counter + 1

    if zero_dist:
        grass.warning('Some patches are directly adjacent to others. '
                      'Minimum distance set to 0.0000000001')

    # Close vector maps and build topology
    network.close()
    vertex.close()

    # Add vertex attributes
    # grass.run_command('v.db.addtable', map=vertex_map)
    # grass.run_command('v.db.join', map=vertex_map, column='cat',
    #                   other_table=in_db_connection[int(layer)]['table'],
    #                   other_column='cat', subset_columns=pop_proxy,
    #                   quiet=True)

    # Add history and meta data to produced maps
    grass.run_command('v.support', flags='h', map=edge_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    grass.run_command('v.support', flags='h', map=vertex_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    if p_flag:
        grass.run_command('v.support', flags='h', map=shortest_paths,
                          person=os.environ['USER'],
                          cmdhist=os.environ['CMDLINE'])

    # Output also Conefor files if requested
    if conefor_dir:
        query = """SELECT p_from, p_to, avg(dist) FROM
                 (SELECT
                 CASE
                 WHEN from_p > to_p THEN to_p
                 ELSE from_p END AS p_from,
                    CASE
                 WHEN from_p > to_p THEN from_p
                 ELSE to_p END AS p_to,
                 dist
                 FROM {}) AS x
                 GROUP BY p_from, p_to""".format(edge_map)
        with open(os.path.join(conefor_dir,
                               'undirected_connection_file'),
                  'w') as edges:
            edges.write(grass.read_command('db.select', sql=query,
                                           separator=' '))
        with open(os.path.join(conefor_dir,
                               'directed_connection_file'),
                  'w') as edges:
            edges.write(grass.read_command('v.db.select', map=edge_map,
                                           separator=' ', flags='c'))
        with open(os.path.join(conefor_dir, 'node_file'), 'w') as nodes:
            nodes.write(grass.read_command('v.db.select',
                                           map=vertex_map,
                                           separator=' ', flags='c'))
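
The Conefor export above folds the two directed records for each patch pair
(from_p -> to_p and to_p -> from_p) into one undirected record by ordering the
pair and averaging the distances. A minimal standalone sketch of the same
reduction in plain Python (the edge tuples are hypothetical sample data):

from collections import defaultdict

edges = [(1, 2, 100.0), (2, 1, 120.0), (1, 3, 50.0)]  # (from_p, to_p, dist)
pairs = defaultdict(list)
for from_p, to_p, dist in edges:
    # order the pair so (1, 2) and (2, 1) share one key
    pairs[tuple(sorted((from_p, to_p)))].append(dist)

for (p_from, p_to), dists in pairs.items():
    print(p_from, p_to, sum(dists) / len(dists))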
Exemplo n.º 34
def main():

    # temporary region
    gscript.use_temp_region()

    # set parameters
    overwrite = True
    rain_value = 50.0
    man_value = 0.05
    niterations = 25
    nwalkers = 40000
    mapset = "fusion"

    # assign variables
    elevation = "fusion"
    depth = "depth"
    dx = "dx"
    dy = "dy"

    # set temporal parameters
    temporaltype = "relative"
    strds = "depth_timeseries"
    title = "depth_timeseries"
    description = "timeseries of depth maps"

    # assign temporal variables
    datatype = "strds"
    increment = str(niterations) + " minutes"
    raster = "raster"

    # water flow
    gscript.run_command("g.region", raster=elevation, res=1)
    gscript.run_command(
        "r.sim.water",
        elevation=elevation,
        dx=dx,
        dy=dy,
        rain_value=rain_value,
        depth=depth,
        man_value=man_value,
        nwalkers=nwalkers,
        niterations=niterations,
        flags="t",
        overwrite=overwrite,
    )

    # create a raster space time dataset
    gscript.run_command(
        "t.create",
        type=datatype,
        temporaltype=temporaltype,
        output=strds,
        title=title,
        description=description,
        overwrite=overwrite,
    )

    # list rasters
    timeseries = gscript.list_grouped("rast", pattern="depth.*")[mapset]

    # register the rasters
    gscript.run_command(
        "t.register", type=raster, input=strds, maps=timeseries, increment=increment, overwrite=overwrite
    )
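
Once the depth maps are registered, the relative time axis of the dataset can
be inspected with t.info and t.rast.list; a short sketch, assuming gscript is
the grass.script import used above and the strds name from this example:

# show metadata and the registered maps of the space time dataset
gscript.run_command("t.info", input="depth_timeseries", type="strds")
gscript.run_command("t.rast.list", input="depth_timeseries")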
Exemplo n.º 35
    def flux(self):
        """a detachment limited gully evolution model using simulated sediment flux to carve a DEM"""

        # assign variables
        slope = 'slope'
        aspect = 'aspect'
        dx = 'dx'
        dy = 'dy'
        rain = 'rain'
        depth = 'depth'
        dc = 'dc'
        tc = 'tc'
        tau = 'tau'
        rho = 'rho'
        flux = 'flux'
        sedflux = 'sedflux'
        evolving_dem = 'evolving_dem'

        # parse time (expects ISO format 'YYYY-MM-DD HH:MM:SS')
        year = int(self.start[:4])
        month = int(self.start[5:7])
        day = int(self.start[8:10])
        hours = int(self.start[11:13])
        minutes = int(self.start[14:16])
        seconds = int(self.start[17:19])
        time = datetime.datetime(year, month, day, hours, minutes, seconds)

        # advance time
        time = time + datetime.timedelta(minutes=self.rain_interval)
        time = time.isoformat(" ")

        # timestamp for the evolved DEM map name
        evolved_dem = 'dem_' + time.replace(" ", "_").replace("-", "_").replace(":", "_")

        # set temporary region
        gscript.use_temp_region()

        # compute slope, aspect, and partial derivatives
        gscript.run_command('r.slope.aspect', elevation=self.dem, slope=slope, aspect=aspect, dx=dx, dy=dy, overwrite=True)

        # crop temporary region to trim edge effects of moving window computations
        info = gscript.parse_command('g.region', flags='g')
        n = float(info.n) - float(info.nsres)
        s = float(info.s) + float(info.nsres)
        e = float(info.e) - float(info.ewres)
        w = float(info.w) + float(info.ewres)
        gscript.run_command('g.region', n=n, s=s, e=e, w=w)

        # hydrology parameters
        gscript.run_command('r.mapcalc', expression="{rain} = {rain_intensity}*{runoff}".format(rain=rain, rain_intensity=self.rain_intensity, runoff=self.runoff), overwrite=True)

        # hydrologic simulation
        gscript.run_command('r.sim.water', elevation=self.dem, dx=dx, dy=dy, rain=rain, man_value=self.mannings, depth=depth, niterations=self.rain_interval, nwalkers=self.walkers, overwrite=True)

        # erosion parameters
        gscript.run_command('r.mapcalc', expression="{dc} = {detachment}".format(dc=dc, detachment=self.detachment), overwrite=True)
        gscript.run_command('r.mapcalc', expression="{tc} = {transport}".format(tc=tc, transport=self.transport), overwrite=True)
        gscript.run_command('r.mapcalc', expression="{tau} = {shearstress}".format(tau=tau, shearstress=self.shearstress), overwrite=True)
        gscript.run_command('r.mapcalc', expression="{rho} = {mass}".format(rho=rho, mass=self.mass), overwrite=True)

        # erosion-deposition simulation
        gscript.run_command('r.sim.sediment', elevation=self.dem, water_depth=depth, dx=dx, dy=dy, detachment_coeff=dc, transport_coeff=tc, shear_stress=tau, man_value=self.mannings, sediment_flux=flux, niterations=self.rain_interval, nwalkers=self.walkers, overwrite=True)

        # filter outliers
        gscript.run_command('r.mapcalc', expression="{sedflux} = if({flux}<{fluxmin},{fluxmin},if({flux}>{fluxmax},{fluxmax},{flux}))".format(sedflux=sedflux, flux=flux, fluxmin=self.fluxmin, fluxmax=self.fluxmax), overwrite=True)
        gscript.run_command('r.colors', map=sedflux, raster=flux)

        # evolve landscape:
        # change in elevation (m) = change in time (s)
        #     * sediment flux (kg/ms) / mass of sediment per unit area (kg/m^2)
        gscript.run_command('r.mapcalc', expression="{evolving_dem} = {dem}-({rain_interval}*60*{sedflux}/{rho})".format(evolving_dem=evolving_dem, dem=self.dem, rain_interval=self.rain_interval, sedflux=sedflux, rho=rho), overwrite=True)

        # reset region to full extent
        n = float(info.n)
        s = float(info.s)
        e = float(info.e)
        w = float(info.w)
        gscript.run_command('g.region', n=n, s=s, e=e, w=w)

        # rebuild edges
        gscript.run_command('r.mapcalc', expression="{evolved_dem} = if(isnull({evolving_dem}),{dem},{evolving_dem})".format(evolved_dem=evolved_dem, evolving_dem=evolving_dem, dem=self.dem), overwrite=True)
        gscript.run_command('r.colors', map=evolved_dem, flags='e', color='elevation')

        # remove temporary maps
        gscript.run_command('g.remove', type='raster', name=['rain', 'evolving_dem', 'dc', 'tc', 'tau', 'rho', 'dx', 'dy'], flags='f')


        return evolved_dem, time
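
A quick check of the evolution formula above with hypothetical values shows
the units reduce to metres (s * kg/(m*s) / (kg/m^2) = m):

# hypothetical parameter values, for illustration only
rain_interval = 1    # minutes
sedflux = 0.1        # sediment flux, kg/(m*s)
rho = 1500.0         # mass of sediment per unit area, kg/m^2
dz = rain_interval * 60 * sedflux / rho
print(dz)            # 0.004 m of elevation change per interval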
Exemplo n.º 36
def main():
    
    # temporary region
    gscript.use_temp_region()
    
    # set graphics driver
    driver = "cairo"
    
    # set rendering directory
    render = os.path.normpath("C:/Users/Brendan/Documents/grassdata/rendering/fusion")
    
    # set maps
    highres_dem = "uav_dsm@fusion"
    lowres_dem = "lidar_dem@fusion"
    
    # set parameters
    overwrite = True
    tension = 20
    smooth = 1
    npmin = 80
    dmin = 0.3
    
    # assign variables
    highres_sample = "highres_sample"
    lowres_sample = "lowres_sample"
    cover = "cover"
    fused_points="fused_points"
    fusion = "fusion"
    dx = "dx"
    dy = "dy"
    
    # set parameters for resampling (sample ~10% of the cells of each DEM)
    info = gscript.raster_info(highres_dem)
    highres_npoints = int(info.cols) * int(info.rows) // 10
    info = gscript.raster_info(lowres_dem)
    lowres_npoints = int(info.cols) * int(info.rows) // 10
    
    # randomly sample high resolution dem
    gscript.run_command('g.region', raster=highres_dem)
    gscript.run_command('r.random', input=highres_dem, npoints=highres_npoints, vector=highres_sample, flags='d', overwrite=overwrite)
    
    # set cover map
    gscript.run_command('g.region', raster=lowres_dem)
    gscript.run_command('r.mapcalc', expression="{cover} = if(isnull({highres_dem}),{lowres_dem},null())".format(cover=cover, lowres_dem=lowres_dem, highres_dem=highres_dem), overwrite=overwrite)
    
    # randomly sample low resolution dem
    gscript.run_command('g.region', raster=lowres_dem)
    gscript.run_command('r.random', input=lowres_dem, npoints=lowres_npoints, cover=cover, vector=lowres_sample, flags='d', overwrite=overwrite)
    
    # patch
    gscript.run_command('v.patch', input=(highres_sample, lowres_sample), output=fused_points, flags='b', overwrite=overwrite)
    
    # interpolation with partial derivatives
    gscript.run_command('v.surf.rst', input=fused_points, elevation=fusion, slope=dx, aspect=dy, tension=tension, smooth=smooth, npmin=npmin, dmin=dmin, flags='d', overwrite=overwrite)
    gscript.run_command('r.colors', map=fusion, color="elevation")
    
    # render elevation
    gscript.run_command('g.region', raster=lowres_dem)
    info = gscript.parse_command('r.info', map=fusion, flags='g')
    relief = "relief"
    gscript.run_command('d.mon', start=driver, width=info.cols, height=info.rows, output=os.path.join(render,fusion+".png"), overwrite=overwrite)
    gscript.run_command('r.relief', input=fusion, output=relief, zscale=1, overwrite=overwrite)
    gscript.run_command('r.colors', map=fusion, color="elevation")
    gscript.run_command('d.shade', shade=relief, color=fusion, brighten=25)
    gscript.run_command('d.legend', raster=fusion, at=(5,50,5,7))
    gscript.run_command('d.mon', stop=driver)
    
    # remove temporary maps
    gscript.run_command('g.remove', type='vector', name=['highres_sample', 'lowres_sample', 'fused_points'], flags='f')
    gscript.run_command('g.remove', type='raster', name='cover', flags='f') 
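
The point counts actually drawn by r.random can be checked against the
requested 10% densities before the sample maps are removed; a small sketch
using grass.script (map names as above):

# number of points in each sampled vector map
print(gscript.vector_info_topo("highres_sample")["points"])
print(gscript.vector_info_topo("lowres_sample")["points"])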
Exemplo n.º 37
def main():
    # Temporary filenames
    tmp_avg_lse = tmp_map_name('avg_lse')
    tmp_delta_lse = tmp_map_name('delta_lse')
    #tmp_lst = tmp_map_name('lst')

    # user input
    mtl_file = options['mtl']

    if not options['prefix']:
        b10 = options['b10']
        b11 = options['b11']
        t10 = options['t10']
        t11 = options['t11']

        if not options['clouds']:
            qab = options['qab']
            cloud_map = False

        else:
            qab = False
            cloud_map = options['clouds']

    elif options['prefix']:
        prefix = options['prefix']
        b10 = prefix + '10'
        b11 = prefix + '11'

        if not options['clouds']:
            qab = prefix + 'QA'
            cloud_map = False

        else:
            cloud_map = options['clouds']
            qab = False

    qapixel = options['qapixel']
    lst_output = options['lst']

    # save Brightness Temperature maps?
    if options['prefix_bt']:
        brightness_temperature_prefix = options['prefix_bt']
    else:
        brightness_temperature_prefix = None

    if options['cwv']:
        tmp_cwv = options['cwv']
    else:
        tmp_cwv = tmp_map_name('cwv')
        cwv_window_size = int(options['window'])
        assertion_for_cwv_window_size_msg = MSG_ASSERTION_WINDOW_SIZE
        assert cwv_window_size >= 7, assertion_for_cwv_window_size_msg
    cwv_output = options['cwv_out']

    # optional maps
    average_emissivity_map = options['emissivity']
    delta_emissivity_map = options['delta_emissivity']

    # output for in-between maps?
    emissivity_output = options['emissivity_out']
    delta_emissivity_output = options['delta_emissivity_out']

    landcover_map = options['landcover']
    landcover_class = options['landcover_class']

    # flags
    info = flags['i']
    null = flags['n']
    scene_extent = flags['e']
    median = flags['m']
    accuracy = flags['a']
    rounding = flags['r']
    celsius = flags['c']
    timestamping = flags['t']

    #
    # Pre-production actions
    #

    if scene_extent:
        grass.use_temp_region()  # safely modify the region, restore at end
        msg = WARNING_REGION_MATCHING

        # TODO: Check if extent-B10 == extent-B11? #
        if b10:
            run('g.region', rast=b10, align=b10)
            msg = msg.format(name=b10)

        elif t10:
            run('g.region', rast=t10, align=t10)
            msg = msg.format(name=t10)
        # ---------------------------------------- #

        grass.warning(_(msg))

    #
    # 1. Mask clouds
    #

    if cloud_map:
        msg = f'\n|i Using user defined \'{cloud_map}\' as a MASK'
        g.message(msg)
        r.mask(raster=cloud_map, flags='i', overwrite=True)

    else:
        # using the quality assessment band and a "QA" pixel value
        mask_clouds(qab, qapixel)

    #
    # 2. TIRS > Brightness Temperatures
    #

    if mtl_file:
        # if MTL and b10 given, use it to compute at-satellite temperature t10
        if b10:
            t10 = tirs_to_at_satellite_temperature(
                b10,
                mtl_file,
                brightness_temperature_prefix,
                null,
                info=info,
            )
        # likewise for b11 -> t11
        if b11:
            t11 = tirs_to_at_satellite_temperature(
                b11,
                mtl_file,
                brightness_temperature_prefix,
                null,
                info=info,
            )

    #
    # 3. Land Surface Emissivities
    #

    split_window_lst = SplitWindowLST(landcover_class)

    if landcover_class:

        if split_window_lst.landcover_class is False:
            # replace with meaningful error
            grass.warning(MSG_UNKNOWN_LANDCOVER_CLASS)

        if landcover_class == 'Random':
            msg = MSG_RANDOM_EMISSIVITY_CLASS + \
                split_window_lst.landcover_class + ' '

        elif landcover_class == 'Barren_Land':
            msg = MSG_BARREN_LAND + \
                split_window_lst.landcover_class + ' '

        else:
            msg = MSG_SINGLE_CLASS_AVERAGE_EMISSIVITY + f'{landcover_class} '

        if info:
            msg += MSG_AVERAGE_EMISSIVITIES
            msg += str(split_window_lst.emissivity_t10) + ', ' + \
                str(split_window_lst.emissivity_t11)

        g.message(msg)

    # use the FROM-GLC map
    elif landcover_map:

        if average_emissivity_map:
            tmp_avg_lse = average_emissivity_map

        if not average_emissivity_map:
            determine_average_emissivity(
                tmp_avg_lse,
                emissivity_output,
                landcover_map,
                split_window_lst.average_lse_mapcalc,
                info=info,
            )
            if options['emissivity_out']:
                tmp_avg_lse = options['emissivity_out']

        if delta_emissivity_map:
            tmp_delta_lse = delta_emissivity_map

        if not delta_emissivity_map:
            determine_delta_emissivity(
                tmp_delta_lse,
                delta_emissivity_output,
                landcover_map,
                split_window_lst.delta_lse_mapcalc,
                info=info,
            )
            if options['delta_emissivity_out']:
                tmp_delta_lse = options['delta_emissivity_out']

    #
    # 4. Estimate Column Water Vapor
    #

    if not options['cwv']:
        estimate_cwv(
            temporary_map=tmp_cwv,
            cwv_map=cwv_output,
            t10=t10,
            t11=t11,
            window_size=cwv_window_size,
            median=median,
            info=info,
        )
    else:
        msg = f'\n|! User defined map \'{tmp_cwv}\' for atmospheric column water vapor'
        g.message(msg)

    if cwv_output:
        tmp_cwv = cwv_output

    #
    # 5. Estimate Land Surface Temperature
    #

    if info and landcover_class == 'Random':
        msg = MSG_PICK_RANDOM_CLASS
        grass.verbose(msg)

    estimate_lst(
        outname=lst_output,
        t10=t10,
        t11=t11,
        landcover_map=landcover_map,
        landcover_class=landcover_class,
        avg_lse_map=tmp_avg_lse,
        delta_lse_map=tmp_delta_lse,
        cwv_map=tmp_cwv,
        lst_expression=split_window_lst.sw_lst_mapcalc,
        rounding=rounding,
        celsius=celsius,
        info=info,
    )

    #
    # Post-production actions
    #

    # remove MASK
    r.mask(flags='r', verbose=True)

    if timestamping:
        add_timestamp(mtl_file, lst_output)

        if cwv_output:
            add_timestamp(mtl_file, cwv_output)

    if celsius:
        run('r.colors', map=lst_output, color='celsius')

    else:
        run('r.colors', map=lst_output, color='kelvin')

    # metadata

    history_lst = '\n' + CITATION_SPLIT_WINDOW
    history_lst += '\n\n' + CITATION_COLUMN_WATER_VAPOR
    history_lst += '\n\nSplit-Window model: '
    history_lst += split_window_lst._equation  # sw_lst_mapcalc
    description_lst = DESCRIPTION_LST
    if celsius:
        title_lst = 'Land Surface Temperature (C)'
        units_lst = 'Celsius'
    else:
        title_lst = 'Land Surface Temperature (K)'
        units_lst = 'Kelvin'
    landsat8_metadata = Landsat8_MTL(mtl_file)
    source1_lst = landsat8_metadata.scene_id
    source2_lst = landsat8_metadata.origin
    run(
        "r.support",
        map=lst_output,
        title=title_lst,
        units=units_lst,
        description=description_lst,
        source1=source1_lst,
        source2=source2_lst,
        history=history_lst,
    )

    if scene_extent:
        grass.del_temp_region()  # restoring previous region
        grass.warning(WARNING_REGION_RESTORING)

    if info:
        g.message('\nSource: ' + CITATION_SPLIT_WINDOW)
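
The LST estimate in step 5 combines the two brightness temperatures with the
emissivity and column water vapor terms prepared in steps 3 and 4. The exact
coefficients live in SplitWindowLST and depend on the column water vapor
subrange; the generic shape of such split-window models (an illustration, not
necessarily the precise expression implemented here) is

    LST = b0 + (b1 + b2*(1-e)/e + b3*de/e^2) * (T10+T11)/2
             + (b4 + b5*(1-e)/e + b6*de/e^2) * (T10-T11)/2
             + b7*(T10-T11)^2

where e is the mean emissivity of the two thermal bands, de their emissivity
difference, and the b coefficients are selected from the column water vapor.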
Exemplo n.º 38
def main():
    global tmp, sqltmp, tmpname, nuldev, vector, rastertmp
    rastertmp = False
    #### setup temporary files
    tmp = grass.tempfile()
    sqltmp = tmp + ".sql"
    # we need a random name
    tmpname = grass.basename(tmp)

    nuldev = open(os.devnull, 'w')

    raster = options['raster']
    colprefix = options['column_prefix']
    vector = options['map']
    layer = options['layer']
    percentile = options['percentile']
    basecols = options['method'].split(',')

    ### setup enviro vars ###
    env = grass.gisenv()
    mapset = env['MAPSET']

    vs = vector.split('@')
    if len(vs) > 1:
        vect_mapset = vs[1]
    else:
        vect_mapset = mapset

    # does map exist in CURRENT mapset?
    if vect_mapset != mapset or not grass.find_file(vector, 'vector', mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    vector = vs[0]

    rastertmp = "%s_%s" % (vector, tmpname)

    # check the input raster map
    if not grass.find_file(raster, 'cell')['file']:
        grass.fatal(_("Raster map <%s> not found") % raster)

    # save current settings:
    grass.use_temp_region()

    # Temporarily aligning region resolution to $RASTER resolution
    # keep boundary settings
    grass.run_command('g.region', align=raster)

    grass.message(_("Preprocessing input data..."))
    try:
        grass.run_command('v.to.rast', input=vector, layer=layer, output=rastertmp,
                          use='cat', quiet=True)
    except CalledModuleError:
        grass.fatal(_("An error occurred while converting vector to raster"))

    # dump cats to file to avoid "too many argument" problem:
    p = grass.pipe_command('r.category', map=rastertmp, sep=';', quiet=True)
    cats = []

    for line in p.stdout:
        # p.stdout yields bytes under Python 3; decode before splitting
        cats.append(grass.decode(line).rstrip('\r\n').split(';')[0])
    p.wait()

    number = len(cats)
    if number < 1:
        grass.fatal(_("No categories found in raster map"))

    # check if DBF driver used, in this case cut to 10 chars col names:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # Find out which table is linked to the vector map on the given layer
    if not fi['table']:
        grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))

    # replaced by user choice
    #basecols = ['n', 'min', 'max', 'range', 'mean', 'stddev', 'variance', 'cf_var', 'sum']

    # we need at least three chars to distinguish [mea]n from [med]ian
    # so colprefix can't be longer than 6 chars with DBF driver
    if dbfdriver:
        colprefix = colprefix[:6]
        variables_dbf = {}

    # by default perccol variable is used only for "variables" variable
    perccol = "percentile"
    perc = None
    for b in basecols:
        if b.startswith('p'):
            perc = b
    if perc:
        # namespace is limited in DBF but the % value is important
        if dbfdriver:
            perccol = "per" + percentile
        else:
            perccol = "percentile_" + percentile
        percindex = basecols.index(perc)
        basecols[percindex] = perccol

    # dictionary with name of methods and position in "r.univar -gt"  output
    variables = {'number': 2, 'minimum': 4, 'maximum': 5, 'range': 6,
                 'average': 7, 'stddev': 9, 'variance': 10, 'coeff_var': 11,
                 'sum': 12, 'first_quartile': 14, 'median': 15,
                 'third_quartile': 16, perccol: 17}
    # this list is used to set the 'e' flag for r.univar
    extracols = ['first_quartile', 'median', 'third_quartile', perccol]
    addcols = []
    colnames = []
    extstat = ""
    for i in basecols:
        # match the (possibly abbreviated) method name to its full name
        for k in variables.keys():
            if i in k:
                i = k
                break
        if i in extracols:
            extstat = 'e'
        # check if column already present
        currcolumn = ("%s_%s" % (colprefix, i))
        if dbfdriver:
            currcolumn = currcolumn[:10]
            variables_dbf[currcolumn.replace("%s_" % colprefix, '')] = i

        colnames.append(currcolumn)
        if currcolumn in grass.vector_columns(vector, layer).keys():
            if not flags['c']:
                grass.fatal((_("Cannot create column <%s> (already present). ") % currcolumn) +
                             _("Use -c flag to update values in this column."))
        else:
            if i == "n":
                coltype = "INTEGER"
            else:
                coltype = "DOUBLE PRECISION"
            addcols.append(currcolumn + ' ' + coltype)

    if addcols:
        grass.verbose(_("Adding columns '%s'") % addcols)
        try:
            grass.run_command('v.db.addcolumn', map=vector, columns=addcols,
                              layer=layer)
        except CalledModuleError:
            grass.fatal(_("Adding columns failed. Exiting."))

    # calculate statistics:
    grass.message(_("Processing input data (%d categories)...") % number)

    # get rid of any earlier attempts
    grass.try_remove(sqltmp)

    f = open(sqltmp, 'w')

    # do the stats
    p = grass.pipe_command('r.univar', flags='t' + extstat, map=raster,
                           zones=rastertmp, percentile=percentile, sep=';')

    first_line = 1

    f.write("{}\n".format(grass.db_begin_transaction(fi['driver'])))
    for line in p.stdout:
        if first_line:
            first_line = 0
            continue

        vars = grass.decode(line).rstrip('\r\n').split(';')

        f.write("UPDATE %s SET" % fi['table'])
        first_var = 1
        for colname in colnames:
            variable = colname.replace("%s_" % colprefix, '', 1)
            if dbfdriver:
                variable = variables_dbf[variable]
            i = variables[variable]
            value = vars[i]
            # convert nan, +nan, -nan, inf, +inf, -inf, Infinity, +Infinity,
            # -Infinity to NULL
            if value.lower().endswith('nan') or 'inf' in value.lower():
                value = 'NULL'
            if not first_var:
                f.write(" , ")
            else:
                first_var = 0
            f.write(" %s=%s" % (colname, value))

        f.write(" WHERE %s=%s;\n" % (fi['key'], vars[0]))
    f.write("{}\n".format(grass.db_commit_transaction(fi['driver'])))
    p.wait()
    f.close()

    grass.message(_("Updating the database ..."))
    exitcode = 0
    try:
        grass.run_command('db.execute', input=sqltmp,
                          database=fi['database'], driver=fi['driver'])
        grass.verbose((_("Statistics calculated from raster map <{raster}>"
                         " and uploaded to attribute table"
                         " of vector map <{vector}>."
                         ).format(raster=raster, vector=vector)))
    except CalledModuleError:
        grass.warning(_("Failed to upload statistics to attribute table of vector map <%s>.") % vector)
        exitcode = 1

    sys.exit(exitcode)
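
Each data row of the r.univar -t output becomes one UPDATE statement in the
temporary SQL file, wrapped in a driver-specific transaction. A representative
(hypothetical) generated file looks roughly like:

BEGIN TRANSACTION;
UPDATE mytable SET mystats_average=12.5 , mystats_stddev=3.2 WHERE cat=42;
COMMIT;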
Exemplo n.º 39
def DecisionTree(no_of_veg_class, elev_filename, landcover_filename, river_filename, decision_tree):
    """
    Generates a decision tree given the training data
    Input:
        no_of_veg_class: No of landcover class in training data
        elev_filename  : Name of training file having elevation values
        landcover_filename: Name of training file having landcover values
        river_filename: Name of training file having river presence absence info
    """
    rpy.r.library("rpart")
    g.use_temp_region()
    #TODO generalize no of rows and columns for training data
    rows = 2001
    cols = 1201
    resolution = 50
    n = 4928050 #some arbitrary value
    s = n - resolution*rows
    e = 609000  #some arbitrary value
    w = e - resolution*cols
    g.run_command('g.region', flags='ap', n=n, s=s, e=e, w=w, res=50, rows=2001, cols=1201)
    pathname = os.path.dirname(sys.argv[0])
    fullpath = os.path.abspath(pathname)
    if decision_tree:
        # Convert ascii DEM into a grass raster map that will help in getting slope and aspect
        file_name = "/Training/%s" % elev_filename
        g.run_command('r.in.ascii', overwrite=True, flags='i', input=fullpath + file_name, output='training_DEM')
        # TODO read training DEM into array without writing another file
        g.run_command('r.out.ascii', flags='h', input='training_DEM@user1', output=fullpath + '/ascii_files' + '/training_DEM', null='0')
        f = open('ascii_files/training_DEM', 'r')
        Elev_arr = numpy.loadtxt(f)
        f.close()
        file_name = "Training/%s" % (landcover_filename)
        Landcover = numpy.loadtxt(file_name) # Read Landcover Data from ascii file
        file_name = "Training/%s" % (river_filename)
        River     = numpy.loadtxt(file_name) # Read River Data from ascii file
        River_dist_arr = dist.CityBlock(River,flag = 1)   #Compute distance from River data
        g.run_command('r.slope.aspect',overwrite=True,elevation='training_DEM@user1',slope='Slope',aspect='Aspect')
        g.run_command('r.out.ascii',flags='h',input='Slope@user1',output=fullpath + '/ascii_files'+'/Slope',null='0')
        f = open('ascii_files/Slope', 'r')
        Slope_arr = numpy.loadtxt(f)  #Get Slope into an array
        f.close()
        g.run_command('r.out.ascii',flags='h',input='Aspect@user1',output=fullpath +'/ascii_files'+ '/Aspect',null='0')
        f = open('ascii_files/Aspect', 'r')
        Aspect_arr = numpy.loadtxt(f) #Get Aspect into an array
        f.close()

        (x_len,y_len) = Elev_arr.shape
        L = [ [] for i in range(0,no_of_veg_class)]
        for i in range(1, x_len-1):   # Ignoring boundary cells
            for j in range(1, y_len-1):
                # Append the pixel co-ordinates into the respective list of lists;
                # nodata values are handled implicitly since we ignore them
                for k in range(0, no_of_veg_class):
                    if Landcover[i][j] == k:
                        L[k].append((i, j))
                        break

        minimum_elev = numpy.min(Elev_arr)
        factor = numpy.max(Elev_arr) - minimum_elev      # normalize elevation data
        Elev_arr = (Elev_arr[:,:]-minimum_elev)*100/factor
    # Sample the training data for the decision tree; we can't use the entire
    # dataset as it would take too long to process.
    # various lists to hold the sampled training data
        Elevation = []
        Slope = []
        RiverDistance = []
        Aspect = []
        Class = []
    # Sample the data
        for i in range(0,no_of_veg_class):  
            if len(L[i]) < 1000:
                limit = len(L[i])
            else:
                limit = 1000
            for j in range(0,limit):
                Elevation.append( int(Elev_arr[ L[i][j][0] ][ L[i][j][1] ]))
                Slope.append(int(Slope_arr[ L[i][j][0] ][ L[i][j][1] ]))
                RiverDistance.append(int(River_dist_arr[ L[i][j][0] ][ L[i][j][1] ]))
                Aspect.append(int(Aspect_arr[ L[i][j][0] ][ L[i][j][1] ]))
                Class.append(i)
        # free memory
        Elev_arr = 0
        Slope_arr = 0
        River_dist_arr = 0
        Aspect_arr = 0
        # create dictionary of sample data which will be needed to generate the decision tree
        training_data = {'Elevation': Elevation, 'Slope': Slope, 'RiverDistance': RiverDistance, 'Aspect': Aspect, 'Class': Class}
        # free memory
        Elevation = []
        Slope = []
        RiverDistance = []
        Aspect = []
        Class = []
        f = open('save.p', 'wb')  # binary mode is required by pickle under Python 3
        pickle.dump(training_data, f)
        f.close()
    else:
        f = open('save.p', 'rb')
        training_data = pickle.load(f)
        f.close()
    rpy.set_default_mode(rpy.NO_CONVERSION)
    # Using rpart, create the decision tree
    fit = rpy.r.rpart(formula='Class ~ Elevation + RiverDistance + Slope + Aspect',data=training_data,method="class")
    training_data = 0
    #rpy.r.png("DecisionTree.png")  # Output a png image of the decision tree
    #rpy.r.plot(fit)
    #rpy.r.text(fit)
    #rpy.r.dev_off()
    return fit
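
The fitted rpart object can later be applied to new predictor data through
rpy as well; a minimal sketch (rpy 1.x style, with new_data a hypothetical
dict of the same four predictor lists used for training):

prediction = rpy.r.predict(fit, newdata=new_data, type='class')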
Exemplo n.º 40
def main():
    color = options["color"]
    column = options["column"]
    layer = options["layer"]
    map = options["map"]
    range = options["range"]
    raster = options["raster"]
    rgb_column = options["rgb_column"]
    rules = options["rules"]
    flip = flags["n"]

    global tmp, tmp_colr, tmp_vcol
    pid = os.getpid()
    tmp = tmp_colr = tmp_vcol = None

    mapset = grass.gisenv()["MAPSET"]
    gisbase = os.getenv("GISBASE")

    # does map exist in CURRENT mapset?
    kv = grass.find_file(map, element="vector", mapset=mapset)
    if not kv["file"]:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    vector = map.split("@", 1)

    # sanity check mutually exclusive color options
    if not options["color"] and not options["raster"] and not options["rules"]:
        grass.fatal(_("Pick one of color, rules, or raster options"))

    if color:
        #### check the color rule is valid
        color_opts = os.listdir(os.path.join(gisbase, "etc", "colors"))
        color_opts += ["random", "grey.eq", "grey.log", "rules"]
        if color not in color_opts:
            grass.fatal(
                _("Invalid color rule <%s>\n") % color +
                _("Valid options are: %s") % " ".join(color_opts))
    elif raster:
        if not grass.find_file(raster)["name"]:
            grass.fatal(_("Raster raster map <%s> not found") % raster)
    elif rules:
        if not os.access(rules, os.R_OK):
            grass.fatal(_("Unable to read color rules file <%s>") % rules)

    # column checks
    # check input data column
    cols = grass.vector_columns(map, layer=layer)
    if column not in cols:
        grass.fatal(_("Column <%s> not found") % column)
    ncolumn_type = cols[column]["type"]
    if ncolumn_type not in ["INTEGER", "DOUBLE PRECISION"]:
        grass.fatal(
            _("Column <%s> is not numeric but %s") % (column, ncolumn_type))

    # check if GRASSRGB column exists, make it if it doesn't
    table = grass.vector_db(map)[int(layer)]["table"]
    if rgb_column not in cols:
        # RGB Column not found, create it
        grass.message(_("Creating column <%s>...") % rgb_column)
        try:
            grass.run_command(
                "v.db.addcolumn",
                map=map,
                layer=layer,
                column="%s varchar(11)" % rgb_column,
            )
        except CalledModuleError:
            grass.fatal(_("Creating color column"))
    else:
        column_type = cols[rgb_column]["type"]
        if column_type not in ["CHARACTER", "TEXT"]:
            grass.fatal(
                _("Column <%s> is not of compatible type (found %s)") %
                (rgb_column, column_type))
        else:
            num_chars = dict([
                (v[0], int(v[2])) for v in grass.db_describe(table)["cols"]
            ])[rgb_column]
            if num_chars < 11:
                grass.fatal(
                    _("Color column <%s> is not wide enough (needs 11 characters)")
                    % rgb_column)

    cvals = grass.vector_db_select(map, layer=int(layer),
                                   columns=column)["values"].values()

    # find data range
    if range:
        # order doesn't matter
        vals = [float(x) for x in range.split(",")]
    else:
        grass.message(_("Scanning values..."))
        vals = [float(x[0]) for x in cvals]

    minval = min(vals)
    maxval = max(vals)

    grass.verbose(_("Range: [%s, %s]") % (minval, maxval))
    if minval is None or maxval is None:
        grass.fatal(_("Scanning data range"))

    # setup internal region
    grass.use_temp_region()
    grass.run_command("g.region", rows=2, cols=2)

    tmp_colr = "tmp_colr_%d" % pid

    # create dummy raster map
    if ncolumn_type == "INTEGER":
        grass.mapcalc(
            "$tmp_colr = int(if(row() == 1, $minval, $maxval))",
            tmp_colr=tmp_colr,
            minval=minval,
            maxval=maxval,
        )
    else:
        grass.mapcalc(
            "$tmp_colr = double(if(row() == 1, $minval, $maxval))",
            tmp_colr=tmp_colr,
            minval=minval,
            maxval=maxval,
        )

    if color:
        color_cmd = {"color": color}
    elif raster:
        color_cmd = {"raster": raster}
    elif rules:
        color_cmd = {"rules": rules}

    if flip:
        flip_flag = "n"
    else:
        flip_flag = ""

    grass.run_command("r.colors",
                      map=tmp_colr,
                      flags=flip_flag,
                      quiet=True,
                      **color_cmd)

    tmp = grass.tempfile()

    # calculate colors and write SQL command file
    grass.message(_("Looking up colors..."))

    f = open(tmp, "w")
    p = grass.feed_command("r.what.color", flags="i", input=tmp_colr, stdout=f)
    lastval = None
    for v in sorted(vals):
        # skip duplicate values
        if v == lastval:
            continue
        p.stdin.write("%f\n" % v)
        lastval = v
    p.stdin.close()
    p.wait()
    f.close()
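    # r.what.color with the -i flag looks up each value fed on stdin and
    # prints lines of the form "<value>: R:G:B" (or "<value>: *" when no
    # color applies); the parsing below relies on that layout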

    tmp_vcol = "%s_vcol.sql" % tmp
    fi = open(tmp, "r")
    fo = open(tmp_vcol, "w")
    t = string.Template(
        "UPDATE $table SET $rgb_column = '$colr' WHERE $column = $value;\n")
    found = 0
    for line in fi:
        [value, colr] = line.split(": ")
        colr = colr.strip()
        if len(colr.split(":")) != 3:
            continue
        fo.write(
            t.substitute(
                table=table,
                rgb_column=rgb_column,
                colr=colr,
                column=column,
                value=value,
            ))
        found += 1
    fi.close()
    fo.close()

    if not found:
        grass.fatal(_("No values found in color range"))

    # apply SQL commands to update the table with values
    grass.message(_("Writing %s colors...") % found)

    try:
        grass.run_command("db.execute", input=tmp_vcol)
    except CalledModuleError:
        grass.fatal(_("Processing SQL transaction"))

    if flags["s"]:
        vcolors = "vcolors_%d" % pid
        grass.run_command("g.rename", raster=(tmp_colr, vcolors), quiet=True)
        grass.message(
            _("Raster map containing color rules saved to <%s>") % vcolors)
        # TODO save full v.colors command line history
        grass.run_command(
            "r.support",
            map=vcolors,
            history="",
            source1="vector map = %s" % map,
            source2="column = %s" % column,
            title=_("Dummy raster to use as thematic vector legend"),
            description="generated by v.colors using r.mapcalc",
        )
        grass.run_command(
            "r.support",
            map=vcolors,
            history=_("RGB saved into <%s> using <%s%s%s>") %
            (rgb_column, color, raster, rules),
        )
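# A self-contained illustration of the SQL-template step used above
# (table name and values are hypothetical, for clarity only):
import string
t = string.Template(
    "UPDATE $table SET $rgb_column = '$colr' WHERE $column = $value;\n")
sql = t.substitute(table="roads", rgb_column="GRASSRGB",
                   colr="255:170:0", column="speed", value="42")
# sql is now: UPDATE roads SET GRASSRGB = '255:170:0' WHERE speed = 42;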
Exemplo n.º 41
0
def main():
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options["method"]  # sharpening algorithm
    ms1_orig = options["blue"]  # blue channel
    ms2_orig = options["green"]  # green channel
    ms3_orig = options["red"]  # red channel
    pan_orig = options["pan"]  # high res pan channel
    out = options["output"]  # prefix for output RGB maps
    bits = options["bitdepth"]  # bit depth of image channels
    bladjust = flags["l"]  # adjust blue channel
    sproc = flags["s"]  # serial processing
    rescale = flags[
        "r"]  # rescale to spread pixel values to entire 0-255 range

    # Checking bit depth
    bits = float(bits)
    if bits < 2 or bits > 30:
        grass.warning(_("Bit depth is outside acceptable range"))
        return

    outb = grass.core.find_file("%s_blue" % out)
    outg = grass.core.find_file("%s_green" % out)
    outr = grass.core.find_file("%s_red" % out)

    if (outb["name"] != "" or outg["name"] != ""
            or outr["name"] != "") and not grass.overwrite():
        grass.warning(
            _("Maps with the selected output prefix already exist."
              " Delete them or use the overwrite flag"))
        return

    pid = str(os.getpid())

    # convert input image channels to 8 bit for processing
    ms1 = "tmp%s_ms1" % pid
    ms2 = "tmp%s_ms2" % pid
    ms3 = "tmp%s_ms3" % pid
    pan = "tmp%s_pan" % pid

    if not rescale:
        if bits == 8:
            grass.message(_("Using 8bit image channels"))
            if sproc:
                # serial processing
                grass.run_command(
                    "g.copy",
                    raster="%s,%s" % (ms1_orig, ms1),
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "g.copy",
                    raster="%s,%s" % (ms2_orig, ms2),
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "g.copy",
                    raster="%s,%s" % (ms3_orig, ms3),
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "g.copy",
                    raster="%s,%s" % (pan_orig, pan),
                    quiet=True,
                    overwrite=True,
                )
            else:
                # parallel processing
                pb = grass.start_command(
                    "g.copy",
                    raster="%s,%s" % (ms1_orig, ms1),
                    quiet=True,
                    overwrite=True,
                )
                pg = grass.start_command(
                    "g.copy",
                    raster="%s,%s" % (ms2_orig, ms2),
                    quiet=True,
                    overwrite=True,
                )
                pr = grass.start_command(
                    "g.copy",
                    raster="%s,%s" % (ms3_orig, ms3),
                    quiet=True,
                    overwrite=True,
                )
                pp = grass.start_command(
                    "g.copy",
                    raster="%s,%s" % (pan_orig, pan),
                    quiet=True,
                    overwrite=True,
                )

                pb.wait()
                pg.wait()
                pr.wait()
                pp.wait()

        else:
            grass.message(_("Converting image chanels to 8bit for processing"))
            maxval = pow(2, bits) - 1
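            # e.g. for 11-bit imagery (bits=11), maxval is 2**11 - 1 = 2047,
            # so r.rescale maps the 0-2047 input range onto 0-255 below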
            if sproc:
                # serial processing
                grass.run_command(
                    "r.rescale",
                    input=ms1_orig,
                    from_="0,%f" % maxval,
                    output=ms1,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "r.rescale",
                    input=ms2_orig,
                    from_="0,%f" % maxval,
                    output=ms2,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "r.rescale",
                    input=ms3_orig,
                    from_="0,%f" % maxval,
                    output=ms3,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                grass.run_command(
                    "r.rescale",
                    input=pan_orig,
                    from_="0,%f" % maxval,
                    output=pan,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )

            else:
                # parallel processing
                pb = grass.start_command(
                    "r.rescale",
                    input=ms1_orig,
                    from_="0,%f" % maxval,
                    output=ms1,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                pg = grass.start_command(
                    "r.rescale",
                    input=ms2_orig,
                    from_="0,%f" % maxval,
                    output=ms2,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                pr = grass.start_command(
                    "r.rescale",
                    input=ms3_orig,
                    from_="0,%f" % maxval,
                    output=ms3,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )
                pp = grass.start_command(
                    "r.rescale",
                    input=pan_orig,
                    from_="0,%f" % maxval,
                    output=pan,
                    to="0,255",
                    quiet=True,
                    overwrite=True,
                )

                pb.wait()
                pg.wait()
                pr.wait()
                pp.wait()

    else:
        grass.message(_("Rescaling image chanels to 8bit for processing"))

        min_ms1 = int(grass.raster_info(ms1_orig)["min"])
        max_ms1 = int(grass.raster_info(ms1_orig)["max"])
        min_ms2 = int(grass.raster_info(ms2_orig)["min"])
        max_ms2 = int(grass.raster_info(ms2_orig)["max"])
        min_ms3 = int(grass.raster_info(ms3_orig)["min"])
        max_ms3 = int(grass.raster_info(ms3_orig)["max"])
        min_pan = int(grass.raster_info(pan_orig)["min"])
        max_pan = int(grass.raster_info(pan_orig)["max"])

        maxval = pow(2, bits) - 1
        if sproc:
            # serial processing
            grass.run_command(
                "r.rescale",
                input=ms1_orig,
                from_="%f,%f" % (min_ms1, max_ms1),
                output=ms1,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            grass.run_command(
                "r.rescale",
                input=ms2_orig,
                from_="%f,%f" % (min_ms2, max_ms2),
                output=ms2,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            grass.run_command(
                "r.rescale",
                input=ms3_orig,
                from_="%f,%f" % (min_ms3, max_ms3),
                output=ms3,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            grass.run_command(
                "r.rescale",
                input=pan_orig,
                from_="%f,%f" % (min_pan, max_pan),
                output=pan,
                to="0,255",
                quiet=True,
                overwrite=True,
            )

        else:
            # parallel processing
            pb = grass.start_command(
                "r.rescale",
                input=ms1_orig,
                from_="%f,%f" % (min_ms1, max_ms1),
                output=ms1,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            pg = grass.start_command(
                "r.rescale",
                input=ms2_orig,
                from_="%f,%f" % (min_ms2, max_ms2),
                output=ms2,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            pr = grass.start_command(
                "r.rescale",
                input=ms3_orig,
                from_="%f,%f" % (min_ms3, max_ms3),
                output=ms3,
                to="0,255",
                quiet=True,
                overwrite=True,
            )
            pp = grass.start_command(
                "r.rescale",
                input=pan_orig,
                from_="%f,%f" % (min_pan, max_pan),
                output=pan,
                to="0,255",
                quiet=True,
                overwrite=True,
            )

            pb.wait()
            pg.wait()
            pr.wait()
            pp.wait()

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv["nsres"]
    ewres = kv["ewres"]
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()
    grass.run_command("g.region", res=panres, align=pan)

    # Select sharpening method
    grass.message(
        _("Performing pan sharpening with hi res pan image: %f") % panres)
    if sharpen == "brovey":
        brovey(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "ihs":
        ihs(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "pca":
        pca(pan, ms1, ms2, ms3, out, pid, sproc)
    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(
        _("Assigning grey equalized color tables to output images..."))

    # equalized grey scales give best contrast
    grass.message(_("setting pan-sharpened channels to equalized grey scale"))
    for ch in ["red", "green", "blue"]:
        grass.run_command("r.colors",
                          quiet=True,
                          map="%s_%s" % (out, ch),
                          flags="e",
                          color="grey")

    # Landsat is often too blue-ish because the panchromatic band is less
    # sensitive to blue light, so the output blue channel can be modified
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        blue_colors = ["0 0 0 0\n5% 0 0 0\n67% 255 255 255\n100% 255 255 255"]
        # these previous colors are way too blue for landsat
        # blue_colors = ['0 0 0 0\n10% 0 0 0\n20% 200 200 200\n40% 230 230 230\n67% 255 255 255\n100% 255 255 255']
        bc = grass.feed_command("r.colors",
                                quiet=True,
                                map="%s_blue" % out,
                                rules="-")
        bc.stdin.write(grass.encode("\n".join(blue_colors)))
        bc.stdin.close()

    # output notice
    grass.verbose(
        _("The following pan-sharpened output maps have been generated:"))
    for ch in ["red", "green", "blue"]:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(
        _("To visualize output, run: g.region -p raster=%s_red") % out)
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue") % (out, out, out))
    grass.verbose(
        _("If desired, combine channels into a single RGB map with 'r.composite'."
          ))
    grass.verbose(
        _("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ["red", "green", "blue"]:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three outputs
    # grass.run_command('i.group', group=out,
    #                  input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.message(_("cleaning up temp files"))
    try:
        grass.run_command("g.remove",
                          flags="f",
                          type="raster",
                          pattern="tmp%s*" % pid,
                          quiet=True)
    except Exception:
        pass  # ignore errors while removing temporary maps
Exemplo n.º 42
0
def main():

    bands = {}
    cor_bands = {}
    dem = options["elevation"]
    vis = options["visibility"]
    input_dir = options["input_dir"]
    memory = options["memory"]
    check_ndir = 0
    check_odir = 0
    # Check if the input folder has old or new name
    # Check if the input folder belongs to a L1C image
    level_dir = os.path.basename(input_dir).split("_")
    # Check if the input directory is a .SAFE folder
    if not input_dir.endswith(".SAFE"):
        gscript.fatal(
            "The input directory is not a .SAFE folder. Please check the input directory"
        )
    if level_dir[1] == "OPER" and level_dir[3] == "MSIL1C":
        check_odir = 1
        filename = [i for i in os.listdir(input_dir) if i.startswith("S")]
        string = str(filename).strip("['']")
        mtd_file = os.path.join(input_dir, string)
    elif level_dir[1] == "MSIL1C":
        check_ndir = 1
        mtd_file = os.path.join(input_dir, "MTD_MSIL1C.xml")
    else:
        gscript.fatal(
            "The input directory does not belong to a L1C Sentinel image. Please check the input directory"
        )
    # Check if Metadata file exists
    if not os.path.isfile(mtd_file):
        gscript.fatal(
            "Metadata file not found. Please check the input directory")
    atmo_mod = options["atmospheric_model"]
    aerosol_mod = options["aerosol_model"]
    aeronet_file = options["aeronet_file"]
    check_file = 0
    check_value = 0
    mapset = gscript.gisenv()["MAPSET"]
    suffix = options["suffix"]
    rescale = options["rescale"]
    processid = os.getpid()
    txt_file = options["text_file"]
    tmp_file = gscript.tempfile()
    topo_method = options["topo_method"]

    if topo_method and not flags["c"]:
        gscript.warning(
            _("To compute the topographic correction of reflectance, "
              "please also select the 'c' flag"))
    elif flags["c"] and not topo_method:
        gscript.warning(
            _("Topographic correction of reflectance will use the "
              "default method 'c-factor'"))

    if not gscript.find_program("i.sentinel.import", "--help"):
        gscript.fatal(
            "Module requires i.sentinel.import. Please install it using g.extension."
        )

    # Import bands
    if not flags["i"]:
        imp_flags = "o" if flags["o"] else ""
        imp_flags += "l" if flags["l"] else ""
        imp_flags += "r" if flags["r"] else ""
        imp_flags = None if imp_flags == "" else imp_flags
        i_s_imp_dir = os.path.dirname(input_dir)
        pattern_file = os.path.basename(input_dir).split(".")[0]

        # import
        gscript.run_command(
            "i.sentinel.import",
            input=i_s_imp_dir,
            pattern_file=pattern_file,
            flags=imp_flags,
            memory=memory,
        )

    # Create xml "tree" for reading parameters from metadata
    tree = et.parse(mtd_file)
    root = tree.getroot()

    # Start reading the xml file
    if check_ndir == 1:
        for elem in root[0].findall("Product_Info"):
            datatake = elem.find("Datatake")
            # Geometrical conditions = sensor
            sensor = datatake.find("SPACECRAFT_NAME")
            # Acquisition date and time
            time_str = elem.find("GENERATION_TIME")
            # Date and time conversion
            time_py = datetime.strptime(time_str.text, "%Y-%m-%dT%H:%M:%S.%fZ")
            # Compute decimal hour
            dec_hour = (float(time_py.hour) + float(time_py.minute) / 60 +
                        float(time_py.second) / 3600)
            # Read input bands from metadata
            product = elem.find("Product_Organisation")
            g_list = product.find("Granule_List")
            granule = g_list.find("Granule")
            images = granule.find("IMAGE_FILE")
            img_name = images.text.split("/")
            # Check if input exist and if the mtd file corresponds with the input image
            for img in root.iter("IMAGE_FILE"):
                a = img.text.split(".jp2")[0].split("/")
                b = a[3].split("_")
                if (gscript.find_file(a[3], element="cell",
                                      mapset=mapset)["file"]
                        or gscript.find_file(a[3], element="cell")["file"]
                        or b[2] == "TCI"):
                    if b[2] == "B01":
                        bands["costal"] = a[3]
                    elif b[2] == "B02":
                        bands["blue"] = a[3]
                    elif b[2] == "B03":
                        bands["green"] = a[3]
                    elif b[2] == "B04":
                        bands["red"] = a[3]
                    elif b[2] == "B05":
                        bands["re5"] = a[3]
                    elif b[2] == "B06":
                        bands["re6"] = a[3]
                    elif b[2] == "B07":
                        bands["re7"] = a[3]
                    elif b[2] == "B08":
                        bands["nir"] = a[3]
                    elif b[2] == "B8A":
                        bands["nir8a"] = a[3]
                    elif b[2] == "B09":
                        bands["vapour"] = a[3]
                    elif b[2] == "B10":
                        bands["cirrus"] = a[3]
                    elif b[2] == "B11":
                        bands["swir11"] = a[3]
                    elif b[2] == "B12":
                        bands["swir12"] = a[3]
                else:
                    gscript.fatal((
                        "One or more input bands are missing or the metadata "
                        "file belongs to another image ({})."
                    ).format(img_name[3].replace("_B01", "")))

    if check_odir == 1:
        for elem in root[0].findall("Product_Info"):
            datatake = elem.find("Datatake")
            # Geometrical conditions = sensor
            sensor = datatake.find("SPACECRAFT_NAME")
            # Acquisition date and time
            time_str = elem.find("GENERATION_TIME")
            # Date and time conversion
            time_py = datetime.strptime(time_str.text, "%Y-%m-%dT%H:%M:%S.%fZ")
            # Compute decimal hour
            dec_hour = (float(time_py.hour) + float(time_py.minute) / 60 +
                        float(time_py.second) / 3600)
            # Read input bands from metadata
            product = elem.find("Product_Organisation")
            g_list = product.find("Granule_List")
            granule = g_list.find("Granules")
            images = granule.find("IMAGE_ID")
            # Check if input exist and if the mtd file corresponds with the input image
            for img in root.iter("IMAGE_ID"):
                b = img.text.split("_")
                if gscript.find_file(img.text, element="cell",
                                     mapset=mapset)["file"]:
                    if b[10] == "B01":
                        bands["costal"] = img.text
                    elif b[10] == "B02":
                        bands["blue"] = img.text
                    elif b[10] == "B03":
                        bands["green"] = img.text
                    elif b[10] == "B04":
                        bands["red"] = img.text
                    elif b[10] == "B05":
                        bands["re5"] = img.text
                    elif b[10] == "B06":
                        bands["re6"] = img.text
                    elif b[10] == "B07":
                        bands["re7"] = img.text
                    elif b[10] == "B08":
                        bands["nir"] = img.text
                    elif b[10] == "B8A":
                        bands["nir8a"] = img.text
                    elif b[10] == "B09":
                        bands["vapour"] = img.text
                    elif b[10] == "B10":
                        bands["cirrus"] = img.text
                    elif b[10] == "B11":
                        bands["swir11"] = img.text
                    elif b[10] == "B12":
                        bands["swir12"] = img.text
                else:
                    gscript.fatal((
                        "One or more input bands are missing or the metadata "
                        "file belongs to another image ({})."
                    ).format(images.text.replace("_B09", "")))

    # Check if input exist
    for key, value in bands.items():
        if (not gscript.find_file(value, element="cell", mapset=mapset)["file"]
                and not gscript.find_file(value, element="cell")["file"]):
            gscript.fatal(("Raster map <{}> not found.").format(value))

    # Check if output already exist
    for key, value in bands.items():
        if not os.getenv("GRASS_OVERWRITE"):
            if gscript.find_file(value + "_" + suffix,
                                 element="cell",
                                 mapset=mapset)["file"]:
                gscript.fatal(
                    ("Raster map {} already exists.").format(value + "_" +
                                                             suffix))

    # Check if output name for the text file has been specified
    if flags["t"]:
        if options["text_file"] == "":
            gscript.fatal(
                "Output name is required for the text file. Please specified it"
            )
        if not os.access(os.path.dirname(options["text_file"]), os.W_OK):
            gscript.fatal("Output directory for the text file is not writable")

    # Set temp region to image max extent
    gscript.use_temp_region()
    gscript.run_command("g.region", rast=bands.values(), flags="a")
    gscript.message(
        _("--- The computational region has been temporarily set to image max extent ---"
          ))

    if flags["a"]:
        if vis != "":
            if options["aod_value"] != "" and aeronet_file != "":
                gscript.warning(_("--- Visibility map will be ignored ---"))
                gscript.fatal(
                    "Only one parameter must be provided, AOD value or AERONET file"
                )
            elif options["aod_value"] == "" and aeronet_file == "":
                gscript.warning(_("--- Visibility map will be ignored ---"))
                gscript.fatal(
                    "If -a flag is checked an AOD value or AERONET file must be provided"
                )
            elif options["aod_value"] != "":
                gscript.warning(_("--- Visibility map will be ignored ---"))
                check_value = 1
                aot550 = options["aod_value"]
            elif aeronet_file != "":
                gscript.warning(_("--- Visibility map will be ignored ---"))
        elif options["aod_value"] != "" and aeronet_file != "":
            gscript.fatal(
                "Only one parameter must be provided, AOD value or AERONET file"
            )
        elif options["aod_value"] != "":
            check_value = 1
            aot550 = options["aod_value"]
        elif aeronet_file != "":
            gscript.message(_("--- Computing AOD from input AERONET file ---"))
        elif options["aod_value"] == "" and aeronet_file == "":
            gscript.fatal(
                "If -a flag is checked an AOD value or AERONET file must be provided"
            )
    else:
        if vis != "":
            if options["aod_value"] != "" or aeronet_file != "":
                gscript.warning(_("--- AOD will be ignored ---"))
            check_file = 1
            stats_v = gscript.parse_command("r.univar", flags="g", map=vis)
            try:
                vis_mean = int(float(stats_v["mean"]))
                gscript.message(
                    "--- Computed visibility mean value: {} Km ---".format(
                        vis_mean))
            except (KeyError, ValueError):
                gscript.fatal(
                    "The input visibility map is not valid. It may be outside the computational region."
                )
        elif vis == "" and (options["aod_value"] != "" or aeronet_file != ""):
            gscript.fatal("Check the -a flag to use AOD instead of visibility")
        else:
            gscript.fatal("No visibility map has been provided")

    # Retrieve longitude and latitude of the centre of the computational region
    c_region = gscript.parse_command("g.region", flags="bg")
    lon = float(c_region["ll_clon"])
    lat = float(c_region["ll_clat"])

    # Read and compute AOD from AERONET file
    if check_value == 0 and check_file == 0:
        i = 0
        cc = 0
        count = 0
        columns = []
        m_time = []
        dates_list = []
        t_columns = []
        i_col = []
        coll = []
        wl = []

        with open(aeronet_file, "r") as aeronet:
            for row in aeronet:
                count += 1
                if count == 4:
                    columns = row.split(",")
        # Search for the closest date and time to the acquisition one
        count = 0
        with open(aeronet_file, "r") as aeronet:
            for row in aeronet:
                count += 1
                if count >= 5:
                    columns = row.split(",")
                    m_time.append(columns[0] + " " + columns[1])

        dates = [datetime.strptime(row, "%d:%m:%Y %H:%M:%S") for row in m_time]
        dates_list.append(dates)
        format_bd = time_py.strftime("%d/%m/%Y %H:%M:%S")
        base_date = str(format_bd)
        b_d = datetime.strptime(base_date, "%d/%m/%Y %H:%M:%S")

        for line in dates_list:
            closest = min(line, key=lambda x: abs(x - b_d))
            timedelta = abs(closest - b_d)
        # Search for the closest wavelengths (upper and lower) to 550
        count = 0
        with open(aeronet_file, "r") as aeronet:
            for row in aeronet:
                count += 1
                if count == 4:
                    t_columns = row.split(",")
                    for i, col in enumerate(t_columns):
                        if "AOT_" in col:
                            i_col.append(i)
                            coll.append(col)
        for line in coll:
            l = line.split("_")
            wl.append(int(l[1]))

        aot_req = 550
        upper = min([i for i in wl if i >= aot_req],
                    key=lambda x: abs(x - aot_req))
        lower = min([i for i in wl if i < aot_req],
                    key=lambda x: abs(x - aot_req))

        count = 0
        with open(aeronet_file, "r") as aeronet:
            for row in aeronet:
                count += 1
                if count == dates.index(closest) + 5:
                    t_columns = row.split(",")
                    count2 = 0
                    check_up = 0
                    check_lo = 0
                    while count2 < len(i_col) and check_up < 1:
                        # Search for the not null value for the upper wavelength
                        if t_columns[wl.index(upper) + i_col[0]] == "N/A":
                            aot_req_tmp = upper
                            upper = min(
                                [i for i in wl if i > aot_req_tmp],
                                key=lambda x: abs(x - aot_req_tmp),
                            )
                        else:
                            wl_upper = float(upper)
                            aot_upper = float(t_columns[wl.index(upper) +
                                                        i_col[0]])
                            check_up = 1
                        count2 += 1
                    count2 = 0
                    while count2 < len(i_col) and check_lo < 1:
                        # Search for the not null value for the lower wavelength
                        if t_columns[wl.index(lower) + i_col[0]] == "N/A":
                            aot_req_tmp = lower
                            lower = min(
                                [i for i in wl if i < aot_req_tmp],
                                key=lambda x: abs(x - aot_req_tmp),
                            )
                        else:
                            wl_lower = float(lower)
                            aot_lower = float(t_columns[wl.index(lower) +
                                                        i_col[0]])
                            check_lo = 1
                        count2 += 1
        # Compute AOD at 550 nm
        alpha = math.log(aot_lower / aot_upper) / math.log(wl_upper / wl_lower)
        aot550 = math.exp(
            math.log(aot_lower) - math.log(550.0 / wl_lower) * alpha)
        gscript.message("--- Computed AOD at 550 nm: {} ---".format(aot550))

    # Compute mean target elevation in km
    stats_d = gscript.parse_command("r.univar", flags="g", map=dem)
    try:
        mean = float(stats_d["mean"])
        conv_fac = -0.001
        dem_mean = mean * conv_fac
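        # i.atcorr (6S) expects the mean target elevation as a negative
        # value in kilometers, hence the -0.001 conversion factor above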
        gscript.message(
            "--- Computed mean target elevation above sea level: {:.3f} m ---".
            format(mean))
    except (KeyError, ValueError):
        gscript.fatal(
            "The input elevation map is not valid. It may be outside the computational region."
        )

    # Start compiling the control file
    for key, bb in bands.items():
        gscript.message(_("--- Compiling the control file.. ---"))
        text = open(tmp_file, "w")
        # Geometrical conditions
        if sensor.text == "Sentinel-2A":
            text.write(str(25) + "\n")
        elif sensor.text == "Sentinel-2B":
            text.write(str(26) + "\n")
        else:
            gscript.fatal(
                "The input image does not seem to be a Sentinel image")
        text.write("{} {} {:.2f} {:.3f} {:.3f}".format(
            time_py.month, time_py.day, dec_hour, lon, lat) + "\n")
        # Atmospheric model
        # See also: https://harrisgeospatial.com/docs/FLAASH.html
        # for a more fine-tuned way of selecting the atmospheric model
        winter = [1, 2, 3, 4, 10, 11, 12]
        summer = [5, 6, 7, 8, 9]
        if atmo_mod == "Automatic":
            if lat > -15.00 and lat <= 15.00:  # Tropical
                text.write("1" + "\n")
            elif lat > 15.00 and lat <= 45.00:
                if time_py.month in winter:  # Midlatitude winter
                    text.write("3" + "\n")
                else:  # Midlatitude summer
                    text.write("2" + "\n")
            elif lat < -15.00 and lat >= -45.00:
                if time_py.month in winter:  # Midlatitude summer
                    text.write("2" + "\n")
                else:  # Midlatitude winter
                    text.write("3" + "\n")
            elif lat > 45.00:  # and lat <= 60.00:
                if time_py.month in winter:  # Subarctic winter
                    text.write("5" + "\n")
                else:  # Subarctic summer
                    text.write("4" + "\n")
            elif lat < -45.00:  # and lat >= -60.00:
                if time_py.month in winter:  # Subarctic summer
                    text.write("4" + "\n")
                else:  # Subarctic winter
                    text.write("5" + "\n")
            else:
                gscript.fatal("Latitude {} is out of range".format(lat))
        elif atmo_mod == "No gaseous absorption":
            text.write("0" + "\n")  # No gas abs model
        elif atmo_mod == "Tropical":
            text.write("1" + "\n")  # Tropical model
        elif atmo_mod == "Midlatitude summer":
            text.write("2" + "\n")  # Mid sum model
        elif atmo_mod == "Midlatitude winter":
            text.write("3" + "\n")  # Mid win model
        elif atmo_mod == "Subarctic summer":
            text.write("4" + "\n")  # Sub sum model
        elif atmo_mod == "Subarctic winter":
            text.write("5" + "\n")  # Sub win model
        elif atmo_mod == "Us standard 62":
            text.write("6" + "\n")  # Us 62 model
        # Aerosol model
        if aerosol_mod == "No aerosols":
            text.write("0" + "\n")  # No aerosol model
        elif aerosol_mod == "Continental model":
            text.write("1" + "\n")  # Continental aerosol model
        elif aerosol_mod == "Maritime model":
            text.write("2" + "\n")  # Maritimw aerosol model
        elif aerosol_mod == "Urban model":
            text.write("3" + "\n")  # Urban aerosol model
        elif aerosol_mod == "Shettle model for background desert aerosol":
            text.write("4" + "\n")  # Shettle aerosol model
        elif aerosol_mod == "Biomass burning":
            text.write("5" + "\n")  # Biomass aerosol model
        elif aerosol_mod == "Stratospheric model":
            text.write("6" + "\n")  # Stratospheric aerosol model
        # Visibility and/or AOD
        if not flags["a"] and vis != "":
            text.write("{}".format(vis_mean) + "\n")
        elif flags["a"] and vis != "":
            if aot550 != 0:
                text.write("0" + "\n")  # Visibility
                text.write("{}".format(aot550) + "\n")
            elif aot550 == 0:
                text.write("-1" + "\n")  # Visibility
                text.write("{}".format(aot550) + "\n")
        elif vis == "" and aot550 != 0:
            text.write("0" + "\n")  # Visibility
            text.write("{}".format(aot550) + "\n")
        elif vis == "" and aot550 == 0:
            text.write("-1" + "\n")  # Visibility
            text.write("{}".format(aot550) + "\n")
        else:
            gscript.fatal(
                "Unable to retrieve visibility or AOD value, check the input")
        text.write("{:.3f}".format(dem_mean) + "\n")  # Mean elevation
        text.write("-1000" + "\n")  # Sensor height
        # Band number
        b = bb.split("_")
        if check_ndir == 1:
            band_n = b[2]
        else:
            band_n = b[10]
        if band_n == "B01" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("166")
        elif band_n == "B02" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("167")
        elif band_n == "B03" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("168")
        elif band_n == "B04" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("169")
        elif band_n == "B05" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("170")
        elif band_n == "B06" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("171")
        elif band_n == "B07" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("172")
        elif band_n == "B08" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("173")
        elif band_n == "B8A" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("174")
        elif band_n == "B09" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("175")
        elif band_n == "B10" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("176")
        elif band_n == "B11" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("177")
        elif band_n == "B12" and sensor.text == "Sentinel-2A":
            gscript.message(band_n)
            text.write("178")
        elif band_n == "B01" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("179")
        elif band_n == "B02" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("180")
        elif band_n == "B03" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("181")
        elif band_n == "B04" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("182")
        elif band_n == "B05" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("183")
        elif band_n == "B06" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("184")
        elif band_n == "B07" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("185")
        elif band_n == "B08" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("186")
        elif band_n == "B8A" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("187")
        elif band_n == "B09" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("188")
        elif band_n == "B10" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("189")
        elif band_n == "B11" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("190")
        elif band_n == "B12" and sensor.text == "Sentinel-2B":
            gscript.message(band_n)
            text.write("191")
        else:
            gscript.fatal("Bands do not seem to belong to a Sentinel image")
        text.close()

        if flags["a"]:
            gscript.run_command(
                "i.atcorr",
                input=bb,
                parameters=tmp_file,
                output="{}_{}".format(bb, suffix),
                range="1,10000",
                elevation=dem,
                rescale=rescale,
                flags="r",
            )
            cor_bands[key] = "{}_{}".format(bb, suffix)
        else:
            gscript.run_command(
                "i.atcorr",
                input=bb,
                parameters=tmp_file,
                output="{}_{}".format(bb, suffix),
                range="1,10000",
                elevation=dem,
                visibility=vis,
                rescale=rescale,
                flags="r",
            )
            cor_bands[key] = "{}_{}".format(bb, suffix)

    gscript.message(_("--- All bands have been processed ---"))

    if flags["t"]:
        prefix = options["topo_prefix"] + "." if options["topo_prefix"] else ""
        with open(txt_file, "w") as txt:
            for key, value in cor_bands.items():
                if str(key) in [
                        "blue",
                        "green",
                        "red",
                        "nir",
                        "nir8a",
                        "swir11",
                        "swir12",
                ]:
                    txt.write(str(key) + "=" + prefix + str(value) + "\n")
            mtd_tl_xml = glob.glob(
                os.path.join(input_dir, "GRANULE/*/MTD_TL.xml"))[0]
            txt.write("MTD_TL.xml=" + mtd_tl_xml + "\n")

    for key, cb in cor_bands.items():
        gscript.message(cb)
        gscript.run_command("r.colors",
                            map=cb,
                            color="grey",
                            flags="e",
                            quiet=True)

    if flags["c"]:
        gscript.message(
            _("--- Computes topographic correction of reflectance ---"))
        dat = bb.split("_")[1]
        # TODO understand better the timezone
        sunmask = gscript.parse_command(
            "r.sunmask",
            flags="sg",
            elevation=dem,
            year=dat[0:4],
            month=int(dat[4:6]),
            day=int(dat[6:8]),
            hour=int(dat[9:11]),
            minute=int(dat[11:13]),
            second=int(dat[13:15]),
            timezone=0,
        )
        z = 90.0 - float(sunmask["sunangleabovehorizon"])
        if not topo_method:
            topo_method = "c-factor"
        illu = "{}_{}_{}".format(bb, "illu", processid)
        gscript.run_command(
            "i.topo.corr",
            flags="i",
            basemap=dem,
            zenith=z,
            azimuth=sunmask["sunazimuth"],
            output=illu,
        )
        tcor = []
        for ma in cor_bands.values():
            out = "{}_double_{}".format(ma, processid)
            tcor.append(out)
            gscript.raster.mapcalc("{}=double({})".format(out, ma))

        gscript.run_command(
            "i.topo.corr",
            basemap=illu,
            zenith=z,
            input=",".join(tcor),
            method=topo_method,
            output=options["topo_prefix"],
        )
        for ma in tcor:
            inp = "{}.{}".format(options["topo_prefix"], ma)
            gscript.run_command(
                "g.rename",
                quiet=True,
                raster="{},{}".format(
                    inp, inp.replace("_double_{}".format(processid), "")),
            )
        gscript.run_command("g.remove",
                            flags="f",
                            type="raster",
                            name=illu,
                            quiet=True)
        gscript.run_command("g.remove",
                            flags="f",
                            type="raster",
                            name=",".join(tcor),
                            quiet=True)

    gscript.del_temp_region()
    gscript.message(
        _("--- The computational region has been reset to the previous one ---"
          ))
Exemplo n.º 43
0
def main():

    pan = options['pan']
    msxlst = options['msx'].split(',')
    outputsuffix = options['suffix']
    custom_ratio = options['ratio']
    center = options['center']
    center2 = options['center2']
    modulation = options['modulation']
    modulation2 = options['modulation2']

    if options['trim']:
        trimming_factor = float(options['trim'])
    else:
        trimming_factor = False

    histogram_match = flags['l']
    second_pass = flags['2']
    color_match = flags['c']

#    # Check & warn user about "ns == ew" resolution of current region ======
#    region = grass.region()
#    nsr = region['nsres']
#    ewr = region['ewres']
#
#    if nsr != ewr:
#        msg = ('>>> Region's North:South ({ns}) and East:West ({ew}) '
#               'resolutions do not match!')
#        msg = msg.format(ns=nsr, ew=ewr)
#        g.message(msg, flags='w')

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    panres = images[pan].nsres  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    run('g.region', res=panres)  # Respect extent, change resolution
    g.message("|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        g.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        g.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            g.message('Using custom ratio, overriding standard method!',
                      flags='w')

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            g.message("   > Retrieving image resolutions")

            msxres = images[msx].nsres

            # check
            if panres == msxres:
                msg = ("The Panchromatic's image resolution ({pr}) "
                       "equals to the Multi-Spectral's one ({mr}). "
                       "Something is probably not right! "
                       "Please check your input images.")
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
            msg_ratio = ('   >> Resolution ratio '
                         'low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}')
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            g.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            g.message("   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                      "   >>> If you insist, force it via the <ratio> option!",
                      flags='i')
            second_pass = False

        #
        # 2. High Pass Filtering
        #

        g.message('\n|2 High Pass Filtering the Panchromatic Image')

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = 'tmp.' + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = '{tmp}_pan_hpf'.format(tmp=tmp)  # HPF image
        tmp_msx_blnr = '{tmp}_msx_blnr'.format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = '{tmp}_msx_hpf'.format(tmp=tmp)  # Fused image
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run('r.mfilter', input=pan, filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title='High Pass Filtered Panchromatic image',
            overwrite=True)
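        # Illustrative shape of such a filter (actual values depend on the
        # ratio and center weight): a zero-sum 5x5 high-pass kernel, e.g.
        # all cells -1 with a center of 24, read by r.mfilter from the
        # ASCII file written by hpf_ascii()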

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            tmp_pan_hpf_2 = '{tmp}_pan_hpf_2'.format(tmp=tmp)  # 2nd Pass HPF image
            tmp_hpf_matrix_2 = grass.tempfile()  # 2nd Pass ASCII filter
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run('r.mfilter',
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title='2-High-Pass Filtered Panchromatic Image',
                overwrite=True)

        #
        # 3. Upsampling low resolution image
        #

        g.message("\n|3 Upsampling (bilinearly) low resolution image")

        run('r.resamp.interp',
            method='bilinear', input=msx, output=tmp_msx_blnr, overwrite=True)

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        g.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " \
            "Modulating Factor"
        g.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        g.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx, sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        g.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        g.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        g.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = '{hpf} = {msx} + {pan} * {wgt}'
        fusion = fusion.format(hpf=tmp_msx_hpf, msx=tmp_msx_blnr,
                               pan=tmp_pan_hpf, wgt=weighting)
        grass.mapcalc(fusion)

        # command history
        hst = 'Weighting applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}'
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            g.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            g.message("   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = '   >> 2nd Pass Modulating Factor: {m:.2f}'
            g.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            g.message("\n|5+ Adding small-kernel-based weighted 2nd HPFi "
                      "back to fused image")

            add_back = '{final} = {msx_hpf} + {pan_hpf} * {wgt}'
            add_back = add_back.format(final=tmp_msx_hpf, msx_hpf=tmp_msx_hpf,
                                       pan_hpf=tmp_pan_hpf_2, wgt=weighting_2)
            grass.mapcalc(add_back)

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        if color_match:
            g.message("\n|* Matching output to input color table")
            run('r.colors', map=tmp_msx_hpf, raster=msx)

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Spectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            g.message("\n|+ Matching histogram of Pansharpened image "
                      "to %s" % (msx), flags='v')

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            # expression for mapcalc
            lhm = '{out} = ({hpf} - {hpfavg}) / {hpfsd} * {msxsd} + {msxavg}'
            lhm = lhm.format(out=tmp_msx_hpf, hpf=tmp_msx_hpf,
                             hpfavg=msx_hpf_avg, hpfsd=msx_hpf_sd,
                             msxsd=msx_sd, msxavg=msx_avg)

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = '\n|* Trimming output image border pixels by '
            msg += '{factor} times the low resolution\n'.format(factor=tf)
            nsew = '   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}'
            nsew = nsew.format(n=region.n, s=region.s, e=region.e, w=region.w)
            msg += nsew

            g.message(msg)

            # re-set borders
            region.n -= tf * images[msx].nsres
            region.s += tf * images[msx].nsres
            region.e -= tf * images[msx].ewres
            region.w += tf * images[msx].ewres

            # communicate and act
            msg = '   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}'
            msg = msg.format(n=region.n, s=region.s, e=region.e, w=region.w)
            g.message(msg)

            # modify only the extent
            run('g.region',
                n=region.n, s=region.s, e=region.e, w=region.w)
            trim = "{out} = {input}".format(out=tmp_msx_hpf, input=tmp_msx_hpf)
            grass.mapcalc(trim)
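            # Note: re-running mapcalc with this identity expression after the
            # g.region call rewrites the map within the shrunken extent, which
            # is what actually trims the border pixels.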

        #
        # End of Algorithm

        # history entry
        run("r.support", map=tmp_msx_hpf, history="\n".join(cmd_history))

        # add suffix to basename & rename end product
        msx_name = "{base}.{suffix}"
        msx_name = msx_name.format(base=msx.split('@')[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # visualising-related information
    grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Original Region restored")
    g.message("\n>>> Hint, rebalancing colors (via i.colors.enhance) "
              "may improve appearance of RGB composites!",
              flags='i')
Exemplo n.º 44
0
def main():
    global tmp, sqltmp, tmpname, nuldev, vector, rastertmp
    rastertmp = False
    # setup temporary files
    tmp = grass.tempfile()
    sqltmp = tmp + ".sql"
    # we need a random name
    tmpname = grass.basename(tmp)

    nuldev = open(os.devnull, 'w')

    raster = options['raster']
    colprefix = options['column_prefix']
    vector = options['map']
    layer = options['layer']
    percentile = options['percentile']
    basecols = options['method'].split(',')

    ### set up environment variables ###
    env = grass.gisenv()
    mapset = env['MAPSET']

    vs = vector.split('@')
    if len(vs) > 1:
        vect_mapset = vs[1]
    else:
        vect_mapset = mapset

    # does map exist in CURRENT mapset?
    if vect_mapset != mapset or not grass.find_file(vector, 'vector',
                                                    mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    vector = vs[0]

    rastertmp = "%s_%s" % (vector, tmpname)

    # check the input raster map
    if not grass.find_file(raster, 'cell')['file']:
        grass.fatal(_("Raster map <%s> not found") % raster)

    # save current settings:
    grass.use_temp_region()

    # Temporarily align region resolution to the input raster's resolution,
    # keeping the boundary settings
    grass.run_command('g.region', align=raster)

    grass.message(_("Preprocessing input data..."))
    try:
        grass.run_command('v.to.rast',
                          input=vector,
                          layer=layer,
                          output=rastertmp,
                          use='cat',
                          quiet=True)
    except CalledModuleError:
        grass.fatal(_("An error occurred while converting vector to raster"))

    # dump cats to file to avoid the "too many arguments" problem:
    p = grass.pipe_command('r.category', map=rastertmp, sep=';', quiet=True)
    cats = []

    for line in p.stdout:
        cats.append(line.rstrip('\r\n').split(';')[0])
    p.wait()

    number = len(cats)
    if number < 1:
        grass.fatal(_("No categories found in raster map"))

    # check if the DBF driver is used; in that case, cut column names to 10 chars:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # Find out which table is linked to the vector map on the given layer
    if not fi['table']:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))

    # replaced by user choice
    #basecols = ['n', 'min', 'max', 'range', 'mean', 'stddev', 'variance', 'cf_var', 'sum']

    # we need at least three chars to distinguish [mea]n from [med]ian
    # so colprefix can't be longer than 6 chars with DBF driver
    if dbfdriver:
        colprefix = colprefix[:6]
        variables_dbf = {}

    # by default, perccol is used only as a key in the "variables" dictionary
    perccol = "percentile"
    perc = None
    for b in basecols:
        if b.startswith('p'):
            perc = b
    if perc:
        # namespace is limited in DBF but the % value is important
        if dbfdriver:
            perccol = "per" + percentile
        else:
            perccol = "percentile_" + percentile
        percindex = basecols.index(perc)
        basecols[percindex] = perccol
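        # e.g. method=...,percentile,... with percentile=90 yields the column
        # 'percentile_90' (or 'per90' under the DBF driver's short namespace)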

    # dictionary mapping method names to their column position in "r.univar -gt" output
    variables = {
        'number': 2,
        'minimum': 4,
        'maximum': 5,
        'range': 6,
        'average': 7,
        'stddev': 9,
        'variance': 10,
        'coeff_var': 11,
        'sum': 12,
        'first_quartile': 14,
        'median': 15,
        'third_quartile': 16,
        perccol: 17
    }
    # this list is used to set the 'e' flag for r.univar
    extracols = ['first_quartile', 'median', 'third_quartile', perccol]
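    # For reference, the positions above assume the usual 'r.univar -gt'
    # header (zero-indexed):
    #   zone;label;non_null_cells;null_cells;min;max;range;mean;mean_of_abs;
    #   stddev;variance;coeff_var;sum;sum_abs[;first_quartile;median;
    #   third_quartile;percentile_XX when the 'e' flag is set]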
    addcols = []
    colnames = []
    extstat = ""
    for i in basecols:
        # match the (possibly truncated) user input to the complete method name
        for k in variables.keys():
            if i in k:
                i = k
                break
        if i in extracols:
            extstat = 'e'
        # check if column already present
        currcolumn = ("%s_%s" % (colprefix, i))
        if dbfdriver:
            currcolumn = currcolumn[:10]
            variables_dbf[currcolumn.replace("%s_" % colprefix, '')] = i

        colnames.append(currcolumn)
        if currcolumn in grass.vector_columns(vector, layer).keys():
            if not flags['c']:
                grass.fatal(
                    (_("Cannot create column <%s> (already present). ") %
                     currcolumn) +
                    _("Use -c flag to update values in this column."))
        else:
            if i == "n":
                coltype = "INTEGER"
            else:
                coltype = "DOUBLE PRECISION"
            addcols.append(currcolumn + ' ' + coltype)

    if addcols:
        grass.verbose(_("Adding columns '%s'") % addcols)
        try:
            grass.run_command('v.db.addcolumn',
                              map=vector,
                              columns=addcols,
                              layer=layer)
        except CalledModuleError:
            grass.fatal(_("Adding columns failed. Exiting."))

    # calculate statistics:
    grass.message(_("Processing input data (%d categories)...") % number)

    # get rid of any earlier attempts
    grass.try_remove(sqltmp)

    f = open(sqltmp, 'w')

    # do the stats
    p = grass.pipe_command('r.univar',
                           flags='t' + extstat,
                           map=raster,
                           zones=rastertmp,
                           percentile=percentile,
                           sep=';')

    first_line = 1

    f.write("{0}\n".format(grass.db_begin_transaction(fi['driver'])))
    for line in p.stdout:
        if first_line:
            first_line = 0
            continue

        values = line.rstrip('\r\n').split(';')

        f.write("UPDATE %s SET" % fi['table'])
        first_var = 1
        for colname in colnames:
            variable = colname.replace("%s_" % colprefix, '', 1)
            if dbfdriver:
                variable = variables_dbf[variable]
            i = variables[variable]
            value = values[i]
            # convert nan, +nan, -nan, inf, +inf, -inf, Infinity, +Infinity,
            # -Infinity to NULL
            if value.lower().endswith('nan') or 'inf' in value.lower():
                value = 'NULL'
            if not first_var:
                f.write(" , ")
            else:
                first_var = 0
            f.write(" %s=%s" % (colname, value))

        f.write(" WHERE %s=%s;\n" % (fi['key'], vars[0]))
    f.write("{0}\n".format(grass.db_commit_transaction(fi['driver'])))
    p.wait()
    f.close()
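    # Each data row becomes one UPDATE statement, e.g. (hypothetical values):
    #   UPDATE mytable SET  mystats_mean=12.5 ,  mystats_stddev=3.2 WHERE cat=1;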

    grass.message(_("Updating the database ..."))
    exitcode = 0
    try:
        grass.run_command('db.execute',
                          input=sqltmp,
                          database=fi['database'],
                          driver=fi['driver'])
        grass.verbose((_("Statistics calculated from raster map <{raster}>"
                         " and uploaded to attribute table"
                         " of vector map <{vector}>.").format(raster=raster,
                                                              vector=vector)))
    except CalledModuleError:
        grass.warning(
            _("Failed to upload statistics to attribute table of vector map <%s>."
              ) % vector)
        exitcode = 1

    sys.exit(exitcode)
Exemplo n.º 45
0
def main():
    global tmp

    landsat = flags['l']
    quickbird = flags['q']
    spot = flags['s']

    ms1 = options['ms1']
    ms2 = options['ms2']
    ms3 = options['ms3']
    pan = options['pan']
    out = options['output_prefix']

    tmp = str(os.getpid())

    if not landsat and not quickbird and not spot:
	grass.fatal(_("Please select a flag to specify the satellite sensor"))

    #get PAN resolution:
    kv = grass.raster_info(map = pan)
    nsres = kv['nsres']
    ewres = kv['ewres']
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()

    grass.verbose("Using resolution from PAN: %f" % panres)
    grass.run_command('g.region', flags = 'a', res = panres)

    grass.verbose("Performing Brovey transformation...")

    # The formula was originally developed for LANDSAT-TM5 and SPOT, 
    # but it also works well with LANDSAT-TM7
    # LANDSAT formula:
    #  r.mapcalc "brov.red=1. *  tm.5 / (tm.2 + tm.4 + tm.5) * etmpan"
    #  r.mapcalc "brov.green=1. * tm.4 /(tm.2 + tm.4 + tm.5) * etmpan"
    #  r.mapcalc "brov.blue=1. * tm.2 / (tm.2 + tm.4 + tm.5) * etmpan"
    #
    # SPOT formula:
    # r.mapcalc "brov.red= 1.  * spot.ms.3 / (spot.ms.1 + spot.ms.2 + spot.ms.3) * spot.p"
    # r.mapcalc "brov.green=1. * spot.ms.2 / (spot.ms.1 + spot.ms.2 + spot.ms.3) * spot.p"
    # r.mapcalc "brov.blue= 1. * spot.ms.1 / (spot.ms.1 + spot.ms.2 + spot.ms.3) * spot.p"
    # note: for an RGB composite, swap brov.red and brov.green!

    grass.message(_("Calculating %s.{red,green,blue}: ...") % out)
    e = '''eval(k = float("$pan") / ("$ms1" + "$ms2" + "$ms3"))
	   "$out.red"   = "$ms3" * k
	   "$out.green" = "$ms2" * k
	   "$out.blue"  = "$ms1" * k'''
    grass.mapcalc(e, out = out, pan = pan, ms1 = ms1, ms2 = ms2, ms3 = ms3)
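    # The eval() line computes the per-cell scaling factor k only once and
    # reuses it for all three bands, i.e. out.X = msX * pan / (ms1 + ms2 + ms3)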

    # Maybe?
    #r.colors   $GIS_OPT_OUTPUTPREFIX.red col=grey
    #r.colors   $GIS_OPT_OUTPUTPREFIX.green col=grey
    #r.colors   $GIS_OPT_OUTPUTPREFIX.blue col=grey
    #to blue-ish, therefore we modify
    #r.colors $GIS_OPT_OUTPUTPREFIX.blue col=rules << EOF
    #5 0 0 0
    #20 200 200 200
    #40 230 230 230
    #67 255 255 255
    #EOF

    if spot:
        # aspect table is nice for SPOT:
        grass.message(_("Assigning color tables for SPOT..."))
        for ch in ['red', 'green', 'blue']:
            grass.run_command('r.colors', map="%s.%s" % (out, ch), col='aspect')
        grass.message(_("Fixing output names..."))
        for s, d in [('green', 'tmp'), ('red', 'green'), ('tmp', 'red')]:
            src = "%s.%s" % (out, s)
            dst = "%s.%s" % (out, d)
            grass.run_command('g.rename', rast=(src, dst), quiet=True)
    else:
        # aspect table is nice for LANDSAT and QuickBird:
        grass.message(_("Assigning color tables for LANDSAT or QuickBird..."))
        for ch in ['red', 'green', 'blue']:
            grass.run_command('r.colors', map="%s.%s" % (out, ch), col='aspect')

    grass.message(_("Following pan-sharpened output maps have been generated:"))
    for ch in ['red', 'green', 'blue']:
	grass.message(_("%s.%s") % (out, ch))

    grass.verbose("To visualize output, run:")
    grass.verbose("g.region -p rast=%s.red" % out)
    grass.verbose("d.rgb r=%s.red g=%s.green b=%s.blue" % (out, out, out))
    grass.verbose("If desired, combine channels with 'r.composite' to a single map.")

    # write cmd history:
    for ch in ['red', 'green', 'blue']:
        grass.raster_history("%s.%s" % (out, ch))
Exemplo n.º 46
0
def main(options, flags):

    gisbase = os.getenv("GISBASE")
    if not gisbase:
        gs.fatal(_("$GISBASE not defined"))
        return 0

    # Variables
    ref = options["reference"]
    REF = ref.split(",")
    pro = options["projection"]
    if pro:
        PRO = pro.split(",")
    else:
        PRO = REF
    opn = [z.split("@")[0] for z in PRO]
    out = options["output"]
    region = options["region"]
    flag_d = flags["d"]
    flag_e = flags["e"]
    flag_p = flags["p"]

    # get current region settings, to compare to new ones later
    regbu1 = tmpname("region")
    gs.parse_command("g.region", save=regbu1)

    # Check if region, projected layers or mask is given
    if region:
        ffile = gs.find_file(region, element="region")
        if not ffile:
            gs.fatal(_("the region {} does not exist").format(region))
    if not pro and not checkmask() and not region:
        gs.fatal(
            _("You need to provide projected layers, a region, or "
              "a mask has to be set"))
    if pro and len(REF) != len(PRO):
        gs.fatal(
            _("The number of reference and projection layers needs to "
              "be the same. You provided %d reference and %d "
              "projection variables") % (len(REF), len(PRO)))

    # Text for history in metadata
    opt2 = dict((k, v) for k, v in options.items() if v)
    hist = " ".join("{!s}={!r}".format(k, v) for (k, v) in opt2.items())
    hist = "r.exdet {}".format(hist)
    unused, tmphist = tempfile.mkstemp()
    with open(tmphist, "w") as text_file:
        text_file.write(hist)

    # Create covariance table
    VI = CoVar(maps=REF)

    # Import reference data & compute univar stats per reference layer
    s = len(REF)
    dat_ref = stat_mean = stat_min = stat_max = None

    for i, map in enumerate(REF):
        layer = garray.array(map, null=np.nan)
        r, c = layer.shape
        if dat_ref is None:
            dat_ref = np.empty((s, r, c), dtype=np.double)
        if stat_mean is None:
            stat_mean = np.empty((s), dtype=np.double)
        if stat_min is None:
            stat_min = np.empty((s), dtype=np.double)
        if stat_max is None:
            stat_max = np.empty((s), dtype=np.double)
        stat_min[i] = np.nanmin(layer)
        stat_mean[i] = np.nanmean(layer)
        stat_max[i] = np.nanmax(layer)
        dat_ref[i, :, :] = layer
        del layer

    # Compute Mahalanobis over full set of reference layers
    mahal_ref = mahal(v=dat_ref, m=stat_mean, VI=VI)
    mahal_ref_max = max(mahal_ref[np.isfinite(mahal_ref)])
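    # Assuming mahal() implements the standard Mahalanobis distance,
    # D(x) = sqrt((x - m)^T * VI * (x - m)) per cell over the layer stack,
    # with VI the inverse covariance matrix produced by CoVar()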
    if flag_e:
        mahalref = "{}_mahalref".format(out)
        mahal_ref.write(mapname=mahalref)
        gs.info(_("Mahalanobis distance map saved: {}").format(mahalref))
        gs.run_command(
            "r.support",
            map=mahalref,
            title="Mahalanobis distance map",
            units="unitless",
            description="Mahalanobis distance map in reference "
            "domain",
            loadhistory=tmphist,
        )
    del mahal_ref

    # Remove mask and set new region based on user-defined region or
    # otherwise based on projection layers
    if checkmask():
        gs.run_command("r.mask", flags="r")
    if region:
        gs.run_command("g.region", region=region)
        gs.info(_("The region has set to the region {}").format(region))
    if pro:
        gs.run_command("g.region", raster=PRO[0])
        # TODO: only set region to PRO[0] when different from current region
        gs.info(_("The region has set to match the proj raster layers"))

    # Import projected layers in numpy array
    s = len(PRO)
    dat_pro = None
    for i, map in enumerate(PRO):
        layer = garray.array(map, null=np.nan)
        r, c = layer.shape
        if dat_pro is None:
            dat_pro = np.empty((s, r, c), dtype=np.double)
        dat_pro[i, :, :] = layer
        del layer

    # Compute mahalanobis distance
    mahal_pro = mahal(v=dat_pro, m=stat_mean, VI=VI)
    if flag_d:
        mahalpro = "{}_mahalpro".format(out)
        mahal_pro.write(mapname=mahalpro)
        gs.info(_("Mahalanobis distance map saved: {}").format(mahalpro))
        gs.run_command(
            "r.support",
            map=mahalpro,
            title="Mahalanobis distance map projection domain",
            units="unitless",
            loadhistory=tmphist,
            description="Mahalanobis distance map in projection "
            "domain estimated using covariance of reference data",
        )

    # Compute NT1
    tmplay = tmpname(out)
    mnames = [None] * len(REF)
    for i in range(len(REF)):
        tmpout = tmpname("exdet")
        # TODO: computations below sometimes result in very small negative
        # numbers, which are not 'real', but rather due to some differences
        # in handling digits in grass and Python, hence second mapcalc
        # statement. Need to figure out how to handle this better.
        gs.mapcalc(
            "eval("
            "tmp = min(($prolay - $refmin), ($refmax - $prolay),0) / "
            "($refmax - $refmin))\n"
            "$Dij = if(tmp > -0.00000000001, 0, tmp)",
            Dij=tmpout,
            prolay=PRO[i],
            refmin=stat_min[i],
            refmax=stat_max[i],
            quiet=True,
        )
        mnames[i] = tmpout
    gs.run_command("r.series",
                   quiet=True,
                   input=mnames,
                   output=tmplay,
                   method="sum")

    # Compute most influential covariate (MIC) metric for NT1
    if flag_p:
        tmpla1 = tmpname(out)
        gs.run_command("r.series",
                       quiet=True,
                       output=tmpla1,
                       input=mnames,
                       method="min_raster")

    # Compute NT2
    tmpla2 = tmpname(out)
    nt2map = garray.array()
    nt2map[...] = mahal_pro / mahal_ref_max
    nt2map.write(mapname=tmpla2)
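    # NT2 scales the projection-domain Mahalanobis distance by the maximum
    # distance in the reference domain; values > 1 therefore flag covariate
    # combinations beyond the reference data (cf. Mesgaran et al.)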

    # Compute  most influential covariate (MIC) metric for NT2
    if flag_p:
        tmpla3 = tmpname(out)
        laylist = []
        layer = garray.array()
        for i, map in enumerate(opn):
            gs.use_temp_region()
            gs.run_command("g.region", quiet=True, region=regbu1)
            REFtmp = [x for num, x in enumerate(REF) if not num == i]
            stattmp = np.delete(stat_mean, i, axis=0)
            VItmp = CoVar(maps=REFtmp)
            gs.del_temp_region()
            dat_protmp = np.delete(dat_pro, i, axis=0)
            ymap = mahal(v=dat_protmp, m=stattmp, VI=VItmp)
            # in Mesgaran et al, the MIC2 is the max icp, but that is the
            # same as the minimum Mahalanobis distance (ymap)
            # icp = (mahal_pro - ymap) / mahal_pro * 100
            layer[:, :] = ymap
            tmpmahal = tmpname(out)
            layer.write(tmpmahal)
            laylist.append(tmpmahal)
        gs.run_command(
            "r.series",
            quiet=True,
            output=tmpla3,
            input=laylist,
            method="min_raster",
            overwrite=True,
        )

    # Compute nt1, nt2, and nt1and2 novelty maps
    nt1 = "{}_NT1".format(out)
    nt2 = "{}_NT2".format(out)
    nt12 = "{}_NT1NT2".format(out)
    expr = ";".join([
        "$nt12 = if($tmplay < 0, $tmplay, $tmpla2)",
        "$nt2 = if($tmplay >= 0, $tmpla2, null())",
        "$nt1 = if($tmplay < 0, $tmplay, null())",
    ])
    gs.mapcalc(expr,
               nt12=nt12,
               nt1=nt1,
               nt2=nt2,
               tmplay=tmplay,
               tmpla2=tmpla2,
               quiet=True)

    # Write metadata nt1, nt2, nt1and2  maps
    gs.run_command(
        "r.support",
        map=nt1,
        units="unitless",
        title="Type 1 similarity",
        description="Type 1 similarity (NT1)",
        loadhistory=tmphist,
    )
    gs.run_command(
        "r.support",
        map=nt2,
        units="unitless",
        title="Type 2 similarity",
        description="Type 2 similarity (NT2)",
        loadhistory=tmphist,
    )
    gs.run_command(
        "r.support",
        map=nt12,
        units="unitless",
        title="Type 1 + 2 novelty / similarity",
        description="Type 1 + 2 similarity (NT1)",
        loadhistory=tmphist,
    )

    # Compute MIC maps
    if flag_p:
        mic12 = "{}_MICNT1and2".format(out)
        expr = "$mic12 = if($tmplay < 0, $tmpla1, " "if($tmpla2>1, $tmpla3, -1))"
        gs.mapcalc(
            expr,
            tmplay=tmplay,
            tmpla1=tmpla1,
            tmpla2=tmpla2,
            tmpla3=tmpla3,
            mic12=mic12,
            quiet=True,
        )

        # Write category labels to MIC maps
        tmpcat = tempfile.mkstemp()
        with open(tmpcat[1], "w") as text_file:
            text_file.write("-1:None\n")
            for cats in range(len(opn)):
                text_file.write("{}:{}\n".format(cats, opn[cats]))
        gs.run_command("r.category",
                       quiet=True,
                       map=mic12,
                       rules=tmpcat[1],
                       separator=":")
        os.remove(tmpcat[1])
        CATV = Module("r.category", map=mic12, stdout_=PIPE).outputs.stdout
        Module("r.category", map=mic12, rules="-", stdin_=CATV, quiet=True)
        gs.run_command(
            "r.support",
            map=mic12,
            units="unitless",
            title="Most influential covariate",
            description="Most influential covariate (MIC) for NT1"
            "and NT2",
            loadhistory=tmphist,
        )

    # Write color table
    gs.write_command("r.colors",
                     map=nt12,
                     rules="-",
                     stdin=COLORS_EXDET,
                     quiet=True)

    # Finalize
    gs.info(_("Done...."))
Exemplo n.º 47
0
def main():

    pan = options['pan']
    msxlst = options['msx'].split(',')
    outputsuffix = options['suffix']
    custom_ratio = options['ratio']
    center = options['center']
    center2 = options['center2']
    modulation = options['modulation']
    modulation2 = options['modulation2']

    if options['trim']:
        trimming_factor = float(options['trim'])
    else:
        trimming_factor = False

    histogram_match = flags['l']
    second_pass = flags['2']
    color_match = flags['c']

    #    # Check & warn user about "ns == ew" resolution of current region ======
    #    region = grass.region()
    #    nsr = region['nsres']
    #    ewr = region['ewres']
    #
    #    if nsr != ewr:
    #        msg = ('>>> Region's North:South ({ns}) and East:West ({ew}) '
    #               'resolutions do not match!')
    #        msg = msg.format(ns=nsr, ew=ewr)
    #        g.message(msg, flags='w')

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        images[img] = Info(img, mapset)
        images[img].read()

    panres = images[pan].nsres  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    run('g.region', res=panres)  # Respect extent, change resolution
    g.message("|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        g.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        g.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            g.message('Using custom ratio, overriding standard method!',
                      flags='w')

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            g.message("   > Retrieving image resolutions")

            msxres = images[msx].nsres

            # check
            if panres == msxres:
                msg = ("The Panchromatic's image resolution ({pr}) "
                       "equals to the Multi-Spectral's one ({mr}). "
                       "Something is probably not right! "
                       "Please check your input images.")
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
            msg_ratio = ('   >> Resolution ratio '
                         'low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}')
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            g.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            g.message(
                "   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                "   >>> If you insist, force it via the <ratio> option!",
                flags='i')
            second_pass = False

        #
        # 2. High Pass Filtering
        #

        g.message('\n|2 High Pass Filtering the Panchromatic Image')

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = 'tmp.' + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = '{tmp}_pan_hpf'.format(tmp=tmp)  # HPF image
        tmp_msx_blnr = '{tmp}_msx_blnr'.format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = '{tmp}_msx_hpf'.format(tmp=tmp)  # Fused image
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run('r.mfilter',
            input=pan,
            filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title='High Pass Filtered Panchromatic image',
            overwrite=True)
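        # For intuition, a high-pass kernel is all-negative except for a large
        # center weight; a hypothetical 5x5 example (sums to zero):
        #   -1 -1 -1 -1 -1
        #   -1 -1 -1 -1 -1
        #   -1 -1 24 -1 -1   <- the 'center' option tunes this value
        #   -1 -1 -1 -1 -1
        #   -1 -1 -1 -1 -1
        # The actual size and center weight come from get_high_pass_filter().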

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            tmp_pan_hpf_2 = '{tmp}_pan_hpf_2'.format(
                tmp=tmp)  # 2nd Pass HPF image
            tmp_hpf_matrix_2 = grass.tempfile()  # 2nd Pass ASCII filter
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run('r.mfilter',
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title='2-High-Pass Filtered Panchromatic Image',
                overwrite=True)

        #
        # 3. Upsampling low resolution image
        #

        g.message("\n|3 Upsampling (bilinearly) low resolution image")

        run('r.resamp.interp',
            method='bilinear',
            input=msx,
            output=tmp_msx_blnr,
            overwrite=True)

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        g.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " \
            "Modulating Factor"
        g.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        g.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx, sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        g.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        g.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)
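        # presumably weighting = msx_sd / hpf_sd * modulator, per the message
        # above; the trailing '1' marks this as the first-pass computation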

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        g.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = '{hpf} = {msx} + {pan} * {wgt}'
        fusion = fusion.format(hpf=tmp_msx_hpf,
                               msx=tmp_msx_blnr,
                               pan=tmp_pan_hpf,
                               wgt=weighting)
        grass.mapcalc(fusion)

        # command history
        hst = 'Weighting applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}'
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            g.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            g.message("   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = '   >> 2nd Pass Modulating Factor: {m:.2f}'
            g.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            g.message("\n|5+ Adding small-kernel-based weighted 2nd HPFi "
                      "back to fused image")

            add_back = '{final} = {msx_hpf} + {pan_hpf} * {wgt}'
            add_back = add_back.format(final=tmp_msx_hpf,
                                       msx_hpf=tmp_msx_hpf,
                                       pan_hpf=tmp_pan_hpf_2,
                                       wgt=weighting_2)
            grass.mapcalc(add_back)

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(
                hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        if color_match:
            g.message("\n|* Matching output to input color table")
            run('r.colors', map=tmp_msx_hpf, raster=msx)

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Spectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to those of the input
            g.message("\n|+ Matching histogram of Pansharpened image "
                      "to %s" % (msx),
                      flags='v')

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            # expression for mapcalc
            lhm = '{out} = ({hpf} - {hpfavg}) / {hpfsd} * {msxsd} + {msxavg}'
            lhm = lhm.format(out=tmp_msx_hpf,
                             hpf=tmp_msx_hpf,
                             hpfavg=msx_hpf_avg,
                             hpfsd=msx_hpf_sd,
                             msxsd=msx_sd,
                             msxavg=msx_avg)

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = '\n|* Trimming output image border pixels by '
            msg += '{factor} times the low-resolution cell size\n'.format(factor=tf)
            nsew = '   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}'
            nsew = nsew.format(n=region.n, s=region.s, e=region.e, w=region.w)
            msg += nsew

            g.message(msg)

            # re-set borders
            region.n -= tf * images[msx].nsres
            region.s += tf * images[msx].nsres
            region.e -= tf * images[msx].ewres
            region.w += tf * images[msx].ewres

            # communicate and act
            msg = '   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}'
            msg = msg.format(n=region.n, s=region.s, e=region.e, w=region.w)
            g.message(msg)

            # modify only the extent
            run('g.region', n=region.n, s=region.s, e=region.e, w=region.w)
            trim = "{out} = {input}".format(out=tmp_msx_hpf, input=tmp_msx_hpf)
            grass.mapcalc(trim)

        #
        # End of Algorithm

        # history entry
        grass.raster_history(tmp_msx_hpf)

        # add suffix to basename & rename end product
        msx_name = "{base}{suffix}"
        msx_name = msx_name.format(base=msx.split('@')[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # visualising-related information
    grass.del_temp_region()  # restoring previous region settings
    g.message("\n|! Original Region restored")
    g.message(
        "\n>>> Hint, rebalancing colors (via i.colors.enhance) "
        "may improve appearance of RGB composites!",
        flags='i')
def main():
    """
    Main program
    """

    # Temporary filenames

    # The following three are meant for testing step-by-step cwv estimation;
    # see the unused functions!

    # tmp_ti_mean = tmp_map_name('ti_mean')  # for cwv
    # tmp_tj_mean = tmp_map_name('tj_mean')  # for cwv
    # tmp_ratio = tmp_map_name('ratio')  # for cwv

    tmp_avg_lse = tmp_map_name('avg_lse')
    tmp_delta_lse = tmp_map_name('delta_lse')
    tmp_cwv = tmp_map_name('cwv')
    #tmp_lst = tmp_map_name('lst')

    # basic equation for mapcalc
    global equation, citation_lst
    equation = "{result} = {expression}"

    # user input
    mtl_file = options['mtl']

    if not options['prefix']:
        b10 = options['b10']
        b11 = options['b11']
        t10 = options['t10']
        t11 = options['t11']

        if not options['clouds']:
            qab = options['qab']
            cloud_map = False

        else:
            qab = False
            cloud_map = options['clouds']

    elif options['prefix']:
        prefix = options['prefix']
        b10 = prefix + '10'
        b11 = prefix + '11'

        if not options['clouds']:
            qab = prefix + 'QA'
            cloud_map = False

        else:
            cloud_map = options['clouds']
            qab = False

    qapixel = options['qapixel']
    lst_output = options['lst']

    # save Brightness Temperature maps?
    global brightness_temperature_prefix
    if options['prefix_bt']:
        brightness_temperature_prefix = options['prefix_bt']
    else:
        brightness_temperature_prefix = None

    global cwv_output
    cwv_window_size = int(options['window'])
    assertion_for_cwv_window_size_msg = (
        'A spatial window of size 5^2 or less is not '
        'recommended. Please select a larger window. '
        'Refer to the manual\'s notes for details.')
    assert cwv_window_size >= 7, assertion_for_cwv_window_size_msg
    cwv_output = options['cwv']

    # optional maps
    average_emissivity_map = options['emissivity']
    delta_emissivity_map = options['delta_emissivity']

    # output for in-between maps?
    global emissivity_output, delta_emissivity_output
    emissivity_output = options['emissivity_out']
    delta_emissivity_output = options['delta_emissivity_out']

    global landcover_map, emissivity_class
    landcover_map = options['landcover']
    emissivity_class = options['emissivity_class']

    # flags
    global info, null
    info = flags['i']
    # keep_region = flags['k']
    scene_extent = flags['k']
    timestamping = flags['t']
    null = flags['n']

    global celsius
    celsius = flags['c']

    # ToDo:
    # shell = flags['g']

    #
    # Pre-production actions
    #

    # Set Region
    # if not keep_region:
    if scene_extent:
        grass.use_temp_region()  # safely modify the region
        msg = "\n|! Matching region extent to map {name}"

        # ToDo: check if extent-B10 == extent-B11? Unnecessary?
        # Improve below!

        if b10:
            run('g.region', rast=b10, align=b10)
            msg = msg.format(name=b10)

        elif t10:
            run('g.region', rast=t10, align=t10)
            msg = msg.format(name=t10)

        g.message(msg)

    # elif keep_region:
    else:
        grass.warning(_('Operating on current region'))

    #
    # 1. Mask clouds
    #

    if cloud_map:
        # user-fed cloud map?
        msg = '\n|i Using {cmap} as a MASK'.format(cmap=cloud_map)
        g.message(msg)
        r.mask(raster=cloud_map, flags='i', overwrite=True)

    else:
        # using the quality assessment band and a "QA" pixel value
        mask_clouds(qab, qapixel)
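    # either way a raster MASK should now be active; with flags='i' above the
    # mask is inverted, so cells covered by the cloud map are excluded from
    # all subsequent raster computations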

    #
    # 2. TIRS > Brightness Temperatures
    #

    if mtl_file:

        # if MTL and b10 given, use it to compute at-satellite temperature t10
        if b10:
            # convert DNs to at-satellite temperatures
            t10 = tirs_to_at_satellite_temperature(b10, mtl_file)

        # likewise for b11 -> t11
        if b11:
            # convert DNs to at-satellite temperatures
            t11 = tirs_to_at_satellite_temperature(b11, mtl_file)
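        # a hedged sketch of what the helper presumably does, following the
        # usual Landsat 8 MTL recipe: radiance L = ML * DN + AL, then
        # T = K2 / ln(K1 / L + 1), with ML, AL, K1, K2 read from the MTL file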

    #
    # Initialise a SplitWindowLST object
    #

    split_window_lst = SplitWindowLST(emissivity_class)
    citation_lst = split_window_lst.citation

    #
    # 3. Land Surface Emissivities
    #

    # use given fixed class?
    if emissivity_class:

        if split_window_lst.landcover_class is False:
            # replace with meaningful error
            grass.warning('Unknown land cover class string! Note, this string '
                          'input option is case sensitive.')

        if emissivity_class == 'Random':
            msg = "\n|! Random emissivity class selected > " + \
                split_window_lst.landcover_class + ' '

        else:
            msg = '\n|! Retrieving average emissivities *only* for {eclass} '

        if info:
            msg += '| Average emissivities (channels 10, 11): '
            msg += str(split_window_lst.emissivity_t10) + ', ' + \
                str(split_window_lst.emissivity_t11)

        msg = msg.format(eclass=split_window_lst.landcover_class)
        g.message(msg)

    # use the FROM-GLC map
    elif landcover_map:

        if average_emissivity_map:
            tmp_avg_lse = average_emissivity_map

        if not average_emissivity_map:
            determine_average_emissivity(tmp_avg_lse, landcover_map,
                                         split_window_lst.average_lse_mapcalc)
            if options['emissivity_out']:
                tmp_avg_lse = options['emissivity_out']

        if delta_emissivity_map:
            tmp_delta_lse = delta_emissivity_map

        if not delta_emissivity_map:
            determine_delta_emissivity(tmp_delta_lse, landcover_map,
                                       split_window_lst.delta_lse_mapcalc)
            if options['delta_emissivity_out']:
                tmp_delta_lse = options['delta_emissivity_out']

    #
    # 4. Modified Split-Window Variance-Covariance Matrix > Column Water Vapor
    #

    if info:
        msg = '\n|i Spatial window of size {n} for Column Water Vapor estimation: '
        msg = msg.format(n=cwv_window_size)
        g.message(msg)

    cwv = Column_Water_Vapor(cwv_window_size, t10, t11)
    citation_cwv = cwv.citation
    estimate_cwv_big_expression(tmp_cwv, t10, t11, cwv._big_cwv_expression())
    if cwv_output:
        tmp_cwv = cwv_output

    #
    # 5. Estimate Land Surface Temperature
    #

    if info and emissivity_class == 'Random':
        msg = '\n|* Will pick a random emissivity class!'
        grass.verbose(msg)

    estimate_lst(lst_output, t10, t11, tmp_avg_lse, tmp_delta_lse, tmp_cwv,
                 split_window_lst.sw_lst_mapcalc)

    #
    # Post-production actions
    #

    # remove MASK
    r.mask(flags='r', verbose=True)

    # time-stamping
    if timestamping:
        add_timestamp(mtl_file, lst_output)

        if cwv_output:
            add_timestamp(mtl_file, cwv_output)

    # Apply color table
    if celsius:
        run('r.colors', map=lst_output, color='celsius')
    else:
        # color table for kelvin
        run('r.colors', map=lst_output, color='kelvin')

    # ToDo: helper function for r.support
    # strings for metadata
    history_lst = '\n' + citation_lst
    history_lst += '\n\n' + citation_cwv
    history_lst += '\n\nSplit-Window model: '
    history_lst += split_window_lst._equation  # :wsw_lst_mapcalc
    description_lst = (
        'Land Surface Temperature derived from a split-window algorithm. ')

    if celsius:
        title_lst = 'Land Surface Temperature (C)'
        units_lst = 'Celsius'

    else:
        title_lst = 'Land Surface Temperature (K)'
        units_lst = 'Kelvin'

    landsat8_metadata = Landsat8_MTL(mtl_file)
    source1_lst = landsat8_metadata.scene_id
    source2_lst = landsat8_metadata.origin

    # history entry
    run("r.support",
        map=lst_output,
        title=title_lst,
        units=units_lst,
        description=description_lst,
        source1=source1_lst,
        source2=source2_lst,
        history=history_lst)

    # (re)name the LST product
    #run("g.rename", rast=(tmp_lst, lst_output))

    # restore region
    # if not keep_region:
    if scene_extent:
        grass.del_temp_region()  # restoring previous region settings
        g.message("|! Original Region restored")

    # print citation
    if info:
        print('\nSource: ' + citation_lst)
Exemplo n.º 49
0
 cols = DEM_arr.shape[1]
 null = -9999
 fo.write("north: " + str(north)+"\n")
 fo.write("south: " + str(south)+"\n")
 fo.write("east: "  + str(east) +"\n")
 fo.write("west: "  + str(west) +"\n")
 fo.write("rows: "  + str(rows) +"\n")      
 fo.write("cols: "  + str(cols) +"\n")
 fo.write("null: "  + str(null) +"\n")
 for i in range(0,DEM_arr.shape[0]):
     fo.write("\n")
     for j in range(0,DEM_arr.shape[1]):
         fo.write(str(DEM_arr[i][j])+ " ")
 fo.close()
 # We create a temporary region that is only valid in this python session
 g.use_temp_region()
 rows = DEM_arr.shape[0]
 cols = DEM_arr.shape[1]
 s = 0  # some arbitrary value
 n = s + resolution * rows
 w = 0  # some arbitrary value
 e = w + resolution * cols
 g.run_command('g.region', flags='ap', n=n, s=s, e=e, w=w,
               res=resolution, rows=rows, cols=cols)
 pathname = os.path.dirname(sys.argv[0])
 fullpath = os.path.abspath(pathname)
 g.run_command('r.in.ascii', overwrite=True, flags='f',
               input=fullpath + '/DEM.asc', output='test_DEM')

 g.run_command('r.cost', overwrite=True, flags='rk', input='test_DEM@user1',
               output='cost_map', coordinate='100,100',
               stop_coordinate='2400,2400')
 g.run_command('r.out.png', input='cost_map@user1', output=fullpath + '/cost')
 g.run_command('r.slope.aspect', overwrite=True, elevation='test_DEM@user1',
               slope='DEM_Slope', aspect='DEM_Aspect', format='percent')
 g.run_command('r.walk', overwrite=True, flags='rk', elevation='test_DEM@user1',
               friction='DEM_Slope@user1', output='walk_map',
               coordinate='100,100', stop_coordinate='2400,2400')
Exemplo n.º 50
0
        column=COL_VALUE,
        output=smoothing,
        power=2,
        npoints=inter_points,
    )
    # Reset region to full extent
    gscript.run_command("g.region", raster=high + "," + low)

    # Apply stitching
    smooth_low_res = getTemporaryIdentifier()
    # Sum to low res
    gscript.message(_("[r.mblend] Applying smoothing surface"))
    gscript.mapcalc(smooth_low_res + " = " + low_res_inter + " + " + smoothing)
    # Add both rasters
    try:
        gscript.message(_("[r.mblend] Joining result into a single raster"))
        gscript.run_command("r.patch",
                            input=high + "," + smooth_low_res,
                            output=output)
    except Exception:
        gscript.error(_("[r.mblend] ERROR: Failed to create smoothed raster."))
        exit()

    gscript.message(_("[r.mblend] SUCCESS: smoothed raster created."))


if __name__ == "__main__":
    atexit.register(cleanup)
    gscript.use_temp_region()
    main()
def main():
    options, flags = gs.parser()

    # it does not check whether PNGs and other files exist;
    # maybe it could check for any/all file(s) in the output dir

    if options['raster'] and options['strds']:
        gs.fatal(_("Options raster and strds cannot be specified together."
                   " Please decide for one of them."))
    if options['raster'] and options['where']:
        gs.fatal(_("Option where cannot be combined with the option raster."
                   " Please don't set where option or use strds option"
                   " instead of raster option."))
    if options['raster']:
        if ',' in options['raster']:
            maps = options['raster'].split(',')  # TODO: skip empty parts
        else:
            maps = [options['raster']]
    elif options['strds']:
        # import and init only when needed
        # init is called anyway when the generated form is used
        import grass.temporal as tgis

        strds = options['strds']
        where = options['where']

        # make sure the temporal database exists
        tgis.init()

        # create the space time raster object
        ds = tgis.open_old_space_time_dataset(strds, 'strds')
        # check if the dataset is in the temporal database
        if not ds.is_in_db():
            gs.fatal(_("Space time dataset <%s> not found") % strds)

        # we need a database interface
        dbiface = tgis.SQLDatabaseInterfaceConnection()
        dbiface.connect()

        # the query
        rows = ds.get_registered_maps(columns='id', where=where,
                                      order='start_time')
        if not rows:
            gs.fatal(_("Cannot get any maps for spatio-temporal raster"
                       " dataset <%s>."
                       " Dataset is empty or you temporal WHERE"
                       " condition filtered all maps out."
                       " Please, specify another dataset,"
                       " put maps into this dataset"
                       " or correct your WHERE condition.") % strds)
        maps = [row['id'] for row in rows]
    else:
        gs.fatal(_("Either raster or strds option must be specified."
                   " Please specify one of them."))
    # get the number of maps for later use
    num_maps = len(maps)

    out_dir = options['output']
    if not os.path.exists(out_dir):
        # TODO: maybe we could create the last dir on specified path?
        gs.fatal(_("Output path <%s> does not exists."
                   " You need to create the (empty) output directory"
                   " yourself before running this module.") % out_dir)
    epsg = int(options['epsg'])

    if ',' in options['opacity']:
        opacities = [float(opacity)
                     for opacity in options['opacity'].split(',')]
        if len(opacities) != num_maps:
            gs.fatal(_("Number of opacities <{no}> does not match number"
                       " of maps <{nm}>.").format(no=len(opacities),
                                                  nm=num_maps))
    else:
        opacities = [float(options['opacity'])] * num_maps

    if ',' in options['info']:
        infos = options['info'].split(',')
    else:
        infos = [options['info']]

    if 'geotiff' in infos and not gs.find_program('r.out.tiff', '--help'):
        gs.fatal(_("Install r.out.tiff add-on module to export GeoTIFF"))

    # r.out.png options
    compression = int(options['compression'])
    # flag w is passed to r.out.png.proj
    # our flag n is inversion of r.out.png.proj's t flag
    # (transparent NULLs are better for overlay)
    # we always need the l flag (ll .wgs84 file)
    routpng_flags = ''
    if not flags['n']:
        routpng_flags += 't'
    if flags['w']:
        routpng_flags += 'w'
    # r.out.png.proj l flag for LL .wgs84 file is now function parameter
    # and is specified below

    if flags['m']:
        use_region = False
        # we will use map extent
        gs.use_temp_region()
    else:
        use_region = True

    # hard coded file names
    data_file_name = 'data_file.csv'
    js_data_file_name = 'data_file.js'

    data_file = open(os.path.join(out_dir, data_file_name), 'w')
    js_data_file = open(os.path.join(out_dir, js_data_file_name), 'w')
    js_data_file.write('/* This file was generated by r.out.leaflet GRASS GIS'
                       ' module. */\n\n')
    js_data_file.write('var layerInfos = [\n')

    for i, map_name in enumerate(maps):
        if not use_region:
            gs.run_command('g.region', rast=map_name)
        if '@' in map_name:
            pure_map_name = map_name.split('@')[0]
        else:
            pure_map_name = map_name
        # TODO: mixing current and map's mapset at this point
        if '@' in map_name:
            map_name, src_mapset_name = map_name.split('@')
        else:
            # TODO: maybe mapset is mandatory for those out of current mapset?
            src_mapset_name = gs.gisenv()['MAPSET']
        image_file_name = pure_map_name + '.png'
        image_file_path = os.path.join(out_dir, image_file_name)
        # TODO: skip writing to file and extract the information from
        # function, or use object if function is so large
        wgs84_file = image_file_path + '.wgs84'
        export_png_in_projection(map_name=map_name,
                                 src_mapset_name=src_mapset_name,
                                 output_file=image_file_path,
                                 epsg_code=epsg,
                                 compression=compression,
                                 routpng_flags=routpng_flags,
                                 wgs84_file=wgs84_file,
                                 use_region=True)

        data_file.write(pure_map_name + ',' + image_file_name + '\n')

        # it doesn't matter which location we are in; it just uses the current
        # location. Not tested for a LL location, assuming that to be a no-op.
        map_extent = get_map_extent_for_file(wgs84_file)
        bounds = map_extent_to_js_leaflet_list(map_extent)
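        # bounds presumably follow Leaflet's [[south, west], [north, east]]
        # LatLngBounds convention, as produced by map_extent_to_js_leaflet_list()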

        extra_attributes = []

        generate_infos(map_name=map_name,
                       projected_png_file=image_file_path,
                       required_infos=infos,
                       output_directory=out_dir,
                       attributes=extra_attributes)
        # http://www.w3schools.com/js/js_objects.asp
        js_data_file.write("""   {{title: "{title}", file: "{file_}","""
                           """ bounds: {bounds}, opacity: {opacity}"""
                           .format(title=pure_map_name,
                                   file_=image_file_name,
                                   bounds=bounds,
                                   opacity=opacities[i]))
        if extra_attributes:
            extra_js_attributes = [pair[0] + ': "' +
                                   escape_quotes(
                                       escape_endlines(
                                           escape_backslashes(
                                               pair[1]
                                           ))) + '"'
                                   for pair in extra_attributes]
            js_data_file.write(', ' + ', '.join(extra_js_attributes))
        js_data_file.write("""}\n""")
        # do not write after the last item
        if i < num_maps - 1:
            js_data_file.write(',')
    js_data_file.write('];\n')
    data_file.close()
Exemplo n.º 52
0
def main():

    pan = options["pan"]
    msxlst = options["msx"].split(",")
    outputsuffix = options["suffix"]
    custom_ratio = options["ratio"]
    center = options["center"]
    center2 = options["center2"]
    modulation = options["modulation"]
    modulation2 = options["modulation2"]

    if options["trim"]:
        trimming_factor = float(options["trim"])
    else:
        trimming_factor = False

    histogram_match = flags["l"]
    second_pass = flags["2"]
    color_match = flags["c"]

    #    # Check & warn user about "ns == ew" resolution of current region ======
    #    region = grass.region()
    #    nsr = region['nsres']
    #    ewr = region['ewres']
    #
    #    if nsr != ewr:
    #        msg = ('>>> Region's North:South ({ns}) and East:West ({ew}) '
    #               'resolutions do not match!')
    #        msg = msg.format(ns=nsr, ew=ewr)
    #        grass.message(msg, flag='w')

    mapset = grass.gisenv()["MAPSET"]  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    # pygrass.raster.abstract.Info can not cope with
    # Info(name@mapset, mapset)
    # -> fully qualified names and input images from other mapsets are
    # not supported
    # -> use r.info via raster_info

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        # images[img] = Info(img, mapset)
        # images[img].read()
        try:
            images[img] = grass.raster_info(img)
        except Exception:
            grass.fatal(_("input raster <%s> not found") % img)

    panres = images[pan]["nsres"]  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    if flags["a"]:
        run("g.region", align=pan)  # Respect extent, change resolution
    else:
        run("g.region", res=panres)  # Respect extent, change resolution
        grass.message("|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        grass.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        grass.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            grass.warning("Using custom ratio, overriding standard method!")

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            grass.message("   > Retrieving image resolutions")

            msxres = images[msx]["nsres"]

            # check
            if panres == msxres:
                msg = (
                    "The Panchromatic's image resolution ({pr}) "
                    "equals to the Multi-Spectral's one ({mr}). "
                    "Something is probably not right! "
                    "Please check your input images."
                )
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
            msg_ratio = (
                "   >> Resolution ratio "
                "low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}"
            )
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            grass.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            grass.message(
                "   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                "   >>> If you insist, force it via the <ratio> option!",
                flag="i",
            )
            second_pass = False

        #
        # 2. High Pass Filtering
        #

        grass.message("\n|2 High Pass Filtering the Panchromatic Image")

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = "tmp." + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = "{tmp}_pan_hpf".format(tmp=tmp)  # HPF image
        tmp_msx_blnr = "{tmp}_msx_blnr".format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = "{tmp}_msx_hpf".format(tmp=tmp)  # Fused image
        tmp_msx_mapcalc = tmp_msx_hpf + "_mapcalc"
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run(
            "r.mfilter",
            input=pan,
            filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title="High Pass Filtered Panchromatic image",
            overwrite=True,
        )

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            # 2nd Pass HPF image
            tmp_pan_hpf_2 = "{tmp}_pan_hpf_2".format(tmp=tmp)
            # 2nd Pass ASCII filter
            tmp_hpf_matrix_2 = grass.tempfile()
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run(
                "r.mfilter",
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title="2-High-Pass Filtered Panchromatic Image",
                overwrite=True,
            )

        #
        # 3. Upsampling low resolution image
        #

        grass.message("\n|3 Upsampling (bilinearly) low resolution image")

        run(
            "r.resamp.interp",
            method="bilinear",
            input=msx,
            output=tmp_msx_blnr,
            overwrite=True,
        )

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        grass.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " "Modulating Factor"
        grass.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        grass.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx, sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        grass.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        grass.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        grass.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = "{hpf} = {msx} + {pan} * {wgt}"
        fusion = fusion.format(
            hpf=tmp_msx_hpf, msx=tmp_msx_blnr, pan=tmp_pan_hpf, wgt=weighting
        )
        grass.mapcalc(fusion)

        # command history
        hst = "Weigthing applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}"
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            grass.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            grass.message("   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = "   >> 2nd Pass Modulating Factor: {m:.2f}"
            grass.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            grass.message(
                "\n|5+ Adding small-kernel-based weighted "
                "2nd HPFi back to fused image"
            )

            add_back = "{final} = {msx_hpf} + {pan_hpf} * {wgt}"
            # r.mapcalc: do not use input as output
            add_back = add_back.format(
                final=tmp_msx_mapcalc,
                msx_hpf=tmp_msx_hpf,
                pan_hpf=tmp_pan_hpf_2,
                wgt=weighting_2,
            )
            grass.mapcalc(add_back)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Spectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            # technically, this is not histogram matching but
            # normalizing to the input's mean + stddev
            grass.message(
                "\n|+ Matching histogram of Pansharpened image to %s" % msx
            )

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            msx_info = images[msx]
            outfn = "round"
            if msx_info["datatype"] == "FCELL":
                outfn = "float"
            elif msx_info["datatype"] == "DCELL":
                outfn = "double"

            # expression for mapcalc
            lhm = (
                "{out} = {outfn}(double({hpf} - {hpfavg}) / {hpfsd} * "
                "{msxsd} + {msxavg})"
            )
            # r.mapcalc: do not use input as output
            lhm = lhm.format(
                out=tmp_msx_mapcalc,
                outfn=outfn,
                hpf=tmp_msx_hpf,
                hpfavg=msx_hpf_avg,
                hpfsd=msx_hpf_sd,
                msxsd=msx_sd,
                msxavg=msx_avg,
            )

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # snap outliers to input range
            snapout = (
                "{out} = {outfn}(if({hpf} < {oldmin}, {oldmin}, "
                "if({hpf} > {oldmax}, {oldmax}, {hpf})))"
            )
            snapout = snapout.format(
                out=tmp_msx_mapcalc,
                outfn=outfn,
                hpf=tmp_msx_hpf,
                oldmin=msx_info["min"],
                oldmax=msx_info["max"],
            )

            grass.mapcalc(snapout, quiet=True, overwrite=True)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)
        else:
            # scale result to input using quantiles
            grass.message(
                "\n|+ Quantile scaling of Pansharpened image to %s" % msx
            )

            msx_info = images[msx]
            outfn = "round"
            if msx_info["datatype"] == "FCELL":
                outfn = "float"
            elif msx_info["datatype"] == "DCELL":
                outfn = "double"

            # quantile scaling
            percentiles = "10,50,90"
            allq = grass.read_command(
                "r.quantile", input=msx, percentiles=percentiles, quiet=True
            )
            allq = allq.splitlines()
            msx_plo = float(allq[0].split(":")[2])
            msx_med = float(allq[1].split(":")[2])
            msx_phi = float(allq[2].split(":")[2])

            allq = grass.read_command(
                "r.quantile", input=tmp_msx_hpf, percentiles=percentiles, quiet=True
            )
            allq = allq.splitlines()
            hpf_plo = float(allq[0].split(":")[2])
            hpf_med = float(allq[1].split(":")[2])
            hpf_phi = float(allq[2].split(":")[2])

            # scale factors
            if msx_med != msx_plo and hpf_med != hpf_plo:
                sfplo = (msx_med - msx_plo) / (hpf_med - hpf_plo)
            else:
                # avoid zero and division by zero
                sfplo = 1
            if msx_phi != msx_med and hpf_phi != hpf_med:
                sfphi = (msx_phi - msx_med) / (hpf_phi - hpf_med)
            else:
                # avoid zero and division by zero
                sfphi = 1

            scale = (
                "{out} = {outfn}(double({hpf} - {hpf_med}) * "
                "if({hpf} < {hpf_med}, {sfplo}, "
                "{sfphi}) + {msx_med})"
            )
            scale = scale.format(
                out=tmp_msx_mapcalc,
                outfn=outfn,
                hpf=tmp_msx_hpf,
                hpf_med=hpf_med,
                sfplo=sfplo,
                sfphi=sfphi,
                msx_med=msx_med,
            )
            grass.mapcalc(scale, quiet=True)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # snap outliers to input range
            snapout = (
                "{out} = {outfn}(if({hpf} < {oldmin}, {oldmin}, "
                "if({hpf} > {oldmax}, {oldmax}, {hpf})))"
            )
            snapout = snapout.format(
                out=tmp_msx_mapcalc,
                outfn=outfn,
                hpf=tmp_msx_hpf,
                oldmin=msx_info["min"],
                oldmax=msx_info["max"],
            )

            grass.mapcalc(snapout, quiet=True, overwrite=True)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # update history string
            cmd_history.append("Linear Scaling: %s" % scale)

        if color_match:
            grass.message("\n|* Matching output to input color table")
            run("r.colors", map=tmp_msx_hpf, raster=msx)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = "\n|* Trimming output image border pixels by "
            msg += "{factor} times the low resolution\n".format(factor=tf)
            nsew = "   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}"
            nsew = nsew.format(
                n=region["n"], s=region["s"], e=region["e"], w=region["w"]
            )
            msg += nsew

            grass.message(msg)

            # re-set borders
            region.n -= tf * images[msx]["nsres"]
            region.s += tf * images[msx]["nsres"]
            region.e -= tf * images[msx]["ewres"]
            region.w += tf * images[msx]["ewres"]

            # communicate and act
            msg = "   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}"
            msg = msg.format(n=region["n"], s=region["s"], e=region["e"], w=region["w"])
            grass.message(msg)

            # modify only the extent
            run("g.region", n=region["n"], s=region["s"], e=region["e"], w=region["w"])
            # r.mapcalc: do not use input as output
            trim = "{out} = {input}".format(out=tmp_msx_mapcalc, input=tmp_msx_hpf)
            grass.mapcalc(trim)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

        #
        # End of Algorithm

        # history entry
        run("r.support", map=tmp_msx_hpf, history="\n".join(cmd_history))

        # add suffix to basename & rename end product
        msx_name = "{base}.{suffix}"
        msx_name = msx_name.format(base=msx.split("@")[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # visualising-related information
    grass.del_temp_region()  # restoring previous region settings
    grass.message("\n|! Original Region restored")
    grass.message(
        "\n>>> Hint, rebalancing colors (via i.colors.enhance) "
        "may improve appearance of RGB composites!",
        flag="i",
    )
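
# --- Hedged sketch (added for illustration; not part of the module above) ---
# A minimal numpy illustration of the fusion arithmetic in steps |4 and |5:
# the high-pass-filtered panchromatic band is weighted by
# StdDev(MSx) / StdDev(HPFi) * modulating factor and added to the bilinearly
# upsampled multi-spectral band. Array names and the modulating factor value
# are assumptions for demonstration only.
import numpy as np

def hpf_fuse(msx_upsampled, pan_hpf, modulator=0.25):
    """Fuse one upsampled multi-spectral band with an HPF pan band."""
    weighting = msx_upsampled.std() / pan_hpf.std() * modulator
    return msx_upsampled + pan_hpf * weighting

# random arrays stand in for real raster data
rng = np.random.default_rng(0)
msx_demo = rng.normal(100.0, 20.0, (512, 512))  # upsampled MSx band
hpf_demo = rng.normal(0.0, 5.0, (512, 512))     # high-pass-filtered Pan
fused_demo = hpf_fuse(msx_demo, hpf_demo)
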
def main():
    """
    Main program
    """

    # Temporary filenames

    # The following three are meant for a step-by-step test of the cwv
    # estimation; see the unused functions!

    # tmp_ti_mean = tmp_map_name('ti_mean')  # for cwv
    # tmp_tj_mean = tmp_map_name('tj_mean')  # for cwv
    # tmp_ratio = tmp_map_name('ratio')  # for cwv

    tmp_avg_lse = tmp_map_name('avg_lse')
    tmp_delta_lse = tmp_map_name('delta_lse')
    tmp_cwv = tmp_map_name('cwv')
    #tmp_lst = tmp_map_name('lst')

    # basic equation for mapcalc
    global equation, citation_lst
    equation = "{result} = {expression}"

    # user input
    mtl_file = options['mtl']

    if not options['prefix']:
        b10 = options['b10']
        b11 = options['b11']
        t10 = options['t10']
        t11 = options['t11']

        if not options['clouds']:
            qab = options['qab']
            cloud_map = False

        else:
            qab = False
            cloud_map = options['clouds']

    elif options['prefix']:
        prefix = options['prefix']
        b10 = prefix + '10'
        b11 = prefix + '11'

        if not options['clouds']:
            qab = prefix + 'QA'
            cloud_map = False

        else:
            cloud_map = options['clouds']
            qab = False

    qapixel = options['qapixel']
    lst_output = options['lst']

    # save Brightness Temperature maps?
    global brightness_temperature_prefix
    if options['prefix_bt']:
        brightness_temperature_prefix = options['prefix_bt']
    else:
        brightness_temperature_prefix = None

    global cwv_output
    cwv_window_size = int(options['window'])
    assertion_for_cwv_window_size_msg = ('A spatial window smaller than 7x7 is '
                                         'not recommended. Please select a larger '
                                         "window. Refer to the manual's notes for details.")
    assert cwv_window_size >= 7, assertion_for_cwv_window_size_msg
    cwv_output = options['cwv']

    # optional maps
    average_emissivity_map = options['emissivity']
    delta_emissivity_map = options['delta_emissivity']

    # output for in-between maps?
    global emissivity_output, delta_emissivity_output
    emissivity_output = options['emissivity_out']
    delta_emissivity_output = options['delta_emissivity_out']

    global landcover_map, emissivity_class
    landcover_map = options['landcover']
    emissivity_class = options['emissivity_class']

    # flags
    global info, null
    info = flags['i']
    scene_extent = flags['e']
    timestamping = flags['t']
    null = flags['n']

    global rounding
    rounding = flags['r']

    global celsius
    celsius = flags['c']

    # ToDo:
    # shell = flags['g']

    #
    # Pre-production actions
    #

    # Set Region
    if scene_extent:
        grass.use_temp_region()  # safely modify the region
        msg = "\n|! Matching region extent to map {name}"

        # ToDo: check if extent-B10 == extent-B11? Unnecessary?
        # Improve below!

        if b10:
            run('g.region', rast=b10, align=b10)
            msg = msg.format(name=b10)

        elif t10:
            run('g.region', rast=t10, align=t10)
            msg = msg.format(name=t10)

        g.message(msg)

    else:
        grass.warning(_('Operating on current region'))

    #
    # 1. Mask clouds
    #

    if cloud_map:
        # user-fed cloud map?
        msg = '\n|i Using {cmap} as a MASK'.format(cmap=cloud_map)
        g.message(msg)
        r.mask(raster=cloud_map, flags='i', overwrite=True)

    else:
        # using the quality assessment band and a "QA" pixel value
        mask_clouds(qab, qapixel)

    #
    # 2. TIRS > Brightness Temperatures
    #

    if mtl_file:

        # if MTL and b10 given, use it to compute at-satellite temperature t10
        if b10:
            # convert DNs to at-satellite temperatures
            t10 = tirs_to_at_satellite_temperature(b10, mtl_file)

        # likewise for b11 -> t11
        if b11:
            # convert DNs to at-satellite temperatures
            t11 = tirs_to_at_satellite_temperature(b11, mtl_file)

    #
    # Initialise a SplitWindowLST object
    #

    split_window_lst = SplitWindowLST(emissivity_class)
    citation_lst = split_window_lst.citation

    #
    # 3. Land Surface Emissivities
    #

    # use given fixed class?
    if emissivity_class:

        if split_window_lst.landcover_class is False:
            # replace with meaningful error
            g.warning('Unknown land cover class string! Note, this string '
                      'input option is case sensitive.')

        if emissivity_class == 'Random':
            msg = "\n|! Random emissivity class selected > " + \
                split_window_lst.landcover_class + ' '

        else:
            msg = '\n|! Retrieving average emissivities *only* for {eclass} '

        if info:
            msg += '| Average emissivities (channels 10, 11): '
            msg += str(split_window_lst.emissivity_t10) + ', ' + \
                str(split_window_lst.emissivity_t11)

        msg = msg.format(eclass=split_window_lst.landcover_class)
        g.message(msg)

    # use the FROM-GLC map
    elif landcover_map:

        if average_emissivity_map:
            tmp_avg_lse = average_emissivity_map

        if not average_emissivity_map:
            determine_average_emissivity(tmp_avg_lse, landcover_map,
                                         split_window_lst.average_lse_mapcalc)
            if options['emissivity_out']:
                tmp_avg_lse = options['emissivity_out']

        if delta_emissivity_map:
            tmp_delta_lse = delta_emissivity_map

        if not delta_emissivity_map:
            determine_delta_emissivity(tmp_delta_lse, landcover_map,
                                       split_window_lst.delta_lse_mapcalc)
            if options['delta_emissivity_out']:
                tmp_delta_lse = options['delta_emissivity_out']

    #
    # 4. Modified Split-Window Variance-Covariance Matrix > Column Water Vapor
    #
    

    if info:
        msg = '\n|i Spatial window of size {n} for Column Water Vapor estimation'
        msg = msg.format(n=cwv_window_size)
        g.message(msg)

    cwv = Column_Water_Vapor(cwv_window_size, t10, t11)
    citation_cwv = cwv.citation
    estimate_cwv_big_expression(tmp_cwv, t10, t11, cwv._big_cwv_expression())
    if cwv_output:
        tmp_cwv = cwv_output

    #
    # 5. Estimate Land Surface Temperature
    #

    if info and emissivity_class == 'Random':
        msg = '\n|* Will pick a random emissivity class!'
        grass.verbose(msg)

    estimate_lst(lst_output, t10, t11,
                 tmp_avg_lse, tmp_delta_lse, tmp_cwv,
                 split_window_lst.sw_lst_mapcalc)

    #
    # Post-production actions
    #

    # remove MASK
    r.mask(flags='r', verbose=True)

    # time-stamping
    if timestamping:
        add_timestamp(mtl_file, lst_output)

        if cwv_output:
            add_timestamp(mtl_file, cwv_output)

    # Apply color table
    if celsius:
        run('r.colors', map=lst_output, color='celsius')
    else:
        # color table for kelvin
        run('r.colors', map=lst_output, color='kelvin')

    # ToDo: helper function for r.support
    # strings for metadata
    history_lst = '\n' + citation_lst
    history_lst += '\n\n' + citation_cwv
    history_lst += '\n\nSplit-Window model: '
    history_lst += split_window_lst._equation  # sw_lst_mapcalc
    description_lst = ('Land Surface Temperature derived from a split-window algorithm. ')

    if celsius:
        title_lst = 'Land Surface Temperature (C)'
        units_lst = 'Celsius'

    else:
        title_lst = 'Land Surface Temperature (K)'
        units_lst = 'Kelvin'

    landsat8_metadata = Landsat8_MTL(mtl_file)
    source1_lst = landsat8_metadata.scene_id
    source2_lst = landsat8_metadata.origin

    # history entry
    run("r.support", map=lst_output, title=title_lst,
        units=units_lst, description=description_lst,
        source1=source1_lst, source2=source2_lst,
        history=history_lst)

    # (re)name the LST product
    #run("g.rename", rast=(tmp_lst, lst_output))

    # restore region
    if scene_extent:
        grass.del_temp_region()  # restoring previous region settings
        g.message("|! Original Region restored")

    # print citation
    if info:
        print('\nSource: ' + citation_lst)
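
# --- Hedged sketch (added for illustration; not part of the module above) ---
# Shows how the module's "{result} = {expression}" template is meant to feed
# r.mapcalc: an expression (a placeholder here) is formatted into the
# template and executed. Map names and the expression body are assumptions;
# the real module builds the expression from SplitWindowLST.sw_lst_mapcalc.
import grass.script as grass

equation = "{result} = {expression}"

def run_mapcalc(result_map, expression):
    """Format the template and hand it to r.mapcalc."""
    grass.mapcalc(equation.format(result=result_map, expression=expression),
                  overwrite=True)

# placeholder expression averaging two brightness temperature maps
run_mapcalc("lst_demo", "0.5 * (t10_demo + t11_demo)")
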
Exemplo n.º 54
0
def main():

    # Temporary map names
    global tmp, t, mapset
    tmp = {}
    mapset = gscript.gisenv()["MAPSET"]
    mapset2 = "@{}".format(mapset)
    processid = os.getpid()
    processid = str(processid)
    tmp["shadow_temp"] = "shadow_temp" + processid
    tmp["cloud_v"] = "cloud_v_" + processid
    tmp["shadow_temp_v"] = "shadow_temp_v_" + processid
    tmp["shadow_temp_mask"] = "shadow_temp_mask_" + processid
    tmp["centroid"] = "centroid_" + processid
    tmp["dissolve"] = "dissolve_" + processid
    tmp["delcat"] = "delcat_" + processid
    tmp["addcat"] = "addcat_" + processid
    tmp["cl_shift"] = "cl_shift_" + processid
    tmp["overlay"] = "overlay_" + processid

    # Check temporary map names are not existing maps
    for key, value in tmp.items():
        if gscript.find_file(value, element="vector", mapset=mapset)["file"]:
            gscript.fatal(
                ("Temporary vector map <{}> already exists.").format(value))
        if gscript.find_file(value, element="cell", mapset=mapset)["file"]:
            gscript.fatal(
                ("Temporary raster map <{}> already exists.").format(value))

    # Input files
    mtd_file = options["mtd_file"]
    metadata_file = options["metadata"]
    bands = {}
    error_msg = "Syntax error in the txt file. See the manual for further information about the right syntax."
    if options["input_file"] == "":
        bands["blue"] = options["blue"]
        bands["green"] = options["green"]
        bands["red"] = options["red"]
        bands["nir"] = options["nir"]
        bands["nir8a"] = options["nir8a"]
        bands["swir11"] = options["swir11"]
        bands["swir12"] = options["swir12"]
    else:
        txt_bands = []
        with open(options["input_file"], "r") as input_file:
            for line in input_file:
                a = line.split("=")
                if len(a) != 2:
                    gscript.fatal(error_msg)
                elif a[0] == "MTD_TL.xml" and not mtd_file:
                    mtd_file = a[1].strip()
                elif a[0] == "metadata" and not metadata_file:
                    metadata_file = a[1].strip()
                elif a[0] in [
                        "blue",
                        "green",
                        "red",
                        "nir",
                        "nir8a",
                        "swir11",
                        "swir12",
                ]:
                    txt_bands.append(a[0])
                    bands[a[0]] = a[1].strip()
            if len(txt_bands) < 7:
                gscript.fatal((
                    "One or more bands are missing in the input text file.\n Only these bands have been found: {}"
                ).format(txt_bands))
            if mtd_file and metadata_file != "default":
                gscript.fatal((
                    "Both a metadata JSON file and an mtd_file are given in the input text file.\nOnly one of them should be specified."
                ))

    # we want cloud and shadows: check input and output for shadow mask
    if not flags["c"]:
        if mtd_file != "":
            if not os.path.isfile(mtd_file):
                gscript.fatal(
                    "Metadata file <{}> not found. Please select the right .xml file"
                    .format(mtd_file))
        elif metadata_file == "default":
            # use default json
            env = gscript.gisenv()
            json_standard_folder = os.path.join(env["GISDBASE"],
                                                env["LOCATION_NAME"],
                                                env["MAPSET"], "cell_misc")
            for key, value in bands.items():
                metadata_file = os.path.join(json_standard_folder, value,
                                             "description.json")
                if os.path.isfile(metadata_file):
                    break
                else:
                    metadata_file = None
            if not metadata_file:
                gscript.fatal(
                    "No default metadata files found. Did you use -j in i.sentinel.import?"
                )
        elif metadata_file:
            if not os.path.isfile(metadata_file):
                gscript.fatal(
                    "Metadata file <{}> not found. Please select the right file"
                    .format(metadata_file))
        else:
            gscript.fatal(
                "Metadata (file) is required for shadow mask computation. Please specify it"
            )

    d = "double"
    f_bands = {}
    scale_fac = options["scale_fac"]
    cloud_threshold = options["cloud_threshold"]
    shadow_threshold = options["shadow_threshold"]
    raster_max = {}
    check_cloud = 1  # by default the procedure finds clouds
    check_shadow = 1  # by default the procedure finds shadows

    if options["cloud_raster"]:
        cloud_raster = options["cloud_raster"]
    else:
        tmp["cloud_def"] = "cloud_def" + processid
        cloud_raster = tmp["cloud_def"]
    if options["cloud_mask"]:
        cloud_mask = options["cloud_mask"]
        if "." in options["cloud_mask"]:
            gscript.fatal("Name for cloud_mask output \
                           is not SQL compliant".format(options["cloud_mask"]))
    else:
        tmp["cloud_mask"] = "cloud_mask" + processid
        cloud_mask = tmp["cloud_mask"]
    if options["shadow_mask"]:
        shadow_mask = options["shadow_mask"]
        if "." in options["shadow_mask"]:
            gscript.fatal("Name for shadow_mask output \
                           is not SQL compliant".format(
                options["shadow_mask"]))
    else:
        tmp["shadow_mask"] = "shadow_mask" + processid
        shadow_mask = tmp["shadow_mask"]
    shadow_raster = options["shadow_raster"]

    # Check if all required input bands are specified in the text file
    if (bands["blue"] == "" or bands["green"] == "" or bands["red"] == ""
            or bands["nir"] == "" or bands["nir8a"] == ""
            or bands["swir11"] == "" or bands["swir12"] == ""):
        gscript.fatal(
            "All input bands (blue, green, red, nir, nir8a, swir11, swir12) are required"
        )

    # Check if input bands exist
    for key, value in bands.items():
        if not gscript.find_file(value, element="cell", mapset=mapset)["file"]:
            gscript.fatal(("Raster map <{}> not found.").format(value))

    if flags["r"]:
        gscript.use_temp_region()
        gscript.run_command("g.region", rast=bands.values(), flags="a")
        gscript.message(
            _("--- The computational region has been temporarily set to image max extent ---"
              ))
    else:
        gscript.warning(
            _("All subsequent operations will be limited to the current computational region"
              ))

    if flags["s"]:
        gscript.message(_("--- Start rescaling bands ---"))
        check_b = 0
        for key, b in bands.items():
            gscript.message(b)
            b = gscript.find_file(b, element="cell")["name"]
            tmp["band_double{}".format(check_b)] = "{}_{}".format(b, d)
            band_double = tmp["band_double{}".format(check_b)]
            gscript.mapcalc("{r} = 1.0 * ({b})/{scale_fac}".format(
                r=(band_double), b=b, scale_fac=scale_fac))
            f_bands[key] = band_double
            check_b += 1
        gscript.message(", ".join(f_bands.values()))
        gscript.message(_("--- All bands have been rescaled ---"))
    else:
        gscript.warning(_("No rescale factor has been applied"))
        for key, b in bands.items():
            if (gscript.raster_info(b)["datatype"] != "DCELL"
                    and gscript.raster_info(b)["datatype"] != "FCELL"):
                gscript.fatal("Raster maps must be DCELL o FCELL")
            else:
                f_bands = bands

    gscript.message(_("--- Start computing maximum values of bands ---"))
    for key, fb in f_bands.items():
        gscript.message(fb)
        stats = gscript.parse_command("r.univar", flags="g", map=fb)
        raster_max[key] = float(stats["max"])
    gscript.message("--- Computed maximum value: {} ---".format(
        raster_max.values()))
    gscript.message(_("--- Statistics have been computed! ---"))

    # Start of Clouds detection  (some rules from litterature)
    gscript.message(_("--- Start clouds detection procedure ---"))
    gscript.message(_("--- Computing cloud mask... ---"))
    first_rule = "(({} > (0.08*{})) && ({} > (0.08*{})) && ({} > (0.08*{})))".format(
        f_bands["blue"],
        raster_max["blue"],
        f_bands["green"],
        raster_max["green"],
        f_bands["red"],
        raster_max["red"],
    )
    second_rule = "(({} < ((0.08*{})*1.5)) && ({} > {}*1.3))".format(
        f_bands["red"], raster_max["red"], f_bands["red"], f_bands["swir12"])
    third_rule = "(({} < (0.1*{})) && ({} < (0.1*{})))".format(
        f_bands["swir11"], raster_max["swir11"], f_bands["swir12"],
        raster_max["swir12"])
    fourth_rule = "(if({} == max({}, 2 * {}, 2 * {}, 2 * {})))".format(
        f_bands["nir8a"],
        f_bands["nir8a"],
        f_bands["blue"],
        f_bands["green"],
        f_bands["red"],
    )
    fifth_rule = "({} > 0.2)".format(f_bands["blue"])
    cloud_rules = (
        "({} == 1) && ({} == 0) && ({} == 0) && ({} == 0) && ({} == 1)".format(
            first_rule, second_rule, third_rule, fourth_rule, fifth_rule))
    expr_c = "{} = if({}, 0, null())".format(cloud_raster, cloud_rules)
    gscript.mapcalc(expr_c, overwrite=True)
    gscript.message(_("--- Converting raster cloud mask into vector map ---"))
    gscript.run_command("r.to.vect",
                        input=cloud_raster,
                        output=tmp["cloud_v"],
                        type="area",
                        flags="s")
    info_c = gscript.parse_command("v.info", map=tmp["cloud_v"], flags="t")
    if info_c["areas"] == "0":
        gscript.warning(_("No clouds have been detected"))
        check_cloud = 0
    else:
        gscript.message(_("--- Cleaning geometries ---"))
        gscript.run_command(
            "v.clean",
            input=tmp["cloud_v"],
            output=cloud_mask,
            tool="rmarea",
            threshold=cloud_threshold,
        )
        info_c_clean = gscript.parse_command("v.info",
                                             map=cloud_mask,
                                             flags="t")
        if info_c_clean["areas"] == "0":
            gscript.warning(_("No clouds have been detected"))
            check_cloud = 0
        else:
            check_cloud = 1
    gscript.message(_("--- Finish cloud detection procedure ---"))
    # End of Clouds detection

    if options["shadow_mask"] or options["shadow_raster"]:
        # Start of shadows detection
        gscript.message(_("--- Start shadows detection procedure ---"))
        gscript.message(_("--- Computing shadow mask... ---"))
        sixth_rule = "((({} > {}) && ({} < {}) && ({} < 0.1) && ({} < 0.1)) \
        || (({} < {}) && ({} < {}) && ({} < 0.1) && ({} < 0.1) && ({} < 0.1)))".format(
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["nir"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["nir"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["nir"],
        )
        seventh_rule = "({} - {})".format(f_bands["green"], f_bands["blue"])
        shadow_rules = "(({} == 1) && ({} < 0.007))".format(
            sixth_rule, seventh_rule)
        expr_s = "{} = if({}, 0, null())".format(tmp["shadow_temp"],
                                                 shadow_rules)
        gscript.mapcalc(expr_s, overwrite=True)
        gscript.message(
            _("--- Converting raster shadow mask into vector map ---"))
        gscript.run_command(
            "r.to.vect",
            input=tmp["shadow_temp"],
            output=tmp["shadow_temp_v"],
            type="area",
            flags="s",
            overwrite=True,
        )
        info_s = gscript.parse_command("v.info",
                                       map=tmp["shadow_temp_v"],
                                       flags="t")
        if info_s["areas"] == "0":
            gscript.warning(_("No shadows have been detected"))
            check_shadow = 0
        else:
            gscript.message(_("--- Cleaning geometries ---"))
            gscript.run_command(
                "v.clean",
                input=tmp["shadow_temp_v"],
                output=tmp["shadow_temp_mask"],
                tool="rmarea",
                threshold=shadow_threshold,
            )
            info_s_clean = gscript.parse_command("v.info",
                                                 map=tmp["shadow_temp_mask"],
                                                 flags="t")
            if info_s_clean["areas"] == "0":
                gscript.warning(_("No shadows have been detected"))
                check_shadow = 0
            else:
                check_shadow = 1
            gscript.message(_("--- Finish Shadows detection procedure ---"))
            # End of shadows detection

            # START shadows cleaning Procedure (remove shadows misclassification)
            # Start shadow mask preparation
            if check_shadow == 1 and check_cloud == 1:
                gscript.message(
                    _("--- Start removing misclassification from the shadow mask ---"
                      ))
                gscript.message(_("--- Data preparation... ---"))
                gscript.run_command(
                    "v.centroids",
                    input=tmp["shadow_temp_mask"],
                    output=tmp["centroid"],
                    quiet=True,
                )
                gscript.run_command("v.db.droptable",
                                    map=tmp["centroid"],
                                    flags="f",
                                    quiet=True)
                gscript.run_command("v.db.addtable",
                                    map=tmp["centroid"],
                                    columns="value",
                                    quiet=True)
                gscript.run_command(
                    "v.db.update",
                    map=tmp["centroid"],
                    layer=1,
                    column="value",
                    value=1,
                    quiet=True,
                )
                gscript.run_command(
                    "v.dissolve",
                    input=tmp["centroid"],
                    column="value",
                    output=tmp["dissolve"],
                    quiet=True,
                )
                gscript.run_command(
                    "v.category",
                    input=tmp["dissolve"],
                    type="point,line,boundary,centroid,area,face,kernel",
                    output=tmp["delcat"],
                    option="del",
                    cat=-1,
                    quiet=True,
                )
                gscript.run_command(
                    "v.category",
                    input=tmp["delcat"],
                    type="centroid,area",
                    output=tmp["addcat"],
                    option="add",
                    quiet=True,
                )
                gscript.run_command("v.db.droptable",
                                    map=tmp["addcat"],
                                    flags="f",
                                    quiet=True)
                gscript.run_command("v.db.addtable",
                                    map=tmp["addcat"],
                                    columns="value",
                                    quiet=True)

                # End shadow mask preparation
                # Start cloud mask preparation

                gscript.run_command("v.db.droptable",
                                    map=cloud_mask,
                                    flags="f",
                                    quiet=True)
                gscript.run_command("v.db.addtable",
                                    map=cloud_mask,
                                    columns="value",
                                    quiet=True)

                # End cloud mask preparation
                # Shift cloud mask using dE e dN
                # Start reading mean sun zenith and azimuth from xml file to compute
                # dE and dN automatically
                gscript.message(
                    _("--- Reading mean sun zenith and azimuth from metadata file to compute clouds shift ---"
                      ))
                if mtd_file != "":
                    try:
                        xml_tree = et.parse(mtd_file)
                        root = xml_tree.getroot()
                        ZA = []
                        try:
                            for elem in root[1]:
                                for subelem in elem[1]:
                                    ZA.append(subelem.text)
                            if ZA == ["0", "0"]:
                                zenith_val = (root[1].find("Tile_Angles").find(
                                    "Sun_Angles_Grid").find("Zenith").find(
                                        "Values_List"))
                                ZA[0] = numpy.mean([
                                    numpy.array(elem.text.split(" "),
                                                dtype=numpy.float)
                                    for elem in zenith_val
                                ])
                                azimuth_val = (
                                    root[1].find("Tile_Angles").find(
                                        "Sun_Angles_Grid").find(
                                            "Azimuth").find("Values_List"))
                                ZA[1] = numpy.mean([
                                    numpy.array(elem.text.split(" "),
                                                dtype=numpy.float)
                                    for elem in azimuth_val
                                ])
                            z = float(ZA[0])
                            a = float(ZA[1])
                            gscript.message(
                                "--- the mean sun Zenith is: {:.3f} deg ---".
                                format(z))
                            gscript.message(
                                "--- the mean sun Azimuth is: {:.3f} deg ---".
                                format(a))
                        except:
                            gscript.fatal(
                                "The selected input metadata file is not the right one. Please check the manual page."
                            )
                    except:
                        gscript.fatal(
                            "The selected input metadata file is not an .xml file. Please check the manual page."
                        )
                elif metadata_file != "":
                    with open(metadata_file) as json_file:
                        data = json.load(json_file)
                    z = float(data["MEAN_SUN_ZENITH_ANGLE"])
                    a = float(data["MEAN_SUN_AZIMUTH_ANGLE"])

                # Stop reading mean sun zenith and azimuth from xml file to compute dE
                # and dN automatically
                # Start computing the east and north shift for clouds and the
                # overlapping area between clouds and shadows at steps of 100m
                gscript.message(
                    _("--- Start computing the east and north clouds shift at steps of 100m of clouds height---"
                      ))
                H = 1000
                dH = 100
                HH = []
                dE = []
                dN = []
                AA = []
                while H <= 4000:
                    z_deg_to_rad = math.radians(z)
                    tan_Z = math.tan(z_deg_to_rad)
                    a_deg_to_rad = math.radians(a)
                    cos_A = math.cos(a_deg_to_rad)
                    sin_A = math.sin(a_deg_to_rad)

                    E_shift = -H * tan_Z * sin_A
                    N_shift = -H * tan_Z * cos_A
                    dE.append(E_shift)
                    dN.append(N_shift)

                    HH.append(H)
                    H = H + dH

                    gscript.run_command(
                        "v.transform",
                        input=cloud_mask,
                        output=tmp["cl_shift"],
                        xshift=E_shift,
                        yshift=N_shift,
                        overwrite=True,
                        quiet=True,
                        stderr=subprocess.DEVNULL,
                    )
                    gscript.run_command(
                        "v.overlay",
                        ainput=tmp["addcat"],
                        binput=tmp["cl_shift"],
                        operator="and",
                        output=tmp["overlay"],
                        overwrite=True,
                        quiet=True,
                        stderr=subprocess.DEVNULL,
                    )
                    gscript.run_command(
                        "v.db.addcolumn",
                        map=tmp["overlay"],
                        columns="area double",
                        quiet=True,
                    )
                    area = gscript.read_command(
                        "v.to.db",
                        map=tmp["overlay"],
                        option="area",
                        columns="area",
                        flags="c",
                        quiet=True,
                    )
                    area2 = gscript.parse_key_val(area, sep="|")
                    AA.append(float(area2["total area"]))

                # Find the maximum overlapping area between clouds and shadows
                index_maxAA = numpy.argmax(AA)

                # Clouds are shifted using the clouds height corresponding to the
                # maximum overlapping area then are intersected with shadows
                gscript.run_command(
                    "v.transform",
                    input=cloud_mask,
                    output=tmp["cl_shift"],
                    xshift=dE[index_maxAA],
                    yshift=dN[index_maxAA],
                    overwrite=True,
                    quiet=True,
                )
                gscript.run_command(
                    "v.select",
                    ainput=tmp["addcat"],
                    atype="point,line,boundary,centroid,area",
                    binput=tmp["cl_shift"],
                    btype="point,line,boundary,centroid,area",
                    output=shadow_mask,
                    operator="intersects",
                    quiet=True,
                )
                if gscript.find_file(name=shadow_mask,
                                     element="vector")["file"]:
                    info_cm = gscript.parse_command("v.info",
                                                    map=shadow_mask,
                                                    flags="t")
                else:
                    info_cm = None
                    gscript.warning(_("No cloud shadows detected"))

                if options["shadow_raster"] and info_cm:
                    if info_cm["areas"] > "0":
                        gscript.run_command(
                            "v.to.rast",
                            input=tmp["shadow_temp_mask"],
                            output=shadow_raster,
                            use="val",
                        )
                    else:
                        gscript.warning(_("No cloud shadows detected"))

                gscript.message(
                    "--- the estimated clouds height is: {} m ---".format(
                        HH[index_maxAA]))
                gscript.message(
                    "--- the estimated east shift is: {:.2f} m ---".format(
                        dE[index_maxAA]))
                gscript.message(
                    "--- the estimated north shift is: {:.2f} m ---".format(
                        dN[index_maxAA]))
            else:
                if options["shadow_raster"]:
                    gscript.run_command(
                        "v.to.rast",
                        input=tmp["shadow_temp_mask"],
                        output=shadow_raster,
                        use="val",
                    )
                if options["shadow_mask"]:
                    gscript.run_command("g.rename",
                                        vector=(tmp["shadow_temp_mask"],
                                                shadow_mask))
                gscript.warning(
                    _("The removing misclassification procedure from shadow mask was not performed since no cloud have been detected"
                      ))
    else:
        if shadow_mask != "":
            gscript.warning(_("No shadow mask will be computed"))
Exemplo n.º 55
0
def main():

    # Get the options
    input = options["input"]
    output = options["output"]

    # Make sure the temporal database exists
    tgis.init()

    mapset = grass.gisenv()["MAPSET"]

    sp = tgis.open_old_stds(input, "strds")

    grass.use_temp_region()

    maps = sp.get_registered_maps_as_objects_by_granularity()
    num_maps = len(maps)
    # get datatype of the first map
    if maps:
        maps[0][0].select()
        datatype = maps[0][0].metadata.get_datatype()
    else:
        datatype = None

    # Get the granularity and set bottom, top and top-bottom resolution
    granularity = sp.get_granularity()

    # This is the reference time to scale the z coordinate
    reftime = datetime(1900, 1, 1)

    # We set top and bottom according to the start time in relation
    # to the date 1900-01-01 00:00:00
    # In case of days, hours, minutes and seconds, a double number
    # is used to represent days and fracs of a day

    # Space time voxel cubes with monthly or yearly granularity cannot be
    # mixed with other temporal units

    # Compatible temporal units are: days, hours, minutes and seconds
    # Incompatible are years and months
    start, end = sp.get_temporal_extent_as_tuple()

    if sp.is_time_absolute():
        unit = granularity.split(" ")[1]
        granularity = float(granularity.split(" ")[0])

        print "Gran from stds %0.15f"%(granularity)

        if unit == "years" or unit == "year":
            bottom = float(start.year - 1900)
            top = float(granularity * num_maps)
        elif unit == "months" or unit == "month":
            bottom = float((start.year - 1900) * 12 + start.month)
            top = float(granularity * num_maps)
        else:
            bottom = float(tgis.time_delta_to_relative_time(start - reftime))
            days = 0.0
            hours = 0.0
            minutes = 0.0
            seconds = 0.0
            if unit == "days" or unit == "day":
                days = float(granularity)
            if unit == "hours" or unit == "hour":
                hours = float(granularity)
            if unit == "minutes" or unit == "minute":
                minutes = float(granularity)
            if unit == "seconds" or unit == "second":
                seconds = float(granularity)

            granularity = float(days + hours / 24.0 + minutes / 1440.0 +
                                seconds / 86400.0)
    else:
        unit = sp.get_relative_time_unit()
        bottom = start

    top = float(bottom + granularity * float(num_maps))
    try:
        grass.run_command("g.region", t=top, b=bottom, tbres=granularity)
    except CalledModuleError:
        grass.fatal(_("Unable to set 3D region"))

    # Create a NULL map to fill the gaps
    null_map = "temporary_null_map_%i" % os.getpid()
    if datatype == 'DCELL':
        grass.mapcalc("%s = double(null())" % (null_map))
    elif datatype == 'FCELL':
        grass.mapcalc("%s = float(null())" % (null_map))
    else:
        grass.mapcalc("%s = null()" % (null_map))

    if maps:
        count = 0
        map_names = ""
        for map in maps:
            # Use the first map
            id = map[0].get_id()
            # None ids will be replaced by NULL maps
            if id is None:
                id = null_map

            if count == 0:
                map_names = id
            else:
                map_names += ",%s" % id

            count += 1

        try:
            grass.run_command("r.to.rast3", input=map_names,
                              output=output, overwrite=grass.overwrite())
        except CalledModuleError:
            grass.fatal(_("Unable to create 3D raster map <%s>" % output))

    grass.run_command("g.remove", flags='f', type='raster', name=null_map)

    title = _("Space time voxel cube")
    descr = _("This space time voxel cube was created with t.rast.to.rast3")

    # Set the unit
    try:
        grass.run_command("r3.support", map=output, vunit=unit,
                          title=title, description=descr,
                          overwrite=grass.overwrite())
    except CalledModuleError:
        grass.warning(_("%s failed to set units.") % 'r3.support')

    # Register the space time voxel cube in the temporal GIS
    if output.find("@") >= 0:
        id = output
    else:
        id = output + "@" + mapset

    start, end = sp.get_temporal_extent_as_tuple()
    r3ds = tgis.Raster3DDataset(id)

    if r3ds.is_in_db():
        r3ds.select()
        r3ds.delete()
        r3ds = tgis.Raster3DDataset(id)

    r3ds.load()

    if sp.is_time_absolute():
        r3ds.set_absolute_time(start, end)
    else:
        r3ds.set_relative_time(start, end, sp.get_relative_time_unit())

    r3ds.insert()
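
# --- Hedged sketch (added for illustration; not part of the module above) ---
# Shows how an absolute-time granularity such as "6 hours" maps to the
# z-axis of the voxel cube built above: bottom is the start time in days
# since 1900-01-01 and tbres is the granularity as a fraction of a day.
# The input values are assumptions for demonstration only.
from datetime import datetime

REFTIME = datetime(1900, 1, 1)

def z_extent(start, gran_value, gran_unit, num_maps):
    """Return (bottom, top, tbres) for g.region t=... b=... tbres=..."""
    per_day = {"days": 1.0, "hours": 24.0,
               "minutes": 1440.0, "seconds": 86400.0}
    tbres = gran_value / per_day[gran_unit]
    bottom = (start - REFTIME).total_seconds() / 86400.0
    top = bottom + tbres * num_maps
    return bottom, top, tbres

bottom, top, tbres = z_extent(datetime(2020, 1, 1), 6, "hours", 10)
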
Exemplo n.º 56
0
def main():
    """Do the main work"""

    alias_output = options["alias_output"]

    bgr_mask = options["bgr_mask"]

    null_value = options["null_value"]

    bgr_output = options["bgr_output"]
    species_output = options["species_output"]

    alias, parameters = parse_bgr_input(
        options["alias_input"], options["env_maps"], options["alias_names"]
    )

    species_dict = parse_species_input(
        options["species_masks"], options["species_names"]
    )

    # Check if a mask file already exists
    if RasterRow("MASK", Mapset().name).exist():
        gscript.verbose(
            _("A mask allready exists. Renaming existing mask to old_MASK...")
        )
        gscript.run_command(
            "g.rename", rast="MASK,{}_MASK".format(TMP_NAME), quiet=True
        )

    # Build parameter header if necessary
    header = ",".join(alias)

    # Write alias output if requested
    if alias_output:
        with open(alias_output, "w") as alias_out:
            for idx, name in enumerate(alias):
                alias_out.write("{},{}\n".format(name, parameters[idx]))

    # Check if species output is requested and produce it
    if species_output and species_dict:
        # Write header to species output SWD file
        species_header = "species,X,Y,{}\n".format(header)

        with open(species_output, "w") as sp_out:
            sp_out.write(species_header)

        # Parse species input variables
        for species in species_dict:

            species_map = species_dict[species]
            # Zoom region to match species map if requested
            if flags["z"]:
                gscript.verbose(
                    _("Zooming region to species {} temporarily.".format(species))
                )
                gscript.use_temp_region()
                gscript.run_command(
                    "g.region", align="@".join(species_map), zoom="@".join(species_map)
                )
            #
            # Apply specie mask
            gscript.run_command(
                "r.mask", raster="@".join(species_map), overwrite=True, quiet=True
            )

            # Export data using r.stats
            gscript.verbose(_("Producing output for species {}".format(species)))
            stats = gscript.pipe_command(
                "r.stats",
                flags="1gN",
                verbose=True,
                input=",".join(parameters),
                separator=",",
                null_value=null_value,
            )

            with open(species_output, "a") as sp_out:
                for row in stats.stdout:
                    sp_out.write("{},{}".format(species, gscript.decode(row)))

            # Restore the region if zooming to the species map was requested
            if flags["z"]:
                gscript.del_temp_region()
            # Remove mask
            gscript.run_command("r.mask", flags="r", quiet=True)

    # Write header to background output SWD file
    bgr_header = "bgr,X,Y,{}\n".format(",".join(alias))

    with open(bgr_output, "w") as bgr_out:
        bgr_out.write(bgr_header)

    # Process map data for background
    # Apply background mask if requested
    if bgr_mask:
        gscript.verbose(
            _("Using map {} as mask for the background landscape...".format(bgr_mask))
        )
        # Apply mask
        gscript.run_command("r.mask", raster=bgr_mask, overwrite=True, quiet=True)
    #
    # Export data using r.stats
    gscript.verbose(_("Producing output for background landscape"))
    stats = gscript.pipe_command(
        "r.stats",
        flags="1gN",
        input=",".join(parameters),
        separator=",",
        null_value=null_value,
    )

    with open(bgr_output, "a") as bgr_out:
        for row in stats.stdout:
            bgr_out.write("bgr,{}".format(gscript.decode(row)))

    cleanup()
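
# --- Hedged sketch (added for illustration; not part of the module above) ---
# Minimal illustration of the SWD (samples-with-data) layout written above:
# a header "<label>,X,Y,<alias list>" followed by one row per cell, where
# each row mimics an "X,Y,value,..." line as emitted by r.stats -1gN.
# File name, aliases and coordinates are made-up demonstration values.
def write_swd(path, label, aliases, rows):
    with open(path, "w") as out:
        out.write("{},X,Y,{}\n".format(label, ",".join(aliases)))
        for xyvals in rows:
            out.write("{},{}\n".format(label, ",".join(map(str, xyvals))))

write_swd("demo.swd", "bgr", ["bio1", "bio12"],
          [(630500.0, 220500.0, 7.4, 812.0),
           (630530.0, 220500.0, 7.6, 798.0)])
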
Exemplo n.º 57
0
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
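        # each dataset tuple below holds the native resolution as
        # (degrees, meters, feet); the matching value is picked later
        # from the current location's units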
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': ['1 x 1 degree', '15 x 15 minute'],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011':
                (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness', 'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)
            },
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extension = nav_string['extension']
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']
    memory = options['memory']
    nprocs = options['nprocs']

    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(
            _("The default resampling method for product {product} is {res}").
            format(product=gui_product, res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    min_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['w'], gregion['s']),
                                      proj_out=product_proj4,
                                      separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['e'], gregion['n']),
                                      proj_out=product_proj4,
                                      separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))
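    # str_bbox is now "min_x,min_y,max_x,max_y" in the product SRS,
    # the form used for the TNM API bbox parameter below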

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_message = _(
        "Possibly, the query has timed out. Check network configuration and try again."
    )
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(
            _("HTTP(S) error from USGS TNM API:"
              " {code}: {reason} ({instructions})").format(
                  reason=error.reason,
                  code=error.code,
                  instructions=try_again_message))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(
            _("Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_message))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)

    except (ValueError, KeyError):
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir,
                                              ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size -
                       TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(
            _("TNM API ERROR or Zero tiles available for given input parameters."
              ))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: simply continue with whatever is needed to be done in this case
    if cleanup_list:
        cleanup_msg = _(
            "\n{0} existing incomplete file(s) detected and removed. Run module again."
        ).format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # format combined download size from bytes into human-readable units
    if dwnld_size:
        total_size = sum(dwnld_size)
        if total_size >= 1e9:
            total_size_str = "{0:.2f} GB".format(total_size * 1e-9)
        elif total_size >= 1e6:
            total_size_str = "{0:.2f} MB".format(total_size * 1e-6)
        else:
            # fall back to kB so small downloads still get a size string
            total_size_str = "{0:.2f} kB".format(total_size * 1e-3)
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(
                        gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(
            _("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(
                _("USGS download request has timed out. Network or formatting error."
                  ))
        except Exception:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # set already-downloaded zip files or tiles to be extracted or imported;
    # pre-extraction stats are unreliable, so stats are collected during extraction
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.endswith(product_extension):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, extract again
                                    if os.path.getmtime(
                                            extracted_tile) < os.path.getmtime(
                                                z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(
                    _("Unable to locate or extract IMG file '{filename}'"
                      " from ZIP archive '{zipname}': {error}").format(
                          filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(
            _("Extracted {extracted} new tiles and"
              " used {used} existing tiles").format(
                  used=used_existing_extracted_tiles_num,
                  extracted=extracted_tiles_num))
        if old_extracted_tiles_num:
            gscript.verbose(
                _("Found {removed} existing tiles older"
                  " than the corresponding downloaded archive").format(
                      removed=old_extracted_tiles_num))
        if removed_extracted_tiles_num:
            gscript.verbose(
                _("Removed {removed} existing tiles").format(
                    removed=removed_extracted_tiles_num))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is the naming of the maps (it is a smaller
    # assumption for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    def run_file_import(identifier, results, input, output, resolution,
                        resolution_value, extent, resample, memory):
        result = {}
        try:
            gscript.run_command('r.import',
                                input=input,
                                output=output,
                                resolution=resolution,
                                resolution_value=resolution_value,
                                extent=extent,
                                resample=resample,
                                memory=memory)
        except CalledModuleError:
            error = ("Unable to import <{0}>").format(output)
            result["errors"] = error
        else:
            result["output"] = output
        results[identifier] = result

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists(
                    "raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
                continue
            in_info = _("Importing and reprojecting {name}"
                        " ({count} out of {total})...").format(
                            name=LT_file_name,
                            count=i + 1,
                            total=files_to_import)
            gscript.info(in_info)

            process_count += 1
            process = Process(name="Import-{}-{}-{}".format(
                process_count, i, LT_layer_name),
                              target=run_file_import,
                              kwargs=dict(identifier=i,
                                          results=results,
                                          input=t,
                                          output=LT_layer_name,
                                          resolution='value',
                                          resolution_value=product_resolution,
                                          extent="region",
                                          resample=product_interpolation,
                                          memory=memory))
            process.start()
            process_list.append(process)
            process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(
                            _("Parallel import and reprojection failed."
                              " Try running with nprocs=1."))
                    else:
                        gscript.fatal(
                            _("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0

    gscript.verbose(
        _("Imported {imported} new tiles and"
          " used {used} existing tiles").format(
              used=used_existing_imported_tiles_num,
              imported=imported_tiles_num))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region',
                                        res=product_resolution,
                                        flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [
                            name + '.' + i for name in patch_names
                        ]
                        output = gui_output_layer + '.' + i
                        gscript.run_command('r.patch',
                                            input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                else:
                    gscript.run_command('r.patch',
                                        input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added"
                            ).format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if not -k flag
                if not preserve_imported_tiles:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [
                                name + '.' + i for name in patch_names
                            ]
                            gscript.run_command('g.remove',
                                                type='raster',
                                                name=patch_names_i,
                                                flags='f')
                    else:
                        gscript.run_command('g.remove',
                                            type='raster',
                                            name=patch_names,
                                            flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename',
                                        raster=(patch_names[0] + '.' + i,
                                                gui_output_layer + '.' + i))
            else:
                gscript.run_command('g.rename',
                                    raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(
                _("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(
            _("Error in getting or importing the data (see above). Please retry."
              ))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = (
            "<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors',
                            map=gui_output_layer,
                            color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite',
                            red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2',
                            blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
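
The import loop above bounds concurrency by starting at most nprocs worker
processes and joining the whole batch before dispatching more, with per-tile
results shared through a multiprocessing Manager dict. A minimal standalone
sketch of the same pattern, where import_one() is a hypothetical stand-in for
the r.import call:

from multiprocessing import Manager, Process

def import_one(identifier, results, path):
    # hypothetical stand-in for the real import work
    results[identifier] = {"output": path}

def run_bounded(paths, nprocs):
    with Manager() as manager:
        results = manager.dict()
        batch = []
        for i, path in enumerate(paths):
            process = Process(target=import_one, args=(i, results, path))
            process.start()
            batch.append(process)
            # drain the batch once nprocs workers are in flight,
            # or when the last item has been dispatched
            if len(batch) == nprocs or i == len(paths) - 1:
                for process in batch:
                    process.join()
                batch = []
        return dict(results)

if __name__ == "__main__":
    print(run_bounded(["a.tif", "b.tif", "c.tif"], nprocs=2))
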
Exemplo n.º 58
0
def compute(pnt, dem, obs_heigh, maxdist, hcurv, downward, oradius, i, nprocs, obsabselev, memory):
    try:
        #the following lines set a delay between one process and the others
        starting.acquire() # no other process can acquire it until it is released
        threading.Timer(0.1, starting.release).start() # release after 0.1 seconds
        #using temporary regions (on the same mapset) for the different parallel computations
        gscript.use_temp_region()
        #extracting a point from the map of the locations
        Module("v.extract", input=pnt, output="zzpnt"+i, cats=i, flags="t", overwrite=True, quiet=True)
        #getting coordinates of the point location
        coords=Module("v.to.db", flags="p", map="zzpnt"+i, type="point", option="coor", separator="|", stdout_=PIPE)
        coords=coords.outputs.stdout.splitlines()[1:][0]
        x=float(coords.split("|")[1])
        y=float(coords.split("|")[2])
        z=float(coords.split("|")[3])
        coords=str(x)+","+str(y)
        #get elevation of the terrain at the point location
        querydem=Module("r.what", coordinates=coords.split(), map=dem, stdout_=PIPE)
        obselev=float(querydem.outputs.stdout.split("|")[3])
        #setting the working region around the point location
        Module("g.region", vector="zzpnt"+i)
        region = grasscore.region()
        E = region['e']
        W = region['w']
        N = region['n']
        S = region['s']
        #Module("g.region", flags="a", e=E+maxdist, w=W-maxdist, s=S-maxdist, n=N+maxdist) 
        Module("g.region", align=dem, e=E+maxdist, w=W-maxdist, s=S-maxdist, n=N+maxdist)
        #the radius of the object for which the solid angle is calculated in each pixel is either half the resolution or a user-set value
        if oradius == 0: 
            circle_radius=region['nsres']/2
        else:
            circle_radius=oradius
        #Executing viewshed analysis
        if obsabselev:
            relative_height = z - obselev
            if hcurv:
                Module("r.viewshed", input=dem, output="zzview"+i, coordinates=coords.split(), memory=memory, observer_elevation=relative_height, max_distance=maxdist,  flags="c", overwrite=True, quiet=True)
            else:
                Module("r.viewshed", input=dem, output="zzview"+i, coordinates=coords.split(), memory=memory, observer_elevation=relative_height, max_distance=maxdist, overwrite=True, quiet=True)                
            if downward:
                #Neither UAVs nor satellites are expected to see above their own level (they only look toward the ground), so vertical angles above 90 are set to null.
                Module("r.mapcalc", expression="zzview{I} = if(zzview{I}>90 && zzview{I}<180,null(),zzview{I})".format(I=i), overwrite=True, quiet=True)
        else:
            if hcurv:
                Module("r.viewshed", input=dem, output="zzview"+i, coordinates=coords.split(), memory=memory, observer_elevation=obs_heigh, max_distance=maxdist, flags="c", overwrite=True, quiet=True)
            else:
                Module("r.viewshed", input=dem, output="zzview"+i, coordinates=coords.split(), memory=memory, observer_elevation=obs_heigh, max_distance=maxdist, overwrite=True, quiet=True)
        #Since r.viewshed sets the output visibility cell under the point to 180, that cell is reset to 0
        Module("r.mapcalc",expression="zzview{I} = if(zzview{I}==180,0,zzview{I})".format(I=i), overwrite=True, quiet=True)
        #estimating the layer of the horizontal angle between point and each visible cell (angle of the horizontal line of sight)         
        Module("r.mapcalc", expression="{A} = \
            if( y()>{py} && x()>{px}, atan(({px}-x())/({py}-y())),  \
            if( y()<{py} && x()>{px}, 180+atan(({px}-x())/({py}-y())),  \
            if( y()<{py} && x()<{px}, 180+atan(({px}-x())/({py}-y())),  \
            if( y()>{py} && x()<{px}, 360+atan(({px}-x())/({py}-y())), \
            if( y()=={py} && x()>{px}, 90, \
            if( y()<{py} && x()=={px}, 180, \
            if( y()=={py} && x()<{px}, 270, \
            if( y()>{py} && x()=={px}, 0 \
            ) ) ) ) ) ) ) )".format(A='zzview_angle'+i,py=y, px=x), overwrite=True, quiet=True)
        #estimating the layer of the vertical angle between point and each visible cell (angle of the vertical line of sight)
        Module("r.mapcalc", expression="zzview90_{I} = zzview{I} - 90".format(I=i), overwrite=True, quiet=True)
        #evaluate the vertical component of the versor oriented along the line of sight         
        Module("r.mapcalc", expression="zzc_view{I} = sin(zzview90_{I})".format(I=i), overwrite=True, quiet=True)
        #evaluate the northern component of the versor oriented along the line of sight  
        Module("r.mapcalc", expression="zzb_view{I} = cos(zzview90_{I})*cos(zzview_angle{I})".format(I=i), overwrite=True, quiet=True)
        #evaluate the eastern component of the versor oriented along the line of sight  
        Module("r.mapcalc", expression="zza_view{I} = cos(zzview90_{I})*sin(zzview_angle{I})".format(I=i), overwrite=True, quiet=True)    
        #estimate the three-dimensional distance between the point and each visible cell
        if obsabselev:
            Module("r.mapcalc", expression="{D} = pow(pow(abs(y()-{py}),2)+pow(abs(x()-{px}),2)+pow(abs({dtm}-{Z}),2),0.5)".format(D='zzdistance'+i, dtm=dem, Z=z, py=y, px=x), overwrite=True, quiet=True)
        else:
            Module("r.mapcalc", expression="{D} = pow(pow(abs(y()-{py}),2)+pow(abs(x()-{px}),2)+pow(abs({dtm}-({obs}+{obs_h})),2),0.5)".format(D='zzdistance'+i, dtm=dem, obs=obselev, obs_h=obs_heigh, py=y, px=x), overwrite=True, quiet=True)
        
        #estimating the layer of the angle between the versor of the terrain and the line of sight
        Module("r.mapcalc", expression="zzangle{I} = acos((zza_view{I}*zza_dem+zzb_view{I}*zzb_dem+zzc_view{I}*zzc_dem)/(sqrt(zza_view{I}*zza_view{I}+zzb_view{I}*zzb_view{I}+zzc_view{I}*zzc_view{I})*sqrt(zza_dem*zza_dem+zzb_dem*zzb_dem+zzc_dem*zzc_dem)))".format(I=i), overwrite=True, quiet=True)
        #in rare cases the angles may erroneously result in values less than 90; set them to 90
        Module("r.mapcalc", expression="zzangle{I} = if(zzangle{I} > 90, zzangle{I}, 90)".format(I=i), overwrite=True, quiet=True) 
        #filtering 3d distance based on angle{I} map
        Module("r.mapcalc", expression="{D} = if(isnull(zzangle{I}),null(),{D})".format(D="zzdistance"+str(i),I=i), overwrite=True, quiet=True)
        #calculating H1 and H2, the distances from the observer to the more distant and less distant points of the inclined circle representing the pixel
        Module("r.mapcalc", expression="zzH1_{I} = pow(pow({r},2)+pow({d},2)-(2*{r}*{d}*cos(270-zzangle{I})),0.5)".format(r=circle_radius,d="zzdistance"+str(i),I=i), overwrite=True, quiet=True) 
        Module("r.mapcalc", expression="zzH2_{I} = pow(pow({r},2)+pow({d},2)-(2*{r}*{d}*cos(zzangle{I}-90)),0.5)".format(r=circle_radius,d="zzdistance"+str(i),I=i), overwrite=True, quiet=True) 
        #calculating B1 and B2, the angles between the line passing through the observer and the center of the pixel and the more distant and less distant points of the inclined circle representing the pixel
        Module("r.mapcalc", expression="zzB1_{I} = acos( (pow({r},2)-pow(zzH1_{I},2)-pow({d},2)) / (-2*zzH1_{I}*{d}) ) ".format(r=circle_radius,d="zzdistance"+str(i),I=i), overwrite=True, quiet=True) 
        Module("r.mapcalc", expression="zzB2_{I} = acos( (pow({r},2)-pow(zzH2_{I},2)-pow({d},2)) / (-2*zzH2_{I}*{d}) ) ".format(r=circle_radius,d="zzdistance"+str(i),I=i), overwrite=True, quiet=True) 
        #calculating the solid angle, using the fact that the area of an asymmetric ellipse equals that of an ellipse whose minor axis is the sum of the two unequal half minor axes
        Module("r.mapcalc", expression="zzsangle{I} = ({pi}*{r}*( {d}*tan(zzB1_{I}) + {d}*tan(zzB2_{I}) )/2 )  / (pow({r},2)+pow({d},2)) ".format(r=circle_radius,d="zzdistance"+str(i),I=i,pi=pi), overwrite=True, quiet=True) 
        #the approximations used for the solid angle can produce excessively large values under, or very close to, the position of the observer; in such cases we assume the solid angle is half of the visible sphere (2*pi)
        #the same occurs when an object radius larger than the pixel size is used; in some cases this can produce negative values of zzB2
        Module("r.mapcalc", expression="zzsangle{I} = if(zzsangle{I}>2*{pi} || zzB2_{I}>=90,2*{pi},zzsangle{I})".format(I=i, pi=pi), overwrite=True, quiet=True)
        #removing temporary region    
        gscript.del_temp_region()
    except Exception:
        #log the failing category to a file for later inspection
        with open("error_cat_" + i + ".txt", "w") as f:
            f.write("error in category: " + i)
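
compute() above is designed to run once per point category: each worker takes
its own temporary region, and the module-level starting semaphore plus a 0.1 s
timer staggers process start-up. A hedged sketch of how it might be dispatched
in parallel; the category list, the process cap, and the exact keyword set are
assumptions, not part of the original module:

import multiprocessing

def dispatch(categories, nprocs, **compute_kwargs):
    # start one compute() worker per category, at most nprocs at a time
    jobs = []
    for cat in categories:
        job = multiprocessing.Process(
            target=compute, kwargs=dict(i=str(cat), **compute_kwargs))
        job.start()
        jobs.append(job)
        if len(jobs) >= nprocs:
            for running in jobs:
                running.join()
            jobs = []
    for running in jobs:
        running.join()
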
Exemplo n.º 59
0
def main():
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options['method']  # sharpening algorithm
    ms1 = options['blue']  # blue channel
    ms2 = options['green']  # green channel
    ms3 = options['red']  # red channel
    pan = options['pan']  # high res pan channel
    out = options['output']  # prefix for output RGB maps
    bladjust = flags['l']  # adjust blue channel
    sproc = flags['s']  # serial processing

    outb = grass.core.find_file('%s_blue' % out)
    outg = grass.core.find_file('%s_green' % out)
    outr = grass.core.find_file('%s_red' % out)

    if (outb['name'] != '' or outg['name'] != '' or outr['name'] != '') and not grass.overwrite():
        grass.warning(_('Maps with selected output prefix names already exist.'
                        ' Delete them or use overwrite flag'))
        return

    pid = str(os.getpid())

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv['nsres']
    ewres = kv['ewres']
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()

    grass.run_command('g.region', res=panres, align=pan)

    grass.message(_("Performing pan sharpening with hi res pan image: %f" % panres))

    if sharpen == "brovey":
        grass.verbose(_("Using Brovey algorithm"))

        # pan/intensity histogram matching using linear regression
        outname = 'tmp%s_pan1' % pid
        panmatch1 = matchhist(pan, ms1, outname)

        outname = 'tmp%s_pan2' % pid
        panmatch2 = matchhist(pan, ms2, outname)

        outname = 'tmp%s_pan3' % pid
        panmatch3 = matchhist(pan, ms3, outname)

        outr = '%s_red' % out
        outg = '%s_green' % out
        outb = '%s_blue' % out

        # calculate brovey transformation
        grass.message(_("Calculating Brovey transformation..."))

        if sproc:
            # serial processing
            e = '''eval(k = "$ms1" + "$ms2" + "$ms3")
                "$outr" = 1.0 * "$ms3" * "$panmatch3" / k
                "$outg" = 1.0 * "$ms2" * "$panmatch2" / k
                "$outb" = 1.0 * "$ms1" * "$panmatch1" / k'''
            grass.mapcalc(e, outr=outr, outg=outg, outb=outb,
                          panmatch1=panmatch1, panmatch2=panmatch2,
                          panmatch3=panmatch3, ms1=ms1, ms2=ms2, ms3=ms3,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms1, panmatch1, ms1, ms2, ms3),
                                     overwrite=True)
            pg = grass.mapcalc_start('%s_green = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms2, panmatch2, ms1, ms2, ms3),
                                     overwrite=True)
            pr = grass.mapcalc_start('%s_red = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms3, panmatch3, ms1, ms2, ms3),
                                     overwrite=True)

            pb.wait()
            pg.wait()
            pr.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name='%s,%s,%s' % (panmatch1, panmatch2, panmatch3))

    elif sharpen == "ihs":
        grass.verbose(_("Using IHS<->RGB algorithm"))
        # transform RGB channels into IHS color space
        grass.message(_("Transforming to IHS color space..."))
        grass.run_command('i.rgb.his', overwrite=True,
                          red=ms3,
                          green=ms2,
                          blue=ms1,
                          hue="tmp%s_hue" % pid,
                          intensity="tmp%s_int" % pid,
                          saturation="tmp%s_sat" % pid)

        # pan/intensity histogram matching using linear regression
        target = "tmp%s_int" % pid
        outname = "tmp%s_pan_int" % pid
        panmatch = matchhist(pan, target, outname)

        # substitute pan for intensity channel and transform back to RGB color space
        grass.message(_("Transforming back to RGB color space and sharpening..."))
        grass.run_command('i.his.rgb', overwrite=True,
                          hue="tmp%s_hue" % pid,
                          intensity="%s" % panmatch,
                          saturation="tmp%s_sat" % pid,
                          red="%s_red" % out,
                          green="%s_green" % out,
                          blue="%s_blue" % out)

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name=panmatch)

    elif sharpen == "pca":
        grass.verbose(_("Using PCA/inverse PCA algorithm"))
        grass.message(_("Creating PCA images and calculating eigenvectors..."))

        # initial PCA with RGB channels
        pca_out = grass.read_command('i.pca', quiet=True, rescale='0,0',
                                     input='%s,%s,%s' % (ms1, ms2, ms3),
                                     output='tmp%s.pca' % pid)
        if len(pca_out) < 1:
            grass.fatal(_("Input has no data. Check region settings."))

        b1evect = []
        b2evect = []
        b3evect = []
        for l in pca_out.replace('(', ',').replace(')', ',').splitlines():
            b1evect.append(float(l.split(',')[1]))
            b2evect.append(float(l.split(',')[2]))
            b3evect.append(float(l.split(',')[3]))

        # inverse PCA with hi res pan channel substituted for principal component 1
        pca1 = 'tmp%s.pca.1' % pid
        pca2 = 'tmp%s.pca.2' % pid
        pca3 = 'tmp%s.pca.3' % pid
        b1evect1 = b1evect[0]
        b1evect2 = b1evect[1]
        b1evect3 = b1evect[2]
        b2evect1 = b2evect[0]
        b2evect2 = b2evect[1]
        b2evect3 = b2evect[2]
        b3evect1 = b3evect[0]
        b3evect2 = b3evect[1]
        b3evect3 = b3evect[2]

        outname = 'tmp%s_pan' % pid
        panmatch = matchhist(pan, ms1, outname)

        grass.message(_("Performing inverse PCA ..."))

        stats1 = grass.parse_command("r.univar", map=ms1, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats2 = grass.parse_command("r.univar", map=ms2, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats3 = grass.parse_command("r.univar", map=ms3, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))

        b1mean = float(stats1['mean'])
        b2mean = float(stats2['mean'])
        b3mean = float(stats3['mean'])

        if sproc:
            # serial processing

            outr = '%s_red' % out
            outg = '%s_green' % out
            outb = '%s_blue' % out

            cmd1 = "$outb = (1.0 * $panmatch * $b1evect1) + ($pca2 * $b2evect1) + ($pca3 * $b3evect1) + $b1mean"
            cmd2 = "$outg = (1.0 * $panmatch * $b1evect2) + ($pca2 * $b2evect2) + ($pca3 * $b3evect2) + $b2mean"
            cmd3 = "$outr = (1.0 * $panmatch * $b1evect3) + ($pca2 * $b2evect3) + ($pca3 * $b3evect3) + $b3mean"

            cmd = '\n'.join([cmd1, cmd2, cmd3])

            grass.mapcalc(cmd, outb=outb, outg=outg, outr=outr,
                          panmatch=panmatch, pca2=pca2, pca3=pca3,
                          b1evect1=b1evect1, b2evect1=b2evect1, b3evect1=b3evect1,
                          b1evect2=b1evect2, b2evect2=b2evect2, b3evect2=b3evect2,
                          b1evect3=b1evect3, b2evect3=b2evect3, b3evect3=b3evect3,
                          b1mean=b1mean, b2mean=b2mean, b3mean=b3mean,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect1, pca2,
                                        b2evect1, pca3, b3evect1, b1mean),
                                     overwrite=True)

            pg = grass.mapcalc_start('%s_green = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect2, pca2,
                                        b2evect2, pca3, b3evect2, b2mean),
                                     overwrite=True)

            pr = grass.mapcalc_start('%s_red = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect3, pca2,
                                        b2evect3, pca3, b3evect3, b3mean),
                                     overwrite=True)

            pr.wait()
            pg.wait()
            pb.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type="raster",
                          pattern='tmp%s*,%s' % (pid, panmatch))

    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(_("Assigning grey equalized color tables to output images..."))
    # equalized grey scales give best contrast
    for ch in ['red', 'green', 'blue']:
        grass.run_command('r.colors', quiet=True, map="%s_%s" % (out, ch),
                          flags="e", color='grey')

    # Landsat output tends to be too blue-ish because the panchromatic band is
    # less sensitive to blue light, so the output blue channel can be adjusted
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        rules = grass.tempfile()
        with open(rules, 'w') as colors:
            colors.write('5 0 0 0\n20 200 200 200\n40 230 230 230\n67 255 255 255\n')

        grass.run_command('r.colors', map="%s_blue" % out, rules=rules)
        os.remove(rules)

    # output notice
    grass.verbose(_("The following pan-sharpened output maps have been generated:"))
    for ch in ['red', 'green', 'blue']:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(_("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(_("If desired, combine channels into a single RGB map with 'r.composite'."))
    grass.verbose(_("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ['red', 'green', 'blue']:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three output
    grass.run_command('i.group', group=out,
                      input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.run_command('g.remove', flags="f", type="raster",
                      pattern="tmp%s*" % pid, quiet=True)
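
For reference, the Brovey branch above weights each multispectral band by the
ratio of the (histogram-matched) pan band to the sum of the three bands, cell
by cell. A small NumPy sketch of the same arithmetic on toy arrays; it uses a
single pan array for brevity, whereas the module matches one pan per channel:

import numpy as np

blue = np.array([[10.0, 20.0], [30.0, 40.0]])
green = np.array([[20.0, 30.0], [40.0, 50.0]])
red = np.array([[30.0, 40.0], [50.0, 60.0]])
pan = np.array([[60.0, 90.0], [120.0, 150.0]])

# Brovey: out_band = band * pan / (blue + green + red)
k = blue + green + red
out_red = red * pan / k
out_green = green * pan / k
out_blue = blue * pan / k
print(out_red)
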
Exemplo n.º 60
0
def main():
    input = options["input"]
    output = options["output"]
    column = options["column"]
    ftype = options["type"]
    xtiles = int(options["x"])
    ytiles = int(options["y"])

    rtvflags = ""
    for key in "sbtvz":
        if flags[key]:
            rtvflags += key

    # check options
    if xtiles <= 0:
        grass.fatal(_("Number of tiles in x direction must be > 0"))
    if ytiles <= 0:
        grass.fatal(_("Number of tiles in y direction must be > 0"))
    if grass.find_file(name=input)["name"] == "":
        grass.fatal(_("Input raster %s not found") % input)

    grass.use_temp_region()
    curr = grass.region()
    width = int(curr["cols"] / xtiles)
    if width <= 1:
        grass.fatal("The requested number of tiles in x direction is too large")
    height = int(curr["rows"] / ytiles)
    if height <= 1:
        grass.fatal("The requested number of tiles in y direction is too large")

    do_clip = False
    overlap = 0
    if flags["s"] and ftype == "area":
        do_clip = True
        overlap = 2

    ewres = curr["ewres"]
    nsres = curr["nsres"]
    xoverlap = overlap * ewres
    yoverlap = overlap * nsres
    xoverlap2 = (overlap / 2) * ewres
    yoverlap2 = (overlap / 2) * nsres

    e = curr["e"]
    w = curr["w"] + xoverlap
    if w >= e:
        grass.fatal(_("Overlap is too large"))
    n = curr["n"] - yoverlap
    s = curr["s"]
    if s >= n:
        grass.fatal(_("Overlap is too large"))

    datatype = grass.raster_info(input)["datatype"]
    vtiles = None

    # north to south
    for ytile in range(ytiles):
        n = curr["n"] - ytile * height * nsres
        s = n - height * nsres - yoverlap
        if ytile == ytiles - 1:
            s = curr["s"]
        # west to east
        for xtile in range(xtiles):
            w = curr["w"] + xtile * width * ewres
            e = w + width * ewres + xoverlap

            if xtile == xtiles - 1:
                e = curr["e"]

            grass.run_command("g.region", n=n, s=s, e=e, w=w, nsres=nsres, ewres=ewres)

            if do_clip:
                tilename = output + "_stile_" + str(ytile) + str(xtile)
            else:
                tilename = output + "_tile_" + str(ytile) + str(xtile)

            outname = output + "_tile_" + str(ytile) + str(xtile)

            grass.run_command(
                "r.to.vect",
                input=input,
                output=tilename,
                type=ftype,
                column=column,
                flags=rtvflags,
            )

            if do_clip:
                n2 = curr["n"] - ytile * height * nsres - yoverlap2
                s2 = n2 - height * nsres
                if ytile == 0:
                    n2 = curr["n"]
                    s2 = n2 - height * nsres - yoverlap2
                if ytile == ytiles - 1:
                    s2 = curr["s"]

                w2 = curr["w"] + xtile * width * ewres + xoverlap2
                e2 = w2 + width * ewres
                if xtile == 0:
                    w2 = curr["w"]
                    e2 = w2 + width * ewres + xoverlap2
                if xtile == xtiles - 1:
                    e2 = curr["e"]

                tilename = output + "_stile_" + str(ytile) + str(xtile)
                if grass.vector_info_topo(tilename)["areas"] > 0:
                    grass.run_command(
                        "g.region", n=n2, s=s2, e=e2, w=w2, nsres=nsres, ewres=ewres
                    )

                    extname = "extent_tile_" + str(ytile) + str(xtile)
                    grass.run_command("v.in.region", output=extname, flags="d")
                    outname = output + "_tile_" + str(ytile) + str(xtile)
                    grass.run_command(
                        "v.overlay",
                        ainput=tilename,
                        binput=extname,
                        output=outname,
                        operator="and",
                        olayer="0,1,0",
                    )
                    grass.run_command(
                        "g.remove", flags="f", type="vector", name=extname, quiet=True
                    )

                    if vtiles is None:
                        vtiles = outname
                    else:
                        vtiles = vtiles + "," + outname

                grass.run_command(
                    "g.remove", flags="f", type="vector", name=tilename, quiet=True
                )

            else:
                # write cmd history:
                grass.vector_history(outname)
                if vtiles is None:
                    vtiles = outname
                else:
                    vtiles = vtiles + "," + outname

    if flags["p"]:
        grass.run_command("v.patch", input=vtiles, output=output, flags="e")

        grass.run_command("g.remove", flags="f", type="vector", name=vtiles, quiet=True)

        if grass.vector_info_topo(output)["boundaries"] > 0:
            outpatch = output + "_patch"
            grass.run_command("g.rename", vector=(output, outpatch))
            grass.run_command(
                "v.clean", input=outpatch, output=output, tool="break", flags="c"
            )
            grass.run_command("g.remove", flags="f", type="vector", name=outpatch)

    grass.message(_("%s complete") % "r.to.vect.tiled")

    return 0
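
The tile bounds in the loops above follow a single pattern: each tile spans
width * ewres (plus the overlap) measured from the region edge, and the last
tile in a row or column is clamped to the region boundary. A detached sketch
of the west-east arithmetic, with made-up region values:

def x_tile_bounds(curr, xtiles, width, ewres, xoverlap):
    # reproduce the w/e computation from the inner loop above
    bounds = []
    for xtile in range(xtiles):
        w = curr["w"] + xtile * width * ewres
        e = w + width * ewres + xoverlap
        if xtile == xtiles - 1:
            e = curr["e"]
        bounds.append((w, e))
    return bounds

curr = {"w": 0.0, "e": 100.0}
print(x_tile_bounds(curr, xtiles=3, width=11, ewres=3.0, xoverlap=2.0))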