Example #1
def main():
    url = options['url']
    coverage = options['coverage']
    output = options['output']
    location = options['location']
    region = options['region']
    urlparams = options['urlparams']
    username = options['username']
    password = options['password']
    flag_c = flags['c']
    flag_e = flags['e']


    options['version'] = "1.0.0"  # currently the only supported version, therefore not exposed in the GUI

    gscript.debug("Using GDAL WCS driver")
    wcs = WCSGdalDrv()  # only supported driver

    if flag_c:
        wcs.GetCapabilities(options, flags)

    elif flag_e:
        external_map = wcs.LinkMap(options, flags)

    else:
        gscript.message("Importing raster map into GRASS...")
        fetched_map = wcs.GetMap(options, flags)
        if not fetched_map:
            gscript.warning(_("Nothing imported.\nNo data has been downloaded from the WCS server."))
            return 1

    return 0
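
Most of these examples read module-level 'options' and 'flags' dictionaries without showing where they come from. As a hedged sketch of the usual GRASS module boilerplate (the option/flag definitions in the script header comments are assumed), they are produced by grass.script.parser():

import sys
import grass.script as gscript

if __name__ == "__main__":
    # parses the module's declared options and flags from the command line
    options, flags = gscript.parser()
    sys.exit(main())
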
Example #2

def mask_data(band_filter, cfmask_filter, cloud_mask_value, file_separator):
    # do cleanup first
    grass.run_command('g.remove', type='raster', pattern='*_masked', flags='f', quiet=True)
    
    # find band1 raster maps first ('mapset' is assumed to be a module-level global)
    bands1 = grass.list_grouped('raster', pattern='*{}1*'.format(band_filter))[mapset]
    count = len(bands1)
    i = 0
    for b1 in bands1:
        i += 1
        basename = b1.split(file_separator)[0]
        grass.message("Processing <{0}> ({1}/{2})...".format(basename, i, count))
        grass.percent(i, count, 5)
        # set computational region based on first band
        grass.run_command('g.region', raster=b1)
        maskname = '{}{}{}'.format(basename, file_separator, cfmask_filter)
        mask = grass.find_file(maskname, element='raster')['fullname']
        # apply cloud mask if found
        if mask:
            grass.run_command('r.mask', flags='i', raster=maskname, maskcats=cloud_mask_value, overwrite=True, quiet=True)
        else:
            grass.warning("Mask missing for <{}>".format(basename))
        # create copy of band with mask applied
        bands = grass.list_grouped('raster', pattern='{}{}{}*'.format(basename, file_separator, band_filter))[mapset]
        for b in bands:
            grass.mapcalc('{name}_masked={name}'.format(name=b), quiet=True, overwrite=True)
            grass.run_command('r.colors', map=b, color='grey.eq')
        # remove mask if applied
        if mask:
            grass.run_command('r.mask', flags='r', quiet=True)
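
A hedged usage sketch for mask_data (the filter strings are hypothetical, and the module-level 'mapset' global it reads is assumed to be set beforehand):

mapset = grass.gisenv()['MAPSET']
# mask every band of each scene with its cfmask layer; cells with cfmask
# category 4 (assumed here to mean cloud) are masked out via the r.mask -i flag
mask_data(band_filter='B', cfmask_filter='cfmask',
          cloud_mask_value='4', file_separator='_')
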
Example #3
    def _checkIgnoeredParams(self, options, flags, driver_props):
        """!Write warnings for set parameters and flags, which chosen driver does not use."""

        not_relevant_params = []
        for i_param in driver_props['ignored_params']:

            if i_param in options and \
               options[i_param] and \
               i_param not in ['srs', 'wms_version', 'format']:  # params with default value
                not_relevant_params.append('<' + i_param + '>')

        if len(not_relevant_params) > 0:
            grass.warning(_("These parameters are ignored: %s\n"
                            "The %s driver does not support them.") %
                          (', '.join(not_relevant_params), options['driver']))

        not_relevant_flags = []
        for i_flag in driver_props['ignored_flags']:

            if flags[i_flag]:
                not_relevant_flags.append('<' + i_flag  + '>')

        if len(not_relevant_flags) > 0:
            grass.warning(_("These flags are ignored: %s\n"
                            "The %s driver does not support them.") %
                          (', '.join(not_relevant_flags), options['driver']))
Example #4
def check_progs():
    found_missing = False
    if not grass.find_program('v.mc.py'):
        found_missing = True
        grass.warning(_("'%s' required. Please install '%s' first\nusing 'g.extension %s' or check\nthe PATH and GRASS_ADDON_PATH variables") % ('v.mc.py', 'v.mc.py', 'v.mc.py'))
    if found_missing:
        grass.fatal(_("An ERROR occurred running <v.ldm.py>"))
Example #5
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if rows:
        # Create the r.series input file
        filename = grass.tempfile(True)
        f = open(filename, 'w')

        for row in rows:
            f.write("%s\n" % row["id"])

        f.close()

        flag = ""
        if len(rows) > 1000:
            grass.warning(_("Processing over 1000 maps: activating -z flag of r.series which slows down processing"))
            flag += "z"
        if nulls:
            flag += "n"

        try:
            grass.run_command("r.series", flags=flag, file=filename,
                              output=output, overwrite=grass.overwrite(),
                              method=method)
        except CalledModuleError:
            grass.fatal(_("%s failed. Check above error messages.") % 'r.series')

        if not add_time:
            # Create the time range for the output map
            if output.find("@") >= 0:
                id = output
            else:
                mapset = grass.gisenv()["MAPSET"]
                id = output + "@" + mapset

            map = sp.get_new_map_instance(id)
            map.load()
            map.set_temporal_extent(sp.get_temporal_extent())

            # Register the map in the temporal database
            if map.is_in_db():
                map.update_all()
            else:
                map.insert()
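
For reference, the temporary file handed to r.series through its file= option simply lists one registered map ID per line; a hedged illustration with hypothetical map names:

tempmean_2010_01@climate
tempmean_2010_02@climate
tempmean_2010_03@climate
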
Example #6
def main():

    if 'GRASS' in options['driver']:
        grass.debug("Using GRASS driver")
        from wms_drv import WMSDrv
        wms = WMSDrv()
    elif 'GDAL' in options['driver']:
        grass.debug("Using GDAL WMS driver")
        from wms_gdal_drv import WMSGdalDrv
        wms = WMSGdalDrv()

    if flags['c']:
        wms.GetCapabilities(options)
    else:
        from wms_base import GRASSImporter
        options['region'] = GetRegionParams(options['region'])
        fetched_map = wms.GetMap(options, flags)

        grass.message(_("Importing raster map into GRASS..."))
        if not fetched_map:
            grass.warning(_("Nothing to import.\nNo data has been downloaded from wms server."))
            return
        importer = GRASSImporter(options['output'])
        importer.ImportMapIntoGRASS(fetched_map)

    return 0
Example #7
    def _checkIgnoeredParams(self, options, flags, driver_props):
        """!Write warnings for set parameters and flags, which chosen driver does not use."""

        not_relevant_params = []
        for i_param in driver_props["ignored_params"]:

            if (
                i_param in options and options[i_param] and i_param not in ["srs", "wms_version", "format"]
            ):  # params with default value
                not_relevant_params.append("<" + i_param + ">")

        if len(not_relevant_params) > 0:
            grass.warning(
                _(
                    "These parameters are ignored: %s\n"
                    "The %s driver does not support them."
                )
                % (", ".join(not_relevant_params), options["driver"])
            )

        not_relevant_flags = []
        for i_flag in driver_props["ignored_flags"]:

            if flags[i_flag]:
                not_relevant_flags.append("<" + i_flag + ">")

        if len(not_relevant_flags) > 0:
            grass.warning(
                _(
                    "These flags are ignored: %s\n"
                    "The %s driver does not support them."
                )
                % (", ".join(not_relevant_flags), options["driver"])
            )
Example #8
def import_file(filename, archive, output, region):
    """Extracts one binary file from its archive and imports it."""

    # check that the archive exists before trying to open it
    if not os.path.isfile(archive):
        grass.warning("Could not find file {}. Skipping"
                      .format(archive))
        return

    # open the archive
    with ZipFile(archive, 'r') as a:

        # create temporary file and directory
        tempdir = grass.tempdir()
        tempfile = os.path.join(tempdir, filename)

        # try to inflate and import the layer
        try:
            grass.message("Inflating {} ...".format(filename))
            a.extract(filename, tempdir)
            grass.message("Importing {} as {} ..."
                          .format(filename, output))
            grass.run_command('r.in.bin', flags='s', input=tempfile,
                              output=output, bytes=2, anull=-9999,
                              **region)

        # if the file is not present in the archive
        except KeyError:
            grass.warning("Could not find {} in {}. Skipping"
                          .format(filename, archive))

        # make sure temporary files are cleaned up
        finally:
            grass.try_remove(tempfile)
            grass.try_rmdir(tempdir)
Example #9
def sqlTbl(name, columns):
    '''Create a new empty table in the same sqlite.db as stations and connect.'''
    db = grass.vector_db(options['stationsvect'])
    # check if a table of this name is already linked to this vector
    if name in [db[l]['name'] for l in db]:
        grass.warning('Table %s already attached to %s.' % (name, options['stationsvect']))
        return None
    # connect
    try:
        con = sql.connect(db[1]['database'])
    except sql.Error:
        grass.warning('''Can't connect to sqlite database, make sure %s is connected
        to a sqlite database on layer 1.''' % options['stationsvect'])
        return None
    # sql cursor
    cur = con.cursor()
    # build column definitions; the second tuple element encodes the type:
    # 'i' -> INT, 'f' -> DOUBLE, '<N>s' -> VARCHAR(N)
    cols = []
    for c in columns:
        if 'i' in c[1]:
            typ = 'INT'
        elif 'f' in c[1]:
            typ = 'DOUBLE'
        elif 's' in c[1]:
            typ = 'VARCHAR(%s)' % abs(int(c[1][:-1]))
        else:
            raise ValueError("Don't know how to convert %s for table %s" % (c, name))
        cols += [c[0] + ' ' + typ]
    # create table
    stm = 'CREATE TABLE IF NOT EXISTS %s (%s)' % (name, ', '.join(cols))
    cur.execute(stm)
    con.commit()
    con.close()
    # attach to stations
    grass.run_command('v.db.connect', map=options['stationsvect'],
                      table=name, key=columns[0][0], layer=max(db) + 1)

    return
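
A hedged usage sketch (table and column names are hypothetical). The second element of each column tuple selects the SQL type ('i' -> INT, 'f' -> DOUBLE, '20s' -> VARCHAR(20)), and the first column doubles as the key for v.db.connect:

columns = [('station_id', '20s'),   # VARCHAR(20), used as the key column
           ('year', 'i'),           # INT
           ('discharge', 'f')]      # DOUBLE
sqlTbl('annual_discharge', columns)
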
Example #10
    def _createVRT(self):
        '''!Create a VRT with the help of the gdalbuildvrt program.
        VRT is a virtual GDAL dataset format.

        @return path to the VRT file
        '''
        self._debug("_createVRT", "started")
        vrt_file = self._tempfile()
        command = ["gdalbuildvrt", '-te']
        command += self.params['boundingbox']
        command += [vrt_file, self.xml_file]
        command = [str(i) for i in command]

        gscript.verbose(' '.join(command))

        self.process = subprocess.Popen(command,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        universal_newlines=True)
        self.out, self.err = self.process.communicate()
        gscript.verbose(self.out)

        if self.err:
            gscript.verbose(self.err + "\n")
            if "does not exist" in self.err:
                gscript.warning('Coverage "%s" cannot be opened / does not exist.' % self.params['coverage'])
            gscript.fatal("Generation of VRT file failed (gdalbuildvrt error). Set the verbose flag for details.")

        self._debug("_createVRT", "finished")
        return vrt_file
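
The same gdalbuildvrt call can be sketched standalone; the paths and bounding box below are hypothetical, and -te expects xmin, ymin, xmax, ymax:

import subprocess

command = ['gdalbuildvrt', '-te',
           '500000', '5420000', '510000', '5430000',  # xmin ymin xmax ymax
           '/tmp/coverage.vrt', '/tmp/coverage.xml']
proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE, universal_newlines=True)
out, err = proc.communicate()
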
Example #11
def main():
    images = options['input'].split(',')
    output = options['output']

    count = len(images)
    msg = _('Do not forget to set region properly to cover all images.')
    gscript.warning(msg)

    offset = 0
    offsets = []
    parms = {}
    for n, img in enumerate(images):
        offsets.append(offset)
        parms['image%d' % (n + 1)] = img
        parms['offset%d' % (n + 1)] = offset
        offset += get_limit(img) + 1

    gscript.message(_("Mosaicing %d images...") % count)

    gscript.mapcalc("$output = " + make_expression(1, count),
                    output=output, **parms)

    # modify the color table:
    p = gscript.feed_command('r.colors', map=output, rules='-')
    for img, offset in zip(images, offsets):
        print(img, offset)
        copy_colors(p.stdin, img, offset)
    p.stdin.close()
    p.wait()

    gscript.message(_("Done. Raster map <%s> created.") % output)

    # write cmd history:
    gscript.raster_history(output)
Example #12
def printStats(table, tablefmt='simple'):
    try:
        from tabulate import tabulate
    except ImportError:
        gscript.warning('Install tabulate for pretty printing tabular output ($ pip install tabulate). Using pprint instead...')
        from pprint import pprint
        pprint(table)
        return
    print(tabulate(table, headers='firstrow', tablefmt=tablefmt))
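
A hedged usage sketch; with headers='firstrow' the first row of the table is taken as the header:

stats = [['map', 'mean', 'stddev'],
         ['elevation', 120.5, 14.2],
         ['slope', 8.3, 5.1]]
printStats(stats, tablefmt='github')
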
Example #13
def create_index(driver, database, table, index_name, key):
    if driver == 'dbf':
        return False

    grass.info(_("Creating index <%s>...") % index_name)
    if 0 != grass.run_command('db.execute', quiet = True,
                              driver = driver, database = database,
                              sql = "create unique index %s on %s(%s)" % (index_name, table, key)):
        grass.warning(_("Unable to create index <%s>") % index_name)
Example #14
def check_progs():
    found_missing = False
    for prog in ('r.hypso', 'r.stream.basins', 'r.stream.distance', 'r.stream.extract',
                 'r.stream.order', 'r.stream.snap', 'r.stream.stats', 'r.width.funct'):
        if not grass.find_program(prog, '--help'):
            found_missing = True
            grass.warning(_("'%s' required. Please install '%s' first using 'g.extension %s'") % (prog, prog, prog))
    if found_missing:
        grass.fatal(_("An ERROR occurred running r.basin"))
Example #15
def main():
    input = options['input']
    maskcats = options['maskcats']
    remove = flags['r']
    invert = flags['i']

    if not remove and not input:
        grass.fatal(_("Required parameter <input> not set"))

    # check if input file exists
    if not grass.find_file(input)['file'] and not remove:
        grass.fatal(_("<%s> does not exist.") % input)

    if 'MASKCATS' not in grass.gisenv() and not remove:
        ## beware: next check is made with != , not with 'is', otherwise:
        #>>> grass.raster_info("basin_50K")['datatype'] is "CELL"
        #False
        # even if:
        #>>> "CELL" is "CELL"
        #True
        if grass.raster_info(input)['datatype'] != "CELL":
            grass.fatal(_("Raster map %s must be integer for maskcats parameter") % input)

    mapset = grass.gisenv()['MAPSET']
    exists = bool(grass.find_file('MASK', element='cell', mapset=mapset)['file'])

    if remove:
        if exists:
            grass.run_command('g.remove', rast='MASK')
            grass.message(_("Raster MASK removed"))
        else:
            grass.fatal(_("No existing MASK to remove"))
    else:
        if exists:
            if not grass.overwrite():
                grass.fatal(_("MASK already found in current mapset. Delete first or overwrite."))
            else:
                grass.warning(_("MASK already exists and will be overwritten"))

        p = grass.feed_command('r.reclass', input=input, output='MASK', overwrite=True, rules='-')
        p.stdin.write("%s = 1" % maskcats)
        p.stdin.close()
        p.wait()

        if invert:
            global tmp
            tmp = "r_mask_%d" % os.getpid()
            grass.run_command('g.rename', rast=('MASK', tmp), quiet=True)
            grass.mapcalc("MASK=if(isnull($tmp),1,null())", tmp=tmp)
            grass.run_command('g.remove', rast=tmp, quiet=True)
            grass.message(_("Inverted MASK created."))
        else:
            grass.message(_("MASK created."))

        grass.message(_("All subsequent raster operations will be limited to the MASK area. "
                        "Removing or renaming the raster file named MASK will "
                        "restore raster operations to normal."))
Example #16
def thumbnail_image(input_file, output_file):
    try:
        import Image  # legacy PIL; with Pillow this would be: from PIL import Image
        image = Image.open(input_file)
        image.thumbnail((200, 200), Image.ANTIALIAS)
        image.save(output_file, 'PNG')
    except ImportError as error:
        gs.warning(_("Cannot thumbnail image ({error})."
                     " Maybe you don't have PIL.").format(error=error))
Example #17
def main():
    if flags['l']:
        # list of available layers
        list_layers()
        return 0
    elif not options['output']:
        grass.fatal(_("No output map specified"))
    
    if options['cap_file'] and not flags['l']:
        grass.warning(_("Option <cap_file> ignored. It requires '-l' flag."))
    
    # set directory for download
    if not options['folder']:
        options['folder'] = os.path.join(grass.gisenv()['GISDBASE'], 'wms_download')
    
    # region settings
    if options['region']:
        if not grass.find_file(name = options['region'], element = 'windows')['name']:
            grass.fatal(_("Region <%s> not found") % options['region'])

    request = wms_request.WMSRequest(flags, options)    
    if not flags['d']:
        # request data first
        request.GetTiles()
    if not request:
        grass.fatal(_("WMS request failed"))
    
    if flags['a']:
        # use GDAL WMS driver
        ### TODO: use GDAL Python bindings instead
        if not wms_gdal.checkGdalWms():
            grass.fatal(_("GDAL WMS driver is not available"))

        # create local service description XML file
        gdalWms = wms_gdal.GdalWms(options, request)
        options['input'] = gdalWms.GetFile()
    else:
        # download data
        download = wms_download.WMSDownload(flags, options)
        download.GetTiles(request.GetRequests())
    
        # list of files
        files = []
        for item in request.GetRequests():
            files.append(item['output'])
        files = ','.join(files)
        options['input'] = files

    # add flags for r.in.gdalwarp
    flags['e'] = False
    flags['c'] = True
    options['warpoptions'] = ''
    
    return gdalwarp.GDALWarp(flags, options).run()
Example #18
def main():
    options, flags = gscript.parser()

    import wx
    
    from grass.script.setup import set_gui_path
    set_gui_path()
    
    from core.utils import _
    from core.giface import StandaloneGrassInterface
    try:
        from tplot.frame import TplotFrame
    except ImportError as e:
        gscript.fatal(str(e))
    rasters = None
    if options['strds']:
        rasters = options['strds'].strip().split(',')
    coords = None
    if options['coordinates']:
        coords = options['coordinates'].strip().split(',')
    cats = None
    if options['cats']:
        cats = options['cats']
    output = options['output']
    vectors = None
    attr = None
    if options['stvds']:
        vectors = options['stvds'].strip().split(',')
        if not options['attr']:
            gscript.fatal(_("With stvds you have to set 'attr' option"))
        else:
            attr = options['attr']
        if coords and cats:
            gscript.fatal(_("With stvds it is not possible to use 'coordinates' "
                            "and 'cats' options together"))
        elif not coords and not cats:
            gscript.warning(_("With stvds you have to use 'coordinates' or "
                              "'cats' option"))
    app = wx.App()
    frame = TplotFrame(parent=None, giface=StandaloneGrassInterface())
    frame.SetDatasets(rasters, vectors, coords, cats, attr)
    if output:
        frame.OnRedraw()
        if options['size']:
            sizes = options['size'].strip().split(',')
            sizes = [int(s) for s in sizes]
            frame.canvas.SetSize(sizes)
        ext = output.split('.')[-1].lower()
        if ext == 'png':
            frame.canvas.print_png(output)
        if ext in ('jpg', 'jpeg'):
            frame.canvas.print_jpg(output)
        if ext in ('tif', 'tiff'):
            frame.canvas.print_tif(output)
    else:
        frame.Show()
        app.MainLoop()
Example #19
def drop_tab(vector, layer, table, driver, database):
    # disconnect
    if 0 != grass.run_command('v.db.connect', flags='d', quiet=True, map=vector,
                              layer=layer, table=table):
        grass.warning(_("Unable to disconnect table <%s> from vector <%s>") % (table, vector))
    # drop table
    if 0 != grass.run_command('db.droptable', quiet = True, flags = 'f',
                              driver = driver, database = database,
                              table = table):
        grass.fatal(_("Unable to drop table <%s>") % table)
Example #20

def check_strds(name):
    strds = grass.read_command('t.list', quiet=True).splitlines()
    if name + '@' + mapset not in strds:
        return
    
    if os.environ.get('GRASS_OVERWRITE', '0') == '1':
        grass.warning("Space time raster dataset <{}> already exists "
                      "and will be overwritten.".format(name))
        grass.run_command('t.remove', inputs=name, quiet=True)
    else:
        grass.fatal("Space time raster dataset <{}> is already in the database. "
                    "Use the overwrite flag.".format(name))
Example #21
def create_index(driver, database, table, index_name, key):
    if driver == 'dbf':
        return False

    gscript.info(_("Creating index <%s>...") % index_name)
    try:
        gscript.run_command('db.execute', quiet=True, driver=driver,
                            database=database,
                            sql="create unique index %s on %s(%s)" %
                                (index_name, table, key))
    except CalledModuleError:
        gscript.warning(_("Unable to create index <%s>") % index_name)
Example #22
def main():
    global output, tmp

    input = options['input']
    output = options['output']
    layer = options['layer']
    column = options['column']

    # setup temporary file
    tmp = str(os.getpid())

    # does map exist?
    if not grass.find_file(input, element='vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % input)

    if not column:
        grass.warning(
            _("No '%s' option specified. Dissolving based on category values from layer <%s>.") %
            ("column", layer))
        grass.run_command('v.extract', flags='d', input=input,
                          output=output, type='area', layer=layer)
    else:
        if int(layer) == -1:
            grass.warning(_("Invalid layer number (%d). "
                            "Parameter '%s' specified, assuming layer '1'.") %
                          (int(layer), 'column'))
            layer = '1'
        try:
            coltype = grass.vector_columns(input, layer)[column]
        except KeyError:
            grass.fatal(_('Column <%s> not found') % column)

        if coltype['type'] not in ('INTEGER', 'SMALLINT', 'CHARACTER', 'TEXT'):
            grass.fatal(_("Key column must be of type integer or string"))

        f = grass.vector_layer_db(input, layer)

        table = f['table']

        tmpfile = '%s_%s' % (output, tmp)

        try:
            grass.run_command('v.reclass', input=input, output=tmpfile,
                              layer=layer, column=column)
            grass.run_command('v.extract', flags='d', input=tmpfile,
                              output=output, type='area', layer=layer)
        except CalledModuleError as e:
            grass.fatal(_("Final extraction steps failed."
                          " Check above error messages and"
                          " see following details:\n%s") % e)

    # write cmd history:
    grass.vector_history(output)
Example #23
    def _download(self):
        """!Downloads data from WMS server using GDAL WMS driver
        
        @return temp_map with stored downloaded data
        """
        grass.message("Downloading data from WMS server...")

        # GDAL WMS driver does not flip geographic coordinates 
        # according to WMS standard 1.3.0.
        if ("+proj=latlong" in self.proj_srs or \
            "+proj=longlat" in self.proj_srs) and \
            self.params['wms_version'] == "1.3.0":
            grass.warning(_("If module will not be able to fetch the data in this " +
                            "geographic projection, \n try 'WMS_GRASS' driver or use WMS version 1.1.1."))

        self._debug("_download", "started")
        
        temp_map = self._tempfile()        

        xml_file = self._createXML()
        wms_dataset = gdal.Open(xml_file, gdal.GA_ReadOnly)
        grass.try_remove(xml_file)
        if wms_dataset is None:
            grass.fatal(_("Unable to open GDAL WMS driver"))
        
        self._debug("_download", "GDAL dataset created")
        
        driver = gdal.GetDriverByName(self.gdal_drv_format)
        if driver is None:
            grass.fatal(_("Unable to find %s driver") % self.gdal_drv_format)
        
        metadata = driver.GetMetadata()
        if gdal.DCAP_CREATECOPY not in metadata or \
           metadata[gdal.DCAP_CREATECOPY] == 'NO':
            grass.fatal(_('Driver %s does not support the CreateCopy() method.') % self.gdal_drv_name)
        
        self._debug("_download", "calling GDAL CreateCopy...")
        
        temp_map_dataset = driver.CreateCopy(temp_map, wms_dataset, 0)
        
        if temp_map_dataset is None:
            grass.fatal(_("Incorrect WMS query"))
        
        temp_map_dataset = None
        wms_dataset = None
        
        self._debug("_download", "finished")
        
        return temp_map
Example #24
def drop_tab(vector, layer, table, driver, database):
    # disconnect
    try:
        gscript.run_command('v.db.connect', flags='d', quiet=True, map=vector,
                            layer=layer, table=table)
    except CalledModuleError:
        gscript.warning(_("Unable to disconnect table <%s> from vector <%s>") %
                        (table, vector))
    # drop table
    try:
        gscript.run_command('db.droptable', quiet=True, flags='f',
                            driver=driver, database=database,
                            table=table)
    except CalledModuleError:
        gscript.fatal(_("Unable to drop table <%s>") % table)
Example #25

def main():

    files = os.listdir(options['path'])
    map_string = ''
    # download and convert blocks of the table
    for block in files:
        map_name = '%s_0%s' % (options['out'], block)
        block = os.path.join(options['path'], block)
        map_build = GrassMapBuilderEsriToEsri(block,
                                              map_name,
                                              options['attributes'])
        try:
            map_build.build()
            map_string += '%s,' % map_name
        except Exception as e:
            grass.warning("Error: %s\n     Map < %s > conversion failed" % (e, block))
Example #26
def main():
    table = options['table']
    force = flags['f']

    if not options['driver'] or not options['database']:
        # check if DB parameters are set, and if not set them.
        grass.run_command('db.connect', flags='c')

    kv = grass.db_connection()
    if options['database']:
        database = options['database']
    else:
        database = kv['database']
    if options['driver']:
        driver = options['driver']
    else:
        driver = kv['driver']
    # schema needed for PG?

    if force:
        grass.message(_("Forcing ..."))

    # check if table exists
    if not grass.db_table_exist(table):
        grass.fatal(_("Table <%s> not found in database <%s>") %
                    (table, database))

    # check if table is used somewhere (connected to vector map)
    used = grass.db.db_table_in_vector(table)
    if used:
        grass.warning(_("Deleting table <%s> which is attached to following map(s):") % table)
        for vect in used:
            grass.warning("%s" % vect)

    if not force:
        grass.message(_("The table <%s> would be deleted.") % table)
        grass.message("")
        grass.message(_("You must use the force flag to actually remove it. Exiting."))
        sys.exit(0)

    p = grass.feed_command('db.execute', input='-', database=database,
                           driver=driver)
    p.stdin.write("DROP TABLE " + table)
    p.stdin.close()
    p.wait()
    if p.returncode != 0:
        grass.fatal(_("Cannot continue (problem deleting table)."))
Example #27
def main():
    table = options['table']
    force = flags['f']

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags = 'c')

    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    # schema needed for PG?

    if force:
        grass.message(_("Forcing ..."))

    # check if table exists
    nuldev = open(os.devnull, 'w')
    if not grass.db_table_exist(table, stdout=nuldev, stderr=nuldev):
        grass.fatal(_("Table <%s> not found in current mapset") % table)

    # check if table is used somewhere (connected to vector map)
    used = []
    vects = grass.list_strings('vect')
    for vect in vects:
        for f in grass.vector_db(vect, stderr=nuldev).values():
            if not f:
                continue
            if f['table'] == table:
                used.append(vect)
                break
    if used:
        grass.warning(_("Deleting table <%s> which is attached to following map(s):") % table)
        for vect in used:
            grass.message(vect)

    if not force:
        grass.message(_("The table <%s> would be deleted.") % table)
        grass.message("")
        grass.message(_("You must use the force flag to actually remove it. Exiting."))
        sys.exit(0)

    p = grass.feed_command('db.execute', input='-', database=database, driver=driver)
    p.stdin.write("DROP TABLE " + table)
    p.stdin.close()
    p.wait()
    if p.returncode != 0:
        grass.fatal(_("Cannot continue (problem deleting table)."))
Example #28
def main():
    """Main function, called at execution time."""

    # parse options to import layers
    variables = options['variables'].split(',')
    if options['bioclim']:
        # list() is needed so the values survive the all() check below
        bioclim = list(map(int, options['bioclim'].split(',')))
        if not all(1 <= x <= 19 for x in bioclim):
            grass.warning("Values for 'bioclim' need to be within the "
                          "range 1-19. Ignoring values outside this range")
            bioclim = [x for x in bioclim if 1 <= x <= 19]
    else:
        bioclim = range(1, 20)

    if options['months']:
        months = list(map(int, options['months'].split(',')))
        if not all(1 <= x <= 12 for x in months):
            grass.warning("Values for 'months' need to be within the range"
                          " 1-12. Ignoring values outside this range")
            months = [x for x in months if 1 <= x <= 12]
    else:
        months = range(1, 13)

    allres = options['res'].split(',')

    # import tiles
    if options['tiles']:
        tiles = options['tiles'].split(',')
        legaltiles = [str(j)+str(i) for j in range(5) for i in range(12)]
        for t in tiles:
            if t not in legaltiles:
                grass.error("Tile {} is not a valid WorldClim tile, see "
                            "http://www.worldclim.org/tiles.php"
                            .format(t))
        for tile in tiles:
            import_variables(tile=tile, variables=variables,
                             bioclim=bioclim, months=months)

        # Merge tiles
        if not flags['p']:
            merge_tiles(variables=variables, tiles=tiles, bioclim=bioclim,
                        months=months)

    # import global datasets
    if allres != ['']:
        for res in allres:
            import_variables(res=res, variables=variables,
                             bioclim=bioclim, months=months)
Example #29
def raster_to_png(map_name, output_file,
                  compression=None, routpng_flags=None, backend=None):
    """Convert raster map ``map_name`` to PNG file named ``output_file``

    :param compression: PNG file compression (0-9)
    :param routpng_flags: flags for r.out.png (see r.out.png --help)
    :param backend: ``r.out.png`` or ``d.rast``

    ``backend`` can be set to ``r.out.png`` for export using this module
    or ``d.rast`` for rendering using this module. The flags are
    applied in both cases. Default is platform dependent and it is subject
    to change based on the most reliable option for each platform.
    """
    if not backend:
        if sys.platform.startswith('win'):
            backend = 'd.rast'
        else:
            backend = 'r.out.png'
    if backend == 'r.out.png':
        gs.run_command('r.out.png', input=map_name, output=output_file,
                       compression=compression, flags=routpng_flags)
    else:
        from routleaflet.outputs import set_rendering_environment
        region = get_region()
        if region['nsres'] > region['ewres']:
            # oversample in rows, do not lose columns
            width = region['cols']
            height = region['rows'] * (region['nsres'] / region['ewres'])
        else:
            # oversample in columns, do not lose rows
            width = region['cols'] * (region['ewres'] / region['nsres'])
            height = region['rows']
        if routpng_flags and 't' in routpng_flags:
            transparent = True
        else:
            transparent = False
        set_rendering_environment(width=width, height=height,
                                  filename=output_file,
                                  transparent=transparent, driver='cairo',
                                  compression=compression)
        gs.run_command('d.rast', map=map_name)
        if routpng_flags and 'w' in routpng_flags:
            # TODO: the r.out.png flag -w (world file) is ignored
            gs.warning(_("World file for PNG with its actual SRS"
                         " not generated with conversion (export)"
                         " backend <{}>").format(backend))
Example #30
def export_legend(mapname, filename, width, height):
    # using png driver but need to set bg color if we want transparency
    # otherwise png driver will set pixels to ffffff and PIL will
    # not crop the legend
    set_rendering_environment(width, height, filename, transparent=True,
                              backgroud_color='000000',
                              driver='png')
    gs.run_command('d.legend', raster=mapname)
    try:
        from PIL import Image
        image = Image.open(filename)
        imageBox = image.getbbox()
        cropped_image = image.crop(imageBox)
        cropped_image.save(filename, 'PNG')
    except ImportError as error:
        gs.warning(_("Cannot crop legend image ({error})."
                     " Maybe you don't have PIL."
                     " Uncropped legend image will be used.").format(error=error))
Example #31
    def _initializeParameters(self, options, flags):
        self._debug("_initialize_parameters", "started")

        # initialization of module parameters (options, flags)
        self.params['driver'] = options['driver']
        drv_info = WMSDriversInfo()

        driver_props = drv_info.GetDrvProperties(options['driver'])
        self._checkIgnoeredParams(options, flags, driver_props)

        self.params['capfile'] = options['capfile'].strip()

        for key in ['url', 'layers', 'styles', 'method']:
            self.params[key] = options[key].strip()

        self.flags = flags

        if self.flags['o']:
            self.params['transparent'] = 'FALSE'
        else:
            self.params['transparent'] = 'TRUE'

        for key in ['password', 'username', 'urlparams']:
            self.params[key] = options[key]

        if (self.params['password'] and self.params['username'] == '') or \
           (self.params['password'] == '' and self.params['username']):
            grass.fatal(
                _("Please insert both %s and %s parameters or none of them.") %
                ('password', 'username'))

        self.params['bgcolor'] = options['bgcolor'].strip()

        if options['format'] == "jpeg" and \
           not 'format' in driver_props['ignored_params']:
            if not flags['o'] and \
              'WMS' in self.params['driver']:
                grass.warning(_("JPEG format does not support transparency"))

        self.params['format'] = drv_info.GetFormat(options['format'])
        if not self.params['format']:
            self.params['format'] = self.params['format']

        #TODO: get srs from Tile Service file in OnEarth_GRASS driver
        self.params['srs'] = int(options['srs'])
        if self.params['srs'] <= 0 and 'srs' not in driver_props['ignored_params']:
            grass.fatal(_("Invalid EPSG code %d") % self.params['srs'])

        self.params['wms_version'] = options['wms_version']
        if "CRS" in GetSRSParamVal(
                self.params['srs']) and self.params['wms_version'] == "1.1.1":
            self.params['wms_version'] = "1.3.0"
            grass.warning(
                _("WMS version <1.3.0> will be used, because version <1.1.1> does not support <%s> projection")
                % GetSRSParamVal(self.params['srs']))

        if self.params['wms_version'] == "1.3.0":
            self.params['proj_name'] = "CRS"
        else:
            self.params['proj_name'] = "SRS"

        # read projection info
        self.proj_location = grass.read_command('g.proj',
                                                flags='jf').rstrip('\n')
        self.proj_location = self._modifyProj(self.proj_location)

        if self.params['srs'] in [3857, 900913]:
            # HACK: epsg 3857 def: http://spatialreference.org/ref/sr-org/7483/
            # g.proj can return ...+a=6378137 +rf=298.257223563... (a WGS84 ellipsoid def
            # instead of a sphere), which can cause a 20 km shift in Y when the raster is
            # transformed; needs to be tested on more servers
            self.proj_srs = '+proj=merc +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +no_defs +a=6378137 +b=6378137 +nadgrids=@null +to_meter=1'
        else:
            self.proj_srs = grass.read_command(
                'g.proj', flags='jf',
                epsg=str(GetEpsg(self.params['srs']))).rstrip('\n')

        self.proj_srs = self._modifyProj(self.proj_srs)

        if not self.proj_srs or not self.proj_location:
            grass.fatal(_("Unable to get projection info"))

        self.region = options['region']

        min_tile_size = 100
        maxcols = int(options['maxcols'])
        if maxcols <= min_tile_size:
            grass.fatal(_("Maxcols must be greater than 100"))

        maxrows = int(options['maxrows'])
        if maxrows <= min_tile_size:
            grass.fatal(_("Maxrows must be greater than 100"))

        # setting optimal tile size according to maxcols and maxrows constraint and region cols and rows
        self.tile_size['cols'] = int(
            self.region['cols'] / ceil(self.region['cols'] / float(maxcols)))
        self.tile_size['rows'] = int(
            self.region['rows'] / ceil(self.region['rows'] / float(maxrows)))

        # default format for GDAL library
        self.gdal_drv_format = "GTiff"

        self._debug("_initialize_parameters", "finished")
Example #32
def main():
    options, flags = gscript.parser()

    import wx

    from grass.script.setup import set_gui_path
    set_gui_path()

    from core.giface import StandaloneGrassInterface
    try:
        from tplot.frame import TplotFrame
    except ImportError as e:
        gscript.fatal(str(e))
    rasters = None
    if options['strds']:
        rasters = options['strds'].strip().split(',')
    coords = None
    if options['coordinates']:
        coords = options['coordinates'].strip().split(',')
    cats = None
    if options['cats']:
        cats = options['cats']
    output = options['output']
    vectors = None
    attr = None
    if options['stvds']:
        vectors = options['stvds'].strip().split(',')
        if not options['attr']:
            gscript.fatal(_("With stvds you have to set 'attr' option"))
        else:
            attr = options['attr']
        if coords and cats:
            gscript.fatal(_("With stvds it is not possible to use 'coordinates' "
                            "and 'cats' options together"))
        elif not coords and not cats:
            gscript.warning(_("With stvds you have to use 'coordinates' or "
                              "'cats' option"))
    title = None
    if options['title']:
        title = options['title']
    xlabel = None
    if options['xlabel']:
        xlabel = options['xlabel']
    ylabel = None
    if options['ylabel']:
        ylabel = options['ylabel']
    csvfile = None
    if options['csv']:
        csvfile = options['csv']
    app = wx.App()
    frame = TplotFrame(
        parent=None,
        giface=StandaloneGrassInterface(),
        title=_("Temporal Plot Tool - GRASS GIS"),
    )
    if flags['l']:
        frame.linRegRaster.SetValue(state=True)
        frame.linRegVector.SetValue(state=True)
    frame.SetDatasets(rasters, vectors, coords, cats, attr, title, xlabel,
                      ylabel, csvfile, flags['h'], gscript.overwrite())
    if output:
        frame.OnRedraw()
        if options['size']:
            sizes = options['size'].strip().split(',')
            sizes = [int(s) for s in sizes]
            frame.canvas.SetSize(sizes)
        frame.canvas.figure.savefig(output)
    else:
        frame.Show()
        app.MainLoop()
Example #33
def calcPhotoPol(row, col, reg, max_distance, elevation, lights,
                 obs_elevation):
    y, x = pixel2coor(
        (col, row), reg
    )  # (col, row) = pixel, https://grass.osgeo.org/grass70/manuals/libpython/_modules/pygrass/utils.html#pixel2coor

    try:

        y = y - float(
            Region().nsres
        ) / 2  #pixel2coor returns upper left px coordinates, reposition observer at the centre of pixel
        x = x + float(Region().ewres) / 2

        script.use_temp_region()
        script.run_command('g.region',
                           n=y + max_distance,
                           s=y - max_distance,
                           e=x + max_distance,
                           w=x - max_distance,
                           align=elevation)

        script.run_command('r.viewshed',
                           input=elevation,
                           output=tmp_rviewshed,
                           coordinates=(x, y),
                           max_distance=max_distance,
                           observer_elevation=obs_elevation,
                           overwrite=True,
                           flags='b')

        script.del_temp_region()

        # set region to viewshed
        script.run_command("r.null", map=tmp_rviewshed, setnull=0)
        script.use_temp_region()
        script.run_command("g.region",
                           raster=tmp_rviewshed,
                           zoom=tmp_rviewshed)

        # generate a new raster with NA in observer cell
        script.write_command(
            "v.in.ascii",
            input="-",
            output=tmp_observer,
            stdin="{}|{}".format(x, y),
            overwrite=True
        )  #https://grass.osgeo.org/grass70/manuals/libpython/script.html#script.core.write_command

        script.run_command("v.to.rast",
                           input=tmp_observer,
                           output=tmp_robserver,
                           use="cat",
                           overwrite=True)

        script.run_command(
            "r.mapcalc",
            expression="\"{}\" = if(isnull (\"{}\"), 1, null ())".format(
                tmp_rcalc, tmp_robserver),
            overwrite=True)

        #use tmp_rcalc raster as input in r.grow.distance to calculate distance from observer to other cells
        script.run_command("r.grow.distance",
                           flags="n",
                           input=tmp_rcalc,
                           distance=tmp_rdist_dis,
                           value="rdist.value",
                           overwrite=True)

        # keep only cells that match viewshed analysis
        script.run_command("r.mask",
                           raster=tmp_rviewshed,
                           maskcats="1",
                           overwrite=True)

        # calculate DN/distance
        script.run_command("r.mapcalc",
                           expression="\"{}\" = \"{}\" / \"{}\"".format(
                               tmp_photopol, tmp_res_lights, tmp_rdist_dis),
                           overwrite=True)

        process = script.parse_command("r.univar", map=tmp_photopol, flags='g')

        index_for_pixel = float(process[u'sum'])
        lights_DN = float(lights.get_value(Point(x, y),
                                           reg))  #get night lights value

    except Exception:
        # in case of error, set pixel value to -999
        index_for_pixel = -999
        lights_DN = 0
        script.warning("Calculation failed for row:{}, col:{}".format(row, col))

    finally:
        script.run_command("r.mask", flags="r")  # remove any mask
        result = index_for_pixel + lights_DN
        script.del_temp_region()
        return result
Example #34

def main(options, flags):
    # TODO: intervals flag, s should be default behavior
    n_colors = int(options['ncolors'])
    discrete = flags['d']

    fallback = True
    try:
        import seaborn as sns
        fallback = False
    except ImportError:
        # perhaps this can be function in the core
        gscript.error(_("{} Python package not installed.").format('seaborn'))
    if not fallback:
        cmap = sns.cubehelix_palette(n_colors=n_colors,
                                     start=float(options['start']),
                                     rot=float(options['nrotations']),
                                     gamma=float(options['gamma']),
                                     hue=float(options['hue']),
                                     light=float(options['light']),
                                     dark=float(options['dark']),
                                     reverse=flags['n'],
                                     as_cmap=False)
        # as_cmap ignores n_colors in 0.7.0
        # but we want n_colors to be exact when we are exporting
        # the color table or doing discrete one
        import matplotlib  # required by windows
        matplotlib.use('wxAGG')  # required by windows
        import matplotlib.colors as clr
        cmap = clr.LinearSegmentedColormap.from_list('from_list',
                                                     cmap,
                                                     N=n_colors)
    else:
        gscript.warning(
            _("Using Matplotlib cubehelix color table."
              " Most cubehelix parameters are ignored"))
        # we are very nice and provide a fallback
        import matplotlib.pyplot as plt
        name = 'cubehelix'
        # Matplotlib one goes from dark to light but Seaborn goes
        # the other way around by default
        if not flags['n']:
            name += '_r'
        cmap = plt.get_cmap(name, lut=n_colors)

    comments = []
    comments.append("Cubehelix color table generated using:")
    command = [sys.argv[0].split(os.path.sep)[-1]]
    command.extend(sys.argv[1:])
    comments.append("  {}".format(' '.join(command)))

    rules = mpl_cmap_to_rules(cmap,
                              n_colors=n_colors,
                              discrete=discrete,
                              comments=comments)

    if options['map']:
        rcf = ''
        for char in 'gae':
            if flags[char]:
                rcf += char
        gscript.write_command(
            'r.colors',
            map=options['map'],
            flags=rcf,
            rules='-',
            stdin=rules,
        )
    if options['output']:
        with open(options['output'], 'w') as f:
            f.write(rules)
            f.write('\n')
    elif not options['map']:
        print(rules)
Example #35
def main():

    inmaps = options["map"].split(",")

    # save current region
    # why is this global needed for rm_region but not for rm_rasters ?
    global rm_region
    rm_region = "i_zero2null_region_" + str(os.getpid())
    gscript.run_command("g.region", save=rm_region)

    gisenv = gscript.gisenv()

    for inmap in inmaps:
        gscript.message("Processing <%s>..." % inmap)
        # check if map is in current mapset
        inmap = inmap.split("@")[0]
        mapname = gscript.find_file(name=inmap, element="cell", mapset=".")["name"]
        if mapname is None or len(mapname) == 0:
            gscript.warning("Raster map <%s> is not in the current mapset." % inmap)
            continue

        # set current region to map
        gscript.run_command("g.region", raster=inmap)

        # check if there are any zero cells
        rinfo = gscript.raster_info(inmap)
        if rinfo["datatype"] != "CELL":
            gscript.warning(
                "Input map <%s> is not of CELL type but %s."
                % (inmap, rinfo["datatype"])
            )
            continue

        if rinfo["min"] > 0:
            gscript.message("No zero cells in input map <%s>, nothing to do." % inmap)
            continue

        gscript.run_command("g.region", raster=inmap)

        # create clumps of zero cells
        # reclass rules
        tmpfile = gscript.tempfile()
        f = open(tmpfile, "w")
        f.write("0 = 1\n")
        f.write("* = NULL\n")
        f.close()

        gscript.run_command(
            "r.reclass", input=inmap, output=inmap + "_rcl", rules=tmpfile
        )
        gscript.try_remove(tmpfile)
        rm_rasters.append(inmap + "_rcl")

        gscript.run_command(
            "r.clump", input=inmap + "_rcl", output=inmap + "_rcl_clump"
        )

        map_region = gscript.region()

        # get center coordinates of the corner pixels
        nc = map_region["n"] - map_region["nsres"] / 2.0
        sc = map_region["s"] + map_region["nsres"] / 2.0
        ec = map_region["e"] - map_region["ewres"] / 2.0
        wc = map_region["w"] + map_region["ewres"] / 2.0

        # get clump IDs of the corner cells
        corner_clumps = []
        for coord in ((wc, nc), (ec, nc), (ec, sc), (wc, sc)):
            # strip line endings from r.what output
            clump = (
                gscript.read_command(
                    "r.what", map=inmap + "_rcl_clump", coordinates="%s,%s" % coord
                )
                .rstrip()
                .split("|")[3]
            )
            if clump != "*" and clump not in corner_clumps:
                corner_clumps.append(clump)

        # check if any clumps are not covered by corner cells:
        # internal patches of zero cells
        clumpinfo = gscript.raster_info(inmap + "_rcl_clump")
        maptomask = None
        n_inner_clumps = int(clumpinfo["max"]) - len(corner_clumps)
        if n_inner_clumps > 0:
            gscript.message("Filling %(n)d inner clumps..." % {"n": n_inner_clumps})
            exp = "%(inmap)s_nozero = if(%(inmap)s == 0, null(), %(inmap)s)" % {
                "inmap": inmap
            }
            gscript.mapcalc(exp)
            rm_rasters.append(inmap + "_nozero")
            gscript.run_command(
                "r.grow.distance", input=inmap + "_nozero", value=inmap + "_nearest_flt"
            )
            rm_rasters.append(inmap + "_nearest_flt")
            exp = "%(inmap)s_nearest = round(%(inmap)s_nearest_flt)" % {"inmap": inmap}
            gscript.mapcalc(exp)
            rm_rasters.append(inmap + "_nearest")
            gscript.run_command(
                "r.patch",
                input="%(inmap)s_nearest,%(inmap)s_nozero" % {"inmap": inmap},
                output=inmap + "_filled",
            )
            rm_rasters.append(inmap + "_filled")
            maptomask = inmap + "_filled"
        else:
            maptomask = inmap

        # corner clumps of zero cells
        if len(corner_clumps) > 0:
            gscript.message(
                "Removing %(n)d corner clumps..." % {"n": len(corner_clumps)}
            )
            corner_clumps = sorted(set(corner_clumps))
            tmpfile = gscript.tempfile()
            f = open(tmpfile, "w")
            for clump in corner_clumps:
                f.write("%s = 1\n" % clump)

            # create a nodata mask and set masked cells to null
            f.write("* = NULL\n")
            f.close()
            gscript.run_command(
                "r.reclass",
                input=inmap + "_rcl_clump",
                output=inmap + "_nodatamask",
                rules=tmpfile,
            )
            gscript.try_remove(tmpfile)
            rm_rasters.append(inmap + "_nodatamask")

            exp = (
                "%(inmap)s_null = if(isnull(%(inmap)s_nodatamask), %(maptomask)s, null())"
                % {"inmap": inmap, "maptomask": maptomask}
            )
            gscript.mapcalc(exp)
        else:
            if maptomask != inmap:
                gscript.run_command(
                    "g.rename",
                    raster="%(maptomask)s,%(inmap)s_null"
                    % {"maptomask": maptomask, "inmap": inmap},
                    quiet=True,
                )

        # *_rcl_clump are base maps for reclassed maps, need to be removed last
        rm_rasters.append(inmap + "_rcl_clump")

        # list of support files to be preserved:
        # cell_misc/<inmap>/timestamp
        # cell_misc/<inmap>/description.json
        # copy hist/<inmap>
        # colr/<inmap>
        # anything missing ?

        # copy cell_misc/<inmap>/timestamp
        path = os.path.join(
            gisenv["GISDBASE"],
            gisenv["LOCATION_NAME"],
            gisenv["MAPSET"],
            "cell_misc",
            inmap,
            "timestamp",
        )

        if os.path.exists(path):
            newpath = os.path.join(
                gisenv["GISDBASE"],
                gisenv["LOCATION_NAME"],
                gisenv["MAPSET"],
                "cell_misc",
                inmap + "_null",
                "timestamp",
            )
            shutil.copyfile(path, newpath)

        # copy cell_misc/<inmap>/description.json
        path = os.path.join(
            gisenv["GISDBASE"],
            gisenv["LOCATION_NAME"],
            gisenv["MAPSET"],
            "cell_misc",
            inmap,
            "description.json",
        )

        if os.path.exists(path):
            newpath = os.path.join(
                gisenv["GISDBASE"],
                gisenv["LOCATION_NAME"],
                gisenv["MAPSET"],
                "cell_misc",
                inmap + "_null",
                "description.json",
            )
            shutil.copyfile(path, newpath)

        # copy hist/<inmap>
        path = os.path.join(
            gisenv["GISDBASE"], gisenv["LOCATION_NAME"], gisenv["MAPSET"], "hist", inmap
        )
        newpath = os.path.join(
            gisenv["GISDBASE"],
            gisenv["LOCATION_NAME"],
            gisenv["MAPSET"],
            "hist",
            inmap + "_null",
        )
        shutil.copyfile(path, newpath)

        # copy colr/<inmap>
        path = os.path.join(
            gisenv["GISDBASE"], gisenv["LOCATION_NAME"], gisenv["MAPSET"], "colr", inmap
        )

        if os.path.exists(path):
            newpath = os.path.join(
                gisenv["GISDBASE"],
                gisenv["LOCATION_NAME"],
                gisenv["MAPSET"],
                "colr",
                inmap + "_null",
            )
            shutil.copyfile(path, newpath)

        # remove <inmap>_rcl first
        gscript.run_command(
            "g.remove", type="raster", name=inmap + "_rcl", flags="f", quiet=True
        )
        # remove <inmap>
        gscript.run_command(
            "g.remove", type="raster", name=inmap, flags="f", quiet=True
        )

        # rename <inmap>_null to <inmap>
        gscript.run_command(
            "g.rename", raster="%(inmap)s_null,%(inmap)s" % {"inmap": inmap}, quiet=True
        )
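
Note: the four support-file copies above repeat one pattern -- build the element path for the source map, mirror it for the *_null map, and copy if present. A small helper capturing that pattern (hypothetical, not part of the original module) could look like:

import os
import shutil
import grass.script as gscript

def copy_support_file(element, src_map, dst_map, *subpath):
    # Copy one raster support file (e.g. colr/<map> or
    # cell_misc/<map>/timestamp) between maps in the current mapset.
    env = gscript.gisenv()
    base = os.path.join(env["GISDBASE"], env["LOCATION_NAME"],
                        env["MAPSET"], element)
    src = os.path.join(base, src_map, *subpath)
    dst = os.path.join(base, dst_map, *subpath)
    if os.path.exists(src):
        # assumes the destination map (and thus its element directory)
        # already exists, as it does for a freshly created raster
        shutil.copyfile(src, dst)

# e.g. copy_support_file("cell_misc", inmap, inmap + "_null", "timestamp")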
Example #36
    def download(self, output, sleep=False, maxretry=False,
                 datasource='ESA_COAH'):
        if self._products_df_sorted is None:
            return

        create_dir(output)
        gs.message(_("Downloading data into <{}>...").format(output))
        if datasource == 'USGS_EE':
            from landsatxplore.earthexplorer import EarthExplorer
            from landsatxplore.errors import EarthExplorerError
            from zipfile import ZipFile
            ee_login = False
            while ee_login is False:
                # avoid login conflict in possible parallel execution
                try:
                    ee = EarthExplorer(self._user, self._password)
                    ee_login = True
                except EarthExplorerError as e:
                    time.sleep(1)
            for idx in range(len(self._products_df_sorted['entity_id'])):
                scene = self._products_df_sorted['entity_id'][idx]
                identifier = self._products_df_sorted['display_id'][idx]
                zip_file = os.path.join(output, '{}.zip'.format(identifier))
                gs.message(_("Downloading {}...").format(identifier))
                try:
                    ee.download(identifier=identifier, output_dir=output, timeout=600)
                except EarthExplorerError as e:
                    gs.fatal(_(e))
                # extract .zip to get "usual" .SAFE
                with ZipFile(zip_file, 'r') as zf:
                    safe_name = zf.namelist()[0].split('/')[0]
                    outpath = os.path.join(output, safe_name)
                    zf.extractall(path=output)
                gs.message(_("Downloaded to <{}>").format(outpath))
                try:
                    os.remove(zip_file)
                except Exception as e:
                    gs.warning(_("Unable to remove {0}:{1}").format(
                        zip_file, e))

        elif datasource == "ESA_COAH":
            for idx in range(len(self._products_df_sorted['uuid'])):
                gs.message('{} -> {}.SAFE'.format(
                    self._products_df_sorted['uuid'][idx],
                    os.path.join(output, self._products_df_sorted['identifier'][idx])
                ))
                # download
                out = self._api.download(self._products_df_sorted['uuid'][idx],
                                         output)
                if sleep:
                    x = 1
                    online = out['Online']
                    while not online:
                        # sleep is in minutes so multiply by 60
                        time.sleep(int(sleep) * 60)
                        out = self._api.download(self._products_df_sorted['uuid'][idx],
                                                 output)
                        x += 1
                        if x > maxretry:
                            online = True
        elif datasource == 'GCS':
            for scene_id in self._products_df_sorted['identifier']:
                gs.message(_("Downloading {}...").format(scene_id))
                dl_code = download_gcs(scene_id, output)
                if dl_code == 0:
                    gs.message(_("Downloaded to {}").format(
                        os.path.join(output, '{}.SAFE'.format(scene_id))))
                else:
                    # remove incomplete file
                    del_folder = os.path.join(output,
                                              '{}.SAFE'.format(scene_id))
                    try:
                        shutil.rmtree(del_folder)
                    except Exception as e:
                        gs.warning(_("Unable to removed unfinished "
                                     "download {}".format(del_folder)))
Example #37
def extendLine(map, map_out, maxlen=200, scale=0.5, debug=False, verbose=1):
    #
    # map=Input map name
    # map_out=Output map with extensions
    # maxlen=Max length in map units that line can be extended (def=200)
    # scale=Maximum length of extension as proportion of original line, disabled if 0 (def=0.5)
    # vlen=number of vertices to look back in calculating line end direction (def=1)
    # Not sure if it is worth putting this in as parameter.
    #
    allowOverwrite = os.getenv('GRASS_OVERWRITE', '0') == '1'
    grass.info("map={}, map_out={}, maxlen={}, scale={}, debug={}".format(
        map, map_out, maxlen, scale, debug))
    vlen = 1  # not sure if this is worth putting in as parameter
    cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'parent', 'INTEGER'),
            (u'dend', 'TEXT'), (u'orgx', 'DOUBLE PRECISION'),
            (u'orgy', 'DOUBLE PRECISION'), (u'search_len', 'DOUBLE PRECISION'),
            (u'search_az', 'DOUBLE PRECISION'), (u'best_xid', 'INTEGER'),
            (u'near_x', 'DOUBLE PRECISION'), (u'near_y', 'DOUBLE PRECISION'),
            (u'other_cat', 'INTEGER'), (u'xtype', 'TEXT'),
            (u'x_len', 'DOUBLE PRECISION')]
    extend = VectorTopo('extend')
    if extend.exist():
        extend.remove()
    extend.open('w', tab_name='extend', tab_cols=cols)
    #
    # Go through the input map, looking at each line and its two nodes to find nodes
    # with only a single line starting/ending there - i.e. a dangle.
    # For each found, generate an extension line in the new map "extend"
    #
    inMap = VectorTopo(map)
    inMap.open('r')
    dangleCnt = 0
    tickLen = len(inMap)
    grass.info("Searching {} features for dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")
    for ln in inMap:
        ticker = (ticker + 1)
        grass.percent(ticker, tickLen, 5)
        if ln.gtype == 2:  # Only process lines
            for nd in ln.nodes():
                if nd.nlines == 1:  # We have a dangle
                    dangleCnt = dangleCnt + 1
                    vtx = min(len(ln) - 1, vlen)
                    if len([1 for _ in nd.lines(only_out=True)
                            ]) == 1:  # Dangle starting at node
                        dend = "head"
                        sx = ln[0].x
                        sy = ln[0].y
                        dx = sx - ln[vtx].x
                        dy = sy - ln[vtx].y
                    else:  # Dangle ending at node
                        dend = "tail"
                        sx = ln[-1].x
                        sy = ln[-1].y
                        dx = sx - ln[-(vtx + 1)].x
                        dy = sy - ln[-(vtx + 1)].y
                    endaz = math.atan2(dy, dx)
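                    # atan2(dy, dx) is the outward azimuth at the dangle end,
                    # so the extension continues the line beyond its end point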
                    if scale > 0:
                        extLen = min(ln.length() * scale, maxlen)
                    else:
                        extLen = maxlen
                    ex = extLen * math.cos(endaz) + sx
                    ey = extLen * math.sin(endaz) + sy
                    extLine = geo.Line([(sx, sy), (ex, ey)])
                    quiet = extend.write(extLine,
                                         (ln.cat, dend, sx, sy, extLen, endaz,
                                          0, 0, 0, 0, 'null', extLen))

    grass.info(
        "{} dangle nodes found, committing table extend".format(dangleCnt))
    extend.table.conn.commit()
    extend.close(build=True, release=True)
    inMap.close()

    #
    # Create two tables where extensions intersect;
    # 1. intersect with original lines
    # 2. intersect with self - to extract intersects between extensions
    #
    # First the intersects with original lines
    grass.info(
        "Searching for intersects between potential extensions and original lines"
    )
    table_isectIn = Table('isectIn',
                          connection=sqlite3.connect(get_path(path)))
    if table_isectIn.exist():
        table_isectIn.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to=map,
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectIn")
    # Will have touched the dangle it comes from, so remove those touches
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn INNER JOIN extend ON from_cat=cat WHERE near_cat=parent)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectIn ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectIn SET ntype = 'orig' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Now second self intersect table
    #
    grass.info("Searching for intersects of potential extensions")
    table_isectX = Table('isectX', connection=sqlite3.connect(get_path(path)))
    if table_isectX.exist():
        table_isectX.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to="extend",
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectX")
    # Obviously all extensions will intersect with themself, so remove those "intersects"
    run_command("db.execute",
                sql="DELETE FROM isectX WHERE from_cat = near_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectX ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectX SET ntype = 'ext' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Combine the two tables and add a few more attributes
    #
    run_command("db.execute",
                sql="INSERT INTO isectIn SELECT * FROM isectX",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    cols_isectIn = Columns('isectIn',
                           connection=sqlite3.connect(get_path(path)))
    cols_isectIn.add(['from_x'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['from_y'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['ext_len'], ['DOUBLE PRECISION'])
    # Get starting coordinate at the end of the dangle
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_x = (SELECT extend.orgx FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_y = (SELECT extend.orgy FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    # For each intersect point, calculate the distance along extension line from end of dangle
    # Would be nicer to do this in the database but SQLite dosen't support sqrt or exponents
    grass.info(
        "Calculating distances of intersects along potential extensions")
    cur = table_isectIn.execute(
        sql_code="SELECT rowid, from_x, from_y, nx, ny FROM isectIn")
    for row in cur.fetchall():
        rowid, fx, fy, nx, ny = row
        x_len = math.sqrt((fx - nx)**2 + (fy - ny)**2)
        sqlStr = "UPDATE isectIn SET ext_len={:.8f} WHERE rowid={:d}".format(
            x_len, rowid)
        table_isectIn.execute(sql_code=sqlStr)
    grass.verbose("Ready to commit isectIn changes")
    table_isectIn.conn.commit()
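    # note: sqlite3 can register Python functions on the connection, e.g.
    # table_isectIn.conn.create_function("sqrt", 1, math.sqrt), which would
    # let ext_len be set in a single SQL UPDATE instead of row by row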
    # Remove any zero distance from end of their dangle.
    # This happens when another extension intersects exactly at that point
    run_command("db.execute",
                sql="DELETE FROM isectIn WHERE ext_len = 0.0",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()

    # Go through the extensions and find the intersect closest to each origin.
    grass.info("Searching for closest intersect for each potential extension")

    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN bst INTEGER"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nrx DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nry DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN ocat TEXT"
    #    run_command("db.execute",
    #                sql = "INSERT OR REPLACE INTO extend_t1 (bst, nrx, nry, ocat) VALUES ((SELECT isectIn.rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=extend_t1.cat ORDER BY ext_len ASC LIMIT 1))",
    #               driver = "sqlite",
    #               database = "$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    grass.verbose("CREATE index")
    run_command("db.execute",
                sql="CREATE INDEX idx_from_cat ON isectIn (from_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE best_xid")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET best_xid = (SELECT isectIn.rowid FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE x_len")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET x_len = (SELECT ext_len FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_x")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_x = (SELECT nx FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_y")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_y = (SELECT ny FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE other_cat")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET other_cat = (SELECT near_cat FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE xtype")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET xtype = (SELECT ntype FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("DROP index")
    run_command("db.execute",
                sql="DROP INDEX idx_from_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("CREATE index on near_cat")
    run_command("db.execute",
                sql="CREATE INDEX idx_near_cat ON isectIn (near_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    quiet = table_isectIn.filters.select('rowid', 'ext_len', 'nx', 'ny',
                                         'near_cat', 'ntype')
    #    quiet=table_isectIn.filters.order_by(['ext_len ASC'])
    quiet = table_isectIn.filters.order_by('ext_len ASC')
    quiet = table_isectIn.filters.limit(1)
    table_extend = Table('extend', connection=sqlite3.connect(get_path(path)))

    # Code below was replaced by the commands above until the memory problem can be sorted
    #    table_extend.filters.select('cat')
    #    cur=table_extend.execute()
    #    updateCnt = 0
    #    for row in cur.fetchall():
    #        cat, = row
    #        quiet=table_isectIn.filters.where('from_cat={:d}'.format(cat))

    ##SELECT rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=32734 ORDER BY ext_len ASC LIMIT 1

    #        x_sect=table_isectIn.execute().fetchone()
    #        if x_sect is not None:
    #            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
    #            sqlStr="UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
    #            table_extend.execute(sql_code=sqlStr)
    ## Try periodic commit to avoid a crash!
    #            updateCnt = (updateCnt + 1) % 10000
    #            if updateCnt == 0:
    #              table_extend.conn.commit()
    grass.verbose("Ready to commit extend changes")
    table_extend.conn.commit()
    #
    # There may be extensions that crossed, and that intersection chosen by one but
    # not "recripricated" by the other.
    # Need to remove those possibilities and allow the jilted extension to re-search.
    #
    grass.verbose("Deleting intersects already resolved")
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn JOIN extend ON near_cat=cat WHERE ntype='ext' AND xtype!='null')",  #"AND from_cat!=other_cat" no second chance!
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    grass.verbose("Deleting complete")

    # To find the jilted - need a copy of extensions that have found an
    # intersection (won't overwrite so drop first)
    grass.verbose(
        "Re-searching for mis-matched intersects between potential extensions")
    table_imatch = Table('imatch', connection=sqlite3.connect(get_path(path)))
    if table_imatch.exist():
        table_imatch.drop(force=True)
    wvar = "xtype!='null'"
    run_command(
        "db.copy",
        overwrite=True,
        quiet=True,
        from_driver="sqlite",
        from_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        from_table="extend",
        to_driver="sqlite",
        to_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        to_table="imatch",
        where=wvar)
    # Memory problems?
    if gc.isenabled():
        grass.verbose("Garbage collection enabled - forcing gc cycle")
        gc.collect()
    else:
        grass.verbose("Garbage collection not enabled")
    # Ensure tables are committed
    table_extend.conn.commit()
    table_imatch.conn.commit()
    table_isectIn.conn.commit()
    # Identify the jilted
    sqlStr = "SELECT extend.cat FROM extend JOIN imatch ON extend.other_cat=imatch.cat WHERE extend.xtype='ext' and extend.cat!=imatch.other_cat"
    cur = table_extend.execute(sql_code=sqlStr)
    updateCnt = 0
    for row in cur.fetchall():
        cat, = row
        grass.verbose("Reworking extend.cat={}".format(cat))
        quiet = table_isectIn.filters.where('from_cat={:d}'.format(cat))
        #print("SQL: {}".format(table_isectIn.filters.get_sql()))
        x_sect = table_isectIn.execute().fetchone(
        )  ## Problem here under modules
        if x_sect is None:
            sqlStr = "UPDATE extend SET best_xid=0, x_len=search_len, near_x=0, near_y=0, other_cat=0, xtype='null' WHERE cat={:d}".format(
                cat)
        else:
            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
            sqlStr = "UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(
                x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
        table_extend.execute(sql_code=sqlStr)
        ## Try periodic commit to avoid a crash!
        updateCnt = (updateCnt + 1) % 100
        if (updateCnt == 0):  # or (cat == 750483):
            grass.verbose(
                "XXXXXXXXXXX Committing table_extend XXXXXXXXXXXXXXXXXXXXXX")
            table_extend.conn.commit()

    grass.verbose("Committing adjustments to table extend")
    table_extend.conn.commit()
    #
    # For debugging, create a map with the chosen intersect points
    #
    if debug:
        wvar = "xtype!='null' AND x_len!=0"
        #        print(wvar)
        run_command(
            "v.in.db",
            overwrite=True,
            quiet=True,
            table="extend",
            driver="sqlite",
            database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
            x="near_x",
            y="near_y",
            key="cat",
            where=wvar,
            output="chosen")
    #
    # Finally adjust the dangle lines in the input map - use a copy (map_out) if requested
    #
    if map_out:
        run_command("g.copy",
                    overwrite=allowOverwrite,
                    quiet=True,
                    vector=map + "," + map_out)
    else:  # Otherwise just modify the original dataset (map)
        if allowOverwrite:
            grass.warning("Modifying vector map ({})".format(map))
            map_out = map
        else:
            grass.error(
                "Use the --o switch to modify the input vector map ({})".format(
                    map))
            return 1
    #
    # Get info for lines that need extending
    table_extend.filters.select(
        'parent, dend, near_x, near_y, search_az, xtype')
    table_extend.filters.where("xtype!='null'")
    extLines = table_extend.execute().fetchall()
    cat_mods = [ext[0] for ext in extLines]
    tickLen = len(cat_mods)
    grass.info("Extending {} dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")

    # Open up the map_out copy (or the original) and work through looking for lines that need modifying
    inMap = VectorTopo(map_out)
    inMap.open('rw', tab_name=map_out)

    for ln_idx in range(len(inMap)):
        ln = inMap.read(ln_idx + 1)
        if ln.gtype == 2:  # Only process lines
            while ln.cat in cat_mods:  # Note: could be 'head' and 'tail'
                ticker = (ticker + 1)
                grass.percent(ticker, tickLen, 5)
                cat_idx = cat_mods.index(ln.cat)
                cat, dend, nx, ny, endaz, xtype = extLines.pop(cat_idx)
                dump = cat_mods.pop(cat_idx)
                if xtype == 'orig':  # Overshoot by 0.1 as break lines is unreliable
                    nx = nx + 0.1 * math.cos(endaz)
                    ny = ny + 0.1 * math.sin(endaz)
                newEnd = geo.Point(x=nx, y=ny, z=None)
                if dend == 'head':
                    ln.insert(0, newEnd)
                else:  # 'tail'
                    ln.append(newEnd)
                quiet = inMap.rewrite(ln_idx + 1, ln)
        else:
            quiet = inMap.delete(ln_idx + 1)


        ## Try periodic commit and garbage collection to avoid a crash!
        if (ln_idx % 1000) == 0:
            #           inMap.table.conn.commit()  - no such thing - Why??
            if gc.isenabled():
                quiet = gc.collect()

    inMap.close(build=True, release=True)
    grass.message("v.extendlines completing")
    #
    # Clean up temporary tables and maps
    #
    if not debug:
        table_isectIn.drop(force=True)
        table_isectX.drop(force=True)
        table_imatch.drop(force=True)
        extend.remove()
        chosen = VectorTopo('chosen')
        if chosen.exist():
            chosen.remove()
    return 0
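
The geometric core of extendLine() is compact. As a standalone sketch (hypothetical helper mirroring the head/tail logic above):

import math

def extension_endpoint(sx, sy, dx, dy, length):
    # (sx, sy) is the dangle end point; (dx, dy) points outward from an
    # interior vertex toward that end, as computed in the loop above
    az = math.atan2(dy, dx)
    return sx + length * math.cos(az), sy + length * math.sin(az)

# a dangle ending at (100, 50) heading due east, extended by 10 map units:
# extension_endpoint(100, 50, 1, 0, 10) -> (110.0, 50.0)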
Example #38
def main():

    # Temporary map names
    global tmp, t, mapset
    tmp = {}
    mapset = gscript.gisenv()["MAPSET"]
    mapset2 = "@{}".format(mapset)
    processid = os.getpid()
    processid = str(processid)
    tmp["shadow_temp"] = "shadow_temp" + processid
    tmp["cloud_v"] = "cloud_v_" + processid
    tmp["shadow_temp_v"] = "shadow_temp_v_" + processid
    tmp["shadow_temp_mask"] = "shadow_temp_mask_" + processid
    tmp["centroid"] = "centroid_" + processid
    tmp["dissolve"] = "dissolve_" + processid
    tmp["delcat"] = "delcat_" + processid
    tmp["addcat"] = "addcat_" + processid
    tmp["cl_shift"] = "cl_shift_" + processid
    tmp["overlay"] = "overlay_" + processid

    # Check temporary map names are not existing maps
    for key, value in tmp.items():
        if gscript.find_file(value, element="vector", mapset=mapset)["file"]:
            gscript.fatal(("Temporary vector map <{}> already exists.").format(value))
        if gscript.find_file(value, element="cell", mapset=mapset)["file"]:
            gscript.fatal(("Temporary raster map <{}> already exists.").format(value))

    # Input files
    mtd_file = options["mtd_file"]
    metadata_file = options["metadata"]
    bands = {}
    error_msg = "Syntax error in the txt file. See the manual for further information about the right syntax."
    if options["input_file"] == "":
        bands["blue"] = options["blue"]
        bands["green"] = options["green"]
        bands["red"] = options["red"]
        bands["nir"] = options["nir"]
        bands["nir8a"] = options["nir8a"]
        bands["swir11"] = options["swir11"]
        bands["swir12"] = options["swir12"]
    else:
        txt_bands = []
        with open(options["input_file"], "r") as input_file:
            for line in input_file:
                a = line.split("=")
                if len(a) != 2:
                    gscript.fatal(error_msg)
                elif a[0] == "MTD_TL.xml" and not mtd_file:
                    mtd_file = a[1].strip()
                elif a[0] == "metadata" and not metadata_file:
                    metadata_file = a[1].strip()
                elif a[0] in [
                    "blue",
                    "green",
                    "red",
                    "nir",
                    "nir8a",
                    "swir11",
                    "swir12",
                ]:
                    txt_bands.append(a[0])
                    bands[a[0]] = a[1].strip()
            if len(txt_bands) < 7:
                gscript.fatal(
                    (
                        "One or more bands are missing in the input text file.\n Only these bands have been found: {}"
                    ).format(txt_bands)
                )
            if mtd_file and metadata_file != "default":
                gscript.fatal(
                    (
                        "Metadata json file and mtd_file are both given as input text files.\n Only one of these should be specified."
                    )
                )

    # we want cloud and shadows: check input and output for shadow mask
    if not flags["c"]:
        if mtd_file != "":
            if not os.path.isfile(mtd_file):
                gscript.fatal(
                    "Metadata file <{}> not found. Please select the right .xml file".format(
                        mtd_file
                    )
                )
        elif metadata_file == "default":
            # use default json
            env = gscript.gisenv()
            json_standard_folder = os.path.join(
                env["GISDBASE"], env["LOCATION_NAME"], env["MAPSET"], "cell_misc"
            )
            for key, value in bands.items():
                metadata_file = os.path.join(
                    json_standard_folder, value, "description.json"
                )
                if os.path.isfile(metadata_file):
                    break
                else:
                    metadata_file = None
            if not metadata_file:
                gscript.fatal(
                    "No default metadata files found. Did you use -j in i.sentinel.import?"
                )
        elif metadata_file:
            if not os.path.isfile(metadata_file):
                gscript.fatal(
                    "Metadata file <{}> not found. Please select the right file".format(
                        metadata_file
                    )
                )
        else:
            gscript.fatal(
                "Metadata (file) is required for shadow mask computation. Please specify it"
            )

    d = "double"
    f_bands = {}
    scale_fac = options["scale_fac"]
    cloud_threshold = options["cloud_threshold"]
    shadow_threshold = options["shadow_threshold"]
    raster_max = {}
    check_cloud = 1  # by default the procedure finds clouds
    check_shadow = 1  # by default the procedure finds shadows

    if options["cloud_raster"]:
        cloud_raster = options["cloud_raster"]
    else:
        tmp["cloud_def"] = "cloud_def" + processid
        cloud_raster = tmp["cloud_def"]
    if options["cloud_mask"]:
        cloud_mask = options["cloud_mask"]
        if "." in options["cloud_mask"]:
            gscript.fatal(
                "Name <{}> for cloud_mask output "
                "is not SQL compliant".format(options["cloud_mask"])
            )
    else:
        tmp["cloud_mask"] = "cloud_mask" + processid
        cloud_mask = tmp["cloud_mask"]
    if options["shadow_mask"]:
        shadow_mask = options["shadow_mask"]
        if "." in options["shadow_mask"]:
            gscript.fatal(
                "Name <{}> for shadow_mask output "
                "is not SQL compliant".format(options["shadow_mask"])
            )
    else:
        tmp["shadow_mask"] = "shadow_mask" + processid
        shadow_mask = tmp["shadow_mask"]
    shadow_raster = options["shadow_raster"]

    # Check if all required input bands are specified in the text file
    if (
        bands["blue"] == ""
        or bands["green"] == ""
        or bands["red"] == ""
        or bands["nir"] == ""
        or bands["nir8a"] == ""
        or bands["swir11"] == ""
        or bands["swir12"] == ""
    ):
        gscript.fatal(
            "All input bands (blue, green, red, nir, nir8a, swir11, swir12) are required"
        )

    # Check if input bands exist
    for key, value in bands.items():
        if not gscript.find_file(value, element="cell", mapset=mapset)["file"]:
            gscript.fatal(("Raster map <{}> not found.").format(value))

    if flags["r"]:
        gscript.use_temp_region()
        gscript.run_command("g.region", rast=bands.values(), flags="a")
        gscript.message(
            _(
                "--- The computational region has been temporarily set to image max extent ---"
            )
        )
    else:
        gscript.warning(
            _(
                "All subsequent operations will be limited to the current computational region"
            )
        )

    if flags["s"]:
        gscript.message(_("--- Start rescaling bands ---"))
        check_b = 0
        for key, b in bands.items():
            gscript.message(b)
            b = gscript.find_file(b, element="cell")["name"]
            tmp["band_double{}".format(check_b)] = "{}_{}".format(b, d)
            band_double = tmp["band_double{}".format(check_b)]
            gscript.mapcalc(
                "{r} = 1.0 * ({b})/{scale_fac}".format(
                    r=(band_double), b=b, scale_fac=scale_fac
                )
            )
            f_bands[key] = band_double
            check_b += 1
        gscript.message(f_bands.values())
        gscript.message(_("--- All bands have been rescaled ---"))
    else:
        gscript.warning(_("No rescale factor has been applied"))
        for key, b in bands.items():
            if (
                gscript.raster_info(b)["datatype"] != "DCELL"
                and gscript.raster_info(b)["datatype"] != "FCELL"
            ):
                gscript.fatal("Raster maps must be DCELL o FCELL")
            else:
                f_bands = bands

    gscript.message(_("--- Start computing maximum values of bands ---"))
    for key, fb in f_bands.items():
        gscript.message(fb)
        stats = gscript.parse_command("r.univar", flags="g", map=fb)
        raster_max[key] = float(stats["max"])
    gscript.message("--- Computed maximum value: {} ---".format(raster_max.values()))
    gscript.message(_("--- Statistics have been computed! ---"))

    # Start of Clouds detection  (some rules from literature)
    gscript.message(_("--- Start clouds detection procedure ---"))
    gscript.message(_("--- Computing cloud mask... ---"))
    first_rule = "(({} > (0.08*{})) && ({} > (0.08*{})) && ({} > (0.08*{})))".format(
        f_bands["blue"],
        raster_max["blue"],
        f_bands["green"],
        raster_max["green"],
        f_bands["red"],
        raster_max["red"],
    )
    second_rule = "(({} < ((0.08*{})*1.5)) && ({} > {}*1.3))".format(
        f_bands["red"], raster_max["red"], f_bands["red"], f_bands["swir12"]
    )
    third_rule = "(({} < (0.1*{})) && ({} < (0.1*{})))".format(
        f_bands["swir11"], raster_max["swir11"], f_bands["swir12"], raster_max["swir12"]
    )
    fourth_rule = "(if({} == max({}, 2 * {}, 2 * {}, 2 * {})))".format(
        f_bands["nir8a"],
        f_bands["nir8a"],
        f_bands["blue"],
        f_bands["green"],
        f_bands["red"],
    )
    fifth_rule = "({} > 0.2)".format(f_bands["blue"])
    cloud_rules = (
        "({} == 1) && ({} == 0) && ({} == 0) && ({} == 0) && ({} == 1)".format(
            first_rule, second_rule, third_rule, fourth_rule, fifth_rule
        )
    )
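    # a pixel is classified as cloud only when rules 1 and 5 hold and
    # rules 2-4 do not; cloud cells become 0, all other cells null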
    expr_c = "{} = if({}, 0, null())".format(cloud_raster, cloud_rules)
    gscript.mapcalc(expr_c, overwrite=True)
    gscript.message(_("--- Converting raster cloud mask into vector map ---"))
    gscript.run_command(
        "r.to.vect", input=cloud_raster, output=tmp["cloud_v"], type="area", flags="s"
    )
    info_c = gscript.parse_command("v.info", map=tmp["cloud_v"], flags="t")
    if info_c["areas"] == "0":
        gscript.warning(_("No clouds have been detected"))
        check_cloud = 0
    else:
        gscript.message(_("--- Cleaning geometries ---"))
        gscript.run_command(
            "v.clean",
            input=tmp["cloud_v"],
            output=cloud_mask,
            tool="rmarea",
            threshold=cloud_threshold,
        )
        info_c_clean = gscript.parse_command("v.info", map=cloud_mask, flags="t")
        if info_c_clean["areas"] == "0":
            gscript.warning(_("No clouds have been detected"))
            check_cloud = 0
        else:
            check_cloud = 1
    gscript.message(_("--- Finish cloud detection procedure ---"))
    # End of Clouds detection

    if options["shadow_mask"] or options["shadow_raster"]:
        # Start of shadows detection
        gscript.message(_("--- Start shadows detection procedure ---"))
        gscript.message(_("--- Computing shadow mask... ---"))
        sixth_rule = "((({} > {}) && ({} < {}) && ({} < 0.1) && ({} < 0.1)) \
        || (({} < {}) && ({} < {}) && ({} < 0.1) && ({} < 0.1) && ({} < 0.1)))".format(
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["nir"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["blue"],
            f_bands["nir"],
            f_bands["blue"],
            f_bands["swir12"],
            f_bands["nir"],
        )
        seventh_rule = "({} - {})".format(f_bands["green"], f_bands["blue"])
        shadow_rules = "(({} == 1) && ({} < 0.007))".format(sixth_rule, seventh_rule)
        expr_s = "{} = if({}, 0, null())".format(tmp["shadow_temp"], shadow_rules)
        gscript.mapcalc(expr_s, overwrite=True)
        gscript.message(_("--- Converting raster shadow mask into vector map ---"))
        gscript.run_command(
            "r.to.vect",
            input=tmp["shadow_temp"],
            output=tmp["shadow_temp_v"],
            type="area",
            flags="s",
            overwrite=True,
        )
        info_s = gscript.parse_command("v.info", map=tmp["shadow_temp_v"], flags="t")
        if info_s["areas"] == "0":
            gscript.warning(_("No shadows have been detected"))
            check_shadow = 0
        else:
            gscript.message(_("--- Cleaning geometries ---"))
            gscript.run_command(
                "v.clean",
                input=tmp["shadow_temp_v"],
                output=tmp["shadow_temp_mask"],
                tool="rmarea",
                threshold=shadow_threshold,
            )
            info_s_clean = gscript.parse_command(
                "v.info", map=tmp["shadow_temp_mask"], flags="t"
            )
            if info_s_clean["areas"] == "0":
                gscript.warning(_("No shadows have been detected"))
                check_shadow = 0
            else:
                check_shadow = 1
            gscript.message(_("--- Finish Shadows detection procedure ---"))
            # End of shadows detection

            # START shadows cleaning Procedure (remove shadows misclassification)
            # Start shadow mask preparation
            if check_shadow == 1 and check_cloud == 1:
                gscript.message(
                    _("--- Start removing misclassification from the shadow mask ---")
                )
                gscript.message(_("--- Data preparation... ---"))
                gscript.run_command(
                    "v.centroids",
                    input=tmp["shadow_temp_mask"],
                    output=tmp["centroid"],
                    quiet=True,
                )
                gscript.run_command(
                    "v.db.droptable", map=tmp["centroid"], flags="f", quiet=True
                )
                gscript.run_command(
                    "v.db.addtable", map=tmp["centroid"], columns="value", quiet=True
                )
                gscript.run_command(
                    "v.db.update",
                    map=tmp["centroid"],
                    layer=1,
                    column="value",
                    value=1,
                    quiet=True,
                )
                gscript.run_command(
                    "v.dissolve",
                    input=tmp["centroid"],
                    column="value",
                    output=tmp["dissolve"],
                    quiet=True,
                )
                gscript.run_command(
                    "v.category",
                    input=tmp["dissolve"],
                    type="point,line,boundary,centroid,area,face,kernel",
                    output=tmp["delcat"],
                    option="del",
                    cat=-1,
                    quiet=True,
                )
                gscript.run_command(
                    "v.category",
                    input=tmp["delcat"],
                    type="centroid,area",
                    output=tmp["addcat"],
                    option="add",
                    quiet=True,
                )
                gscript.run_command(
                    "v.db.droptable", map=tmp["addcat"], flags="f", quiet=True
                )
                gscript.run_command(
                    "v.db.addtable", map=tmp["addcat"], columns="value", quiet=True
                )

                # End shadow mask preparation
                # Start cloud mask preparation

                gscript.run_command(
                    "v.db.droptable", map=cloud_mask, flags="f", quiet=True
                )
                gscript.run_command(
                    "v.db.addtable", map=cloud_mask, columns="value", quiet=True
                )

                # End cloud mask preparation
                # Shift cloud mask using dE and dN
                # Start reading mean sun zenith and azimuth from xml file to compute
                # dE and dN automatically
                gscript.message(
                    _(
                        "--- Reading mean sun zenith and azimuth from metadata file to compute clouds shift ---"
                    )
                )
                if mtd_file != "":
                    try:
                        xml_tree = et.parse(mtd_file)
                        root = xml_tree.getroot()
                        ZA = []
                        try:
                            for elem in root[1]:
                                for subelem in elem[1]:
                                    ZA.append(subelem.text)
                            if ZA == ["0", "0"]:
                                zenith_val = (
                                    root[1]
                                    .find("Tile_Angles")
                                    .find("Sun_Angles_Grid")
                                    .find("Zenith")
                                    .find("Values_List")
                                )
                                ZA[0] = numpy.mean(
                                    [
                                        numpy.array(
                                            elem.text.split(" "), dtype=float
                                        )
                                        for elem in zenith_val
                                    ]
                                )
                                azimuth_val = (
                                    root[1]
                                    .find("Tile_Angles")
                                    .find("Sun_Angles_Grid")
                                    .find("Azimuth")
                                    .find("Values_List")
                                )
                                ZA[1] = numpy.mean(
                                    [
                                        numpy.array(
                                            elem.text.split(" "), dtype=float
                                        )
                                        for elem in azimuth_val
                                    ]
                                )
                            z = float(ZA[0])
                            a = float(ZA[1])
                            gscript.message(
                                "--- the mean sun Zenith is: {:.3f} deg ---".format(z)
                            )
                            gscript.message(
                                "--- the mean sun Azimuth is: {:.3f} deg ---".format(a)
                            )
                        except:
                            gscript.fatal(
                                "The selected input metadata file is not the right one. Please check the manual page."
                            )
                    except:
                        gscript.fatal(
                            "The selected input metadata file is not an .xml file. Please check the manual page."
                        )
                elif metadata_file != "":
                    with open(metadata_file) as json_file:
                        data = json.load(json_file)
                    z = float(data["MEAN_SUN_ZENITH_ANGLE"])
                    a = float(data["MEAN_SUN_AZIMUTH_ANGLE"])

                # Stop reading mean sun zenith and azimuth from xml file to compute dE
                # and dN automatically
                # Start computing the east and north shift for clouds and the
                # overlapping area between clouds and shadows at steps of 100m
                gscript.message(
                    _(
                        "--- Start computing the east and north clouds shift at steps of 100m of clouds height---"
                    )
                )
                H = 1000
                dH = 100
                HH = []
                dE = []
                dN = []
                AA = []
                while H <= 4000:
                    z_deg_to_rad = math.radians(z)
                    tan_Z = math.tan(z_deg_to_rad)
                    a_deg_to_rad = math.radians(a)
                    cos_A = math.cos(a_deg_to_rad)
                    sin_A = math.sin(a_deg_to_rad)

                    E_shift = -H * tan_Z * sin_A
                    N_shift = -H * tan_Z * cos_A
                    dE.append(E_shift)
                    dN.append(N_shift)

                    HH.append(H)
                    H = H + dH

                    gscript.run_command(
                        "v.transform",
                        input=cloud_mask,
                        output=tmp["cl_shift"],
                        xshift=E_shift,
                        yshift=N_shift,
                        overwrite=True,
                        quiet=True,
                        stderr=subprocess.DEVNULL,
                    )
                    gscript.run_command(
                        "v.overlay",
                        ainput=tmp["addcat"],
                        binput=tmp["cl_shift"],
                        operator="and",
                        output=tmp["overlay"],
                        overwrite=True,
                        quiet=True,
                        stderr=subprocess.DEVNULL,
                    )
                    gscript.run_command(
                        "v.db.addcolumn",
                        map=tmp["overlay"],
                        columns="area double",
                        quiet=True,
                    )
                    area = gscript.read_command(
                        "v.to.db",
                        map=tmp["overlay"],
                        option="area",
                        columns="area",
                        flags="c",
                        quiet=True,
                    )
                    area2 = gscript.parse_key_val(area, sep="|")
                    AA.append(float(area2["total area"]))

                # Find the maximum overlapping area between clouds and shadows
                index_maxAA = numpy.argmax(AA)

                # Clouds are shifted using the clouds height corresponding to the
                # maximum overlapping area then are intersected with shadows
                gscript.run_command(
                    "v.transform",
                    input=cloud_mask,
                    output=tmp["cl_shift"],
                    xshift=dE[index_maxAA],
                    yshift=dN[index_maxAA],
                    overwrite=True,
                    quiet=True,
                )
                gscript.run_command(
                    "v.select",
                    ainput=tmp["addcat"],
                    atype="point,line,boundary,centroid,area",
                    binput=tmp["cl_shift"],
                    btype="point,line,boundary,centroid,area",
                    output=shadow_mask,
                    operator="intersects",
                    quiet=True,
                )
                if gscript.find_file(name=shadow_mask, element="vector")["file"]:
                    info_cm = gscript.parse_command(
                        "v.info", map=shadow_mask, flags="t"
                    )
                else:
                    info_cm = None
                    gscript.warning(_("No cloud shadows detected"))

                if options["shadow_raster"] and info_cm:
                    if info_cm["areas"] > "0":
                        gscript.run_command(
                            "v.to.rast",
                            input=tmp["shadow_temp_mask"],
                            output=shadow_raster,
                            use="val",
                        )
                    else:
                        gscript.warning(_("No cloud shadows detected"))

                gscript.message(
                    "--- the estimated clouds height is: {} m ---".format(
                        HH[index_maxAA]
                    )
                )
                gscript.message(
                    "--- the estimated east shift is: {:.2f} m ---".format(
                        dE[index_maxAA]
                    )
                )
                gscript.message(
                    "--- the estimated north shift is: {:.2f} m ---".format(
                        dN[index_maxAA]
                    )
                )
            else:
                if options["shadow_raster"]:
                    gscript.run_command(
                        "v.to.rast",
                        input=tmp["shadow_temp_mask"],
                        output=shadow_raster,
                        use="val",
                    )
                if options["shadow_mask"]:
                    gscript.run_command(
                        "g.rename", vector=(tmp["shadow_temp_mask"], shadow_mask)
                    )
                gscript.warning(
                    _(
                        "The misclassification removal procedure for the shadow mask was not performed since no clouds have been detected"
                    )
                )
    else:
        if shadow_mask != "":
            gscript.warning(_("No shadow mask will be computed"))
Example #39
def main():
    input_map = opt['input']
    clip_map = opt['clip']
    output_map = opt['output']

    flag_dissolve = flg['d']
    flag_region = flg['r']

    # ======================================== #
    # ========== INPUT MAP TOPOLOGY ========== #
    # ======================================== #
    vinfo = grass.vector_info_topo(input_map)

    # ==== only points ==== #
    if (vinfo['points'] > 0 and vinfo['lines'] == 0 and vinfo['areas'] == 0):

        # ==================================== #
        # ========== CLIP BY REGION ========== #
        # ==================================== #
        if (flag_region):
            clip_by_region(input_map, output_map, clip_select)

        # ================================== #
        # ========== DEFAULT CLIP ========== #
        # ================================== #
        else:
            section_message("Clipping.")
            # perform clipping
            clip_select(input_map, clip_map, output_map)

    # ==== lines, areas, lines + areas ==== #
    # ==== points + areas, points + lines, points + areas + lines ==== #
    else:
        if (vinfo['points'] > 0):
            grass.warning("Input map contains multiple geometry, "
                          "only lines and areas will be clipped.")

        # ==================================== #
        # ========== CLIP BY REGION ========== #
        # ==================================== #
        if (flag_region):
            clip_by_region(input_map, output_map, clip_overlay)

        # ===================================================== #
        # ========== CLIP WITHOUT DISSOLVED CLIP MAP ========== #
        # ===================================================== #
        elif (flag_dissolve):
            section_message("Clipping without dissolved clip map.")
            clip_overlay(input_map, clip_map, output_map)

        # ========================================================== #
        # ========== DEFAULT CLIP WITH DISSOLVED CLIP MAP ========== #
        # ========================================================== #
        else:
            section_message("Default clipping with dissolved clip map.")

            # setup temporary map
            temp_clip_map = '%s_%s' % ("temp", str(os.getpid()))
            TMP.append(temp_clip_map)

            # dissolve clip_map
            grass.run_command('v.dissolve',
                              input=clip_map,
                              output=temp_clip_map)

            # perform clipping
            clip_overlay(input_map, temp_clip_map, output_map)

    # ======================================== #
    # ========== OUTPUT MAP TOPOLOGY ========== #
    # ======================================== #
    vinfo = grass.vector_info_topo(output_map)
    if vinfo['primitives'] == 0:
        grass.warning("Output map is empty.")

    return 0
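
clip_select() and clip_overlay() are defined elsewhere in the module; minimal sketches of what they presumably wrap (v.select with operator='overlap' for points, v.overlay with operator='and' for lines and areas):

import grass.script as grass

def clip_select(input_map, clip_map, output_map):
    # points: keep features overlapping the clip areas (sketch)
    grass.run_command('v.select', ainput=input_map, binput=clip_map,
                      output=output_map, operator='overlap')

def clip_overlay(input_map, clip_map, output_map):
    # lines/areas: intersect the input with the clip areas (sketch)
    grass.run_command('v.overlay', ainput=input_map, binput=clip_map,
                      operator='and', output=output_map)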
Example #40
import os
import sys
import grass.script as gs

sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
from smoderp2d import GrassRunner
from smoderp2d.providers.base import CompType
from smoderp2d.exceptions import ProviderError

if __name__ == "__main__":
    options, flags = gs.parser()

    if flags['d'] and not options['pickle_file']:
        gs.fatal("Required parameter <{}> not set".format('pickle_file'))
    if options['pickle_file'] and not flags['d']:
        gs.warning("No pickle file will be generated. Flag -{} not given".format('d'))

    try:
        runner = GrassRunner()

        runner.set_options(options)
        if flags['d']:
            runner.set_comptype(
                comp_type=CompType.dpre,
                data_file=options['pickle_file']
            )

        sys.exit(
            runner.run()
        )
    except ProviderError as e:
        # assumed handler (the excerpt ends here): report the error and abort
        gs.fatal(e)
Example #41
def main():

    pan = options['pan']
    msxlst = options['msx'].split(',')
    outputsuffix = options['suffix']
    custom_ratio = options['ratio']
    center = options['center']
    center2 = options['center2']
    modulation = options['modulation']
    modulation2 = options['modulation2']

    if options['trim']:
        trimming_factor = float(options['trim'])
    else:
        trimming_factor = False

    histogram_match = flags['l']
    second_pass = flags['2']
    color_match = flags['c']

    #    # Check & warn user about "ns == ew" resolution of current region ======
    #    region = grass.region()
    #    nsr = region['nsres']
    #    ewr = region['ewres']
    #
    #    if nsr != ewr:
    #        msg = (">>> Region's North:South ({ns}) and East:West ({ew}) "
    #               "resolutions do not match!")
    #        msg = msg.format(ns=nsr, ew=ewr)
    #        grass.message(msg, flag='w')

    mapset = grass.gisenv()['MAPSET']  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    # pygrass.raster.abstract.Info can not cope with
    # Info(name@mapset, mapset)
    # -> fully qualified names and input images from other mapsets are
    # not supported
    # -> use r.info via raster_info

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        # images[img] = Info(img, mapset)
        # images[img].read()
        try:
            images[img] = grass.raster_info(img)
        except Exception:
            grass.fatal(_("Input raster <{m}> not found").format(m=img))

    panres = images[pan]['nsres']  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    if flags['a']:
        run('g.region', align=pan)  # Align region to the Pan grid
    else:
        run('g.region', res=panres)  # Respect extent, change resolution
        grass.message(
            "|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        grass.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- why don't all r.* modules do this?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        grass.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            grass.warning('Using custom ratio, overriding standard method!')

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            grass.message("   > Retrieving image resolutions")

            msxres = images[msx]['nsres']

            # check
            if panres == msxres:
                msg = ("The Panchromatic's image resolution ({pr}) "
                       "equals to the Multi-Spectral's one ({mr}). "
                       "Something is probably not right! "
                       "Please check your input images.")
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
            msg_ratio = ('   >> Resolution ratio '
                         'low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}')
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            grass.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            grass.message(
                "   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                "   >>> If you insist, force it via the <ratio> option!",
                flag='i')
            second_pass = False

        #
        # 2. High Pass Filtering
        #

        grass.message('\n|2 High Pass Filtering the Panchromatic Image')

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = 'tmp.' + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = '{tmp}_pan_hpf'.format(tmp=tmp)  # HPF image
        tmp_msx_blnr = '{tmp}_msx_blnr'.format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = '{tmp}_msx_hpf'.format(tmp=tmp)  # Fused image
        tmp_msx_mapcalc = tmp_msx_hpf + '_mapcalc'
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run('r.mfilter',
            input=pan,
            filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title='High Pass Filtered Panchromatic image',
            overwrite=True)

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            # 2nd Pass HPF image
            tmp_pan_hpf_2 = '{tmp}_pan_hpf_2'.format(tmp=tmp)
            # 2nd Pass ASCII filter
            tmp_hpf_matrix_2 = grass.tempfile()
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run('r.mfilter',
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title='2-High-Pass Filtered Panchromatic Image',
                overwrite=True)

        #
        # 3. Upsampling low resolution image
        #

        grass.message("\n|3 Upsampling (bilinearly) low resolution image")

        run('r.resamp.interp',
            method='bilinear',
            input=msx,
            output=tmp_msx_blnr,
            overwrite=True)

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        grass.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " \
            "Modulating Factor"
        grass.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        grass.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx,
                                                               sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        grass.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        grass.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)
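        # per the message above: weighting = msx_sd / hpf_sd * modulator,
        # e.g. 20.0 / 5.0 * 0.25 = 1.0 (illustrative numbers)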

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        grass.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = '{hpf} = {msx} + {pan} * {wgt}'
        fusion = fusion.format(hpf=tmp_msx_hpf,
                               msx=tmp_msx_blnr,
                               pan=tmp_pan_hpf,
                               wgt=weighting)
        grass.mapcalc(fusion)

        # command history
        hst = 'Weighting applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}'
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            grass.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            grass.message(
                "   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = '   >> 2nd Pass Modulating Factor: {m:.2f}'
            grass.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            grass.message("\n|5+ Adding small-kernel-based weighted "
                          "2nd HPFi back to fused image")

            add_back = '{final} = {msx_hpf} + {pan_hpf} * {wgt}'
            # r.mapcalc: do not use input as output
            add_back = add_back.format(final=tmp_msx_mapcalc,
                                       msx_hpf=tmp_msx_hpf,
                                       pan_hpf=tmp_pan_hpf_2,
                                       wgt=weighting_2)
            grass.mapcalc(add_back)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(
                hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Spectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            # technically, this is not histogram matching but
            # normalizing to the input's mean + stddev
            grass.message("\n|+ Matching histogram of Pansharpened image "
                          "to %s" % (msx))

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            msx_info = images[msx]
            outfn = 'round'
            if msx_info['datatype'] == 'FCELL':
                outfn = 'float'
            elif msx_info['datatype'] == 'DCELL':
                outfn = 'double'

            # expression for mapcalc
            lhm = "{out} = {outfn}(double({hpf} - {hpfavg}) / {hpfsd} * " \
                          "{msxsd} + {msxavg})"
            # r.mapcalc: do not use input as output
            lhm = lhm.format(out=tmp_msx_mapcalc,
                             outfn=outfn,
                             hpf=tmp_msx_hpf,
                             hpfavg=msx_hpf_avg,
                             hpfsd=msx_hpf_sd,
                             msxsd=msx_sd,
                             msxavg=msx_avg)

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # snap outliers to input range
            snapout = "{out} = {outfn}(if({hpf} < {oldmin}, {oldmin}, " \
                              "if({hpf} > {oldmax}, {oldmax}, {hpf})))"
            snapout = snapout.format(out=tmp_msx_mapcalc,
                                     outfn=outfn,
                                     hpf=tmp_msx_hpf,
                                     oldmin=msx_info['min'],
                                     oldmax=msx_info['max'])

            grass.mapcalc(snapout, quiet=True, overwrite=True)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)
        else:
            # scale result to input using quantiles
            grass.message("\n|+ Quantile scaling of Pansharpened image "
                          "to %s" % (msx))

            msx_info = images[msx]
            outfn = 'round'
            if msx_info['datatype'] == 'FCELL':
                outfn = 'float'
            elif msx_info['datatype'] == 'DCELL':
                outfn = 'double'

            # quantile scaling
            percentiles = "10,50,90"
            allq = grass.read_command('r.quantile',
                                      input=msx,
                                      percentiles=percentiles,
                                      quiet=True)
            allq = allq.splitlines()
            msx_plo = float(allq[0].split(':')[2])
            msx_med = float(allq[1].split(':')[2])
            msx_phi = float(allq[2].split(':')[2])

            allq = grass.read_command('r.quantile',
                                      input=tmp_msx_hpf,
                                      percentiles=percentiles,
                                      quiet=True)
            allq = allq.splitlines()
            hpf_plo = float(allq[0].split(':')[2])
            hpf_med = float(allq[1].split(':')[2])
            hpf_phi = float(allq[2].split(':')[2])

            # scale factors
            sfplo = (msx_med - msx_plo) / (hpf_med - hpf_plo)
            sfphi = (msx_phi - msx_med) / (hpf_phi - hpf_med)
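            # e.g., msx quantiles (p10, p50, p90) = (40, 100, 180) and
            # hpf quantiles = (60, 110, 170) give
            # sfplo = (100 - 40) / (110 - 60) = 1.2 and
            # sfphi = (180 - 100) / (170 - 110) ~= 1.33 (illustrative numbers)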

            scale = "{out} = {outfn}(double({hpf} - {hpf_med}) * " \
                            "if({hpf} < {hpf_med}, {sfplo}, " \
                            "{sfphi}) + {msx_med})"
            scale = scale.format(out=tmp_msx_mapcalc,
                                 outfn=outfn,
                                 hpf=tmp_msx_hpf,
                                 hpf_med=hpf_med,
                                 sfplo=sfplo,
                                 sfphi=sfphi,
                                 msx_med=msx_med)
            grass.mapcalc(scale, quiet=True)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # snap outliers to input range
            snapout = "{out} = {outfn}(if({hpf} < {oldmin}, {oldmin}, " \
                              "if({hpf} > {oldmax}, {oldmax}, {hpf})))"
            snapout = snapout.format(out=tmp_msx_mapcalc,
                                     outfn=outfn,
                                     hpf=tmp_msx_hpf,
                                     oldmin=msx_info['min'],
                                     oldmax=msx_info['max'])

            grass.mapcalc(snapout, quiet=True, overwrite=True)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # update history string
            cmd_history.append("Linear Scaling: %s" % scale)

        if color_match:
            grass.message("\n|* Matching output to input color table")
            run('r.colors', map=tmp_msx_hpf, raster=msx)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = '\n|* Trimming output image border pixels by '
            msg += '{factor} times the low resolution\n'.format(factor=tf)
            nsew = '   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}'
            nsew = nsew.format(n=region['n'],
                               s=region['s'],
                               e=region['e'],
                               w=region['w'])
            msg += nsew

            grass.message(msg)

            # re-set borders (grass.region() returns a dict, so index by key)
            region['n'] -= tf * images[msx]['nsres']
            region['s'] += tf * images[msx]['nsres']
            region['e'] -= tf * images[msx]['ewres']
            region['w'] += tf * images[msx]['ewres']

            # communicate and act
            msg = '   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}'
            msg = msg.format(n=region['n'],
                             s=region['s'],
                             e=region['e'],
                             w=region['w'])
            grass.message(msg)

            # modify only the extent
            run('g.region',
                n=region['n'],
                s=region['s'],
                e=region['e'],
                w=region['w'])
            # r.mapcalc: do not use input as output
            trim = "{out} = {input}".format(out=tmp_msx_mapcalc,
                                            input=tmp_msx_hpf)
            grass.mapcalc(trim)
            run('g.remove', flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

        #
        # End of Algorithm

        # history entry
        run("r.support", map=tmp_msx_hpf, history="\n".join(cmd_history))

        # add suffix to basename & rename end product
        msx_name = "{base}{suffix}"
        msx_name = msx_name.format(base=msx.split('@')[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # restore region and print a visualisation hint
    grass.del_temp_region()  # restoring previous region settings
    grass.message("\n|! Original Region restored")
    grass.message(
        "\n>>> Hint, rebalancing colors (via i.colors.enhance) "
        "may improve appearance of RGB composites!",
        flag='i')
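The core of steps 4-5 above reduces to one weighted addition. Below is a minimal, hedged sketch of that arithmetic using NumPy arrays in place of GRASS rasters; the weighting rule follows the message printed in step 4 (StdDev(MSx) / StdDev(HPFi) * modulating factor), and the modulator value is an illustrative assumption, not the module's computed one.

import numpy as np

def hpf_fuse(msx_upsampled, pan_hpf, modulator=0.25):
    # Weighting = StdDev(MSx) / StdDev(HPFi) * Modulating Factor
    weight = msx_upsampled.std() / pan_hpf.std() * modulator
    # fused = upsampled MSx + weighted high-frequency detail from Pan
    return msx_upsampled + pan_hpf * weight

rng = np.random.default_rng(42)
msx = rng.normal(100.0, 20.0, (10, 10))  # stands in for tmp_msx_blnr
hpf = rng.normal(0.0, 5.0, (10, 10))     # stands in for tmp_pan_hpf
fused = hpf_fuse(msx, hpf)
print(fused.shape)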
Example #42
def main():
    global output, tmp

    input = options['input']
    output = options['output']
    layer = options['layer']
    column = options['column']

    # setup temporary file
    tmp = str(os.getpid())

    # does map exist?
    if not grass.find_file(input, element='vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % input)

    if not column:
        grass.warning(
            _("No '%s' option specified. Dissolving based on category values from layer <%s>."
              ) % ("column", layer))
        grass.run_command('v.extract',
                          flags='d',
                          input=input,
                          output=output,
                          type='area',
                          layer=layer)
    else:
        if int(layer) == -1:
            grass.warning(
                _("Invalid layer number (%d). "
                  "Parameter '%s' specified, assuming layer '1'.") %
                (int(layer), 'column'))
            layer = '1'
        try:
            coltype = grass.vector_columns(input, layer)[column]
        except KeyError:
            grass.fatal(_('Column <%s> not found') % column)

        if coltype['type'] not in ('INTEGER', 'SMALLINT', 'CHARACTER', 'TEXT'):
            grass.fatal(_("Key column must be of type integer or string"))

        f = grass.vector_layer_db(input, layer)

        table = f['table']

        tmpfile = '%s_%s' % (output, tmp)

        try:
            grass.run_command('v.reclass',
                              input=input,
                              output=tmpfile,
                              layer=layer,
                              column=column)
            grass.run_command('v.extract',
                              flags='d',
                              input=tmpfile,
                              output=output,
                              type='area',
                              layer=layer)
        except CalledModuleError as e:
            grass.fatal(
                _("Final extraction steps failed."
                  " Check above error messages and"
                  " see following details:\n%s") % e)

    # write cmd history:
    grass.vector_history(output)
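A hedged usage sketch of the same reclass-then-extract flow from a GRASS Python session; the map name fields and the column owner are hypothetical, and the option names mirror the calls above.

import grass.script as grass

# dissolve boundaries between areas sharing the same 'owner' value:
# first recategorize by attribute, then extract with dissolve (-d)
grass.run_command('v.reclass', input='fields', output='fields_recl',
                  layer='1', column='owner')
grass.run_command('v.extract', flags='d', input='fields_recl',
                  output='fields_dissolved', type='area', layer='1')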
Example #43
def main():
    global tmp, sqltmp, tmpname, nuldev, vector, rastertmp
    rastertmp = False
    # setup temporary files
    tmp = grass.tempfile()
    sqltmp = tmp + ".sql"
    # we need a random name
    tmpname = grass.basename(tmp)

    nuldev = open(os.devnull, 'w')

    rasters = options['raster'].split(',')
    colprefixes = options['column_prefix'].split(',')
    vector = options['map']
    layer = options['layer']
    percentile = options['percentile']
    basecols = options['method'].split(',')

    ### set up environment variables ###
    env = grass.gisenv()
    mapset = env['MAPSET']

    vs = vector.split('@')
    if len(vs) > 1:
        vect_mapset = vs[1]
    else:
        vect_mapset = mapset

    # does map exist in CURRENT mapset?
    if vect_mapset != mapset or not grass.find_file(vector, 'vector',
                                                    mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    # check if the DBF driver is used; if so, column names must be cut to 10 characters:
    try:
        fi = grass.vector_db(map=vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))
    # we need this for non-DBF driver:
    dbfdriver = fi['driver'] == 'dbf'

    # colprefix for every raster map?
    if len(colprefixes) != len(rasters):
        grass.fatal(
            _("Number of raster maps ({0}) differs from "
              "number of column prefixes ({1})").format(
                  len(rasters), len(colprefixes)))

    vector = vs[0]

    rastertmp = "%s_%s" % (vector, tmpname)

    for raster in rasters:
        # check the input raster map
        if not grass.find_file(raster, 'cell')['file']:
            grass.fatal(_("Raster map <%s> not found") % raster)

    # save current settings:
    grass.use_temp_region()

    # temporarily align region resolution to the first raster's resolution,
    # keeping the boundary settings
    grass.run_command('g.region', align=rasters[0])

    # prepare base raster for zonal statistics
    try:
        nlines = grass.vector_info_topo(vector)['lines']
        # Create densified lines rather than thin lines
        if flags['d'] and nlines > 0:
            grass.run_command('v.to.rast',
                              input=vector,
                              layer=layer,
                              output=rastertmp,
                              use='cat',
                              flags='d',
                              quiet=True)
        else:
            grass.run_command('v.to.rast',
                              input=vector,
                              layer=layer,
                              output=rastertmp,
                              use='cat',
                              quiet=True)
    except CalledModuleError:
        grass.fatal(_("An error occurred while converting vector to raster"))

    # dump cats to file to avoid "too many arguments" problem:
    p = grass.pipe_command('r.category', map=rastertmp, sep=';', quiet=True)
    cats = []

    for line in p.stdout:
        line = decode(line)
        cats.append(line.rstrip('\r\n').split(';')[0])
    p.wait()

    number = len(cats)
    if number < 1:
        grass.fatal(_("No categories found in raster map"))

    # Check if all categories got converted
    # Report categories from vector map
    vect_cats = grass.read_command('v.category',
                                   input=vector,
                                   option='report',
                                   flags='g').rstrip('\n').split('\n')

    # get number of all categories in selected layer
    for vcl in vect_cats:
        if vcl.split(' ')[0] == layer and vcl.split(' ')[1] == 'all':
            vect_cats_n = int(vcl.split(' ')[2])

    if vect_cats_n != number:
        grass.warning(
            _("Not all vector categories converted to raster. "
              "Converted {0} of {1}.").format(number, vect_cats_n))

    # Find out which table is linked to the vector map on the given layer
    if not fi['table']:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))

    # replaced by user choice
    #basecols = ['n', 'min', 'max', 'range', 'mean', 'stddev', 'variance', 'cf_var', 'sum']

    for i in range(len(rasters)):
        raster = rasters[i]
        colprefix = colprefixes[i]
        # we need at least three chars to distinguish [mea]n from [med]ian
        # so colprefix can't be longer than 6 chars with DBF driver
        if dbfdriver:
            colprefix = colprefix[:6]
            variables_dbf = {}

        # by default, perccol is used only as a key in the 'variables' dict
        perccol = "percentile"
        perc = None
        for b in basecols:
            if b.startswith('p'):
                perc = b
        if perc:
            # namespace is limited in DBF but the % value is important
            if dbfdriver:
                perccol = "per" + percentile
            else:
                perccol = "percentile_" + percentile
            percindex = basecols.index(perc)
            basecols[percindex] = perccol

        # dictionary with name of methods and position in "r.univar -gt" output
        variables = {
            'number': 2,
            'null_cells': 3,
            'minimum': 4,
            'maximum': 5,
            'range': 6,
            'average': 7,
            'stddev': 9,
            'variance': 10,
            'coeff_var': 11,
            'sum': 12,
            'first_quartile': 14,
            'median': 15,
            'third_quartile': 16,
            perccol: 17
        }
        # this list is used to set the 'e' flag for r.univar
        extracols = ['first_quartile', 'median', 'third_quartile', perccol]
        addcols = []
        colnames = []
        extstat = ""
        for i in basecols:
            # match a possibly abbreviated method name to its full name
            for k in variables.keys():
                if i in k:
                    i = k
                    break
            if i in extracols:
                extstat = 'e'
            # check if column already present
            currcolumn = ("%s_%s" % (colprefix, i))
            if dbfdriver:
                currcolumn = currcolumn[:10]
                variables_dbf[currcolumn.replace("%s_" % colprefix, '')] = i

            colnames.append(currcolumn)
            if currcolumn in grass.vector_columns(vector, layer).keys():
                if not flags['c']:
                    grass.fatal(
                        (_("Cannot create column <%s> (already present). ") %
                         currcolumn) +
                        _("Use -c flag to update values in this column."))
            else:
                if i == "n":
                    coltype = "INTEGER"
                else:
                    coltype = "DOUBLE PRECISION"
                addcols.append(currcolumn + ' ' + coltype)

        if addcols:
            grass.verbose(_("Adding columns '%s'") % addcols)
            try:
                grass.run_command('v.db.addcolumn',
                                  map=vector,
                                  columns=addcols,
                                  layer=layer)
            except CalledModuleError:
                grass.fatal(_("Adding columns failed. Exiting."))

        # calculate statistics:
        grass.message(_("Processing input data (%d categories)...") % number)

        # get rid of any earlier attempts
        grass.try_remove(sqltmp)

        f = open(sqltmp, 'w')

        # do the stats
        p = grass.pipe_command('r.univar',
                               flags='t' + extstat,
                               map=raster,
                               zones=rastertmp,
                               percentile=percentile,
                               sep=';')

        first_line = 1

        f.write("{0}\n".format(grass.db_begin_transaction(fi['driver'])))
        for line in p.stdout:
            if first_line:
                first_line = 0
                continue

            vars = decode(line).rstrip('\r\n').split(';')

            f.write("UPDATE %s SET" % fi['table'])
            first_var = 1
            for colname in colnames:
                variable = colname.replace("%s_" % colprefix, '', 1)
                if dbfdriver:
                    variable = variables_dbf[variable]
                i = variables[variable]
                value = vars[i]
                # convert nan, +nan, -nan, inf, +inf, -inf, Infinity, +Infinity,
                # -Infinity to NULL
                if value.lower().endswith('nan') or 'inf' in value.lower():
                    value = 'NULL'
                if not first_var:
                    f.write(" , ")
                else:
                    first_var = 0
                f.write(" %s=%s" % (colname, value))

            f.write(" WHERE %s=%s;\n" % (fi['key'], vars[0]))
        f.write("{0}\n".format(grass.db_commit_transaction(fi['driver'])))
        p.wait()
        f.close()

        grass.message(_("Updating the database ..."))
        exitcode = 0
        try:
            grass.run_command('db.execute',
                              input=sqltmp,
                              database=fi['database'],
                              driver=fi['driver'])
            grass.verbose(
                (_("Statistics calculated from raster map <{raster}>"
                   " and uploaded to attribute table"
                   " of vector map <{vector}>.").format(raster=raster,
                                                        vector=vector)))
        except CalledModuleError:
            grass.warning(
                _("Failed to upload statistics to attribute table of vector map <%s>."
                  ) % vector)
            exitcode = 1

            sys.exit(exitcode)
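The variables dictionary above maps method names to column positions in the r.univar -t table. A hedged sketch of reading that table by header name instead of fixed positions (the raster names elev and zones are hypothetical; requires a GRASS session):

import grass.script as grass

out = grass.read_command('r.univar', flags='t', map='elev',
                         zones='zones', sep=';')
lines = out.splitlines()
header = lines[0].split(';')  # e.g. zone;label;non_null_cells;null_cells;...
for row in lines[1:]:
    stats = dict(zip(header, row.split(';')))
    print(stats['zone'], stats['mean'], stats['stddev'])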
Example #44
def write_update(update_file, training_file, unlabeled_file,
                 new_training_filename, new_unlabeled_filename):
    """
        Transfer samples from the unlabeled set to the training set based on an update file
        with IDs of samples to transfer and their classes.

        :param update_file: Path to the update file
        :param training_file: Path to the training file
        :param unlabeled_file: Path to the unlabeled file
        :param new_training_filename: Path to the new training file that will be created
        :param new_unlabeled_filename: Path to the new unlabeled file that will be created

        :type update_file: string
        :type training_file: string
        :type unlabeled_file: string
        :type new_training_filename: string
        :type new_unlabeled_filename: string
    """
    update = np.genfromtxt(update_file, delimiter=',', skip_header=1)
    training = np.genfromtxt(training_file,
                             delimiter=',',
                             skip_header=0,
                             dtype=None)
    unlabeled = np.genfromtxt(unlabeled_file,
                              delimiter=',',
                              skip_header=0,
                              dtype=None)
    successful_updates = []

    if update.size == 0:
        return
    elif update.ndim == 1:
        update = [update]

    for index_update, row in enumerate(update):
        index = np.where(unlabeled == str(
            row[0]))  # Find in 'unlabeled' the line corresponding to the ID
        if index[0].size != 0:  # Check if row exists
            data = unlabeled[index[0][0]][1:]  # Features
            data = np.insert(data, 0, row[0], axis=0)  # ID
            data = np.insert(data, 1, row[1], axis=0)  # Class
            training = np.append(training, [data], axis=0)
            unlabeled = np.delete(unlabeled, index[0][0], axis=0)
            successful_updates.append(index_update)
        else:
            gcore.warning(
                "Unable to update completely: the following sample could not be found in the unlabeled set:{}"
                .format(row[0]))

    with open(update_file) as f:
        header = f.readline()
        header = header.split(',')

    update = np.delete(update, successful_updates, axis=0)
    update = np.insert(update.astype(str), 0, header, axis=0)

    # Save files
    if new_training_filename != '':
        write_updated_file(new_training_filename, training)
        gcore.message(
            "New training file written to {}".format(new_training_filename))
    if new_unlabeled_filename != '':
        write_updated_file(new_unlabeled_filename, unlabeled)
        gcore.message(
            "New unlabeled file written to {}".format(new_unlabeled_filename))
Example #45
def main():
    infile = options['input']
    outfile = options['output']

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)
    if kv['+proj'] != 'longlat':
        grass.fatal(_("This module only operates in LatLong/WGS84 locations"))

    # input test
    if not os.access(infile, os.R_OK):
        grass.fatal(_("File <%s> not found") % infile)

    # DBF doesn't support lengthy text fields
    kv = grass.db_connection()
    dbfdriver = kv['driver'] == 'dbf'
    if dbfdriver:
        grass.warning(
            _("Since the DBF driver is used, the content of the 'alternatenames' column might be truncated compared to the original Geonames.org column content"))

    with open(infile, encoding='utf-8') as f:
        num_places = sum(1 for each in f)
    grass.message(_("Converting %d place names...") % num_places)

    # pump data into GRASS:
    #  http://download.geonames.org/export/dump/readme.txt
    #  The main 'geoname' table has the following fields :
    #  ---------------------------------------------------
    #  geonameid         : integer id of record in geonames database
    #  name              : name of geographical point (utf8) varchar(200)
    #  asciiname         : name of geographical point in plain ascii characters, varchar(200)
    #  alternatenames    : alternatenames, comma separated varchar(4000)
    #  latitude          : latitude in decimal degrees (wgs84)
    #  longitude         : longitude in decimal degrees (wgs84)
    #  feature class     : see http://www.geonames.org/export/codes.html, char(1)
    #  feature code      : see http://www.geonames.org/export/codes.html, varchar(10)
    #  country code      : ISO-3166 2-letter country code, 2 characters
    #  cc2               : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters
    #  admin1 code       : fipscode (subject to change to iso code), isocode for the us and ch, see file admin1Codes.txt for display names of this code; varchar(20)
    #  admin2 code       : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80)
    #  admin3 code       : code for third level administrative division, varchar(20)
    #  admin4 code       : code for fourth level administrative division, varchar(20)
    #  population        : integer
    #  elevation         : in meters, integer
    #  gtopo30           : average elevation of 30'x30' (ca 900mx900m) area in meters, integer
    #  timezone          : the timezone id (see file http://download.geonames.org/export/dump/timeZones.txt)
    #  modification date : date of last modification in yyyy-MM-dd format

    # geonameid|name|asciiname|alternatenames|latitude|longitude|featureclass|featurecode|countrycode|cc2|admin1code|admin2code|admin3code|admin4code|population|elevation|gtopo30|timezone|modificationdate

    # debug:
    # head -n 3 ${TMPFILE}.csv

    # use different column names limited to 10 chars for dbf
    if dbfdriver:
        columns = ['geonameid integer',
                   'name varchar(200)',
                   'asciiname varchar(200)',
                   'altname varchar(4000)',
                   'latitude double precision',
                   'longitude double precision',
                   'featrclass varchar(1)',
                   'featrcode varchar(10)',
                   'cntrycode varchar(2)',
                   'cc2 varchar(60)',
                   'admin1code varchar(20)',
                   'admin2code varchar(20)',
                   'admin3code varchar(20)',
                   'admin4code varchar(20)',
                   'population integer',
                   'elevation integer',
                   'gtopo30 integer',
                   'timezone varchar(50)',
                   'mod_date date']
    else:
        columns = ['geonameid integer',
                   'name varchar(200)',
                   'asciiname varchar(200)',
                   'alternatename varchar(4000)',
                   'latitude double precision',
                   'longitude double precision',
                   'featureclass varchar(1)',
                   'featurecode varchar(10)',
                   'countrycode varchar(2)',
                   'cc2 varchar(60)',
                   'admin1code varchar(20)',
                   'admin2code varchar(20)',
                   'admin3code varchar(20)',
                   'admin4code varchar(20)',
                   'population integer',
                   'elevation integer',
                   'gtopo30 integer',
                   'timezone varchar(50)',
                   'modification date']

    grass.run_command('v.in.ascii', cat=0, x=6, y=5, sep='tab',
                      input=infile, output=outfile,
                      columns=columns)

    # write cmd history:
    grass.vector_history(outfile)
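The v.in.ascii call above uses x=6, y=5 because longitude and latitude are the 6th and 5th tab-separated fields (1-based) of a Geonames record. A small sketch with an illustrative record:

record = ("2950159\tBerlin\tBerlin\t\t52.52437\t13.41053\tP\tPPLC\tDE\t\t"
          "16\t00\t11000\t11000000\t3426354\t74\t43\tEurope/Berlin\t2012-09-19")
fields = record.split('\t')
lat, lon = float(fields[4]), float(fields[5])  # 0-based indices 4 and 5
print(lon, lat)  # 13.41053 52.52437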
Example #46
def main():

    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    global tile, tmpdir, in_temp, currdir, tmpregionname

    in_temp = False

    nasadem_version = options["version"]
    nasadem_layer = options["layer"]
    url = options["url"]
    username = options["username"]
    password = options["password"]
    local = options["local"]
    output = options["output"]
    dozerotile = flags["z"]
    reproj_res = options["resolution"]

    overwrite = grass.overwrite()

    tile = None
    tmpdir = None
    in_temp = None
    currdir = None
    tmpregionname = None

    if len(local) == 0:
        local = None
        if len(username) == 0 or len(password) == 0:
            grass.fatal(_("NASADEM download requires username and password."))

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags="j")
    kv = grass.parse_key_val(s)

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    currdir = os.getcwd()
    pid = os.getpid()

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True

    # save region
    tmpregionname = "r_in_nasadem_region_" + str(pid)
    grass.run_command("g.region", save=tmpregionname, overwrite=overwrite)

    # get extents
    if kv["+proj"] == "longlat":
        reg = grass.region()
        if options["region"] is None or options["region"] == "":
            north = reg["n"]
            south = reg["s"]
            east = reg["e"]
            west = reg["w"]
        else:
            west, south, east, north = options["region"].split(",")
            west = float(west)
            south = float(south)
            east = float(east)
            north = float(north)

    else:
        if not options["resolution"]:
            grass.fatal(
                _("The <resolution> must be set if the projection is not 'longlat'.")
            )
        if options["region"] is None or options["region"] == "":
            reg2 = grass.parse_command("g.region", flags="uplg")
            north_vals = [float(reg2["ne_lat"]), float(reg2["nw_lat"])]
            south_vals = [float(reg2["se_lat"]), float(reg2["sw_lat"])]
            east_vals = [float(reg2["ne_long"]), float(reg2["se_long"])]
            west_vals = [float(reg2["nw_long"]), float(reg2["sw_long"])]
            reg = {}
            if np.mean(north_vals) > np.mean(south_vals):
                north = max(north_vals)
                south = min(south_vals)
            else:
                north = min(north_vals)
                south = max(south_vals)
            if np.mean(west_vals) > np.mean(east_vals):
                west = max(west_vals)
                east = min(east_vals)
            else:
                west = min(west_vals)
                east = max(east_vals)
            # get actual location, mapset, ...
            grassenv = grass.gisenv()
            tgtloc = grassenv["LOCATION_NAME"]
            tgtmapset = grassenv["MAPSET"]
            GISDBASE = grassenv["GISDBASE"]
            TGTGISRC = os.environ["GISRC"]
        else:
            grass.fatal(
                _(
                    "The option <region> is only supported in the projection 'longlat'"
                )
            )

    # adjust extents to cover SRTM tiles: 1 degree bounds
    tmpint = int(north)
    if tmpint < north:
        north = tmpint + 1
    else:
        north = tmpint

    tmpint = int(south)
    if tmpint > south:
        south = tmpint - 1
    else:
        south = tmpint

    tmpint = int(east)
    if tmpint < east:
        east = tmpint + 1
    else:
        east = tmpint

    tmpint = int(west)
    if tmpint > west:
        west = tmpint - 1
    else:
        west = tmpint

    if north == south:
        north += 1
    if east == west:
        east += 1

    # switch to longlat location
    if kv["+proj"] != "longlat":
        SRCGISRC, TMPLOC = createTMPlocation()

    rows = abs(north - south)
    cols = abs(east - west)
    ntiles = rows * cols
    grass.message(_("Importing %d NASADEM tiles...") % ntiles, flag="i")
    counter = 1

    srtmtiles = ""
    valid_tiles = 0
    for ndeg in range(south, north):
        for edeg in range(west, east):
            grass.percent(counter, ntiles, 1)
            counter += 1
            if ndeg < 0:
                tile = "s"
            else:
                tile = "n"
            tile = tile + "%02d" % abs(ndeg)
            if edeg < 0:
                tile = tile + "w"
            else:
                tile = tile + "e"
            tile = tile + "%03d" % abs(edeg)
            grass.debug("Tile: %s" % tile, debug=1)

            if local is None:
                download_tile(tile, url, pid, nasadem_version, username, password)

            gotit = import_local_tile(tile, local, pid, nasadem_layer)
            if gotit == 1:
                grass.verbose(_("Tile %s successfully imported") % tile)
                valid_tiles += 1
            elif dozerotile:
                # create tile with zeros
                # north
                if ndeg < -1:
                    tmpn = "%02d:59:59.5S" % (abs(ndeg) - 2)
                else:
                    tmpn = "%02d:00:00.5N" % (ndeg + 1)
                # south
                if ndeg < 1:
                    tmps = "%02d:00:00.5S" % abs(ndeg)
                else:
                    tmps = "%02d:59:59.5N" % (ndeg - 1)
                # east
                if edeg < -1:
                    tmpe = "%03d:59:59.5W" % (abs(edeg) - 2)
                else:
                    tmpe = "%03d:00:00.5E" % (edeg + 1)
                # west
                if edeg < 1:
                    tmpw = "%03d:00:00.5W" % abs(edeg)
                else:
                    tmpw = "%03d:59:59.5E" % (edeg - 1)

                grass.run_command("g.region", n=tmpn, s=tmps, e=tmpe, w=tmpw, res=res)
                grass.run_command(
                    "r.mapcalc",
                    expression="%s = 0" % (tile + ".r.in.nasadem.tmp." + str(pid)),
                    quiet=True,
                )
                grass.run_command("g.region", region=tmpregionname)

    # g.list with sep = comma does not work ???
    pattern = "*.r.in.nasadem.tmp.%d" % pid
    demtiles = grass.read_command(
        "g.list", type="raster", pattern=pattern, sep="newline", quiet=True
    )

    demtiles = demtiles.splitlines()
    demtiles = ",".join(demtiles)
    grass.debug("List of Tiles: %s" % demtiles, debug=1)

    if valid_tiles == 0:
        grass.run_command(
            "g.remove", type="raster", name=str(demtiles), flags="f", quiet=True
        )
        grass.warning(_("No tiles imported"))
        if local is not None:
            grass.fatal(_("Please check if local folder <%s> is correct.") % local)
        else:
            grass.fatal(
                _(
                    "Please check internet connection, credentials, and if url <%s> is correct."
                )
                % url
            )

    grass.run_command("g.region", raster=str(demtiles))

    if valid_tiles > 1:
        grass.message(_("Patching tiles..."))
        if kv["+proj"] != "longlat":
            grass.run_command("r.buildvrt", input=demtiles, output=output)
        else:
            grass.run_command("r.patch", input=demtiles, output=output)
            grass.run_command(
                "g.remove", type="raster", name=str(demtiles), flags="f", quiet=True
            )
    else:
        grass.run_command("g.rename", raster="%s,%s" % (demtiles, output), quiet=True)

    # switch to target location and reproject nasadem
    if kv["+proj"] != "longlat":
        os.environ["GISRC"] = str(TGTGISRC)
        # r.proj
        grass.message(_("Reprojecting <%s>...") % output)
        kwargs = {
            "location": TMPLOC,
            "mapset": "PERMANENT",
            "input": output,
            "resolution": reproj_res,
        }
        if options["memory"]:
            kwargs["memory"] = options["memory"]
        if options["method"]:
            kwargs["method"] = options["method"]
        try:
            grass.run_command("r.proj", **kwargs)
        except CalledModuleError:
            grass.fatal(_("Unable to reproject raster <%s>") % output)

    # nice color table
    grass.run_command("r.colors", map=output, color="srtm", quiet=True)

    # write metadata:
    tmphist = grass.tempfile()
    f = open(tmphist, "w+")
    # hide username and password
    cmdline = os.environ["CMDLINE"]
    if username is not None and len(username) > 0:
        cmdline = cmdline.replace("=" + username, "=xxx")
    if password is not None and len(password) > 0:
        cmdline = cmdline.replace("=" + password, "=xxx")

    f.write(cmdline)
    f.close()
    source1 = nasadem_version
    grass.run_command(
        "r.support",
        map=output,
        loadhistory=tmphist,
        description="generated by r.in.nasadem",
        source1=source1,
        source2=(local if local != tmpdir else url),
    )
    grass.try_remove(tmphist)

    grass.message(_("Done: generated map <%s>") % output)
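The tile-name construction in the import loop, factored into a standalone helper for clarity (it mirrors the code above):

def nasadem_tile_name(ndeg, edeg):
    # 1-degree cell named after its south-west corner, e.g. n41w088
    tile = ('s' if ndeg < 0 else 'n') + '%02d' % abs(ndeg)
    tile += ('w' if edeg < 0 else 'e') + '%03d' % abs(edeg)
    return tile

print(nasadem_tile_name(41, -88))   # n41w088
print(nasadem_tile_name(-3, 172))   # s03e172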
Example #47
def download_gcs(scene, output):
    """Downloads a single S2 scene from Google Cloud Storage.
    """
    final_scene_dir = os.path.join(output, '{}.SAFE'.format(scene))
    create_dir(final_scene_dir)
    level = scene.split('_')[1]
    if level == 'MSIL1C':
        baseurl = ('https://storage.googleapis.com/'
                   'gcp-public-data-sentinel-2/tiles')
    elif level == 'MSIL2A':
        baseurl = ('https://storage.googleapis.com/'
                   'gcp-public-data-sentinel-2/L2/tiles')
    else:
        gs.fatal(_("Unsupported processing level <{}>").format(level))
    tile_block = scene.split('_')[-2]
    tile_no = tile_block[1:3]
    tile_first_letter = tile_block[3]
    tile_last_letters = tile_block[4:]

    url_scene = os.path.join(baseurl, tile_no, tile_first_letter,
                             tile_last_letters, '{}.SAFE'.format(scene))

    # download the manifest.safe file
    safe_file = 'manifest.safe'
    safe_url = os.path.join(url_scene, safe_file)
    output_path_safe = os.path.join(final_scene_dir, safe_file)
    r_safe = requests.get(safe_url, allow_redirects=True)
    if r_safe.status_code != 200:
        gs.warning(_("Scene <{}> was not found on Google Cloud").format(scene))
        return 1
    root_manifest = ET.fromstring(r_safe.content)
    open(output_path_safe, 'wb').write(r_safe.content)
    # parse manifest.safe for the rest of the data
    files_list = parse_manifest_gcs(root_manifest)

    # get all required folders
    hrefs = [file['href'] for file in files_list]
    hrefs_heads = [os.path.split(path)[0] for path in hrefs]
    required_rel_folders = list(set(hrefs_heads))
    # some paths inconsistently start with "." and some don't
    if any([not folder.startswith('.') for folder in required_rel_folders]):
        required_abs_folders = [os.path.join(final_scene_dir, item) for item
                                in required_rel_folders if item != '.']

    else:
        required_abs_folders = [item.replace('.', final_scene_dir) for item
                                in required_rel_folders if item != '.']

    # some scenes don't have additional metadata (GRANULE/.../AUX_DATA or
    # DATASTRIP/.../QI_DATA) but sen2cor seems to require at least the empty folder
    rest_folders = []
    check_folders = [("GRANULE", "AUX_DATA"), ("DATASTRIP", "QI_DATA")]
    for check_folder in check_folders:
        if len(fnmatch.filter(required_abs_folders, '*{}*/{}*'.format(
                check_folder[0], check_folder[1]))) == 0:
            # get required path
            basepath = min([fol for fol in required_abs_folders
                            if check_folder[0] in fol], key=len)
            rest_folders.append(os.path.join(basepath, check_folder[1]))

    # two folders are not in the manifest.safe, but the empty folders may
    # be required for other software (e.g. sen2cor)
    rest_folders.extend([os.path.join(final_scene_dir, 'rep_info'),
                         os.path.join(final_scene_dir, 'AUX_DATA')])
    required_abs_folders.extend(rest_folders)

    # create folders, bailing out if any cannot be created
    for folder in required_abs_folders:
        req_folder_code = create_dir(folder)
        if req_folder_code != 0:
            return 1
    failed_downloads = []
    # no .html files are available on GCS but the folder might be required
    files_list_dl = [file for file in files_list if "HTML" not in file["href"]]
    for dl_file in tqdm(files_list_dl):
        # remove the '.' for relative path in the URLS
        if dl_file['href'].startswith('.'):
            href_url = dl_file['href'][1:]
        else:
            href_url = '/{}'.format(dl_file['href'])
        # neither os.path.join nor urljoin join these properly...
        dl_url = '{}{}'.format(url_scene, href_url)
        output_path_file = '{}{}'.format(final_scene_dir, href_url)
        checksum_function = dl_file['checksumName'].lower()
        dl_code = download_gcs_file(url=dl_url, destination=output_path_file,
                                    checksum_function=checksum_function,
                                    checksum=dl_file["checksum"])
        if dl_code != 0:
            failed_downloads.append(dl_url)

    if len(failed_downloads) > 0:
        gs.verbose(_("Downloading was not successful for urls \n{}").format(
            '\n'.join(failed_downloads)))
        gs.warning(_("Downloading was not successful for scene <{}>").format(
            scene))
        return 1
    else:
        return 0
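The helper download_gcs_file() is not shown in this listing; below is a minimal, hedged sketch of what such a helper might do, assuming the manifest checksum is hex-encoded and that its algorithm name, lowercased with '-' mapped to '_' (e.g. 'sha3-256' -> 'sha3_256'), is a valid hashlib algorithm:

import hashlib
import requests

def download_gcs_file(url, destination, checksum_function, checksum):
    # download url to destination, verify its checksum;
    # returns 0 on success, 1 on failure (sketch only)
    r = requests.get(url, allow_redirects=True)
    if r.status_code != 200:
        return 1
    algorithm = checksum_function.replace('-', '_')
    digest = hashlib.new(algorithm, r.content).hexdigest()
    if digest.lower() != checksum.lower():
        return 1
    with open(destination, 'wb') as f:
        f.write(r.content)
    return 0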
Example #48
def main():
    # Temporary filenames
    tmp_avg_lse = tmp_map_name('avg_lse')
    tmp_delta_lse = tmp_map_name('delta_lse')
    tmp_cwv = tmp_map_name('cwv')
    #tmp_lst = tmp_map_name('lst')

    # user input
    mtl_file = options['mtl']

    if not options['prefix']:
        b10 = options['b10']
        b11 = options['b11']
        t10 = options['t10']
        t11 = options['t11']

        if not options['clouds']:
            qab = options['qab']
            cloud_map = False

        else:
            qab = False
            cloud_map = options['clouds']

    elif options['prefix']:
        prefix = options['prefix']
        b10 = prefix + '10'
        b11 = prefix + '11'

        if not options['clouds']:
            qab = prefix + 'QA'
            cloud_map = False

        else:
            cloud_map = options['clouds']
            qab = False

    qapixel = options['qapixel']
    lst_output = options['lst']

    # save Brightness Temperature maps?
    if options['prefix_bt']:
        brightness_temperature_prefix = options['prefix_bt']
    else:
        brightness_temperature_prefix = None

    cwv_window_size = int(options['window'])
    assertion_for_cwv_window_size_msg = (
        'A spatial window of size 5^2 or less is not '
        'recommended. Please select a larger window. '
        'Refer to the manual\'s notes for details.')
    assert cwv_window_size >= 7, assertion_for_cwv_window_size_msg
    cwv_output = options['cwv']

    # optional maps
    average_emissivity_map = options['emissivity']
    delta_emissivity_map = options['delta_emissivity']

    # output for in-between maps?
    emissivity_output = options['emissivity_out']
    delta_emissivity_output = options['delta_emissivity_out']
    landcover_map = options['landcover']
    landcover_class = options['landcover_class']

    # flags
    info = flags['i']
    scene_extent = flags['e']
    timestamping = flags['t']
    null = flags['n']
    rounding = flags['r']
    celsius = flags['c']

    #
    # Pre-production actions
    #

    # Set Region
    if scene_extent:
        grass.use_temp_region()  # safely modify the region
        msg = "\n|! Matching region extent to map {name}"

        # ToDo: check if extent-B10 == extent-B11? Unnecessary?
        # Improve below!

        if b10:
            run('g.region', rast=b10, align=b10)
            msg = msg.format(name=b10)

        elif t10:
            run('g.region', rast=t10, align=t10)
            msg = msg.format(name=t10)

        g.message(msg)

    elif not scene_extent:
        grass.warning(_('Operating on current region'))

    #
    # 1. Mask clouds
    #

    if cloud_map:
        # user-fed cloud map?
        msg = '\n|i Using {cmap} as a MASK'.format(cmap=cloud_map)
        g.message(msg)
        r.mask(raster=cloud_map, flags='i', overwrite=True)

    else:
        # using the quality assessment band and a "QA" pixel value
        mask_clouds(qab, qapixel)

    #
    # 2. TIRS > Brightness Temperatures
    #

    if mtl_file:
        # if MTL and b10 given, use it to compute at-satellite temperature t10
        if b10:
            t10 = tirs_to_at_satellite_temperature(
                b10,
                mtl_file,
                brightness_temperature_prefix,
                null,
                quiet=info,
            )

        # likewise for b11 -> t11
        if b11:
            t11 = tirs_to_at_satellite_temperature(
                b11,
                mtl_file,
                brightness_temperature_prefix,
                null,
                quiet=info,
            )

    #
    # Initialise a SplitWindowLST object
    #

    split_window_lst = SplitWindowLST(landcover_class)
    citation_lst = split_window_lst.citation

    #
    # 3. Land Surface Emissivities
    #

    # use given fixed class?
    if landcover_class:

        if split_window_lst.landcover_class is False:
            # replace with meaningful error
            grass.warning('Unknown land cover class string! Note, this string '
                          'input option is case sensitive.')

        if landcover_class == 'Random':
            msg = "\n|! Random emissivity class selected > " + \
                split_window_lst.landcover_class + ' '

        elif landcover_class == 'Barren_Land':
            msg = "\n|! For barren land, the last quadratic term of the Split-Window algorithm will be set to 0" + \
                split_window_lst.landcover_class + ' '

        else:
            msg = '\n|! Retrieving average emissivities *only* for {eclass} '

        if info:
            msg += '| Average emissivities (channels 10, 11): '
            msg += str(split_window_lst.emissivity_t10) + ', ' + \
                str(split_window_lst.emissivity_t11)

        msg = msg.format(eclass=split_window_lst.landcover_class)
        g.message(msg)

    # use the FROM-GLC map
    elif landcover_map:

        if average_emissivity_map:
            tmp_avg_lse = average_emissivity_map

        if not average_emissivity_map:
            determine_average_emissivity(
                tmp_avg_lse,
                emissivity_output,
                landcover_map,
                split_window_lst.average_lse_mapcalc,
                quiet=info,
            )
            if options['emissivity_out']:
                tmp_avg_lse = options['emissivity_out']

        if delta_emissivity_map:
            tmp_delta_lse = delta_emissivity_map

        if not delta_emissivity_map:
            determine_delta_emissivity(
                tmp_delta_lse,
                delta_emissivity_output,
                landcover_map,
                split_window_lst.delta_lse_mapcalc,
                quiet=info,
            )
            if options['delta_emissivity_out']:
                tmp_delta_lse = options['delta_emissivity_out']

    #
    # 4. Modified Split-Window Variance-Covariance Matrix > Column Water Vapor
    #

    if info:
        msg = '\n|i Spatial window of size {n} for Column Water Vapor estimation: '
        msg = msg.format(n=cwv_window_size)
        g.message(msg)

    cwv = Column_Water_Vapor(cwv_window_size, t10, t11)
    citation_cwv = cwv.citation
    estimate_cwv_big_expression(
        tmp_cwv,
        cwv_output,
        t10,
        t11,
        cwv._big_cwv_expression(),
    )
    if cwv_output:
        tmp_cwv = cwv_output

    #
    # 5. Estimate Land Surface Temperature
    #

    if info and landcover_class == 'Random':
        msg = '\n|* Will pick a random emissivity class!'
        grass.verbose(msg)

    estimate_lst(
        lst_output,
        t10,
        t11,
        landcover_map,
        landcover_class,
        tmp_avg_lse,
        tmp_delta_lse,
        tmp_cwv,
        split_window_lst.sw_lst_mapcalc,
        rounding,
        celsius,
        quiet=info,
    )

    #
    # Post-production actions
    #

    # remove MASK
    r.mask(flags='r', verbose=True)

    # time-stamping
    if timestamping:
        add_timestamp(mtl_file, lst_output)

        if cwv_output:
            add_timestamp(mtl_file, cwv_output)

    # Apply color table
    if celsius:
        run('r.colors', map=lst_output, color='celsius')
    else:
        # color table for kelvin
        run('r.colors', map=lst_output, color='kelvin')

    # ToDo: helper function for r.support
    # strings for metadata
    history_lst = '\n' + citation_lst
    history_lst += '\n\n' + citation_cwv
    history_lst += '\n\nSplit-Window model: '
    history_lst += split_window_lst._equation  # sw_lst_mapcalc
    description_lst = (
        'Land Surface Temperature derived from a split-window algorithm. ')

    if celsius:
        title_lst = 'Land Surface Temperature (C)'
        units_lst = 'Celsius'

    else:
        title_lst = 'Land Surface Temperature (K)'
        units_lst = 'Kelvin'

    landsat8_metadata = Landsat8_MTL(mtl_file)
    source1_lst = landsat8_metadata.scene_id
    source2_lst = landsat8_metadata.origin

    # history entry
    run(
        "r.support",
        map=lst_output,
        title=title_lst,
        units=units_lst,
        description=description_lst,
        source1=source1_lst,
        source2=source2_lst,
        history=history_lst,
    )

    # restore region
    if scene_extent:
        grass.del_temp_region()  # restoring previous region settings
        g.message("|! Original Region restored")

    # print citation
    if info:
        g.message('\nSource: ' + citation_lst)
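A hedged usage sketch of this module from a GRASS Python session. The module name i.landsat8.swlst is inferred from the SplitWindowLST / Landsat8_MTL classes and may differ; the MTL file and land cover map names are hypothetical, while the option names are taken from the code above:

import grass.script as grass

grass.run_command('i.landsat8.swlst',
                  mtl='LC81840332014146LGN00_MTL.txt',  # hypothetical MTL file
                  prefix='B',             # bands named B10, B11, BQA
                  landcover='from_glc',   # hypothetical FROM-GLC map
                  lst='lst',
                  flags='c')              # output in Celsius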
Example #49
def main():
    table = options['table']
    column = options['column']
    force = flags['f']

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags='c')

    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    # schema needed for PG?

    if force:
        grass.message(_("Forcing ..."))

    if column == "cat":
        grass.warning(
            _("Deleting <%s> column which may be needed to keep table connected to a vector map"
              ) % column)

    cols = [f[0] for f in grass.db_describe(table)['cols']]
    if column not in cols:
        grass.fatal(_("Column <%s> not found in table") % column)

    if not force:
        grass.message(_("Column <%s> would be deleted.") % column)
        grass.message("")
        grass.message(
            _("You must use the force flag (-f) to actually remove it. Exiting."
              ))
        return 0

    if driver == "sqlite":
        #echo "Using special trick for SQLite"
        # http://www.sqlite.org/faq.html#q13
        colnames = []
        coltypes = []
        for f in grass.db_describe(table)['cols']:
            if f[0] == column:
                continue
            colnames.append(f[0])
            coltypes.append("%s %s" % (f[0], f[1]))

        colnames = ", ".join(colnames)
        coltypes = ", ".join(coltypes)

        cmds = [
            "BEGIN TRANSACTION",
            "CREATE TEMPORARY TABLE ${table}_backup(${coldef})",
            "INSERT INTO ${table}_backup SELECT ${colnames} FROM ${table}",
            "DROP TABLE ${table}",
            "CREATE TABLE ${table}(${coldef})",
            "INSERT INTO ${table} SELECT ${colnames} FROM ${table}_backup",
            "DROP TABLE ${table}_backup",
            "COMMIT"
        ]
        tmpl = string.Template(';\n'.join(cmds))
        sql = tmpl.substitute(table=table, coldef=coltypes, colnames=colnames)
    else:
        sql = "ALTER TABLE %s DROP COLUMN %s" % (table, column)

    # grass.write_command() raises CalledModuleError on failure rather than
    # returning an exit code (import it from grass.exceptions)
    try:
        grass.write_command(
            'db.execute', input='-', database=database, driver=driver,
            stdin=sql)
    except CalledModuleError:
        grass.fatal(_("Cannot continue (problem deleting column)"))

    return 0
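
To make the SQLite workaround concrete, here is a self-contained sketch of the SQL the template above expands to; the table name roads and its columns are hypothetical:

# Standalone illustration of the template expansion above; the table and
# column names are hypothetical example values.
import string

cmds = [
    "BEGIN TRANSACTION",
    "CREATE TEMPORARY TABLE ${table}_backup(${coldef})",
    "INSERT INTO ${table}_backup SELECT ${colnames} FROM ${table}",
    "DROP TABLE ${table}",
    "CREATE TABLE ${table}(${coldef})",
    "INSERT INTO ${table} SELECT ${colnames} FROM ${table}_backup",
    "DROP TABLE ${table}_backup",
    "COMMIT",
]
sql = string.Template(';\n'.join(cmds)).substitute(
    table='roads',
    coldef='cat INTEGER, length DOUBLE PRECISION',
    colnames='cat, length')
print(sql)
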
Example #50
0
    def _download(self):
        """!Downloads data from WMS server using GDAL WMS driver

        @return temp_map with stored downloaded data
        """
        grass.message("Downloading data from WMS server...")

        # GDAL WMS driver does not flip geographic coordinates
        # according to WMS standard 1.3.0.
        if ("+proj=latlong" in self.proj_srs or "+proj=longlat"
                in self.proj_srs) and self.params["wms_version"] == "1.3.0":
            grass.warning(
                _("If the module is unable to fetch data in this "
                  "geographic projection, try the 'WMS_GRASS' driver "
                  "or use WMS version 1.1.1."))

        self._debug("_download", "started")
        temp_map = self._tempfile()

        xml_file = self._createXML()

        # print the XML file content at debug level 1
        with open(xml_file, "r") as xml_f:
            grass.debug("WMS request XML:\n%s" % xml_f.read(), 1)

        if self.proxy:
            gdal.SetConfigOption("GDAL_HTTP_PROXY", str(self.proxy))
        if self.proxy_user_pw:
            gdal.SetConfigOption("GDAL_HTTP_PROXYUSERPWD",
                                 str(self.proxy_user_pw))
        wms_dataset = gdal.Open(xml_file, gdal.GA_ReadOnly)
        grass.try_remove(xml_file)
        if wms_dataset is None:
            grass.fatal(_("Unable to open GDAL WMS driver"))

        self._debug("_download", "GDAL dataset created")

        driver = gdal.GetDriverByName(self.gdal_drv_format)
        if driver is None:
            grass.fatal(_("Unable to find %s driver" % format))

        metadata = driver.GetMetadata()
        if (gdal.DCAP_CREATECOPY not in metadata
                or metadata[gdal.DCAP_CREATECOPY] == "NO"):
            grass.fatal(
                _("Driver %s does not support the CreateCopy() method.") %
                self.gdal_drv_name)

        self._debug("_download", "calling GDAL CreateCopy...")

        if self.createopt is None:
            temp_map_dataset = driver.CreateCopy(temp_map, wms_dataset, 0)
        else:
            self._debug("_download",
                        "Using GDAL createopt <%s>" % str(self.createopt))
            temp_map_dataset = driver.CreateCopy(temp_map, wms_dataset, 0,
                                                 self.createopt)

        if temp_map_dataset is None:
            grass.fatal(_("Incorrect WMS query"))

        temp_map_dataset = None
        wms_dataset = None

        self._debug("_download", "finished")

        return temp_map
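
_createXML() is not shown in this excerpt. The GDAL WMS driver consumes a small service-description XML roughly like the sketch below (server URL, layer, extent, and sizes are placeholder values):

# Sketch of a GDAL WMS service description like the one _createXML() is
# expected to write; all values here are placeholders.
GDAL_WMS_XML = """\
<GDAL_WMS>
  <Service name="WMS">
    <Version>1.1.1</Version>
    <ServerUrl>http://example.org/wms?</ServerUrl>
    <Layers>some_layer</Layers>
    <SRS>EPSG:4326</SRS>
    <ImageFormat>image/jpeg</ImageFormat>
  </Service>
  <DataWindow>
    <UpperLeftX>-180.0</UpperLeftX>
    <UpperLeftY>90.0</UpperLeftY>
    <LowerRightX>180.0</LowerRightX>
    <LowerRightY>-90.0</LowerRightY>
    <SizeX>1024</SizeX>
    <SizeY>512</SizeY>
  </DataWindow>
</GDAL_WMS>
"""
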
Example #51
0
def main():
    global learning_steps
    global diversity_lambda
    global nbr_uncertainty
    global search_iter

    global svm, preprocessing, train_test_split, RandomizedSearchCV
    global StratifiedKFold, rbf_kernel
    try:
        from sklearn import svm
        from sklearn import preprocessing
        from sklearn.model_selection import train_test_split
        from sklearn.model_selection import RandomizedSearchCV
        from sklearn.model_selection import StratifiedKFold
        from sklearn.metrics.pairwise import rbf_kernel
    except ImportError:
        gcore.fatal(
            "This module requires the scikit-learn python package. Please install it."
        )

    # Number of samples to label at each iteration
    learning_steps = (int(options['learning_steps'])
                      if options['learning_steps'] != '0' else 5)
    # Number of iterations in the randomized parameter search
    search_iter = (int(options['search_iter'])
                   if options['search_iter'] != '0' else 10)
    # Lambda parameter used in the diversity heuristic
    diversity_lambda = (float(options['diversity_lambda'])
                        if options['diversity_lambda'] != '' else 0.25)
    # Number of samples to select (based on the uncertainty criterion) before
    # applying the diversity criterion; must be greater than or equal to
    # learning_steps
    nbr_uncertainty = (int(options['nbr_uncertainty'])
                       if options['nbr_uncertainty'] != '0' else 15)

    X_train, ID_train, y_train, header_train = load_data(
        options['training_set'], labeled=True)
    X_test, ID_test, y_test, header_test = load_data(options['test_set'],
                                                     labeled=True)
    X_unlabeled, ID_unlabeled, y_unlabeled, header_unlabeled = load_data(
        options['unlabeled_set'])

    nbr_train = ID_train.shape[0]

    # If an update file has been specified, transfer samples
    if options['update'] != '':
        X_train, ID_train, y_train = update(options['update'], X_train,
                                            ID_train, y_train, X_unlabeled,
                                            ID_unlabeled)
        if (options['training_updated'] != ''
                or options['unlabeled_updated'] != ''):
            write_update(options['update'], options['training_set'],
                         options['unlabeled_set'], options['training_updated'],
                         options['unlabeled_updated'])
    elif (options['update'] == '' and (options['training_updated'] != ''
                                       or options['unlabeled_updated'] != '')):
        gcore.warning(
            'No update file specified: could not write the updated files.')
    nbr_new_train = ID_train.shape[0]

    samples_to_label_IDs, score, predictions = learning(
        X_train, y_train, X_test, y_test, X_unlabeled, ID_unlabeled,
        learning_steps, active_diversity_sample_selection)

    X_unlabeled, ID_unlabeled, y_unlabeled, header_unlabeled = load_data(
        options['unlabeled_set'], scale=False)  # Load unscaled data

    predictions_file = options['predictions']
    # Write the class predictions only if an output file has been specified
    if predictions_file != '':
        write_result_file(ID_unlabeled, X_unlabeled, predictions,
                          header_unlabeled, predictions_file)
        gcore.message(
            "Class predictions written to {}".format(predictions_file))

    gcore.message('Training set: {}'.format(X_train.shape[0]))
    gcore.message('Test set: {}'.format(X_test.shape[0]))
    gcore.message('Unlabeled set: {}'.format(X_unlabeled.shape[0] -
                                             (nbr_new_train - nbr_train)))
    gcore.message('Score: {}'.format(score))

    for ID in samples_to_label_IDs:
        print(int(ID))
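
The active_diversity_sample_selection() function passed to learning() is not shown here. A minimal sketch of the margin-based uncertainty ranking such a selection typically starts from (an assumption, not the module's exact code):

# Sketch of margin-based uncertainty ranking: the smaller the margin
# between the two highest class scores, the more uncertain the sample.
import numpy as np

def most_uncertain_ids(decision_values, ids, nbr_uncertainty):
    """Return the IDs of the nbr_uncertainty most uncertain samples.

    decision_values: (n_samples, n_classes) array of classifier scores;
    ids: (n_samples,) array of sample IDs.
    """
    sorted_scores = np.sort(decision_values, axis=1)
    margins = sorted_scores[:, -1] - sorted_scores[:, -2]
    return ids[np.argsort(margins)[:nbr_uncertainty]]

# e.g. (hypothetical): most_uncertain_ids(clf.decision_function(X_unlabeled),
#                                         ID_unlabeled, nbr_uncertainty)
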
Example #52
0
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbor module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            mm = pymod.MultiModule([reg, mod],
                                   sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(
                _("Error running module: %s\n    stderr: %s") %
                (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
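
The queueing pattern above can be reduced to a compact standalone sketch; map names and the method below are placeholder values:

# Standalone sketch of the pygrass pattern used above: prepare Module
# objects with run_=False/finish_=False, then let a ParallelModuleQueue
# run them a few at a time.
import copy
import grass.pygrass.modules as pymod

queue = pymod.ParallelModuleQueue(nprocs=4)
template = pymod.Module("r.neighbors", input="dummy", output="dummy",
                        size=3, method="average", run_=False, finish_=False)
for name in ["map_a", "map_b", "map_c"]:
    mod = copy.deepcopy(template)
    mod(input=name, output=name + "_smooth")
    queue.put(mod)
queue.wait()
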
Example #53
0
def main():
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options['method']  # sharpening algorithm
    ms1 = options['blue']  # blue channel
    ms2 = options['green']  # green channel
    ms3 = options['red']  # red channel
    pan = options['pan']  # high res pan channel
    out = options['output']  # prefix for output RGB maps
    bladjust = flags['l']  # adjust blue channel
    sproc = flags['s']  # serial processing

    outb = grass.core.find_file('%s_blue' % out)
    outg = grass.core.find_file('%s_green' % out)
    outr = grass.core.find_file('%s_red' % out)

    if (outb['name'] != '' or outg['name'] != '' or outr['name'] != '') and not grass.overwrite():
        grass.warning(_('Maps with selected output prefix names already exist.'
                        ' Delete them or use the overwrite flag.'))
        return

    pid = str(os.getpid())

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv['nsres']
    ewres = kv['ewres']
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()

    grass.run_command('g.region', res=panres, align=pan)

    grass.message(_("Performing pan sharpening with hi res pan image: %f" % panres))

    if sharpen == "brovey":
        grass.verbose(_("Using Brovey algorithm"))

        # pan/intensity histogram matching using linear regression
        outname = 'tmp%s_pan1' % pid
        panmatch1 = matchhist(pan, ms1, outname)

        outname = 'tmp%s_pan2' % pid
        panmatch2 = matchhist(pan, ms2, outname)

        outname = 'tmp%s_pan3' % pid
        panmatch3 = matchhist(pan, ms3, outname)

        outr = '%s_red' % out
        outg = '%s_green' % out
        outb = '%s_blue' % out

        # calculate brovey transformation
        grass.message(_("Calculating Brovey transformation..."))

        if sproc:
            # serial processing
            e = '''eval(k = "$ms1" + "$ms2" + "$ms3")
                "$outr" = 1.0 * "$ms3" * "$panmatch3" / k
                "$outg" = 1.0 * "$ms2" * "$panmatch2" / k
                "$outb" = 1.0 * "$ms1" * "$panmatch1" / k'''
            grass.mapcalc(e, outr=outr, outg=outg, outb=outb,
                          panmatch1=panmatch1, panmatch2=panmatch2,
                          panmatch3=panmatch3, ms1=ms1, ms2=ms2, ms3=ms3,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms1, panmatch1, ms1, ms2, ms3),
                                     overwrite=True)
            pg = grass.mapcalc_start('%s_green = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms2, panmatch2, ms1, ms2, ms3),
                                     overwrite=True)
            pr = grass.mapcalc_start('%s_red = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms3, panmatch3, ms1, ms2, ms3),
                                     overwrite=True)

            pb.wait()
            pg.wait()
            pr.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name='%s,%s,%s' % (panmatch1, panmatch2, panmatch3))

    elif sharpen == "ihs":
        grass.verbose(_("Using IHS<->RGB algorithm"))
        # transform RGB channels into IHS color space
        grass.message(_("Transforming to IHS color space..."))
        grass.run_command('i.rgb.his', overwrite=True,
                          red=ms3,
                          green=ms2,
                          blue=ms1,
                          hue="tmp%s_hue" % pid,
                          intensity="tmp%s_int" % pid,
                          saturation="tmp%s_sat" % pid)

        # pan/intensity histogram matching using linear regression
        target = "tmp%s_int" % pid
        outname = "tmp%s_pan_int" % pid
        panmatch = matchhist(pan, target, outname)

        # substitute pan for intensity channel and transform back to RGB color space
        grass.message(_("Transforming back to RGB color space and sharpening..."))
        grass.run_command('i.his.rgb', overwrite=True,
                          hue="tmp%s_hue" % pid,
                          intensity="%s" % panmatch,
                          saturation="tmp%s_sat" % pid,
                          red="%s_red" % out,
                          green="%s_green" % out,
                          blue="%s_blue" % out)

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name=panmatch)

    elif sharpen == "pca":
        grass.verbose(_("Using PCA/inverse PCA algorithm"))
        grass.message(_("Creating PCA images and calculating eigenvectors..."))

        # initial PCA with RGB channels
        pca_out = grass.read_command('i.pca', quiet=True, rescale='0,0',
                                     input='%s,%s,%s' % (ms1, ms2, ms3),
                                     output='tmp%s.pca' % pid)
        if len(pca_out) < 1:
            grass.fatal(_("Input has no data. Check region settings."))

        b1evect = []
        b2evect = []
        b3evect = []
        for l in pca_out.replace('(', ',').replace(')', ',').splitlines():
            b1evect.append(float(l.split(',')[1]))
            b2evect.append(float(l.split(',')[2]))
            b3evect.append(float(l.split(',')[3]))

        # inverse PCA with hi res pan channel substituted for principal component 1
        pca1 = 'tmp%s.pca.1' % pid
        pca2 = 'tmp%s.pca.2' % pid
        pca3 = 'tmp%s.pca.3' % pid
        b1evect1 = b1evect[0]
        b1evect2 = b1evect[1]
        b1evect3 = b1evect[2]
        b2evect1 = b2evect[0]
        b2evect2 = b2evect[1]
        b2evect3 = b2evect[2]
        b3evect1 = b3evect[0]
        b3evect2 = b3evect[1]
        b3evect3 = b3evect[2]

        outname = 'tmp%s_pan' % pid
        panmatch = matchhist(pan, ms1, outname)

        grass.message(_("Performing inverse PCA ..."))

        stats1 = grass.parse_command("r.univar", map=ms1, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats2 = grass.parse_command("r.univar", map=ms2, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats3 = grass.parse_command("r.univar", map=ms3, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))

        b1mean = float(stats1['mean'])
        b2mean = float(stats2['mean'])
        b3mean = float(stats3['mean'])

        if sproc:
            # serial processing
            outr = '%s_red' % out
            outg = '%s_green' % out
            outb = '%s_blue' % out

            cmd1 = "$outb = (1.0 * $panmatch * $b1evect1) + ($pca2 * $b2evect1) + ($pca3 * $b3evect1) + $b1mean"
            cmd2 = "$outg = (1.0 * $panmatch * $b1evect2) + ($pca2 * $b2evect2) + ($pca3 * $b3evect2) + $b2mean"
            cmd3 = "$outr = (1.0 * $panmatch * $b1evect3) + ($pca2 * $b2evect3) + ($pca3 * $b3evect3) + $b3mean"

            cmd = '\n'.join([cmd1, cmd2, cmd3])

            grass.mapcalc(cmd, outb=outb, outg=outg, outr=outr,
                          panmatch=panmatch, pca2=pca2, pca3=pca3,
                          b1evect1=b1evect1, b2evect1=b2evect1, b3evect1=b3evect1,
                          b1evect2=b1evect2, b2evect2=b2evect2, b3evect2=b3evect2,
                          b1evect3=b1evect3, b2evect3=b2evect3, b3evect3=b3evect3,
                          b1mean=b1mean, b2mean=b2mean, b3mean=b3mean,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect1, pca2,
                                        b2evect1, pca3, b3evect1, b1mean),
                                     overwrite=True)

            pg = grass.mapcalc_start('%s_green = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect2, pca2,
                                        b2evect2, pca3, b3evect2, b2mean),
                                     overwrite=True)

            pr = grass.mapcalc_start('%s_red = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect3, pca2,
                                        b2evect3, pca3, b3evect3, b3mean),
                                     overwrite=True)

            pr.wait()
            pg.wait()
            pb.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type="raster",
                          pattern='tmp%s*,%s' % (pid, panmatch))

    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(_("Assigning grey equalized color tables to output images..."))
    # equalized grey scales give best contrast
    for ch in ['red', 'green', 'blue']:
        grass.run_command('r.colors', quiet=True, map="%s_%s" % (out, ch),
                          flags="e", color='grey')

    # Landsat output tends to be too blue-ish because the panchromatic band
    # is less sensitive to blue light, so the output blue channel can be
    # adjusted
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        rules = grass.tempfile()
        colors = open(rules, 'w')
        colors.write('5 0 0 0\n20 200 200 200\n40 230 230 230\n67 255 255 255 \n')
        colors.close()

        grass.run_command('r.colors', map="%s_blue" % out, rules=rules)
        os.remove(rules)

    # output notice
    grass.verbose(_("The following pan-sharpened output maps have been generated:"))
    for ch in ['red', 'green', 'blue']:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(_("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(_("If desired, combine channels into a single RGB map with 'r.composite'."))
    grass.verbose(_("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ['red', 'green', 'blue']:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three output
    grass.run_command('i.group', group=out,
                      input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.run_command('g.remove', flags="f", type="raster",
                      pattern="tmp%s*" % pid, quiet=True)
Example #54
0
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': [
                '1 x 1 degree',
                '15 x 15 minute'
            ],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011': (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness',
                'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)},
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "lidar": {
            'product': 'Lidar Point Cloud (LPC)',
            'dataset': {
                'Lidar Point Cloud (LPC)': (1. / 3600 / 9, 3, 10)},
            'subset': {},
            'extent': [''],
            'format': 'LAS,LAZ',
            'extension': 'las,laz',
            'zip': True,
            'srs': '',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }
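
    # Note on the dataset tuples above: each maps to (resolution in degrees,
    # resolution in meters, resolution in feet); the matching entry is picked
    # further below based on the current location's units.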

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extensions = tuple(nav_string['extension'].split(','))
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    has_pdal = gscript.find_program(pgm='v.in.pdal')
    if gui_product == 'lidar':
        gui_dataset = 'Lidar Point Cloud (LPC)'
        product_tag = nav_string['product']
        if not has_pdal:
            gscript.warning(_("Module v.in.pdal is missing,"
                              " any downloaded data will not be processed."))
    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']
    memory = options['memory']
    nprocs = options['nprocs']

    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not os.path.isdir(work_dir):
        gscript.fatal(_("Directory <{}> does not exist."
                        " Please create it.").format(work_dir))

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == 'lidar' and options['resolution']:
        product_resolution = float(options['resolution'])

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(_("The default resampling method for product {product} is {res}").format(product=gui_product,
                        res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    min_coords = gscript.read_command('m.proj', coordinates=(gregion['w'], gregion['s']),
                                      proj_out=wgs84, separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj', coordinates=(gregion['e'], gregion['n']),
                                      proj_out=wgs84, separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_message = _("Possibly, the query has timed out. Check network configuration and try again.")
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(_(
            "HTTP(S) error from USGS TNM API:"
            " {code}: {reason} ({instructions})").format(
                reason=error.reason, code=error.code, instructions=try_again_message))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(_(
            "Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_message))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == 'lidar' and options['title_filter']:
            return_JSON['items'] = [item for item in return_JSON['items'] if options['title_filter'] in item['title']]
            return_JSON['total'] = len(return_JSON['items'])

    # a bare "except:" here would also swallow the SystemExit raised by the
    # gscript.fatal() calls above
    except (ValueError, KeyError):
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
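    # tolerance in bytes when comparing a local file's size with the size
    # reported by the API (used in the incomplete-file check below)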
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size - TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(_("TNM API ERROR or Zero tiles available for given input parameters."))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: simply continue with whatever is needed to be done in this case
    if cleanup_list:
        cleanup_msg = _("\n{0} existing incomplete file(s) detected and removed. Run module again.").format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # format the JSON size from bytes into suitable units for the combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if 6 < len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = "{0:.2f} MB".format(total_size_float)
        elif len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = "{0:.2f} GB".format(total_size_float)
        else:
            # downloads below ~1 MB would otherwise leave the string unset
            total_size_str = "{0} bytes".format(total_size)
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(_("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(_("USGS download request has timed out. Network or formatting error."))
        except Exception:  # StandardError existed only in Python 2
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    # our pre-computed extraction stats are unreliable, so collect the stats
    # during extraction instead
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded archive is newer
                                    # than the extracted one, extract again
                                    if os.path.getmtime(extracted_tile) < os.path.getmtime(z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(_(
                    "Unable to locate or extract IMG file '{filename}'"
                    " from ZIP archive '{zipname}': {error}").format(
                        filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(_("Extracted {extracted} new tiles and"
                          " used {used} existing tiles").format(
            used=used_existing_extracted_tiles_num,
            extracted=extracted_tiles_num
        ))
        if old_extracted_tiles_num:
            gscript.verbose(_("Found {removed} existing tiles older"
                              " than the corresponding downloaded archive").format(
                            removed=old_extracted_tiles_num
                            ))
        if removed_extracted_tiles_num:
            gscript.verbose(_("Removed {removed} existing tiles").format(
                            removed=removed_extracted_tiles_num
                            ))

    if gui_product == 'lidar' and not has_pdal:
        gscript.fatal(_("Module v.in.pdal is missing,"
                        " cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is the naming of the maps (it is a smaller
    # concern for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists("raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _("Importing and reprojecting {name}"
                            " ({count} out of {total})...").format(
                                name=LT_file_name, count=i + 1, total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != 'lidar':
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_file_import, kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            resolution='value', resolution_value=product_resolution,
                            extent="region", resample=product_interpolation,
                            memory=memory
                        ))
                else:
                    srs = options['input_srs']
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_lidar_import, kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            input_srs=srs if srs else None
                        ))
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(_("Parallel import and reprojection failed."
                                        " Try running with nprocs=1."))
                    else:
                        gscript.fatal(_("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count

    gscript.verbose(_("Imported {imported} new tiles and"
                      " used {used} existing tiles").format(
        used=used_existing_imported_tiles_num,
        imported=imported_tiles_num
    ))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
    rst_params = dict(tension=25, smooth=0.1, npmin=100)

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [name + '.' + i for name in patch_names]
                        output = gui_output_layer + '.' + i
                        gscript.run_command('r.patch', input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == 'lidar':
                    gscript.run_command('v.patch', flags='nzb', input=patch_names,
                                        output=gui_output_layer)
                    gscript.run_command('v.surf.rst', input=gui_output_layer,
                                        elevation=gui_output_layer, nprocs=nprocs,
                                        **rst_params)
                else:
                    gscript.run_command('r.patch', input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added").format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if not -k flag
                if not preserve_imported_tiles:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [name + '.' + i for name in patch_names]
                            gscript.run_command('g.remove', type='raster',
                                                name=patch_names_i, flags='f')
                    elif gui_product == 'lidar':
                        gscript.run_command('g.remove', type='vector',
                                            name=patch_names + [gui_output_layer], flags='f')
                    else:
                        gscript.run_command('g.remove', type='raster',
                                            name=patch_names, flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename', raster=(patch_names[0] + '.' + i, gui_output_layer + '.' + i))
            elif gui_product == 'lidar':
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                gscript.run_command('v.surf.rst', input=patch_names[0],
                                    elevation=gui_output_layer, nprocs=nprocs,
                                    **rst_params)
                if not preserve_imported_tiles:
                    gscript.run_command('g.remove', type='vector',
                                        name=patch_names[0], flags='f')
            else:
                gscript.run_command('g.rename', raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(_("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(_(
            "Error in getting or importing the data (see above). Please retry."))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = ("<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors', map=gui_output_layer, color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite', red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2', blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
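
For reference, a standalone illustration of the TNM API URL assembled above; the dataset name and bounding box are hypothetical example values:

# Standalone illustration of the TNM API request built in the code above.
from urllib.parse import quote_plus

datasets = quote_plus('National Elevation Dataset (NED) 1/3 arc-second')
str_bbox = '-78.8,35.7,-78.6,35.9'  # w_lon,s_lat,e_lon,n_lat in WGS84
prod_format = quote_plus('IMG')
url = ('https://viewer.nationalmap.gov/tnmaccess/api/products?'
       'datasets={0}&bbox={1}&prodFormats={2}').format(
           datasets, str_bbox, prod_format)
print(url)
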
Example #55
0
def main():
    # old connection
    old_database = options['old_database']
    old_schema = options['old_schema']
    # new connection
    default_connection = grass.db_connection()
    if options['new_driver']:
        new_driver = options['new_driver']
    else:
        new_driver = default_connection['driver']
    if options['new_database']:
        new_database = options['new_database']
    else:
        new_database = default_connection['database']
    if options['new_schema']:
        new_schema = options['new_schema']
    else:
        new_schema = default_connection['schema']

    if old_database == '':
        old_database = None
    old_database_subst = None
    if old_database is not None:
        old_database_subst = substitute_db(old_database)

    new_database_subst = substitute_db(new_database)

    if old_database_subst == new_database_subst and old_schema == new_schema:
        grass.fatal(_("Old and new database connections are identical. Nothing to do."))
    
    mapset = grass.gisenv()['MAPSET']

    vectors = grass.list_grouped('vect')[mapset]
    num_vectors = len(vectors)

    if flags['c']:
        # create new database if not existing
        create_db(new_driver, new_database)
    
    i = 0
    for vect in vectors:
        vect = "%s@%s" % (vect, mapset)
        i += 1
        grass.message(_("%s\nReconnecting vector map <%s> (%d of %d)...\n%s") %
                      ('-' * 80, vect, i, num_vectors, '-' * 80))
        for f in grass.vector_db(vect, stderr=nuldev).values():
            layer = f['layer']
            schema_table = f['table']
            key = f['key']
            database = f['database']
            driver = f['driver']
            
            # split schema.table
            if '.' in schema_table:
                schema, table = schema_table.split('.', 1)
            else:
                schema = ''
                table = schema_table
            
            if new_schema:
                new_schema_table = "%s.%s" % (new_schema, table)
            else:
                new_schema_table = table
            
            grass.debug("DATABASE = '%s' SCHEMA = '%s' TABLE = '%s' ->\n"
                        "      NEW_DATABASE = '%s' NEW_SCHEMA_TABLE = '%s'" % \
                            (old_database, schema, table, new_database, new_schema_table))

            do_reconnect = True
            if old_database_subst is not None:
                if database != old_database_subst:
                    do_reconnect = False
            if database == new_database_subst:
                do_reconnect = False
            if schema != old_schema:
                do_reconnect = False

            if do_reconnect:
                grass.verbose(_("Reconnecting layer %d...") % layer)
                                          
                if flags['c']:
                    # check if table exists in new database
                    copy_tab(driver, database, schema_table,
                             new_driver, new_database, new_schema_table)
                
                # drop original table if required
                if flags['d']:
                    drop_tab(vect, layer, schema_table, driver, substitute_db(database))

                # reconnect tables (don't use substituted new_database)
                # NOTE: v.db.connect creates an index on the key column
                try:
                    grass.run_command('v.db.connect', flags='o', quiet=True, map=vect,
                                      layer=layer, driver=new_driver, database=new_database,
                                      table=new_schema_table, key=key)
                except CalledModuleError:
                    grass.warning(_("Unable to connect table <%s> to vector <%s> on layer <%s>") %
                                  (table, vect, str(layer)))

            else:
                if database != new_database_subst:
                    grass.warning(_("Layer <%d> will not be reconnected because "
                                    "database or schema do not match.") % layer)

    return 0
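The helper substitute_db() called above is defined elsewhere in the module; a minimal sketch consistent with how it is used here (expanding GRASS variables in a database path; the exact behavior is an assumption):

def substitute_db(database):
    # expand $GISDBASE, $LOCATION_NAME and $MAPSET in a dbase path (assumed)
    gisenv = grass.gisenv()
    tmp = database.replace('$GISDBASE', gisenv['GISDBASE'])
    tmp = tmp.replace('$LOCATION_NAME', gisenv['LOCATION_NAME'])
    return tmp.replace('$MAPSET', gisenv['MAPSET'])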
Example #56
0
def main():

    try:
        import pysptools.eea as eea
    except ImportError:
        gs.fatal(_("Cannot import pysptools \
                      (https://pypi.python.org/pypi/pysptools) library."
                      " Please install it (pip install pysptools)"
                      " or ensure that it is on path"
                      " (use PYTHONPATH variable)."))

    try:
        # sklearn is a dependency of used pysptools functionality
        import sklearn
    except ImportError:
        gs.fatal(_("Cannot import sklearn \
                      (https://pypi.python.org/pypi/scikit-learn) library."
                      " Please install it (pip install scikit-learn)"
                      " or ensure that it is on path"
                      " (use PYTHONPATH variable)."))

    try:
        from cvxopt import solvers, matrix
    except ImportError:
        gs.fatal(_("Cannot import cvxopt \
                      (https://pypi.python.org/pypi/cvxopt) library."
                      " Please install it (pip install cvxopt)"
                      " or ensure that it is on path"
                      " (use PYTHONPATH variable)."))

    # Parse input options
    input = options['input']
    output = options['output']
    prefix = options['prefix']
    endmember_n = int(options['endmember_n'])
    endmembers = options['endmembers']
    if options['maxit']:
        maxit = int(options['maxit'])
    else:
        maxit = 0
    extraction_method = options['extraction_method']
    unmixing_method = options['unmixing_method']
    atgp_init = not flags['n']

    # List maps in imagery group
    try:
        maps = gs.read_command('i.group', flags='g', group=input,
                               quiet=True).rstrip('\n').split('\n')
    except:
        gs.fatal('Unable to read imagery group <{}>'.format(input))

    # Validate input
    # q and maxit can be None according to the manual, but that does not work
    # in the current pysptools version
    if endmember_n <= 0:
        gs.fatal('Number of endmembers has to be > 0')
    # Note: the PPI and NFINDR extraction methods additionally require
    # endmember_n >= 2

    if maxit <= 0:
        maxit = 3 * len(maps)

    if endmember_n > len(maps) + 1:
        gs.warning('More endmembers ({}) requested than bands in '
                   'input imagery group ({})'.format(endmember_n, len(maps)))
        if extraction_method != 'PPI':
            gs.fatal('Only the PPI method can extract more endmembers than the '
                     'number of bands in the imagery group')

    if not atgp_init and extraction_method != 'NFINDR':
        gs.verbose('ATGP is only taken into account in the '
                   'NFINDR extraction method...')

    # Get metainformation from input bands
    band_types = {}
    img = None
    n = 0
    gs.verbose('Reading imagery group...')
    for m in maps:
        map = m.split('@')

        # Build numpy stack from imagery group
        raster = r.raster2numpy(map[0], mapset=map[1])
        if raster.dtype == np.float64:
            raster = raster.astype(np.float32)
            gs.warning('{} is of type Float64. '
                       'Float64 is currently not supported. '
                       'Reducing precision to Float32'.format(map[0]))

        # Determine map type
        band_types[map[0]] = get_rastertype(raster)

        # Create cube and mask from GRASS internal NoData value
        if n == 0:
            img = raster
            # Create mask from GRASS internal NoData value
            mask = mask_rasternd(raster)
        else:
            img = np.dstack((img, raster))
            mask = np.logical_and((mask_rasternd(raster)), mask)

        n = n + 1

    # Read a MASK if present and give a warning if not
    # Note that otherwise NoData is read as values
    gs.verbose('Checking for MASK...')
    try:
        MASK = r.raster2numpy('MASK', mapset=getenv('MAPSET')) == 1
        mask = np.logical_and(MASK, mask)
        MASK = None
    except:
        pass

    if extraction_method == 'NFINDR':
        # Extract endmembers from valid pixels using NFINDR function from pysptools
        gs.verbose('Extracting endmembers using NFINDR...')
        nfindr = eea.NFINDR()
        E = nfindr.extract(img, endmember_n, maxit=maxit, normalize=False,
                           ATGP_init=atgp_init, mask=mask)
    elif extraction_method == 'PPI':
        # Extract endmembers from valid pixels using PPI function from pysptools
        gs.verbose('Extracting endmembers using PPI...')
        ppi = eea.PPI()
        E = ppi.extract(img, endmember_n, numSkewers=10000, normalize=False,
                        mask=mask)
    elif extraction_method == 'FIPPI':
        # Extract endmembers from valid pixels using FIPPI function from pysptools
        gs.verbose('Extracting endmembers using FIPPI...')
        fippi = eea.FIPPI()
        # q and maxit can be None according to the manual, but only the call
        # with both q and maxit set works in the current pysptools version
        E = fippi.extract(img, q=endmember_n, maxit=maxit, normalize=False,
                          mask=mask)

    # Write output file in format required for i.spec.unmix addon
    if output:
        gs.verbose('Writing spectra file...')
        n = 0
        with open(output, 'w') as o:
            o.write('# Channels: {}\n'.format('\t'.join(band_types.keys())))
            o.write('# Wrote {} spectra line wise.\n#\n'.format(endmember_n))
            o.write('Matrix: {0} by {1}\n'.format(endmember_n, len(maps)))
            for e in E:
                o.write('row{0}: {1}\n'.format(n, '\t'.join([str(i) for i in e])))
                n = n + 1

    # Write vector map with endmember information if requested
    if endmembers:
        gs.verbose('Writing vector map with endmembers...')
        from grass.pygrass import utils as u
        from grass.pygrass.gis.region import Region
        from grass.pygrass.vector import Vector
        from grass.pygrass.vector import VectorTopo
        from grass.pygrass.vector.geometry import Point

        # Build attribute table
        # Define columns for attribute table
        cols = [(u'cat',       'INTEGER PRIMARY KEY')]
        for b in band_types.keys():
            cols.append((b.replace('.','_'), band_types[b]))
        
        # Get region information
        reg = Region()

        # Create vector map
        new = Vector(endmembers)
        new.open('w', tab_name=endmembers, tab_cols=cols)

        cat = 1
        for e in E:
            # Get indices
            idx = np.where((img[:,:]==e).all(-1))

            # Numpy array is ordered rows, columns (y,x)
            if len(idx[0]) == 0 or len(idx[1]) == 0:
                gs.warning('Could not compute coordinates for endmember {}. '
                           'Please consider rescaling your data to integer'.format(cat))
                cat = cat + 1
                continue

            coords = u.pixel2coor((idx[1][0], idx[0][0]), reg)
            point = Point(coords[1] + reg.ewres / 2.0,
                          coords[0] - reg.nsres / 2.0)

            # Get attributes
            n = 0
            attr = []
            for b in band_types.keys():
                if band_types[b] == u'INTEGER':
                    attr.append(int(e[n]))
                else:
                    attr.append(float(e[n]))
                n = n + 1

            # Write geometry with attributes
            new.write(point, cat=cat,
                      attrs=tuple(attr))
            cat = cat + 1

        # Close vector map
        new.table.conn.commit()
        new.close(build=True)

    if prefix:
        # Run spectral unmixing
        import pysptools.abundance_maps as amaps
        if unmixing_method == 'FCLS':
            fcls = amaps.FCLS()
            result = fcls.map(img, E, normalize=False, mask=mask)
        elif unmixing_method == 'NNLS':
            nnls = amaps.NNLS()
            result = nnls.map(img, E, normalize=False, mask=mask)
        elif unmixing_method == 'UCLS':
            ucls = amaps.UCLS()
            result = ucls.map(img, E, normalize=False, mask=mask)

        # Write results
        for l in range(endmember_n):
            rastname = '{0}_{1}'.format(prefix, l + 1)
            r.numpy2raster(result[:,:,l], 'FCELL', rastname)
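A self-contained sketch of the pysptools unmixing call used above, run on a tiny synthetic cube (shapes and values are illustrative only):

import numpy as np
import pysptools.abundance_maps as amaps

img = np.random.rand(10, 10, 4).astype(np.float32)     # rows x cols x bands
E = np.array([[0.1, 0.2, 0.3, 0.4],                    # 2 endmembers x 4 bands
              [0.9, 0.8, 0.7, 0.6]], dtype=np.float32)
result = amaps.FCLS().map(img, E, normalize=False)     # rows x cols x 2 abundances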
Example #57
0
def main(options, flags):

    # Get the options
    points = options["points"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    column = options["column"]
    separator = options["separator"]
    coordinates = options["coordinates"]

    # Setup separator
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    use_cats = False

    write_header = flags["n"]
    use_raster_region = flags["r"]

    overwrite = gscript.overwrite()

    if points and coordinates:
        gscript.fatal(_("points and coordinates are mutually exclusive"))

    if not points and not coordinates:
        gscript.fatal(_("You must specify points or coordinates"))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order,
                                             dbif=dbif)
    dbif.close()

    if not maps:
        gscript.fatal(_("Space time raster dataset <%s> is empty") % sp.get_id())

    # The list of sample points
    p_list = []

    if not coordinates:
        # Check if the chosen header column is in the vector map
        vname = points
        vmapset = ""
        if "@" in points:
            vname, vmapset = points.split("@")

        v = pyvect.VectorTopo(vname, vmapset)
        if not v.exist():
            gscript.fatal(_("Vector map <%s> does not exist") % points)
        v.open("r")

        col_index = 0

        if not v.table:
            use_cats = True
            gscript.warning(_("Vector map <%s> does not have an attribute table, "
                              "using cats as header column.") % points)

        if v.table and column not in v.table.columns:
            gscript.fatal(_("Vector map <%s> has no column named %s") % (points, column))

        if use_cats is False:
            col_index = list(v.table.columns.names()).index(column)

        # Create the point list
        for line in v:
            if line.gtype == libvect.GV_POINT:
                if use_cats is False:
                    p = SamplePoint(line.x, line.y, line.cat, line.attrs.values()[col_index])
                elif use_cats is True:
                    p = SamplePoint(line.x, line.y, line.cat)

                p_list.append(p)

        v.close()
    else:
        # Convert the coordinates into sample points
        coord_list = coordinates.split(",")
        use_cats = True

        count = 0
        cat = 1
        while count < len(coord_list):
            x = coord_list[count]
            count += 1
            y = coord_list[count]
            count += 1

            p = SamplePoint(float(x), float(y), cat)
            p_list.append(p)
            cat += 1
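        # Equivalent pairwise parsing of the flat x,y coordinate list
        # (illustrative alternative to the while loop above):
        # for cat, (x, y) in enumerate(zip(coord_list[0::2], coord_list[1::2]), 1):
        #     p_list.append(SamplePoint(float(x), float(y), cat))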

    if output:
        out_file = open(output, "w")
    else:
        out_file = sys.stdout

    # Write the header
    if write_header:
        out_file.write("start_time")
        out_file.write(separator)
        out_file.write("end_time")
        out_file.write(separator)
        count = 0
        for p in p_list:
            count += 1
            if use_cats is True:
                out_file.write(str(p.cat))
            else:
                out_file.write(str(p.column))
            if count != len(p_list):
                out_file.write(separator)
        out_file.write("\n")

    # Sorting the points by y-coordinate to make use of the single row cache and read direction
    sorted_p_list = sorted(p_list, key=SamplePointComparisonY)

    # Sample the raster maps
    num = 0
    for map in maps:
        num += 1
        sys.stderr.write("Sample map <%s> number %i out of %i\n" % (map.get_name(), num, len(maps)))

        start, end = map.get_temporal_extent_as_tuple()
        out_file.write(str(start))
        out_file.write(separator)
        if not end:
            out_file.write(str(start))
        else:
            out_file.write(str(end))
        out_file.write(separator)

        r = pyrast.RasterRow(map.get_name(), map.get_mapset())
        if r.exist() is False:
            gscript.fatal(_("Raster map <%s> does not exist" %(map.get_id())))

        region = None
        if use_raster_region is True:
            r.set_region_from_rast()
            region = pyregion.Region()
            region.from_rast(map.get_name())
        # Open the raster layer after the region settings
        r.open("r")

        # Sample the raster maps with the sorted points
        for p in sorted_p_list:
            p.value = r.get_value(point=p, region=region)

        # Write the point values from the original list
        count = 0
        for p in p_list:
            count += 1
            out_file.write(str(p.value))
            if count != len(p_list):
                out_file.write(separator)
        out_file.write("\n")

        r.close()

    if output:
        out_file.close()
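SamplePoint and SamplePointComparisonY are referenced above but defined elsewhere in the module; a minimal sketch consistent with how they are used (the implementation details are assumptions):

class SamplePoint(object):
    """Sample point with coordinates, category and an optional column value."""
    def __init__(self, x, y, cat, column=None):
        self.x = x
        self.y = y
        self.cat = cat
        self.column = column
        self.value = None      # filled by r.get_value() during sampling

def SamplePointComparisonY(p):
    # sort key: y coordinate, so rows are sampled top-down in a single pass
    return p.y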
Example #58
0
    def _download(self):
        """!Downloads data from WMS server using own driver

        @return temp_map with downloaded data
        """
        grass.message(_("Downloading data from WMS server..."))

        if "?" in self.params["url"]:
            self.params["url"] += "&"
        else:
            self.params["url"] += "?"

        if not self.params['capfile']:
            self.cap_file = self._fetchCapabilities(self.params)
        else:
            self.cap_file = self.params['capfile']

        # initialize correct manager according to chosen OGC service
        if self.params['driver'] == 'WMTS_GRASS':
            req_mgr = WMTSRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.proj_srs,
                self.cap_file)
        elif self.params['driver'] == 'WMS_GRASS':
            req_mgr = WMSRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.tile_size,
                self.proj_srs)
        elif self.params['driver'] == 'OnEarth_GRASS':
            req_mgr = OnEarthRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.proj_srs,
                self.cap_file)

        # get information about size in pixels and bounding box of raster, where
        # all tiles will be joined
        map_region = req_mgr.GetMapRegion()

        init = True
        temp_map = None

        fetch_try = 0

        # iterate through all tiles and download them
        while True:

            if fetch_try == 0:
                # get url for request the tile and information for placing the tile into
                # raster with other tiles
                tile = req_mgr.GetNextTile()

            # if last tile has been already downloaded
            if not tile:
                break

            # url for request the tile
            query_url = tile[0]

            # the tile size and offset in pixels for placing it into raster where tiles are joined
            tile_ref = tile[1]
            grass.debug(query_url, 2)
            try:
                wms_data = self._fetchDataFromServer(
                    query_url, self.params['username'],
                    self.params['password'])
            except (IOError, HTTPException) as e:
                if isinstance(e, HTTPError) and e.code == 401:
                    grass.fatal(
                        _("Authorization failed to '%s' when fetching data.\n%s") %
                        (self.params['url'], str(e)))
                else:
                    grass.fatal(
                        _("Unable to fetch data from: '%s'\n%s") %
                        (self.params['url'], str(e)))

            temp_tile = self._tempfile()

            # download data into temporary file
            try:
                with open(temp_tile, 'wb') as temp_tile_opened:
                    temp_tile_opened.write(wms_data.read())
            except IOError as e:
                # some servers are not happy with many subsequent requests for tiles done immediately,
                # if immediate request was unsuccessful, try to repeat the request after 5s and 30s breaks
                # TODO probably servers can return more kinds of errors related to this
                # problem (not only 104)
                if isinstance(e, socket.error) and e.errno == 104 and fetch_try < 2:
                    fetch_try += 1

                    if fetch_try == 1:
                        sleep_time = 5
                    elif fetch_try == 2:
                        sleep_time = 30

                    grass.warning(
                        _("Server refused to send data for a tile.\nRequest will be repeated after %d s.") %
                        sleep_time)

                    sleep(sleep_time)
                    continue
                else:
                    grass.fatal(_("Unable to write data into tempfile.\n%s") % str(e))

            fetch_try = 0

            tile_dataset_info = gdal.Open(temp_tile, gdal.GA_ReadOnly)
            if tile_dataset_info is None:
                # print error xml returned from server
                try:
                    with open(temp_tile, 'rb') as error_xml_opened:
                        err_str = error_xml_opened.read()
                except IOError as e:
                    grass.fatal(_("Unable to read data from tempfile.\n%s") % str(e))

                if err_str is not None:
                    grass.fatal(_("WMS server error: %s") % err_str)
                else:
                    grass.fatal(_("WMS server unknown error"))

            temp_tile_pct2rgb = None
            if tile_dataset_info.RasterCount == 1 and \
               tile_dataset_info.GetRasterBand(1).GetRasterColorTable() is not None:
                # expansion of color table into bands
                temp_tile_pct2rgb = self._tempfile()
                tile_dataset = self._pct2rgb(temp_tile, temp_tile_pct2rgb)
            else:
                tile_dataset = tile_dataset_info

            # initialization of temp_map_dataset, where all tiles are merged
            if init:
                temp_map = self._tempfile()

                driver = gdal.GetDriverByName(self.gdal_drv_format)
                metadata = driver.GetMetadata()
                if gdal.DCAP_CREATE not in metadata or \
                        metadata[gdal.DCAP_CREATE] == 'NO':
                    grass.fatal(_('Driver %s does not support Create() method') % self.gdal_drv_format)
                self.temp_map_bands_num = tile_dataset.RasterCount
                temp_map_dataset = driver.Create(temp_map, map_region['cols'], map_region['rows'],
                                                 self.temp_map_bands_num,
                                                 tile_dataset.GetRasterBand(1).DataType)
                init = False

            # tile is written into temp_map
            tile_to_temp_map = tile_dataset.ReadRaster(0, 0, tile_ref['sizeX'], tile_ref['sizeY'],
                                                       tile_ref['sizeX'], tile_ref['sizeY'])

            temp_map_dataset.WriteRaster(tile_ref['t_cols_offset'], tile_ref['t_rows_offset'],
                                         tile_ref['sizeX'], tile_ref['sizeY'], tile_to_temp_map)

            tile_dataset = None
            tile_dataset_info = None
            grass.try_remove(temp_tile)
            grass.try_remove(temp_tile_pct2rgb)

        if not temp_map:
            return temp_map
        # georeferencing and setting projection of temp_map
        projection = grass.read_command('g.proj',
                                        flags='wf',
                                        epsg=self.params['srs']).rstrip('\n')
        temp_map_dataset.SetProjection(projection)

        pixel_x_length = (map_region['maxx'] - map_region['minx']) / int(map_region['cols'])
        pixel_y_length = (map_region['miny'] - map_region['maxy']) / int(map_region['rows'])

        geo_transform = [
            map_region['minx'],
            pixel_x_length,
            0.0,
            map_region['maxy'],
            0.0,
            pixel_y_length]
        temp_map_dataset.SetGeoTransform(geo_transform)
        temp_map_dataset = None

        return temp_map
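For reference, the six-element list passed to SetGeoTransform() above follows the GDAL geotransform convention; a small worked sketch with illustrative values:

# x = minx + col * pixel_x_length   (no rotation terms, north-up image)
# y = maxy + row * pixel_y_length   (pixel_y_length is negative)
geo = [500000.0, 10.0, 0.0, 4650000.0, 0.0, -10.0]
col, row = 3, 2
x = geo[0] + col * geo[1] + row * geo[2]    # 500030.0
y = geo[3] + col * geo[4] + row * geo[5]    # 4649980.0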
Example #59
0
def main():
    from dateutil.parser import parse

    try:
        from pygbif import occurrences
        from pygbif import species
    except ImportError:
        grass.fatal(
            _("Cannot import pygbif (https://github.com/sckott/pygbif)"
              " library."
              " Please install it (pip install pygbif)"
              " or ensure that it is on path"
              " (use PYTHONPATH variable)."))

    # Parse input options
    output = options["output"]
    mask = options["mask"]
    species_maps = flags["i"]
    no_region_limit = flags["r"]
    no_topo = flags["b"]
    print_species = flags["p"]
    print_species_table = flags["t"]
    print_species_shell = flags["g"]
    print_occ_number = flags["o"]
    allow_no_geom = flags["n"]
    hasGeoIssue = flags["s"]
    taxa_list = options["taxa"].split(",")
    institutionCode = options["institutioncode"]
    basisofrecord = options["basisofrecord"]
    recordedby = options["recordedby"].split(",")
    date_from = options["date_from"]
    date_to = options["date_to"]
    country = options["country"]
    continent = options["continent"]
    rank = options["rank"]

    # Define static variable
    # Initialize cat
    cat = 0
    # Number of occurrences to fetch in one request
    chunk_size = 300
    # lat/lon proj string
    latlon_crs = [
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0.000,0.000,0.000",
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0,0,0,0,0,0,0",
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0.000,0.000,0.000 +type=crs",
    ]
    # List attributes available in Darwin Core
    # not all attributes are returned in each request
    # to avoid key errors when accessing the dictionary returned by pygbif
    # presence of DWC keys in the returned dictionary is checked using this list
    # The number of keys in this list has to be equal to the number of columns
    # in the attribute table and the attributes written for each occurrence
    dwc_keys = [
        "key",
        "taxonRank",
        "taxonKey",
        "taxonID",
        "scientificName",
        "species",
        "speciesKey",
        "genericName",
        "genus",
        "genusKey",
        "family",
        "familyKey",
        "order",
        "orderKey",
        "class",
        "classKey",
        "phylum",
        "phylumKey",
        "kingdom",
        "kingdomKey",
        "eventDate",
        "verbatimEventDate",
        "startDayOfYear",
        "endDayOfYear",
        "year",
        "month",
        "day",
        "occurrenceID",
        "occurrenceStatus",
        "occurrenceRemarks",
        "Habitat",
        "basisOfRecord",
        "preparations",
        "sex",
        "type",
        "locality",
        "verbatimLocality",
        "decimalLongitude",
        "decimalLatitude",
        "coordinateUncertaintyInMeters",
        "geodeticDatum",
        "higerGeography",
        "continent",
        "country",
        "countryCode",
        "stateProvince",
        "gbifID",
        "protocol",
        "identifier",
        "recordedBy",
        "identificationID",
        "identifiers",
        "dateIdentified",
        "modified",
        "institutionCode",
        "lastInterpreted",
        "lastParsed",
        "references",
        "relations",
        "catalogNumber",
        "occurrenceDetails",
        "datasetKey",
        "datasetName",
        "collectionCode",
        "rights",
        "rightsHolder",
        "license",
        "publishingOrgKey",
        "publishingCountry",
        "lastCrawled",
        "specificEpithet",
        "facts",
        "issues",
        "extensions",
        "language",
    ]
    # Deinfe columns for attribute table
    cols = [
        ("cat", "INTEGER PRIMARY KEY"),
        ("g_search", "varchar(100)"),
        ("g_key", "integer"),
        ("g_taxonrank", "varchar(50)"),
        ("g_taxonkey", "integer"),
        ("g_taxonid", "varchar(50)"),
        ("g_scientificname", "varchar(255)"),
        ("g_species", "varchar(255)"),
        ("g_specieskey", "integer"),
        ("g_genericname", "varchar(255)"),
        ("g_genus", "varchar(50)"),
        ("g_genuskey", "integer"),
        ("g_family", "varchar(50)"),
        ("g_familykey", "integer"),
        ("g_order", "varchar(50)"),
        ("g_orderkey", "integer"),
        ("g_class", "varchar(50)"),
        ("g_classkey", "integer"),
        ("g_phylum", "varchar(50)"),
        ("g_phylumkey", "integer"),
        ("g_kingdom", "varchar(50)"),
        ("g_kingdomkey", "integer"),
        ("g_eventdate", "text"),
        ("g_verbatimeventdate", "varchar(50)"),
        ("g_startDayOfYear", "integer"),
        ("g_endDayOfYear", "integer"),
        ("g_year", "integer"),
        ("g_month", "integer"),
        ("g_day", "integer"),
        ("g_occurrenceid", "varchar(255)"),
        ("g_occurrenceStatus", "varchar(50)"),
        ("g_occurrenceRemarks", "varchar(50)"),
        ("g_Habitat", "varchar(50)"),
        ("g_basisofrecord", "varchar(50)"),
        ("g_preparations", "varchar(50)"),
        ("g_sex", "varchar(50)"),
        ("g_type", "varchar(50)"),
        ("g_locality", "varchar(255)"),
        ("g_verbatimlocality", "varchar(255)"),
        ("g_decimallongitude", "double precision"),
        ("g_decimallatitude", "double precision"),
        ("g_coordinateUncertaintyInMeters", "double precision"),
        ("g_geodeticdatum", "varchar(50)"),
        ("g_higerGeography", "varchar(255)"),
        ("g_continent", "varchar(50)"),
        ("g_country", "varchar(50)"),
        ("g_countryCode", "varchar(50)"),
        ("g_stateProvince", "varchar(50)"),
        ("g_gbifid", "varchar(255)"),
        ("g_protocol", "varchar(255)"),
        ("g_identifier", "varchar(50)"),
        ("g_recordedby", "varchar(255)"),
        ("g_identificationid", "varchar(255)"),
        ("g_identifiers", "text"),
        ("g_dateidentified", "text"),
        ("g_modified", "text"),
        ("g_institutioncode", "varchar(50)"),
        ("g_lastinterpreted", "text"),
        ("g_lastparsed", "text"),
        ("g_references", "varchar(255)"),
        ("g_relations", "text"),
        ("g_catalognumber", "varchar(50)"),
        ("g_occurrencedetails", "text"),
        ("g_datasetkey", "varchar(50)"),
        ("g_datasetname", "varchar(255)"),
        ("g_collectioncode", "varchar(50)"),
        ("g_rights", "varchar(255)"),
        ("g_rightsholder", "varchar(255)"),
        ("g_license", "varchar(50)"),
        ("g_publishingorgkey", "varchar(50)"),
        ("g_publishingcountry", "varchar(50)"),
        ("g_lastcrawled", "text"),
        ("g_specificepithet", "varchar(50)"),
        ("g_facts", "text"),
        ("g_issues", "text"),
        ("g_extensions", "text"),
        ("g_language", "varchar(50)"),
    ]

    # maybe no longer required in Python3
    set_output_encoding()
    # Set temporal filter if requested by user
    # Initialize eventDate filter
    eventDate = None
    # Check if date from is compatible (ISO compliant)
    if date_from:
        try:
            parse(date_from)
        except:
            grass.fatal("Invalid start date provided")

        if date_from and not date_to:
            eventDate = "{}".format(date_from)
    # Check if date to is compatible (ISO compliant)
    if date_to:
        try:
            parse(date_to)
        except:
            grass.fatal("Invalid end date provided")
        # Check if date_to is after date_from; this requires a start date
        if not date_from:
            grass.fatal("Please also provide a start date when setting an end date")
        if parse(date_from) < parse(date_to):
            eventDate = "{},{}".format(date_from, date_to)
        else:
            grass.fatal(
                "Invalid date range: End date has to be after start date!")
    # Set filter on basisOfRecord if requested by user
    if basisofrecord == "ALL":
        basisOfRecord = None
    else:
        basisOfRecord = basisofrecord
    # Allow also occurrences with spatial issues if requested by user
    hasGeospatialIssue = False
    if hasGeoIssue:
        hasGeospatialIssue = True
    # Allow also occurrences without coordinates if requested by user
    hasCoordinate = True
    if allow_no_geom:
        hasCoordinate = False

    # Set reprojection parameters
    # Set target projection of current LOCATION
    proj_info = grass.parse_command("g.proj", flags="g")
    target_crs = grass.read_command("g.proj", flags="fj").rstrip()
    target = osr.SpatialReference()

    # Prefer EPSG CRS definitions
    if proj_info["epsg"]:
        target.ImportFromEPSG(int(proj_info["epsg"]))
    else:
        target.ImportFromProj4(target_crs)

    # GDAL >= 3 swaps x and y axis, see: github.com/OSGeo/gdal/issues/1546
    if int(gdal_version[0]) >= 3:
        target.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    if target_crs == "XY location (unprojected)":
        grass.fatal("Sorry, XY locations are not supported!")

    # Set source projection from GBIF
    source = osr.SpatialReference()
    source.ImportFromEPSG(4326)
    # GDAL >= 3 swaps x and y axis, see: github.com/OSGeo/gdal/issues/1546
    if int(gdal_version[0]) >= 3:
        source.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    if target_crs not in latlon_crs:
        transform = osr.CoordinateTransformation(source, target)
        reverse_transform = osr.CoordinateTransformation(target, source)

    # Generate WKT polygon to use for spatial filtering if requested
    if mask:
        if len(mask.split("@")) == 2:
            m = VectorTopo(mask.split("@")[0], mapset=mask.split("@")[1])
        else:
            m = VectorTopo(mask)
        if not m.exist():
            grass.fatal("Could not find vector map <{}>".format(mask))
        m.open("r")
        if not m.is_open():
            grass.fatal("Could not open vector map <{}>".format(mask))

        # Use the area itself as spatial filter if the map contains exactly
        # one area, otherwise fall back to the map's bounding box
        if m.number_of("areas") == 1:
            region_pol = [area.to_wkt() for area in m.viter("areas")][0]
        else:
            bbox = (str(m.bbox()).replace("Bbox(", "").replace(
                " ", "").rstrip(")").split(","))
            region_pol = "POLYGON (({0} {1}, {0} {3}, {2} {3}, {2} {1}, {0} {1}))".format(
                bbox[2], bbox[0], bbox[3], bbox[1])
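            # e.g. a bbox with n=60, s=50, e=30, w=20 yields (illustrative):
            # POLYGON ((30 60, 30 50, 20 50, 20 60, 30 60))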
        m.close()
    else:
        # Do not limit import spatially if LOCATION is able to take global data
        if no_region_limit:
            if target_crs not in latlon_crs:
                grass.fatal("Import of data from outside the current region is"
                            "only supported in a WGS84 location!")
            region_pol = None
        else:
            # Limit import spatially to current region
            # if LOCATION is !NOT! able to take global data
            # to avoid projection ERRORS
            region = grass.parse_command("g.region", flags="g")
            region_pol = "POLYGON (({0} {1},{0} {3},{2} {3},{2} {1},{0} {1}))".format(
                region["e"], region["n"], region["w"], region["s"])

    # Do not reproject in latlon LOCATIONS
    if target_crs not in latlon_crs:
        pol = ogr.CreateGeometryFromWkt(region_pol)
        pol.Transform(reverse_transform)
        pol = pol.ExportToWkt()
    else:
        pol = region_pol

    # Create output map if not output maps for each species are requested
    if (not species_maps and not print_species and not print_species_shell
            and not print_occ_number and not print_species_table):
        mapname = output
        new = Vector(mapname)
        new.open("w", tab_name=mapname, tab_cols=cols)
        cat = 0  # incremented before each write, so categories start at 1

    # Import data for each species
    for s in taxa_list:
        # Get the taxon key unless the taxon key itself was provided as input
        try:
            key = int(s)
        except:
            try:
                species_match = species.name_backbone(s,
                                                      rank=rank,
                                                      strict=False,
                                                      verbose=True)
                key = species_match["usageKey"]
            except:
                grass.error(
                    "Data request for taxon {} failed. Are you online?".format(
                        s))
                continue

        # Return matching taxon and alternatives and exit
        if print_species:
            print("Matching taxon for {} is:".format(s))
            print("{} {}".format(species_match["scientificName"],
                                 species_match["status"]))
            if "alternatives" in list(species_match.keys()):
                print("Alternative matches might be: {}".format(s))
                for m in species_match["alternatives"]:
                    print("{} {}".format(m["scientificName"], m["status"]))
            else:
                print("No alternatives found for the given taxon")
            continue
        if print_species_shell:
            print("match={}".format(species_match["scientificName"]))
            if "alternatives" in list(species_match.keys()):
                alternatives = []
                for m in species_match["alternatives"]:
                    alternatives.append(m["scientificName"])
                print("alternatives={}".format(",".join(alternatives)))
            continue
        if print_species_table:
            if "alternatives" in list(species_match.keys()):
                if len(species_match["alternatives"]) == 0:
                    print("{0}|{1}|{2}|".format(
                        s, key, species_match["scientificName"]))
                else:
                    alternatives = []
                    for m in species_match["alternatives"]:
                        alternatives.append(m["scientificName"])
                    print("{0}|{1}|{2}|{3}".format(
                        s,
                        key,
                        species_match["scientificName"],
                        ",".join(alternatives),
                    ))
            continue
        try:
            returns_n = occurrences.search(
                taxonKey=key,
                hasGeospatialIssue=hasGeospatialIssue,
                hasCoordinate=hasCoordinate,
                institutionCode=institutionCode,
                basisOfRecord=basisOfRecord,
                recordedBy=recordedby,
                eventDate=eventDate,
                continent=continent,
                country=country,
                geometry=pol,
                limit=1,
            )["count"]
        except:
            grass.error(
                "Data request for taxon {} failed. Are you online?".format(s))
            returns_n = 0

        # Exit if search does not give a return
        # Print only number of returns for the given search and exit
        if print_occ_number:
            print("Found {0} occurrences for taxon {1}...".format(
                returns_n, s))
            continue
        elif returns_n <= 0:
            grass.warning(
                "No occurrences for current search for taxon {0}...".format(s))
            continue
        elif returns_n >= 200000:
            grass.warning(
                "Your search for {1} returns {0} records.\n"
                "Unfortunately, the GBIF search API is limited to 200,000 records per request.\n"
                "The download will be incomplete. Please consider splitting up your search."
                .format(returns_n, s))

        # Get the number of chunks to download
        chunks = int(math.ceil(returns_n / float(chunk_size)))
        grass.verbose("Downloading {0} occurrences for taxon {1}...".format(
            returns_n, s))
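        # e.g. 750 matching records with chunk_size=300 give
        # int(math.ceil(750 / 300.0)) = 3 chunks (offsets 0, 300 and 600)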

        # Create a map for each species if requested using map name as suffix
        if species_maps:
            mapname = "{}_{}".format(s.replace(" ", "_"), output)

            new = Vector(mapname)
            new.open("w", tab_name=mapname, tab_cols=cols)
            cat = 0

        # Download the data from GBIF
        for c in range(chunks):
            # Define offset
            offset = c * chunk_size
            # Adjust chunk_size to the hard limit of 200,000 records in GBIF API
            # if necessary
            if offset + chunk_size >= 200000:
                chunk_size = 200000 - offset
            # Get the returns for the next chunk
            returns = occurrences.search(
                taxonKey=key,
                hasGeospatialIssue=hasGeospatialIssue,
                hasCoordinate=hasCoordinate,
                institutionCode=institutionCode,
                basisOfRecord=basisOfRecord,
                recordedBy=recordedby,
                eventDate=eventDate,
                continent=continent,
                country=country,
                geometry=pol,
                limit=chunk_size,
                offset=offset,
            )

            # Write the returned data to map and attribute table
            for res in returns["results"]:
                if target_crs not in latlon_crs:
                    point = ogr.CreateGeometryFromWkt("POINT ({} {})".format(
                        res["decimalLongitude"], res["decimalLatitude"]))
                    point.Transform(transform)
                    x = point.GetX()
                    y = point.GetY()
                else:
                    x = res["decimalLongitude"]
                    y = res["decimalLatitude"]

                point = Point(x, y)

                for k in dwc_keys:
                    if k not in list(res.keys()):
                        res.update({k: None})

                cat = cat + 1
                new.write(
                    point,
                    cat=cat,
                    attrs=(
                        "{}".format(s),
                        res["key"],
                        res["taxonRank"],
                        res["taxonKey"],
                        res["taxonID"],
                        res["scientificName"],
                        res["species"],
                        res["speciesKey"],
                        res["genericName"],
                        res["genus"],
                        res["genusKey"],
                        res["family"],
                        res["familyKey"],
                        res["order"],
                        res["orderKey"],
                        res["class"],
                        res["classKey"],
                        res["phylum"],
                        res["phylumKey"],
                        res["kingdom"],
                        res["kingdomKey"],
                        "{}".format(res["eventDate"])
                        if res["eventDate"] else None,
                        "{}".format(res["verbatimEventDate"])
                        if res["verbatimEventDate"] else None,
                        res["startDayOfYear"],
                        res["endDayOfYear"],
                        res["year"],
                        res["month"],
                        res["day"],
                        res["occurrenceID"],
                        res["occurrenceStatus"],
                        res["occurrenceRemarks"],
                        res["Habitat"],
                        res["basisOfRecord"],
                        res["preparations"],
                        res["sex"],
                        res["type"],
                        res["locality"],
                        res["verbatimLocality"],
                        res["decimalLongitude"],
                        res["decimalLatitude"],
                        res["coordinateUncertaintyInMeters"],
                        res["geodeticDatum"],
                        res["higerGeography"],
                        res["continent"],
                        res["country"],
                        res["countryCode"],
                        res["stateProvince"],
                        res["gbifID"],
                        res["protocol"],
                        res["identifier"],
                        res["recordedBy"],
                        res["identificationID"],
                        ",".join(res["identifiers"]),
                        "{}".format(res["dateIdentified"])
                        if res["dateIdentified"] else None,
                        "{}".format(res["modified"])
                        if res["modified"] else None,
                        res["institutionCode"],
                        "{}".format(res["lastInterpreted"])
                        if res["lastInterpreted"] else None,
                        "{}".format(res["lastParsed"])
                        if res["lastParsed"] else None,
                        res["references"],
                        ",".join(res["relations"]),
                        res["catalogNumber"],
                        "{}".format(res["occurrenceDetails"])
                        if res["occurrenceDetails"] else None,
                        res["datasetKey"],
                        res["datasetName"],
                        res["collectionCode"],
                        res["rights"],
                        res["rightsHolder"],
                        res["license"],
                        res["publishingOrgKey"],
                        res["publishingCountry"],
                        "{}".format(res["lastCrawled"])
                        if res["lastCrawled"] else None,
                        res["specificEpithet"],
                        ",".join(res["facts"]),
                        ",".join(res["issues"]),
                        ",".join(res["extensions"]),
                        res["language"],
                    ),
                )

        # Close the current map if a map for each species is requested
        if species_maps:
            new.table.conn.commit()
            new.close()
            if not no_topo:
                grass.run_command("v.build", map=mapname, option="build")

            # Write history to map
            grass.vector_history(mapname)

    # Close the output map if not a map for each species is requested
    if (not species_maps and not print_species and not print_species_shell
            and not print_occ_number and not print_species_table):
        new.table.conn.commit()
        new.close()
        if not no_topo:
            grass.run_command("v.build", map=mapname, option="build")

        # Write history to map
        grass.vector_history(mapname)
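A minimal stand-alone sketch of the pygbif calls this example builds on (the taxon name and the flow are illustrative, not part of the module above):

from pygbif import occurrences, species

match = species.name_backbone("Fagus sylvatica", strict=False, verbose=True)
key = match["usageKey"]                      # GBIF backbone taxon key
n = occurrences.search(taxonKey=key, hasCoordinate=True, limit=1)["count"]
print("GBIF holds {} georeferenced records".format(n))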
Example #60
0
    def _computeRequestData(self, bbox, tl_corner, tile_span, tile_size,
                            mat_num_bbox):
        """!Initialize data needed for iteration through tiles. Used by WMTS_GRASS and OnEarth_GRASS drivers.
        """
        epsilon = 1e-15

        # request data bbox specified in row and col number
        self.t_num_bbox = {}

        self.t_num_bbox['min_col'] = int(
            floor((bbox['minx'] - tl_corner['minx']) / tile_span['x'] +
                  epsilon))
        self.t_num_bbox['max_col'] = int(
            floor((bbox['maxx'] - tl_corner['minx']) / tile_span['x'] -
                  epsilon))

        self.t_num_bbox['min_row'] = int(
            floor((tl_corner['maxy'] - bbox['maxy']) / tile_span['y'] +
                  epsilon))
        self.t_num_bbox['max_row'] = int(
            floor((tl_corner['maxy'] - bbox['miny']) / tile_span['y'] -
                  epsilon))

        # Does the required bbox intersect the bbox of data available on the server?
        self.intersects = False
        for col in ['min_col', 'max_col']:
            for row in ['min_row', 'max_row']:
                if (mat_num_bbox['min_row'] <= self.t_num_bbox[row] <= mat_num_bbox['max_row']) and \
                   (mat_num_bbox['min_col'] <= self.t_num_bbox[col] <= mat_num_bbox['max_col']):
                    self.intersects = True

        if not self.intersects:
            grass.warning(_('Region is out of server data extent.'))
            self.map_region = None
            return

        # crop request bbox to server data bbox extent
        if self.t_num_bbox['min_col'] < (mat_num_bbox['min_col']):
            self.t_num_bbox['min_col'] = int(mat_num_bbox['min_col'])

        if self.t_num_bbox['max_col'] > (mat_num_bbox['max_col']):
            self.t_num_bbox['max_col'] = int(mat_num_bbox['max_col'])

        if self.t_num_bbox['min_row'] < (mat_num_bbox['min_row']):
            self.t_num_bbox['min_row'] = int(mat_num_bbox['min_row'])

        if self.t_num_bbox['max_row'] > (mat_num_bbox['max_row']):
            self.t_num_bbox['max_row'] = int(mat_num_bbox['max_row'])

        num_tiles = (self.t_num_bbox['max_col'] - self.t_num_bbox['min_col'] +
                     1) * (self.t_num_bbox['max_row'] -
                           self.t_num_bbox['min_row'] + 1)
        grass.message(
            _('Fetching %d tiles with %d x %d pixel size per tile...') %
            (num_tiles, tile_size['x'], tile_size['y']))

        # georeference of raster, where tiles will be merged
        self.map_region = {}
        self.map_region['minx'] = self.t_num_bbox['min_col'] * tile_span['x'] + tl_corner['minx']
        self.map_region['maxy'] = tl_corner['maxy'] - self.t_num_bbox['min_row'] * tile_span['y']

        self.map_region['maxx'] = (self.t_num_bbox['max_col'] + 1) * tile_span['x'] + tl_corner['minx']
        self.map_region['miny'] = tl_corner['maxy'] - (self.t_num_bbox['max_row'] + 1) * tile_span['y']

        # size of raster, where tiles will be merged
        self.map_region['cols'] = int(
            tile_size['x'] *
            (self.t_num_bbox['max_col'] - self.t_num_bbox['min_col'] + 1))
        self.map_region['rows'] = int(
            tile_size['y'] *
            (self.t_num_bbox['max_row'] - self.t_num_bbox['min_row'] + 1))

        # hold information about current column and row during iteration
        self.i_col = self.t_num_bbox['min_col']
        self.i_row = self.t_num_bbox['min_row']

        # bbox for first tile request
        self.query_bbox = {
            'minx': tl_corner['minx'],
            'maxy': tl_corner['maxy'],
            'maxx': tl_corner['minx'] + tile_span['x'],
            'miny': tl_corner['maxy'] - tile_span['y'],
        }

        self.tile_ref = {'sizeX': tile_size['x'], 'sizeY': tile_size['y']}
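A worked example of the tile-index arithmetic above, with illustrative numbers:

from math import floor

tl_corner = {'minx': 0.0, 'maxy': 100.0}    # top-left corner of the tile matrix
tile_span = {'x': 10.0, 'y': 10.0}          # georeferenced size of one tile
bbox = {'minx': 12.0, 'maxx': 38.0, 'miny': 55.0, 'maxy': 95.0}
eps = 1e-15

min_col = int(floor((bbox['minx'] - tl_corner['minx']) / tile_span['x'] + eps))  # 1
max_col = int(floor((bbox['maxx'] - tl_corner['minx']) / tile_span['x'] - eps))  # 3
min_row = int(floor((tl_corner['maxy'] - bbox['maxy']) / tile_span['y'] + eps))  # 0
max_row = int(floor((tl_corner['maxy'] - bbox['miny']) / tile_span['y'] - eps))  # 4
# the request covers (3 - 1 + 1) * (4 - 0 + 1) = 15 tiles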